Operative PostDoc
Human–computer interaction; computational aesthetics
- sergejsto@googlemail.com
- HiB, room 308M1
Supervised by: Stefan Bruckner
Sergej Stoppel finished his master's degree in Mathematics in 2014. He worked as a researcher at the Fraunhofer Institute for Production Systems and Design Technology from 2010 until 2014.
Sergej defended his PhD thesis "User-Centric Parameter Specification for Interactive Virtual and Physical Visual Representations" in October 2018.
Currently Sergej is a postdoc at the visualization group at the University of Bergen. His main research interests lie in visual data science.
Publications
2022
@inproceedings{Trautner-2022-HCP,
  author     = {Trautner, Thomas and Sbardellati, Maximilian and Stoppel, Sergej and Bruckner, Stefan},
  title      = {{Honeycomb Plots}: Visual Enhancements for Hexagonal Maps},
  booktitle  = {Proc. of VMV 2022: Vision, Modeling, and Visualization},
  editor     = {Bender, Jan and Botsch, Mario and Keim, Daniel A.},
  pages      = {65--73},
  year       = {2022},
  publisher  = {The Eurographics Association},
  isbn       = {978-3-03868-189-2},
  doi        = {10.2312/vmv.20221205},
  abstract   = {Aggregation through binning is a commonly used technique for visualizing large, dense, and overplotted two-dimensional data sets. However, aggregation can hide nuanced data-distribution features and complicates the display of multiple data-dependent variables, since color mapping is the primary means of encoding. In this paper, we present novel techniques for enhancing hexplots with spatialization cues while avoiding common disadvantages of three-dimensional visualizations. In particular, we focus on techniques relying on preattentive features that exploit shading and shape cues to emphasize relative value differences. Furthermore, we introduce a novel visual encoding that conveys information about the data distributions or trends within individual tiles. Based on multiple usage examples from different domains and real-world scenarios, we generate expressive visualizations that increase the information content of classic hexplots and validate their effectiveness in a user study.},
  pdf        = {pdfs/Trautner-2022-HCP.pdf},
  thumbnails = {images/Trautner-2022-HCP-thumb.png},
  images     = {images/Trautner-2022-HCP-thumb.png},
  youtube    = {https://youtu.be/mU7QFVP3yKQ},
  git        = {https://github.com/TTrautner/HoneycombPlots}
}
2020
@article{Trautner-2020-SunspotPlots,
  author     = {Trautner, T. and Bolte, F. and Stoppel, S. and Bruckner, S.},
  title      = {{Sunspot Plots}: Model-based Structure Enhancement for Dense Scatter Plots},
  journal    = {Computer Graphics Forum},
  volume     = {39},
  number     = {3},
  pages      = {551--563},
  year       = {2020},
  keywords   = {information visualization, scatterplots, kernel density estimation},
  doi        = {10.1111/cgf.14001},
  abstract   = {Scatter plots are a powerful and well-established technique for visualizing the relationships between two variables as a collection of discrete points. However, especially when dealing with large and dense data, scatter plots often exhibit problems such as overplotting, making the data interpretation arduous. Density plots are able to overcome these limitations in highly populated regions, but fail to provide accurate information of individual data points. This is particularly problematic in sparse regions where the density estimate may not provide a good representation of the underlying data. In this paper, we present sunspot plots, a visualization technique that communicates dense data as a continuous data distribution, while preserving the discrete nature of data samples in sparsely populated areas. We furthermore demonstrate the advantages of our approach on typical failure cases of scatter plots within synthetic and real-world data sets and validate its effectiveness in a user study.},
  pdf        = {pdfs/Trautner_2020_SunspotPlots_PDF.pdf},
  thumbnails = {images/Trautner_2020_SunspotPlots_thumb.png},
  images     = {images/Trautner_2020_SunspotPlots_thumb.png},
  vid        = {vids/Trautner_2020_SunspotPlots_video.mp4},
  youtube    = {https://youtu.be/G6l-y6YGjzQ},
  project    = {MetaVis},
  internal-note = {NOTE(review): author given names abbreviated; expand to full names if known (cf. Trautner-2022-HCP)}
}
@article{Solteszova-2019-MLT,
  author     = {Solteszova, V. and Smit, N. N. and Stoppel, S. and Gr{\"u}ner, R. and Bruckner, S.},
  title      = {Memento: Localized Time-Warping for Spatio-Temporal Selection},
  journal    = {Computer Graphics Forum},
  volume     = {39},
  number     = {1},
  pages      = {231--243},
  year       = {2020},
  keywords   = {interaction, temporal data, visualization, spatio-temporal projection},
  doi        = {10.1111/cgf.13763},
  abstract   = {Interaction techniques for temporal data are often focused on affecting the spatial aspects of the data, for instance through the use of transfer functions, camera navigation or clipping planes. However, the temporal aspect of the data interaction is often neglected. The temporal component is either visualized as individual time steps, an animation or a static summary over the temporal domain. When dealing with streaming data, these techniques are unable to cope with the task of re-viewing an interesting local spatio-temporal event, while continuing to observe the rest of the feed. We propose a novel technique that allows users to interactively specify areas of interest in the spatio-temporal domain. By employing a time-warp function, we are able to slow down time, freeze time or even travel back in time, around spatio-temporal events of interest. The combination of such a (pre-defined) time-warp function and brushing directly in the data to select regions of interest allows for a detailed review of temporally and spatially localized events, while maintaining an overview of the global spatio-temporal data. We demonstrate the utility of our technique with several usage scenarios.},
  images     = {images/Solteszova-2019-MLT.jpg},
  thumbnails = {images/Solteszova-2019-MLT-1.jpg},
  pdf        = {pdfs/Solteszova-2019-MLT.pdf},
  project    = {MetaVis,ttmedvis,VIDI}
}
2019
@article{Stoppel-2019-LFL,
  author     = {Stoppel, Sergej and Bruckner, Stefan},
  title      = {{LinesLab}: A Flexible Low-Cost Approach for the Generation of Physical Monochrome Art},
  journal    = {Computer Graphics Forum},
  year       = {2019},
  publisher  = {The Eurographics Association and John Wiley and Sons Ltd.},
  doi        = {10.1111/cgf.13609},
  abstract   = {The desire for the physical generation of computer art has seen a significant body of research that has resulted in sophisticated robots and painting machines, together with specialized algorithms mimicking particular artistic techniques. The resulting setups are often expensive and complex, making them unavailable for recreational and hobbyist use. In recent years, however, a new class of affordable low-cost plotters and cutting machines has reached the market. In this paper, we present a novel system for the physical generation of line and cut-out art based on digital images, targeted at such off-the-shelf devices. Our approach uses a meta-optimization process to generate results that represent the tonal content of a digital image while conforming to the physical and mechanical constraints of home-use devices. By flexibly combining basic sets of positional and shape encodings, we are able to recreate a wide range of artistic styles. Furthermore, our system optimizes the output in terms of visual perception based on the desired viewing distance, while remaining scalable with respect to the medium size.},
  pdf        = {pdfs/Stoppel-2019-LFL.pdf},
  images     = {images/Stoppel-2019-LFL.jpg},
  thumbnails = {images/Stoppel-2019-LFL.png},
  youtube    = {https://www.youtube.com/watch?v=WdZJmU6fOAY},
  project    = {MetaVis},
  internal-note = {NOTE(review): volume/number/pages missing for this CGF article -- look up and add}
}
@article{Stoppel-2019-FVI,
  author     = {Stoppel, Sergej and Erga, Magnus Paulson and Bruckner, Stefan},
  title      = {{Firefly}: Virtual Illumination Drones for Interactive Visualization},
  journal    = {IEEE Transactions on Visualization and Computer Graphics},
  year       = {2019},
  volume     = {25},
  pages      = {1204--1213},
  doi        = {10.1109/TVCG.2018.2864656},
  abstract   = {Light specification in three dimensional scenes is a complex problem and several approaches have been presented that aim to automate this process. However, there are many scenarios where a static light setup is insufficient, as the scene content and camera position may change. Simultaneous manual control over the camera and light position imposes a high cognitive load on the user. To address this challenge, we introduce a novel approach for automatic scene illumination with Fireflies. Fireflies are intelligent virtual light drones that illuminate the scene by traveling on a closed path. The Firefly path automatically adapts to changes in the scene based on an outcome-oriented energy function. To achieve interactive performance, we employ a parallel rendering pipeline for the light path evaluations. We provide a catalog of energy functions for various application scenarios and discuss the applicability of our method on several examples.},
  pdf        = {pdfs/VIS2018-Firefly.pdf},
  vid        = {vids/FinalVideo.mp4},
  images     = {images/Teaser.png},
  thumbnails = {images/HeadRightCroppedThumbnail.png},
  project    = {MetaVis}
}
2018
@phdthesis{PhDThesis2018Stoppel,
  author = {Stoppel, Sergej},
  title  = {User-Centric Parameter Specification for Interactive Virtual and Physical Visual Representations},
  school = {Universitetet i Bergen},
  year   = {2018}
}
@inproceedings{Stoppel-2018-SSW,
  author     = {Stoppel, Sergej and Bruckner, Stefan},
  title      = {Smart Surrogate Widgets for Direct Volume Manipulation},
  booktitle  = {Proceedings of IEEE PacificVis 2018},
  year       = {2018},
  pages      = {36--45},
  month      = apr,
  doi        = {10.1109/PacificVis.2018.00014},
  abstract   = {Interaction is an essential aspect in volume visualization, yet common manipulation tools such as bounding boxes or clipping plane widgets provide rather crude tools as they neglect the complex structure of the underlying data. In this paper, we introduce a novel volume interaction approach based on smart widgets that are automatically placed directly into the data in a visibility-driven manner. By adapting to what the user actually sees, they act as proxies that allow for goal-oriented modifications while still providing an intuitive set of simple operations that is easy to control. In particular, our method is well-suited for direct manipulation scenarios such as touch screens, where traditional user interface elements commonly exhibit limited utility. To evaluate our approach we conducted a qualitative user study with nine participants with various backgrounds.},
  pdf        = {pdfs/Stoppel-2018-SSW.pdf},
  images     = {images/Stoppel-2018-SSW.jpg},
  thumbnails = {images/Stoppel-2018-SSW.png},
  youtube    = {https://www.youtube.com/watch?v=wMRw-W0SrLk},
  event      = {IEEE PacificVis 2018},
  keywords   = {smart interfaces, volume manipulation, volume visualization},
  project    = {MetaVis}
}
2017
@article{Stoppel-2017-VPI,
  author     = {Stoppel, Sergej and Bruckner, Stefan},
  title      = {{Vol{$^2$}velle}: Printable Interactive Volume Visualization},
  journal    = {IEEE Transactions on Visualization and Computer Graphics},
  year       = {2017},
  volume     = {23},
  number     = {1},
  pages      = {861--870},
  month      = jan,
  doi        = {10.1109/TVCG.2016.2599211},
  abstract   = {Interaction is an indispensable aspect of data visualization. The presentation of volumetric data, in particular, often significantly benefits from interactive manipulation of parameters such as transfer functions, rendering styles, or clipping planes. However, when we want to create hardcopies of such visualizations, this essential aspect is lost. In this paper, we present a novel approach for creating hardcopies of volume visualizations which preserves a certain degree of interactivity. We present a method for automatically generating Volvelles, printable tangible wheel charts that can be manipulated to explore different parameter settings. Our interactive system allows the flexible mapping of arbitrary visualization parameters and supports advanced features such as linked views. The resulting designs can be easily reproduced using a standard printer and assembled within a few minutes.},
  pdf        = {pdfs/Stoppel-2017-VPI.pdf},
  images     = {images/Stoppel-2017-VPI.jpg},
  thumbnails = {images/Stoppel-2017-VPI.png},
  youtube    = {https://www.youtube.com/watch?v=Z1K8t-FCiXI},
  event      = {IEEE SciVis 2016},
  keywords   = {physical visualization, interaction, volume visualization, illustrative visualization},
  location   = {Baltimore, USA}
}