@MISC{1300Schikorra2011,
  AUTHOR = {Daniel Schikorra},
  TITLE = {Untersuchung und Integration von Mensch-Maschine-Schnittstellen für immersive 3D Video Betrachtung},
  SCHOOL = {Technische Universit{\"a}t Berlin},
  YEAR = {2011},
  MONTH = mar,
  PDF = {http://elvera.nue.tu-berlin.de/files/1300Schikorra2011.pdf},
  ABSTRACT = {The thesis deals with innovative input devices and their integration into human-machine interfaces for an immersive 3D video experience. Through perspectively correct image synthesis, a viewer of a free-viewpoint video who moves his head gains additional depth cues from motion parallax beyond those provided by stereoscopy alone. By connecting a 3D mouse, a scene can be navigated intuitively and experienced interactively. The integration of a haptic interface lets the viewer physically touch an object's surface structure and shape, providing an additional sense through which depth information can be obtained that might otherwise be difficult or impossible to perceive visually. A framework for displaying dynamic 3D point clouds was created and extended with motion control via the SpaceNavigator, head tracking via the TrackIR, and haptic feedback via the Novint Falcon. OpenGL camera models are presented and compared with respect to representing spatial orientation with quaternions, vectors, and matrices. The result is an experimental immersive 3D viewer that processes the data from the input devices and converts them into OpenGL camera commands, additionally augments the video stream with haptic information, and allows scene relighting with a 3D mouse, the SpaceNavigator.}
}