% Categories: INTERMEDIA
% Encoding: utf-8

@InCollection{Waltl2012_UserCentricraey,
  author = {Waltl, Markus and Raffelsberger, Christian and Timmerer, Christian and Hellwagner, Hermann},
  booktitle = {User Centric Media},
  publisher = {Springer Verlag},
  title = {Metadata-Based Content Management and Sharing System for Improved User Experience},
  year = {2012},
  address = {Berlin, Heidelberg, New York},
  editor = {Alvarez, Federico and Costa, Cristina},
  month = {dec},
  pages = {132-140},
  series = {Lecture Notes of the Institute for Computer Sciences, Social Informatics and Telecommunications Engineering},
  volume = {60},
  doi = {10.1007/978-3-642-35145-7_17},
  keywords = {Metadata; UPnP A/V; Content Management; Content Sharing; MPEG-V; Sensory Effects; User Experience},
  language = {EN},
  url = {http://dx.doi.org/10.1007/978-3-642-35145-7_17}
}

@TechReport{Waltl2011_TR,
  author = {Waltl, Markus and Timmerer, Christian and Rainer, Benjamin and Hellwagner, Hermann},
  institution = {Alpen-Adria-Universität Klagenfurt},
  title = {Sensory Effects for Ambient Experiences in the World Wide Web},
  year = {2011},
  address = {Klagenfurt, Austria},
  month = {jul},
  number = {TR/ITEC/11/1.13},
  abstract = {More and more content in various formats becomes available via the World Wide Web (WWW). Currently available Web browsers are able to access and interpret these contents (i.e., Web videos, text, image, and audio). These contents stimulate only senses like audition or vision. Recently, it has been proposed to stimulate also other senses while consuming multimedia content through so-called sensory effects. These sensory effects aim to enhance the ambient experience by providing effects such as light, wind, vibration, etc. The effects are represented as Sensory Effect Metadata (SEM) which is associated to multimedia content and is rendered on devices like fans, vibration chairs, or lamps. In this paper we present a plug-in for the Mozilla Firefox browser which is able to render such sensory effects that are provided via the WWW. Furthermore, the paper describes two user studies conducted with the plug-in and presents the results achieved.},
  keywords = {World Wide Web, MPEG-V, Subjective Quality Assessment, Sensory Effects, Quality of Multimedia Experience},
  language = {EN},
  pages = {12},
  pdf = {https://www.itec.aau.at/bib/files/Waltl_TR11113.pdf},
  publisher = {Alpen-Adria Universität Klagenfurt}
}

@InProceedings{Waltl2010_WIAMIS_SensoryEffects,
  author = {Waltl, Markus and Timmerer, Christian and Hellwagner, Hermann},
  booktitle = {Proceedings of the 11th International Workshop on Image Analysis for Multimedia Interactive Services (WIAMIS'10)},
  title = {Increasing the User Experience of Multimedia Presentations with Sensory Effects},
  year = {2010},
  address = {Los Alamitos, CA, USA},
  editor = {Leonardi, Riccardo and Migliorati, Pierangelo and Cavallaro, Andrea},
  month = {apr},
  pages = {1-4},
  publisher = {IEEE},
  abstract = {The term Universal Multimedia Experience (UME) has gained momentum and is well recognized within the research community. As this approach puts the user into the center stage, additional complexity is added to the overall quality assessment problem which calls for a scientific framework to capture, measure, quantify, judge, and explain the user experience. In previous work we have proposed the annotation of multimedia content with sensory effect metadata that can be used to stimulate also other senses than vision or audition.
  In this paper we report first results obtained from subjective tests in the area of sensory effects attached to traditional multimedia presentations such as movies that shall lead to an enhanced, unique, and worthwhile user experience.},
  isbn13 = {9781424478484},
  keywords = {Sensory Information, MPEG-V},
  language = {EN},
  location = {Desenzano del Garda, Italy},
  pdf = {https://www.itec.aau.at/bib/files/wiamis2010_mwcthh.pdf},
  talkdate = {2010.04.13},
  talktype = {poster}
}

@InProceedings{Waltl2010_QoMEX2010_SensoryEffects,
  author = {Waltl, Markus and Timmerer, Christian and Hellwagner, Hermann},
  booktitle = {Proceedings of the 2nd International Workshop on Quality of Multimedia Experience (QoMEX'10)},
  title = {Improving the Quality of Multimedia Experience through Sensory Effects},
  year = {2010},
  address = {Los Alamitos, CA, USA},
  editor = {Perkis, Andrew and Möller, Sebastian and Svensson, Peter and Reibman, Amy},
  month = {jun},
  pages = {124-129},
  publisher = {IEEE},
  abstract = {In previous and related work sensory effects are presented as a tool for increasing the user experience of multimedia presentations by stimulating also other senses than vision or audition. In this paper we primarily investigated the relationship of the Quality of Experience (QoE) due to various video bit-rates of multimedia contents annotated with sensory effects (e.g., wind, vibration, light). Therefore, we defined a subjective quality assessment methodology based on standardized methods. The paper describes the test environment, its setup, and conditions in detail. Furthermore, we experimented with a novel voting device that allows for continuous voting feedback during a sequence in addition to the overall quality voting at the end of each sequence. The results obtained from the subjective quality assessment are presented and discussed thoroughly. In anticipation of the results we can report an improvement of the quality of the multimedia experience thanks to the sensory effects.},
  doi = {10.1109/QOMEX.2009.5246962},
  isbn13 = {9781424469581},
  keywords = {Sensory Information, MPEG-V},
  language = {EN},
  location = {Trondheim, Norway},
  pdf = {https://www.itec.aau.at/bib/files/qomex2010_mwcthh.pdf},
  talkdate = {2010.06.21},
  talktype = {poster},
  url = {http://www.qomex2010.org}
}

@InProceedings{Waltl2010_IMOF2010_SensoryEffects,
  author = {Waltl, Markus and Raffelsberger, Christian and Timmerer, Christian and Hellwagner, Hermann},
  booktitle = {Proceedings CD of the 2nd International ICST Conference on User Centric Future Media Internet},
  title = {Metadata-based Content Management and Sharing System for Improved User Experience},
  year = {2010},
  address = {Berlin, Heidelberg, New York},
  editor = {Alvarez, Federico and Costa, Cristina},
  month = {sep},
  pages = {1-9},
  publisher = {Springer Verlag GmbH},
  abstract = {In the past years the amount of multimedia content on the Internet or in home networks has been drastically increasing. Instead of buying traditional media (such as CDs or DVDs) users tend to buy online media. This leads to the difficulty of managing the content (e.g., movies, images). A vast amount of tools for content management exists but they are mainly focusing on one type of content (e.g., only images). Furthermore, most of the available tools are not configurable to the user’s preferences and cannot be accessed by different devices (e.g., TV, computer, mobile phone) in the home network.
  In this paper we present a UPnP A/V-based system for managing and sharing audio/visual content in home environments which is configurable to the user’s preferences. Furthermore, the paper depicts how this system can be used to improve the user experience by using MPEG-V.},
  isbn13 = {9789639995000},
  keywords = {Sensory Information, MPEG-V},
  language = {EN},
  location = {Palma de Mallorca, Spain},
  pdf = {https://www.itec.aau.at/bib/files/ucmedia2010_mwcrcthh.pdf},
  talkdate = {2010.09.01},
  talktype = {registered}
}

@InProceedings{Timmerer2010_WISMA_SensoryEffects,
  author = {Timmerer, Christian and Waltl, Markus and Hellwagner, Hermann},
  booktitle = {Proceedings of the Workshop on Interoperable Social Multimedia Applications (WISMA 2010)},
  title = {Are Sensory Effects Ready for the World Wide Web?},
  year = {2010},
  address = {Aachen, Germany},
  editor = {Carreras, Anna and Delgado, Jaime and Maroñas, Xavier and Rodríguez, Víctor},
  month = {may},
  pages = {57-60},
  publisher = {CEUR Workshop Proceedings (CEUR-WS.org)},
  abstract = {The World Wide Web (WWW) is one of the main entry points to access and consume Internet content in various forms. In particular, the Web browser is used to access different types of media (i.e., text, image, audio, and video) and on some platforms is the only way to access the vast amount of information on the Web. Recently, it has been proposed to stimulate also other senses than vision or audition while consuming multimedia content through so-called sensory effects, with the aim to increase the user’s Quality of Experience (QoE). The effects are represented as Sensory Effects Metadata (SEM) which is associated to traditional multimedia content and is rendered (synchronized with the media) on sensory devices like fans, vibration chairs, lamps, etc. In this paper we provide a principal investigation of whether the sensory effects are ready for the WWW and, in anticipation of the result, we propose how to embed sensory effect metadata within Web content and the synchronized rendering thereof.},
  keywords = {MPEG-V},
  language = {EN},
  location = {Barcelona, Spain},
  pdf = {https://www.itec.aau.at/bib/files/MPEG-V_v2.1.pdf},
  talkdate = {2010.05.19},
  talktype = {registered}
}

@InProceedings{Waltl2009,
  author = {Waltl, Markus and Timmerer, Christian and Hellwagner, Hermann},
  booktitle = {Proceedings of the First International Workshop on Quality of Multimedia Experience (QoMEX 2009)},
  title = {A Test-Bed for Quality of Multimedia Experience Evaluation of Sensory Effects},
  year = {2009},
  address = {Los Alamitos, CA, USA},
  editor = {Ebrahimi, Touradj and El-Maleh, Khaled and Dane, Gokce and Karam, Lina},
  month = {jul},
  pages = {145-150},
  publisher = {IEEE},
  abstract = {This paper introduces a prototype test-bed for triggering sensory effects like light, wind, or vibration when presenting audiovisual resources, e.g., a video, to users. The ISO/IEC MPEG is currently standardizing the Sensory Effect Description Language (SEDL) for describing such effects. This language is briefly described in the paper and the test-bed that is destined to evaluate the quality of the multimedia experience of users is presented. It consists of a video annotation tool for sensory effects, a corresponding simulation tool, and a real test system.
  Initial experiments and results on determining the color of light effects from the video content are reported.},
  doi = {10.1109/QOMEX.2009.5246962},
  isbn13 = {978-1-4244-4370-3},
  keywords = {Sensory Information, MPEG-V},
  language = {EN},
  location = {San Diego, CA},
  pdf = {https://www.itec.aau.at/bib/files/qomex2009_mwcthh.pdf},
  talkdate = {2009.07.31},
  talktype = {registered},
  url = {http://www.qomex2009.org}
}

@InProceedings{Reiterer2009b,
  author = {Reiterer, Bernhard and Concolato, Cyril and Hellwagner, Hermann},
  booktitle = {Proceedings of 1st International ICST Conference on User Centric Media - UCMedia 2009},
  title = {Natural-Language-based Conversion of Images to Mobile Multimedia Experiences},
  year = {2009},
  address = {Berlin, Heidelberg, New York},
  editor = {Daras, Petros and Chlamtac, Imrich},
  month = {dec},
  pages = {4 - CD},
  publisher = {Springer},
  series = {LNICST - Lecture Notes of the Institute for Computer Sciences, Social-Informatics and Telecommunications Engineering},
  abstract = {We describe an approach for viewing any large, detail-rich picture on a small display by generating a video from the image, as taken by a virtual camera moving across it at varying distance. Our main innovation is the ability to build the virtual camera's motion from a textual description of a picture, e.g., a museum caption, so that relevance and ordering of image regions are determined by co-analyzing image annotations and natural language text. Furthermore, our system arranges the resulting presentation such that it is synchronized with an audio track generated from the text by use of a text-to-speech system.},
  isbn13 = {9789639799844},
  keywords = {image adaptation - text analysis - image annotation - digital cultural heritage - computer animation},
  language = {EN},
  talktype = {none},
  url = {http://www.usercentricmedia.org/index.shtml}
}

@InProceedings{Reiterer2009a,
  author = {Reiterer, Bernhard and Hellwagner, Hermann},
  booktitle = {Proceedings International InterMedia Summer School 2009},
  title = {Animated Picture Presentation Steered by Natural Language},
  year = {2009},
  address = {Geneva},
  editor = {Magnenat-Thalmann, Nadia and Han, Seunghyun and Potopsaltou, Dimitris},
  month = {jun},
  pages = {24-32},
  publisher = {MIRALab at University of Geneva},
  series = {International InterMedia Summer School},
  abstract = {In this paper, we present an approach for presenting large, feature-rich pictures on small displays by generating an animation and subsequently a video from the image, as it could be taken by a virtual camera moving across the image. Our main innovation is the ability to build the virtual camera's motion upon a textual description of a picture, as from a museum caption, so that relevance and ordering of image regions is determined by co-analyzing image annotations and text.
  Furthermore, our system can arrange the resulting presentation in a way that it is synchronized with an audio track generated from the text by use of a text-to-speech system.},
  keywords = {image adaptation, text parsing, image annotation, digital cultural heritage, computer animation},
  language = {EN},
  talktype = {none},
  url = {http://intermediaschool.miralab.unige.ch/}
}

@InProceedings{Reiterer2009,
  author = {Reiterer, Bernhard and Lachner, Janine and Lorenz, Andreas and Zimmermann, Andreas and Hellwagner, Hermann},
  booktitle = {Advances in Semantic Media Adaptation and Personalization},
  title = {Research Directions Toward User-centric Multimedia},
  year = {2009},
  address = {Boca Raton (Florida)},
  editor = {Angelides, Marios C and Mylonas, Phivos and Wallace, Manolis},
  month = {mar},
  pages = {21-42},
  publisher = {Auerbach Publications},
  abstract = {Currently, much research aims at coping with the shortcomings in multimedia consumption that may exist in a user's current context, e.g., due to the absence of appropriate devices at many locations, a lack of capabilities of mobile devices, restricted access to content, or non-personalized user interfaces. Recently, solutions to specific problems have been emerging, e.g., wireless access to multimedia repositories over standardized interfaces; however, due to usability restrictions the user has to spend much effort to or is even incapable of fulfilling his/her demands. The vision of user-centric multimedia places the user in the center of multimedia services to support his/her multimedia consumption intelligently, dealing with the aforementioned issues while minimizing required work. Essential features of such a vision are comprehensive context awareness, personalized user interfaces, and multimedia content adaptation. These aspects are addressed in this paper as major challenges toward a user-centric multimedia framework.},
  doi = {10.1201/9781420076653-c2},
  edition = {2},
  isbn10 = {1420076647},
  isbn13 = {978-1420076646},
  language = {EN},
  talktype = {none},
  url = {http://www.crcpress.com/product/isbn/9781420076646}
}

@Article{Reiterer2008,
  author = {Reiterer, Bernhard and Concolato, Cyril and Lachner, Janine and Le Feuvre, Jean and Moissinac, Jean-Claude and Lenzi, Stefano and Chessa, Stefano and Ferrá, Enrique Fernández and Menaya, Juan José González and Hellwagner, Hermann},
  journal = {The Visual Computer, International Journal of Computer Graphics},
  title = {User-centric universal multimedia access in home networks},
  year = {2008},
  issn = {0178-2789},
  month = {jul},
  number = {7-9},
  pages = {837-845},
  volume = {24},
  abstract = {Much research is currently being conducted towards Universal Multimedia Access, aiming at removing barriers that arise when multimedia content is to be consumed with more and more heterogeneous devices and over diverse networks. We argue that users should be put at the center of the research work to enable user-centric multimedia access. In this paper we present the requirements for a user-centric multimedia access system in a networked home environment. These requirements are easy access to available content repositories, context awareness, content adaptation and session migration.
  After showing the limits of state-of-the-art technologies, we present the architecture of a system which allows unified access to the home network content, automatically delivered to rendering devices close to the user, adapted according to the rendering device constraints, and which is also capable of session mobility.},
  address = {Berlin, Heidelberg, New York},
  doi = {10.1007/s00371-008-0265-5},
  keywords = {Universal Multimedia Access · Multimedia adaptation · UPnP AV · Context awareness · Content sharing},
  language = {EN},
  pdf = {https://www.itec.aau.at/bib/files/User-centric universal multimedia access in home networks.pdf},
  publisher = {Springer},
  url = {http://www.springerlink.com/content/fpjj19237704788k/}
}

@InProceedings{Bolla2008,
  author = {Bolla, Raffaele and Repetto, Matteo and Chessa, Stefano and Furfari, Francesco and Zutter, Saar De and Walle, Rik Van de and Reiterer, Bernhard and Hellwagner, Hermann and Asbach, Mark and Wien, Mathias},
  booktitle = {13th IEEE International Conference on Emerging Technologies and Factory Automation (ETFA08)},
  title = {A Context-Aware Architecture for QoS and Transcoding Management of Multimedia Streams in Smart Homes},
  year = {2008},
  address = {Los Alamitos, CA, USA},
  editor = {IEEE, Industrial Electronics Society},
  month = {sep},
  pages = {1354-1361},
  publisher = {IEEE},
  abstract = {Current trends in smart homes suggest that several multimedia services will soon converge towards common standards and platforms. However, this rapid evolution gives rise to several issues related to the management of a large number of multimedia streams in the home communication infrastructure. An issue of particular relevance is how a context acquisition system can be used to support the management of such a large number of streams with respect to the Quality of Service (QoS), to their adaptation to the available bandwidth or to the capacity of the involved devices, and to their migration and adaptation driven by the users' needs that are implicitly or explicitly notified to the system. Under this scenario this paper describes the experience of the INTERMEDIA project in the exploitation of context information to support QoS, migration, and adaptation of multimedia streams.},
  language = {EN},
  pdf = {https://www.itec.aau.at/bib/files/04638575.pdf},
  talktype = {none}
}

@InProceedings{VanDeursen2007,
  author = {Deursen, Davy Van and Bruyne, Sarah De and Lancker, Wim Van and Neve, Wesley De and Schrijver, Davy De and Hellwagner, Hermann and Walle, Rik Van de},
  booktitle = {IEEE International Symposium on Multimedia 2007 (ISM2007)},
  title = {MuMiVA: A Multimedia Delivery Platform using Format-agnostic, XML-driven Content Adaptation},
  year = {2007},
  address = {Los Alamitos, CA, USA},
  editor = {Bulterman, Dick and Mori, Kinji and Tsai, Jeffrey J P},
  month = {dec},
  pages = {131-138},
  publisher = {IEEE},
  abstract = {Due to the increasing heterogeneity in the current multimedia landscape, the delivery of multimedia content has become an important issue today. This heterogeneity is not only reflected by a plethora of different usage environments, but also by the presence of multiple (scalable) coding formats. Therefore, format-independent adaptation engines have to be used within a multimedia delivery platform, which are able to adapt the multimedia content according to a certain usage environment, independent of the underlying coding format of the content.
  By relying on automatically created textual descriptions of the high-level syntax of binary media resources, a format-independent adaptation engine can be built. MPEG-21 generic Bitstream Syntax Schema (gBS Schema) is a tool that is part of the MPEG-21 Multimedia Framework. It enables the use of generic Bitstream Syntax Descriptions (gBSDs), i.e., textual descriptions in XML, to steer the adaptation of a binary media resource, using format-independent adaptation logic. In this paper, we address the design and performance evaluation of a multimedia delivery platform that relies on gBS Schema-driven adaptation engines. This platform is called MuMiVA; it is a fully integrated, extensible platform for multimedia delivery in heterogeneous usage environments, using streaming technologies. To demonstrate the flexibility of our multimedia delivery platform, we discuss the functioning of two different applications (i.e., exploitation of temporal scalability and shot selection) applied to two different coding formats (i.e., MPEG-4 Visual and H.264/AVC).},
  isbn10 = {0769530583},
  isbn13 = {978-0769530581},
  keywords = {Content adaptation, Content delivery, MPEG-21 gBS Schema, XML transformations},
  language = {EN},
  pdf = {https://www.itec.aau.at/bib/files/MuMiVA.pdf},
  talktype = {none},
  url = {http://www.ism2007.ncu.edu.tw/}
}

@InProceedings{Lachner2007,
  author = {Lachner, Janine and Lorenz, Andreas and Reiterer, Bernhard and Zimmermann, Andreas and Hellwagner, Hermann},
  booktitle = {Second International Workshop on Semantic Media Adaptation and Personalization (SMAP 2007)},
  title = {Challenges toward User-centric Multimedia},
  year = {2007},
  address = {Los Alamitos, CA, USA},
  editor = {Mylonas, Phivos and Wallace, Manolis and Angelides, Marios C},
  month = {sep},
  pages = {159-164},
  publisher = {IEEE},
  abstract = {Currently, much research aims at coping with the shortcomings in multimedia consumption that may exist in a user's current context, e.g., due to the absence of appropriate devices at many locations, a lack of capabilities of mobile devices, restricted access to content, or non-personalized user interfaces. Recently, solutions to specific problems have been emerging, e.g., wireless access to multimedia repositories over standardized interfaces; however, due to usability restrictions the user has to spend much effort to or is even incapable of fulfilling his/her demands. The vision of user-centric multimedia places the user in the center of multimedia services to support his/her multimedia consumption intelligently, dealing with the aforementioned issues while minimizing required work. Essential features of such a vision are comprehensive context awareness, personalized user interfaces, and multimedia content adaptation. These aspects are addressed in this paper as major challenges toward a user-centric multimedia framework.},
  isbn10 = {0769530400},
  isbn13 = {9780769530406},
  language = {EN},
  pdf = {https://www.itec.aau.at/bib/files/Challenges toward User-centric Multimedia.pdf},
  talktype = {none},
  url = {http://www.smap2007.org/}
}