% Categories: QUALINET
% Encoding: utf-8
@Article{timmerer2015_computer, author = {Timmerer, Christian and Ebrahimi, Touradj and Pereira, Fernando}, journal = {IEEE Computer}, title = {Toward a New Assessment of Quality}, year = {2015}, issn = {0018-9162}, month = {mar}, number = {3}, pages = {108-110}, volume = {48}, abstract = {A convergence of trends is shifting the focus of quality assessment from compliance with system design goals to fulfillment of user needs or expectations in different contexts.}, address = {Los Alamitos, CA, USA}, keywords = {Quality of Experience, Quality of Sensory Experience, Quality of Life}, language = {EN}, pdf = {https://www.itec.aau.at/bib/files/07063175.pdf}, publisher = {IEEE Computer Society} }
@InProceedings{Rainer2015_MMSysDemo, author = {Rainer, Benjamin and Petscharnig, Stefan and Timmerer, Christian}, booktitle = {Proceedings of the 6th ACM International Conference on Multimedia Systems}, title = {Merge And Forward - Self-organized Inter-Destination Multimedia Synchronization}, year = {2015}, address = {New York, NY, USA}, month = {mar}, pages = {77-80}, publisher = {ACM}, abstract = {Social networks have become ubiquitous, and with these new possible ways for social communication and experiencing multimedia together, the traditional TV scenario drifts more and more towards a distributed social experience. Asynchronism in the multimedia playback of the users may have a significant impact on the acceptability of systems providing the distributed multimedia experience. The synchronization needed in such systems is called Inter-Destination Multimedia Synchronization (IDMS). In this paper we propose a demo that implements IDMS by means of our self-organized and distributed approach assisted by pull-based streaming. We also provide a video of the planned demonstration and provide the mobile application as open source licensed under the GNU LGPL.}, language = {EN}, location = {Portland, Oregon}, talkdate = {2015.03.18}, talktype = {poster} }
@Article{timmerer2014_computer, author = {Timmerer, Christian and Rainer, Benjamin}, journal = {IEEE Computer}, title = {The Social Multimedia Experience}, year = {2014}, issn = {0018-9162}, month = {mar}, number = {3}, pages = {67-69}, volume = {47}, abstract = {Inter-destination multimedia synchronization and quality of experience are critical to the success of social TV, which integrates television viewing with social networking.}, address = {Los Alamitos, CA, USA}, keywords = {Quality of Experience, Inter-Destination Media Synchronization, Social TV, DASH, IDMS, QoE}, language = {EN}, pdf = {https://www.itec.aau.at/bib/files/timmerer-sme-mar13.pdf}, publisher = {IEEE Computer Society} }
@InCollection{Timmerer2014_QuASE, author = {Timmerer, Christian and Waltl, Markus and Rainer, Benjamin and Murray, Niall}, booktitle = {Quality of Experience: Advanced Concepts, Applications and Methods}, publisher = {Springer}, title = {Sensory Experience: Quality of Experience Beyond Audio-Visual}, year = {2014}, address = {Heidelberg}, editor = {Möller, Sebastian and Raake, Alexander}, month = {mar}, pages = {351-365}, abstract = {This chapter introduces the concept of Sensory Experience which aims to define the Quality of Experience (QoE) going beyond audio-visual content. In particular, we show how to utilize sensory effects such as ambient light, scent, wind, or vibration as additional dimensions contributing to the quality of the user experience.
Therefore, we utilize a standardized representation format for sensory effects that are attached to traditional multimedia resources such as audio, video, and image contents. Sensory effects are rendered on special devices (e.g., fans, lights, motion chair, scent emitter) in synchronization with the traditional multimedia resources and shall also stimulate other senses than hearing and seeing, with the intention to increase the Quality of Experience (QoE), in this context referred to as Sensory Experience.}, keywords = {Quality of Experience, Quality of Sensory Experience, MPEG-V, Sensory Effects}, language = {EN} }
@Article{Timmerer2014_MMC, author = {Timmerer, Christian and Waltl, Markus and Rainer, Benjamin and Lederer, Stefan and Hellwagner, Hermann}, journal = {IEEE Multimedia Communications Technical Committee E-Letter}, title = {Enhancing 3D Video to enable a Fully Immersive Sensory Experiences}, year = {2014}, month = {jan}, number = {1}, pages = {23-26}, volume = {9}, address = {New York, NY, USA}, language = {EN}, pdf = {https://www.itec.aau.at/bib/files/E-Letter-Jan2014.pdf}, publisher = {IEEE Communications Society [online]}, url = {http://committees.comsoc.org/mmc/e-news/E-Letter-Jan2014.pdf} }
@InProceedings{Rainer2014_NOMS_QCMAN, author = {Rainer, Benjamin and Timmerer, Christian}, booktitle = {Proceedings of the IEEE Network Operations and Management Symposium (IEEE NOMS 2014)}, title = {A Subjective Evaluation using Crowdsourcing of Adaptive Media Playout utilizing Audio-Visual Content Features}, year = {2014}, address = {Los Alamitos, CA, USA}, editor = {Lutfiyya, Hanan and Cholda, Piotr}, month = {may}, pages = {0--0}, publisher = {IEEE}, abstract = {Synchronizing multimedia playback among geographically distributed clients is a challenging task and is referred to as Inter-Destination Media Synchronization (IDMS). In this paper we discuss the use cases of IDMS as identified within the SocialSensor Project and, based on these use cases, we derive a novel Adaptive Media Playout (AMP) scheme which aims at carrying out the process of synchronizing the media playback at the clients to a given synchronization point. We propose how visual and acoustic features can be used to achieve a QoE-aware and context-aware AMP scheme.}, isbn13 = {-}, keywords = {Adaptive Media Playout, Inter-Destination Media Synchronization, Social TV}, language = {EN}, location = {Krakow, Poland}, pdf = {https://www.itec.aau.at/bib/files/qcman-final.pdf}, talkdate = {2014.05.09}, talktype = {registered} }
@InProceedings{Rainer2014_Mobiquituous, author = {Klusch, Matthias and Kapahnke, Patrick and Cao, Xiaoqi and Rainer, Benjamin and Timmerer, Christian and Mangold, Stefan}, booktitle = {Proceedings of the 11th International Conference on Mobile and Ubiquitous Systems: Computing, Networking and Services}, title = {MyMedia: Mobile Semantic Peer-to-Peer Video Search and Live Streaming}, year = {2014}, address = {New York, NY, USA}, editor = {Youssef, Moustafa}, month = {dec}, pages = {10}, publisher = {ACM}, abstract = {Mobile peer-to-peer (P2P) computing with applications such as video on demand, file sharing, and video conferencing is gaining momentum based on new standards and technologies such as IETF PPSP, WiFi-Direct, and BitTorrent live streaming. In this paper, we describe the mobile system MyMedia, which allows users to search, share, and experience videos and live recordings using P2P and at the best quality possible with respect to available network capacity.
In particular, the MyMedia system features a high-precision semantic P2P search and dynamic network-adaptive P2P live streaming of MPEG videos over HTTP based on the ISO/IEC standard MPEG-DASH from mobile to mobile devices in unstructured wireless P2P networks. These features have been integrated into the mobile application TIFF EventLive of the 54th Thessaloniki International Film Festival. The evaluation of their performance and device energy consumption, together with a first user evaluation at the festival, showed that the MyMedia system is suitable for its purpose in practice and accepted by users. The MyMedia system is available as open-source software for the Android operating system.}, isbn13 = {-}, language = {EN}, location = {London, Great Britain}, pdf = {https://www.itec.aau.at/bib/files/mobi_mymedia_paper.pdf}, talkdate = {2014.12.05}, talktype = {registered}, url = {http://mobiquitous.org/2014} }
@Article{Rainer2014_AUTI, author = {Rainer, Benjamin and Timmerer, Christian}, journal = {ACM Transactions on Multimedia Computing, Communications, and Applications (TOMM)}, title = {A Generic Utility Model Representing the Quality of Sensory Experience}, year = {2014}, month = {oct}, pages = {14:1--14:17}, address = {New York, NY, USA}, booktitle = {ACM Trans. Multimedia Comput. Commun. Appl.}, editor = {Steinmetz, Ralf and Ghinea, Gheorghita and Timmerer, Christian and Lin, Weisi and Gulliver, Stephen and Zha, Zheng-Jun and Zhang, Lei and M\"{u}hlh\"{a}user, Max and Smeaton, Alan}, language = {EN}, pdf = {https://www.itec.aau.at/bib/files/TOMCCAP-SE-Model.pdf}, publisher = {ACM}, talktype = {none} }
@InProceedings{Rainer2014_ACM_MM_SELFORG, author = {Rainer, Benjamin and Timmerer, Christian}, booktitle = {Proceedings of the 22nd ACM International Conference on Multimedia}, title = {Self-Organized Inter-Destination Multimedia Synchronization For Adaptive Media Streaming}, year = {2014}, address = {New York, NY, USA}, month = {nov}, pages = {10}, publisher = {ACM}, abstract = {Social networks have become pervasive and have changed the way of social interaction. The traditional TV experience drifts from an event tied to a certain place with the family or friends to a location-independent and distributed social experience. Additionally, more and more video on-demand services adopt a pull-based streaming approach. In order to provide a synchronized and distributed TV experience we introduce a self-organized Inter-Destination Multimedia Synchronization (IDMS) framework for adaptive media streaming. In particular, we extend the principles of IDMS to adaptive media streaming over HTTP (i.e., MPEG-DASH) and enable a synchronized multimedia playback among geographically distributed clients. Therefore, we introduce session management to MPEG-DASH and propose a distributed control scheme for negotiating a reference playback timestamp among the participating peers in an IDMS session. We evaluate our proposed scheme with respect to scalability and the time required for negotiating the reference playback timestamp. Furthermore, we investigate how to compensate the identified asynchronism by using adaptive media playout with respect to the Quality of Experience (QoE). Therefore, we define a temporal distortion measure for audio and video which allows us to model the impact of playback rate variations on the QoE.
This measure is evaluated by conducting a subjective quality assessment using crowdsourcing.}, isbn13 = {-}, keywords = {Inter-Destination Multimedia Synchronization, Adaptive Media Streaming, Self-Organization, Quality of Experience, Dynamic Adaptive Streaming over HTTP}, language = {EN}, location = {Orlando, Florida}, pdf = {https://www.itec.aau.at/bib/files/acmm14.pdf}, talkdate = {2014.11.03}, talktype = {registered}, url = {http://acmmm.org/2014/} }
@InProceedings{Rain1412:VNext, title = {Quality of Experience of Web-based Adaptive HTTP Streaming Clients in Real-World Environments using Crowdsourcing}, author = {Rainer, Benjamin and Timmerer, Christian}, booktitle = {First International Workshop on VideoNext: Design, Quality and Deployment of Adaptive Video Streaming}, year = {2014}, address = {Sydney, Australia}, month = {dec}, pages = {1-6}, publisher = {ACM}, keywords = {Dynamic Adaptive Streaming over HTTP; Crowdsourcing; Subjective Quality Assessment; Quality of Experience; QoE; DASH; MPEG}, language = {EN}, location = {Sydney, Australia}, pdf = {https://www.itec.aau.at/bib/files/videoNextDASH.pdf}, talkdate = {2014.12.02}, talktype = {registered} }
@Article{Hossfeld2014_Computer, author = {Ho{\ss}feld, Tobias and Keimel, Christian and Timmerer, Christian}, journal = {Computer}, title = {Crowdsourcing Quality-of-Experience Assessments}, year = {2014}, month = {sep}, number = {9}, pages = {98--102}, volume = {47}, abstract = {Crowdsourced quality-of-experience (QoE) assessments are more cost-effective and flexible than traditional in-lab evaluations but require careful test design, innovative incentive mechanisms, and technical expertise to address various implementation challenges.}, address = {Los Alamitos, CA, USA}, doi = {10.1109/MC.2014.245}, language = {EN}, pdf = {https://www.itec.aau.at/bib/files/mco2014090098.pdf}, publisher = {IEEE Computer Society} }
@InProceedings{Hossfeld2014, author = {Ho{\ss}feld, Tobias and Hirth, Matthias and Korshunov, Pavel and Hanhart, Philippe and Gardlo, Bruno and Keimel, Christian and Timmerer, Christian}, booktitle = {2014 IEEE International Workshop on Multimedia Signal Processing (MMSP 2014)}, title = {Survey of Web-based Crowdsourcing Frameworks for Subjective Quality Assessment}, year = {2014}, address = {Piscataway, N.J. 08854, U.S.A.}, editor = {Rahardja, Susanto and Zhang, Zhengyou and Pereira, Fernando and Loui, Alexander}, month = {sep}, pages = {6}, publisher = {IEEE}, abstract = {The popularity of crowdsourcing for performing various tasks online has increased significantly in the past few years. The low cost and flexibility of crowdsourcing, in particular, attracted researchers in the field of subjective multimedia evaluations and Quality of Experience (QoE). Since online assessment of multimedia content is challenging, several dedicated frameworks were created to aid in the design of the tests, including the support of testing methodologies like ACR, DCR, and PC, setting up the tasks, training sessions, screening of the subjects, and storage of the resulting data. In this paper, we focus on the web-based frameworks for multimedia quality assessments that support commonly used crowdsourcing platforms such as Amazon Mechanical Turk and Microworkers.
We provide a detailed overview of the crowdsourcing frameworks and evaluate them to aid researchers in the field of QoE assessment in the selection of frameworks and crowdsourcing platforms that are adequate for their experiments.}, language = {EN}, pdf = {https://www.itec.aau.at/bib/files/PDF_check_PID3271407.pdf}, talktype = {none} } @Article{Ghinea2014_Tutorial, author = {Ghinea, Gheorghita and Timmerer, Christian and Lin, Weisi and Gulliver, Stephen}, journal = {ACM Transactions on Multimedia Computing, Communications, and Applications (TOMM)}, title = {Mulsemedia: State of the Art, Perspectives, and Challenges}, year = {2014}, issn = {1551-6857}, month = {sep}, number = {1s}, pages = {17:1--17:23}, volume = {11}, abstract = {Mulsemedia—multiple sensorial media—captures a wide variety of research efforts and applications. This article presents a historic perspective on mulsemedia work and reviews current developments in the area. These take place across the traditional multimedia spectrum—from virtual reality applications to computer games—as well as efforts in the arts, gastronomy, and therapy, to mention a few. We also describe standardization efforts, via the MPEG-V standard, and identify future developments and exciting challenges the community needs to overcome.}, address = {New York, NY, USA}, keywords = {Mulsemedia, contour perception, flow visualization, multisensory, perceptual theory, visual cortex, visualization}, language = {EN}, pdf = {https://www.itec.aau.at/bib/files/a17-ghinea.pdf}, publisher = {ACM} } @Article{Ghinea2014_Intro, author = {Ghinea, Gheorghita and Timmerer, Christian and Lin, Weisi and Gulliver, Stephen}, journal = {ACM Transactions on Multimedia Computing, Communications, and Applications (TOMM)}, title = {Guest Editorial: Special Issue on Multiple Sensorial (MulSeMedia) Multimodal Media: Advances and Applications}, year = {2014}, issn = {1551-6857}, month = {sep}, number = {1s}, pages = {9:1--9:2}, volume = {11}, address = {New York, NY, USA}, language = {EN}, pdf = {https://www.itec.aau.at/bib/files/a9-ghinea.pdf}, publisher = {ACM} } @InProceedings{Waltl2013_QoMEX2013_4DPlayer, author = {Waltl, Markus and Rainer, Benjamin and Lederer, Stefan and Timmerer, Christian and Gassner, Katharina and Terlutter, Ralf}, booktitle = {Proceedings of the 5th International Workshop on Quality of Multimedia Experience (QoMEX'13)}, title = {A 4D Multimedia Player enabling Sensory Experience}, year = {2013}, address = {Los Alamitos, CA, USA}, editor = {Timmerer, Christian and Le Callet, Patrick and Varela, Martin and Winkler, Stefan and Falk, Tiago H}, month = {jul}, pages = {126-127}, publisher = {IEEE}, abstract = {Lately, 3D is gaining momentum in cinemas and home environments. However, 2D and 3D video content only stimulates senses like hearing and seeing. In this paper we focus on a more enhanced level of entertainment by presenting a 4D multimedia player and a corresponding demonstration setup, which stimulates further senses such as haptics using the MPEG-V: Media Context and Control standard. The presented demonstration setup uses stereoscopic 3D and sensory devices, i.e., fans, vibration panels and lights. 
The combination of conventional 3D content with tailored sensory effects allows us to further enhance the viewing experience of the users.}, isbn13 = {-}, keywords = {3D, 4D, Sensory Effects, Sensory Experience, MPEG-V}, language = {EN}, location = {Klagenfurt am Wörthersee, Austria}, pdf = {https://www.itec.aau.at/bib/files/QoMEX2013_4DPlayer.pdf}, talkdate = {2013.07.04}, talktype = {poster}, url = {http://www.qomex2013.org} }
@Article{Waltl2012_MPEGVSI, author = {Waltl, Markus and Rainer, Benjamin and Timmerer, Christian and Hellwagner, Hermann}, journal = {Signal Processing: Image Communication}, title = {An End-to-End tool chain for sensory experience based on MPEG-V}, year = {2013}, month = {feb}, number = {2}, pages = {136--150}, volume = {28}, abstract = {This paper provides an overview of our research conducted in the area of Sensory Experience including our implementations using MPEG-V Part 3 entitled ``Sensory Information''. MPEG-V Part 3 introduces Sensory Experience as a tool to increase the Quality of Experience by annotating traditional multimedia data with sensory effects. These sensory effects are rendered on special devices like fans, vibration chairs, ambient lights, scent disposers, water sprayers, or heating/cooling devices stimulating senses beyond the traditional ones. The paper's main focus is on the end-to-end aspects including the generation, transmission, and synchronized rendering of sensory effects with the traditional multimedia data taking movie clips as an example. Therefore, we present in this paper an open source tool chain that provides a complete end-to-end sensory effect generation and consumption framework. Furthermore, we summarize results from various subjective quality assessments conducted in this area. Finally, we point out research challenges that may encourage further research within this emerging domain.}, address = {Amsterdam, Netherlands}, doi = {10.1016/j.image.2012.10.009}, keywords = {Quality of Experience; Sensory Experience; Subjective Quality Assessment; Experimental Results; MPEG-V; Annotation Tool; Simulation Tool; Web Browser Plug-in; Sensory Information}, language = {EN}, publisher = {Elsevier}, url = {http://dx.doi.org/10.1016/j.image.2012.10.009} }
@InProceedings{Timmerer2013_QoMEX2013_UtilityModel, author = {Timmerer, Christian and Rainer, Benjamin and Waltl, Markus}, booktitle = {Proceedings of the 5th International Workshop on Quality of Multimedia Experience (QoMEX'13)}, title = {A Utility Model for Sensory Experience}, year = {2013}, address = {Los Alamitos, CA, USA}, editor = {Timmerer, Christian and Le Callet, Patrick and Varela, Martin and Winkler, Stefan and Falk, Tiago H}, month = {jul}, pages = {224-229}, publisher = {IEEE}, abstract = {Enriching multimedia with additional effects such as olfaction, light, wind, or vibration is gaining more and more momentum in both research and industry. Hence, there is the need to determine the influence of individual effects on the Quality of Experience (QoE). In this paper, we present a subjective quality assessment using the MPEG-V standard to annotate video sequences with individual sensory effects (i.e., wind, light, and vibration) and all combinations thereof. Based on the results we derive a utility model for sensory experience that accounts for the assessed sensory effects.
Finally, we provide an example instantiation of the utility model and validate it against current and past results of our subjective quality assessments conducted so far.}, isbn13 = {-}, keywords = {Quality of Multimedia Experience, Sensory Effects, MPEG-V, Sensory Experience, Utility Model}, language = {EN}, location = {Klagenfurt am Wörthersee, Austria}, pdf = {https://www.itec.aau.at/bib/files/QoMEX2013_UtilityModel.pdf}, talkdate = {2013.07.05}, talktype = {registered}, url = {http://www.qomex2013.org} } @InProceedings{Sieber2013, author = {Sieber, Christian and Ho{\ss}feld, Tobias and Zinner, Thomas and Tran-Gia, Phuoc and Timmerer, Christian}, booktitle = {Integrated Network Management (IM 2013), 2013 IFIP/IEEE International Symposium on}, title = {Implementation and User-centric Comparison of a Novel Adaptation Logic for DASH with SVC}, year = {2013}, address = {New York, NY, USA}, editor = {De Turck, Filip and Diao, Yixin and Hong, Choong Seon and Medhi, Deep and Sadre, Ramin}, month = {may}, pages = {1318-1323}, publisher = {IEEE Communications Society}, abstract = {The MPEG-DASH standard allows the client-centric access to different representations of video content via the HTTP protocol. The client can flexibly switch between different qualities, i.e., different bit rates and thus avoid waiting times during the video playback due to empty playback buffers. However, quality switches and the playback of lower qualities is perceived by the user which may reduce the Quality of Experience (QoE). Therefore, novel algorithms are required which manage the streaming behavior with respect to the user's requirements and which do not waste network resources. As indicated by recent studies, scalable video coding (SVC) may use the current network and content distribution infrastructure in a more efficient way than with single layer codecs. The contribution of this paper is the design and the implementation of a novel DASH/SVC streaming algorithm. By means of measurements in a test-bed, its performance and benefits are evaluated and compared to existing algorithms from an user-centric view point with objective performance metrics. Our findings show that the proposed algorithm outperforms other DASH mechanisms in terms of video quality, low switching frequency and usage of the available resources in a realistic mobile network scenario. This is a first step towards true QoE management of video streaming in the Internet with DASH and SVC.}, isbn13 = {978-1-4673-5229-1}, keywords = {quality of experience, mpeg-dash, scalable video coding, adaptive media streaming}, language = {EN}, pdf = {https://www.itec.aau.at/bib/files/06573184.pdf}, talktype = {none} } @InProceedings{Rainer_2013_PQS, author = {Rainer, Benjamin and Timmerer, Christian and Waltl, Markus}, booktitle = {4th International Workshop on Perceptual Quality of Systems 2013 (PQS 2013)}, title = {Recommendations for the Subjective Evaluation of Sensory Experience}, year = {2013}, address = {Vienna, Austria}, editor = {Schatz, Raimund}, month = {sep}, pages = {1-6}, publisher = {IEEE}, abstract = {Selecting and adopting the appropriate assessment method for conducting subjective quality assessments is a challenging task. The method decides whether the assessment is successful in delivering the correct answers to previously set up hypotheses. Therefore, in this paper we provide recommendations on test methods used in the domain of Sensory Experience. The proposed test methods comprise single stimulus and double stimulus methods. 
These test methods were used in previous studies and are presented in combination with the results of the subjective quality assessments with which they were used. Furthermore, we briefly outline our test setup, test design, and test content for assessing Sensory Experience which have been validated through conducted assessments.}, keywords = {Subjective Evaluation Methods; Sensory Experience; Recommendations}, language = {EN}, location = {Vienna, Austria}, talkdate = {2013.09.03}, talktype = {registered} } @InProceedings{Rainer2013_QoMEX2013_MediaPlayout, author = {Rainer, Benjamin and Timmerer, Christian}, booktitle = {Proceedings of the 5th International Workshop on Quality of Multimedia Experience (QoMEX'13)}, title = {Adaptive Media Playout for Inter-Destination Media Synchronization}, year = {2013}, address = {Los Alamitos, CA, USA}, editor = {Timmerer, Christian and Le Callet, Patrick and Varela, Martin and Winkler, Stefan and Falk, Tiago H}, month = {jul}, pages = {44-45}, publisher = {IEEE}, abstract = {Synchronizing multimedia playback among geographically distributed clients is a challenging task and is referred to as Inter-Destination Media Synchronization (IDMS). In this paper we discuss the uses cases of IDMS as identified within the SocialSensor Project and based on these use cases we derive a novel Adaptive Media Playout (AMP) scheme which aims on carrying out the process of synchronizing the media playback at the clients to a given synchronization point. We propose how visual and acoustic features can be used to achieve a QoE-aware and context-aware AMP scheme.}, isbn13 = {-}, keywords = {Adaptive Media Playout, Inter-Destination Media Synchronization, Social TV}, language = {EN}, location = {Klagenfurt am Wörthersee, Austria}, pdf = {https://www.itec.aau.at/bib/files/QoMEX2013_MediaPlayout.pdf}, talkdate = {2013.07.03}, talktype = {poster}, url = {http://www.qomex2013.org} } @InProceedings{Rainer2013_QoMEX2013_EvalPlatform, author = {Rainer, Benjamin and Waltl, Markus and Timmerer, Christian}, booktitle = {Proceedings of the 5th International Workshop on Quality of Multimedia Experience (QoMEX'13)}, title = {A Web based Subjective Evaluation Platform}, year = {2013}, address = {Los Alamitos, CA, USA}, editor = {Timmerer, Christian and Le Callet, Patrick and Varela, Martin and Winkler, Stefan and Falk, Tiago H}, month = {jul}, pages = {24-25}, publisher = {IEEE}, abstract = {Preparing and conducting subjective quality assessments is a time consuming and expensive task. Therefore, we present a Web-based evaluation framework which aims on reducing the time needed for planning and designing a subjective quality assessment. The presented framework can be used for both crowdsourced and laboratory experiments. It should ease the task of designing a subjective quality assessment by providing a flexible framework. 
The framework has proven its applicability and flexibility to design and conduct assessments in the past and is available as open source.}, isbn13 = {-}, keywords = {Evaluation Platform, Crowdsourced Quality Evaluation, Laboratory Quality Evaluation, Quality Assessment Framework}, language = {EN}, location = {Klagenfurt am Wörthersee, Austria}, pdf = {https://www.itec.aau.at/bib/files/QoMEX2013_EvalPlatform.pdf}, talkdate = {2013.07.03}, talktype = {poster}, url = {http://www.qomex2013.org} } @InProceedings{Liu2013, author = {Liu, Yaning and Geurts, Joost and Point, Jean-Charles and Lederer, Stefan and Rainer, Benjamin and Mueller, Christopher and Timmerer, Christian and Hellwagner, Hermann}, booktitle = {Proceedings of the IEEE international Conference on Communication (ICC) 2013 – Next-Generation Networking Symposium}, title = {Dynamic Adaptive Streaming over CCN: A Caching and Overhead Analysis}, year = {2013}, address = {Budapest}, editor = {Mattheisen, Christopher and Murase, Tutomu}, month = {jun}, pages = {2222-2226}, publisher = {IEEE}, abstract = {In this paper, we present our implementation and evaluation of Dynamic Adaptive Streaming over Content centric networking (DASC) which implements MPEG Dynamic Adaptive Streaming over HTTP (DASH) utilizing a Content Centric Networking (CCN) naming scheme to identify content segments in a CCN network. In particular, video segments formatted according to MPEG-DASH are available in different quality levels but instead of HTTP, CCN is used for referencing and delivery. Based on the conditions of the network, the DASC client issues interests for segments achieving the best throughput. Due to segment caching within the network, subsequent requests for the same content can be served quicker. As a result, the quality of the video a user receives progressively improves, effectively overcoming bottlenecks in the network. We present two sets of experiments to evaluate the performance of DASC showing that throughput indeed improves. However, the generated overhead is relatively large and the adaptation strategy used for DASH that assumes an end-to-end connection could be revised for the hop-by-hop architecture of CCN.}, keywords = {Content Centric Networking, Dynamic Adaptive Streaming, HTTP Video Streaming, MPEG-DASH}, language = {EN}, location = {Budapest, Hungary}, pdf = {https://www.itec.aau.at/bib/files/ICC2013 -DASH Over CCN.PDF}, talkdate = {2013.06.11}, talktype = {registered}, url = {http://www.ieee-icc.org} } @InProceedings{Lederer2013b, author = {Lederer, Stefan and Mueller, Christopher and Rainer, Benjamin and Timmerer, Christian and Hellwagner, Hermann}, booktitle = {In Proceedings of the IEEE ICC'13 - Workshop on Immersive \& Interactive Multimedia Communications over the Future Internet}, title = {Adaptive Streaming over Content Centric Networks in Mobile Networks using Multiple Links}, year = {2013}, address = {Budapest}, editor = {Assuncao, Pedro and Atzori, Luigi and Dagiuklas, Tasos and Kondoz, Ahmet}, month = {jun}, pages = {687-691}, publisher = {IEEE}, abstract = {This paper presents the usage of Content Centric Networking (CCN) for adaptive multimedia streaming in mobile environments, leveraging the recent ISO/IEC MPEG Dynamic Adaptive Streaming over HTTP (DASH) standard. The performance of DASH over CCN is evaluated using real-world mobile bandwidth traces and compared to previous evaluations of different DASH-based as well as proprietary systems. 
As there are no client-server connections in CCN, it offers the possibility to transfer data from multiple sources as well as over multiple links in parallel, which is definitely an important feature, e.g., for mobile devices offering multiple network links. This functionality is used and evaluated in this paper in combination with DASH, making it possible to dynamically choose the best performing link for media streaming, which is a clear advantage over DASH using HTTP and the TCP/IP protocol stack. The evaluation therefore investigates DASH over CCN in two scenarios using synthetic and real-world mobile bandwidth traces respectively, showing a significantly better performance than conventional DASH using only one connection.}, keywords = {MPEG-DASH, CCN, Dynamic Adaptive Streaming over HTTP, Content Centric Networking, Evaluation}, language = {EN}, location = {Budapest, Hungary}, pdf = {https://www.itec.aau.at/bib/files/ICC2013_Mobile_DASHoverCCN.pdf}, talkdate = {2013.06.13}, talktype = {registered}, url = {http://multicomm.diee.unica.it/} } @InProceedings{Lederer2013a, author = {Lederer, Stefan and Mueller, Christopher and Timmerer, Christian and Concolato, Cyril and Le Feuvre, Jean and Fliegel, Karel}, booktitle = {Proceedings of the 4th ACM Multimedia Systems Conference}, title = {Distributed DASH Dataset}, year = {2013}, address = {New York, NY, USA}, editor = {Griwodz, Carsten}, month = {feb}, pages = {pp. 131-135}, publisher = {ACM}, abstract = {The delivery of multimedia content over HTTP and on top of existing Internet infrastructures is becoming the preferred method within heterogeneous environment. The basic design principle is having an intelligent client which selects given and applicable media representations by issuing HTTP requests for individual segments based on the users' context and current conditions. Typically, this client behavior differs between implementations of the same kind and for the objective evaluations thereof appropriate datasets are needed. This paper presents a distributed dataset for the recently published MPEG-DASH standard which is mirrored at different sites across Europe, namely Klagenfurt, Paris, and Prague. A client implementation may choose to request segments from these sites and dynamically switch to a different location, e.g., in case the one currently used causes any issues. 
Thus, this distributed DASH dataset can be used for real-world evaluations enabling the simulation of switching between different content delivery networks.}, keywords = {Dataset, Dynamic Adaptive Streaming over HTTP, DASH.}, language = {EN}, location = {Oslo, Norway}, pdf = {https://www.itec.aau.at/bib/files/MMSys_CDN_Simulation_Dataset_v2.0.pdf}, talkdate = {2013.02.27}, talktype = {registered}, url = {http://www.mmsys.org/} } @InProceedings{Alberti_QoMEX2013_DASH, author = {Alberti, Claudio and Renzi, Daniele and Timmerer, Christian and Mueller, Christopher and Lederer, Stefan and Battista, Stefano and Mattavelli, Marco}, booktitle = {Proceedings of the 5th International Workshop on Quality of Multimedia Experience (QoMEX'13)}, title = {Automated QoE Evaluation of Dynamic Adaptive Streaming over HTTP}, year = {2013}, address = {Los Alamitos, CA, USA}, editor = {Timmerer, Christian and Le Callet, Patrick and Varela, Martin and Winkler, Stefan and Falk, Tiago}, month = {jul}, pages = {58--63}, publisher = {IEEE}, abstract = {Dynamic Adaptive Streaming over HTTP (DASH) is referred to as a multimedia streaming standard to deliver high quality multimedia content over the Internet using conventional HTTP Web servers. As a fundamental feature, it enables automatic switching of quality levels according to network conditions, user requirements, and expectations. Currently, the proposed adaptation schemes for HTTP streaming mostly rely on throughput measurements and/or buffer-related metrics, such as buffer exhaustion and levels. In this paper, we propose to enhance the DASH adaptation logic by feeding it with additional information from our evaluation of the users' perception approximating the user- perceived quality of video playback. The proposed model aims at conveniently combining TCP-, buffer-, and media content-related metrics as well as user requirements and expectations to be used as an input for the DASH adaptation logic. Experiments have demonstrated that the chosen model enhances the capability of the adaptation logic to select the optimal video quality level. Finally, we integrated all our findings into a real DASH system with QoE monitoring capabilities.}, keywords = {DASH, Quality of Experience, monitoring, content adaptation}, language = {EN}, location = {Klagenfurt am Wörthersee, Austria}, pdf = {https://www.itec.aau.at/bib/files/p20-Alberti.pdf}, talkdate = {2013.07.03}, talktype = {registered} } @InProceedings{Waltl2012_QoMEX2012_SensoryEffects, author = {Waltl, Markus and Timmerer, Christian and Rainer, Benjamin and Hellwagner, Hermann}, booktitle = {Proceedings of the 4th International Workshop on Quality of Multimedia Experience (QoMEX'12)}, title = {Sensory Effect Dataset and Test Setups}, year = {2012}, address = {Yarra Valley, Australia}, editor = {Burnett, Ian and Wu, Henry}, month = {jul}, pages = {115--120}, publisher = {IEEE}, abstract = {Additional constituents for the representation of multimedia content gained more and more attention. For example, the amount of cinemas equipped with additional devices (e.g., ambient light, vibrating seats, wind generators, water sprayers, heater/coolers) that stimulate senses going beyond audition and vision increases. On the content side the MPEG-V standard specifies – among others – Sensory Effect Metadata (SEM) which provides means to describe sensory effects such as wind, vibration, light, etc. to be attached to audio-visual content and, thus, offering an enhanced and immersive experience for the user. 
However, there is a lack of a common set of test content allowing for various subjective user studies and verification across different test sites. In this paper we provide our dataset comprising a number of videos from different genres enriched with MPEG-V compliant Sensory Effect Metadata descriptions. Furthermore, we describe possible test setups using off-the-shelf hardware for conducting subjective quality assessments.}, isbn13 = {-}, keywords = {Sensory Experience, Sensory Effects, MPEG-V, Dataset, Test Environment}, language = {EN}, location = {Yarra Valley, Australia}, pdf = {https://www.itec.aau.at/bib/files/QoMEX2012_Dataset.pdf}, talkdate = {2012.07.05}, talktype = {registered}, url = {http://www.qomex2012.org} }
@Article{Waltl2012_MTAP, author = {Waltl, Markus and Timmerer, Christian and Rainer, Benjamin and Hellwagner, Hermann}, journal = {Multimedia Tools and Applications}, title = {Sensory Effects for Ambient Experiences in the World Wide Web}, year = {2012}, month = {may}, number = {-}, pages = {1--20}, volume = {-}, abstract = {More and more content in various formats becomes available via the World Wide Web (WWW). Currently available Web browsers are able to access and interpret these contents (i.e., Web videos, text, image, and audio). These contents stimulate only senses like audition or vision. Recently, it has been proposed to stimulate also other senses while consuming multimedia content, through so-called sensory effects. These sensory effects aim to enhance the ambient experience by providing effects such as light, wind, vibration, etc. The effects are represented as a Sensory Effect Metadata (SEM) description which is associated to multimedia content and is rendered on devices like fans, vibration chairs, or lamps. In this paper we present two subjective quality assessments which comprise sensory effects, such as light, in the area of the WWW and the results achieved. The first assessment evaluates the influence of light effects on the Quality of Experience (QoE). The second assessment measures the impact of different settings for the color calculation on the viewing experience. Furthermore, we describe a Web browser plug-in for Mozilla Firefox which is able to render such sensory effects that are provided via the WWW.}, address = {Berlin, Heidelberg, New York}, doi = {10.1007/s11042-012-1099-8}, keywords = {World Wide Web, MPEG-V, Subjective quality assessment, Sensory effects, Quality of multimedia experience}, language = {EN}, publisher = {Springer Verlag}, url = {http://dx.doi.org/10.1007/s11042-012-1099-8} }
@InProceedings{Waltl2012_MMM, author = {Waltl, Markus and Rainer, Benjamin and Timmerer, Christian and Hellwagner, Hermann}, booktitle = {Advances in Multimedia Modeling}, title = {Enhancing the User Experience with the Sensory Effect Media Player and AmbientLib}, year = {2012}, address = {Berlin, Heidelberg, New York}, editor = {Schoeffmann, Klaus and Merialdo, Bernard and Hauptmann, Alexander and Ngo, Chong-Wah and Andreopoulos, Yiannis and Breiteneder, Christian}, month = {jan}, pages = {624--626}, publisher = {Springer}, series = {LNCS 7131}, abstract = {Multimedia content is increasingly used in every area of our life. Still, each type of content only stimulates the visual and/or the hearing system. Thus, the user experience depends only on those two stimuli. In this paper we introduce a standard which offers the possibility to add additional effects to multimedia content.
Furthermore, we present a multimedia player and a Web browser plug-in which use this standard to stimulate further senses by using additional sensory effects (i.e., wind, vibration, and light) to enhance the user experience, resulting in a unique, worthwhile sensory experience.}, keywords = {MPEG-V, User Experience, Sensory Experience, Media Player, Ambient, World Wide Web}, language = {EN}, location = {Klagenfurt, Austria}, pdf = {https://www.itec.aau.at/bib/files/mwbrcthh_mmm2012.pdf}, talkdate = {2012.01.05}, talktype = {poster} }
@InProceedings{Waltl2012_ACMMM_OSSC, author = {Waltl, Markus and Rainer, Benjamin and Timmerer, Christian and Hellwagner, Hermann}, booktitle = {Proceedings of the 20th ACM Multimedia (MM'12)}, title = {A Toolset for the Authoring, Simulation, and Rendering of Sensory Experiences}, year = {2012}, address = {Nara, Japan}, editor = {Babaguchi, Noboru and Aizawa, Kiyoharu and Smith, John}, month = {oct}, pages = {1469-1472}, publisher = {ACM}, abstract = {This paper describes a toolset for the authoring, simulating, and rendering of multimedia content annotated with Sensory Effect Metadata (SEM) descriptions as specified in Part 3 of the MPEG-V standard. This part of MPEG-V standardizes the description of sensory effects (e.g., light, wind) in order to be rendered on sensory devices (e.g., fans, vibration chairs) aiming at generating a sensory experience stimulating possibly all human senses. Our implementation comprises a toolset to author sensory effects associated with multimedia content and the simulation thereof. Furthermore, it includes a library, a standalone player, and a Web browser plug-in which enables the playback and rendering of sensory effects on off-the-shelf rendering devices and in various contexts. All software modules are available under the GNU General Public License (GPL) v3 and the GNU Lesser General Public License (LGPL) v3, respectively.}, isbn13 = {-}, keywords = {MPEG-V, Annotation Tool, Simulator, Media Player, Web Browser Plug-in, Sensory Effects, Sensory Experience}, language = {EN}, location = {Nara, Japan}, pdf = {https://www.itec.aau.at/bib/files/acmmm2012_ossc_mwbrcthh.pdf}, talkdate = {2012.10.31}, talktype = {registered}, url = {http://www.acmm2012.org} }
@Article{Timmerer2012909, author = {Timmerer, Christian and Waltl, Markus and Rainer, Benjamin and Hellwagner, Hermann}, journal = {Signal Processing: Image Communication}, title = {Assessing the quality of sensory experience for multimedia presentations}, year = {2012}, month = {sep}, number = {8}, pages = {909--916}, volume = {27}, abstract = {This paper introduces the concept of sensory experience by utilizing sensory effects such as wind or lighting as another dimension which contributes to the quality of the user experience. In particular, we utilize a representation format for sensory effects that are attached to traditional multimedia resources such as audio, video, and image contents. Sensory effects (e.g., wind, lighting, explosion, heat, cold) are rendered on special devices (e.g., fans, ambient lights, motion chair, air condition) in synchronization with the traditional multimedia resources and shall stimulate other senses than audition and vision (e.g., mechanoreception, equilibrioception, thermoreception), with the intention to increase the user's Quality of Experience (QoE).
In particular, the paper provides a comprehensive introduction into the concept of sensory experience, its assessment in terms of the QoE, and related standardization and implementation efforts. Finally, we will highlight open issues and research challenges including future work.}, address = {Amsterdam, Netherlands}, doi = {10.1016/j.image.2012.01.016}, keywords = {Quality of Experience, Sensory experience, Subjective quality assessment, Experimental results, MPEG-V}, language = {EN}, pdf = {https://www.itec.aau.at/bib/files/1-s2.0-S0923596512000252-main.pdf}, publisher = {Elsevier}, url = {http://dx.doi.org/10.1016/j.image.2012.01.016} } @InProceedings{Rainer2012_QoMEX2012_SensoryEffects, author = {Rainer, Benjamin and Waltl, Markus and Cheng, Eva and Shujau, Muawiyath and Timmerer, Christian and Davis, Stephen and Burnett, Ian and Hellwagner, Hermann}, booktitle = {Proceedings of the 4th International Workshop on Quality of Multimedia Experience (QoMEX'12)}, title = {Investigating the Impact of Sensory Effects on the Quality of Experience and Emotional Response in Web Videos}, year = {2012}, address = {Yarra Valley, Australia}, editor = {Burnett, Ian and Wu, Henry}, month = {jul}, pages = {278--283}, publisher = {IEEE}, abstract = {Multimedia is ubiquitously available online with large amounts of video increasingly consumed through Web sites such as YouTube or Google Video. However, online multimedia typically limits users to visual/auditory stimulus, with onscreen visual media accompanied by audio. The recent introduction of MPEG-V proposed multi-sensory user experiences in multimedia environments, such as enriching video content with so-called sensory effects like wind, vibration, light, etc. In MPEG-V, these sensory effects are represented as Sensory Effect Metadata (SEM), which is additionally associated to the multimedia content. This paper presents three user studies that utilize the sensory effects framework of MPEG-V, investigating the emotional response of users and enhancement of Quality of Experience (QoE) of Web video sequences from a range of genres with and without sensory effects. In particular, the user studies were conducted in Austria and Australia to investigate whether geography and cultural differences affect users’ elicited emotional responses and QoE.}, isbn13 = {-}, keywords = {Quality of Multimedia Experience, Sensory Effects, MPEG-V, Subjective Quality Assessment, World Wide Web, Sensory Experience}, language = {EN}, location = {Yarra Valley, Australia}, pdf = {https://www.itec.aau.at/bib/files/QoMEX2012_UserStudy.pdf}, talkdate = {2012.07.07}, talktype = {registered}, url = {http://www.qomex2012.org} } @InProceedings{Mueller2012a, author = {Mueller, Christopher and Lederer, Stefan and Timmerer, Christian}, booktitle = {Proceedings of the Fourth Annual ACM SIGMM Workshop on Mobile Video (MoVid12)}, title = {An Evaluation of Dynamic Adaptive Streaming over HTTP in Vehicular Environments}, year = {2012}, address = {New York, NY, USA}, editor = {Hefeeda, Mohamed and Hsu, Cheng-Hsin and Chatterjee, Mainak and Venkatasubramanian, Nalini and Ganguly, Samrat}, month = {feb}, pages = {37-42}, publisher = {ACM}, abstract = {MPEGs' Dynamic Adaptive Streaming over HTTP (MPEG-DASH) is an emerging standard designed for media delivery over the top of existing infrastructures and able to handle varying bandwidth conditions during a streaming session. 
This requirement is very important, specifically within mobile environments, and, thus, DASH could potentially become a major driver for mobile multimedia streaming. Hence, this paper provides a detailed evaluation of our implementation of MPEG-DASH compared to the most popular proprietary systems, i.e., Microsoft Smooth Streaming, Adobe HTTP Dynamic Streaming, and Apple HTTP Live Streaming. In particular, these systems will be evaluated under restricted conditions which are due to vehicular mobility. In anticipation of the results, our prototype implementation of MPEG-DASH can very well compete with state-of-the-art solutions and, thus, can be regarded as a mature standard ready for industry adoption.}, keywords = {Dynamic Adaptive Streaming over HTTP, MPEG-DASH, Microsoft Smooth Streaming, Adobe HTTP Dynamic Streaming, Evaluation, Apple HTTP Live Streaming, Mobile Networks, Vehicular Mobility}, language = {EN}, location = {Chapel Hill, North Carolina, USA}, pdf = {https://www.itec.aau.at/bib/files/p37-mueller.pdf}, talkdate = {2012.02.24}, talktype = {registered} }
@InProceedings{Mueller2012VCIP, author = {Mueller, Christopher and Lederer, Stefan and Timmerer, Christian}, booktitle = {Proceedings of the IEEE Conference on Visual Communications and Image Processing Conference (VCIP 2012)}, title = {A Proxy Effect Analysis and Fair Adaptation Algorithm for Multiple Competing Dynamic Adaptive Streaming over HTTP Clients}, year = {2012}, address = {San Diego, CA, USA}, editor = {Aizawa, Kiyoharu and Kuo, Jay and Liu, Zicheng}, month = {nov}, pages = {6}, publisher = {IEEE}, abstract = {Multimedia streaming technologies based on the Hypertext Transfer Protocol (HTTP) are very popular and used by many content providers such as Netflix, Hulu, and Vudu. Recently, ISO/IEC MPEG has ratified Dynamic Adaptive Streaming over HTTP (DASH) which extends traditional HTTP streaming with an adaptive component addressing the issue of varying bandwidth conditions that users are facing in networks based on the Internet Protocol (IP). Additionally, industry has already deployed several solutions based on such an approach which simplifies large scale deployment because the whole streaming logic is located at the client. However, these features may introduce drawbacks when multiple clients compete for a network bottleneck due to the fact that the clients are not aware of the network infrastructure such as proxies or other clients. This paper identifies these negative effects and provides an evaluation thereof using MPEG-DASH and Microsoft Smooth Streaming. Furthermore, we propose a novel adaptation algorithm introducing the concept of fairness regarding a cluster of clients.
In anticipation of the results we can conclude that we achieve more efficient bottleneck bandwidth utilization and less quality switches.}, language = {EN}, location = {San Diego, USA}, pdf = {https://www.itec.aau.at/bib/files/PID2500949.pdf}, talkdate = {2012.11.29}, talktype = {registered}, url = {http://www.vcip2012.org} } @InProceedings{Mueller2012, author = {Lederer, Stefan and Mueller, Christopher and Timmerer, Christian}, booktitle = {Proceedings of the Third Annual {ACM SIGMM} Conference on Multimedia Systems ({MMSys12})}, title = {Dynamic Adaptive Streaming over {HTTP} Dataset}, year = {2012}, address = {New York, NY, USA}, editor = {Claypool, Mark and Griwodz, Carsten and Mayer-Patel, Ketan}, month = {feb}, pages = {89-94}, publisher = {ACM}, abstract = {Adaptive HTTP streaming got lot of attention in recent years and with dynamic adaptive streaming over HTTP (DASH) a standard is available. Many papers cover this topic and present their research results, but unfortunately all of them use their own private dataset which – in most cases – is not publicly available. Hence, it is difficult to compare, e.g., adaptation algorithms in an objective way due to the lack of a common dataset which shall be used as basis for such experiments. In this paper, we present our DASH dataset featuring our DASHEncoder, an open source DASH content generation tool. We also provide basic evaluations of the different segment lengths, the influence of HTTP server settings, and, in this context, we show some of the advantages as well as problems of shorter segment lengths.}, keywords = {Dynamic Adaptive Streaming over HTTP, DASH, Dataset, Encoder, Content Generation Tool}, language = {EN}, location = {Chapel Hill, North Carolina, USA}, pdf = {https://www.itec.aau.at/bib/files/p89-lederer.pdf}, talkdate = {2012.02.22}, talktype = {registered} } @InProceedings{Lederer2012c, author = {Liu, Yaning and Geurts, Joost and Rainer, Benjamin and Lederer, Stefan and Mueller, Christopher and Timmerer, Christian}, booktitle = {CCNx Community Meeting (CCNxConn 2012)}, title = {DASH over CCN: A CCN use-case for a Social Media based collaborative project}, year = {2012}, address = {Sophia Antipolis}, editor = {Carofiglio, Giovanna}, month = {sep}, pages = {1-1}, publisher = {Parc}, keywords = {CCN, DASH}, language = {EN}, location = {Sophia Antipolis, France}, pdf = {https://www.itec.aau.at/bib/files/DashOverCCN.pdf}, talkdate = {2012.09.12}, talktype = {registered}, url = {http://www.ccnx.org/ccnxcon2012/} } @InProceedings{Lederer2012b, author = {Lederer, Stefan and Mueller, Christopher and Rainer, Benjamin and Waltl, Markus and Timmerer, Christian}, booktitle = {Proceedings of the IEEE Conference on Visual Communications and Image Processing Conference (VCIP 2012)}, title = {An open source MPEG DASH evaluation suite}, year = {2012}, address = {San Diego, CA, USA}, editor = {Izquierdo, Ebroul and Wang, Xin}, month = {nov}, pages = {1-1}, publisher = {IEEE}, abstract = {In this paper we demonstrate our MPEG-DASH evaluation suite, which comprises several components on the client side as well as on the server side. The major client components are the VLC DASH plugin, libDASH, and DASH-JS, a JavaScript-based DASH client. These tools enable performance tests on various platforms, e.g., Windows and Linux as well as mobile platforms such as Android. Moreover, due to their flexible structure it is possible to integrate adaptation logics and evaluate them under consistent conditions. 
On the server side we provide the content generation tool DASHEncoder, our MPEG-DASH datasets well as the MPEG-DASH conformance validator.}, keywords = {MPEG DASH, Open Source, Demo}, language = {EN}, location = {San Diego, USA}, pdf = {https://www.itec.aau.at/bib/files/open-source_MPEG-DASH_evaluation_suite.pdf}, talkdate = {2012.11.29}, talktype = {registered}, url = {http://www.vcip2012.org} } @InProceedings{Lederer2012, author = {Lederer, Stefan and Mueller, Christopher and Timmerer, Christian}, booktitle = {Proceedings of the 19th International Packet Video Workshop ({PV} 2012)}, title = {Towards Peer-Assisted Dynamic Adaptive Streaming over HTTP}, year = {2012}, address = {Munich, Germany}, editor = {Guillemot, Christine and Chakareski, Jacob and Steinbach, Eckehard}, month = {may}, pages = {1-6}, publisher = {IEEE}, abstract = {This paper presents our peer-assisted Dynamic Adaptive Streaming over HTTP (pDASH) proposal as well as an evaluation based on our DASH simulation environment in comparison to conventional approaches, i.e., non-peer-assisted DASH. Our approach maintains the standard conformance to MPEG-DASH enabling an easy and straightforward way of enhancing a streaming system with peer assistance to reduce the bandwidth and infrastructure requirements of the content/service provider. In anticipation of the results our system achieves a bandwidth reduction of Content Distribution Networks (CDN) and as a consequence the corresponding infrastructure costs of the content/service providers by up to 25% by leveraging the upstream capacity of neighboring peers. Furthermore, the cost savings have been evaluated using a cost model that is based on the current Amazon CloudFront pricing scheme. Furthermore, we have also evaluated the performance impact that various combinations of quality levels of the content could have in a peer-assisted streaming system as well as the client behavior in such an environment.}, keywords = {Peer-Assisted Streaming, MPEG-DASH, Dynamic Adaptive Streaming over HTTP, CDN Bandwidth Reduction, Peer-to-Peer Streaming.}, language = {EN}, location = {Munich, Germany}, pdf = {https://www.itec.aau.at/bib/files/Paper53.pdf}, talkdate = {2012.05.10}, talktype = {registered} } @Article{Hossfeld2012, author = {Ho{\ss}feld, Tobias and Schatz, Raimund and Varela, Martin and Timmerer, Christian}, journal = {Communications Magazine, IEEE}, title = {Challenges of QoE Management for Cloud Applications}, year = {2012}, month = {apr}, number = {4}, pages = {28-36}, volume = {50}, abstract = {Cloud computing is currently gaining enormous momentum due to a number of promised benefits: ease of use in terms of deployment, administration, and maintenance, along with high scalability and flexibility to create new services. However, as more personal and business applications migrate to the cloud, service quality will become an important differentiator between providers. In particular, quality of experience as perceived by users has the potential to become the guiding paradigm for managing quality in the cloud. In this article, we discuss technical challenges emerging from shifting services to the cloud, as well as how this shift impacts QoE and QoE management. Thereby, a particular focus is on multimedia cloud applications. 
Together with a novel QoE-based classification scheme of cloud applications, these challenges drive the research agenda on QoE management for cloud applications.}, address = {New York, NY, USA}, doi = {10.1109/MCOM.2012.6178831}, keywords = {cloud computing, multimedia computing, software quality, QoE management, QoE-based classification scheme, multimedia cloud applications, quality management, quality of experience, service quality, Cloud computing, Multimedia communication, Quality of service, Streaming media}, language = {EN}, pdf = {https://www.itec.aau.at/bib/files/06178831.pdf}, publisher = {IEEE Communications Society} } @TechReport{Waltl2011_TR, author = {Waltl, Markus and Timmerer, Christian and Rainer, Benjamin and Hellwagner, Hermann}, institution = {Alpen-Adria-Universität Klagenfurt}, title = {Sensory Effects for Ambient Experiences in the World Wide Web}, year = {2011}, address = {Klagenfurt, Austria}, month = {jul}, number = {TR/ITEC/11/1.13}, abstract = {More and more content in various formats become available via the World Wide Web (WWW). Currently available Web browsers are able to access and interpret these contents (i.e., Web videos, text, image, and audio). These contents stimulate only senses like audition or vision. Recently, it has been proposed to stimulate also other senses while consuming multimedia content through so-called sensory effects. These sensory effects aim to enhance the ambient experience by providing effects, such as, light, wind, vibration, etc. The effects are represented as Sensory Effect Metadata (SEM) which is associated to multimedia content and is rendered on devices like fans, vibration chairs, or lamps. In this paper we present a plug-in for the Mozilla Firefox browser which is able to render such sensory effects that are provided via the WWW. Furthermore, the paper describes two user studies conducted with the plug-in and presents the results achieved.}, keywords = {World Wide Web, MPEG-V, Subjective Quality Assessment, Sensory Effects, Quality of Multimedia Experience}, language = {EN}, pages = {12}, pdf = {https://www.itec.aau.at/bib/files/Waltl_TR11113.pdf}, publisher = {Alpen-Adria Universität Klagenfurt} } @InProceedings{Waltl2011_MMWeb, author = {Waltl, Markus and Rainer, Benjamin and Timmerer, Christian and Hellwagner, Hermann}, booktitle = {Proceedings of the Workshop on Multimedia on the Web (MMWeb) 2011}, title = {Sensory Experience for Videos on the Web}, year = {2011}, address = {Los Alamitos, CA, USA}, editor = {Böszörmenyi, Laszlo and Marques, Oge and Lux, Mathias and Klamma, Ralf}, month = {sep}, pages = {1--3}, publisher = {IEEE}, abstract = {More and more multimedia content is becoming available via the World Wide Web (WWW). These contents stimulate only senses like hearing or vision. Recently, it has been proposed to stimulate also other senses while consuming multimedia content, through so-called sensory effects. These sensory effects aim at enhancing the user’s viewing experience by providing effects such as light, wind, vibration, etc. These effects are represented as Sensory Effect Metadata (SEM) which is associated to multimedia content and is rendered on devices like fans, lamps, or vibration chairs. 
In this paper, we present a plug-in for Web browsers which is able to render such sensory effects provided via Web content, and we describe a demonstrator that uses this plug-in to control an amBX system.}, keywords = {Quality of Experience, Sensory Experience, MPEG-V, Web Browser Plug-in, World Wide Web}, language = {EN}, location = {Graz, Austria}, pdf = {https://www.itec.aau.at/bib/files/MMWeb2011_mwbrcthh.pdf}, talkdate = {2011.09.08}, talktype = {poster} } @InProceedings{Waltl2010_WIAMIS_SensoryEffects, author = {Waltl, Markus and Timmerer, Christian and Hellwagner, Hermann}, booktitle = {Proceedings of the 11th International Workshop on Image Analysis for Multimedia Interactive Services (WIAMIS'10)}, title = {Increasing the User Experience of Multimedia Presentations with Sensory Effects}, year = {2010}, address = {Los Alamitos, CA, USA}, editor = {Leonardi, Riccardo and Migliorati, Pierangelo and Cavallaro, Andrea}, month = {apr}, pages = {1-4}, publisher = {IEEE}, abstract = {The term Universal Multimedia Experience (UME) has gained momentum and is well recognized within the research community. As this approach puts the user at center stage, additional complexity is added to the overall quality assessment problem, which calls for a scientific framework to capture, measure, quantify, judge, and explain the user experience. In previous work we have proposed the annotation of multimedia content with sensory effect metadata that can be used to stimulate senses other than vision or audition. In this paper we report first results obtained from subjective tests on sensory effects attached to traditional multimedia presentations such as movies, which shall lead to an enhanced, unique, and worthwhile user experience.}, isbn13 = {9781424478484}, keywords = {Sensory Information, MPEG-V}, language = {EN}, location = {Desenzano del Garda, Italy}, pdf = {https://www.itec.aau.at/bib/files/wiamis2010_mwcthh.pdf}, talkdate = {2010.04.13}, talktype = {poster} } @InProceedings{Waltl2010_QoMEX2010_SensoryEffects, author = {Waltl, Markus and Timmerer, Christian and Hellwagner, Hermann}, booktitle = {Proceedings of the 2nd International Workshop on Quality of Multimedia Experience (QoMEX'10)}, title = {Improving the Quality of Multimedia Experience through Sensory Effects}, year = {2010}, address = {Los Alamitos, CA, USA}, editor = {Perkis, Andrew and Möller, Sebastian and Svensson, Peter and Reibman, Amy}, month = {jun}, pages = {124-129}, publisher = {IEEE}, abstract = {In previous and related work, sensory effects are presented as a tool for increasing the user experience of multimedia presentations by stimulating senses other than vision or audition. In this paper we primarily investigated how the Quality of Experience (QoE) depends on the video bit-rate of multimedia contents annotated with sensory effects (e.g., wind, vibration, light). To this end, we defined a subjective quality assessment methodology based on standardized methods. The paper describes the test environment, its setup, and conditions in detail. Furthermore, we experimented with a novel voting device that allows for continuous voting feedback during a sequence in addition to the overall quality voting at the end of each sequence. The results obtained from the subjective quality assessment are presented and discussed thoroughly.
The results show an improvement of the quality of the multimedia experience thanks to the sensory effects.}, doi = {10.1109/QOMEX.2009.5246962}, isbn13 = {9781424469581}, keywords = {Sensory Information, MPEG-V}, language = {EN}, location = {Trondheim, Norway}, pdf = {https://www.itec.aau.at/bib/files/qomex2010_mwcthh.pdf}, talkdate = {2010.06.21}, talktype = {poster}, url = {http://www.qomex2010.org} } @InProceedings{Timmerer2010_ACMMM_SensoryExperience, author = {Timmerer, Christian and Müller, Karsten}, booktitle = {Proceedings of the International Conference on Multimedia (MM '10)}, title = {Immersive Future Media Technologies: From 3D Video to Sensory Experiences}, year = {2010}, address = {New York, NY, USA}, editor = {del Bimbo, Alberto and Chang, Shih-Fu and Smeulders, Arnold}, month = {oct}, pages = {1781--1782}, publisher = {ACM}, abstract = {In this tutorial we present immersive future media technologies ranging from 3D video to sensory experiences. The former targets stereo and multi-view video technologies, whereas the latter aims at stimulating senses other than vision or audition, enabling advanced user experiences through sensory effects.}, doi = {10.1145/1873951.1874369}, keywords = {Measurement, Experimentation, Human Factors, Standardization, Immersive Media, 3D Video, Stereo Video, Multi-view Video, Sensory Effects, MPEG-V, Quality of Experience}, language = {EN}, location = {Florence, Italy}, pdf = {https://www.itec.aau.at/bib/files/tut08x-timmerer.pdf}, talkdate = {2010.10.25}, talktype = {registered}, url = {http://doi.acm.org/10.1145/1873951.1874369} } @InProceedings{Waltl2009, author = {Waltl, Markus and Timmerer, Christian and Hellwagner, Hermann}, booktitle = {Proceedings of the First International Workshop on Quality of Multimedia Experience (QoMEX 2009)}, title = {A Test-Bed for Quality of Multimedia Experience Evaluation of Sensory Effects}, year = {2009}, address = {Los Alamitos, CA, USA}, editor = {Ebrahimi, Touradj and El-Maleh, Khaled and Dane, Gokce and Karam, Lina}, month = {jul}, pages = {145-150}, publisher = {IEEE}, abstract = {This paper introduces a prototype test-bed for triggering sensory effects like light, wind, or vibration when presenting audiovisual resources, e.g., a video, to users. ISO/IEC MPEG is currently standardizing the Sensory Effect Description Language (SEDL) for describing such effects. This language is briefly described in the paper, and the test-bed, which is intended to evaluate the quality of the multimedia experience of users, is presented. It consists of a video annotation tool for sensory effects, a corresponding simulation tool, and a real test system.
Initial experiments and results on determining the color of light effects from the video content are reported.}, doi = {10.1109/QOMEX.2009.5246962}, isbn13 = {978-1-4244-4370-3}, keywords = {Sensory Information, MPEG-V}, language = {EN}, location = {San Diego, CA}, pdf = {https://www.itec.aau.at/bib/files/qomex2009_mwcthh.pdf}, talkdate = {2009.07.31}, talktype = {registered}, url = {http://www.qomex2009.org} } @InProceedings{Timmerer2009a, author = {Timmerer, Christian and Gelissen, Jean and Waltl, Markus and Hellwagner, Hermann}, booktitle = {Proceedings of the 2009 NEM Summit}, title = {Interfacing with Virtual Worlds}, year = {2009}, address = {Heidelberg}, editor = {Hrasnica, Halid}, month = {sep}, pages = {118-123}, publisher = {Eurescom – the European Institute for Research and Strategic Studies in Telecommunications – GmbH}, abstract = {Virtual worlds (often referred to as 3D3C for 3D visualization \& navigation and the 3C’s of Community, Creation and Commerce) integrate existing and emerging (media) technologies (e.g., instant messaging, video, 3D, VR, AI, chat, voice, etc.) that allow for the support of existing and the development of new kinds of networked services. The emergence of virtual worlds as platforms for networked services is recognized by businesses as an important enabler, as it offers the power to reshape the way companies interact with their environments (markets, customers, suppliers, creators, stakeholders, etc.) in a fashion comparable to the Internet, and to allow for the development of new (breakthrough) business models, services, applications, and devices. Each virtual world, however, has a different culture and audience making use of these specific worlds for a variety of reasons. These differences in existing Metaverses permit users to have unique experiences. In order to bridge these differences in existing and emerging Metaverses, a standardized framework is required, i.e., MPEG-V Media Context and Control (ISO/IEC 23005), which will provide a lower entry barrier to (multiple) virtual worlds both for providers of goods and services and for users. The aim of this paper is to provide an overview of MPEG-V and its intended standardization areas. Additionally, a review of MPEG-V’s most advanced part, Sensory Information, is given.}, isbn13 = {9783000289538}, keywords = {MPEG-V}, language = {EN}, pdf = {https://www.itec.aau.at/bib/files/nem2009_ctjgmwhh.pdf}, talktype = {none}, url = {http://www.nem-summit.eu} }