Survey of Web-based Crowdsourcing Frameworks for Subjective Quality Assessment
@InProceedings{Hossfeld2014,
  author    = {Ho{\ss}feld, Tobias and Hirth, Matthias and Korshunov, Pavel and Hanhart, Philippe and Gardlo, Bruno and Keimel, Christian and Timmerer, Christian},
  booktitle = {2014 IEEE International Workshop on Multimedia Signal Processing (MMSP 2014)},
  title     = {Survey of Web-based Crowdsourcing Frameworks for Subjective Quality Assessment},
  year      = {2014},
  address   = {Piscataway, N.J. 08854, U.S.A.},
  editor    = {Rahardja, Susanto and Zhang, Zhengyou and Pereira, Fernando and Loui, Alexander},
  month     = {sep},
  pages     = {6},
  publisher = {IEEE},
  abstract  = {The popularity of crowdsourcing for performing various tasks online has increased significantly in the past few years. The low cost and flexibility of crowdsourcing have, in particular, attracted researchers in the field of subjective multimedia evaluation and Quality of Experience (QoE). Since online assessment of multimedia content is challenging, several dedicated frameworks were created to aid in designing the tests, including support for testing methodologies such as ACR, DCR, and PC, setting up the tasks, training sessions, screening of the subjects, and storage of the resulting data. In this paper, we focus on web-based frameworks for multimedia quality assessment that support commonly used crowdsourcing platforms such as Amazon Mechanical Turk and Microworkers. We provide a detailed overview of the crowdsourcing frameworks and evaluate them to aid researchers in the field of QoE assessment in selecting frameworks and crowdsourcing platforms adequate for their experiments.},
  language  = {EN},
  pdf       = {https://www.itec.aau.at/bib/files/PDF_check_PID3271407.pdf},
  talktype  = {none}
}