@inproceedings{16619,
  author = {Minh-Son Dao and Michael Riegler and Duc-Tien Dang-Nguyen and Cathal Gurrin and Yuta Nakashima and Mianxiong Dong},
  title = {ICDAR{\textquoteright}22: Intelligent Cross-Data Analysis and Retrieval},
  abstract = {We have recently witnessed the rise of cross-data problems alongside multimodal data problems. Examples of this research direction include cross-modal retrieval systems that use a textual query to look for images, air quality index prediction from lifelogging images, congestion prediction from weather and tweet data, and sleep quality prediction from daily exercises and meals. Although many investigations focusing on multimodal data analytics have been carried out, little research on cross-data (e.g., cross-modal, cross-domain, cross-platform) has been conducted. To promote intelligent cross-data analytics and retrieval research and to bring a smart, sustainable society to human beings, the article collection on "Intelligent Cross-Data Analysis and Retrieval" is introduced. This Research Topic welcomes contributors from diverse research domains and disciplines such as well-being, disaster prevention and mitigation, mobility, climate change, tourism, healthcare, and food computing.},
  year = {2022},
  booktitle = {2022 International Conference on Multimedia Retrieval},
  month = {06/2022},
  publisher = {ACM},
  address = {Newark, NJ, USA},
}