{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,2,19]],"date-time":"2026-02-19T00:55:02Z","timestamp":1771462502433,"version":"3.50.1"},"reference-count":41,"publisher":"Elsevier BV","license":[{"start":{"date-parts":[[2023,1,1]],"date-time":"2023-01-01T00:00:00Z","timestamp":1672531200000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/linproxy.fan.workers.dev:443\/https\/www.elsevier.com\/tdm\/userlicense\/1.0\/"},{"start":{"date-parts":[[2023,1,1]],"date-time":"2023-01-01T00:00:00Z","timestamp":1672531200000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/linproxy.fan.workers.dev:443\/https\/www.elsevier.com\/legal\/tdmrep-license"},{"start":{"date-parts":[[2023,1,1]],"date-time":"2023-01-01T00:00:00Z","timestamp":1672531200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/linproxy.fan.workers.dev:443\/https\/doi.org\/10.15223\/policy-017"},{"start":{"date-parts":[[2023,1,1]],"date-time":"2023-01-01T00:00:00Z","timestamp":1672531200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/linproxy.fan.workers.dev:443\/https\/doi.org\/10.15223\/policy-037"},{"start":{"date-parts":[[2023,1,1]],"date-time":"2023-01-01T00:00:00Z","timestamp":1672531200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/linproxy.fan.workers.dev:443\/https\/doi.org\/10.15223\/policy-012"},{"start":{"date-parts":[[2023,1,1]],"date-time":"2023-01-01T00:00:00Z","timestamp":1672531200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/linproxy.fan.workers.dev:443\/https\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2023,1,1]],"date-time":"2023-01-01T00:00:00Z","timestamp":1672531200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/linproxy.fan.workers.dev:443\/https\/doi.org\/10.15223\/policy-004"}],"content-domain":{"domain":["elsevier.com","sciencedirect.com"],"crossmark-restriction":true},"short-container-title":["Expert Systems with Applications"],"published-print":{"date-parts":[[2023,1]]},"DOI":"10.1016\/j.eswa.2022.118631","type":"journal-article","created":{"date-parts":[[2022,8,28]],"date-time":"2022-08-28T11:49:10Z","timestamp":1661687350000},"page":"118631","update-policy":"https:\/\/linproxy.fan.workers.dev:443\/https\/doi.org\/10.1016\/elsevier_cm_policy","source":"Crossref","is-referenced-by-count":33,"special_numbering":"C","title":["MDFN: Mask deep fusion network for visible and infrared image fusion without reference ground-truth"],"prefix":"10.1016","volume":"211","author":[{"ORCID":"https:\/\/linproxy.fan.workers.dev:443\/https\/orcid.org\/0000-0002-2723-4574","authenticated-orcid":false,"given":"Chaoxun","family":"Guo","sequence":"first","affiliation":[]},{"given":"Dandan","family":"Fan","sequence":"additional","affiliation":[]},{"given":"Zhixing","family":"Jiang","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/linproxy.fan.workers.dev:443\/https\/orcid.org\/0000-0002-5027-5286","authenticated-orcid":false,"given":"David","family":"Zhang","sequence":"additional","affiliation":[]}],"member":"78","reference":[{"key":"10.1016\/j.eswa.2022.118631_b1","series-title":"Information fusion (Fusion), 2017 20th international conference on","first-page":"1","article-title":"Multi-sensor image fusion based on fourth order partial differential 
equations","author":"Bavirisetti","year":"2017"},{"key":"10.1016\/j.eswa.2022.118631_b2","doi-asserted-by":"crossref","first-page":"199","DOI":"10.1016\/j.optcom.2014.12.032","article-title":"Detail preserved fusion of visible and infrared images using regional saliency extraction and multi-scale image decomposition","volume":"341","author":"Cui","year":"2015","journal-title":"Optical Communications"},{"key":"10.1016\/j.eswa.2022.118631_b3","doi-asserted-by":"crossref","first-page":"75","DOI":"10.1016\/j.inffus.2016.03.003","article-title":"A review of remote sensing image fusion methods","volume":"32","author":"Ghassemian","year":"2016","journal-title":"Information Fusion"},{"issue":"2","key":"10.1016\/j.eswa.2022.118631_b4","doi-asserted-by":"crossref","first-page":"127","DOI":"10.1016\/j.inffus.2011.08.002","article-title":"A new image fusion performance metric based on visual information fidelity","volume":"14","author":"Han","year":"2013","journal-title":"Information Fusion"},{"issue":"3","key":"10.1016\/j.eswa.2022.118631_b5","doi-asserted-by":"crossref","first-page":"1076","DOI":"10.1109\/TIP.2016.2633863","article-title":"Perceptual image fusion using wavelets","volume":"26","author":"Hill","year":"2017","journal-title":"IEEE Transactions on Image Processing"},{"issue":"12","key":"10.1016\/j.eswa.2022.118631_b6","doi-asserted-by":"crossref","first-page":"2706","DOI":"10.1109\/TMM.2017.2711422","article-title":"An adaptive fusion algorithm for visible and infrared videos based on entropy and the cumulative distribution of gray levels","volume":"19","author":"Hu","year":"2017","journal-title":"IEEE Transactions on Multimedia"},{"issue":"17","key":"10.1016\/j.eswa.2022.118631_b7","doi-asserted-by":"crossref","first-page":"17633","DOI":"10.1007\/s11042-015-2879-8","article-title":"Image fusion method of SAR and infrared image based on curvelet transform with adaptive weighting","volume":"76","author":"Ji","year":"2017","journal-title":"Multimedia Tools and Applications"},{"issue":"8","key":"10.1016\/j.eswa.2022.118631_b8","doi-asserted-by":"crossref","first-page":"4118","DOI":"10.1109\/TIP.2018.2836307","article-title":"Fusing hyperspectral and multispectral images via coupled sparse tensor factorization","volume":"27","author":"Li","year":"2018","journal-title":"IEEE Transactions on Image Processing"},{"key":"10.1016\/j.eswa.2022.118631_b9","doi-asserted-by":"crossref","first-page":"3748","DOI":"10.1109\/TIP.2021.3065171","article-title":"Layer-output guided complementary attention learning for image defocus blur detection","volume":"30","author":"Li","year":"2021","journal-title":"IEEE Transactions on Image Processing"},{"key":"10.1016\/j.eswa.2022.118631_b10","doi-asserted-by":"crossref","first-page":"100","DOI":"10.1016\/j.inffus.2016.05.004","article-title":"Pixel-level image fusion: A survey of the state of the art","volume":"33","author":"Li","year":"2017","journal-title":"Information Fusion"},{"issue":"7","key":"10.1016\/j.eswa.2022.118631_b11","doi-asserted-by":"crossref","first-page":"2864","DOI":"10.1109\/TIP.2013.2244222","article-title":"Image fusion with guided filtering","volume":"22","author":"Li","year":"2013","journal-title":"IEEE Transactions on Image Processing"},{"key":"10.1016\/j.eswa.2022.118631_b12","doi-asserted-by":"crossref","first-page":"109","DOI":"10.1016\/j.inffus.2021.02.008","article-title":"An infrared and visible image fusion method based on multi-scale transformation and norm 
optimization","volume":"71","author":"Li","year":"2021","journal-title":"Information Fusion"},{"issue":"5","key":"10.1016\/j.eswa.2022.118631_b13","doi-asserted-by":"crossref","first-page":"2614","DOI":"10.1109\/TIP.2018.2887342","article-title":"DenseFuse: A fusion approach to infrared and visible images","volume":"28","author":"Li","year":"2019","journal-title":"IEEE Transactions on Image Processing"},{"key":"10.1016\/j.eswa.2022.118631_b14","doi-asserted-by":"crossref","DOI":"10.1016\/j.infrared.2019.103039","article-title":"Infrared and visible image fusion with ResNet and zero-phase component analysis","volume":"102","author":"Li","year":"2019","journal-title":"Infrared Physics & Technology"},{"key":"10.1016\/j.eswa.2022.118631_b15","series-title":"2018 24th international conference on pattern recognition (ICPR)","first-page":"2705","article-title":"Infrared and visible image fusion using a deep learning framework","author":"Li","year":"2018"},{"key":"10.1016\/j.eswa.2022.118631_b16","article-title":"RFN-Nest: An end-to-end residual fusion network for infrared and visible images","author":"Li","year":"2021","journal-title":"Information Fusion"},{"key":"10.1016\/j.eswa.2022.118631_b17","doi-asserted-by":"crossref","first-page":"191","DOI":"10.1016\/j.inffus.2016.12.001","article-title":"Multi-focus image fusion with a deep convolutional neural network","volume":"36","author":"Liu","year":"2017","journal-title":"Information Fusion"},{"issue":"12","key":"10.1016\/j.eswa.2022.118631_b18","doi-asserted-by":"crossref","first-page":"1882","DOI":"10.1109\/LSP.2016.2618776","article-title":"Image fusion with convolutional sparse representation","volume":"23","author":"Liu","year":"2016","journal-title":"IEEE Signal Processing Letters"},{"issue":"5","key":"10.1016\/j.eswa.2022.118631_b19","doi-asserted-by":"crossref","first-page":"347","DOI":"10.1049\/iet-ipr.2014.0311","article-title":"Simultaneous image fusion and denoising with adaptive sparse representation","volume":"9","author":"Liu","year":"2015","journal-title":"IET Image Processing"},{"key":"10.1016\/j.eswa.2022.118631_b20","doi-asserted-by":"crossref","first-page":"100","DOI":"10.1016\/j.inffus.2016.02.001","article-title":"Infrared and visible image fusion via gradient transfer and total variation minimization","volume":"31","author":"Ma","year":"2016","journal-title":"Information Fusion"},{"key":"10.1016\/j.eswa.2022.118631_b21","doi-asserted-by":"crossref","first-page":"85","DOI":"10.1016\/j.inffus.2019.07.005","article-title":"Infrared and visible image fusion via detail preserving adversarial learning","volume":"54","author":"Ma","year":"2020","journal-title":"Information Fusion"},{"key":"10.1016\/j.eswa.2022.118631_b22","doi-asserted-by":"crossref","DOI":"10.1016\/j.infrared.2018.06.002","article-title":"Multi-scale decomposition based fusion of infrared and visible image via total variation and saliency analysis","author":"Ma","year":"2018","journal-title":"Infrared Physics & Technology"},{"key":"10.1016\/j.eswa.2022.118631_b23","doi-asserted-by":"crossref","first-page":"153","DOI":"10.1016\/j.inffus.2018.02.004","article-title":"Infrared and visible image fusion methods and applications: A survey","volume":"45","author":"Ma","year":"2019","journal-title":"Information Fusion"},{"key":"10.1016\/j.eswa.2022.118631_b24","first-page":"1","article-title":"STDFusionNet: An infrared and visible image fusion network based on salient target detection","volume":"70","author":"Ma","year":"2021","journal-title":"IEEE Transactions on 
Instrumentation and Measurement"},{"key":"10.1016\/j.eswa.2022.118631_b25","doi-asserted-by":"crossref","first-page":"11","DOI":"10.1016\/j.inffus.2018.09.004","article-title":"FusionGAN: A generative adversarial network for infrared and visible image fusion","volume":"48","author":"Ma","year":"2019","journal-title":"Information Fusion"},{"issue":"7","key":"10.1016\/j.eswa.2022.118631_b26","doi-asserted-by":"crossref","first-page":"594","DOI":"10.3390\/rs8070594","article-title":"Pansharpening by convolutional neural networks","volume":"8","author":"Masi","year":"2016","journal-title":"Remote Sensing"},{"issue":"2","key":"10.1016\/j.eswa.2022.118631_b27","doi-asserted-by":"crossref","first-page":"143","DOI":"10.1016\/j.inffus.2006.02.001","article-title":"Remote sensing image fusion using the curvelet transform","volume":"8","author":"Nencini","year":"2007","journal-title":"Information Fusion"},{"key":"10.1016\/j.eswa.2022.118631_b28","series-title":"Proceedings 2003 international conference on image processing (Cat. No. 03CH37429), Vol. 3","first-page":"III","article-title":"A new quality metric for image fusion","author":"Piella","year":"2003"},{"key":"10.1016\/j.eswa.2022.118631_b29","series-title":"2017 IEEE international conference on computer vision (ICCV)","first-page":"4724","article-title":"Deepfuse: A deep unsupervised approach for exposure fusion with extreme exposure image pairs","author":"Prabhakar","year":"2017"},{"issue":"7","key":"10.1016\/j.eswa.2022.118631_b30","doi-asserted-by":"crossref","first-page":"313","DOI":"10.1049\/el:20020212","article-title":"Information measure for performance of image fusion","volume":"38","author":"Qu","year":"2002","journal-title":"Electronics Letters"},{"issue":"1","key":"10.1016\/j.eswa.2022.118631_b31","article-title":"Assessment of image fusion procedures using entropy, image quality, and multispectral classification","volume":"2","author":"Roberts","year":"2008","journal-title":"Journal of Applied Remote Sensing"},{"issue":"1","key":"10.1016\/j.eswa.2022.118631_b32","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1561\/0600000009","article-title":"Image alignment and stitching: A tutorial","volume":"2","author":"Szeliski","year":"2007","journal-title":"Foundations and Trends\u00ae in Computer Graphics and Vision"},{"issue":"10","key":"10.1016\/j.eswa.2022.118631_b33","doi-asserted-by":"crossref","first-page":"4062","DOI":"10.1109\/JSTARS.2019.2937690","article-title":"Image registration with Fourier-based image correlation: A comprehensive review of developments and applications","volume":"12","author":"Tong","year":"2019","journal-title":"IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing"},{"key":"10.1016\/j.eswa.2022.118631_b34","doi-asserted-by":"crossref","unstructured":"Wu, H., Zheng, S., Zhang, J., & Huang, K. 2018. Fast end-to-end trainable guided filter. In Proceedings of the IEEE conference on computer vision and pattern recognition (CVPR).","DOI":"10.1109\/CVPR.2018.00197"},{"key":"10.1016\/j.eswa.2022.118631_b35","article-title":"U2Fusion: A unified unsupervised image fusion network","author":"Xu","year":"2020","journal-title":"IEEE Transactions on Pattern Analysis and Machine Intelligence"},{"key":"10.1016\/j.eswa.2022.118631_b36","doi-asserted-by":"crossref","unstructured":"Xu, H., Ma, J., Le, Z., Jiang, J., & Guo, X. 2020. Fusiondn: A unified densely connected network for image fusion. In Proceedings of the AAAI conference on artificial intelligence, vol. 34. (pp. 
12484\u201312491).","DOI":"10.1609\/aaai.v34i07.6936"},{"key":"10.1016\/j.eswa.2022.118631_b37","doi-asserted-by":"crossref","DOI":"10.1016\/j.eswa.2022.116905","article-title":"DSG-fusion: Infrared and visible image fusion via generative adversarial networks and guided filter","volume":"200","author":"Yang","year":"2022","journal-title":"Expert Systems with Applications"},{"key":"10.1016\/j.eswa.2022.118631_b38","doi-asserted-by":"crossref","first-page":"57","DOI":"10.1016\/j.inffus.2017.05.006","article-title":"Sparse representation based multi-sensor image fusion for multi-focus and multi-modality images: a review","volume":"40","author":"Zhang","year":"2018","journal-title":"Information Fusion"},{"key":"10.1016\/j.eswa.2022.118631_b39","doi-asserted-by":"crossref","unstructured":"Zhang, X., Ye, P., & Xiao, G. 2020. VIFB: A visible and infrared image fusion benchmark. In Proceedings of the IEEE\/CVF conference on computer vision and pattern recognition workshops.","DOI":"10.1109\/CVPRW50498.2020.00060"},{"issue":"4","key":"10.1016\/j.eswa.2022.118631_b40","doi-asserted-by":"crossref","first-page":"866","DOI":"10.1109\/TMM.2017.2760100","article-title":"Multisensor image fusion and enhancement in spectral total variation domain","volume":"20","author":"Zhao","year":"2018","journal-title":"IEEE Transactions on Multimedia"},{"key":"10.1016\/j.eswa.2022.118631_b41","doi-asserted-by":"crossref","first-page":"15","DOI":"10.1016\/j.inffus.2015.11.003","article-title":"Perceptual fusion of infrared and visible images through a hybrid multi-scale decomposition with Gaussian and bilateral filters","volume":"30","author":"Zhou","year":"2016","journal-title":"Information Fusion"}],"container-title":["Expert Systems with Applications"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/linproxy.fan.workers.dev:443\/https\/api.elsevier.com\/content\/article\/PII:S0957417422016785?httpAccept=text\/xml","content-type":"text\/xml","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/linproxy.fan.workers.dev:443\/https\/api.elsevier.com\/content\/article\/PII:S0957417422016785?httpAccept=text\/plain","content-type":"text\/plain","content-version":"vor","intended-application":"text-mining"}],"deposited":{"date-parts":[[2025,10,17]],"date-time":"2025-10-17T06:08:25Z","timestamp":1760681305000},"score":1,"resource":{"primary":{"URL":"https:\/\/linproxy.fan.workers.dev:443\/https\/linkinghub.elsevier.com\/retrieve\/pii\/S0957417422016785"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2023,1]]},"references-count":41,"alternative-id":["S0957417422016785"],"URL":"https:\/\/linproxy.fan.workers.dev:443\/https\/doi.org\/10.1016\/j.eswa.2022.118631","relation":{},"ISSN":["0957-4174"],"issn-type":[{"value":"0957-4174","type":"print"}],"subject":[],"published":{"date-parts":[[2023,1]]},"assertion":[{"value":"Elsevier","name":"publisher","label":"This article is maintained by"},{"value":"MDFN: Mask deep fusion network for visible and infrared image fusion without reference ground-truth","name":"articletitle","label":"Article Title"},{"value":"Expert Systems with Applications","name":"journaltitle","label":"Journal Title"},{"value":"https:\/\/linproxy.fan.workers.dev:443\/https\/doi.org\/10.1016\/j.eswa.2022.118631","name":"articlelink","label":"CrossRef DOI link to publisher maintained version"},{"value":"article","name":"content_type","label":"Content Type"},{"value":"\u00a9 2022 Elsevier Ltd. 
All rights reserved.","name":"copyright","label":"Copyright"}],"article-number":"118631"}}
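
A minimal sketch of how a saved Crossref work record like the one above could be read programmatically. The filename crossref_work.json is a hypothetical placeholder; only fields that actually appear in this record are accessed.

import json

# Load a locally saved copy of the Crossref API response (hypothetical filename).
with open("crossref_work.json", encoding="utf-8") as f:
    record = json.load(f)

# The bibliographic payload sits under the top-level "message" key.
work = record["message"]
print("DOI:       ", work["DOI"])
print("Title:     ", work["title"][0])
print("Journal:   ", work["container-title"][0])
print("Authors:   ", ", ".join(a["given"] + " " + a["family"] for a in work["author"]))
print("References:", work["references-count"])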