-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathrefs.bib
615 lines (548 loc) · 37.2 KB
/
refs.bib
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
% tile2vec very interesting
@inproceedings{jean2019tile2vec,
  author    = {Jean, Neal and Wang, Sherrie and Samar, Anshul and Azzari, George and Lobell, David and Ermon, Stefano},
  title     = {Tile2vec: Unsupervised representation learning for spatially distributed data},
  booktitle = {Proceedings of the AAAI Conference on Artificial Intelligence},
  year      = {2019},
  volume    = {33},
  number    = {01},
  pages     = {3967--3974}
}
% Survey of self-supervised and datasets for remote sensing
@misc{wang2022selfsupervised,
  author        = {Yi Wang and Conrad M Albrecht and Nassim Ait Ali Braham and Lichao Mou and Xiao Xiang Zhu},
  title         = {Self-supervised Learning in Remote Sensing: A Review},
  year          = {2022},
  eprint        = {2206.13188},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV}
}
@misc{wang2023ssl4eos12,
  author        = {Yi Wang and Nassim Ait Ali Braham and Zhitong Xiong and Chenying Liu and Conrad M Albrecht and Xiao Xiang Zhu},
  title         = {SSL4EO-S12: A Large-Scale Multi-Modal, Multi-Temporal Dataset for Self-Supervised Learning in Earth Observation},
  year          = {2023},
  eprint        = {2211.07044},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV}
}
% MoCo v2 (arXiv preprint). Converted from the "journal = {arXiv preprint ...}"
% anti-pattern to the eprint form used by every other arXiv entry in this file.
@misc{chen2020mocov2,
  author        = {Xinlei Chen and Haoqi Fan and Ross Girshick and Kaiming He},
  title         = {Improved Baselines with Momentum Contrastive Learning},
  year          = {2020},
  eprint        = {2003.04297},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV}
}
% Braced {t-SNE} so sentence-casing bibliography styles cannot downcase the acronym.
@article{JMLR:v9:vandermaaten08a,
  author  = {Laurens van der Maaten and Geoffrey Hinton},
  title   = {Visualizing Data using {t-SNE}},
  journal = {Journal of Machine Learning Research},
  year    = {2008},
  volume  = {9},
  number  = {86},
  pages   = {2579--2605},
  url     = {http://jmlr.org/papers/v9/vandermaaten08a.html}
}
@misc{goodfellow2014generative,
  author        = {Ian J. Goodfellow and Jean Pouget-Abadie and Mehdi Mirza and Bing Xu and David Warde-Farley and Sherjil Ozair and Aaron Courville and Yoshua Bengio},
  title         = {Generative Adversarial Networks},
  year          = {2014},
  eprint        = {1406.2661},
  archivePrefix = {arXiv},
  primaryClass  = {stat.ML}
}
@misc{he2015deep,
  author        = {Kaiming He and Xiangyu Zhang and Shaoqing Ren and Jian Sun},
  title         = {Deep Residual Learning for Image Recognition},
  year          = {2015},
  eprint        = {1512.03385},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV}
}
@misc{radford2016unsupervised,
  author        = {Alec Radford and Luke Metz and Soumith Chintala},
  title         = {Unsupervised Representation Learning with Deep Convolutional Generative Adversarial Networks},
  year          = {2016},
  eprint        = {1511.06434},
  archivePrefix = {arXiv},
  primaryClass  = {cs.LG}
}
% Dataset to do pre-training o self-supervised for embeddings as it's
% sentinel-2 and has water bodies on it
% Protected the {EuroSAT} acronym and filled in the missing volume/issue/pages
% (IEEE JSTARS 12(7), 2019) that the original export dropped.
@article{helber2019eurosat,
  author    = {Helber, Patrick and Bischke, Benjamin and Dengel, Andreas and Borth, Damian},
  title     = {{EuroSAT}: A novel dataset and deep learning benchmark for land use and land cover classification},
  journal   = {IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing},
  year      = {2019},
  volume    = {12},
  number    = {7},
  pages     = {2217--2226},
  publisher = {IEEE}
}
% Dropped empty volume/number fields, fixed the page-range dashes, and braced
% the {BigEarthNet} acronym against style recasing.
@inproceedings{bigearthnet,
  author    = {Sumbul, Gencer and Charfuelan, Marcela and Demir, Begüm and Markl, Volker},
  title     = {{BigEarthNet}: A Large-Scale Benchmark Archive for Remote Sensing Image Understanding},
  booktitle = {IGARSS 2019 - 2019 IEEE International Geoscience and Remote Sensing Symposium},
  year      = {2019},
  pages     = {5901--5904},
  doi       = {10.1109/IGARSS.2019.8900532}
}
% Maybe interesting papers of self-supervised and pixel level data taken from
% wang2022selfsupervised
% Fixed the percent-encoded resolver URL (%2F breaks LaTeX and url handling);
% the bare DOI is kept as the canonical identifier.
@article{Li_2022,
  author    = {Haifeng Li and Yi Li and Guo Zhang and Ruoyun Liu and Haozhe Huang and Qing Zhu and Chao Tao},
  title     = {Global and Local Contrastive Self-Supervised Learning for Semantic Segmentation of {HR} Remote Sensing Images},
  journal   = {{IEEE} Transactions on Geoscience and Remote Sensing},
  year      = {2022},
  volume    = {60},
  pages     = {1--14},
  publisher = {Institute of Electrical and Electronics Engineers ({IEEE})},
  doi       = {10.1109/tgrs.2022.3147513},
  url       = {https://doi.org/10.1109/tgrs.2022.3147513}
}
% Small model
% Replaced the Unicode hyphen (U+2010) in the title with an ASCII hyphen —
% classic BibTeX is 8-bit only and the character garbles sorting/output.
% NOTE(review): volume/pages missing from this export — fill in when known.
@article{Jung2021SelfsupervisedLW,
  author  = {Heechul Jung and Taegyun Jeon},
  title   = {Self-supervised learning with randomised layers for remote sensing},
  journal = {Electronics Letters},
  year    = {2021}
}
@misc{gui2023survey,
  author        = {Jie Gui and Tuo Chen and Jing Zhang and Qiong Cao and Zhenan Sun and Hao Luo and Dacheng Tao},
  title         = {A Survey of Self-supervised Learning from Multiple Perspectives: Algorithms, Applications and Future Trends},
  year          = {2023},
  eprint        = {2301.05712},
  archivePrefix = {arXiv},
  primaryClass  = {cs.LG}
}
% MSLWENet — MDPI Remote Sensing export; article-number journal (no page range),
% hence ARTICLE-NUMBER instead of pages. Kept verbatim.
@Article{rs12244140,
AUTHOR = {Wang, Zhaobin and Gao, Xiong and Zhang, Yaonan and Zhao, Guohui},
TITLE = {MSLWENet: A Novel Deep Learning Network for Lake Water Body Extraction of Google Remote Sensing Images},
JOURNAL = {Remote Sensing},
VOLUME = {12},
YEAR = {2020},
NUMBER = {24},
ARTICLE-NUMBER = {4140},
URL = {https://www.mdpi.com/2072-4292/12/24/4140},
ISSN = {2072-4292},
ABSTRACT = {Lake water body extraction from remote sensing images is a key technique for spatial geographic analysis. It plays an important role in the prevention of natural disasters, resource utilization, and water quality monitoring. Inspired by the recent years of research in computer vision on fully convolutional neural networks (FCN), an end-to-end trainable model named the multi-scale lake water extraction network (MSLWENet) is proposed. We use ResNet-101 with depthwise separable convolution as an encoder to obtain the high-level feature information of the input image and design a multi-scale densely connected module to expand the receptive field of feature points by different dilation rates without increasing the computation. In the decoder, the residual convolution is used to abstract the features and fuse the features at different levels, which can obtain the final lake water body extraction map. Through visual interpretation of the experimental results and the calculation of the evaluation indicators, we can see that our model extracts the water bodies of small lakes well and solves the problem of large intra-class variance and small inter-class variance in the lakes’ water bodies. The overall accuracy of our model is up to 98.53% based on the evaluation indicators. Experimental results demonstrate that the MSLWENet, which benefits from the convolutional neural network, is an excellent lake water body extraction network.},
DOI = {10.3390/rs12244140}
}
@misc{li2022efficient,
  author        = {Chunyuan Li and Jianwei Yang and Pengchuan Zhang and Mei Gao and Bin Xiao and Xiyang Dai and Lu Yuan and Jianfeng Gao},
  title         = {Efficient Self-supervised Vision Transformers for Representation Learning},
  year          = {2022},
  eprint        = {2106.09785},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV}
}
@misc{caron2021unsupervised,
  author        = {Mathilde Caron and Ishan Misra and Julien Mairal and Priya Goyal and Piotr Bojanowski and Armand Joulin},
  title         = {Unsupervised Learning of Visual Features by Contrasting Cluster Assignments},
  year          = {2021},
  eprint        = {2006.09882},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV}
}
% Added the missing required booktitle — NOTE(review): this paper appeared at
% BMVC 2018, please verify. Dropped the empty pages field and switched the
% quoted numeric month to the standard macro. (The key "inproceedings" is poor
% but kept: citation keys are the interface to the .tex sources.)
@inproceedings{inproceedings,
  author    = {Singh, Suriya and Batra, Anil and Pang, Guan and Torresani, Lorenzo and Basu, Saikat and Paluri, Manohar and Jawahar, C},
  title     = {Self-Supervised Feature Learning for Semantic Segmentation of Overhead Imagery},
  booktitle = {British Machine Vision Conference ({BMVC})},
  year      = {2018},
  month     = sep
}
% Fixed the page-range dashes and dropped the empty number field.
@article{9460820,
  author  = {Li, Wenyuan and Chen, Hao and Shi, Zhenwei},
  title   = {Semantic Segmentation of Remote Sensing Images With Self-Supervised Multitask Representation Learning},
  journal = {IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing},
  year    = {2021},
  volume  = {14},
  pages   = {6438--6450},
  doi     = {10.1109/JSTARS.2021.3090418}
}
@misc{kirillov2023segment,
  author        = {Alexander Kirillov and Eric Mintun and Nikhila Ravi and Hanzi Mao and Chloe Rolland and Laura Gustafson and Tete Xiao and Spencer Whitehead and Alexander C. Berg and Wan-Yen Lo and Piotr Dollár and Ross Girshick},
  title         = {Segment Anything},
  year          = {2023},
  eprint        = {2304.02643},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV}
}
@misc{akiva2020h2onet,
  author        = {Peri Akiva and Matthew Purri and Kristin Dana and Beth Tellman and Tyler Anderson},
  title         = {H2O-Net: Self-Supervised Flood Segmentation via Adversarial Domain Adaptation and Label Refinement},
  year          = {2020},
  eprint        = {2010.05309},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV}
}
@article{eoreader_paper,
  author         = {Maxant, Jérôme and Braun, Rémi and Caspard, Mathilde and Clandillon, Stephen},
  title          = {ExtractEO, a Pipeline for Disaster Extent Mapping in the Context of Emergency Management},
  journal        = {Remote Sensing},
  year           = {2022},
  volume         = {14},
  number         = {20},
  article-number = {5253},
  issn           = {2072-4292},
  doi            = {10.3390/rs14205253},
  url            = {https://www.mdpi.com/2072-4292/14/20/5253}
}
% Stored the DOI bare (no https://doi.org/ resolver prefix — styles add it)
% and fixed the page-range dashes. Everything else kept verbatim.
@article{sentinel-2,
  author   = {M. Drusch and U. {Del Bello} and S. Carlier and O. Colin and V. Fernandez and F. Gascon and B. Hoersch and C. Isola and P. Laberinti and P. Martimort and A. Meygret and F. Spoto and O. Sy and F. Marchese and P. Bargellini},
  title    = {Sentinel-2: ESA's Optical High-Resolution Mission for GMES Operational Services},
  journal  = {Remote Sensing of Environment},
  year     = {2012},
  volume   = {120},
  pages    = {25--36},
  note     = {The Sentinel Missions - New Opportunities for Science},
  issn     = {0034-4257},
  doi      = {10.1016/j.rse.2011.11.026},
  url      = {https://www.sciencedirect.com/science/article/pii/S0034425712000636},
  keywords = {Sentinel-2, GMES, Remote sensing, Optical multi-spectral instrument, Land cover classification},
  abstract = {Global Monitoring for Environment and Security (GMES) is a joint initiative of the European Commission (EC) and the European Space Agency (ESA), designed to establish a European capacity for the provision and use of operational monitoring information for environment and security applications. ESA's role in GMES is to provide the definition and the development of the space- and ground-related system elements. GMES Sentinel-2 mission provides continuity to services relying on multi-spectral high-resolution optical observations over global terrestrial surfaces. The key mission objectives for Sentinel-2 are: (1) To provide systematic global acquisitions of high-resolution multi-spectral imagery with a high revisit frequency, (2) to provide enhanced continuity of multi-spectral imagery provided by the SPOT (Satellite Pour l'Observation de la Terre) series of satellites, and (3) to provide observations for the next generation of operational products such as land-cover maps, land change detection maps, and geophysical variables. Consequently, Sentinel-2 will directly contribute to the Land Monitoring, Emergency Response, and Security services. The corresponding user requirements have driven the design toward a dependable multi-spectral Earth-observation system featuring the Multi Spectral Instrument (MSI) with 13 spectral bands spanning from the visible and the near infrared to the short wave infrared. The spatial resolution varies from 10m to 60m depending on the spectral band with a 290km field of view. This unique combination of high spatial resolution, wide field of view and spectral coverage will represent a major step forward compared to current multi-spectral missions. The mission foresees a series of satellites, each having a 7.25-year lifetime over a 15-year period starting with the launch of Sentinel-2A foreseen in 2013. 
During full operations two identical satellites will be maintained in the same orbit with a phase delay of 180° providing a revisit time of five days at the equator. This paper provides an overview of the GMES Sentinel-2 mission including a technical system concept overview, image quality, Level 1 data processing and operational applications.}
}
% @www is not a standard BibTeX entry type — standard styles drop the entry
% with a warning. Converted to @misc; the corporate author is double-braced so
% it is not parsed as first/last name parts.
@misc{whatiscopernicus,
  author = {{Copernicus}},
  title  = {About Copernicus},
  year   = {2023},
  url    = {https://www.copernicus.eu/en/about-copernicus},
  note   = {Online; accessed 2023}
}
% Original title was double-wrapped ("{...}"), which defeats style casing
% entirely; instead brace only the proper nouns, and fix the ERTS acronym.
% The ADS-style author field (Last, First, Jr form with braced surnames) is
% valid BibTeX and kept as-is.
@incollection{NDVIsource,
  author    = {{Rouse}, J.~W., Jr. and {Haas}, R.~H. and {Schell}, J.~A. and {Deering}, D.~W.},
  title     = {Monitoring Vegetation Systems in the {Great Plains} with {ERTS}},
  booktitle = {NASA Special Publication},
  year      = {1974},
  volume    = {351},
  pages     = {309},
  adsurl    = {https://ui.adsabs.harvard.edu/abs/1974NASSP.351..309R},
  adsnote   = {Provided by the SAO/NASA Astrophysics Data System}
}
@misc{DINO,
  author        = {Mathilde Caron and Hugo Touvron and Ishan Misra and Hervé Jégou and Julien Mairal and Piotr Bojanowski and Armand Joulin},
  title         = {Emerging Properties in Self-Supervised Vision Transformers},
  year          = {2021},
  eprint        = {2104.14294},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV}
}
@misc{dosovitskiy2021image,
  author        = {Alexey Dosovitskiy and Lucas Beyer and Alexander Kolesnikov and Dirk Weissenborn and Xiaohua Zhai and Thomas Unterthiner and Mostafa Dehghani and Matthias Minderer and Georg Heigold and Sylvain Gelly and Jakob Uszkoreit and Neil Houlsby},
  title         = {An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale},
  year          = {2021},
  eprint        = {2010.11929},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV}
}
@misc{loshchilov2019decoupled,
  author        = {Ilya Loshchilov and Frank Hutter},
  title         = {Decoupled Weight Decay Regularization},
  year          = {2019},
  eprint        = {1711.05101},
  archivePrefix = {arXiv},
  primaryClass  = {cs.LG}
}
% MDPI Remote Sensing export; article-number journal (no page range). Kept verbatim.
@Article{rs14133013,
AUTHOR = {Ade, Christiana and Khanna, Shruti and Lay, Mui and Ustin, Susan L. and Hestir, Erin L.},
TITLE = {Genus-Level Mapping of Invasive Floating Aquatic Vegetation Using Sentinel-2 Satellite Remote Sensing},
JOURNAL = {Remote Sensing},
VOLUME = {14},
YEAR = {2022},
NUMBER = {13},
ARTICLE-NUMBER = {3013},
URL = {https://www.mdpi.com/2072-4292/14/13/3013},
ISSN = {2072-4292},
ABSTRACT = {Invasive floating aquatic vegetation negatively impacts wetland ecosystems and mapping this vegetation through space and time can aid in designing and assessing effective control strategies. Current remote sensing methods for mapping floating aquatic vegetation at the genus level relies on airborne imaging spectroscopy, resulting in temporal gaps because routine hyperspectral satellite coverage is not yet available. Here we achieved genus level and species level discrimination between two invasive aquatic vegetation species using Sentinel 2 multispectral satellite data and machine-learning classifiers in summer and fall. The species of concern were water hyacinth (Eichornia crassipes) and water primrose (Ludwigia spp.). Our classifiers also identified submerged and emergent aquatic vegetation at the community level. Random forest models using Sentinel-2 data achieved an average overall accuracy of 90%, and class accuracies of 79–91% and 85–95% for water hyacinth and water primrose, respectively. To our knowledge, this is the first study that has mapped water primrose to the genus level using satellite remote sensing. Sentinel-2 derived maps compared well to those derived from airborne imaging spectroscopy and we also identified misclassifications that can be attributed to the coarser Sentinel-2 spectral and spatial resolutions. Our results demonstrate that the intra-annual temporal gaps between airborne imaging spectroscopy observations can be supplemented with Sentinel-2 satellite data and thus, rapidly growing/expanding vegetation can be tracked in real time. Such improvements have potential management benefits by improving the understanding of the phenology, spread, competitive advantages, and vulnerabilities of these aquatic plants.},
DOI = {10.3390/rs14133013}
}
% Fixed page-range dashes and the quoted numeric month.
% NOTE(review): @inbook also requires booktitle/publisher — this export lacks
% them (the chapter is from a Spanish remote-sensing casebook); fill in when known.
@inbook{donyana1,
  author = {Diaz-Delgado, Ricardo and Bustamante, Javier and Aragonés, David},
  title  = {Caso 5. La teledetección como herramienta en la cartografía de especies invasoras: Azolla filiculoides en Doñana},
  year   = {2008},
  month  = jan,
  pages  = {159--163}
}
% Fixed page-range dashes and the quoted numeric month.
@article{donyana2,
  author  = {Garcia Murillo, Pablo and Fernández-Zamudio, Rocío and Cirujano, Santos and Sousa, Arturo and Espinar, Juan},
  title   = {The invasion of Doñana National Park (SW Spain) by the mosquito fern (Azolla filiculoides Lam)},
  journal = {Limnetica},
  year    = {2007},
  month   = oct,
  volume  = {26},
  pages   = {243--250},
  doi     = {10.23818/limn.26.21}
}
% Expanded the truncated page range "4869-89" to the full form with an en-dash;
% nonstandard fields (pmid, pmc, affiliation, date_added, ...) are harmless —
% BibTeX ignores unknown field names — and are kept.
@article{huang2009applications,
  author        = {Huang, Cho-Ying and Asner, Gregory P},
  title         = {Applications of remote sensing to alien invasive plant studies},
  journal       = {Sensors (Basel)},
  year          = {2009},
  volume        = {9},
  number        = {6},
  pages         = {4869--4889},
  doi           = {10.3390/s90604869},
  url           = {https://www.ncbi.nlm.nih.gov/pubmed/22408558},
  pmid          = {22408558},
  issn          = {1424-8220 (Print), 1424-8220 (Electronic), 1424-8220 (Linking)},
  pmc           = {PMC3291943},
  affiliation   = {Department of Geomatics, National Cheng Kung University, Tainan 70101, Taiwan},
  language      = {eng},
  keywords      = {biological invasions, high spatial resolution, high temporal resolution, hyperspectral remote sensing, image fusion, light detection and ranging (LiDAR), moderate spatial/spectral resolution},
  date_added    = {2023-09-02},
  date_modified = {2023-09-02},
}
% Fixed page-range dashes and the quoted numeric month.
@article{aguir2013,
  author  = {Aguiar, Francisca and Ferreira, Maria},
  title   = {Plant invasions in the rivers of the Iberian Peninsula, south-western Europe: A review},
  journal = {Plant Biosystems},
  year    = {2013},
  month   = jan,
  volume  = {147},
  pages   = {1107--1119},
  doi     = {10.1080/11263504.2013.861539}
}
% Each corporate author is now wrapped in its own brace group so BibTeX treats
% it as a single indivisible surname instead of splitting it into first/von/last
% parts (the original parsed e.g. last name = "Museum.").
@book{bhlitem21490,
  author    = {{Chicago Natural History Museum.} and {Field Museum of Natural History.} and {Field Museum of Natural History. Department of Anthropology.}},
  title     = {Fieldiana Zoology},
  volume    = {n.s. no.87 (1997)},
  year      = {1997},
  publisher = {[Chicago] Chicago Natural History Museum [1945?-},
  pages     = {148},
  copyright = {In copyright. Digitized with the permission of the rights holder.},
  url       = {https://www.biodiversitylibrary.org/item/21490},
  note      = {https://www.biodiversitylibrary.org/bibliography/42256},
  keywords  = {Zoology},
}
% Stored the DOI bare (no resolver prefix), fixed the page-range dashes, and
% removed the misused eprint field — it held a Wiley PDF link, and biblatex
% renders eprint as an arXiv-style identifier; the landing-page url remains.
@article{invasive_species,
  author   = {Cassey, Phillip and Blackburn, Tim M. and Duncan, Richard P. and Chown, Steven L.},
  title    = {Concerning invasive species: Reply to Brown and Sax},
  journal  = {Austral Ecology},
  year     = {2005},
  volume   = {30},
  number   = {4},
  pages    = {475--480},
  keywords = {biological invasion, ecosystem functioning, exotic species, mass extinction event, scientific role},
  doi      = {10.1111/j.1442-9993.2005.01505.x},
  url      = {https://onlinelibrary.wiley.com/doi/abs/10.1111/j.1442-9993.2005.01505.x}
}
@misc{barlowtwins,
  author        = {Jure Zbontar and Li Jing and Ishan Misra and Yann LeCun and Stéphane Deny},
  title         = {Barlow Twins: Self-Supervised Learning via Redundancy Reduction},
  year          = {2021},
  eprint        = {2103.03230},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV}
}
@misc{he2020momentum,
  author        = {Kaiming He and Haoqi Fan and Yuxin Wu and Saining Xie and Ross Girshick},
  title         = {Momentum Contrast for Unsupervised Visual Representation Learning},
  year          = {2020},
  eprint        = {1911.05722},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV}
}
@misc{grill2020bootstrap,
  author        = {Jean-Bastien Grill and Florian Strub and Florent Altché and Corentin Tallec and Pierre H. Richemond and Elena Buchatskaya and Carl Doersch and Bernardo Avila Pires and Zhaohan Daniel Guo and Mohammad Gheshlaghi Azar and Bilal Piot and Koray Kavukcuoglu and Rémi Munos and Michal Valko},
  title         = {Bootstrap your own latent: A new approach to self-supervised Learning},
  year          = {2020},
  eprint        = {2006.07733},
  archivePrefix = {arXiv},
  primaryClass  = {cs.LG}
}
@misc{chen2020simple,
  author        = {Ting Chen and Simon Kornblith and Mohammad Norouzi and Geoffrey Hinton},
  title         = {A Simple Framework for Contrastive Learning of Visual Representations},
  year          = {2020},
  eprint        = {2002.05709},
  archivePrefix = {arXiv},
  primaryClass  = {cs.LG}
}
% Stored the DOI bare (no resolver prefix) and fixed the page-range dashes.
@article{TUCKER1979127,
  author   = {Compton J. Tucker},
  title    = {Red and photographic infrared linear combinations for monitoring vegetation},
  journal  = {Remote Sensing of Environment},
  year     = {1979},
  volume   = {8},
  number   = {2},
  pages    = {127--150},
  issn     = {0034-4257},
  doi      = {10.1016/0034-4257(79)90013-0},
  url      = {https://www.sciencedirect.com/science/article/pii/0034425779900130},
  abstract = {In situ collected spectrometer data were used to evaluate and quantify the relationships between various linear combinations of red and photographic infrared radiances and experimental plot biomass, leaf water content, and chlorophyll content. The radiance variables evaluated included the red and photographic infrared (IR) radiance and the linear combinations of the IR/red ratio, the square root of the IR/red ratio, the IR-red difference, the vegetation index, and the transformed vegetation index. In addition, the corresponding green and red linear combinations were evaluated for comparative purposes. Three data sets were used from June, September, and October sampling periods. Regression analysis showed the increased utility of the IR and red linear combinations vis-à-vis the same green and red linear combinations. The red and IR linear combinations had 7% and 14% greater regression significance than the green and red linear combinations for the June and September sampling periods, respectively. The vegetation index, transformed vegetation index, and square root of the IR/red ratio were the most significant, followed closely by the IR/red ratio. Less than a 6% difference separated the highest and lowest of these four ER and red linear combinations. The use of these linear combinations was shown to be sensitive primarily to the green leaf area or green leaf biomass. As such, these linear combinations of the red and photographic IR radiances can be employed to monitor the photosynthetically active biomass of plant canopies.}
}
% Removed the duplicated doi field (it appeared twice with the same value),
% fixed the page-range dashes, switched the quoted numeric month to the
% standard macro, and braced the {NDVI} acronym against style recasing.
@article{Huang2021,
  author  = {Huang, Sha and Tang, Lina and Hupy, Joseph P. and Wang, Yang and Shao, Guofan},
  title   = {A commentary review on the use of normalized difference vegetation index ({NDVI}) in the era of popular remote sensing},
  journal = {Journal of Forestry Research},
  year    = {2021},
  month   = feb,
  volume  = {32},
  number  = {1},
  pages   = {1--6},
  issn    = {1993-0607},
  doi     = {10.1007/s11676-020-01155-1},
  url     = {https://doi.org/10.1007/s11676-020-01155-1},
}
% MDPI Climate export; article-number journal (no page range). Kept verbatim.
@Article{srilanka_veg,
AUTHOR = {Kariyawasam, Champika S. and Kumar, Lalit and Kogo, Benjamin Kipkemboi and Ratnayake, Sujith S.},
TITLE = {Long-Term Changes of Aquatic Invasive Plants and Implications for Future Distribution: A Case Study Using a Tank Cascade System in Sri Lanka},
JOURNAL = {Climate},
VOLUME = {9},
YEAR = {2021},
NUMBER = {2},
ARTICLE-NUMBER = {31},
URL = {https://www.mdpi.com/2225-1154/9/2/31},
ISSN = {2225-1154},
ABSTRACT = {Climate variability can influence the dynamics of aquatic invasive alien plants (AIAPs) that exert tremendous pressure on aquatic systems, leading to loss of biodiversity, agricultural wealth, and ecosystem services. However, the magnitude of these impacts remains poorly known. The current study aims to analyse the long-term changes in the spatio-temporal distribution of AIAPs under the influence of climate variability in a heavily infested tank cascade system (TCS) in Sri Lanka. The changes in coverage of various features in the TCS were analysed using the supervised maximum likelihood classification of ten Landsat images over a 27-year period, from 1992 to 2019 using ENVI remote sensing software. The non-parametric Mann–Kendall trend test and Sen’s slope estimate were used to analyse the trend of annual rainfall and temperature. We observed a positive trend of temperature that was statistically significant (p value < 0.05) and a positive trend of rainfall that was not statistically significant (p values > 0.05) over the time period. Our results showed fluctuations in the distribution of AIAPs in the short term; however, the coverage of AIAPs showed an increasing trend in the study area over the longer term. Thus, this study suggests that the AIAPs are likely to increase under climate variability in the study area.},
DOI = {10.3390/cli9020031}
}
% Removed the raw HTML markup ("1<sup>st</sup>") that leaked into the abstract
% from the publisher export — literal < > would be typeset as math/garbage by
% LaTeX. Everything else kept verbatim.
@article{10.3389/fmars.2022.1004012,
  author   = {Roca, Mar and Dunbar, Martha Bonnet and Román, Alejandro and Caballero, Isabel and Zoffoli, Maria Laura and Gernez, Pierre and Navarro, Gabriel},
  title    = {Monitoring the marine invasive alien species Rugulopteryx okamurae using unmanned aerial vehicles and satellites},
  journal  = {Frontiers in Marine Science},
  year     = {2022},
  volume   = {9},
  issn     = {2296-7745},
  doi      = {10.3389/fmars.2022.1004012},
  url      = {https://www.frontiersin.org/articles/10.3389/fmars.2022.1004012},
  abstract = {Rugulopteryx okamurae is a species of brown macroalgae belonging to the Dictyotaceae family and native to the north-western Pacific. As an Invasive Alien Species (IAS), it was first detected in the Strait of Gibraltar in 2015. Since then, R. okamurae has been spreading rapidly through the submerged euphotic zone, colonizing from 0 to 50 m depth and generating substantial economic and environmental impacts on the Andalusian coasts (southern Spain). More than 40% of marine IAS in the European Union (EU) are macroalgae, representing one of the main threats to biodiversity and ecosystem functioning in coastal habitats. This study presents a monitoring pilot of beached R. okamurae and fresh R. okamurae down to 5 m depth in Tarifa (Cadiz, Spain), combining multispectral remote sensing data collected by sensors on-board Unmanned Aerial Vehicles (UAVs) and satellites, and how this information can be used to support decision-making and policy. We used an UAV flight carried out at Bolonia beach (Tarifa, Spain) on 1st July 2021 and Sentinel-2 (S2) and Landsat-8 (L8) image acquisitions close to the drone flight date. In situ data were also measured on the same date of the flight, and they were used to train the supervised classification Super Vector Machine (SVM) method based on the spectral information obtained for each substrate cover. The results obtained show how multispectral images allow the detection of beached R. okamurae, and the classification accuracy for water, land vegetation, sand and R. okamurae depending on the image resolution (8.3 cm/pixel for UAV flight, 10 m/pixel for S2 and 30 m/pixel for L8). While the UAV imagery precisely delimited the area occupied by this macroalgae, satellite data were capable of detecting its presence, and able to generate early warnings. This study demonstrates the usefulness of multispectral remote sensing techniques to be incorporated in continuous monitoring programmes of the marine IAS R. 
okamurae in coastal areas. This information is also key to supporting regional, national and European policies in order to adapt strategic management of invasive marine macrophytes.}
}
% Fixed the page-range dashes (single hyphen is a typographical error).
@article{Cortes1995,
  author  = {Corinna Cortes and Vladimir Vapnik},
  title   = {Support-vector networks},
  journal = {Machine Learning},
  year    = {1995},
  volume  = {20},
  number  = {3},
  pages   = {273--297},
  issn    = {1573-0565},
  doi     = {10.1007/BF00994018},
  url     = {https://doi.org/10.1007/BF00994018}
}
% MDPI Remote Sensing export; article-number journal (no page range). Kept verbatim.
@Article{rs12244021,
AUTHOR = {Singh, Geethen and Reynolds, Chevonne and Byrne, Marcus and Rosman, Benjamin},
TITLE = {A Remote Sensing Method to Monitor Water, Aquatic Vegetation, and Invasive Water Hyacinth at National Extents},
JOURNAL = {Remote Sensing},
VOLUME = {12},
YEAR = {2020},
NUMBER = {24},
ARTICLE-NUMBER = {4021},
URL = {https://www.mdpi.com/2072-4292/12/24/4021},
ISSN = {2072-4292},
ABSTRACT = {Diverse freshwater biological communities are threatened by invasive aquatic alien plant (IAAP) invasions and consequently, cost countries millions to manage. The effective management of these IAAP invasions necessitates their frequent and reliable monitoring across a broad extent and over a long-term. Here, we introduce and apply a monitoring approach that meet these criteria and is based on a three-stage hierarchical classification to firstly detect water, then aquatic vegetation and finally water hyacinth (Pontederia crassipes, previously Eichhornia crassipes), the most damaging IAAP species within many regions of the world. Our approach circumvents many challenges that restricted previous satellite-based water hyacinth monitoring attempts to smaller study areas. The method is executable on Google Earth Engine (GEE) extemporaneously and utilizes free, medium resolution (10–30 m) multispectral Earth Observation (EO) data from either Landsat-8 or Sentinel-2. The automated workflow employs a novel simple thresholding approach to obtain reliable boundaries for open-water, which are then used to limit the area for aquatic vegetation detection. Subsequently, a random forest modelling approach is used to discriminate water hyacinth from other detected aquatic vegetation using the eight most important variables. This study represents the first national scale EO-derived water hyacinth distribution map. Based on our model, it is estimated that this pervasive IAAP covered 417.74 km2 across South Africa in 2013. Additionally, we show encouraging results for utilizing the automatically derived aquatic vegetation masks to fit and evaluate a convolutional neural network-based semantic segmentation model, removing the need for detection of surface water extents that may not always be available at the required spatio-temporal resolution or accuracy. 
The water hyacinth species discrimination has a 0.80, or greater, overall accuracy (0.93), F1-score (0.87) and Matthews correlation coefficient (0.80) based on 98 widely distributed field sites across South Africa. The results suggest that the introduced workflow is suitable for monitoring changes in the extent of open water, aquatic vegetation, and water hyacinth for individual waterbodies or across national extents. The GEE code can be accessed here.},
DOI = {10.3390/rs12244021}
}
@inproceedings{NIPS2012_c399862d,
  author    = {Krizhevsky, Alex and Sutskever, Ilya and Hinton, Geoffrey E.},
  title     = {{ImageNet} Classification with Deep Convolutional Neural Networks},
  booktitle = {Advances in Neural Information Processing Systems},
  editor    = {Pereira, F. and Burges, C. J. and Bottou, L. and Weinberger, K. Q.},
  volume    = {25},
  pages     = {1097--1105},
  publisher = {Curran Associates, Inc.},
  url       = {https://proceedings.neurips.cc/paper_files/paper/2012/file/c399862d3b9d6b76c8436e924a68c45b-Paper.pdf},
  year      = {2012}
}
@article{de2021continual,
  title     = {A continual learning survey: Defying forgetting in classification tasks},
  author    = {De Lange, Matthias and Aljundi, Rahaf and Masana, Marc and Parisot, Sarah and Jia, Xu and Leonardis, Ale{\v{s}} and Slabaugh, Gregory and Tuytelaars, Tinne},
  journal   = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
  volume    = {44},
  number    = {7},
  pages     = {3366--3385},
  year      = {2021},
  publisher = {IEEE}
}
@inproceedings{purushwalkam2022challenges,
  author       = {Purushwalkam, Senthil and Morgado, Pedro and Gupta, Abhinav},
  title        = {The challenges of continuous self-supervised learning},
  booktitle    = {European Conference on Computer Vision},
  pages        = {702--721},
  organization = {Springer},
  year         = {2022}
}
@article{kirkpatrick2017overcoming,
  title     = {Overcoming catastrophic forgetting in neural networks},
  author    = {Kirkpatrick, James and Pascanu, Razvan and Rabinowitz, Neil and Veness, Joel and Desjardins, Guillaume and Rusu, Andrei A. and Milan, Kieran and Quan, John and Ramalho, Tiago and Grabska-Barwinska, Agnieszka and others},
  journal   = {Proceedings of the National Academy of Sciences},
  volume    = {114},
  number    = {13},
  pages     = {3521--3526},
  year      = {2017},
  publisher = {National Academy of Sciences}
}
@article{10135093,
  author  = {Marsocci, Valerio and Scardapane, Simone},
  journal = {IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing},
  title   = {Continual {Barlow} Twins: Continual Self-Supervised Learning for Remote Sensing Semantic Segmentation},
  year    = {2023},
  volume  = {16},
  pages   = {5049--5060},
  doi     = {10.1109/JSTARS.2023.3280029}
}
@article{9127795,
  author  = {Cheng, Gong and Xie, Xingxing and Han, Junwei and Guo, Lei and Xia, Gui-Song},
  journal = {IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing},
  title   = {Remote Sensing Image Scene Classification Meets Deep Learning: Challenges, Methods, Benchmarks, and Opportunities},
  year    = {2020},
  volume  = {13},
  pages   = {3735--3756},
  doi     = {10.1109/JSTARS.2020.3005403}
}
@misc{loshchilov2017sgdr,
  title         = {{SGDR}: Stochastic Gradient Descent with Warm Restarts},
  author        = {Loshchilov, Ilya and Hutter, Frank},
  year          = {2017},
  eprint        = {1608.03983},
  archivePrefix = {arXiv},
  primaryClass  = {cs.LG}
}
@misc{Mendieta2023GFMBG,
  title         = {{GFM}: Building Geospatial Foundation Models via Continual Pretraining},
  author        = {Mendieta, Mat{\'\i}as and Han, Boran and Shi, Xingjian and Zhu, Yi and Chen, Chen and Li, Mu},
  year          = {2023},
  eprint        = {2302.04476},
  archivePrefix = {arXiv},
  primaryClass  = {cs.CV},
  url           = {https://api.semanticscholar.org/CorpusID:256697124}
}
@article{SEN12MS_dataset,
  author  = {Schmitt, Michael and Hughes, Lloyd Haydn and Qiu, Chunping and Zhu, Xiao Xiang},
  title   = {{SEN12MS} -- A Curated Dataset of Georeferenced Multi-Spectral {Sentinel-1/2} Imagery for Deep Learning and Data Fusion},
  journal = {ISPRS Annals of the Photogrammetry, Remote Sensing and Spatial Information Sciences},
  volume  = {IV-2/W7},
  year    = {2019},
  pages   = {153--160},
  url     = {https://isprs-annals.copernicus.org/articles/IV-2-W7/153/2019/},
  doi     = {10.5194/isprs-annals-IV-2-W7-153-2019}
}
@article{postdam_dataset,
  author  = {Rottensteiner, F. and Sohn, G. and Jung, J. and Gerke, M. and Baillard, C. and Benitez, S. and Breitkopf, U.},
  title   = {The {ISPRS} Benchmark on Urban Object Classification and {3D} Building Reconstruction},
  journal = {ISPRS Annals of the Photogrammetry, Remote Sensing and Spatial Information Sciences},
  volume  = {I-3},
  year    = {2012},
  pages   = {293--298},
  url     = {https://isprs-annals.copernicus.org/articles/I-3/293/2012/},
  doi     = {10.5194/isprsannals-I-3-293-2012}
}
@article{millionaid,
  author  = {Long, Yang and Xia, Gui-Song and Li, Shengyang and Yang, Wen and Yang, Michael Ying and Zhu, Xiao Xiang and Zhang, Liangpei and Li, Deren},
  journal = {IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing},
  title   = {On Creating Benchmark Dataset for Aerial Image Interpretation: Reviews, Guidances, and {Million-AID}},
  year    = {2021},
  volume  = {14},
  pages   = {4205--4230},
  doi     = {10.1109/JSTARS.2021.3070368}
}
@article{patternet,
  title    = {{PatternNet}: A benchmark dataset for performance evaluation of remote sensing image retrieval},
  author   = {Zhou, Weixun and Newsam, Shawn and Li, Congmin and Shao, Zhenfeng},
  journal  = {ISPRS Journal of Photogrammetry and Remote Sensing},
  volume   = {145},
  pages    = {197--209},
  year     = {2018},
  note     = {Deep Learning RS Data},
  issn     = {0924-2716},
  doi      = {10.1016/j.isprsjprs.2018.01.004},
  url      = {https://www.sciencedirect.com/science/article/pii/S0924271618300042},
  keywords = {Remote sensing, Content based image retrieval (CBIR), Benchmark dataset, Handcrafted features, Deep learning, Convolutional neural networks},
  abstract = {Benchmark datasets are critical for developing, evaluating, and comparing remote sensing image retrieval (RSIR) approaches. However, current benchmark datasets are deficient in that (1) they were originally collected for land use/land cover classification instead of RSIR; (2) they are relatively small in terms of the number of classes as well as the number of images per class which makes them unsuitable for developing deep learning based approaches; and (3) they are not appropriate for RSIR due to the large amount of background present in the images. These limitations restrict the development of novel approaches for RSIR, particularly those based on deep learning which require large amounts of training data. We therefore present a new large-scale remote sensing dataset termed ``PatternNet'' that was collected specifically for RSIR. PatternNet was collected from high-resolution imagery and contains 38 classes with 800 images per class. Significantly, PatternNet's large scale makes it suitable for developing novel, deep learning based approaches for RSIR. We use PatternNet to evaluate the performance of over 35 RSIR methods ranging from traditional handcrafted feature based methods to recent, deep learning based ones. These results serve as a baseline for future research on RSIR.}
}