<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta content="width=device-width, initial-scale=1.0" name="viewport">
<title>Resolution and calibration</title>
<meta content="" name="description">
<meta content="" name="keywords">
<!-- Favicons -->
<link href="assets/img/Favicon-1.png" rel="icon">
<link href="assets/img/Favicon-1.png" rel="apple-touch-icon">
<!-- Google Fonts -->
<link href="https://fonts.googleapis.com/css?family=Open+Sans:300,300i,400,400i,600,600i,700,700i|Raleway:300,300i,400,400i,500,500i,600,600i,700,700i|Poppins:300,300i,400,400i,500,500i,600,600i,700,700i" rel="stylesheet">
<!-- Vendor CSS Files -->
<link href="assets/vendor/aos/aos.css" rel="stylesheet">
<link href="assets/vendor/bootstrap/css/bootstrap.min.css" rel="stylesheet">
<link href="assets/vendor/bootstrap-icons/bootstrap-icons.css" rel="stylesheet">
<link href="assets/vendor/boxicons/css/boxicons.min.css" rel="stylesheet">
<link href="assets/vendor/glightbox/css/glightbox.min.css" rel="stylesheet">
<link href="assets/vendor/swiper/swiper-bundle.min.css" rel="stylesheet">
<!-- Creating a python code section-->
<link rel="stylesheet" href="assets/css/prism.css">
<script src="assets/js/prism.js"></script>
<!-- Template Main CSS File -->
<link href="assets/css/style.css" rel="stylesheet">
<!-- To set the icon, visit https://fontawesome.com/account-->
<script src="https://kit.fontawesome.com/5d25c1efd3.js" crossorigin="anonymous"></script>
<!-- end of icon-->
<script type="text/javascript" async
src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.7/MathJax.js?config=TeX-MML-AM_CHTML">
</script>
<!-- =======================================================
* Template Name: iPortfolio
* Updated: Sep 18 2023 with Bootstrap v5.3.2
* Template URL: https://bootstrapmade.com/iportfolio-bootstrap-portfolio-websites-template/
* Author: BootstrapMade.com
* License: https://bootstrapmade.com/license/
======================================================== -->
</head>
<body>
<!-- ======= Mobile nav toggle button ======= -->
<i class="bi bi-list mobile-nav-toggle d-xl-none"></i>
<!-- ======= Header ======= -->
<header id="header">
<div class="d-flex flex-column">
<div class="profile">
<img src="assets/img/myphoto.jpeg" alt="" class="img-fluid rounded-circle">
<h1 class="text-light"><a href="index.html">Arun</a></h1>
<div class="social-links mt-3 text-center">
<a href="https://www.linkedin.com/in/arunp77/" target="_blank" class="linkedin"><i class="bx bxl-linkedin"></i></a>
<a href="https://github.com/arunp77" target="_blank" class="github"><i class="bx bxl-github"></i></a>
<a href="https://twitter.com/arunp77_" target="_blank" class="twitter"><i class="bx bxl-twitter"></i></a>
<a href="https://www.instagram.com/arunp77/" target="_blank" class="instagram"><i class="bx bxl-instagram"></i></a>
<a href="https://arunp77.medium.com/" target="_blank" class="medium"><i class="bx bxl-medium"></i></a>
</div>
</div>
<nav id="navbar" class="nav-menu navbar">
<ul>
<li><a href="index.html#hero" class="nav-link scrollto active"><i class="bx bx-home"></i> <span>Home</span></a></li>
<li><a href="index.html#about" class="nav-link scrollto"><i class="bx bx-user"></i> <span>About</span></a></li>
<li><a href="index.html#resume" class="nav-link scrollto"><i class="bx bx-file-blank"></i> <span>Resume</span></a></li>
<li><a href="index.html#portfolio" class="nav-link scrollto"><i class="bx bx-book-content"></i> <span>Portfolio</span></a></li>
<li><a href="index.html#skills-and-tools" class="nav-link scrollto"><i class="bx bx-wrench"></i> <span>Skills and Tools</span></a></li>
<li><a href="index.html#language" class="nav-link scrollto"><i class="bi bi-menu-up"></i> <span>Languages</span></a></li>
<li><a href="index.html#awards" class="nav-link scrollto"><i class="bi bi-award-fill"></i> <span>Awards</span></a></li>
<li><a href="index.html#professionalcourses" class="nav-link scrollto"><i class="bx bx-book-alt"></i> <span>Professional Certification</span></a></li>
<li><a href="index.html#publications" class="nav-link scrollto"><i class="bx bx-news"></i> <span>Publications</span></a></li>
<li><a href="index.html#extra-curricular" class="nav-link scrollto"><i class="bx bx-rocket"></i> <span>Extra-Curricular Activities</span></a></li>
<!-- <li><a href="#contact" class="nav-link scrollto"><i class="bx bx-envelope"></i> <span>Contact</span></a></li> -->
</ul>
</nav><!-- .nav-menu -->
</div>
</header><!-- End Header -->
<main id="main">
<!-- ======= Breadcrumbs ======= -->
<section id="breadcrumbs" class="breadcrumbs">
<div class="container">
<div class="d-flex justify-content-between align-items-center">
<h2></h2>
<ol>
<li><a href="Remote-sensing-content.html" class="clickable-box"><i class="fas fa-arrow-left"></i> Content </a></li>
<li><a href="index.html" class="clickable-box"> Home <i class="fas fa-arrow-right"></i></a></li>
</ol>
</div>
</div>
</section><!-- End Breadcrumbs -->
<!------ right dropdown menue ------->
<div class="right-side-list">
<div class="dropdown">
<button class="dropbtn"><strong>Shortcuts:</strong></button>
<div class="dropdown-content">
<ul>
<li><a href="cloud-compute.html"><i class="fas fa-cloud"></i> Cloud</a></li>
<li><a href="AWS-GCP.html"><i class="fas fa-cloud"></i> AWS-GCP</a></li>
<li><a href="amazon-s3.html"><i class="fas fa-cloud"></i> AWS S3</a></li>
<li><a href="ec2-confi.html"><i class="fas fa-server"></i> EC2</a></li>
<li><a href="Docker-Container.html"><i class="fab fa-docker" style="color: rgb(22, 22, 22);"></i> Docker</a></li>
<li><a href="Jupyter-nifi.html"><i class="fab fa-python" style="color: rgb(15, 15, 15);"></i> Jupyter-nifi</a></li>
<li><a href="snowflake-task-stream.html"><i class="fas fa-snowflake"></i> Snowflake</a></li>
<li><a href="data-model.html"><i class="fas fa-database"></i> Data modeling</a></li>
<li><a href="sql-basics.html"><i class="fas fa-table"></i> QL</a></li>
<li><a href="sql-basic-details.html"><i class="fas fa-database"></i> SQL</a></li>
<li><a href="Bigquerry-sql.html"><i class="fas fa-database"></i> Bigquerry</a></li>
<li><a href="scd.html"><i class="fas fa-archive"></i> SCD</a></li>
<li><a href="sql-project.html"><i class="fas fa-database"></i> SQL project</a></li>
<!-- Add more subsections as needed -->
</ul>
</div>
</div>
</div>
<!-- ======= Portfolio Details Section ======= -->
<section id="portfolio-details" class="portfolio-details">
<div class="container">
<div class="row gy-4">
<h1>Resolution and calibration</h1>
<div class="image">
<figure style="text-align: center;">
<img src="assets/img/remote-sensing/Active-passive.png" alt="" style="max-width: 80%; max-height: 80%;">
<figcaption style="text-align: center;"><strong>Image credit:</strong><a href="https://arunp77.github.io/Arun-Kumar-Pandey/" target="_blank"> Arun Kumar Pandey</a> (part of the image taken from various sources). </figcaption>
</figure>
</div>
<section id="Section-1">
<h3>Analog and digital images </h3>
In remote sensing, it is essential to understand the data provided by sensors in order to interpret them properly. The first step is to understand what a satellite image
is and why it differs from a photograph. The main difference is that a photograph has an analogue format and is usually printed on paper before being interpreted,
whereas a satellite image has a digital format and is generally analysed and interpreted with a computer. Remote sensing techniques
have evolved from traditional analog methods to the contemporary era of digital technology. The table below summarizes key differences between the two.
<br><br>
<table>
<thead>
<tr>
<th>Aspect</th>
<th>Analog Photographs</th>
<th>Digital Images</th>
</tr>
</thead>
<tbody>
<tr>
<td>Capture Method</td>
<td>Uses film and optical systems for capturing images.</td>
<td>Employs electronic sensors to directly capture digital data.</td>
</tr>
<tr>
<td>Resolution</td>
<td>Limited resolution, typically lower than digital counterparts.</td>
<td>Higher resolution, enabling finer detail and clarity.</td>
</tr>
<tr>
<td>Flexibility</td>
<td>Limited post-capture manipulation options.</td>
<td>Allows extensive post-processing for enhancement and analysis.</td>
</tr>
<tr>
<td>Storage</td>
<td>Requires physical storage space for film rolls.</td>
<td>Data stored digitally, facilitating easy archiving and retrieval.</td>
</tr>
<tr>
<td>Transmission</td>
<td>Time-consuming and manual distribution of physical prints.</td>
<td>Easily transmitted electronically for rapid sharing and analysis.</td>
</tr>
<tr>
<td>Cost</td>
<td>Lower initial costs, but ongoing expenses for film and processing.</td>
<td>Higher initial investment, but lower ongoing costs for storage.</td>
</tr>
<tr>
<td>Environmental Impact</td>
<td>Chemical processes in film development can be environmentally harmful.</td>
<td>Generally more environmentally friendly, with less chemical waste.</td>
</tr>
<tr>
<td>Integration with Technology</td>
<td>Limited integration with modern data analysis tools.</td>
<td>Seamless integration with advanced data science and analytics tools.</td>
</tr>
</tbody>
</table><br>
<h4>Digital image formats</h4>
<ul>
<li>The distinction between analogue and digital formats lies in how they represent and store information. In the analogue format, data is continuous, seamlessly spread throughout.
For instance, when you capture an image in an analogue format, there are no discernible edges between different parts of the image.</li>
<li>On the other hand, digital format employs a different approach. It saves information in separate blocks, often referred to as <strong>pixels</strong>. When you zoom in on a digital image, you'll
notice these pixels—small squares of various colors in the case of satellite imagery. This discrete representation, organized in a matrix of squares, is a key characteristic of
digital formats.</li>
<li>The digital format relies on the '<strong><em>binary number system</em></strong>'. This system is the foundation of computer operations, enabling them to register, calculate, save data, and display
images. Computers register only electric pulses, which are translated into a binary code of <code>0</code>s and <code>1</code>s. This binary language forms the basis of the entire computer world.</li>
<li>Unlike the decimal system we traditionally count in (0 to 9, then a new series from 10 to 19, and so on), computers count in binary, going from 0 to 1 before starting
a new digit position. It is a binary world in which '<code>yes</code>' or '<code>no</code>', <code>0</code> or <code>1</code>, encapsulates the fundamental language of computation.</li>
<br>
<div style="background-color: #18ce3080; padding: 10px; border-radius: 5px; border-color: black;">
<ul>
<li>A single binary digit (0 or 1) is called a 'bit';</li>
<li>A group of 8 bits is called a 'byte' and can represent 2<sup>8</sup> = 256 distinct values;</li>
<li>1 KB is equal to 1,000 bytes;</li>
<li>1 MB is equal to 1,000,000 bytes;</li>
<li>If your computer has a memory of 64 MB, this means that it can cope with data containing up to 64 × 1,000,000 × 8, that is 512,000,000 bits or electric pulses;</li>
<li>And if your computer has a hard disk of 2 GB, it means that it can contain data amounting to 2 × 1,000,000,000 × 8, that is 16,000,000,000 bits or electric pulses (this arithmetic is reproduced in the sketch after this list).</li>
</ul>
</div>
</ul>
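<p>As a quick sanity check, the storage arithmetic above can be reproduced in a few lines of Python (a minimal sketch, using the decimal convention 1 MB = 1,000,000 bytes from the box):</p>
<pre><code class="language-python"># Reproduce the storage arithmetic from the box above,
# using the decimal convention 1 MB = 1,000,000 bytes.
BITS_PER_BYTE = 8

memory_mb = 64
memory_bits = memory_mb * 1_000_000 * BITS_PER_BYTE
print(f"{memory_mb} MB of memory = {memory_bits:,} bits")  # 512,000,000 bits

disk_gb = 2
disk_bits = disk_gb * 1_000_000_000 * BITS_PER_BYTE
print(f"{disk_gb} GB of disk = {disk_bits:,} bits")        # 16,000,000,000 bits
</code></pre>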
<h3>Visual image interpretation</h3>
Visual image interpretation refers to the human ability to analyze the content of images, e.g., land cover and land use, from remote sensing imagery. It encompasses two steps:
<ul>
<li>first the perception of objects according to their external attributes and,</li>
<li>second, the actual interpretation of their meaning.</li>
</ul>
The following table provides a guideline for visually interpreting images. The complexity increases as the table progresses, starting with basic
elements such as contrast and color and ending with the interpretation of the spatial context:<br><br>
<table>
<tr>
<th>Attribute</th>
<th>Description (example)</th>
<th>Interpretation (example)</th>
</tr>
<tr>
<td>Contrast, color, brightness</td>
<td>Transition from light to dark blue</td>
<td>Variations in water depth</td>
</tr>
<tr>
<td>Geometry (shape, size)</td>
<td>Sinuous ribbon-like object</td>
<td>River</td>
</tr>
<tr>
<td>Texture (structure of a surface)</td>
<td>Rough surface with vertical line patterns</td>
<td>Maize cultivation</td>
</tr>
<tr>
<td>Spatial context (functional interrelationship)</td>
<td>Rail tracks that intersect a building</td>
<td>Railway station</td>
</tr>
</table><br>
<h3>What is raster and vector data?</h3>
Raster data and vector data are two primary types of geospatial data representations used in <a href="https://www.esri.com/en-us/what-is-gis/overview" target="_blank">geographic information systems (GIS)</a> and related fields.
<div class="box">
A raster is a matrix of cells (pixels) organized into rows and columns.
</div><br>
Raster data is a type of geospatial data representation that is used to capture and store information about the characteristics of geographic phenomena. Unlike vector data, which represents spatial
information using points, lines, and polygons, raster data is structured as a grid of cells, where each cell or pixel holds a specific value. These cells collectively form a grid,
and each cell contains a value that represents a certain attribute or characteristic at that location. This attribute could be elevation, temperature, land cover, or any other measurable quantity.
<ol>
<li><strong>Raster Data:</strong> Raster data represents the Earth's surface as a grid of cells or pixels, with each cell containing a single value or multiple values representing a specific attribute. Examples of raster data include:
<ul>
<li><strong>Satellite imagery:</strong> Captures images of the Earth's surface, often divided into bands representing different wavelengths of light.</li>
<li><strong>Digital elevation models (DEMs):</strong> Represent elevation data as a grid, commonly used for terrain analysis and visualization.</li>
<li><strong>Land cover classification:</strong> Categorizes the Earth's surface into different land cover types (e.g., forests, water bodies) based on spectral characteristics.</li>
</ul>
<p><b>Raster Data Formats: </b>Raster data is typically stored in formats that are optimized for storing gridded or pixel-based data. Some common raster data formats include:</p>
<ul>
<li><b>GeoTIFF (.tif, .tiff):</b> A widely used format for storing georeferenced raster data, including satellite imagery and elevation models.</li>
<li><b>JPEG (.jpg, .jpeg):</b> A compressed format commonly used for storing aerial and satellite imagery.</li>
<li><b>NetCDF (.nc):</b> A self-describing format commonly used for storing multidimensional scientific data, such as climate and atmospheric data.</li>
<li><b>HDF (.hdf, .he5):</b> Hierarchical Data Format, often used for storing remote sensing data and complex scientific data.</li>
<li><b>ENVI (.img, .dat):</b> A proprietary format used by the ENVI software for storing and processing remote sensing data.</li>
</ul>
</li>
<li><strong>Vector data:</strong>
<ul>
<li><strong>Points:</strong> Represent specific locations on the Earth's surface, such as cities, landmarks, or GPS coordinates.</li>
<li><strong>Lines:</strong> Represent linear features such as roads, rivers, boundaries, and transportation networks.</li>
<li><strong>Polygons:</strong> Represent areas such as land parcels, administrative boundaries, and land use zones.</li>
</ul>
<p>Vector data is typically stored in formats that are optimized for storing geometric objects, such as points, lines, and polygons. Some common vector data formats include:</p>
<ul>
<li><b>Shapefile (.shp, .shx, .dbf):</b> A widely used format developed by Esri for storing geometric objects and associated attribute data.</li>
<li><b>GeoJSON (.geojson):</b> A format based on JSON for encoding geographic data structures, commonly used for web-based mapping applications.</li>
<li><b>GML (Geography Markup Language):</b> An XML-based format for encoding and transmitting geographic data.</li>
<li><b>KML (.kml, .kmz):</b> A file format used for displaying geographic data in web-based applications like Google Earth.</li>
<li><b>DXF (.dxf):</b> A format used for exchanging computer-aided design (CAD) data, often used for storing vector data in GIS applications.</li>
</ul>
</li>
</ol>
<p>While raster and vector data are the two primary formats, it's important to note that some types of geospatial information may not fit neatly into these categories. For example, <a href="https://en.wikipedia.org/wiki/Lidar" target="_blank">LiDAR</a> (Light Detection and Ranging) data, which captures three-dimensional point clouds of the Earth's surface, is often stored and analyzed as raster data (e.g., DEMs derived from LiDAR), but it can also be represented as point clouds or vector features. Similarly, some types of geospatial data, such as satellite imagery with vector overlays or thematic layers, may combine elements of both raster and vector data formats.</p>
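<p>As an illustration, the two representations are typically handled with different tools. The sketch below assumes the <code>rasterio</code> and <code>geopandas</code> libraries are installed; <code>example.tif</code> and <code>parcels.shp</code> are hypothetical file names standing in for real data:</p>
<pre><code class="language-python">import rasterio          # raster (gridded) data
import geopandas as gpd  # vector (point/line/polygon) data

# Raster: a GeoTIFF is opened as a grid of cells; each band is a 2-D array.
with rasterio.open("example.tif") as src:      # hypothetical file name
    band1 = src.read(1)                        # first band as a NumPy array
    print(src.crs, src.transform, band1.shape)

# Vector: a shapefile is read as a table of geometries plus attributes.
parcels = gpd.read_file("parcels.shp")         # hypothetical file name
print(parcels.geometry.geom_type.head(), parcels.crs)
</code></pre>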
<figure style="text-align: center;">
<img src="assets/img/remote-sensing/raster-data.png" alt="" style="max-width: 80%; max-height: 80%;">
<figcaption style="text-align: center;">Raster spatial image format (<strong>Image credit:</strong><a href="https://pages.cms.hu-berlin.de/EOL/geo_rs/S02_image-prop1.html" target="_blank"> Humbold University</a>). </figcaption>
</figure>
<table>
<tr>
<th>Characteristic</th>
<th>Raster Data</th>
<th>Vector Data</th>
</tr>
<tr>
<td>Representation</td>
<td>Represented as a grid of regularly spaced cells or pixels.</td>
<td>Represents geographic features using points, lines, and polygons.</td>
</tr>
<tr>
<td>Structure</td>
<td>Each cell in the grid contains a specific value, representing a certain attribute or characteristic.</td>
<td>Points represent individual locations, lines represent linear features, and polygons represent areas.</td>
</tr>
<tr>
<td>Examples</td>
<td>Satellite imagery, digital elevation models (DEMs), aerial photographs, thermal images.</td>
<td>Road networks, political boundaries, lakes, buildings.</td>
</tr>
<tr>
<td>Characteristics</td>
<td>Well-suited for continuous and regularly varying data. Useful for representing imagery.</td>
<td>Well-suited for representing discrete features and the relationships between them.</td>
</tr>
<tr>
<td>Geometry</td>
<td>Uses a grid of cells.</td>
<td>Uses points, lines, and polygons.</td>
</tr>
<tr>
<td>Data Types</td>
<td>Suitable for continuous and regularly varying data.</td>
<td>Effective for discrete features and their relationships.</td>
</tr>
<tr>
<td>Storage</td>
<td>Can be more storage-intensive for certain types of information.</td>
<td>Generally less storage-intensive for the same information.</td>
</tr>
<tr>
<td>Analysis</td>
<td>Used for spatial analysis involving continuous data.</td>
<td>Used for analyzing discrete features and their attributes.</td>
</tr>
</table><br>
<h5>Image format</h5>
<p>An image format is a file format that is used to store digital images. The most common image formats regarding digital remote sensing imagery are illustrated in the table below:</p>
<table>
<tr>
<th>Format name</th>
<th>Extension</th>
<th>Description</th>
</tr>
<tr>
<td>GeoTIFF</td>
<td>.tif, .tiff, .gtiff</td>
<td>TIFF + geospatial reference</td>
</tr>
<tr>
<td>ENVI</td>
<td>.bsq, .bil, .bip, .dat</td>
<td>Generic, often used in the imaging spectroscopy community; Header file (.hdr) with metadata!</td>
</tr>
<tr>
<td>JPEG2000</td>
<td>.jp2, .j2k</td>
<td>Used by many data providers; usually for integer values only</td>
</tr>
<tr>
<td>HDF4, HDF5</td>
<td>.hdf, .h4, .hdf4, .h5, .hdf5</td>
<td>Hierarchical data format, version 4 or 5; multi-resolution raster</td>
</tr>
<tr>
<td>netCDF</td>
<td>.nc</td>
<td>Network Common Data Format; multi-resolution raster</td>
</tr>
<tr>
<td>SAFE</td>
<td></td>
<td>Standard Archive Format for Europe, e.g., Sentinel-1 and Sentinel-2</td>
</tr>
</table><br>
<h4>The pixels</h4>
In remote sensing, <strong>pixels</strong> are the smallest units of a digital image, representing specific areas on the Earth's surface. Each pixel has a numeric value reflecting the characteristics of the
corresponding scene, such as brightness or color. Remote sensing images are composed of a grid of pixels, and the <strong>resolution</strong> is determined by the size of these pixels. Higher <strong>resolution</strong>
provides more detailed images. Pixels are crucial for the digital representation, storage, and analysis of remote sensing data. <p> </p>
<figure style="text-align: center;">
<img src="assets/img/remote-sensing/the-pixels.png" alt="" style="max-width: 60%; max-height: 60%;">
<figcaption style="text-align: center;"><strong>Image credit:</strong><a href="https://www.mdpi.com/2072-4292/9/9/967" target="_blank"> © Darius Phiri and Justin Morgenroth</a></figcaption>
</figure><br>
Each pixel holds a numeric value, known as a pixel value, reflecting attributes like color or brightness. In remote sensing, pixel values encode information about the observed scene,
facilitating data interpretation and analysis. These values are fundamental for understanding and processing digital imagery.
<img src="assets/img/remote-sensing/pixel-values.gif" alt="" style="max-width: 80%; max-height: 80%;">
Human vision perceives color by detecting light across the entire visible spectrum, and our brains process this information into distinct colors. In contrast, many sensors work by capturing
information within narrow wavelength ranges, storing it in channels or bands. Digital representation involves combining and displaying these channels using primary colors
(blue, green, and red). Each channel's data is represented by one of these colors, and the relative brightness (digital value) of each pixel in each channel determines the
final color by combining the primary colors in varying proportions. <p></p>
<figure style="text-align: center;">
<img src="assets/img/remote-sensing/the-pixels-1.png" alt="" style="max-width: 80%; max-height: 80%;">
<figcaption style="text-align: center;"><strong>Image credit:</strong><a href="https://ieeexplore.ieee.org/document/6658949" target="_blank"> © Lefei Zhang et al.</a></figcaption>
</figure>
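<p>The channel mixing just described can be sketched with NumPy: three band arrays are stretched to the 0&ndash;255 display range and stacked as red, green, and blue intensities (random arrays stand in for real band data):</p>
<pre><code class="language-python">import numpy as np

# Three single-band "images" (random values stand in for real sensor data).
rng = np.random.default_rng(0)
red, green, blue = (rng.random((100, 100)) for _ in range(3))

def stretch(band):
    """Linearly rescale a band to 8-bit display values (0-255)."""
    lo, hi = band.min(), band.max()
    return ((band - lo) / (hi - lo) * 255).astype(np.uint8)

# Stack the channels; each pixel's final color mixes the three primaries
# in proportion to its digital value in each band.
rgb = np.dstack([stretch(red), stretch(green), stretch(blue)])
print(rgb.shape, rgb.dtype)  # (100, 100, 3) uint8
</code></pre>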
<!----------------Resolution -------->
<h2>Resolution</h2>
<p>The resolution of an image refers to the potential detail provided by the imagery. In remote sensing we refer to four types of resolution: spatial, spectral, temporal and radiometric.</p>
<figure style="text-align: center;">
<img src="assets/img/remote-sensing/resolution-0.png" alt="" style="max-width: 80%; max-height: 80%;">
<figcaption style="text-align: center;"><strong>Image credit:</strong><a href="https://eo-college.org/topic/the-resolutions-dimensions-of-eo-data/" target="_blank"> © EO College.</a></figcaption>
</figure>
<ul>
<li><strong>Spatial Resolution: </strong>Spatial Resolution refers to the size of the smallest feature that can be detected by a satellite sensor or displayed in a
satellite image. It is usually presented as a single value representing the length of one side of a square. For example, a spatial resolution of 250m means
that one pixel represents an area 250 by 250 meters on the ground.</li>
<li><strong>Spectral Resolution: </strong>Spectral Resolution refers to the ability of a satellite sensor to measure specific wavelengths of the electromagnetic
spectrum. The finer the spectral resolution, the narrower the wavelength range for a particular channel or band.</li>
<li><strong>Temporal resolution: </strong>Temporal resolution refers to the time between images. The capability for satellites to provide images of the same
geographical area more frequently has increased dramatically since the dawn of the space age.</li>
<li><strong>Radiometric resolution: </strong>While the arrangement of pixels describes the spatial structure of an image, the radiometric
characteristics describe the actual information content in an image. Every time an image is acquired on film or by a sensor, its sensitivity to the magnitude of the electromagnetic energy
determines the radiometric resolution. The radiometric resolution of an imaging system describes its ability to discriminate very slight differences in energy. The finer the radiometric
resolution of a sensor, the more sensitive it is to detecting small differences in reflected or emitted energy.</li>
</ul>
<figure style="text-align: center;">
<img src="assets/img/remote-sensing/resolution-1.png" alt="" style="max-width: 50%; max-height: 50%;">
</figure>
<h4>1. Spatial Resolution, Pixel Size, and Scale</h4>
Spatial resolution describes, above all, the pixel size and/or pixel spacing (in the image or on the ground) and thus the sensor's ability to differentiate detail. It depends on the type of sensor, the size of the CCD (charge-coupled device) array and the viewing angle.
<ul>
<li>For some remote sensing instruments, the distance between the target being imaged and the platform plays a large role in determining the detail of information obtained and the total area imaged by the sensor.</li>
<li>Sensors onboard platforms far away from their targets typically view a larger area, but cannot provide great detail. The detail discernible in an image depends on the spatial resolution of the sensor and refers to the size of the smallest possible feature that can be detected.</li>
<li><strong>Instantaneous Field of View (IFOV):</strong> Spatial resolution of passive sensors depends primarily on their IFOV.</li>
<ul>
<li>FOV = field of view, defines the swath width.</li>
<li>IFOV = instantaneous field of view, defines the angle of the individual sensors.</li>
<li>GIFOV = ground instantaneous field of view, is the IFOV projected onto the ground.</li>
<li>GSD = ground sample distance (derived from the GIFOV and detector size), defines the resulting pixel size (see the sketch after this list).</li>
</ul>
<img src="assets/img/remote-sensing/IFOV.png" alt="" style="max-width: 60%; max-height: auto;">
<li>Images where only large features are visible are said to have coarse or low resolution. In fine or high resolution images, small objects can be detected.</li>
<li>The pixel size of the final image product thus depends on the sensor and varies widely, from about 0.4 m to 900 m. Typical values for commonly used free satellite data are 5 m (RapidEye), 30 m (Landsat 8) or 250/500 m (MODIS). Sentinel-2 has spatial resolutions of 10, 20 and 60 m depending on the channel. In addition to contrast, spatial image resolution is primarily decisive for the recognition and differentiation of objects.</li>
<img src="assets/img/remote-sensing/resolution.png" alt="" style="max-width: 50%; max-height: 50%;">
</ul>
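<p>For a nadir-looking sensor, the ground-projected pixel size can be estimated from the IFOV and the platform altitude. A minimal sketch of this small-angle relation follows; the numbers are illustrative values roughly matching a Landsat-class sensor, not official mission specifications:</p>
<pre><code class="language-python">import math

def ground_sample_distance(altitude_m, ifov_rad):
    """Approximate GSD: 2 * H * tan(IFOV / 2), which for small angles
    reduces to the familiar GSD = H * IFOV."""
    return 2 * altitude_m * math.tan(ifov_rad / 2)

# Illustrative values (roughly Landsat-class, not official specs):
altitude = 705_000  # platform altitude in metres
ifov = 42.5e-6      # instantaneous field of view in radians
print(f"GSD = {ground_sample_distance(altitude, ifov):.1f} m")  # about 30 m
</code></pre>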
<h4>2. Temporal resolution</h4>
<p>
In remote sensing, temporal resolution refers to the frequency or repeat cycle with which a sensor acquires data over the same location. It is typically expressed in days, weeks, or months,
and it is an important factor in determining the suitability of remote sensing data for a particular application.</p>
<p>High temporal resolution data is useful for monitoring rapidly changing phenomena, such as wildfires, deforestation, and crop growth. It can also be used to create time-lapse animations
that show how a landscape has changed over time.</p>
<p>Low temporal resolution data is more suitable for mapping large areas and for applications where changes occur slowly, such as land cover classification and geological mapping.</p>
<p>The temporal resolution of a remote sensor is determined by a number of factors, including:</p>
<ul>
<li><strong>The orbit of the sensor:</strong> Geostationary satellites remain stationary over a fixed point on the equator and can image the same area almost continuously, giving very high temporal resolution, whereas polar-orbiting satellites pass over a given location only once every few days.</li>
<li><strong>The swath width of the sensor:</strong> The swath width is the width of the area that the sensor can image at once. Sensors with a wide swath cover more of the Earth on each pass and can therefore revisit a given location more often, giving a higher temporal resolution than sensors with a narrow swath.</li>
<li><strong>The cloud cover:</strong> Clouds can obscure the Earth's surface, making it impossible for a sensor to acquire data. This can reduce the temporal resolution of the data.</li>
</ul>
<figure style="text-align: center;">
<img src="assets/img/remote-sensing/temporal-resolution.png" alt="" style="max-width: 80%; max-height: 80%;">
<figcaption style="text-align: center;"><strong>Image credit:</strong>
<a href="https://natural-resources.canada.ca/maps-tools-and-publications/satellite-imagery-and-air-photos/tutorial-fundamentals-remote-sensing/satellites-and-sensors/satellite-characteristics-orbits-and-swaths/9283" target="_blank">
© Remote Sensing Tutorials (Natural Resources Canada).</a></figcaption>
</figure>
<div style="background-color: #cddcdf; padding: 15px; margin: 15px; border-radius: 5px;">
<h4>Swaths:</h4>
<p>
The swath width refers to the coverage area on the Earth's surface captured by a satellite during a single pass. It is determined by the satellite's sensor characteristics and the
satellite's altitude. A wider swath allows for broader coverage but may sacrifice image resolution.
</p>
<p>
Understanding the interplay between orbits and swaths is essential for optimizing satellite mission design. LEO satellites, for instance, may have smaller swaths but offer higher revisit
frequencies, making them suitable for applications requiring frequent observations. GEO satellites, on the other hand, have a fixed view but cover a larger area with each pass, making
them ideal for continuous monitoring.
</p>
</div>
<h4>3. <a href="https://pages.cms.hu-berlin.de/EOL/geo_rs/S03_image-prop2.html" target="_blank">Radiometric resolution</a></h4>
The radiometric resolution indicates the ability of a sensor to distinguish between different intensities within the respective wavelength range of a channel. Simply speaking, it determines how finely the brightness range of an image is subdivided, indicating the number of grey levels, expressed in bits:
<ul>
<li>1 bit = \(2^1\) = 2 grey levels</li>
<li>2 bit = \(2^2\) = 4 grey levels</li>
<li>4 bit = \(2^4\) = 16 grey levels</li>
<li>8 bit = \(2^8\) = 256 grey levels.</li>
</ul>
<strong>Bit depths: </strong>Radiometric resolution is often described in terms of bit depth, representing the number of bits used to represent pixel values. For example, an 8-bit image has 2<sup>8</sup> (256) possible values, while a 16-bit image can represent 2<sup>16</sup> (65,536) values.
An 8-bit sensor can distinguish between 256 levels of radiation, while a 16-bit sensor can distinguish between 65,536 levels.
This means that a 16-bit image can record a much finer gradation of brightness values than an 8-bit image.
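<p>The effect of bit depth on how finely brightness is quantized can be shown directly. A minimal sketch with synthetic radiance values:</p>
<pre><code class="language-python">import numpy as np

def quantize(signal, bits):
    """Map a continuous signal in [0, 1] onto 2**bits discrete grey levels."""
    levels = 2 ** bits
    return np.floor(signal * (levels - 1) + 0.5).astype(int)

signal = np.linspace(0, 1, 6)  # synthetic radiance values
for bits in (1, 2, 4, 8):
    print(f"{bits}-bit ({2 ** bits:3d} levels):", quantize(signal, bits))
</code></pre>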
<figure style="text-align: center;">
<img src="assets/img/remote-sensing/radio-bit-resol.jpg" alt="" style="max-width: 90%; max-height: 90%;">
<figcaption style="text-align: center;"><strong>Image credit:</strong>
<a href="https://learn.opengeoedu.de/en/fernerkundung/vorlesung/Remote%20Sensing/aufloesungen#radiometric-resolution" target="_blank">
© OpenGeoEdu.</a></figcaption>
</figure>
<ul>
<li><strong>Quantization Levels: </strong> It is often expressed in terms of the number of quantization levels or bits used to represent the intensity values. Higher radiometric
resolution corresponds to a greater number of possible values.</li>
<li><strong>Sensitivity to Brightness Variations:</strong> A sensor with high radiometric resolution can capture subtle differences in brightness, allowing for more detailed and nuanced information
about the objects or features in the scene.</li>
<li><strong>Dynamic range: </strong>The dynamic range of a sensor is closely related to radiometric resolution. A broader dynamic range enables the sensor to capture a wide range of intensities, from very dark to very bright</li>
<li><strong>Application: </strong>Radiometric resolution is crucial in applications where detecting small variations in reflectance or emitted radiation is essential. This includes tasks like land cover classification, mineral
identification, and environmental monitoring.</li>
<li><strong>Trade-offs: </strong>There can be trade-offs between radiometric and spatial resolution in remote sensing systems. Increasing radiometric resolution may require sacrificing spatial resolution, and vice versa.</li>
</ul>
<figure style="text-align: center;">
<img src="assets/img/remote-sensing/radiometric-resolution.png" alt="" style="max-width: 90%; max-height: 90%;">
<figcaption style="text-align: center;"><strong>Image credit:</strong>
<a href="http://www.morrisriedel.de/introduction-to-deep-learning-models" target="_blank">
© Deep learning: Introduction to Deep Learning Models (Dr. – Ing. Gabriele Cavallaro).</a></figcaption>
</figure>
Radiometric resolution is important for a number of reasons, including:
<ul>
<li><strong>Image quality:</strong> Radiometric resolution directly affects the image quality of a sensor. A higher radiometric resolution will produce a more detailed and accurate image.</li>
<li><strong>Image processing:</strong> Radiometric resolution can also affect the ability of image processing algorithms to extract information from images. For example, a higher radiometric
resolution may be required to accurately identify objects in an image.</li>
<li><strong>Scientific applications:</strong> Radiometric resolution is also important in many scientific applications, such as remote sensing and astronomy. In these applications,
the ability to distinguish between different levels of radiation can be critical for making accurate measurements.</li>
</ul>
The radiometric resolution of a sensor is determined by a number of factors, including:
<ul>
<li><strong>The type of sensor:</strong> Different types of sensors have different radiometric resolutions. For example, electronic cameras typically have higher radiometric resolutions than film cameras.</li>
<li><strong>The sensitivity of the sensor:</strong> The sensitivity of the sensor also affects the radiometric resolution. A more sensitive sensor can distinguish between smaller levels of radiation.</li>
<li><strong>The ADC (analog-to-digital converter):</strong> The ADC is responsible for converting the analog signal from the sensor into a digital signal that can be processed by the computer. The
resolution of the ADC will determine the maximum radiometric resolution of the sensor.</li>
</ul>
<h4>4. Spectral Resolution </h4>
<figure style="text-align: center;">
<img src="assets/img/remote-sensing/spectral-spectrum.png" alt="" style="max-width: 50%; max-height: 50%;">
<figcaption style="text-align: center;"><strong>Image credit: </strong><a href="https://commons.wikimedia.org/wiki/File:Spectral_sampling_RGB_multispectral_hyperspectral_imaging.svg">Lucasbosch</a>,<a href="https://creativecommons.org/licenses/by-sa/4.0">CC BY-SA 4.0</a>, via Wikimedia Commons</figcaption>
</figure>
<ul>
<li>Spectral resolution refers to a sensor's ability to distinguish between different wavelengths or bands of electromagnetic radiation in the electromagnetic
spectrum. It is a crucial aspect of remote sensing systems, influencing the capacity to capture and analyze various features on the Earth's surface. </li>
<li>Spectral resolution describes the amount of spectral detail in a band. High spectral resolution means the bands are narrower, whereas low spectral
resolution means broader bands covering more of the spectrum.</li>
<li><strong>Wavelength Discrimination: </strong>Remote sensing sensors with higher spectral resolution can differentiate between a greater number of narrow bands across the electromagnetic spectrum.
This discrimination allows for more detailed analysis of surface features.</li>
<li><strong>Bands and Channels: </strong>Spectral resolution is often associated with the number and width of bands or channels in a sensor. Multispectral sensors capture data in several broad
bands, while hyperspectral sensors operate with numerous narrow and contiguous bands (a simple way to represent such bands in code is sketched after this list).</li>
<li><strong>Applications: </strong>Different surface materials and features interact with electromagnetic radiation in distinctive ways. Higher spectral resolution enables the
identification and analysis of specific materials, vegetation types, and environmental conditions.</li>
<li><strong>Spectral Signatures: </strong>Each material exhibits a unique spectral signature, a distinctive pattern of reflectance or emission across different wavelengths. Fine spectral
resolution facilitates the accurate identification of these signatures, contributing to more precise classification and interpretation.</li>
</ul>
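<p>A sensor's bands can be represented in code as centre wavelengths and band widths; narrower widths mean finer spectral resolution. The band values below are hypothetical, not taken from any specific sensor:</p>
<pre><code class="language-python"># Hypothetical band definitions: centre wavelength and width in nanometres.
bands = {
    "blue":  (490, 65),
    "green": (560, 35),
    "red":   (665, 30),
    "nir":   (842, 115),
}

def covering_bands(wavelength_nm):
    """Return the bands whose range contains the given wavelength."""
    return [name for name, (centre, width) in bands.items()
            if abs(wavelength_nm - centre) &lt;= width / 2]

print(covering_bands(560))  # ['green']
print(covering_bands(700))  # [] -- falls in a gap between red and NIR
</code></pre>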
<figure style="text-align: center;">
<img src="assets/img/remote-sensing/resolution-spectral.png" alt="" style="max-width: 90%; max-height: 90%;">
<figcaption style="text-align: center;"><strong> Last image credit:</strong><a href="https://blog.descarteslabs.com/a-look-into-the-fundamentals-of-remote-sensing" target="_blank"> © 2020 Descartes Labs Inc., All Rights Reserved.</a>
(a) shows the image in the visible range. (b) shows the near-infrared, red, and green bands, where all the vegetation pops out strongly in red, since near-infrared radiation is reflected very strongly.
(c) The final image displays the two shortwave infrared bands and near infrared; here agricultural fields reflect very strongly in the infrared bands, and more detail in the soil properties appears in yellow and brown.
</figcaption>
</figure>
<h4>5. Multispectral Scanning</h4>
<p>Multispectral remote sensing is a type of remote sensing that utilizes multiple bands of the electromagnetic spectrum to capture images of the Earth's surface. Unlike traditional single-band imaging, which captures information from a single
wavelength range, multispectral scanning collects data from multiple bands, typically four to six, each sensitive to a specific portion of the electromagnetic spectrum. This multispectral approach provides a more comprehensive view of the
Earth's surface, allowing for the identification and differentiation of various features that would be difficult to discern using single-band imagery.</p>
<figure style="text-align: center;">
<img src="assets/img/remote-sensing/multispectral-spectra-1.png" alt="" style="max-width: 50%; max-height: 50%;">
<figcaption style="text-align: center;"><strong>Image credit: </strong>Courtesy of Smart Vision Lights.</figcaption>
</figure>
<p>In spectral imaging, an object’s spectral information is parsed into multiple images, with each image resulting from a different wavelength band reflected or emitted from the object. The wavelength range of images can span
from the ultraviolet to near-infrared (NIR) using standard silicon-based sensors, and into the NIR/shortwave infrared (SWIR) region using InGaAs (indium gallium arsenide)
and other SWIR-compatible sensors. Images from each wavelength band are
combined into a data matrix called an image cube. An image cube is essentially a stack of images in which each image in the stack corresponds to a discrete spectral band. The number of spectral bands in the cube
differentiates hyperspectral from multispectral imaging. “Hyper” implies the accumulation of tens to hundreds of wavelength-band images, while “multi” consists of more than one spectral-band image.</p>
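<p>The image-cube idea can be made concrete with NumPy: stacking one 2-D image per band yields a 3-D array, and reading along the band axis at a single pixel gives that pixel's spectrum (random values stand in for real band images):</p>
<pre><code class="language-python">import numpy as np

rng = np.random.default_rng(1)
rows, cols, n_bands = 100, 100, 6  # "multi" = a handful of bands;
                                   # "hyper" would use hundreds.

# One 2-D image per spectral band, stacked into a (rows, cols, bands) cube.
cube = np.dstack([rng.random((rows, cols)) for _ in range(n_bands)])

# The spectrum at one pixel is the vector of its values across all bands.
spectrum = cube[50, 50, :]
print(cube.shape, spectrum.shape)  # (100, 100, 6) (6,)
</code></pre>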
<p><strong>Benefits: </strong>
Multispectral remote sensing offers several advantages over traditional single-band imaging:
<ul>
<li><strong>Enhanced Image Detail:</strong> By combining data from multiple spectral bands, multispectral images provide a more detailed and nuanced representation of the Earth's surface, revealing subtle variations in land cover, vegetation, and other features.</li>
<li><strong>Improved Land Cover Classification:</strong> Multispectral imagery is particularly useful in classifying different types of land cover, including forests, grasslands, urban areas, and water bodies. The ability to capture information across multiple wavelengths enables the identification of unique spectral signatures associated with different land cover types.</li>
<li><strong>Vegetation Analysis:</strong> Multispectral images are valuable tools for analyzing vegetation health, crop growth, and changes in plant cover. The near-infrared band, for instance, is sensitive to chlorophyll content, allowing for the assessment of plant productivity and stress (see the NDVI sketch after this list).</li>
<li><strong>Water Resource Monitoring:</strong> Multispectral scanning can be used to detect and map water bodies, monitor water quality, and assess the extent of water stress or drought conditions. The near-infrared and red bands are particularly useful for discriminating between land and water features.</li>
<li><strong>Environmental Monitoring:</strong> Multispectral imagery plays a crucial role in environmental monitoring applications, including tracking deforestation, detecting wildfires, and assessing the impact of pollution on land and water resources. Multispectral data can provide valuable insights into the causes and consequences of environmental changes.</li>
</ul></p>
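<p>Vegetation analysis of this kind often comes down to simple band arithmetic. The sketch below computes the widely used Normalized Difference Vegetation Index, NDVI = (NIR &minus; Red) / (NIR + Red), from two synthetic band arrays; real values would come from the red and near-infrared channels of a multispectral image:</p>
<pre><code class="language-python">import numpy as np

def ndvi(nir, red, eps=1e-9):
    """Normalized Difference Vegetation Index: (NIR - Red) / (NIR + Red)."""
    nir = nir.astype(float)
    red = red.astype(float)
    return (nir - red) / (nir + red + eps)  # eps guards against division by zero

# Synthetic reflectance bands (stand-ins for real multispectral channels).
rng = np.random.default_rng(2)
nir_band = rng.uniform(0.3, 0.8, (50, 50))
red_band = rng.uniform(0.05, 0.3, (50, 50))

result = ndvi(nir_band, red_band)
print(result.min(), result.max())  # healthy vegetation pushes NDVI toward 1
</code></pre>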
<p><strong>Applications</strong>
Multispectral remote sensing has found applications in a wide range of fields, including:
<ul>
<li><strong>Agriculture:</strong> Multispectral images are used to monitor crop health, identify areas of stress or disease, and optimize irrigation practices. By analyzing spectral signatures, farmers can make informed decisions to improve crop yields and resource management.</li>
<li><strong>Forestry:</strong> Multispectral data is employed to assess forest health, track deforestation, and map forest types. Multispectral images can reveal subtle changes in forest cover, allowing for early detection of deforestation and monitoring of forest regeneration efforts.</li>
<li><strong>Geology:</strong> Multispectral scanning can aid in geological mapping, mineral exploration, and identification of landforms and soil types. Multispectral imagery can provide valuable insights into the geological composition of an area, assisting in mineral exploration and geological surveys.</li>
<li><strong>Urban Planning:</strong> Multispectral images can be used to map urban areas, analyze urban sprawl, and identify areas of potential development. Multispectral data can help urban planners assess land use patterns, identify suitable areas for development, and plan for sustainable urban growth.</li>
<li><strong>Disaster Management:</strong> Multispectral scanning plays a critical role in disaster management, including mapping floodplains, tracking wildfires, and assessing damage after natural disasters. Multispectral imagery can provide real-time information about the extent and impact of disasters, aiding in emergency response and recovery efforts.</li>
</ul>
</p>
<p><strong>Challenges of Multispectral Remote Sensing:</strong>
Despite its numerous advantages, multispectral remote sensing faces certain challenges:
<ul>
<li><strong>Atmospheric Interference:</strong> Clouds, haze, and dust can obscure the Earth's surface, reducing the quality of multispectral images and limiting their usefulness in certain conditions. Atmospheric correction techniques are employed to minimize the impact of atmospheric interference, but they don't always fully compensate for these effects.</li>
<li><strong>Sensor Limitations:</strong> Different sensors have varying resolutions, spectral ranges, and sensitivity, which can affect their ability to capture certain details or distinguish between subtle spectral variations. Sensor calibration and data processing techniques are used to enhance image quality and reduce sensor-specific limitations.</li>
<li><strong>Data Processing and Analysis:</strong> Interpreting and extracting meaningful information from large volumes of multispectral data can be computationally demanding and require specialized expertise. Advanced image processing algorithms and machine learning techniques are being developed to automate and improve the analysis of multispectral data.</li>
</ul>
</p>
<p><strong>Future directions: </strong>
Technological advancements in sensor technology, data processing algorithms, and artificial intelligence are driving the continuous evolution of multispectral remote sensing. Next-generation sensors will offer higher resolutions, broader spectral ranges, and improved sensitivity, providing even more detailed and accurate images of the Earth's surface. These advancements will further enhance the capabilities of multispectral scanning in addressing environmental challenges, supporting sustainable development, and enabling a deeper understanding of our planet's diverse landscapes.
</p>
</section>
<section id="">
<h2>Hyperspectral imaging</h2>
<p>Hyperspectral imaging is a powerful remote sensing technique that captures highly detailed images of the Earth's surface by measuring electromagnetic radiation across a broad range of wavelengths. Unlike traditional imaging techniques that capture images in a few broad bands, hyperspectral imaging captures hundreds or even thousands of bands, providing a continuous spectrum of information. This rich spectral data allows for the identification and differentiation of a wide range of features with exquisite precision.</p>
<p><strong>Types of hyperspectral imaging</strong></p>
<p>There are six main types of hyperspectral imaging:
<ul>
<li><strong>Whiskbroom Imaging:</strong> An across-track scanner that uses a rotating or oscillating mirror to sweep a small number of detectors across the scene, building up the image pixel by pixel as the platform moves forward.</li>
<li><strong>Pushbroom Imaging:</strong> This is the most common type of hyperspectral imaging. It uses a linear array of detectors oriented across the flight direction; the forward motion of the platform sweeps the array along the scene, and each detector measures the radiation for one pixel of an image line.</li>
<li><strong>Snapshot Imaging:</strong> This type of hyperspectral imaging uses a staring array of detectors to capture images. The entire scene is imaged at once, and each detector measures the radiation at a specific wavelength.</li>
<li><strong>Line Scanning Imaging:</strong> This type of hyperspectral imaging uses a single detector to scan across the scene. The detector measures the radiation at all wavelengths at each point along the scan line.</li>
<li><strong>Tunable Filter Imaging:</strong> This type of hyperspectral imaging uses a tunable filter to select specific wavelengths of radiation. The filter is tuned to a particular wavelength, and the sensor captures an image at that wavelength. The process is repeated for all wavelengths of interest.</li>
<li><strong>Fourier Transform Spectroscopy (FTS):</strong> This type of hyperspectral imaging uses an interferometer to measure the spectrum of radiation at each point in the scene. The interferometer produces an interference pattern, which is then analyzed to extract the spectral information.</li>
</ul></p>
<figure style="text-align: center;">
<img src="assets/img/remote-sensing/scanners.png" alt="" style="max-width: 90%; max-height: 90%;">
<figcaption style="text-align: center;"><strong>image credit:</strong> © Arun-Kumar-Pandey and upper pannel is taken from
<a href="https://www.vision-systems.com/cameras-accessories/article/14181365/hyperspectral-imaging-sensor-and-camera-requirements" target="_blank">Vision systems</a></figcaption>
</figure>
<table border="1">
<thead>
<tr>
<th>Type</th>
<th>Description</th>
<th>Advantages</th>
<th>Disadvantages</th>
</tr>
</thead>
<tbody>
<tr>
<td>Pushbroom</td>
<td>Scans a linear array of detectors along the scene as the platform moves</td>
<td>No moving parts; longer dwell time per pixel</td>
<td>Many detectors must be cross-calibrated</td>
</tr>
<tr>
<td>Whiskbroom</td>
<td>Sweeps a small number of detectors across the scene with a rotating mirror</td>
<td>Few detectors, so calibration is simpler</td>
<td>Mechanically complex; short dwell time per pixel</td>
</tr>
<tr>
<td>Snapshot</td>
<td>Uses a staring array of detectors to capture the entire scene at once</td>
<td>High spatial resolution, fast</td>
<td>Expensive</td>
</tr>
<tr>
<td>Line scanning</td>
<td>Scans a single detector across the scene</td>
<td>Simple, fast</td>
<td>Poor spatial resolution</td>
</tr>
<tr>
<td>Tunable filter</td>
<td>Selects specific wavelengths of radiation with a tunable filter</td>
<td>Can capture images at specific wavelengths</td>
<td>Slow</td>
</tr>
<tr>
<td>FTS</td>
<td>Uses an interferometer to measure the spectrum of radiation at each point in the scene</td>
<td>High spectral resolution</td>
<td>Complex, slow</td>
</tr>
</tbody>
</table><br>
</section>
<!-------------------------- Reference ------------------------------->
<section id="reference">
<h2>References</h2>
<ul>
<li><a href="https://www.esa.int/SPECIALS/Eduspace_EN/SEM4HR3Z2OF_0.html" target="_blank">Analogue versus digital images.</a></li>
<li><a href="http://www.lmars.whu.edu.cn/prof_web/zhangliangpei/rs/publication/Hyperspectral%20Remote%20Sensing%20Image%20Subpixel.pdf" target="_blank">Hyperspectral Remote Sensing Image Subpixel
Target Detection Based on Supervised Metric Learning.</a></li>
<li><a href="https://blog.descarteslabs.com/a-look-into-the-fundamentals-of-remote-sensing" target="_blank">Educational Series (part one): A Look into the Fundamentals of Remote Sensing</a></li>
<li>Hyperspectral Imaging: A Review on UAV-Based Sensors, Data Processing and Applications for Agriculture and Forestry</li>
<li><a href="https://www.eumetsat.int/data-calibration-and-uncertainty" target="_blank">Data calibration and uncertainty.</a></li>
</ul>
</section>
<div class="navigation">
<a href="Remote-sensing-content.html" class="clickable-box">
<span class="arrow-left">Content</span>
</a>
<a href="index.html" class="clickable-box">
<span class="arrow-right">Home</span>
</a>
</div>
</div>
</section><!-- End Portfolio Details Section -->
</main><!-- End #main -->
<!-- ======= Footer ======= -->
<footer id="footer">
<div class="container">
<div class="copyright">
© Copyright <strong><span>Arun</span></strong>
</div>
</div>
</footer><!-- End Footer -->
<a href="#" class="back-to-top d-flex align-items-center justify-content-center"><i class="bi bi-arrow-up-short"></i></a>
<!-- Vendor JS Files -->
<script src="assets/vendor/purecounter/purecounter_vanilla.js"></script>
<script src="assets/vendor/aos/aos.js"></script>
<script src="assets/vendor/bootstrap/js/bootstrap.bundle.min.js"></script>
<script src="assets/vendor/glightbox/js/glightbox.min.js"></script>
<script src="assets/vendor/isotope-layout/isotope.pkgd.min.js"></script>
<script src="assets/vendor/swiper/swiper-bundle.min.js"></script>
<script src="assets/vendor/typed.js/typed.umd.js"></script>
<script src="assets/vendor/waypoints/noframework.waypoints.js"></script>
<script src="assets/vendor/php-email-form/validate.js"></script>
<!-- Template Main JS File -->
<script src="assets/js/main.js"></script>
<script>
document.addEventListener("DOMContentLoaded", function () {
// Guard the call: highlight.js is not loaded on this page (Prism is used instead).
if (window.hljs) { hljs.initHighlightingOnLoad(); }
});
</script>
</body>
</html>