Coverage for /home/runner/work/torchcvnn/torchcvnn/src/torchcvnn/datasets/slc/dataset.py: 0%
49 statements
« prev ^ index » next coverage.py v7.8.0, created at 2025-04-13 08:53 +0000
« prev ^ index » next coverage.py v7.8.0, created at 2025-04-13 08:53 +0000
1# MIT License
3# Copyright (c) 2024 Jeremy Fix
5# Permission is hereby granted, free of charge, to any person obtaining a copy
6# of this software and associated documentation files (the "Software"), to deal
7# in the Software without restriction, including without limitation the rights
8# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9# copies of the Software, and to permit persons to whom the Software is
10# furnished to do so, subject to the following conditions:
12# The above copyright notice and this permission notice shall be included in
13# all copies or substantial portions of the Software.
15# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21# SOFTWARE.
23# Standard imports
24import glob
25import pathlib
27# External imports
28from torch.utils.data import Dataset
29import numpy as np
31# Local imports
32from .slc_file import SLCFile
class SLCDataset(Dataset):
    r"""
    SLCDataset

    The format is described in https://uavsar.jpl.nasa.gov/science/documents/stack-format.html

    This object does not download the data for you, you must have the data on your local machine. For example, you can
    register and access data from the NASA JetLab https://uavsar.jpl.nasa.gov

    Note the datafiles can be quite large. For example, the quad polarization from Los Angeles SSurge_15305 is a bit
    more than 30 GB. If you take the downsampled datasets 2x8, it is 2GB.

    Note the 1x1 is 1.67 m slant range x 0.6 m azimuth.

    Note:
        As an example, using the example `read_slc.py`, with the SSurge_15305 stack provided
        by the UAVSar, the Pauli representation of the four polarizations is shown below :

    .. figure:: ../assets/datasets/slc_SSurge_15305.png
       :alt: Pauli representation of a :math:`3000 \times 3000` crop of the SSurge_15305 stack
       :width: 50%
       :align: center

    The code may look like this :

    .. code-block:: python

        import numpy as np
        import torchcvnn
        from torchcvnn.datasets.slc.dataset import SLCDataset

        def get_pauli(data):
            # Returns Pauli in (H, W, C)
            HH = data["HH"]
            HV = data["HV"]
            VH = data["VH"]
            VV = data["VV"]

            alpha = HH + VV
            beta = HH - VV
            gamma = HV + VH

            return np.stack([beta, gamma, alpha], axis=-1)

        patch_size = (3000, 3000)
        dataset = SLCDataset(
            rootdir,
            transform=get_pauli,
            patch_size=patch_size,
        )

    Arguments:
        rootdir: the path containing the SLC and ANN files
        transform: the transform applied to the patches. It applies
            on a dictionary of patches {'HH': np.array, 'HV': np.array, ...}
        patch_size: the dimensions of the patches to consider (rows, cols)
        patch_stride: the shift between two consecutive patches, default: patch_size
    """

    def __init__(
        self,
        rootdir: str = None,
        transform=None,
        patch_size: tuple = (128, 128),
        patch_stride: tuple = None,
    ):
        super().__init__()

        self.transform = transform
        self.patch_size = patch_size
        # A None stride means non-overlapping patches, i.e. stride == patch size
        self.patch_stride = patch_stride if patch_stride is not None else patch_size

        # Find all the SLC files and group the polarizations of a stack together
        self.slcs = glob.glob(str(pathlib.Path(rootdir) / "*.slc"))
        self.slc_polarizations = {}
        self.patch_counts = {}
        for slc in self.slcs:
            # Fix: forward the resolved stride (self.patch_stride) rather than
            # the raw constructor argument, so that when patch_stride is None
            # SLCFile uses the same effective stride as this dataset.
            slc_file = SLCFile(
                slc, patch_size=patch_size, patch_stride=self.patch_stride
            )
            slc_key = slc_file.key
            if slc_key not in self.slc_polarizations:
                self.slc_polarizations[slc_key] = {}

            self.slc_polarizations[slc_key][slc_file.polarization] = slc_file
            self.patch_counts[slc_key] = len(slc_file)

        # Sanity checks:
        # 1- For every SLC stack, we must have the same number of patches
        # 2- All the SLCs must have the same number of polarizations
        polarization_count = None
        for slc_key, slc_polarizations in self.slc_polarizations.items():
            if polarization_count is None:
                polarization_count = len(slc_polarizations)
            else:
                assert polarization_count == len(slc_polarizations)

            patch_count = None
            for polarization, slc_file in slc_polarizations.items():
                if patch_count is None:
                    patch_count = len(slc_file)
                else:
                    assert patch_count == len(slc_file)

        # Total number of patches over all the stacks
        self.nsamples = sum(self.patch_counts.values())

    def __len__(self) -> int:
        """Return the total number of patches over all the SLC stacks."""
        return self.nsamples

    def __getitem__(self, item):
        """
        Return the `item`-th patch: either the result of `self.transform`
        applied to the dictionary {'HH': np.array, ...}, or, when no transform
        is given, a np.array stacking the polarization patches along a new
        leading axis (polarizations sorted by name).

        Useful params from the annotation files:
            - 1x1_slc_azimuth_pixel_spacing
            - 1x1_slc_range_pixel_spacing
            - global_average_squint_angle
            - center_wavelength
            - slc_SEGMENT_1x1_rows
            - slc_SEGMENT_1x1_columns
        """
        # Fix: raise IndexError (instead of using `assert`, which disappears
        # under `python -O`); IndexError is the map-style Dataset contract and
        # makes plain iteration over the dataset terminate correctly.
        if not 0 <= item < self.nsamples:
            raise IndexError(
                f"Index {item} is out of range for a dataset of {self.nsamples} patches"
            )

        # 1- Find the SLC stack holding the requested patch, re-basing the
        #    index into that stack
        for slc_key, count in self.patch_counts.items():
            if item < count:
                slcs = self.slc_polarizations[slc_key]
                break
            item -= count

        # 2- Extract the same patch from every polarization of the stack
        sorted_keys = sorted(slcs.keys())
        patches = {pol: slcs[pol][item] for pol in sorted_keys}

        # 3a- Apply the user transform on the dictionary of patches, or
        # 3b- stack the patches along a new leading axis
        if self.transform is not None:
            patches = self.transform(patches)
        else:
            patches = np.stack(list(patches.values()))

        return patches