# MIT License

# Copyright (c) 2025 Quentin Gabot

# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:

# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

# Standard imports
import os

# External imports
from torch.utils.data import Dataset
import numpy as np
import torch

class S1SLC(Dataset):
    r"""
    The Polarimetric SAR dataset with the labels provided by
    https://ieee-dataport.org/open-access/s1slccvdl-complex-valued-annotated-single-look-complex-sentinel-1-sar-dataset-complex

    We expect the data to be already downloaded and available on your drive.

    Arguments:
        root: the top root dir where the data are expected, organized as follows::

                Sao Paulo/HH.npy, Sao Paulo/HV.npy, Sao Paulo/Labels.npy
                Houston/HH.npy, Houston/HV.npy, Houston/Labels.npy
                Chicago/HH.npy, Chicago/HV.npy, Chicago/Labels.npy

        transform: the transform applied to the cropped image
        lazy_loading: if True, the data is loaded only when requested. If False,
            the data is loaded at the initialization of the dataset.

    Note:
        An example usage:

        .. code-block:: python

            import numpy as np
            import torchcvnn
            from torchcvnn.datasets import S1SLC

            def transform(patches):
                # S1SLC provides the dual HH, HV polarizations as a
                # (2, H, W) complex array; if you wish, you could filter
                # out some polarizations or, as here, keep only the magnitude
                return np.abs(patches)

            dataset = S1SLC(rootdir, transform=transform)
            X, y = dataset[0]
    """

    def __init__(self, root, transform=None, lazy_loading=True):
        self.transform = transform
        self.lazy_loading = lazy_loading
        # Get list of subfolders in the root path
        subfolders = [
            os.path.join(root, name)
            for name in os.listdir(root)
            if os.path.isdir(os.path.join(root, name))
        ]

        self.data = []
        self.labels = []

        for subfolder in subfolders:
            # Define paths to the .npy files
            hh_path = os.path.join(subfolder, "HH.npy")
            hv_path = os.path.join(subfolder, "HV.npy")
            labels_path = os.path.join(subfolder, "Labels.npy")

            # Load the .npy files
            hh = np.load(hh_path, mmap_mode="r")
            hv = np.load(hv_path, mmap_mode="r")

            if not lazy_loading:
                # If not lazy loading, we load all the data in main memory
                # Concatenate HH and HV to create a two-channel array
                data = np.stack((hh, hv), axis=1)  # Shape: (B, 2, H, W)
            else:
                # If lazy loading, we store the paths to the .npy files
                num_patches = hh.shape[0]
                data = [
                    (hh_path, hv_path, patch_idx) for patch_idx in range(num_patches)
                ]

            # For the labels, we can preload everything in main memory
            labels = np.load(labels_path, mmap_mode="r")
            labels = [int(lbl.item()) - 1 for lbl in labels]  # Convert to 0-indexed labels

            # Append data and labels to the lists
            self.data.extend(data)
            self.labels.extend(labels)

        # Class indices, sorted for a deterministic ordering
        self.classes = sorted(set(self.labels))

    def __len__(self):
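        """Returns the total number of patches in the dataset."""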
        return len(self.data)

    def __getitem__(self, idx):
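        """Returns the (image, label) pair at index ``idx``: the image is the
        (2, H, W) complex HH/HV patch (after the optional transform) and the
        label its 0-indexed class."""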
        if self.lazy_loading:
            hh_path, hv_path, patch_idx = self.data[idx]

            # Memory-map the .npy files so that only the requested patch is read
            hh = np.load(hh_path, mmap_mode="r")
            hv = np.load(hv_path, mmap_mode="r")

            # Extract the right patch
            hh_patch = hh[patch_idx]
            hv_patch = hv[patch_idx]

            # Concatenate HH and HV to create a two-channel array
            image = np.stack((hh_patch, hv_patch), axis=0)  # Shape: (2, H, W)
        else:
            image = self.data[idx]

        label = self.labels[idx]

        if self.transform:
            image = self.transform(image)

        return image, label
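

# The block below is an illustrative, hypothetical usage sketch rather than part
# of the library API: it assumes the S1SLC archives have already been extracted
# under a local "./data/S1SLC" directory following the layout described in the
# class docstring.
if __name__ == "__main__":

    def amplitude_transform(patches):
        # Keep only the magnitude of the (2, H, W) HH/HV patch
        return np.abs(patches)

    dataset = S1SLC("./data/S1SLC", transform=amplitude_transform)
    print(f"{len(dataset)} patches, classes: {dataset.classes}")
    X, y = dataset[0]
    print(X.shape, y)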