Commit be6ba62b authored by Tamino Huxohl

integrate bed removal into dataset

parent f4fbd9d8
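The change below threads an optional bed_contours_file argument through MuMapDataset and zeroes out the bed contour in every μ-map slice. A rough usage sketch, assuming a hypothetical module path and data directory (neither is shown in this commit):

# Hypothetical usage - the module path and "data/" directory are assumptions.
from mu_map.data.datasets import MuMapDataset

# Bed removal is on by default: contours are loaded from
# DEFAULT_BED_CONTOURS_FILENAME inside the dataset directory.
dataset = MuMapDataset("data/")
recon, mu_map = dataset[0]  # mu_map slices have the bed region zeroed

# Passing None opts out of bed removal entirely.
dataset_raw = MuMapDataset("data/", bed_contours_file=None)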
 import os
+from typing import Optional
 import pandas as pd
 import pydicom
 import numpy as np
 from torch.utils.data import Dataset
+import cv2 as cv  # needed for cv.drawContours in __getitem__ below
+from mu_map.data.remove_bed import DEFAULT_BED_CONTOURS_FILENAME, load_contours
 HEADER_DISC_FIRST = "discard_first"
 HEADER_DISC_LAST = "discard_last"
@@ -62,6 +65,7 @@ class MuMapDataset(Dataset):
         dataset_dir: str,
         csv_file: str = "meta.csv",
         images_dir: str = "images",
+        bed_contours_file: Optional[str] = DEFAULT_BED_CONTOURS_FILENAME,
         discard_μ_map_slices: bool = True,
     ):
         super().__init__()
@@ -70,8 +74,12 @@ class MuMapDataset(Dataset):
         self.dir_images = os.path.join(dataset_dir, images_dir)
         self.csv_file = os.path.join(dataset_dir, csv_file)
+        self.bed_contours_file = os.path.join(dataset_dir, bed_contours_file) if bed_contours_file else None
+        self.bed_contours = load_contours(self.bed_contours_file) if bed_contours_file else None

         # read CSV file and from that access DICOM files
         self.table = pd.read_csv(self.csv_file)
+        self.table["id"] = self.table["id"].apply(int)

         self.discard_μ_map_slices = discard_μ_map_slices
@@ -87,6 +95,11 @@ class MuMapDataset(Dataset):
         if self.discard_μ_map_slices:
             mu_map = discard_slices(row, mu_map)

+        if self.bed_contours:
+            bed_contour = self.bed_contours[row["id"]]
+            for i in range(mu_map.shape[0]):
+                mu_map[i] = cv.drawContours(mu_map[i], [bed_contour], -1, 0.0, -1)
+
         recon = align_images(recon, mu_map)
         return recon, mu_map
...
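For context, this is how the filled drawContours call blanks out the bed: a minimal sketch with a hypothetical slice and contour (the values and shapes are made up for illustration, not taken from the dataset).

import cv2 as cv
import numpy as np

# A single 64x64 μ-map slice with uniform, non-zero attenuation
# (hypothetical values, purely for illustration).
mu_map_slice = np.full((64, 64), 0.5, dtype=np.float32)

# A hypothetical bed contour in OpenCV's format: an (N, 1, 2) integer
# array of x/y points, here a rectangle near the bottom of the slice.
bed_contour = np.array([[[10, 50]], [[54, 50]], [[54, 60]], [[10, 60]]], dtype=np.int32)

# contourIdx=-1 draws every contour in the list, thickness=-1 fills the
# interior, and color=0.0 overwrites the enclosed pixels with zeros -
# the same call the dataset applies to each slice.
mu_map_slice = cv.drawContours(mu_map_slice, [bed_contour], -1, 0.0, -1)

assert mu_map_slice[55, 30] == 0.0  # inside the contour: bed removed
assert mu_map_slice[20, 30] == 0.5  # outside the contour: untouched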
mu_map/data/remove_bed.py
@@ -19,10 +19,8 @@ def load_contours(filename: str) -> Dict[int, np.ndarray]:
     with open(filename, mode="r") as f:
         contours = json.load(f)
-    for key, contour in contours.items():
-        del contours[key]
-        contours[int(key)] = np.array(contour).astype(int)
-    return contours
+    _map = map(lambda item: (int(item[0]), np.array(item[1]).astype(int)), contours.items())
+    return dict(_map)
 if __name__ == "__main__":
...
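One subtlety this rewrite also sidesteps: JSON object keys are always strings, and the old loop deleted and re-inserted keys while iterating over the dict, which raises a RuntimeError in Python 3. A small round-trip sketch (with a made-up contour and id) shows the conversion the new load_contours performs:

import json
import numpy as np

# JSON serializes dict keys as strings, so a saved contours file maps
# a stringified id to a nested list of points (made-up example data).
serialized = '{"42": [[[10, 50]], [[54, 50]], [[54, 60]], [[10, 60]]]}'
contours = json.loads(serialized)

# Build a fresh dict with integer ids and integer numpy arrays instead
# of mutating the dict while iterating over it.
contours = dict(
    map(lambda item: (int(item[0]), np.array(item[1]).astype(int)), contours.items())
)

assert 42 in contours
assert contours[42].shape == (4, 1, 2)  # OpenCV contour format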