Commit 4021c62 · Parent: 39b990a
update scripts

Files changed:
- scripts/DL_save_nifti.py        +26 -70
- scripts/LICENSE                 +15 -0
- scripts/apply_window.py         +54 -0
- scripts/batch_download_zips.py  +29 -7
scripts/DL_save_nifti.py  CHANGED (+26, -70)

Old version (as shown in the diff; "..." marks lines not visible in the hunks):

#!/usr/bin/env python
"""
Ke Yan
Imaging Biomarkers and Computer-Aided Diagnosis Laboratory
National Institutes of Health Clinical Center
May 2018
THIS SOFTWARE IS PROVIDED BY THE AUTHOR(S) ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""

"""
A simple demo to load 2D 16-bit slices from DeepLesion and save to 3D nifti volumes.
The nifti volumes can be viewed in software such as 3D slicer and ITK-SNAP.
"""

import numpy as np
import nibabel as nib
import os
import cv2

dir_in = 'Images_png'
dir_out = 'Images_nifti'
info_fn = 'DL_info.csv'  # file name of the information file


def slices2nifti(ims, fn_out, spacing):
    """save 2D slices to 3D nifti file considering the spacing"""
    ...
        V[:, :, i] = ims[i]

    # the transformation matrix suitable for 3D slicer and ITK-SNAP
    T = np.array([[0, -spacing[1], 0, 0], [-spacing[0], 0, 0, 0], [0, 0, -spacing[2], 0], [0, 0, 0, 1]])
    img = nib.Nifti1Image(V, T)
    path_out = os.path.join(dir_out, fn_out)
    nib.save(img, path_out)
    print fn_out, 'saved'


def load_slices(dir, slice_idxs):
    ...
    assert np.all(slice_idxs[1:] - slice_idxs[:-1] == 1)
    ims = []
    for slice_idx in slice_idxs:
        ...
        print 'read', path

        # the 16-bit png file has ...
        ims.append((im.astype(np.int32) - 32768).astype(np.int16))
    return ims


def read_DL_info():
    """read spacings and image indices in DeepLesion"""
    spacings = []
    idxs = []
    with open(info_fn, 'rb') as csvfile:
        reader = csv.reader(csvfile)
        rownum = 0
        for row in reader:
            if rownum == 0:
                header = row
                rownum += 1
            else:
                idxs.append([int(d) for d in row[1:4]])
                spacings.append([float(d) for d in row[12].split(',')])

    idxs = np.array(idxs)
    spacings = np.array(spacings)
    return idxs, spacings


if __name__ == '__main__':
    idxs, spacings = read_DL_info()
    if not os.path.exists(dir_out):
        os.mkdir(dir_out)
    img_dirs = os.listdir(dir_in)
    img_dirs.sort()
    for dir1 in img_dirs:
        # find the image info according to the folder's name
        idxs1 = np.array([int(d) for d in dir1.split('_')])
        ...
        spacings1 = spacings[i1[0]]

        fns = os.listdir(os.path.join(dir_in, dir1))
        slices = [int(d[:-4]) for d in fns if d.endswith('.png')]
        slices.sort()

        # Each folder contains png slices from one series (volume)
        # There may be several sub-volumes in each volume depending on the key slices
        ...
        for group in groups:
            # group contains slices indices of a sub-volume
            ims = load_slices(dir1, group)
            fn_out = ...
            slices2nifti(ims, fn_out, spacings1)
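For reference, the nibabel affine T used by the old version is an axis swap plus sign flips, scaled by the voxel spacing. A minimal sketch, using a hypothetical spacing of (0.8, 0.8, 5.0) mm (illustrative values only, not taken from DL_info.csv), shows what the matrix evaluates to:

import numpy as np

spacing = (0.8, 0.8, 5.0)  # hypothetical (x, y, slice-interval) spacing in mm
T = np.array([[0, -spacing[1], 0, 0],
              [-spacing[0], 0, 0, 0],
              [0, 0, -spacing[2], 0],
              [0, 0, 0, 1]])
print(T)
# Mapping voxel (i, j, k) to world coordinates with this matrix: an i-step moves
# along -y, a j-step along -x, a k-step along -z, each scaled by the voxel spacing.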
New version (the "..." lines are context collapsed by the diff viewer):

"""
A simple demo to load 2D 16-bit slices from DeepLesion and save to 3D nifti volumes.
The nifti volumes can be viewed in software such as 3D slicer and ITK-SNAP.
"""

import os
import cv2

import numpy as np
import pandas as pd
import SimpleITK as sitk


dir_in = '../Images_png'
dir_out = '../Images_nifti'
info_fn = '../DL_info.csv'


def slices2nifti(ims, fn_out, spacing):
    """save 2D slices to 3D nifti file considering the spacing"""
    image_itk = sitk.GetImageFromArray(np.stack(ims, axis=0))
    image_itk.SetSpacing(spacing)
    image_itk.SetDirection((1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, -1.0))
    sitk.WriteImage(image_itk, os.path.join(dir_out, fn_out))
    print(fn_out, 'saved')


def load_slices(dir, slice_idxs):
    ...
    assert np.all(slice_idxs[1:] - slice_idxs[:-1] == 1)
    ims = []
    for slice_idx in slice_idxs:
        path = os.path.join(dir_in, dir, f'{slice_idx:03d}.png')
        im = cv2.imread(path, cv2.IMREAD_UNCHANGED)  # Read as 16-bit image
        assert im is not None, f'error reading {path}'
        print(f'read {path}')

        # the 16-bit png file has an intensity bias of 32768
        ims.append((im.astype(np.int32) - 32768).astype(np.int16))
    return ims


if __name__ == '__main__':

    # Read spacings and image indices in DeepLesion
    dl_info = pd.read_csv(info_fn)
    idxs = dl_info[['Patient_index', 'Study_index', 'Series_ID']].values
    spacings = dl_info['Spacing_mm_px_'].apply(lambda x: np.array(x.split(", "), dtype=float)).values
    spacings = np.stack(spacings)

    if not os.path.exists(dir_out):
        os.mkdir(dir_out)
    img_dirs = sorted(os.listdir(dir_in))
    for dir1 in img_dirs:
        # find the image info according to the folder's name
        idxs1 = np.array([int(d) for d in dir1.split('_')])
        ...
        spacings1 = spacings[i1[0]]

        fns = os.listdir(os.path.join(dir_in, dir1))
        slices = sorted([int(d[:-4]) for d in fns if d.endswith('.png')])

        # Each folder contains png slices from one series (volume)
        # There may be several sub-volumes in each volume depending on the key slices
        ...
        for group in groups:
            # group contains slices indices of a sub-volume
            ims = load_slices(dir1, group)
            fn_out = f'{dir1}_{group[0]:03d}-{group[-1]:03d}.nii.gz'
            slices2nifti(ims, fn_out, spacings1)
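sitk.GetImageFromArray takes the stacked slices in (slice, row, column) order, so the written volume reports its size as (width, height, number of slices). As a quick sanity check, a converted volume can be read back and its geometry compared against DL_info.csv; a minimal sketch follows, where the file name is only an illustration of the {folder}_{first}-{last}.nii.gz pattern produced above and must be replaced with an actual output file:

import SimpleITK as sitk

# Hypothetical output file; substitute any volume written by DL_save_nifti.py.
vol = sitk.ReadImage('../Images_nifti/000001_01_01_103-115.nii.gz')
print(vol.GetSize())       # (width, height, number of slices)
print(vol.GetSpacing())    # in-plane spacing and slice interval, in mm
print(vol.GetDirection())  # should match the direction tuple set in slices2nifti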
scripts/LICENSE  ADDED (+15 lines)

Copyright (c) Feb 2024, Benjamin Hou
Copyright (c) May 2018, Ke Yan
Imaging Biomarkers and Computer-Aided Diagnosis Laboratory
National Institutes of Health Clinical Center

THIS SOFTWARE IS PROVIDED BY THE AUTHOR(S) ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
scripts/apply_window.py  ADDED (+54 lines)

"""
Apply CT windowing parameter from DL_info.csv to Images_png
"""

import os
import cv2

import numpy as np
import pandas as pd

from glob import glob
from tqdm import tqdm


dir_in = '../Images_png'
dir_out = '../Images_png_wn'
info_fn = '../DL_info.csv'

if not os.path.exists(dir_out):
    os.mkdir(dir_out)

dl_info = pd.read_csv(info_fn)


def clip_and_normalize(np_image: np.ndarray,
                       clip_min: int = -150,
                       clip_max: int = 250
                       ) -> np.ndarray:
    np_image = np.clip(np_image, clip_min, clip_max)
    np_image = (np_image - clip_min) / (clip_max - clip_min)
    return np_image


for idx, row in tqdm(dl_info.iterrows(), total=len(dl_info)):

    folder = row['File_name'].rsplit('_', 1)[0]
    images = sorted(glob(f'{dir_in}/{folder}/*.png'))

    if not os.path.exists(f'{dir_out}/{folder}'):
        os.mkdir(f'{dir_out}/{folder}')
    DICOM_windows = [float(value.strip()) for value in row['DICOM_windows'].split(',')]

    for im in images:
        try:
            image = cv2.imread(im, cv2.IMREAD_UNCHANGED)
            image = image.astype('int32') - 32768
            image = clip_and_normalize(image, *DICOM_windows)
            image = (image*255).astype('uint8')
            cv2.imwrite(f'{dir_out}/{folder}/{os.path.basename(im)}', image)
        except AttributeError:
            # Broken Images
            # 001821_07_01/372.png
            # 002161_04_02/116.png
            print(f'Conversion failed: {im}')
            continue
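clip_and_normalize is a standard linear intensity window: values are clipped to [clip_min, clip_max] and rescaled to [0, 1], and the loop then writes the result as 8-bit PNGs. A small sketch of the same arithmetic on a few hand-picked HU values (illustrative numbers only, using the function's default window of [-150, 250]):

import numpy as np

hu = np.array([-1000, -150, 50, 250, 1000])   # illustrative HU values
lo, hi = -150, 250                            # default window of clip_and_normalize
win = (np.clip(hu, lo, hi) - lo) / (hi - lo)  # same clip-and-rescale as above
print((win * 255).astype('uint8'))            # -> [  0   0 127 255 255]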
scripts/batch_download_zips.py  CHANGED (+29, -7)

Old version (as shown in the diff; "..." marks lines not visible in the hunks):

...
# URLs for the zip files
links = [
    ...
    'https://nihcc.box.com/shared/static/kqg4peb9j53ljhrxe3l3zrj4ac6xogif.zip'
]

md5_link = 'https://nihcc.box.com/shared/static/q0f8gy79q2spw96hs6o4jjjfsrg17t55.txt'
urllib.urlretrieve(md5_link, ...
for idx, link in enumerate(links):
    fn = 'Images_png_...
    ...
    print ...

New version:

"""
Download the 56 zip files in Images_png in batches
"""

import os
import urllib.request

from tqdm import tqdm


class DownloadProgressBar(tqdm):
    def update_to(self, b=1, bsize=1, tsize=None):
        if tsize is not None:
            self.total = tsize
        self.update(b * bsize - self.n)


# URLs for the zip files
links = [
    ...
    'https://nihcc.box.com/shared/static/kqg4peb9j53ljhrxe3l3zrj4ac6xogif.zip'
]

# Download directory
dir_out = '../Images_zip'
if not os.path.exists(dir_out):
    os.mkdir(dir_out)

# Download MD5 checksum file
md5_link = 'https://nihcc.box.com/shared/static/q0f8gy79q2spw96hs6o4jjjfsrg17t55.txt'
urllib.request.urlretrieve(md5_link, f'{dir_out}/MD5_checksums.txt')

# Download DeepLesion zip files
for idx, link in enumerate(links):
    fn = f'{dir_out}/Images_png_{idx+1:02d}.zip'
    with DownloadProgressBar(unit='B', unit_scale=True, miniters=1, desc=f'Downloading: {fn}') as t:
        urllib.request.urlretrieve(link, fn, t.update_to)
print('Download complete. Please check the MD5 checksums')
|