
Commit 748c4b5

Partial progress on implementing GUI for batch export
1 parent 48dd2ea commit 748c4b5

5 files changed: +176 −119 lines


rawtools/cli.py

Lines changed: 10 additions & 3 deletions
@@ -4,7 +4,7 @@
 from importlib.metadata import version
 from multiprocessing import cpu_count
 
-from rawtools import convert, generate, nsihdr, qualitycontrol, log, raw2img
+from rawtools import convert, generate, log, nsihdr, qualitycontrol, raw2img
 
 __version__ = version('rawtools')
 
@@ -75,13 +75,20 @@ def raw_nsihdr():
     parser.add_argument("-V", "--version", action="version", version=f'%(prog)s {__version__}')
     parser.add_argument("-v", "--verbose", action="store_true", help="Increase output verbosity")
     parser.add_argument("-f", "--force", action="store_true", default=False, help="Force file creation. Overwrite any existing files.")
-    parser.add_argument('path', metavar='PATH', type=str, nargs='+', help='List of .nsihdr files')
+    parser.add_argument("--gui", action="store_true", default=False, help="Enable GUI")
+    parser.add_argument('path', metavar='PATH', type=str, nargs="+", help='List of .nsihdr files')
     args = parser.parse_args()
 
     args.module_name = 'nsihdr'
     log.configure(args)
 
-    nsihdr.main(args)
+    # Use a GUI to select the source directory
+    if args.gui == True:
+        from rawtools.gui import nsihdr
+        nsihdr.App(args)
+    # Otherwise, assume CLI use
+    else:
+        nsihdr.main(args)
 
 def raw_qc():
     """Quality control tools"""

rawtools/gui/__init__.py

Whitespace-only changes.
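
(This file only needs to exist: it marks rawtools.gui as an importable package, which the deferred "from rawtools.gui import nsihdr" in cli.py above depends on.)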

rawtools/gui/nsihdr.py

Lines changed: 94 additions & 0 deletions
@@ -0,0 +1,94 @@
+"""GUI for NSIHDR conversion tool"""
+
+import logging
+import os
+import tkinter as tk
+from pprint import pformat
+from tkinter import E, N, S, W, filedialog, ttk
+
+from ttkthemes import ThemedTk
+
+class App():
+    def __init__(self, args):
+        self.source = ''
+
+        root = ThemedTk(theme='arc')
+        root.title("Batch Export Tool - NSIHDR to RAW + DAT")
+        root.resizable(False, False)
+        mainframe = ttk.Frame(root, padding="16 16")
+        mainframe.grid(column=0, row=0, sticky=(N, S, E, W))
+        mainframe.columnconfigure(0, weight=1)
+
+        # Source folder selection
+        src_intro_label_text = "Choose a directory that contains NSIHDR."
+        src_intro_label = ttk.Label(mainframe, text=src_intro_label_text)
+        src_intro_label.grid(row=0, column=0, columnspan=3, sticky=(E, W), pady="0 8")
+
+        src_label = ttk.Label(mainframe, text="Source Directory:")
+        src_label.grid(row=1, column=0, sticky=W, pady="0 8")
+
+        self.src = tk.StringVar()
+        self.src.set(self.source)
+        # Rescan on edits to the source field; write traces receive (name, index, mode)
+        self.src.trace("w", lambda *_: self.scan_folder([self.src.get()]))
+        self.src_entry = ttk.Entry(mainframe, textvariable=self.src, width=100)
+        self.src_entry.grid(row=2, column=0, sticky=(E, W), padx="0 8", pady="0 16")
+
+        self.src_folder_btn = ttk.Button(mainframe, text='Select Folder', command=self.choose_src)
+        self.src_folder_btn.grid(row=2, column=1, pady="0 16", padx="8 0")
+
+        # Export data
+        self.export_btn = ttk.Button(mainframe, text='export', command=self.export)
+        self.export_btn.grid(row=12, column=0, pady="0 8", columnspan=3)
+
+
+        # Center window on screen
+        root.update()  # virtual pre-render of GUI to calculate actual sizes
+        w = root.winfo_reqwidth()
+        h = root.winfo_reqheight()
+        logging.debug(f"Root width: {w}")
+        logging.debug(f"Root height: {h}")
+        ws = root.winfo_screenwidth()
+        hs = root.winfo_screenheight()
+        # calculate position x, y
+        x = (ws / 2) - (w / 2)
+        y = (hs / 2) - (h / 2)
+        root.geometry('+%d+%d' % (x, y))
+
+
+        # Display window to user
+        root.mainloop()
+
+    def choose_src(self):
+        """Select a folder to act as data source"""
+        self.source = filedialog.askdirectory(initialdir=self.source, title="Choose directory")
+        logging.debug(f'Selected folder: {self.source}')
+        self.src.set(self.source)
+        # setting the StringVar fires the trace above, which rescans the folder
+
+    def scan_folder(self, path):
+        """Gather all .nsihdr files found under the given paths"""
+        logging.info(path)
+        files = []
+        for p in path:
+            for root, _, filenames in os.walk(p):
+                for filename in filenames:
+                    files.append(os.path.join(root, filename))
+                    logging.info(filename)
+
+        # Append any loose, explicitly defined paths to .nsihdr files
+        files.extend([ f for f in path if f.endswith('.nsihdr') ])
+
+        # Filter out non-NSIHDR files
+        files = [ f for f in files if f.endswith('.nsihdr') ]
+
+        # De-duplicate and report the final file list
+        logging.debug(f"All files: {pformat(files)}")
+        files = list(set(files))  # remove duplicates
+        logging.debug(f"Unique files: {pformat(files)}")
+
+        return files
+
+    def export(self):
+        logging.debug("PLACEHOLDER - EXPORT DATA")
+
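
A note on the trace wiring in App.__init__ above: tkinter invokes a "w" (write) trace with three arguments, (name, index, mode), so the callback must accept and discard them and read the current text from the variable itself. A minimal standalone sketch of the pattern, independent of rawtools:

    import tkinter as tk

    root = tk.Tk()
    var = tk.StringVar()

    def on_change(*_trace_args):  # receives (name, index, mode); unused
        print(f"source changed to: {var.get()}")

    var.trace("w", on_change)  # fires on every write to the variable
    tk.Entry(root, textvariable=var).pack()
    root.mainloop()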

rawtools/nsihdr.py

Lines changed: 71 additions & 115 deletions
@@ -6,6 +6,7 @@
 import re
 import sys
 from multiprocessing import Pool, cpu_count
+from pprint import pformat
 from time import time
 
 import numpy as np
@@ -21,102 +22,14 @@
 sys.path.append(includesdir)
 from rawtools import nsiefx
 
-
-def write_metadata(args, metadata):
-    """Generates a .dat file from information gathered from an .nsihdr file
-
-    NOTE(tparker): Temporarily, I am writing the minimum and maximum values found
-    in the 32-bit float version of the files in case we ever need to convert the
-    uint16 version back to float32.
-
-    Args:
-        args (ArgumentParser): user arguments from `argparse`
-        metadata (dict): dictionary of metadata created from reading .nsihdr file
-    """
-    ObjectFileName = args.output
-    resolution = ' '.join(metadata['dimensions'])
-    slice_thickness = ' '.join([ str(rr) for rr in metadata['resolution_rounded'] ])
-    dat_filepath = f'{os.path.splitext(args.output)[0]}.dat'
-    output_string = f"""ObjectFileName: {ObjectFileName}\nResolution: {resolution}\nSliceThickness: {slice_thickness}\nFormat: {metadata['bit_depth_type']}\nObjectModel: {metadata['ObjectModel']}"""
-
-    dat.write(dat_filepath, metadata['dimensions'], metadata['resolution_rounded'])
-    # with open(dat_filepath, 'w') as ofp:
-    #     print(f'Generating {dat_filepath}')
-    #     ofp.write(output_string)
-
-    bounds_filepath = os.path.join(args.cwd, f'{os.path.splitext(args.output)[0]}.float32.range')
-    with open(bounds_filepath, 'w') as ofp:
-        print(f'Generating {bounds_filepath}')
-        bounds = f'{INITIAL_LOWER_BOUND} {INITIAL_UPPER_BOUND}'
-        ofp.write(bounds)
-
-def read_nsihdr(args, fp):
-    """Collects relative metadata from .nsihdr file
-
-    Args:
-        fp (str): Input filepath to an .nsihdr file
-
-    Returns:
-        dict: metadata about NSI project
-    """
-    global INITIAL_LOWER_BOUND
-    global INITIAL_UPPER_BOUND
-
-    with open(fp, 'r') as ifp:
-        document = ifp.readlines()
-
-    source_to_detector_distance = None
-    source_to_table_distance = None
-    bit_depth = None
-    nsidats = []
-
-    bit_depth_query = re.search(bit_depth_pattern, line)
-    if bit_depth_query:
-        bit_depth = int(bit_depth_query.group('value'))
-
-    dimensions_query = re.search(dimensions_pattern, line)
-    if dimensions_query:
-        dimensions = [ dimensions_query.group('x'), dimensions_query.group('z'), dimensions_query.group('num_slices') ]
-
-    # Check if the .nsihdr already contains the data range
-    # If it exists, we only have to read the .nsidat files once instead of twice
-    data_range_query = re.search(data_range_pattern, line)
-    if data_range_query:
-        INITIAL_LOWER_BOUND = float(data_range_query.group('lower_bound'))
-        INITIAL_UPPER_BOUND = float(data_range_query.group('upper_bound'))
-
-    # Temporarily set pitch as 0.127, as it should not change until we get a
-    # new detector
-    pitch = 0.127
-
-    # TODO(tparker): As far as I am aware, the data will always be of type DENSITY
-    ObjectModel = 'DENSITY'
-
-    resolution = ( pitch / source_to_detector_distance ) * source_to_table_distance
-    resolution_rounded = round(resolution, 4)
-    nsidats.sort() # make sure that the files are in alphanumeric order
-
-    return {
-        "datafiles": nsidats,
-        "source_to_detector_distance": source_to_detector_distance,
-        "source_to_table_distance": source_to_table_distance,
-        "pitch": pitch,
-        "resolution": resolution,
-        "resolution_rounded": [resolution_rounded]*3,
-        "bit_depth": bit_depth,
-        "zoom_factor": round(source_to_detector_distance / source_to_table_distance, 2),
-        "bit_depth_type": dat.bitdepth(bit_depth),
-        "ObjectModel": ObjectModel,
-        "dimensions": dimensions
-    }
-
-def process(args, fp, ofp):
+def process(args, fp, export_path):
     """Converts NSIHDR files to a single .RAW + .DAT
 
     Args:
+
         args (ArgumentParser): user arguments from `argparse`
         fp (str): filepath to input .NSIHDR file
-        ofp (str): filepath to output .RAW file
+        export_path (str): filepath to output .RAW file
     """
     logging.debug(f'{fp=}')
 
@@ -137,12 +50,10 @@ def process(args, fp, ofp):
 
     dname = os.path.dirname(fp)
     bname = os.path.basename(os.path.splitext(fp)[0])
-    export_path = os.path.join(dname, f'{bname}.raw')
-    logging.debug(f"{export_path=}")
     dat_path = os.path.join(dname, f'{bname}.dat')
-    logging.debug(f"{dat_path=}")
 
     dat.write(dat_path, dimensions = (width, height, depth), thickness = voxel_size)
+    logging.debug(f"Generated '{dat_path}'")
 
     if os.path.exists(export_path) and args.force == True:
         os.remove(export_path)
@@ -152,14 +63,18 @@
         logging.warning(f"Removed old '{dat_path}'")
 
     with open(export_path, 'ab') as raw_ofp:
-        pbar = tqdm(total= depth, desc=f"Exporting {bname}")
+        if not args.verbose:
+            pbar = tqdm(total= depth, desc=f"Exporting {bname}")
         for n in range(depth):
            cross_section = v.read_slice(n)
            cross_section = np.array(cross_section, dtype="float32")
            cross_section = scale(cross_section, data_min, data_max, 0, 65535).astype(np.uint16)
            cross_section.tofile(raw_ofp)
-           pbar.update()
-        pbar.close()
+
+           if not args.verbose:
+               pbar.update()
+        if not args.verbose:
+            pbar.close()
 
 def main(args):
     start_time = time()
@@ -168,48 +83,89 @@ def main(args):
         # Gather all files
         args.files = []
         for p in args.path:
-            for root, dirs, files in os.walk(p):
+            for root, _, files in os.walk(p):
                 for filename in files:
                     args.files.append(os.path.join(root, filename))
 
-        # Append any loose, explicitly defined paths to .RAW files
+        # Append any loose, explicitly defined paths to .nsihdr files
         args.files.extend([ f for f in args.path if f.endswith('.nsihdr') ])
 
         # Filter out non-NSIHDR files
         args.files = [ f for f in args.files if f.endswith('.nsihdr') ]
 
         # Get all RAW files
-        logging.debug(f"All files: {args.files}")
+        logging.debug(f"All files: {pformat(args.files)}")
         args.files = list(set(args.files)) # remove duplicates
-        logging.info(f"Found {len(args.files)} volume(s).")
-        logging.debug(f"Unique files: {args.files}")
+        logging.debug(f"Unique files: {pformat(args.files)}")
+
+        # If file overwriting is disabled
+        if not args.force:
+            kept_volumes = []
+            skipped_volumes = []
+            for fp in args.files:
+                dname = os.path.dirname(fp)
+                bname = os.path.basename(os.path.splitext(fp)[0])
+                export_path = os.path.join(dname, f'{bname}.raw')
+                if os.path.exists(export_path) and os.path.isfile(export_path):
+                    skipped_volumes.append(export_path)
+                else:
+                    kept_volumes.append(fp)  # keep the input .nsihdr path for processing
+            args.files = kept_volumes
+            total_volumes = len(kept_volumes) + len(skipped_volumes)
+            logging.debug(f"{kept_volumes=}")
+            logging.debug(f"{skipped_volumes=}")
+
+            logging.info(f"Found {total_volumes} volume(s). (Unchanged: {len(kept_volumes)}, Skipped: {len(skipped_volumes)})")
+
+        # Otherwise, overwrite files
+        else:
+            unprocessed_volumes = []
+            existing_volumes = []
+            for fp in args.files:
+                dname = os.path.dirname(fp)
+                bname = os.path.basename(os.path.splitext(fp)[0])
+                export_path = os.path.join(dname, f'{bname}.raw')
+                if os.path.exists(export_path) and os.path.isfile(export_path):
+                    existing_volumes.append(export_path)
+                else:
+                    unprocessed_volumes.append(export_path)
+            total_volumes = len(existing_volumes) + len(unprocessed_volumes)
+
+            logging.debug(f"{existing_volumes=}")
+            logging.debug(f"{unprocessed_volumes=}")
+
+            logging.info(f"Found {total_volumes} volume(s). (Overwriting: {len(existing_volumes)}, New: {len(unprocessed_volumes)})")
+
     except Exception as err:
         logging.error(err)
         raise err
     else:
         # For each provided volume...
-        pbar = tqdm(total = len(args.files), desc=f"Overall progress")
+        if not args.verbose:
+            pbar = tqdm(total = len(args.files), desc=f"Overall progress")
         for fp in args.files:
             logging.debug(f"Processing '{fp}'")
-            ofp_directory = os.path.dirname(fp)
-            logging.debug(f"{ofp_directory=}")
-            ofp_filename = os.path.basename(os.path.splitext(fp)[0])
-            logging.debug(f"{ofp_filename=}")
-            ofp = os.path.join(ofp_directory, ofp_filename)
-            logging.debug(f"{ofp=}")
+            dname = os.path.dirname(fp)
+            bname = os.path.basename(os.path.splitext(fp)[0])
+            export_path = os.path.join(dname, f'{bname}.raw')
+            logging.debug(f"{export_path=}")
+            dat_path = os.path.join(dname, f'{bname}.dat')
+            logging.debug(f"{dat_path=}")
 
             # Determine output location and check for conflicts
-            if os.path.exists(ofp) and os.path.isfile(ofp):
+            if os.path.exists(export_path) and os.path.isfile(export_path):
                 # If file creation not forced, do not process volume, return
                 if args.force == False:
-                    logging.info(f"File already exists. Skipping {ofp}.")
+                    logging.info(f"File already exists. Skipping {export_path}.")
                     continue
                 # Otherwise, user forced file generation
                 else:
-                    logging.warning(f"FileExistsWarning - {ofp}. File will be overwritten.")
+                    logging.warning(f"FileExistsWarning - {export_path}. File will be overwritten.")
 
             # Extract slices and cast to desired datatype
-            process(args, fp, ofp)
+            process(args, fp, export_path)
 
-            pbar.update()
-        pbar.close()
+            if not args.verbose:
+                pbar.update()
+        if not args.verbose:
+            pbar.close()
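
The scale() helper used in process() is imported from elsewhere in rawtools and is not part of this diff; a minimal sketch of the linear rescaling it presumably performs, shown only to clarify the float32-to-uint16 cast above:

    import numpy as np

    def scale(arr, old_min, old_max, new_min, new_max):
        """Linearly map values from [old_min, old_max] onto [new_min, new_max]."""
        return (arr - old_min) / (old_max - old_min) * (new_max - new_min) + new_min

    # As used in process(): map [data_min, data_max] onto the full uint16 range
    data_min, data_max = 0.0, 1.0  # illustrative bounds; the real values come from the volume
    cross_section = np.random.rand(4, 4).astype("float32")
    as_uint16 = scale(cross_section, data_min, data_max, 0, 65535).astype(np.uint16)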

setup.py

Lines changed: 1 addition & 1 deletion
@@ -13,7 +13,7 @@
 with open('HISTORY.rst') as history_file:
     history = history_file.read()
 
-requirements = ['numpy==1.19.3', 'tqdm', 'Pillow', ]
+requirements = ['numpy==1.19.3', 'tqdm', 'Pillow', 'ttkthemes' ]
 
 setup_requirements = ['pytest-runner', ]
 
