@@ -6,6 +6,7 @@
 import re
 import sys
 from multiprocessing import Pool, cpu_count
+from pprint import pformat
 from time import time
 
 import numpy as np
@@ -21,102 +22,14 @@
 sys.path.append(includesdir)
 from rawtools import nsiefx
 
-
-def write_metadata(args, metadata):
-    """Generates a .dat file from information gathered from an .nsihdr file
-
-    NOTE(tparker): Temporarily, I am writing the minimum and maximum values found
-    in the 32-bit float version of the files in case we ever need to convert the
-    uint16 version back to float32.
-
-    Args:
-        args (ArgumentParser): user arguments from `argparse`
-        metadata (dict): dictionary of metadata created from reading .nsihdr file
-    """
-    ObjectFileName = args.output
-    resolution = ' '.join(metadata['dimensions'])
-    slice_thickness = ' '.join([str(rr) for rr in metadata['resolution_rounded']])
-    dat_filepath = f'{os.path.splitext(args.output)[0]}.dat'
-    output_string = f"""ObjectFileName: {ObjectFileName}\nResolution: {resolution}\nSliceThickness: {slice_thickness}\nFormat: {metadata['bit_depth_type']}\nObjectModel: {metadata['ObjectModel']}"""
-
-    dat.write(dat_filepath, metadata['dimensions'], metadata['resolution_rounded'])
-    # with open(dat_filepath, 'w') as ofp:
-    #     print(f'Generating {dat_filepath}')
-    #     ofp.write(output_string)
-
-    bounds_filepath = os.path.join(args.cwd, f'{os.path.splitext(args.output)[0]}.float32.range')
-    with open(bounds_filepath, 'w') as ofp:
-        print(f'Generating {bounds_filepath}')
-        bounds = f'{INITIAL_LOWER_BOUND} {INITIAL_UPPER_BOUND}'
-        ofp.write(bounds)
-
-def read_nsihdr(args, fp):
-    """Collects relevant metadata from .nsihdr file
-
-    Args:
-        fp (str): Input filepath to an .nsihdr file
-
-    Returns:
-        dict: metadata about NSI project
-    """
-    global INITIAL_LOWER_BOUND
-    global INITIAL_UPPER_BOUND
-
-    with open(fp, 'r') as ifp:
-        document = ifp.readlines()
-
-    source_to_detector_distance = None
-    source_to_table_distance = None
-    bit_depth = None
-    nsidats = []
-
-    for line in document:
-        bit_depth_query = re.search(bit_depth_pattern, line)
-        if bit_depth_query:
-            bit_depth = int(bit_depth_query.group('value'))
-
-        dimensions_query = re.search(dimensions_pattern, line)
-        if dimensions_query:
-            dimensions = [dimensions_query.group('x'), dimensions_query.group('z'), dimensions_query.group('num_slices')]
-
-        # Check if the .nsihdr already contains the data range
-        # If it exists, we only have to read the .nsidat files once instead of twice
-        data_range_query = re.search(data_range_pattern, line)
-        if data_range_query:
-            INITIAL_LOWER_BOUND = float(data_range_query.group('lower_bound'))
-            INITIAL_UPPER_BOUND = float(data_range_query.group('upper_bound'))
-
-    # Temporarily set pitch as 0.127, as it should not change until we get a
-    # new detector
-    pitch = 0.127
-
-    # TODO(tparker): As far as I am aware, the data will always be of type DENSITY
-    ObjectModel = 'DENSITY'
-
-    resolution = (pitch / source_to_detector_distance) * source_to_table_distance
-    resolution_rounded = round(resolution, 4)
-    nsidats.sort()  # make sure that the files are in alphanumeric order
-
-    return {
-        "datafiles": nsidats,
-        "source_to_detector_distance": source_to_detector_distance,
-        "source_to_table_distance": source_to_table_distance,
-        "pitch": pitch,
-        "resolution": resolution,
-        "resolution_rounded": [resolution_rounded] * 3,
-        "bit_depth": bit_depth,
-        "zoom_factor": round(source_to_detector_distance / source_to_table_distance, 2),
-        "bit_depth_type": dat.bitdepth(bit_depth),
-        "ObjectModel": ObjectModel,
-        "dimensions": dimensions
-    }
-
-def process(args, fp, ofp):
+def process(args, fp, export_path):
     """Converts NSIHDR files to a single .RAW + .DAT
 
     Args:
         args (ArgumentParser): user arguments from `argparse`
         fp (str): filepath to input .NSIHDR file
-        ofp (str): filepath to output .RAW file
+        export_path (str): filepath to output .RAW file
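+
+    Example (hypothetical paths, shown for illustration only):
+        >>> process(args, 'scans/root_crown.nsihdr', 'scans/root_crown.raw')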
     """
     logging.debug(f'{fp=}')
 
@@ -137,12 +50,10 @@ def process(args, fp, ofp):
 
     dname = os.path.dirname(fp)
     bname = os.path.basename(os.path.splitext(fp)[0])
-    export_path = os.path.join(dname, f'{bname}.raw')
-    logging.debug(f"{export_path=}")
     dat_path = os.path.join(dname, f'{bname}.dat')
-    logging.debug(f"{dat_path=}")
 
     dat.write(dat_path, dimensions=(width, height, depth), thickness=voxel_size)
+    logging.debug(f"Generated '{dat_path}'")
 
     if os.path.exists(export_path) and args.force == True:
         os.remove(export_path)
@@ -152,14 +63,18 @@ def process(args, fp, ofp):
         logging.warning(f"Removed old '{dat_path}'")
 
     with open(export_path, 'ab') as raw_ofp:
-        pbar = tqdm(total=depth, desc=f"Exporting {bname}")
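+        # Show the per-slice progress bar only when verbose logging is off,
+        # presumably so tqdm output does not interleave with log messages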
+        if not args.verbose:
+            pbar = tqdm(total=depth, desc=f"Exporting {bname}")
         for n in range(depth):
             cross_section = v.read_slice(n)
             cross_section = np.array(cross_section, dtype="float32")
             cross_section = scale(cross_section, data_min, data_max, 0, 65535).astype(np.uint16)
             cross_section.tofile(raw_ofp)
+
+            if not args.verbose:
+                pbar.update()
+        if not args.verbose:
+            pbar.close()
 
 def main(args):
     start_time = time()
@@ -168,48 +83,89 @@ def main(args):
         # Gather all files
         args.files = []
         for p in args.path:
-            for root, dirs, files in os.walk(p):
+            for root, _, files in os.walk(p):
                 for filename in files:
                     args.files.append(os.path.join(root, filename))
 
-        # Append any loose, explicitly defined paths to .RAW files
+        # Append any loose, explicitly defined paths to .nsihdr files
         args.files.extend([f for f in args.path if f.endswith('.nsihdr')])
 
         # Filter out non-NSIHDR files
         args.files = [f for f in args.files if f.endswith('.nsihdr')]
 
         # Get all RAW files
-        logging.debug(f"All files: {args.files}")
+        logging.debug(f"All files: {pformat(args.files)}")
         args.files = list(set(args.files))  # remove duplicates
-        logging.info(f"Found {len(args.files)} volume(s).")
-        logging.debug(f"Unique files: {args.files}")
+        logging.debug(f"Unique files: {pformat(args.files)}")
+
+        # If file overwriting is disabled
+        if not args.force:
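+            # Partition the inputs: volumes whose .raw output already exists
+            # are skipped; the remainder are kept for processing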
+            kept_volumes = []
+            skipped_volumes = []
+            for fp in args.files:
+                dname = os.path.dirname(fp)
+                bname = os.path.basename(os.path.splitext(fp)[0])
+                export_path = os.path.join(dname, f'{bname}.raw')
+                if os.path.exists(export_path) and os.path.isfile(export_path):
+                    skipped_volumes.append(export_path)
+                else:
+                    # Keep the .nsihdr input (not the .raw output path), since
+                    # it is what gets processed below
+                    kept_volumes.append(fp)
+            args.files = kept_volumes
+            total_volumes = len(kept_volumes) + len(skipped_volumes)
+            logging.debug(f"{kept_volumes=}")
+            logging.debug(f"{skipped_volumes=}")
+
+            logging.info(f"Found {total_volumes} volume(s). (To process: {len(kept_volumes)}, Skipped: {len(skipped_volumes)})")
+
+        # Otherwise, overwrite files
+        else:
+            unprocessed_volumes = []
+            existing_volumes = []
+            for fp in args.files:
+                dname = os.path.dirname(fp)
+                bname = os.path.basename(os.path.splitext(fp)[0])
+                export_path = os.path.join(dname, f'{bname}.raw')
+                if os.path.exists(export_path) and os.path.isfile(export_path):
+                    existing_volumes.append(export_path)
+                else:
+                    unprocessed_volumes.append(export_path)
+            total_volumes = len(existing_volumes) + len(unprocessed_volumes)
+
+            logging.debug(f"{existing_volumes=}")
+            logging.debug(f"{unprocessed_volumes=}")
+
+            logging.info(f"Found {total_volumes} volume(s). (Overwriting: {len(existing_volumes)}, New: {len(unprocessed_volumes)})")
+
     except Exception as err:
         logging.error(err)
         raise err
     else:
         # For each provided volume...
-        pbar = tqdm(total=len(args.files), desc=f"Overall progress")
+        if not args.verbose:
+            pbar = tqdm(total=len(args.files), desc=f"Overall progress")
         for fp in args.files:
             logging.debug(f"Processing '{fp}'")
-            ofp_directory = os.path.dirname(fp)
-            logging.debug(f"{ofp_directory=}")
-            ofp_filename = os.path.basename(os.path.splitext(fp)[0])
-            logging.debug(f"{ofp_filename=}")
-            ofp = os.path.join(ofp_directory, ofp_filename)
-            logging.debug(f"{ofp=}")
+            dname = os.path.dirname(fp)
+            bname = os.path.basename(os.path.splitext(fp)[0])
+            export_path = os.path.join(dname, f'{bname}.raw')
+            logging.debug(f"{export_path=}")
+            dat_path = os.path.join(dname, f'{bname}.dat')
+            logging.debug(f"{dat_path=}")
 
             # Determine output location and check for conflicts
-            if os.path.exists(ofp) and os.path.isfile(ofp):
+            if os.path.exists(export_path) and os.path.isfile(export_path):
                 # If file creation not forced, do not process volume, return
                 if args.force == False:
-                    logging.info(f"File already exists. Skipping {ofp}.")
+                    logging.info(f"File already exists. Skipping {export_path}.")
                     continue
                 # Otherwise, user forced file generation
                 else:
-                    logging.warning(f"FileExistsWarning - {ofp}. File will be overwritten.")
+                    logging.warning(f"FileExistsWarning - {export_path}. File will be overwritten.")
 
             # Extract slices and cast to desired datatype
-            process(args, fp, ofp)
+            process(args, fp, export_path)
 
-            pbar.update()
-        pbar.close()
+            if not args.verbose:
+                pbar.update()
+        if not args.verbose:
+            pbar.close()
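
Note: `scale` (used above to map the 32-bit float densities into the uint16 range) is defined elsewhere in the module. A minimal sketch of a compatible linear min-max rescale, assuming only the (array, lower, upper, a, b) signature seen at the call site:

    import numpy as np

    def scale(arr, lower, upper, a, b):
        """Linearly map values from [lower, upper] onto [a, b]."""
        arr = np.asarray(arr, dtype='float32')
        return (arr - lower) / (upper - lower) * (b - a) + a

    # e.g. scale(cross_section, data_min, data_max, 0, 65535).astype(np.uint16)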