# batch_run_patches.py
# Author: Christopher Randolph Rhodes
# Maintain DataFrame index throughout workflow, export patch filenames and
# complete source filepaths to workflow DataFrame; prepopulate patch UUID
from pathlib import Path
import re
from time import localtime, strftime
import pandas as pd
from extensions.chaeo.workflows import export_patches_from_multichannel_zstack
from model_server.accessors import InMemoryDataAccessor, write_accessor_data_to_file
if __name__ == '__main__':
    # Batch-run the patch-export workflow over a directory of multichannel
    # z-stack .CZI files, accumulating per-object, timing, and parameter CSVs
    # plus intermediate TIFFs under a fresh batch-output directory.
    # TODO: try/catch blocks and error handling around workflow calls
    # TODO: pack JSON-serializable workflow inputs

    where_czi = Path(
        'c:/Users/rhodes/projects/proj0004-marine-photoactivation/data/exp0038/AutoMic/20230906-163415/Selection'
    )
    where_output_root = Path(
        'c:/Users/rhodes/projects/proj0011-plankton-seg/exp0009/output'
    )

    # Choose a unique batch directory name for today: one greater than the
    # highest index of any existing same-day batch-output directory.
    yyyymmdd = strftime('%Y%m%d', localtime())
    idx = 0
    for ff in where_output_root.iterdir():
        ma = re.match(rf'batch-output-{yyyymmdd}-(\d+)', ff.name)
        if ma:
            idx = max(idx, int(ma.groups()[0]) + 1)
    where_output = where_output_root / f'batch-output-{yyyymmdd}-{idx:04d}'
    # NOTE(review): nothing here creates where_output; presumably the workflow
    # or write_accessor_data_to_file does — confirm, else mkdir is needed.

    csv_args = {'mode': 'w', 'header': True}  # when creating file
    px_ilp = Path.home() / 'model-server' / 'ilastik' / 'AF405-bodies_boundaries.ilp'

    # Filename convention: Selection--W<well>--P<position>-T<timepoint>.
    # Compiled once; a raw string avoids invalid-escape warnings for \d.
    pattern = re.compile(r'Selection--W(\d+)--P(\d+)-T(\d+)')

    for ff in where_czi.iterdir():
        print(ff)
        if ff.suffix.upper() != '.CZI':
            continue
        ma = pattern.match(ff.stem)
        if ma is None:
            # .CZI file that does not follow the naming convention; skipping
            # avoids the AttributeError the unguarded ma.groups() would raise.
            continue
        if int(ma.groups()[1]) > 10:  # skip second half of set (position > 10)
            continue
        export_kwargs = {
            # ff from iterdir() is already the full path under where_czi
            'input_zstack_path': str(ff),
            'ilastik_project_file': str(px_ilp),
            'pxmap_threshold': 0.25,
            'pixel_class': 0,
            'zmask_channel': 0,
            'patches_channel': 4,
            'where_output': str(where_output),
            'mask_type': 'boxes',
            'zmask_filters': {'area': (1e3, 1e8)},
            'zmask_expand_box_by': (128, 3),
        }
        result = export_patches_from_multichannel_zstack(**export_kwargs)

        # parse and record results
        df = result['dataframe']
        df['source_path'] = ff
        df.to_csv(where_output / 'df_objects.csv', **csv_args)
        pd.DataFrame(result['timer_results'], index=[0]).to_csv(where_output / 'timer_results.csv', **csv_args)
        pd.json_normalize(export_kwargs).to_csv(where_output / 'workflow_params.csv', **csv_args)
        csv_args = {'mode': 'a', 'header': False}  # append to CSV from here on

        # export intermediate data if flagged
        for k in result['interm'].keys():
            write_accessor_data_to_file(
                where_output / k / (ff.stem + '.tif'),
                InMemoryDataAccessor(result['interm'][k])
            )