Commit 5125d44c authored by Christopher Randolph Rhodes

Prompt for user input and compare results of the same model before and after opening it in ilastik

parent 4bcad216
@@ -13,9 +13,13 @@ from extensions.ilastik.models import IlastikObjectClassifierFromSegmentationMod
 from model_server.accessors import generate_file_accessor, GenericImageDataAccessor, InMemoryDataAccessor, write_accessor_data_to_file

 class PatchStackObjectClassifier(IlastikObjectClassifierFromSegmentationModel):
+    """
+    Wrap ilastik object classification for inputs comprising raw image and binary segmentation masks, both represented
+    as time-series images where each frame contains only one object.
+    """

     @staticmethod
-    def make_tczyx(acc):
+    def make_tczyx(acc: GenericImageDataAccessor):
         assert acc.chroma == 1
         tyx = np.moveaxis(
             acc.data[:, :, 0, :],  # YX(C)Z
@@ -23,7 +27,6 @@ class PatchStackObjectClassifier(IlastikObjectClassifierFromSegmentationModel):
             [0, 1, 2]
         )
         return np.expand_dims(tyx, (1, 2))
-        # return tyx

     def infer(self, input_img: GenericImageDataAccessor, segmentation_img: GenericImageDataAccessor) -> (np.ndarray, dict):
         assert segmentation_img.is_mask()
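For context on the axis handling in make_tczyx above: the accessor stores a patch stack as YX(C)Z, while the batch workflow expects TCZYX with singleton channel and z axes. A minimal numpy sketch of that reordering, using toy shapes that are assumptions rather than values from the project:

import numpy as np

# toy single-channel patch stack stored as YX(C)Z: 16x16 patches, 1 channel, 8 frames
yxcz = np.zeros((16, 16, 1, 8), dtype=np.uint8)

# drop the singleton channel axis and move the frame (Z) axis to the front: YXZ -> ZYX
tyx = np.moveaxis(yxcz[:, :, 0, :], 2, 0)          # shape (8, 16, 16)

# re-insert singleton channel and z axes to get TCZYX, one object per frame
tczyx = np.expand_dims(tyx, (1, 2))                # shape (8, 1, 1, 16, 16)
assert tczyx.shape == (8, 1, 1, 16, 16)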
@@ -39,11 +42,11 @@ class PatchStackObjectClassifier(IlastikObjectClassifierFromSegmentationModel):
                 }
             ]

         obmaps = self.shell.workflow.batchProcessingApplet.run_export(dsi, export_to_array=True)  # [z x h x w x n]

         assert len(obmaps) == 1, 'ilastik generated more than one object map'

-        # for some reason these axes get scrambled to Z(1)YX(1)
+        # for some reason ilastik scrambles these axes to Z(1)YX(1)
         assert obmaps[0].shape == (input_img.nz, 1, input_img.hw[0], input_img.hw[1], 1)
         yxcz = np.moveaxis(
             obmaps[0][:, :, :, :, 0],
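The comment above notes that run_export returns the object map with axes as Z(1)YX(1). A standalone sketch of the reorder back to the accessor's YXCZ layout; the toy shapes and the explicit moveaxis indices are assumptions, since the hunk hides the exact arguments used in the file:

import numpy as np

# hypothetical ilastik batch output for a 12-frame stack of 32x48 patches, axes Z(1)YX(1)
obmap = np.zeros((12, 1, 32, 48, 1), dtype=np.uint8)

# drop the trailing singleton axis, then send Z to the back and Y, X to the front: Z(1)YX -> YX(C)Z
yxcz = np.moveaxis(obmap[:, :, :, :, 0], [0, 1, 2, 3], [3, 2, 0, 1])
assert yxcz.shape == (32, 48, 1, 12)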
@@ -54,7 +57,13 @@ class PatchStackObjectClassifier(IlastikObjectClassifierFromSegmentationModel):
         assert yxcz.shape == input_img.shape
         return InMemoryDataAccessor(data=yxcz), {'success': True}

-def get_dataset_info(h5, lane=0):
+def get_dataset_info(h5: h5py.File, lane: int = 0):
+    """
+    Report out specific datasets in ilastik project file HDF5
+    :param h5: handle to ilastik project file, as h5py.File object
+    :param lane: ilastik lane identifier
+    :return: (dict) selected data values from project file
+    """
     lns = f'{lane:04d}'
     lane = f'Input Data/infos/lane{lns}'
     info = {}
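get_dataset_info reads straight out of the project file, which is an HDF5 container; the 'Input Data/infos/laneNNNN' path used above holds one group per dataset role. A minimal h5py sketch of walking that group (the filename is hypothetical):

import h5py

# open an ilastik project file read-only; 'template_obj.ilp' is a placeholder name
with h5py.File('template_obj.ilp', 'r') as h5:
    lane = 'Input Data/infos/lane0000'
    # each child group describes one dataset role, e.g. 'Raw Data' or 'Segmentation Image'
    for role, grp in h5[lane].items():
        print(role, list(grp.keys()))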
@@ -79,13 +88,22 @@ def get_dataset_info(h5, lane=0):
     return info

-def generate_ilastik_object_classifier(template_ilp, where: str, lane=0):
+def generate_ilastik_object_classifier(template_ilp: str, where: str, stack_name: str = 'train', lane: int = 0):
+    """
+    Starting with a template project file, transfer input data and labels to a duplicate project file.
+    :param template_ilp: absolute path to existing ilastik object classifier to use as a template
+    :param where: location of folder containing input data, segmentation maps, labels, and label descriptions
+    :param stack_name: prefix of .tif and .csv files that contain classifier training data (e.g. train, test)
+    :param lane: ilastik lane identifier
+    :return: (str) name of new ilastik classifier project file
+    """

     # validate z-stack input data
     root = Path(where)
     paths = {
-        'Raw Data': root / 'zstack_train_raw.tif',
-        'Segmentation Image': root / 'zstack_train_mask.tif',
+        'Raw Data': root / f'zstack_{stack_name}_raw.tif',
+        'Segmentation Image': root / f'zstack_{stack_name}_mask.tif',
     }
     accessors = {k: generate_file_accessor(pa) for k, pa in paths.items()}
@@ -97,9 +115,9 @@ def generate_ilastik_object_classifier(template_ilp, where: str, lane=0):
     nz = accessors['Raw Data'].nz

     # now load CSV
-    csv_path = root / 'train_stack.csv'
+    csv_path = root / f'{stack_name}_stack.csv'
     assert csv_path.exists()
-    df_patches = pd.read_csv(root / 'train_stack.csv')
+    df_patches = pd.read_csv(csv_path)
     assert np.all(
         df_patches['zi'].sort_values().to_numpy() == np.arange(0, nz)
     )
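The assertion above guards against missing or duplicated frames: the patch table's zi column must contain every frame index 0..nz-1 exactly once. A toy illustration with a made-up table (column names other than zi are assumptions):

import numpy as np
import pandas as pd

# made-up patch table: one row per frame in the z-stack, in arbitrary order
df_patches = pd.DataFrame({'zi': [3, 0, 2, 1], 'label': [2, 1, 1, 2]})
nz = 4

# same check as above: sorted frame indices must be exactly 0..nz-1
assert np.all(df_patches['zi'].sort_values().to_numpy() == np.arange(0, nz))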
@@ -142,7 +160,6 @@ def generate_ilastik_object_classifier(template_ilp, where: str, lane=0):

     # change object labels
     la_groupname = f'ObjectClassification/LabelInputs/{lns}'
     del h5[la_groupname]
-
     lag = h5.create_group(la_groupname)
     for zi in range(0, nz):
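Replacing the label inputs above amounts to deleting the template's group wholesale and recreating it with one entry per frame. A hedged h5py sketch of that pattern; the file name, per-frame dataset naming, and label array contents are assumptions, and the real per-frame writes are hidden by the hunk:

import h5py
import numpy as np

with h5py.File('new_classifier.ilp', 'r+') as h5:        # placeholder file name
    la_groupname = 'ObjectClassification/LabelInputs/0000'
    if la_groupname in h5:
        del h5[la_groupname]                             # drop the template's labels
    lag = h5.create_group(la_groupname)
    # one small array per frame; values here are invented for illustration
    labels_per_frame = [np.array([0.0, 2.0]), np.array([0.0, 1.0])]
    for zi, la in enumerate(labels_per_frame):
        lag.create_dataset(str(zi), data=la)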
@@ -151,6 +168,12 @@ def generate_ilastik_object_classifier(template_ilp, where: str, lane=0):
     return new_ilp

 def compare_object_maps(truth: GenericImageDataAccessor, inferred: GenericImageDataAccessor) -> pd.DataFrame:
+    """
+    Compare two object maps to assess classification results
+    :param truth: t-stack of truth objects
+    :param inferred: t-stack of inferred objects, presumably with same segmentation boundaries as truth
+    :return: DataFrame comparing results for each frame in truth and inferred stacks
+    """
     assert truth.shape == inferred.shape
     assert np.all((truth.data == 0) == (inferred.data == 0))
     assert inferred.chroma == 1
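Since each frame of the patch stacks contains exactly one object, the comparison can be reduced to one label per frame on each side. A simplified, array-only sketch of that idea; it ignores the accessor classes, and only the truth_label/inferred_label column names are taken from the main block below:

import numpy as np
import pandas as pd

def compare_frames(truth: np.ndarray, inferred: np.ndarray) -> pd.DataFrame:
    # both arrays are (t, y, x) label maps with at most one object per frame
    records = []
    for zi in range(truth.shape[0]):
        records.append({
            'zi': zi,
            'truth_label': int(truth[zi].max()),
            'inferred_label': int(inferred[zi].max()),
        })
    return pd.DataFrame(records)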
@@ -180,25 +203,43 @@ def compare_object_maps(truth: GenericImageDataAccessor, inferred: GenericImageD
 if __name__ == '__main__':
     root = Path('c:/Users/rhodes/projects/proj0011-plankton-seg/')
     template_ilp = root / 'exp0014/template_obj.ilp'
-    # template_ilp = root / 'exp0014/test_obj_from_seg.ilp'
-    where_patch_stack = root / 'exp0009/output/labeled_patches-20231016-0002'
+    where_patch_stack = root / 'exp0009/output/labeled_patches-20231018-0000'

+    # auto-populate an object classifier
     new_ilp = generate_ilastik_object_classifier(
         template_ilp,
         where_patch_stack,
+        stack_name='train'
     )

-    train_zstack_raw = generate_file_accessor(where_patch_stack / 'zstack_train_raw.tif')
-    train_zstack_mask = generate_file_accessor(where_patch_stack / 'zstack_train_mask.tif')
-
-    mod = PatchStackObjectClassifier({'project_file': new_ilp})
-
-    result_acc, _ = mod.infer(train_zstack_raw, train_zstack_mask)
-    write_accessor_data_to_file(where_patch_stack / 'result.tif', result_acc)
-    print(where_patch_stack / 'result.tif')
-
-    # write comparison
-    train_labels = generate_file_accessor(where_patch_stack / 'zstack_train_label.tif')
-    df_comp = compare_object_maps(train_labels, result_acc)
-    df_comp.to_csv(where_patch_stack / autonumber_new_file(where_patch_stack, 'comp', 'csv'), index=False)
+    def infer_and_compare(suffix):
+        # infer object labels from the same data used to train the classifier
+        train_zstack_raw = generate_file_accessor(where_patch_stack / 'zstack_train_raw.tif')
+        train_zstack_mask = generate_file_accessor(where_patch_stack / 'zstack_train_mask.tif')
+        mod = PatchStackObjectClassifier({'project_file': where_patch_stack / new_ilp})
+        result_acc, _ = mod.infer(train_zstack_raw, train_zstack_mask)
+        write_accessor_data_to_file(where_patch_stack / f'zstack_train_result_{suffix}.tif', result_acc)
+
+        # write comparison tables
+        train_truth_labels = generate_file_accessor(where_patch_stack / f'zstack_train_label.tif')
+        df_comp = compare_object_maps(train_truth_labels, result_acc)
+        df_comp.to_csv(
+            where_patch_stack / autonumber_new_file(
+                where_patch_stack, f'compare_train_result_{suffix}', 'csv'
+            ),
+            index=False
+        )
+        print('Truth and inferred labels match?')
+        print(pd.value_counts(df_comp['truth_label'] == df_comp['inferred_label']))
+
+    # infer object labels from the same data used to train the classifier
+    infer_and_compare('before')
+
+    # prompt user input when ilastik file has been modified in-app
+    print(f'Press enter when project file {new_ilp} has been updated in ilastik')
+    input()
+
+    # repeat inference with the same project file, but a fresh model handle
+    infer_and_compare('after')
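After both runs, the 'before' and 'after' comparison tables can be read back and summarized side by side. A hypothetical follow-up; the CSV names depend on autonumber_new_file and are assumptions here:

import pandas as pd

for suffix in ('before', 'after'):
    # placeholder filenames; the real names carry an auto-incremented index
    df = pd.read_csv(f'compare_train_result_{suffix}.csv')
    agreement = (df['truth_label'] == df['inferred_label']).mean()
    print(f'{suffix}: {agreement:.1%} of frames match the training labels')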