Commit f6429ae3 authored by Christopher Randolph Rhodes

Can set input dataset parameters

parent ed29f8f6
@@ -42,6 +42,16 @@ tifffile = {
     'z': 7,
 }
+filename = 'mono_zstack_mask.tif'
+monozstackmask = {
+    'filename': filename,
+    'path': root / filename,
+    'w': 256,
+    'h': 256,
+    'c': 1,
+    'z': 85
+}
 ilastik = {
     'pixel_classifier': 'demo_px.ilp',
     'object_classifier': 'demo_obj.ilp',
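The added monozstackmask entry in conf.testing describes a single-channel, 85-slice, 256x256 mask TIFF fixture; root is presumably the fixture directory defined earlier in that module. As a hedged sketch (not part of the commit, helper name and random contents invented, the real fixture was likely produced differently), a file with those properties could be synthesized like this:

import numpy as np
import tifffile

def make_mono_zstack_mask(path, w=256, h=256, nz=85):
    # Hypothetical fixture generator: 85 pages of 256x256 uint8 values that are
    # strictly 0 or 255, so the relaxed is_mask() check later in this diff
    # would accept the resulting stack.
    rng = np.random.default_rng(0)
    stack = (rng.random((nz, h, w)) > 0.5).astype('uint8') * 255
    tifffile.imwrite(path, stack)  # a 3D array is written as one page per z-slice

# make_mono_zstack_mask('mono_zstack_mask.tif')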
import csv
from pathlib import Path
import h5py
import ilastik.applets.objectClassification
import json
import numpy as np
import pandas as pd
import tifffile
import uuid
from extensions.ilastik.models import IlastikObjectClassifierModel
from model_server.accessors import generate_file_accessor
def get_dataset_info(h5):
@@ -14,11 +12,17 @@ def get_dataset_info(h5):
     info = {}
     for gk in ['Raw Data', 'Segmentation Image']:
         info[gk] = {}
-        for dk in ['location', 'filePath', 'shape']:
+        for dk in ['location', 'filePath', 'shape', 'nickname']:
             try:
                 info[gk][dk] = h5[f'{lane}/{gk}/{dk}'][()]
             except Exception as e:
                 print(e)
+        try:
+            info[gk]['id'] = uuid.UUID(h5[f'{lane}/{gk}/datasetId'][()].decode())
+        except ValueError as e:
+            info[gk]['id'] = '<invalid UUID>'
+        info[gk]['axistags'] = json.loads(h5[f'{lane}/{gk}/axistags'][()].decode())
+        info[gk]['axes'] = [ax['key'] for ax in info[gk]['axistags']['axes']]
     return info

 def transfer_labels_to_ilastik_ilp(ilp, df_stack_meta):
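Aside, not from the repo: get_dataset_info above walks the per-lane dataset records that ilastik keeps inside the project file. A hedged sketch of inspecting those records directly with h5py, assuming the same Input Data/infos/lane0000 layout this commit relies on (any field names beyond those in the diff are not guaranteed):

import h5py

def dump_input_data_records(ilp_path, lane='Input Data/infos/lane0000'):
    # Hedged sketch: print every scalar field stored for each input dataset role.
    with h5py.File(ilp_path, 'r') as h5:
        for role in ['Raw Data', 'Segmentation Image']:
            grp = h5[f'{lane}/{role}']
            for key, node in grp.items():
                if not isinstance(node, h5py.Dataset):
                    continue  # skip subgroups, only dump scalar/array datasets
                value = node[()]
                if isinstance(value, bytes):  # HDF5 strings read back as bytes
                    value = value.decode()
                print(role, key, value)

# dump_input_data_records('test_template_obj.ilp')  # path is illustrative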
@@ -58,42 +62,55 @@ def transfer_labels_to_ilastik_ilp(ilp, df_stack_meta):
 def generate_ilastik_object_classifier(template_ilp, where_training: str):
-    # validate input data
+    # validate z-stack input data
     where = Path(where_training)
     zstacks = {
-        'raw': {
+        'Raw Data': {
             'path': where / 'zstack_train_raw.tif',
         },
-        'seg': {
+        'Segmentation Image': {
             'path': where / 'zstack_train_mask.tif',
         }
     }
     for k, v in zstacks.items():
-        # assert v['path'].exists(), 'Could not find input z-stack: ' + v['path']
-        # ff = tifffile.imread(v['path'])
-        # v['nz'] = ff.shape[0]
-        # v['hw'] = ff.shape[1:2]
-        # v['dtype'] = ff.dtype
         v['acc'] = generate_file_accessor(v['path'])
-    assert zstacks['raw']['acc'].is_binary()
-    assert zstacks['raw']
+    assert zstacks['Segmentation Image']['acc'].is_mask()
+    assert len(set([v['acc'].hw for k, v in zstacks.items()])) == 1  # same height and width
+    assert len(set([v['acc'].nz for k, v in zstacks.items()])) == 1  # same z-depth
     # now load CSV
     csv_path = where / 'train_stack.csv'
     assert csv_path.exists()
     df_meta = pd.read_csv(csv_path)
+    assert np.all(
+        df_meta['zi'].sort_values().to_numpy() == np.arange(0, zstacks['Raw Data']['acc'].nz)
+    )
     with h5py.File(template_ilp, 'r+') as h5:
-        pass
+        info = get_dataset_info(h5)
+        def set_ds(grp, ds, val):
+            ds = h5[f'Input Data/infos/lane0000/{grp}/{ds}']
+            ds[()] = val
+            return ds[()]
+        for hg in ['Raw Data', 'Segmentation Image']:
+            assert info[hg]['location'] == b'FileSystem'
+            assert info[hg]['axes'] == ['t', 'y', 'x']
+            set_ds(hg, 'filePath', zstacks[hg]['path'].__str__())
+            set_ds(hg, 'nickname', zstacks[hg]['path'].stem)
+            shape_zyx = [zstacks[hg]['acc'].shape_dict[ax] for ax in ['Z', 'Y', 'X']]
+            set_ds(hg, 'shape', np.array(shape_zyx))
+        new_info = get_dataset_info(h5)

 if __name__ == '__main__':
-    ilp = 'c:/Users/rhodes/model-server/ilastik/test_autolabel_obj - Copy.ilp'
-    # ilp = 'c:/Users/rhodes/model-server/ilastik/test_template_obj.ilp'
     df = pd.read_csv(
         'c:/Users/rhodes/projects/proj0011-plankton-seg/exp0009/output/labeled_patches-20231014-0002/train_stack.csv'
     )
     # transfer_labels_to_ilastik_ilp(ilp, df)
     generate_ilastik_object_classifier(
-        ilp,
-        'c:/Users/rhodes/projects/proj0011-plankton-seg/exp0009/output/labeled_patches-20231014-0002'
+        'c:/Users/rhodes/model-server/ilastik/test_template_obj.ilp',
+        'c:/Users/rhodes/projects/proj0011-plankton-seg/exp0009/output/labeled_patches-20231014-0004'
     )
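The core of this hunk is set_ds: it overwrites small datasets (scalar strings and a shape vector) that already exist in the template project, rather than creating new ones, which only succeeds when the stored dtype and shape are compatible with the new value. A hedged, standalone illustration of that h5py pattern; the file name, group path, and values below are invented:

import h5py
import numpy as np

# Hedged sketch, not repo code: build a toy file with a similar group layout,
# then overwrite its datasets in place the way set_ds does above.
with h5py.File('scratch_example.h5', 'w') as h5:
    grp = h5.create_group('Input Data/infos/lane0000/Raw Data')
    grp.create_dataset('filePath', data='old.tif')              # scalar string
    grp.create_dataset('shape', data=np.array([85, 256, 256]))  # fixed-shape array

with h5py.File('scratch_example.h5', 'r+') as h5:
    ds = h5['Input Data/infos/lane0000/Raw Data/filePath']
    ds[()] = 'zstack_train_raw.tif'   # in-place write; dataset keeps its dtype
    h5['Input Data/infos/lane0000/Raw Data/shape'][()] = np.array([60, 512, 512])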
\ No newline at end of file
@@ -250,6 +250,7 @@ def transfer_ecotaxa_labels_to_patch_stacks(
         stack_meta.append({'zi': fi, 'patch_filename': fn, 'annotation_class': ac, 'annotation_class_id': aci})
         acc_bm = generate_file_accessor(Path(where_masks) / fn)
         assert acc_bm.is_mask()
+        assert acc_bm.hw == patch_size, f'Unexpected patch size {patch_size}'
         assert acc_bm.chroma == 1
         assert acc_bm.nz == 1
@@ -34,7 +34,11 @@ class GenericImageDataAccessor(ABC):
         return True if self.shape_dict['Z'] > 1 else False

     def is_mask(self):
-        return self._data.dtype == 'bool'
+        if self._data.dtype == 'bool':
+            return True
+        elif self._data.dtype == 'uint8':
+            return np.all(np.unique(self._data) == [0, 255])
+        return False

     def get_one_channel_data (self, channel: int):
         c = int(channel)
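For illustration only (not repo code): the relaxed check above accepts boolean arrays and uint8 arrays whose values are exactly 0 and 255, while 0/1 label images and single-valued uint8 arrays are rejected. A hedged standalone copy of that logic, here called looks_like_mask, with a few made-up inputs:

import numpy as np

def looks_like_mask(data: np.ndarray) -> bool:
    # Mirrors the is_mask() branch added above, outside the accessor class.
    if data.dtype == 'bool':
        return True
    elif data.dtype == 'uint8':
        return np.all(np.unique(data) == [0, 255])
    return False

print(looks_like_mask(np.array([[True, False]])))                     # True
print(looks_like_mask(np.array([[0, 255], [255, 0]], dtype='uint8')))  # True
print(looks_like_mask(np.array([[0, 1], [1, 0]], dtype='uint8')))      # False (0/1 labels)
print(looks_like_mask(np.zeros((4, 4), dtype='uint8')))                # False (single value)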
@@ -101,12 +105,18 @@ class TifSingleSeriesFileAccessor(GenericImageFileAccessor):
                 raise DataShapeError(f'Expect only one series in {fpath}')
             se = tf.series[0]
             sd = {ch: se.shape[se.axes.index(ch)] for ch in se.axes}
-            idx = {k: sd[k] for k in ['Y', 'X', 'C', 'Z']}
+            order = ['Y', 'X', 'C', 'Z']
+            axs = [a for a in se.axes if a in order]
+            da = se.asarray()
+            if 'C' not in axs:
+                axs.append('C')
+                da = np.expand_dims(da, len(da.shape))
             yxcz = np.moveaxis(
-                se.asarray(),
-                [se.axes.index(ch) for ch in idx],
+                da,
+                [axs.index(k) for k in order],
                 [0, 1, 2, 3]
             )
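The new branch above normalizes whatever axis order tifffile reports into YXCZ and inserts a singleton channel axis when the series has no C axis. Outside the diff, a hedged sketch of the same reshuffle on a bare NumPy array; the to_yxcz helper and the ZYX example are assumptions, not tifffile API:

import numpy as np

def to_yxcz(data: np.ndarray, axes: str) -> np.ndarray:
    # axes is a string like 'ZYX' or 'ZCYX' describing data's current order
    order = ['Y', 'X', 'C', 'Z']
    axs = [a for a in axes if a in order]
    if 'C' not in axs:                       # add a trailing singleton channel axis
        axs.append('C')
        data = np.expand_dims(data, len(data.shape))
    return np.moveaxis(data, [axs.index(k) for k in order], [0, 1, 2, 3])

zstack = np.zeros((85, 256, 256), dtype='uint8')   # e.g. a ZYX mono mask
print(to_yxcz(zstack, 'ZYX').shape)                # -> (256, 256, 1, 85)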
@@ -2,7 +2,7 @@ import unittest
 import numpy as np

-from conf.testing import czifile, output_path, monopngfile, rgbpngfile, tifffile
+from conf.testing import czifile, output_path, monopngfile, rgbpngfile, tifffile, monozstackmask
 from model_server.accessors import CziImageFileAccessor, DataShapeError, generate_file_accessor, InMemoryDataAccessor, PngFileAccessor, write_accessor_data_to_file, TifSingleSeriesFileAccessor

 class TestCziImageFileAccess(unittest.TestCase):
@@ -106,4 +106,8 @@ class TestCziImageFileAccess(unittest.TestCase):
         self.assertEqual(acc.nz, 1)

     def test_read_mono_png(self):
-        return self.test_read_png(pngfile=monopngfile)
\ No newline at end of file
+        return self.test_read_png(pngfile=monopngfile)
+
+    def test_read_zstack_mono_mask(self):
+        acc = generate_file_accessor(monozstackmask['path'])
+        self.assertTrue(acc.is_mask())
\ No newline at end of file