Commit 38f96f46 authored by Constantin Pape

Add scripts for masking extrapolated cells and nuclei

parent 470f4231
@@ -10,7 +10,7 @@ import vigra
 from scipy.ndimage.morphology import binary_dilation
 from scripts.transformation import intensity_correction
-from pybdv import make_bdv
+from pybdv import make_bdv, convert_to_bdv


 def combine_mask():
@@ -58,6 +58,7 @@ def correct_intensities_test(target='local', max_jobs=32):
                          target=target, max_jobs=max_jobs)


+# FIXME scale 0 is damaged in the h5 file !
 def correct_intensities(target='slurm', max_jobs=250):
     raw_path = '../data/rawdata/sbem-6dpf-1-whole-raw.h5'
     tmp_folder = './tmp_intensity_correction'
@@ -112,7 +113,50 @@ def check_chunks():
         json.dump(results, f)


+def make_subsampled_volume():
+    p = './em-raw-wholecorrected.h5'
+    k = 't00000/s00/2/cells'
+    with h5py.File(p, 'r') as f:
+        print(f[k].shape)
+    # return
+
+    scale_factors = [[2, 2, 2]] * 6
+    convert_to_bdv(p, k, 'em-raw-small-corrected.h5', downscale_factors=scale_factors, downscale_mode='mean',
+                   resolution=[.05, .04, .04], unit='micrometer')
+
+
+def make_extrapolation_mask():
+    z0 = 800  # extrapolation for z < z0
+    z1 = 9800  # extrapolation for z > z1
+
+    ref_path = '../data/rawdata/sbem-6dpf-1-whole-raw.h5'
+    ref_scale = 4
+    ref_key = 't00000/s00/%i/cells' % ref_scale
+
+    with h5py.File(ref_path, 'r') as f:
+        shape = f[ref_key].shape
+    mask = np.zeros(shape, dtype='uint8')
+
+    # adapt the z-boundaries to the resolution level
+    z0 //= (2 ** (ref_scale - 1))
+    z1 //= (2 ** (ref_scale - 1))
+    print(z0, z1)
+
+    mask[:z0] = 255
+    mask[z1:] = 255
+    print(mask.min(), mask.max())
+
+    scales = 3 * [[2, 2, 2]]
+    res = [.2, .16, .16]
+    out_path = './extrapolation_mask'
+    make_bdv(mask, out_path, downscale_factors=scales,
+             downscale_mode='nearest',
+             resolution=res, unit='micrometer',
+             convert_dtype=False)
+
+
 if __name__ == '__main__':
-    # combine_mask()
-    correct_intensities('slurm', 125)
-    # check_chunks()
+    # correct_intensities('slurm', 125)
+    # make_subsampled_volume()
+    make_extrapolation_mask()
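
Editor's note: the mask flags the z-ranges (slices below 800 and above 9800 at full resolution) whose intensities were extrapolated rather than measured. The boundaries are divided by 2 ** (ref_scale - 1) rather than 2 ** ref_scale, which suggests the first scale level of this dataset does not downsample along z. A minimal sanity check for the exported mask, assuming the export ended up at './extrapolation_mask.h5' with the standard BDV key layout (both are assumptions, not confirmed by the commit):

import h5py
import numpy as np

with h5py.File('./extrapolation_mask.h5', 'r') as f:
    mask = f['t00000/s00/0/cells'][:]

# per-slice foreground indicator; only the top and bottom z-ranges should be set
fg = (mask == 255).any(axis=(1, 2))
print('first non-masked slice:', int(np.argmin(fg)))                      # expect z0 at this scale
print('last non-masked slice:', len(fg) - 1 - int(np.argmin(fg[::-1])))   # expect z1 - 1 at this scale
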
@@ -5,7 +5,7 @@ from .base_attributes import base_attributes, propagate_attributes
 from .cell_nucleus_mapping import map_cells_to_nuclei
 from .genes import gene_assignment_table, vc_assignment_table
 from .morphology import write_morphology_cells, write_morphology_nuclei
-from .region_attributes import region_attributes
+from .region_attributes import region_attributes, extrapolated_intensities
 from .cilia_attributes import cilia_morphology
 from ..files.xml_utils import get_h5_path_from_xml
@@ -76,6 +76,14 @@ def make_cell_tables(old_folder, folder, name, tmp_folder, resolution,
                      image_folder, segmentation_folder,
                      label_ids, tmp_folder, target, max_jobs)

+    # mapping to extrapolated intensities
+    extrapol_mask = os.path.join(folder, 'segmentations', 'sbem-6dpf-mask-extrapolated.xml')
+    extrapol_mask = get_h5_path_from_xml(extrapol_mask, return_absolute_path=True)
+    extrapol_out = os.path.join(table_folder, 'extrapolated_intensity_correction.csv')
+    extrapolated_intensities(seg_path, 't00000/s00/3/cells',
+                             extrapol_mask, 't00000/s00/0/cells',
+                             extrapol_out, tmp_folder, target, max_jobs)
+

 def make_nucleus_tables(old_folder, folder, name, tmp_folder, resolution,
                         target='slurm', max_jobs=100):
@@ -101,6 +109,14 @@ def make_nucleus_tables(old_folder, folder, name, tmp_folder, resolution,
                      n_labels, resolution, tmp_folder,
                      target, max_jobs)

+    # mapping to extrapolated intensities
+    extrapol_mask = os.path.join(folder, 'segmentations', 'sbem-6dpf-mask-extrapolated.xml')
+    extrapol_mask = get_h5_path_from_xml(extrapol_mask, return_absolute_path=True)
+    extrapol_out = os.path.join(table_folder, 'extrapolated_intensity_correction.csv')
+    extrapolated_intensities(seg_path, 't00000/s00/1/cells',
+                             extrapol_mask, 't00000/s00/0/cells',
+                             extrapol_out, tmp_folder, target, max_jobs)
+

 def make_cilia_tables(old_folder, folder, name, tmp_folder, resolution,
                       target='slurm', max_jobs=100):
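
Editor's note: both table pipelines now write extrapolated_intensity_correction.csv, a tab-separated per-label flag table with columns label_id and has_extrapolated_intensities. Downstream, the flag can be joined onto the main attribute table. A sketch with illustrative paths (the actual table folder is derived from folder and name at runtime, and the 'default.csv' name is an assumption):

import pandas as pd

table_folder = 'tables/sbem-6dpf-1-whole-segmented-cells-labels'  # illustrative
default = pd.read_csv(table_folder + '/default.csv', sep='\t')
flags = pd.read_csv(table_folder + '/extrapolated_intensity_correction.csv', sep='\t')

# left-join the flag onto the attribute table and count flagged cells
merged = default.merge(flags, on='label_id', how='left')
print(int((merged['has_extrapolated_intensities'] == 1).sum()))
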
 import os
 import glob
 import numpy as np
+import pandas as pd
 import h5py

 from .util import write_csv, node_labels, normalize_overlap_dict
@@ -110,3 +111,29 @@ def region_attributes(seg_path, region_out,
     # 3.) merge the mappings and write new table
     write_region_table(label_ids, label_list, semantic_mapping_list, region_out)
+
+
+def extrapolated_intensities(seg_path, seg_key, mask_path, mask_key, out_path,
+                             tmp_folder, target, max_jobs, overlap_threshold=.5):
+    mask_labels = node_labels(seg_path, seg_key,
+                              mask_path, mask_key,
+                              'extrapolated_intensities', tmp_folder,
+                              target, max_jobs, max_overlap=False)
+
+    foreground_id = 255
+
+    # flag every label that overlaps the mask foreground by more than overlap_threshold (50 % by default)
+    mask_labels = normalize_overlap_dict(mask_labels)
+    label_ids = np.array([k for k in sorted(mask_labels.keys())])
+    overlap_values = np.array([mask_labels[label_id].get(foreground_id, 0.) for label_id in label_ids])
+    overlap_labels = label_ids[overlap_values > overlap_threshold]
+
+    n_labels = int(label_ids.max()) + 1
+    mask_labels = np.zeros(n_labels, dtype='uint8')
+    mask_labels[overlap_labels] = 1
+    label_ids = np.arange(n_labels)
+
+    data = np.concatenate([label_ids[:, None], mask_labels[:, None]], axis=1)
+    cols = ['label_id', 'has_extrapolated_intensities']
+    table = pd.DataFrame(data, columns=cols)
+    table.to_csv(out_path, sep='\t', index=False)
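
Editor's note: the original in-code comment ("at least 25 % overlap as muscle") contradicted both the default threshold and the function's purpose and has been corrected above. The thresholding step itself is the core of the new function: node_labels returns, per segmentation label, its overlap with each mask value; after normalization, labels whose overlap with the foreground value 255 exceeds overlap_threshold are flagged. A self-contained toy sketch of that step, with a dict literal standing in for the node_labels result:

import numpy as np

foreground_id = 255
overlap_threshold = .5
# label id -> {mask value: normalized overlap fraction}
mask_labels = {1: {0: .9, 255: .1}, 2: {255: .8, 0: .2}, 3: {0: 1.}}

label_ids = np.array(sorted(mask_labels.keys()))
overlap_values = np.array([mask_labels[lid].get(foreground_id, 0.) for lid in label_ids])
overlap_labels = label_ids[overlap_values > overlap_threshold]
print(overlap_labels)  # -> [2]
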