From a83a430ca0db42f5202d8db17d8e3ddfc79be82f Mon Sep 17 00:00:00 2001
From: Constantin Pape <constantin.pape@iwr.uni-heidelberg.de>
Date: Tue, 13 Aug 2019 17:55:40 +0200
Subject: [PATCH] Add mechanism to change default config values and change conda env

---
 make_initial_version.py                |  2 +-
 scripts/attributes/base_attributes.py  | 16 +-------
 scripts/default_config.py              | 56 ++++++++++++++++++++++++++
 scripts/export/export_segmentation.py  | 10 +----
 scripts/export/map_segmentation_ids.py | 10 +----
 scripts/export/to_bdv.py               | 10 +----
 6 files changed, 65 insertions(+), 39 deletions(-)
 create mode 100644 scripts/default_config.py

diff --git a/make_initial_version.py b/make_initial_version.py
index 2cc52f8..e22e1e5 100755
--- a/make_initial_version.py
+++ b/make_initial_version.py
@@ -1,4 +1,4 @@
-#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster_env37/bin/python
+#! /g/arendt/pape/miniconda3/envs/platybrowser/bin/python
 
 import os
 from shutil import copyfile
diff --git a/scripts/attributes/base_attributes.py b/scripts/attributes/base_attributes.py
index 77d436e..9c1aba3 100644
--- a/scripts/attributes/base_attributes.py
+++ b/scripts/attributes/base_attributes.py
@@ -8,19 +8,7 @@ import luigi
 from cluster_tools.morphology import MorphologyWorkflow
 from cluster_tools.morphology import RegionCentersWorkflow
 from .util import write_csv
-
-
-def make_config(tmp_folder):
-    configs = MorphologyWorkflow.get_config()
-    config_folder = os.path.join(tmp_folder, 'configs')
-    os.makedirs(config_folder, exist_ok=True)
-    global_config = configs['global']
-    # TODO use new platy browser env
-    shebang = '#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster_env37/bin/python'
-    global_config['shebang'] = shebang
-    global_config['block_shape'] = [64, 512, 512]
-    with open(os.path.join(config_folder, 'global.config'), 'w') as f:
-        json.dump(global_config, f)
+from ..default_config import write_default_global_config
 
 
 def n5_attributes(input_path, input_key, tmp_folder, target, max_jobs):
@@ -149,7 +137,7 @@ def base_attributes(input_path, input_key, output_path, resolution,
                     tmp_folder, target, max_jobs, correct_anchors=True):
 
     # prepare cluster tools tasks
-    make_config(tmp_folder)
+    write_default_global_config(os.path.join(tmp_folder, 'configs'))
 
     # make base attributes as n5 dataset
     tmp_path, tmp_key = n5_attributes(input_path, input_key,
diff --git a/scripts/default_config.py b/scripts/default_config.py
new file mode 100644
index 0000000..38bb4f6
--- /dev/null
+++ b/scripts/default_config.py
@@ -0,0 +1,56 @@
+import os
+import json
+from cluster_tools.cluster_tasks import BaseClusterTask
+
+DEFAULT_GROUP = 'kreshuk'
+DEFAULT_SHEBANG = '#! /g/arendt/pape/miniconda3/envs/platybrowser/bin/python'
+DEFAULT_BLOCK_SHAPE = [64, 512, 512]
+
+
+#
+# default group parameter
+#
+
+def set_default_group(group):
+    global DEFAULT_GROUP
+    DEFAULT_GROUP = group
+
+
+def get_default_group():
+    return DEFAULT_GROUP
+
+
+#
+# default shebang parameter
+#
+
+def set_default_shebang(shebang):
+    global DEFAULT_SHEBANG
+    DEFAULT_SHEBANG = shebang
+
+
+def get_default_shebang():
+    return DEFAULT_SHEBANG
+
+
+#
+# default block_shape parameter
+#
+
+def set_default_block_shape(block_shape):
+    global DEFAULT_BLOCK_SHAPE
+    DEFAULT_BLOCK_SHAPE = block_shape
+
+
+def get_default_block_shape():
+    return DEFAULT_BLOCK_SHAPE
+
+
+def write_default_global_config(config_folder):
+    os.makedirs(config_folder, exist_ok=True)
+    global_config = BaseClusterTask.default_global_config()
+    global_config['shebang'] = get_default_shebang()
+    global_config['block_shape'] = get_default_block_shape()
+    global_config['group'] = get_default_group()
+    with open(os.path.join(config_folder, 'global.config'), 'w') as f:
+        json.dump(global_config, f)
diff --git a/scripts/export/export_segmentation.py b/scripts/export/export_segmentation.py
index 3d93272..410f9fc 100644
--- a/scripts/export/export_segmentation.py
+++ b/scripts/export/export_segmentation.py
@@ -7,6 +7,7 @@ from cluster_tools.downscaling import DownscalingWorkflow
 from paintera_tools import serialize_from_commit
 from .to_bdv import to_bdv
 from .map_segmentation_ids import map_segmentation_ids
+from ..default_config import write_default_global_config
 
 
 def get_n_scales(paintera_path, paintera_key):
@@ -23,16 +24,9 @@ def downscale(path, in_key, out_key,
     task = DownscalingWorkflow
 
     config_folder = os.path.join(tmp_folder, 'configs')
-    os.makedirs(config_folder, exist_ok=True)
+    write_default_global_config(config_folder)
 
     configs = task.get_config()
-    global_conf = configs['global']
-    global_conf.update({'shebang':
-                        "#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster_env37/bin/python",
-                        'block_shape': [64, 512, 512]})
-    with open(os.path.join(config_folder, 'global.config'), 'w') as f:
-        json.dump(global_conf, f)
-
     config = configs['downscaling']
     config.update({'mem_limit': 8, 'time_limit': 120,
                    'library_kwargs': {'order': 0}})
diff --git a/scripts/export/map_segmentation_ids.py b/scripts/export/map_segmentation_ids.py
index 1368e43..f8565fb 100644
--- a/scripts/export/map_segmentation_ids.py
+++ b/scripts/export/map_segmentation_ids.py
@@ -5,6 +5,7 @@ import z5py
 from cluster_tools.node_labels import NodeLabelWorkflow
 
 from ..files import get_h5_path_from_xml
+from ..default_config import write_default_global_config
 
 
 def get_seg_path(folder, name):
@@ -34,16 +35,9 @@ def map_ids(path1, path2, out_path, tmp_folder, max_jobs, target, prefix):
     task = NodeLabelWorkflow
 
     config_folder = os.path.join(tmp_folder, 'configs')
-    os.makedirs(config_folder, exist_ok=True)
+    write_default_global_config(config_folder)
 
     configs = task.get_config()
-    global_conf = configs['global']
-    global_conf.update({'shebang':
-                        "#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster_env37/bin/python",
-                        'block_shape': [64, 512, 512]})
-    with open(os.path.join(config_folder, 'global.config'), 'w') as f:
-        json.dump(global_conf, f)
-
     conf = configs['merge_node_labels']
     conf.update({'threads_per_job': 8, 'mem_limit': 16})
     with open(os.path.join(config_folder, 'merge_node_labels.config'), 'w') as f:
diff --git a/scripts/export/to_bdv.py b/scripts/export/to_bdv.py
index 1ee9aa5..e31b659 100644
--- a/scripts/export/to_bdv.py
+++ b/scripts/export/to_bdv.py
@@ -5,6 +5,7 @@ import luigi
 import h5py
 import z5py
 from cluster_tools.downscaling import PainteraToBdvWorkflow
+from ..default_config import write_default_global_config
 
 
 def check_max_id(path, key):
@@ -23,16 +24,9 @@ def to_bdv(in_path, in_key, out_path, resolution, tmp_folder, target='slurm'):
     max_id = check_max_id(in_path, in_key)
 
     config_folder = os.path.join(tmp_folder, 'configs')
-    os.makedirs(config_folder, exist_ok=True)
+    write_default_global_config(config_folder)
 
     configs = PainteraToBdvWorkflow.get_config()
-    global_conf = configs['global']
-    global_conf.update({'shebang':
-                        "#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster_env37/bin/python",
-                        'block_shape': [64, 512, 512]})
-    with open(os.path.join(config_folder, 'global.config'), 'w') as f:
-        json.dump(global_conf, f)
-
     config = configs['copy_volume']
     config.update({'threads_per_job': 8, 'mem_limit': 32, 'time_limit': 1600,
                    'chunks': [32, 256, 256]})
-- 
GitLab
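
Note (not part of the patch): a minimal sketch of how the new default_config helpers introduced above could be used from a top-level script. The absolute import path, the example shebang and the tmp_folder are illustrative assumptions only.

    # Illustrative usage only; module path and all values are placeholders.
    import os

    from scripts.default_config import (set_default_shebang,
                                         set_default_block_shape,
                                         write_default_global_config)

    # Override the module-level defaults once, e.g. for a different conda env
    # and a smaller block shape (placeholder values).
    set_default_shebang('#! /path/to/conda/envs/my-env/bin/python')
    set_default_block_shape([32, 256, 256])

    # Any workflow that calls write_default_global_config afterwards picks up
    # the overridden shebang, block_shape and group in its global.config.
    tmp_folder = 'tmp_example'
    write_default_global_config(os.path.join(tmp_folder, 'configs'))

This is the point of the refactor: the per-workflow copies of the hard-coded shebang and block shape are replaced by one module whose values can be overridden before any cluster_tools config is written.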