Commit a83a430c authored by Constantin Pape

Add mechanism to change default config values and change conda env

parent abdbc59e
-#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster_env37/bin/python
+#! /g/arendt/pape/miniconda3/envs/platybrowser/bin/python
import os
from shutil import copyfile
@@ -8,19 +8,7 @@ import luigi
from cluster_tools.morphology import MorphologyWorkflow
from cluster_tools.morphology import RegionCentersWorkflow
from .util import write_csv
-def make_config(tmp_folder):
-    configs = MorphologyWorkflow.get_config()
-    config_folder = os.path.join(tmp_folder, 'configs')
-    os.makedirs(config_folder, exist_ok=True)
-    global_config = configs['global']
-    # TODO use new platy browser env
-    shebang = '#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster_env37/bin/python'
-    global_config['shebang'] = shebang
-    global_config['block_shape'] = [64, 512, 512]
-    with open(os.path.join(config_folder, 'global.config'), 'w') as f:
-        json.dump(global_config, f)
+from ..default_config import write_default_global_config
def n5_attributes(input_path, input_key, tmp_folder, target, max_jobs):
@@ -149,7 +137,7 @@ def base_attributes(input_path, input_key, output_path, resolution,
                    tmp_folder, target, max_jobs, correct_anchors=True):
    # prepare cluster tools tasks
-    make_config(tmp_folder)
+    write_default_global_config(os.path.join(tmp_folder, 'configs'))
    # make base attributes as n5 dataset
    tmp_path, tmp_key = n5_attributes(input_path, input_key,
default_config.py (new file)
+import os
+import json
+
+from cluster_tools.cluster_tasks import BaseClusterTask
+
+DEFAULT_GROUP = 'kreshuk'
+DEFAULT_SHEBANG = '#! /g/arendt/pape/miniconda3/envs/platybrowser/bin/python'
+DEFAULT_BLOCK_SHAPE = [64, 512, 512]
+
+
+#
+# default group parameter
+#
+
+def set_default_group(group):
+    global DEFAULT_GROUP
+    DEFAULT_GROUP = group
+
+
+def get_default_group():
+    return DEFAULT_GROUP
+
+
+#
+# default shebang parameter
+#
+
+def set_default_shebang(shebang):
+    global DEFAULT_SHEBANG
+    DEFAULT_SHEBANG = shebang
+
+
+def get_default_shebang():
+    return DEFAULT_SHEBANG
+
+
+#
+# default block_shape parameter
+#
+
+def set_default_block_shape(block_shape):
+    global DEFAULT_BLOCK_SHAPE
+    DEFAULT_BLOCK_SHAPE = block_shape
+
+
+def get_default_block_shape():
+    return DEFAULT_BLOCK_SHAPE
+
+
+def write_default_global_config(config_folder):
+    os.makedirs(config_folder, exist_ok=True)
+    global_config = BaseClusterTask.default_global_config()
+    global_config['shebang'] = get_default_shebang()
+    global_config['block_shape'] = get_default_block_shape()
+    global_config['group'] = get_default_group()
+    with open(os.path.join(config_folder, 'global.config'), 'w') as f:
+        json.dump(global_config, f)
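
The new default_config module keeps group, shebang and block shape as module-level defaults, so callers can override them once and every workflow that goes through write_default_global_config picks the new values up. A minimal usage sketch, assuming cluster_tools is installed and the package root is on the Python path so the module is importable as default_config; the group name and config folder below are placeholders, not values from this commit:

import os
import json

from default_config import (set_default_group, set_default_shebang,
                            set_default_block_shape, write_default_global_config)

# override the defaults once, before any workflow config is written
set_default_group('mygroup')  # placeholder group name
set_default_shebang('#! /g/arendt/pape/miniconda3/envs/platybrowser/bin/python')
set_default_block_shape([64, 512, 512])

# write global.config with the current defaults; the folder is created if needed
config_folder = os.path.join('tmp_example', 'configs')  # placeholder tmp folder
write_default_global_config(config_folder)

# the written file carries the overridden values
with open(os.path.join(config_folder, 'global.config')) as f:
    print(json.load(f)['group'])  # -> 'mygroup'
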
@@ -7,6 +7,7 @@ from cluster_tools.downscaling import DownscalingWorkflow
from paintera_tools import serialize_from_commit
from .to_bdv import to_bdv
from .map_segmentation_ids import map_segmentation_ids
+from ..default_config import write_default_global_config
def get_n_scales(paintera_path, paintera_key):
@@ -23,16 +24,9 @@ def downscale(path, in_key, out_key,
    task = DownscalingWorkflow
    config_folder = os.path.join(tmp_folder, 'configs')
-    os.makedirs(config_folder, exist_ok=True)
+    write_default_global_config(config_folder)
    configs = task.get_config()
-    global_conf = configs['global']
-    global_conf.update({'shebang':
-                        "#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster_env37/bin/python",
-                        'block_shape': [64, 512, 512]})
-    with open(os.path.join(config_folder, 'global.config'), 'w') as f:
-        json.dump(global_conf, f)
    config = configs['downscaling']
    config.update({'mem_limit': 8, 'time_limit': 120,
                   'library_kwargs': {'order': 0}})
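
The tail of this hunk is cut off by the collapsed diff, but the surrounding code shows the pattern for per-task configs: update the dict returned by get_config() and dump it as JSON next to global.config. A minimal sketch of that persistence step, assuming the file name follows the '<task_name>.config' convention used for 'merge_node_labels.config' elsewhere in this commit (so 'downscaling.config' here is an assumption):

import json
import os

# placeholder folder; in the real code this is tmp_folder + '/configs'
config_folder = os.path.join('tmp_example', 'configs')
os.makedirs(config_folder, exist_ok=True)

# the task-specific settings from the hunk above
config = {'mem_limit': 8, 'time_limit': 120, 'library_kwargs': {'order': 0}}

# 'downscaling.config' is an assumed name, following the '<task>.config'
# pattern used for merge_node_labels.config below
with open(os.path.join(config_folder, 'downscaling.config'), 'w') as f:
    json.dump(config, f)
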
@@ -5,6 +5,7 @@ import z5py
from cluster_tools.node_labels import NodeLabelWorkflow
from ..files import get_h5_path_from_xml
+from ..default_config import write_default_global_config
def get_seg_path(folder, name):
@@ -34,16 +35,9 @@ def map_ids(path1, path2, out_path, tmp_folder, max_jobs, target, prefix):
    task = NodeLabelWorkflow
    config_folder = os.path.join(tmp_folder, 'configs')
-    os.makedirs(config_folder, exist_ok=True)
+    write_default_global_config(config_folder)
    configs = task.get_config()
-    global_conf = configs['global']
-    global_conf.update({'shebang':
-                        "#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster_env37/bin/python",
-                        'block_shape': [64, 512, 512]})
-    with open(os.path.join(config_folder, 'global.config'), 'w') as f:
-        json.dump(global_conf, f)
    conf = configs['merge_node_labels']
    conf.update({'threads_per_job': 8, 'mem_limit': 16})
    with open(os.path.join(config_folder, 'merge_node_labels.config'), 'w') as f:
@@ -5,6 +5,7 @@ import luigi
import h5py
import z5py
from cluster_tools.downscaling import PainteraToBdvWorkflow
+from ..default_config import write_default_global_config
def check_max_id(path, key):
@@ -23,16 +24,9 @@ def to_bdv(in_path, in_key, out_path, resolution, tmp_folder, target='slurm'):
    max_id = check_max_id(in_path, in_key)
    config_folder = os.path.join(tmp_folder, 'configs')
-    os.makedirs(config_folder, exist_ok=True)
+    write_default_global_config(config_folder)
    configs = PainteraToBdvWorkflow.get_config()
-    global_conf = configs['global']
-    global_conf.update({'shebang':
-                        "#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster_env37/bin/python",
-                        'block_shape': [64, 512, 512]})
-    with open(os.path.join(config_folder, 'global.config'), 'w') as f:
-        json.dump(global_conf, f)
    config = configs['copy_volume']
    config.update({'threads_per_job': 8, 'mem_limit': 32, 'time_limit': 1600,
                   'chunks': [32, 256, 256]})