Martin Schorb / VolumeAlign / Commits

Commit d03930e6
authored May 05, 2022 by Martin Schorb

Merge remote-tracking branch 'origin/dev' into dev

Parents: e0bed9a4, da8e3d2a
Pipeline #33171 passed in 1 minute and 1 second
Changes: 3    Pipelines: 1
dashUI/pointmatch.py
...
@@ -106,7 +106,7 @@ def pointmatch_tp_dd_fill(stack, thispage):
     thispage = thispage.lstrip('/')

-    if thispage in (None, '') or thispage not in hf.trigger(key='module') and dd_options_in is not None:
+    if thispage in (None, '') or thispage not in hf.trigger(key='module'):
         raise PreventUpdate

     tp_dirlist = [d_item for d_item in glob.glob(params.json_run_dir + '/tilepairs_' + params.user + '*' + stack + '*')
...
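The two variants of the guard above differ only in the trailing "dd_options_in is not None" clause. Because Python binds "and" more tightly than "or", that clause narrows only the second half of the condition, which is easy to misread. A small, self-contained sketch (all values are hypothetical stand-ins, not the app's data) showing how the two variants can disagree:

    # Hypothetical stand-ins for thispage, hf.trigger(key='module') and dd_options_in.
    thispage = 'unknown_page'
    known_pages = ['pointmatch', 'align']   # stand-in for hf.trigger(key='module')
    dd_options_in = None

    # 'and' binds tighter than 'or', so the longer guard parses as:
    long_guard = thispage in (None, '') or (thispage not in known_pages and dd_options_in is not None)
    # while the shorter guard is simply:
    short_guard = thispage in (None, '') or thispage not in known_pages

    print(long_guard, short_guard)   # False True -> the guards evaluate differently here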
dashUI/start_webUI.py
...
@@ -6,7 +6,6 @@ Created on Fri Feb 5 08:49:53 2021
 @author: schorb
 """

 import os
 import json
 import subprocess
...
@@ -16,59 +15,53 @@ from utils.launch_jobs import run_prefix
 target_machines = params.remote_compute

 user_file = './web_users.json'

 # required for many cluster environments, including SPARK
 ssh_key_login = True

 home = os.getenv('HOME')

 with open(user_file, 'r') as f:
     users_exist = json.load(f)

 if params.user not in users_exist.keys():
-    print('Setting up new user for Render WebUI. This is necessery once.\n')
+    print('Setting up new user for Render WebUI. This is necessary only once.\n')

     # create new user...

     if ssh_key_login:
         if not os.path.exists(home + '/.ssh'):
             os.mkdirs(home + '/.ssh')

         if not os.path.exists(home + '/.ssh/id_rsa_render'):
             os.system("ssh-keygen -t rsa -b 4096 -q -f '+home+'/.ssh/id_rsa_render -N ''")

             for target in target_machines:
                 os.system('ssh-copy-id -i ' + home + '/.ssh/id_rsa_render ' + target_machines[target] + '@' + target)

     port = max(users_exist.values()) + 1
     users_exist[params.user] = port

     with open(user_file, 'w') as f:
         json.dump(users_exist, f, indent=4)

 else:
     port = users_exist[params.user]

-logfile = os.path.join(params.render_log_dir, 'webUI_' + run_prefix() + '.log')
-
 # check directory access for transient files to be written:
 for checkdir in [params.render_log_dir, params.json_run_dir]:
     if not os.access(checkdir, 7):
         raise OSError('The directory ' + checkdir + ' is not accessible. Check its user rights.')

+logfile = os.path.join(params.render_log_dir, 'webUI_' + run_prefix() + '.log')
+
 print('Starting Render WebUI.\n')
 print('As long as this window is open, you can access Render through:\n\n')

 print('http://' + params.hostname + ':' + str(port) + '\n\n')

 print('from any device in the network.\nDo not use CTRL+C to copy the address, this will close the process.')
 print('To avoid excessive resource use, please close the server when done with your processing.')

 os.system('python index.py ' + str(port) + ' > ' + logfile)
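Two details in the user-setup block above are worth noting: os.mkdirs is not a standard-library call (the stdlib function is os.makedirs), and the ssh-keygen command string mixes its quotes so that the literal text '+home+' ends up inside the shell command instead of the value of the home variable. A hedged sketch of how that key-generation step could be written with os.makedirs and subprocess.run; the paths and the target_machines mapping are placeholders mirroring the script above, not the repository's actual code:

    import os
    import subprocess

    home = os.getenv('HOME')
    key_path = os.path.join(home, '.ssh', 'id_rsa_render')
    # Hypothetical stand-in for params.remote_compute: {hostname: username}
    target_machines = {'cluster-login01': 'schorb'}

    # create the .ssh directory if needed (stdlib spelling of "mkdirs")
    os.makedirs(os.path.join(home, '.ssh'), exist_ok=True)

    if not os.path.exists(key_path):
        # passing the command as a list interpolates 'home' and avoids shell quoting
        subprocess.run(['ssh-keygen', '-t', 'rsa', '-b', '4096', '-q',
                        '-f', key_path, '-N', ''], check=True)

        for target, user in target_machines.items():
            subprocess.run(['ssh-copy-id', '-i', key_path, user + '@' + target], check=True)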
dashUI/utils/launch_jobs.py
...
@@ -55,10 +55,13 @@ def status(run_state):
     A run_state dict contains:
     - 'status': string describing the processing status of ALL tasks
-    - 'type': string describing the type of compute infrastructure to be used. Currently supports ['standalone','generic','slurm','sparkslurm']
+    - 'type': string describing the type of compute infrastructure to be used.
+      Currently supports ['standalone','generic','slurm','sparkslurm']
     - 'logfile': string path pointing to the/a log file of the processing run.
-    - 'id': ID of the processing task. Can be a single string to describe one task or a dict containing a list of task IDs:
-      allowed keys: - 'par' for parallel tasks or 'seq' for sequential tasks. These are exclusive and contain lists of job IDs
+    - 'id': ID of the processing task. Can be a single string to describe one task
+      or a dict containing a list of task IDs:
+      allowed keys: - 'par' for parallel tasks or 'seq' for sequential tasks.
+      These are exclusive and contain lists of job IDs
     - 'logfiles': list of individual log files for the tasks.
...
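Going only by the field descriptions above, a run_state for a Slurm run with two parallel jobs might look roughly like this (all concrete values are invented for illustration):

    run_state = {
        'status': 'running',                    # status of ALL tasks
        'type': 'slurm',                        # one of ['standalone','generic','slurm','sparkslurm']
        'logfile': '/logs/webUI_run.log',       # hypothetical path
        'id': {'par': ['1234567', '1234568']},  # 'par' and 'seq' are exclusive
        'logfiles': ['/logs/job_1234567.log', '/logs/job_1234568.log'],
    }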
@@ -259,7 +262,7 @@ def cluster_status(run_state):
         command += ' --format=jobid,state,node --parsable'

     # commands for other cluster types go HERE

     if run_state['status'] in ['cancelled', 'error']:
         return run_state['status'], link

     if cl_type in params.remote_submission.keys():
...
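For orientation: with Slurm accounting tools such as sacct, the --parsable flag requested above makes the jobid,state,node columns pipe-delimited (with a trailing '|'), so the status output can be split field by field. A rough sketch with an invented sample output, not the module's actual parsing code:

    # Hypothetical output for '--format=jobid,state,node --parsable'
    sample = "JobID|State|NodeList|\n1234567|RUNNING|node042|\n1234568|COMPLETED|node043|\n"

    jobs = {}
    for line in sample.strip().splitlines()[1:]:        # skip the header row
        jobid, state, node = line.rstrip('|').split('|')
        jobs[jobid] = {'state': state, 'node': node}

    print(jobs)   # {'1234567': {'state': 'RUNNING', 'node': 'node042'}, ...}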
@@ -482,7 +485,7 @@ def run(target='standalone',
     :param str target: target for processing. Currently supports ['standalone','generic','slurm','sparkslurm']
     :param str pyscript: script to execute
-    :param str or list jsonfile: string path of JSON file with the script parameters or list of those for multiple parallel tasks
+    :param str or list jsonfile: string path of JSON file with the script parameters or list for multiple parallel tasks
     :param str or dict or list run_args: str, dict or list with run-time arguments for the specific launcher
     :param str or dict or list target_args: str, dict or list with setup arguments for the specific launcher
     :param str or dict or list special_args: str, dict or list with additional arguments
...
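Going only by the parameter list above, a call that launches two parallel tasks on a Slurm target might look roughly like this; the import mirrors the run_prefix import seen in start_webUI.py, every concrete value is a placeholder, and the remaining parameters are assumed to keep their defaults:

    from utils.launch_jobs import run

    run(target='slurm',
        pyscript='scripts/create_tilepairs.py',                     # hypothetical script path
        jsonfile=['run/tilepairs_0.json', 'run/tilepairs_1.json'],  # list -> two parallel tasks
        run_args={'time': '00:30:00'})                              # launcher-specific, assumed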