Commit 9e229127 authored by Martin Schorb

Sparkslurm status

parent e94dab49
@@ -80,8 +80,14 @@ def update_status(n,click,run_state,logfile,module,thispage):
(r_status['status'], link) = launch_jobs.status(run_state)
if not link == '':
status_href = link.split('__')[-1]
status_style = {}
r_status['status'],status_href = link.split('__')
if not 'Problem' in r_status['status']:
if 'Startup' in r_status['status']:
status_href += ':' + params.spark_port
else:
status_href += ':' + spark_job_port
status_style = {}
if 'Error' in r_status['status']:
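A minimal sketch (not part of the diff) of how the reworked link parsing behaves; the `'status__hostname'` format matches the string built in `cluster_status` below, while the port values are placeholders rather than the project's real `params` settings:

```python
# Placeholder ports; the module reads these from params.spark_port / params.spark_job_port.
spark_port = '8080'
spark_job_port = '4040'

link = 'running__spark-master01'        # example of the 'status__hostname' string from cluster_status

status, status_href = link.split('__')  # -> 'running', 'spark-master01'

if 'Problem' not in status:
    if 'Startup' in status:
        status_href += ':' + spark_port      # master UI while the cluster is still starting
    else:
        status_href += ':' + spark_job_port  # application UI once a job is running

print(status, status_href)              # running spark-master01:4040
```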
@@ -125,23 +131,16 @@ def get_status(run_state,module,thispage):
# procs=params.processes[module.strip('_')]
c_button_style = {'display': 'none'}
if run_state['status'] == 'running':
# if procs == []:
# status = 'not running'
# else:
status = html.Div([html.Img(src='assets/gears.gif',height=72),html.Br(),'running'])
# if not type(procs) is subprocess.Popen:
# if type(procs) is str:
# c_button_style = {}
# elif not type(procs[0]) is subprocess.Popen:
# c_button_style = {}
if 'running' in run_state['status']:
status = html.Div([html.Img(src='assets/gears.gif',height=72),html.Br(),'running'])
if run_state['type'] not in ['standalone']:
c_button_style = {}
status_style = {'color': '#04D'}
elif run_state['status'] == 'input':
status='process will start on click.'
elif run_state['status'] == 'done':
status='DONE'
status_style = {'color':'#0E0'}
status_style = {'color':'#0C0'}
elif run_state['status'] == 'pending':
c_button_style = {}
status = ['Waiting for cluster resources to be allocated.']
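For orientation, a condensed stand-in for the status mapping in `get_status`, with plain strings in place of the Dash `html` components; the colours and wording are taken from the hunk above, the function name is hypothetical:

```python
def summarize_status(run_state):
    """Plain-string sketch of get_status: map run_state to (text, colour, show_cancel_button)."""
    status, colour, show_cancel = '', None, False
    if 'running' in run_state['status']:
        status = 'running'
        # only non-standalone targets (e.g. cluster jobs) expose a cancel button while running
        show_cancel = run_state['type'] not in ['standalone']
        colour = '#04D'
    elif run_state['status'] == 'input':
        status = 'process will start on click.'
    elif run_state['status'] == 'done':
        status, colour = 'DONE', '#0C0'
    elif run_state['status'] == 'pending':
        status = 'Waiting for cluster resources to be allocated.'
        show_cancel = True
    return status, colour, show_cancel

print(summarize_status({'status': 'running', 'type': 'sparkslurm'}))  # ('running', '#04D', True)
```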
@@ -188,7 +188,7 @@ def cluster_status(run_state):
with open(sp_masterfile) as f: sp_master=f.read().strip('\n')
link = '__http://' + sp_master + ':' + params.spark_job_port
link = '__' + sp_master
url = 'http://' + sp_master + ':' + params.spark_port + '/json/'
try:
@@ -214,13 +214,10 @@ def cluster_status(run_state):
out_stat.append('Error in Spark setup!')
else:
if 'FINISHED' in sp_query['completedapps'][0]['state']:
drop = canceljobs('sparkslurm__'+str(j_id))
out_stat.append('done')
out_stat.append(canceljobs(run_state,'done'))
elif 'KILLED' in sp_query['completedapps'][0]['state']:
drop = canceljobs('sparkslurm__'+str(j_id))
drop = canceljobs(run_state)
out_stat.append('Spark app was killed.')
else:
out_stat.append('running' + link)
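A hedged sketch of the polling logic in this hunk: the standalone Spark master exposes a JSON summary at `/json/`, and the state of the most recent completed app decides whether the run is reported as done, killed, or still running. The hostname, port, and use of `requests` are assumptions for illustration:

```python
import requests  # assumption: any HTTP client works; the module may use urllib instead

def poll_spark_master(sp_master, spark_port='8080'):
    """Classify a run from the Spark standalone master's /json/ status page."""
    url = 'http://' + sp_master + ':' + spark_port + '/json/'
    sp_query = requests.get(url, timeout=5).json()
    if not sp_query.get('completedapps'):
        return 'running'                   # nothing finished yet
    state = sp_query['completedapps'][0]['state']
    if 'FINISHED' in state:
        return 'done'                      # clean finish -> release the SLURM allocation
    if 'KILLED' in state:
        return 'Spark app was killed.'     # app was killed -> cancel the remaining jobs
    return 'running'
```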
@@ -241,8 +238,7 @@ def cluster_status(run_state):
return out_stat[0],link
def canceljobs(run_state):
out_status=list()
def canceljobs(run_state, out_status='cancelled'):
j_id = run_state['id']
@@ -253,8 +249,6 @@ def canceljobs(run_state):
os.system(command)
out_status = 'cancelled'
return out_status
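The reworked `canceljobs` takes the status to report as an optional argument instead of assembling a list; a self-contained sketch of the new signature (the `scancel` command string and the job id are assumptions, the real module builds its own cancel command):

```python
import os

def canceljobs(run_state, out_status='cancelled'):
    """Cancel the SLURM job(s) behind this run and report a status string."""
    j_id = run_state['id']
    # assumption: scancel is the SLURM CLI call used to drop the allocation
    command = 'scancel ' + str(j_id)
    os.system(command)
    return out_status

run_state = {'id': '12345678'}          # placeholder job id
print(canceljobs(run_state, 'done'))    # -> 'done'  (used after a FINISHED Spark app)
print(canceljobs(run_state))            # -> 'cancelled'
```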
@@ -292,7 +286,7 @@ def run(target='standalone',
if target=='standalone':
command = 'bash ' + runscriptfile
runscript.replace('#launch message','"Launching Render standalone processing script on " `hostname`')
runscript.replace('#launch message','echo "Launching Render standalone processing script on " `hostname`')
runscript += ' || echo $? > ' + logfile + '_exit'
with open(runscriptfile, 'a') as f:
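A small sketch of the standalone launch path shown here, with placeholder file names and script content: the `#launch message` marker in the run script is swapped for an `echo`, and a non-zero exit code is written to `<logfile>_exit` so the status callbacks can detect failures. Note that `str.replace` returns a new string, so the result has to be assigned for the substitution to take effect:

```python
runscriptfile = 'run_standalone.sh'   # placeholder path
logfile = 'run_standalone.log'        # placeholder path

# placeholder script body; the real content is assembled elsewhere in run()
runscript = '#launch message\nrender_standalone_task'

command = 'bash ' + runscriptfile

# str.replace() does not modify in place -- assign the result back
runscript = runscript.replace(
    '#launch message',
    'echo "Launching Render standalone processing script on " `hostname`')

# record the task's non-zero exit code next to the log file
runscript += ' || echo $? > ' + logfile + '_exit'

with open(runscriptfile, 'a') as f:
    f.write(runscript)
```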