diff --git a/dataproc/api/views/__pycache__/proc_input_view.cpython-38.pyc b/dataproc/api/views/__pycache__/proc_input_view.cpython-38.pyc
index 26b238c6ed0bedc1e8b53c345aa26046bef1ba86..e7c5a9230487bd57d5ff909e480b15aee814ba8d 100644
Binary files a/dataproc/api/views/__pycache__/proc_input_view.cpython-38.pyc and b/dataproc/api/views/__pycache__/proc_input_view.cpython-38.pyc differ
diff --git a/dataproc/api/views/proc_input_view.py b/dataproc/api/views/proc_input_view.py
index 29dfc0488b88d2cd9809646b5fb7084593979e7e..5bd7b3bdeeb5992182ecc1022c9a4e315cae3d6e 100644
--- a/dataproc/api/views/proc_input_view.py
+++ b/dataproc/api/views/proc_input_view.py
@@ -46,7 +46,9 @@ def storeProcInput(request):
         # json_data_mtz=json_data['dataprocessing']
 
         try:
-            report=storeParseReport(json_data_report)
+            # NOTE(review): create_or_update_test is only added commented-out below;
+            # calling it raises NameError, masked by the bare except. Keep the real call:
+            report=storeParseReport(json_data_report)
             # ligand=storeParseLigand(json_data_ligand, report)
             # storeParseRC(json_data_rc, report)
             # storeParsePRC(json_data_prc, ligand)
@@ -60,6 +62,47 @@ def storeProcInput(request):
         except :
             return JsonResponse({"STATUS":"ERROR OCCURRED"}, safe=False)
 
+# @csrf_exempt
+# def create_or_update_test(node_name, unique_properties_dict, all_properties_dict):
+#     logger.info("ACTIVATE FUNCTION")
+
+#     keyvaluepair=""
+
+#     for keys in unique_properties_dict:
+#         logger.info(keys)
+#         logger.info(unique_properties_dict[keys])
+
+#         keyvaluepair = keyvaluepair + keys + '=' + '\'' + unique_properties_dict[keys] + '\'' + ','
+
+#     keyvaluepair=keyvaluepair[:-1]
+#     logger.info(keyvaluepair)
+#     # exist=Report.nodes.get(jobid="6e83d158-fd27-494a-aa45-90f0fad00432")
+#     # logger.info("LOG REPOORT TYPE")
+#     # logger.info(type(exist))
+#     # test=exist.serialize
+#     # logger(test)
+#     key='jobid'
+#     # val='6e83d158-fd27-494a-aa45-90f0fad00432'
+#     all_nodes=Report.nodes
+#     logger.info("PRINT ALL NODE")
+#     logger.info(type(all_nodes))
+
+#     for node in all_nodes:
+#         logger.info("PRINT NODE")
+#         logger.info(node)
+#         # node = node.serialize
+#         logger.info("PRINT NODE")
+#         logger.info(type(node))
+#         logger.info(node.key)
+#         if (node[key] == val):
+#             exist = node
+#             break
+#     logger.info("PRINT VALUES")
+#     logger.info(keyvaluepair)
+#     logger.info(exist)
+
+
+
 @csrf_exempt
 def storeParseReport(data):
 
@@ -72,7 +115,6 @@ def storeParseReport(data):
         autoprocscaling=data['dataprocessing']['processingdata']['AutoProcscaling']
         autoprocscalingstatistics=data['dataprocessing']['processingdata']['AutoProcScalingStatistics']
         autoproc=data['dataprocessing']['processingdata']['AutoProc']
-        molprobity=data['ligandfitting']['ligands']['1']['validationstatistics']['molprobity']
 
         report=Report(command=GPhL_pipedream['command'], 
             jsonversion=GPhL_pipedream['jsonversion'],
@@ -81,20 +123,16 @@ def storeParseReport(data):
             jobid=GPhL_pipedream['jobid'],
             gphlpipedream_output=GPhL_pipedream['output'],
             version=GPhL_pipedream['version'],
+            terminationstatus=GPhL_pipedream['terminationstatus'],
 
-            molprobitypercentile=molprobity['molprobitypercentile'],
-            ramaoutlierpercent=molprobity['ramaoutlierpercent'],
-            cbetadeviations=molprobity['cbetadeviations'],
-            ramafavoredpercent=molprobity['ramafavoredpercent'],
-            poorrotamers=molprobity['poorrotamers'],
-            rmsbonds=molprobity['rmsbonds'],
-            rmsangles=molprobity['rmsangles'],
-            clashpercentile=molprobity['clashpercentile'],
-            poorrotamerspercent=molprobity['poorrotamerspercent'],
-            clashscore=molprobity['clashscore'],
-            ramafavored=molprobity['ramafavored'],
-            molprobityscore=molprobity['molprobityscore'],
-            ramaoutliers=molprobity['ramaoutliers'],
+            refinedCell_beta=autoproc['refinedCell_beta'],
+            refinedCell_b=autoproc['refinedCell_b'],
+            wavelength=autoproc['wavelength'],
+            refinedCell_a=autoproc['refinedCell_a'],
+            refinedCell_alpha=autoproc['refinedCell_alpha'],
+            spaceGroup=autoproc['spaceGroup'],
+            refinedCell_c=autoproc['refinedCell_c'],
+            refinedCell_gamma=autoproc['refinedCell_gamma'],
 
             recordTimeStamp=autoprocscaling['recordTimeStamp'],
             resolutionEllipsoidAxis13=autoprocscaling['resolutionEllipsoidAxis13'],
@@ -174,18 +212,10 @@ def storeParseReport(data):
             overall_meanIOverSigI=autoprocscalingstatistics['overall']['meanIOverSigI'],
             overall_anomalousCompletenessSpherical=autoprocscalingstatistics['overall']['anomalousCompletenessSpherical'],
             overall_ccAnomalous=autoprocscalingstatistics['overall']['ccAnomalous'],
-            overall_rMeasAllIPlusIMinus=autoprocscalingstatistics['overall']['rMeasAllIPlusIMinus'],
-
-            refinedCell_beta=autoproc['refinedCell_beta'],
-            refinedCell_b=autoproc['refinedCell_b'],
-            wavelength=autoproc['wavelength'],
-            refinedCell_a=autoproc['refinedCell_a'],
-            refinedCell_alpha=autoproc['refinedCell_alpha'],
-            spaceGroup=autoproc['spaceGroup'],
-            refinedCell_c=autoproc['refinedCell_c'],
-            refinedCell_gamma=autoproc['refinedCell_gamma'])
+            overall_rMeasAllIPlusIMinus=autoprocscalingstatistics['overall']['rMeasAllIPlusIMinus'])
 
         report.save()
+        logger.info(type(report))
         return report.serialize
 
     except:
@@ -256,8 +286,9 @@ def storeParseRC(data, report):
                 step=input_rc['step'],
                 RMSbonds=input_rc['RMSbonds'],
                 RMSangles=input_rc['RMSangles'],
-                R=input_rc['R'])
-                # WatersPresent=input_rc['WatersPresent'])
+                R=input_rc['R'],
+                WatersPresent=input_rc['WatersPresent'])
+
             rc.save()
             rc2=rc.serialize
             connectReportRC(report['report_node_properties']['jobid'], rc2['rc_node_properties']['uuid'])
@@ -275,9 +306,25 @@ def storeParseLigand(data, report):
     """
 
     try:
+            molprobity=data['ligandfitting']['ligands']['1']['validationstatistics']['molprobity']
+
             ligand=Ligand(ligand_id=data['id'],
                 depositioncoordinates=data['postrefinement']['deposition']['depositioncoordinates'],
-                depositionreflns=data['postrefinement']['deposition']['depositionreflns'])
+                depositionreflns=data['postrefinement']['deposition']['depositionreflns'],
+                molprobitypercentile=molprobity['molprobitypercentile'],
+                ramaoutlierpercent=molprobity['ramaoutlierpercent'],
+                cbetadeviations=molprobity['cbetadeviations'],
+                ramafavoredpercent=molprobity['ramafavoredpercent'],
+                poorrotamers=molprobity['poorrotamers'],
+                rmsbonds=molprobity['rmsbonds'],
+                rmsangles=molprobity['rmsangles'],
+                clashpercentile=molprobity['clashpercentile'],
+                poorrotamerspercent=molprobity['poorrotamerspercent'],
+                clashscore=molprobity['clashscore'],
+                ramafavored=molprobity['ramafavored'],
+                molprobityscore=molprobity['molprobityscore'],
+                ramaoutliers=molprobity['ramaoutliers'])
+
             ligand.save()
             ligand2=ligand.serialize
             connectReportLigand(report['report_node_properties']['jobid'], ligand2['ligand_node_properties']['uuid'])
@@ -480,9 +527,3 @@ def connectStructureFactorsMTZ(data1, data2):
     except:
         return JsonResponse({"STATUS": "ERROR OCCURRED WHILE CONNECTING STRUCTURE FACTORS TO MTZ"}, safe=False)
 
-# def exists(node=None, property=None, value=None):
-#     filter_node = (":" + node) if node != None else ''
-#     filter_value = ("{" + property + ": '" + value + "'}") if property != None and value != None else '' 
-#     return cypher_query("MATCH(n" + filter_node + filter_value + ")" + " return count(n) > 0 as exists;"  )[0][0][0]
-
-# exists(node='Report')
\ No newline at end of file