diff --git a/dataproc/api/models/dataset_model.py b/dataproc/api/models/dataset_model.py
index 79a1a0caf2060a8fe010c9cf4835bcf099de90b3..1bfe01d4094367e2ee7b6844ca835d7dfab209c2 100644
--- a/dataproc/api/models/dataset_model.py
+++ b/dataproc/api/models/dataset_model.py
@@ -40,7 +40,8 @@ class Dataset(StructuredNode):
  		"""
  		
  		return {
- 		'dataset_node_properties': {
+ 		'dataset_node_properties': 
+ 		{
  		'uuid': self.uuid,
  		'fileTemplateName': self.fileTemplateName,
  		'userUuid': self.userUuid,
diff --git a/dataproc/api/views/draft_view.py b/dataproc/api/views/draft_view.py
deleted file mode 100644
index 5b61f338d0d95195d811e7287e9c8e8a4be0493f..0000000000000000000000000000000000000000
--- a/dataproc/api/views/draft_view.py
+++ /dev/null
@@ -1,40 +0,0 @@
-@csrf_exempt
-def storeParseDataset(data):
-
-    """
-    Creates nodes for each dataset with relative properties
-    """
-
-    try:
-        logger.info('CHECK FUNCTION')
-        Dataset.create_or_update(uuid=data['uuid'],
-            userUuid=data['userUuid'], 
-            crystalUuid=data['crystalUuid'],
-            currentPath=data['currentPath'],
-            generationPath=data['generationPath'],
-            fileTemplateName=data['fileTemplateName'],
-            blStartingDate=data['blStartingDate'],
-            beamlineName=data['beamlineName'],
-            facilityName=data['facilityName'])
-        logger.info('CREATE OR UPDATE')
-
-#         # dataset.save()
-
-#         # if dataset == None:
-#         #     datasetNew = Dataset(uuid=data['uuid'],
-#         #         userUuid=data['userUuid'], 
-#         #         crystalUuid=data['crystalUuid'],
-#         #         currentPath=data['currentPath'],
-#         #         generationPath=data['generationPath'],
-#         #         fileTemplateName=data['fileTemplateName'],
-#         #         blStartingDate=data['blStartingDate'],
-#         #         beamlineName=data['beamlineName'],
-#         #         facilityName=data['facilityName'])
-
-#         #     datasetNew.save()
-        return JsonResponse({"Status": "INPUT REGISTERED"})
-        logger.info('RETURN')
-
-    except:
-        print(sys.exc_info()[0])
-        return ({"STATUS": "ERROR OCCURRED WHILE REGISTERING DATASET"})
\ No newline at end of file
diff --git a/dataproc/api/views/input_view.py b/dataproc/api/views/input_view.py
index 77ff1658ba9851327b771b906f8ee9465c070620..ca361fa229334221aab5473f3cf115097a270532 100644
--- a/dataproc/api/views/input_view.py
+++ b/dataproc/api/views/input_view.py
@@ -31,33 +31,38 @@ def storeInput(request):
         json_data=json.loads(request.body)
         
         json_data_dataset=json_data['dataset']
-        # json_data_storagehost=json_data['storageHost']
-        # json_data_user=json_data['user']
-        # json_data_construct=json_data['construct']
-        # json_data_computationhost=json_data['computationHost']
-        # json_data_datacollection=json_data['dataCollection']
-        # json_data_ocf=json_data['OCF']
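+        # Pull each top-level section out of the request payload; the OCF entries sit under 'construct'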
+        json_data_storagehost=json_data['storageHost']
+        json_data_user=json_data['user']
+        json_data_construct=json_data['construct']
+        json_data_computationhost=json_data['computationHost']
+        json_data_datacollection=json_data['dataCollection']
+        json_data_ocf=json_data['construct']['OCF']
 
         try:
             # Register nodes
             storeParseDataset(json_data_dataset)
-            # storeParseStorageHost(json_data_storagehost)
-            # storeParseUser(json_data_user)
-            # storeParseConstruct(json_data_construct)
-            # storeParseComputationHost(json_data_computationhost)
-            # storeParseDataCollection(json_data_datacollection)
-            # storeParseOCF(json_data_ocf)
+            storeParseStorageHost(json_data_storagehost)
+            storeParseUser(json_data_user)
+            storeParseConstruct(json_data_construct)
+            storeParseComputationHost(json_data_computationhost)
+            storeParseDataCollection(json_data_datacollection)
+            storeParseOCF(json_data_ocf)
 
             # Register relationships 
-            # connectConstructUser(json_data_construct, json_data_user)
-            # connectConstructStorageHost(json_data_construct, json_data_storagehost)
-            # connectConstructComputationHost(json_data_construct, json_data_computationhost)
-            # connectDatasetConstruct(json_data_dataset, json_data_construct)
-            # connectDatasetStorageHost(json_data_dataset, json_data_storagehost)
-            # connectDataCollectionDataset(json_data_datacollection, json_data_dataset)
+            connectConstructUser(json_data_construct, json_data_user)
+            connectConstructStorageHost(json_data_construct, json_data_storagehost)
+            connectConstructComputationHost(json_data_construct, json_data_computationhost)
+            connectDatasetConstruct(json_data_dataset, json_data_construct)
+            connectDatasetStorageHost(json_data_dataset, json_data_storagehost)
+            connectDataCollectionDataset(json_data_datacollection, json_data_dataset)
 
-            return JsonResponse({"Status": "INPUT SUCCESSFULLY REGISTERED"})
+            for input_ocf in json_data_ocf:
+                connectConstructOCF(json_data_construct, input_ocf)
 
+            return JsonResponse({"Status": "INPUT SUCCESSFULLY REGISTERED"})
+
         except :
             return JsonResponse({"Status":"ERROR OCCURRED"}, safe=False)
 
@@ -69,7 +74,8 @@ def storeParseDataset(data):
     """
 
     try:
-        dataset=Dataset.get_or_create(uuid=data['uuid'],
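+        # Instantiate the Dataset node directly with the parsed properties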
+        dataset=Dataset(uuid=data['uuid'],
             userUuid=data['userUuid'], 
             crystalUuid=data['crystalUuid'],
             currentPath=data['currentPath'],
@@ -294,4 +300,21 @@ def connectDataCollectionDataset(data1, data2):
         return JsonResponse({"STATUS": datacollection.generates.connect(dataset)}, safe=False)
 
     except:
-        return JsonResponse({"STATUS": "ERROR OCCURRED WHILE CONNECTING DATA COLLECTION TO DATASET"}, safe=False)
\ No newline at end of file
+        return JsonResponse({"STATUS": "ERROR OCCURRED WHILE CONNECTING DATA COLLECTION TO DATASET"}, safe=False)
+
+@csrf_exempt
+def connectConstructOCF(data1, data2):
+
+    """
+    Creates a relationship between a construct and an OCF
+    """
+
+    try:
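+        # Look up both endpoints by uuid, then link them via the has_ocf relationship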
+        construct=Construct.nodes.get(uuid=data1["uuid"])
+        ocf=OCF.nodes.get(uuid=data2["uuid"])
+
+        return JsonResponse({"STATUS": construct.has_ocf.connect(ocf)}, safe=False)
+
+    except:
+        return JsonResponse({"STATUS": "ERROR OCCURRED WHILE CONNECTING DATA COLLECTION TO DATASET"}, safe=False)