From b66b851cdf0baa77df3401ec6e313d73b75b58f3 Mon Sep 17 00:00:00 2001
From: Christopher Rhodes <christopher.rhodes@embl.de>
Date: Fri, 22 Mar 2024 14:54:57 +0100
Subject: [PATCH] Removed JSON manifest file, logged summaries to logging
 service instead

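Summaries that previously accumulated in the per-session manifest.json
are now emitted through the session's logging service, e.g.:

    session.log_info(f'Loaded model {mid}')
    session.log_info(f'Completed segmentation of {input_filename}')

These messages should land in the existing per-session log file
(self.paths['logs'] / 'session.log') via the logger already configured
in Session.__init__, so record_workflow_run() and the empty
manifest.json bootstrap are no longer needed and are removed.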
---
 model_server/base/api.py     | 11 ++++++++---
 model_server/base/session.py | 18 ------------------
 2 files changed, 8 insertions(+), 21 deletions(-)

diff --git a/model_server/base/api.py b/model_server/base/api.py
index 8259a98c..c5bef994 100644
--- a/model_server/base/api.py
+++ b/model_server/base/api.py
@@ -46,6 +46,7 @@ def change_path(key, path):
             status_code=404,
             detail=e.__str__(),
         )
+    session.log_info(f'Changed {key} path to {path}')
     return session.get_paths()
 
 @app.put('/paths/watch_input')
@@ -67,11 +68,15 @@ def list_active_models():
 
 @app.put('/models/dummy_semantic/load/')
 def load_dummy_model() -> dict:
-    return {'model_id': session.load_model(DummySemanticSegmentationModel)}
+    mid = session.load_model(DummySemanticSegmentationModel)
+    session.log_info(f'Loaded model {mid}')
+    return {'model_id': mid}
 
 @app.put('/models/dummy_instance/load/')
 def load_dummy_model() -> dict:
-    return {'model_id': session.load_model(DummyInstanceSegmentationModel)}
+    mid = session.load_model(DummyInstanceSegmentationModel)
+    session.log_info(f'Loaded model {mid}')
+    return {'model_id': mid}
 
 @app.put('/workflows/segment')
 def infer_img(model_id: str, input_filename: str, channel: int = None) -> dict:
@@ -83,5 +88,5 @@ def infer_img(model_id: str, input_filename: str, channel: int = None) -> dict:
         session.paths['outbound_images'],
         channel=channel,
     )
-    session.record_workflow_run(record)
+    session.log_info(f'Completed segmentation of {input_filename}')
     return record
\ No newline at end of file
diff --git a/model_server/base/session.py b/model_server/base/session.py
index 6119690b..bd76709f 100644
--- a/model_server/base/session.py
+++ b/model_server/base/session.py
@@ -1,4 +1,3 @@
-import json
 import logging
 import os
 
@@ -8,14 +7,10 @@ from typing import Dict
 
 import model_server.conf.defaults
 from model_server.base.models import Model
-from model_server.base.workflows import WorkflowRunRecord
 
 logger = logging.getLogger(__name__)
 
 
-def create_manifest_json():
-    pass
-
 class Singleton(type):
     _instances = {}
 
@@ -34,7 +29,6 @@ class Session(object, metaclass=Singleton):
     def __init__(self, root: str = None):
         print('Initializing session')
         self.models = {} # model_id : model object
-        self.manifest = [] # paths to data as well as other metadata from each inference run
         self.paths = self.make_paths(root)
 
         self.logfile = self.paths['logs'] / f'session.log'
@@ -42,9 +36,6 @@ class Session(object, metaclass=Singleton):
 
         self.log_info('Initialized session')
 
-        self.manifest_json = self.paths['logs'] / f'manifest.json'
-        open(self.manifest_json, 'w').close() # instantiate empty json file
-
     def get_paths(self):
         return self.paths
 
@@ -97,15 +88,6 @@ class Session(object, metaclass=Singleton):
     def log_error(self, msg):
         logger.error(msg)
 
-    def record_workflow_run(self, record: WorkflowRunRecord or None):
-        """
-        Append a JSON describing inference data to this session's manifest
-        """
-        self.log_info(f'Ran model {record.model_id} on {record.input_filepath} to infer {record.output_filepath}')
-        with open(self.manifest_json, 'w+') as fh:
-            json.dump(record.dict(), fh)
-
-
     def load_model(self, ModelClass: Model, params: Dict[str, str] = None) ->  dict:
         """
         Load an instance of a given model class and attach to this session's model registry
-- 
GitLab