Skip to content

Commit

Permalink
manage stale db status and version bump
Browse files · Browse the repository at this point in the history
  • Loading branch information
Sandip117 committed Mar 5, 2024
1 parent 8e77410 commit cc7d807
Show file tree
Hide file tree
Showing 5 changed files with 54 additions and 10 deletions.
2 changes: 1 addition & 1 deletion app/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

class Settings(BaseSettings):
pflink_mongodb: MongoDsn = 'mongodb://localhost:27017'
version: str = "3.8.7"
version: str = "3.8.8"
mongo_username: str = "admin"
mongo_password: str = "admin"
log_level: str = "DEBUG"
Expand Down
1 change: 0 additions & 1 deletion app/controllers/subprocesses/status.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,7 +232,6 @@ def get_analysis_status(response: dict) -> dict:
"""
Get details about an analysis running on the given feed
"""
MAX_JOBS = 12
analysis_details = {}

created = response['created_jobs']
Expand Down
18 changes: 10 additions & 8 deletions app/controllers/subprocesses/wf_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@ def analysis_retry(workflow: WorkflowDBSchema):
"""
# Reset workflow status if max service_retry is not reached
if workflow.service_retry < 5 and not workflow.response.status and workflow.response.workflow_state == State.ANALYZING:
logger.warning(f"Retrying request.{workflow.service_retry}/5 retries left.", extra=d)
logger.warning(f"Retrying request.{5 - workflow.service_retry}/5 retries left.", extra=d)
workflow.service_retry += 1
workflow.response.feed_id = ""
workflow.response.feed_name = ""
Expand All @@ -161,7 +161,7 @@ def analysis_retry(workflow: WorkflowDBSchema):
update_workflow(key, workflow)
if workflow.service_retry >= 5: logger.warning("All retries exhausted. Giving up on this workflow request.",
extra=d)
return workflow
return retrieve_workflow(key)

def update_status(request: WorkflowRequestSchema):
"""
Expand Down Expand Up @@ -272,11 +272,11 @@ def do_cube_create_feed(request: WorkflowRequestSchema, cube_url: str, retries:
"""
Create a new feed in `CUBE` if not already present
"""
logger.info(f"Creating Chris client with {cube_url, request.cube_user_info.username, request.cube_user_info.password}", extra=d)
logger.debug(f"Creating Chris client with {cube_url, request.cube_user_info.username, request.cube_user_info.password}", extra=d)
client = do_cube_create_user(cube_url, request.cube_user_info.username, request.cube_user_info.password)
logger.info(f"Created client details {client}", extra=d)
logger.debug(f"Created client details {client}", extra=d)

logger.info(f"Fetching PACS details for {request.PACS_directive.__dict__}", extra=d)
logger.debug(f"Fetching PACS details for {request.PACS_directive.__dict__}", extra=d)
pacs_details = {}
try:
pacs_details = client.getPACSdetails(request.PACS_directive.__dict__)
Expand All @@ -285,7 +285,7 @@ def do_cube_create_feed(request: WorkflowRequestSchema, cube_url: str, retries:
feed_name = substitute_dicom_tags(request.workflow_info.feed_name, pacs_details)
logger.info(f"Fetching data path..", extra=d)
data_path = client.getSwiftPath(pacs_details)
logger.info(f"Received data path: {data_path}", extra=d)
logger.debug(f"Received data path: {data_path}", extra=d)
if retries > 0:
feed_name = feed_name + f"-retry#{retries}"

Expand Down Expand Up @@ -317,12 +317,13 @@ def __run_plugin_instance(previous_id: str, request: WorkflowRequestSchema, clie
"""
# search for plugin
plugin_search_params = {"name": request.workflow_info.plugin_name, "version": request.workflow_info.plugin_version}
logger.info(f"Adding plugin: {plugin_search_params} to pl_inst: {previous_id}", extra=d)
plugin_id = client.getPluginId(plugin_search_params)

# convert CLI params from string to a JSON dictionary
feed_params = str_to_param_dict(request.workflow_info.plugin_params)
feed_params["previous_id"] = previous_id
logger.info(f"Creating new analysis with plugin: {plugin_search_params} and parameters: {feed_params}",
logger.debug(f"Creating new analysis with plugin: {plugin_search_params} and parameters: {feed_params}",
extra=d)
feed_resp = client.createFeed(plugin_id, feed_params)

Expand All @@ -333,9 +334,10 @@ def __run_pipeline_instance(previous_id: str, request: WorkflowRequestSchema, cl
"""
# search for pipeline
pipeline_search_params = {"name": request.workflow_info.pipeline_name}
logger.info(f"Adding pipeline: {pipeline_search_params} to pl_inst: {previous_id}", extra=d)
pipeline_id = client.getPipelineId(pipeline_search_params)
pipeline_params = {"previous_plugin_inst_id": previous_id, "name": request.workflow_info.pipeline_name}
logger.info(f"Creating new analysis with pipeline: {pipeline_search_params}.",
logger.debug(f"Creating new analysis with pipeline: {pipeline_search_params}.",
extra=d)
feed_resp = client.createWorkflow(str(pipeline_id), pipeline_params)

Expand Down
2 changes: 2 additions & 0 deletions cmd.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
#!/bin/bash
docker run --rm fnndsc/pl-dicom_filter:1.1.2 chris_plugin_info
41 changes: 41 additions & 0 deletions run-MainThread.log
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
2024-03-05T10:57:27.162733-05:00 START
2024-03-05T10:57:48.036372-05:00
{
"status": true,
"obj": {
"run": {
"status": true,
"stdout": {
"id": 145,
"creation_date": "2024-03-05T10:57:47.956671-05:00",
"name": "pl-dicom_filter",
"version": "1.1.2",
"dock_image": "fnndsc/pl-dicom_filter:1.1.2",
"public_repo": "https://github.com/FNNDSC/pl-dicom_filter",
"icon": "",
"type": "ds",
"stars": 0,
"authors": "FNNDSC <[email protected]>",
"title": "A ChRIS plugin to filter dicom files using dicom tags",
"category": "",
"description": "A ChRIS DS plugin template",
"documentation": "https://github.com/FNNDSC/python-chrisapp-template",
"license": "MIT",
"execshell": "/usr/local/bin/python",
"selfpath": "/usr/local/bin",
"selfexec": "dicom_filter",
"min_number_of_workers": 1,
"max_number_of_workers": 1,
"min_cpu_limit": 1000,
"max_cpu_limit": 2147483647,
"min_memory_limit": 2048,
"max_memory_limit": 2147483647,
"min_gpu_limit": 0,
"max_gpu_limit": 0
},
"stderr": "",
"returncode": 0
}
}
}
2024-03-05T10:57:48.036535-05:00 END

0 comments on commit cc7d807

Please sign in to comment.