diff --git a/materializationengine/blueprints/client/api2.py b/materializationengine/blueprints/client/api2.py
index 9b4767b1..a210f115 100644
--- a/materializationengine/blueprints/client/api2.py
+++ b/materializationengine/blueprints/client/api2.py
@@ -659,7 +659,8 @@ def get(
         analysis_version, analysis_tables = get_analysis_version_and_tables(
             target_datastack, target_version, session
         )
-
+        if analysis_tables is None:
+            abort(404, f"no tables found in target_datastack {target_datastack} for version {target_version}")
         schema = AnalysisTableSchema()
         tables = schema.dump(analysis_tables, many=True)
 
@@ -667,8 +668,9 @@
         for table in tables:
             table_name = table["table_name"]
             ann_md = db.database.get_table_metadata(table_name)
-            ann_md.pop("id")
-            ann_md.pop("deleted")
+            if ann_md is not None:
+                ann_md.pop("id")
+                ann_md.pop("deleted")
             table.update(ann_md)
 
         return tables, 200
diff --git a/materializationengine/utils.py b/materializationengine/utils.py
index 6ba97826..bdda71a7 100644
--- a/materializationengine/utils.py
+++ b/materializationengine/utils.py
@@ -127,7 +127,18 @@ def check_write_permission(db, table_name):
 
 
 def check_read_permission(db, table_name):
+    """Check if user has read permission for table
+
+    Args:
+        db (DynamicAnnotationInterface): db to check
+        table_name (str): table to check
+
+    Returns:
+        dict: metadata of table
+    """
     metadata = db.database.get_table_metadata(table_name)
+    if metadata is None:
+        abort(404, f"Table {table_name} not found in {db.aligned_volume} database")
     if metadata["read_permission"] == "GROUP":
         if not users_share_common_group(metadata["user_id"]):
             abort(
@@ -142,6 +153,8 @@
 
 def check_ownership(db, table_name):
     metadata = db.database.get_table_metadata(table_name)
+    if metadata is None:
+        abort(404, f"Table {table_name} not found in {db.aligned_volume} database")
     if metadata["user_id"] != str(g.auth_user["id"]):
         abort(401, "You cannot do this because you are not the owner of this table")
     return metadata
diff --git a/materializationengine/workflows/ingest_new_annotations.py b/materializationengine/workflows/ingest_new_annotations.py
index d76ca4e5..fdc401e8 100644
--- a/materializationengine/workflows/ingest_new_annotations.py
+++ b/materializationengine/workflows/ingest_new_annotations.py
@@ -67,9 +67,7 @@ def process_new_annotations_workflow(
     )
 
     for mat_metadata in mat_info:
-        if mat_metadata["row_count"] < 1_000_000 and mat_metadata.get(
-            "segmentation_table_name"
-        ):
+        if mat_metadata.get("segmentation_table_name"):
             annotation_chunks = generate_chunked_model_ids(mat_metadata)
             process_chunks_workflow = chain(
                 ingest_new_annotations_workflow(
@@ -102,7 +100,9 @@ def ingest_table_svids(
     mat_metadata = mat_info[0]  # only one entry for a single table
     table_created = create_missing_segmentation_table(mat_metadata)
     if table_created:
-        celery_logger.info(f'Table created: {mat_metadata.get("segmentation_table_name")}')
+        celery_logger.info(
+            f'Table created: {mat_metadata.get("segmentation_table_name")}'
+        )
     if annotation_ids:
         ingest_workflow = ingest_new_annotations.si(
             None, mat_metadata, annotation_ids, lookup_root_ids=False
@@ -377,7 +377,8 @@ def ingest_new_annotations_workflow(mat_metadata: dict):
     """
     celery_logger.info("Ingesting new annotations...")
    if mat_metadata["row_count"] >= 1_000_000:
-        return fin.si()
+        if not mat_metadata.get("lookup_all_root_ids", False):
+            return fin.si()
     annotation_chunks = generate_chunked_model_ids(mat_metadata)
     table_created = create_missing_segmentation_table(mat_metadata)
     if table_created:
diff --git a/materializationengine/workflows/update_database_workflow.py b/materializationengine/workflows/update_database_workflow.py
index 7f772508..4ada6906 100644
--- a/materializationengine/workflows/update_database_workflow.py
+++ b/materializationengine/workflows/update_database_workflow.py
@@ -10,7 +10,7 @@
     get_materialization_info,
     monitor_workflow_state,
     workflow_complete,
-    fin
+    fin,
 )
 from materializationengine.task import LockedTask
 from materializationengine.utils import get_config_param
@@ -72,6 +72,7 @@ def update_database_workflow(self, datastack_info: dict, **kwargs):
         analysis_version=None,
         materialization_time_stamp=materialization_time_stamp,
     )
+    celery_logger.info(mat_info)
 
     update_live_database_workflow = []
 
@@ -90,7 +91,7 @@ def update_database_workflow(self, datastack_info: dict, **kwargs):
             update_live_database_workflow.append(workflow)
         else:
            update_live_database_workflow.append(fin.si())
-    
+
     run_update_database_workflow = chain(
         *update_live_database_workflow, workflow_complete.si("update_root_ids")
     ).apply_async(kwargs={"Datastack": datastack_info["datastack"]})