From 1bd7c8489b1fb5c0833be69d3ee3e1a090e5ae9b Mon Sep 17 00:00:00 2001
From: spapa013
Date: Mon, 18 Nov 2024 15:39:31 -0800
Subject: [PATCH 1/6] new dockerfile source and clean up a bit

---
 Dockerfile | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 62e3ba5..38db6bc 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM at-docker.ad.bcm.edu:5000/microns-base
+FROM at-docker:5000/microns-base:cuda11.8.0-python3.8
 LABEL maintainer="Stelios Papadopoulos "

 RUN pip3 install \
@@ -8,11 +8,11 @@ RUN pip3 install \
     caveclient \
     nglui

-WORKDIR /root
+WORKDIR /
 ARG CLOUDVOLUME_TOKEN
 RUN mkdir -p .cloudvolume/secrets
 RUN echo "{\"token\": \"${CLOUDVOLUME_TOKEN:-}\"}" > .cloudvolume/secrets/cave-secret.json

 COPY . /src/microns-materialization
-RUN pip3 install --prefix=$(python -m site --user-base) -e /src/microns-materialization/python/microns-materialization
-RUN pip3 install --prefix=$(python -m site --user-base) -e /src/microns-materialization/python/microns-materialization-api
\ No newline at end of file
+RUN pip install -e /src/microns-materialization/python/microns-materialization
+RUN pip install -e /src/microns-materialization/python/microns-materialization-api
\ No newline at end of file

From e18af3d23d3056ea9d40c1075cdd0dffa7b08c6a Mon Sep 17 00:00:00 2001
From: spapa013
Date: Mon, 18 Nov 2024 15:41:07 -0800
Subject: [PATCH 2/6] add utils/skeleton_utils.py

---
 .../utils/__init__.py                         |  0
 .../utils/skeleton_utils.py                   | 16 ++++++++++++++++
 2 files changed, 16 insertions(+)
 create mode 100644 python/microns-materialization-api/microns_materialization_api/utils/__init__.py
 create mode 100644 python/microns-materialization-api/microns_materialization_api/utils/skeleton_utils.py

diff --git a/python/microns-materialization-api/microns_materialization_api/utils/__init__.py b/python/microns-materialization-api/microns_materialization_api/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/python/microns-materialization-api/microns_materialization_api/utils/skeleton_utils.py b/python/microns-materialization-api/microns_materialization_api/utils/skeleton_utils.py
new file mode 100644
index 0000000..96af369
--- /dev/null
+++ b/python/microns-materialization-api/microns_materialization_api/utils/skeleton_utils.py
@@ -0,0 +1,16 @@
+import numpy as np
+
+def convert_skeleton_to_nodes_edges(
+    skeleton,
+    verbose=False,):
+    """
+    Convert an (N, 2, 3) array of skeleton edge segments into unique vertices and edge indices. From BCelli.
+    """
+
+    all_skeleton_vertices = skeleton.reshape(-1,3)
+    unique_rows,indices = np.unique(all_skeleton_vertices,return_inverse=True,axis=0)
+
+    # TODO: also merge distinct vertices that fall within a small range of each other; for now, pair the deduplicated indices back into edges
+    reshaped_indices = indices.reshape(-1,2)
+
+    return unique_rows,reshaped_indices
\ No newline at end of file

From 9516a66ad0589d4cb696cd244e2b6d5f5313b5cb Mon Sep 17 00:00:00 2001
From: spapa013
Date: Mon, 18 Nov 2024 15:41:57 -0800
Subject: [PATCH 3/6] add minnie65_meshwork_axon_dendrite_skeletons adapter

---
 .../microns_materialization_api/config/adapters.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/python/microns-materialization-api/microns_materialization_api/config/adapters.py b/python/microns-materialization-api/microns_materialization_api/config/adapters.py
index bebe813..4efcea3 100644
--- a/python/microns-materialization-api/microns_materialization_api/config/adapters.py
+++ b/python/microns-materialization-api/microns_materialization_api/config/adapters.py
@@ -7,7 +7,7 @@
 import h5py
 import trimesh
 from meshparty import meshwork, skeleton
-from microns_utils.adapter_utils import FilePathAdapter, adapt_mesh_hdf5
+from microns_utils.adapter_utils import FilePathAdapter, NumpyAdapter, adapt_mesh_hdf5


 class TrimeshAdapter(FilePathAdapter):
@@ -39,11 +39,13 @@ def get(self, filepath):
 minnie65_meshes = TrimeshAdapter('filepath@minnie65_meshes')
 minnie65_meshwork = MeshworkAdapter('filepath@minnie65_meshwork')
 minnie65_pcg_skeletons = PCGSkelAdapter('filepath@minnie65_pcg_skeletons')
+minnie65_meshwork_axon_dendrite_skeletons = NumpyAdapter('filepath@minnie65_meshwork_axon_dendrite_skeletons')

 minnie65_materialization = {
     'minnie65_meshes': minnie65_meshes,
     'minnie65_meshwork': minnie65_meshwork,
-    'minnie65_pcg_skeletons': minnie65_pcg_skeletons
+    'minnie65_pcg_skeletons': minnie65_pcg_skeletons,
+    'minnie65_meshwork_axon_dendrite_skeletons': minnie65_meshwork_axon_dendrite_skeletons,
 }

 # H01

From 20030dd63651de915b17939c3284fafa3ec95e09 Mon Sep 17 00:00:00 2001
From: spapa013
Date: Mon, 18 Nov 2024 15:42:19 -0800
Subject: [PATCH 4/6] add meshwork_axon_dendrite_skeletons external

---
 .../microns_materialization_api/config/externals.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/python/microns-materialization-api/microns_materialization_api/config/externals.py b/python/microns-materialization-api/microns_materialization_api/config/externals.py
index 1d332cd..3d42f66 100644
--- a/python/microns-materialization-api/microns_materialization_api/config/externals.py
+++ b/python/microns-materialization-api/microns_materialization_api/config/externals.py
@@ -18,9 +18,11 @@
 minnie65_materialization_external_meshes_path = base_path / 'minnie65' / 'meshes'
 minnie65_materialization_external_meshwork_path = base_path / 'minnie65' / 'meshwork'
 minnie65_materialization_external_pcg_skeletons_path = base_path / 'minnie65' / 'pcg_skeletons'
+minnie65_materialization_external_meshwork_axon_dendrite_skeletons_path = base_path / 'minnie65' / 'meshwork_axon_dendrite_skeletons'

 minnie65_materialization = {
     'minnie65_meshes': djp.make_store_dict(minnie65_materialization_external_meshes_path),
     'minnie65_meshwork': djp.make_store_dict(minnie65_materialization_external_meshwork_path),
     'minnie65_pcg_skeletons': djp.make_store_dict(minnie65_materialization_external_pcg_skeletons_path),
+    'minnie65_meshwork_axon_dendrite_skeletons': djp.make_store_dict(minnie65_materialization_external_meshwork_axon_dendrite_skeletons_path),
 }

From 98c0dfc42d9dfeb5fdf91e8dfcbdda8e8bc72a56 Mon Sep 17 00:00:00 2001
From: spapa013
Date: Mon, 18 Nov 2024 15:52:48 -0800
Subject: [PATCH 5/6] add ImportMethod.Synapse3, Synapse.Info2 and MeshworkAxonDendriteSkeleton table

---
 .../schemas/minnie65_materialization.py |  99 ++++++++-
 .../minnie65_materialization.py         | 199 +++++++++++++++---
 2 files changed, 267 insertions(+), 31 deletions(-)

diff --git a/python/microns-materialization-api/microns_materialization_api/schemas/minnie65_materialization.py b/python/microns-materialization-api/microns_materialization_api/schemas/minnie65_materialization.py
index bbae090..a330242 100644
--- a/python/microns-materialization-api/microns_materialization_api/schemas/minnie65_materialization.py
+++ b/python/microns-materialization-api/microns_materialization_api/schemas/minnie65_materialization.py
@@ -4,7 +4,7 @@
 import datajoint as dj
 import datajoint_plus as djp
 from microns_utils.misc_utils import classproperty
-
+import microns_utils.datajoint_utils as dju
 from ..config import minnie65_materialization_config as config

 config.register_externals()
@@ -13,6 +13,12 @@

 schema = djp.schema(config.schema_name, create_schema=True)

+@schema
+class Tag(dju.VersionLookup):
+    package = 'microns-materialization-api'
+    attr_name = 'tag'
+
+
 @schema
 class ImportMethod(djp.Lookup):
     hash_name = 'import_method'
@@ -108,6 +114,20 @@ class Synapse2(djp.Part):
         ts_inserted=CURRENT_TIMESTAMP : timestamp
         """

+    class Synapse3(djp.Part):
+        enable_hashing = True
+        hash_name = 'import_method'
+        hashed_attrs = 'caveclient_version', 'datastack', 'ver', Tag.attr_name
+        definition = """
+        -> master
+        ---
+        caveclient_version: varchar(48) # version of caveclient installed when method was created
+        datastack: varchar(250) # name of datastack
+        ver: smallint # client materialization version
+        -> Tag
+        ts_inserted=CURRENT_TIMESTAMP : timestamp
+        """
+
     class PCGMeshwork(djp.Part):
         enable_hashing = True
         hash_name = 'import_method'
@@ -151,6 +171,26 @@ class PCGSkeleton(djp.Part):
         ts_inserted=CURRENT_TIMESTAMP : timestamp
         """

+@schema
+class MakeMethod(djp.Lookup):
+    hash_name = 'make_method'
+    definition = f"""
+    {hash_name}: varchar(6)
+    """
+
+    class MeshworkAxonDendriteSkeleton(djp.Part):
+        enable_hashing = True
+        hash_name = 'make_method'
+        hashed_attrs = 'pcg_skel_version', Tag.attr_name
+        definition = """
+        -> master
+        ---
+        pcg_skel_version: varchar(48) # version of pcg_skel installed when method was created
+        target_dir: varchar(1000) # target directory for file
+        -> Tag
+        """
+
+
 @schema
 class Materialization(djp.Lookup):
@@ -320,6 +360,21 @@ class Info(djp.Part):
         synapse_z : int unsigned # z coordinate of centroid in EM voxels (x: 4nm, y: 4nm, z: 40nm). From Allen 'ctr_pt_position'.
         synapse_size : int unsigned # (EM voxels) scaled by (4x4x40)
         """
+
+    class Info2(djp.Part):
+        definition = """
+        # Synapses from the table 'synapses_pni_2'
+        -> Materialization
+        -> Segment.proj(primary_seg_id='segment_id')
+        secondary_seg_id : bigint unsigned # id of the segment that is synaptically paired to primary_segment_id.
+        -> master
+        ---
+        prepost : varchar(16) # whether the primary_seg_id is "presyn" or "postsyn"
+        synapse_x : int unsigned # x coordinate of synapse centroid in EM voxels (x: 4nm, y: 4nm, z: 40nm). From Allen 'ctr_pt_position'.
+        synapse_y : int unsigned # y coordinate of centroid in EM voxels (x: 4nm, y: 4nm, z: 40nm). From Allen 'ctr_pt_position'.
+        synapse_z : int unsigned # z coordinate of centroid in EM voxels (x: 4nm, y: 4nm, z: 40nm). From Allen 'ctr_pt_position'.
+        synapse_size : int unsigned # (EM voxels) scaled by (4x4x40)
+        """

     class MatV1(djp.Part):
         definition = """
@@ -346,6 +401,14 @@ class CAVE(djp.Part, dj.Computed):
         ts_inserted=CURRENT_TIMESTAMP : timestamp # timestamp inserted
         """

+    class CAVE2(djp.Part, dj.Computed):
+        definition = """
+        -> master.Info2
+        -> ImportMethod
+        ---
+        ts_inserted=CURRENT_TIMESTAMP : timestamp # timestamp inserted
+        """
+

 @schema
 class Mesh(djp.Lookup):
@@ -446,6 +509,40 @@ class PCGSkeletonMaker(djp.Part, dj.Computed):
         ts_inserted=CURRENT_TIMESTAMP : timestamp
         """

+    class MeshworkAxonDendriteSkeletonError(djp.Part):
+        error_code = '000001'
+        hash_name = 'skeleton_id'
+        definition = """
+        -> master
+        -> Meshwork
+        -> MakeMethod
+        ---
+        error_msg : varchar(1000) # error message
+        ts_inserted=CURRENT_TIMESTAMP : timestamp
+        """
+
+    class MeshworkAxonDendriteSkeleton(djp.Part):
+        hash_name = 'skeleton_id'
+        definition = """
+        -> master
+        ---
+        axon_skeleton : <minnie65_meshwork_axon_dendrite_skeletons> # path to the .npz file
+        dendrite_skeleton : <minnie65_meshwork_axon_dendrite_skeletons> # path to the .npz file
+        split_score : float # score from pcg_skel.meshwork.algorithms.split_axon_by_synapses
+        """
+
+    class MeshworkAxonDendriteSkeletonMaker(djp.Part, dj.Computed):
+        enable_hashing = True
+        hash_name = 'skeleton_make_id'
+        hashed_attrs = Meshwork.primary_key + MakeMethod.primary_key
+        definition = f"""
+        -> master.proj({hash_name}="skeleton_id")
+        -> Meshwork
+        -> MakeMethod
+        ---
+        ts_inserted=CURRENT_TIMESTAMP : timestamp
+        """
+
 @schema
 class Queue(djp.Lookup):
     hash_name = 'queue_id'
diff --git a/python/microns-materialization/microns_materialization/minnie_materialization/minnie65_materialization.py b/python/microns-materialization/microns_materialization/minnie_materialization/minnie65_materialization.py
index bc07b8f..8200c84 100644
--- a/python/microns-materialization/microns_materialization/minnie_materialization/minnie65_materialization.py
+++ b/python/microns-materialization/microns_materialization/minnie_materialization/minnie65_materialization.py
@@ -12,6 +12,9 @@
 from meshparty import trimesh_io

 # Schema creation
+from microns_materialization_api.utils.skeleton_utils import \
+    convert_skeleton_to_nodes_edges
+
 from microns_materialization_api.schemas import \
     minnie65_materialization as m65mat

@@ -31,6 +34,15 @@

 # TODO: Deal with filter out unrestricted

+import os
+cvt = os.getenv('CLOUDVOLUME_TOKEN')
+assert cvt is not None, 'No cloudvolume token found'
+
+
+class Tag(m65mat.Tag):
+    pass
+
+
 class ImportMethod(m65mat.ImportMethod):
     @classmethod
     def run(cls, key):
@@ -56,7 +68,7 @@ def update_method(cls, ver=None, **kwargs):
             datastack = 'minnie65_phase3_v1'

             # INSERT
-            client = set_CAVEclient(datastack, ver)
+            client = set_CAVEclient(datastack, ver, caveclient_kws={'auth_token': cvt})
             cls.insert1({
                 'caveclient_version': cpvfd('caveclient'),
                 'datastack': datastack,
@@ -68,7 +80,7 @@ def run(self, **kwargs):
             self.Log('info', f'Running {self.class_name} with params {params}.')

             # INITIALIZE & VALIDATE
-            client = set_CAVEclient(params['datastack'], ver=params['ver'])
+            client = set_CAVEclient(params['datastack'], ver=params['ver'], caveclient_kws={'auth_token': cvt})
             self.master.validate_method(
                 names=('caveclient version', 'datastack', 'materialization_version'),
                 method_values=(params['caveclient_version'], params['datastack'], params['ver']),
@@ -89,7 +101,7 @@ def update_method(cls, ver=None, **kwargs):
             datastack = 'minnie65_phase3_v1'

             # INSERT
-            client = set_CAVEclient(datastack, ver)
+            client = set_CAVEclient(datastack, ver, caveclient_kws={'auth_token': cvt})
             cls.insert1({
                 'caveclient_version': cpvfd('caveclient'),
                 'datastack': datastack,
@@ -101,7 +113,7 @@ def run(self, **kwargs):
             self.Log('info', f'Running {self.class_name} with params {params}.')

             # INITIALIZE & VALIDATE
-            client = set_CAVEclient(params['datastack'], ver=params['ver'])
+            client = set_CAVEclient(params['datastack'], ver=params['ver'], caveclient_kws={'auth_token': cvt})
             self.master.validate_method(
                 names=('caveclient version', 'datastack', 'materialization_version'),
                 method_values=(params['caveclient_version'], params['datastack'], params['ver']),
@@ -153,7 +165,7 @@ def update_method(cls, ver=None, download_meshes_kwargs={}, **kwargs):
             download_meshes_kwargs.setdefault('progress', False)

             # INSERT
-            client = set_CAVEclient(datastack, ver)
+            client = set_CAVEclient(datastack, ver, caveclient_kws={'auth_token': cvt})
             cls.insert1(
                 {
                     'description' : '',
@@ -176,7 +188,7 @@ def run(self, **kwargs):
             self.Log('info', f'Running {self.class_name} with params {params}.')

             # INITIALIZE & VALIDATE
-            client = set_CAVEclient(params['datastack'], params['ver'])
+            client = set_CAVEclient(params['datastack'], params['ver'], caveclient_kws={'auth_token': cvt})
             packages = {
                 'meshparty_version': 'meshparty',
                 'caveclient_version': 'caveclient',
@@ -213,29 +225,38 @@ def run(self, **kwargs):
     class Synapse(m65mat.ImportMethod.Synapse):
         @classmethod
         def update_method(cls, *args, **kwargs):
-            msg = f'{cls.class_name} has been deprecated. Use {cls.master.class_name}.Synapse2.'
+            msg = f'{cls.class_name} has been deprecated. Use {cls.master.class_name}.Synapse3.'
             cls.Log('error', msg)
             raise Exception(msg)

         def run(self, *args, **kwargs):
-            msg = f'{self.class_name} has been deprecated. Use {self.master.class_name}.Synapse2.'
+            msg = f'{self.class_name} has been deprecated. Use {self.master.class_name}.Synapse3.'
             self.Log('error', msg)
             raise Exception(msg)

     class Synapse2(m65mat.ImportMethod.Synapse2):
         @classmethod
-        def update_method(cls, ver=None, **kwargs):
-            cls.Log('info', f'Updating method for {cls.class_name}.')
+        def update_method(cls, *args, **kwargs):
+            msg = f'{cls.class_name} has been deprecated. Use {cls.master.class_name}.Synapse3.'
+            cls.Log('error', msg)
+            raise Exception(msg)

-            # DEFAULTS
-            datastack = 'minnie65_phase3_v1'
+        def run(self, *args, **kwargs):
+            msg = f'{self.class_name} has been deprecated. Use {self.master.class_name}.Synapse3.'
+            self.Log('error', msg)
+            raise Exception(msg)

+    class Synapse3(m65mat.ImportMethod.Synapse3):
+        @classmethod
+        def update_method(cls, ver=None, datastack='minnie65_phase3_v1', **kwargs):
+            cls.Log('info', f'Updating method for {cls.class_name}.')

             # INSERT
-            client = set_CAVEclient(datastack, ver)
+            client = set_CAVEclient(datastack, ver, caveclient_kws={'auth_token': cvt})
             cls.insert1({
                 'caveclient_version': cpvfd('caveclient'),
                 'datastack': datastack,
                 'ver': ver if ver is not None else client.materialize.version,
+                Tag.attr_name: Tag.version,
             }, ignore_extra_fields=True, skip_duplicates=True, insert_to_master=True)

         def run(self, **kwargs):
@@ -243,11 +264,11 @@ def run(self, **kwargs):
             self.Log('info', f'Running {self.class_name} with params {params}.')

             # INITIALIZE & VALIDATE
-            client = set_CAVEclient(params['datastack'], ver=params['ver'])
+            client = set_CAVEclient(params['datastack'], ver=params['ver'], caveclient_kws={'auth_token': cvt})
             self.master.validate_method(
-                names=('caveclient version', 'datastack', 'materialization_version'),
-                method_values=(params['caveclient_version'], params['datastack'], params['ver']),
-                current_values=(cpvfd('caveclient'), client.materialize.datastack_name, client.materialize.version)
+                names=('caveclient version', 'datastack', 'materialization_version', Tag.attr_name),
+                method_values=(params['caveclient_version'], params['datastack'], params['ver'], params[Tag.attr_name]),
+                current_values=(cpvfd('caveclient'), client.materialize.datastack_name, client.materialize.version, Tag.version)
             )

             # IMPORT DATA
@@ -257,18 +278,34 @@ def run(self, **kwargs):
             df_pre = client.materialize.query_table('synapses_pni_2', filter_equal_dict={'pre_pt_root_id': primary_seg_id})
             df_pre = df_pre.rename(columns={'pre_pt_root_id':'primary_seg_id', 'post_pt_root_id': 'secondary_seg_id'})
             df_pre['prepost'] = 'presyn'
+            df_pre.attrs = {} # needed because the attrs in the returned dataframe break pd.concat
+
+            # # flip primary to secondary and presyn to postsyn
+            # df_pre_as_post = df_pre.copy()
+            # df_pre_as_post = df_pre_as_post.rename(columns={'primary_seg_id': 'secondary_seg_id', 'secondary_seg_id': 'primary_seg_id'})
+            # df_pre_as_post['prepost'] = 'postsyn'
+
             # get synapses where primary segment is postsynaptic
             df_post = client.materialize.query_table('synapses_pni_2', filter_equal_dict={'post_pt_root_id': primary_seg_id})
             df_post = df_post.rename(columns={'post_pt_root_id':'primary_seg_id', 'pre_pt_root_id': 'secondary_seg_id'})
             df_post['prepost'] = 'postsyn'
-
+
+            # # flip primary to secondary and postsyn to presyn
+            # df_post_as_pre = df_post.copy()
+            # df_post_as_pre = df_post_as_pre.rename(columns={'primary_seg_id': 'secondary_seg_id', 'secondary_seg_id': 'primary_seg_id'})
+            # df_post_as_pre['prepost'] = 'presyn'
+
             # combine dataframes
             df = pd.concat([df_pre, df_post], axis=0)
+            # df = pd.concat([df_pre, df_pre_as_post, df_post, df_post_as_pre], axis=0)

             # remove autapses (these are mostly errors)
             df = df[df['primary_seg_id']!=df['secondary_seg_id']]

+            # ensure primary_seg_id is in Segment table
+            # seg_df = pd.DataFrame(m65mat.Segment.proj(primary_seg_id='segment_id').fetch())
+            # df = df.merge(seg_df)
+
             if len(df)>0:
                 # add synapse_xyz
                 df['synapse_x'], df['synapse_y'], df['synapse_z'] = np.stack(df['ctr_pt_position'].T, -1)
@@ -280,7 +317,7 @@ def run(self, **kwargs):
                          'prepost', 'synapse_x', 'synapse_y', 'synapse_z', 'synapse_size']]

                 df['import_method'] = params['import_method']
-
+                df['ver'] = params['ver']
                 return {'df': df}
             else:
                 return {'df': []}
@@ -299,7 +336,7 @@ def update_method(cls, ver=None, pcg_meshwork_params={}, **kwargs):
             pcg_meshwork_params.setdefault('root_point_resolution', [4,4,40])

             # INSERT
-            client = set_CAVEclient(datastack, ver)
+            client = set_CAVEclient(datastack, ver, caveclient_kws={'auth_token': cvt})
             cls.insert1(
                 {
                     'meshparty_version': cpvfd('meshparty'),
@@ -324,7 +361,7 @@ def run(self, **kwargs):
             self.Log('info', f'Running {self.class_name} with params {params}.')

             # INITIALIZE & VALIDATE
-            client = set_CAVEclient(params['datastack'], ver=params['ver'])
+            client = set_CAVEclient(params['datastack'], ver=params['ver'], caveclient_kws={'auth_token': cvt})
             # validate package dependencies
             packages = {
                 'meshparty_version': 'meshparty',
@@ -401,7 +438,7 @@ def update_method(cls, ver=None, pcg_skel_params={}, **kwargs):
             pcg_skel_params.setdefault('root_point_resolution', [4,4,40])

             # INSERT
-            client = set_CAVEclient(datastack, ver)
+            client = set_CAVEclient(datastack, ver, caveclient_kws={'auth_token': cvt})
             cls.insert1(
                 {
                     'meshparty_version': cpvfd('meshparty'),
@@ -427,7 +464,7 @@ def run(self, **kwargs):
             self.Log('info', f'Running {self.class_name} with params {params}.')

             # INITIALIZE & VALIDATE
-            client = set_CAVEclient(params['datastack'], ver=params['ver'])
+            client = set_CAVEclient(params['datastack'], ver=params['ver'], caveclient_kws={'auth_token': cvt})
             # validate package dependencies
             packages = {
@@ -482,6 +519,55 @@ def run(self, **kwargs):
                 'ts_computed': ts_computed
             }

+
+class MakeMethod(m65mat.MakeMethod):
+    @classmethod
+    def run(cls, key):
+        return cls.r1p(key).run(**key)
+
+    class MeshworkAxonDendriteSkeleton(m65mat.MakeMethod.MeshworkAxonDendriteSkeleton):
+        @classmethod
+        def update_method(cls):
+            cls.insert1({Tag.attr_name: Tag.version,
+                         'pcg_skel_version': cpvfd('pcg-skel'),
+                         'target_dir': config.externals['minnie65_meshwork_axon_dendrite_skeletons']['location'],
+                         }, insert_to_master=True)
+
+        def run(self, meshwork_id, **kwargs):
+            params = (self & kwargs).fetch1()
+            target_dir = params.get('target_dir')
+            assert params.get('tag') == Tag.version, 'Tag version mismatch'
+            assert params.get('pcg_skel_version') == cpvfd('pcg-skel'), 'pcg-skel version mismatch'
+
+            meshwork_obj = (m65mat.Meshwork.PCGMeshwork & {'meshwork_id': meshwork_id}).fetch1('meshwork_obj')
+
+            is_axon, score = pcg_skel.meshwork.algorithms.split_axon_by_synapses(meshwork_obj, meshwork_obj.anno.pre_syn.mesh_index, meshwork_obj.anno.post_syn.mesh_index)
+            # Note: this drops any edge with one vertex in the axon indices and the other in the dendrite indices
+            axon_index_set = set(is_axon.to_skel_index.tolist())
+            axon_edges = np.array([row for row in meshwork_obj.skeleton.edges if (row[0] in axon_index_set) and (row[1] in axon_index_set)])
+            dendrite_edges = np.array([row for row in meshwork_obj.skeleton.edges if (row[0] not in axon_index_set) and (row[1] not in axon_index_set)])
+
+            axon_skeleton = meshwork_obj.skeleton.vertices[axon_edges]
+            dendrite_skeleton = meshwork_obj.skeleton.vertices[dendrite_edges]
+
+            axon_vertices, axon_edges = convert_skeleton_to_nodes_edges(axon_skeleton)
+            dendrite_vertices, dendrite_edges = convert_skeleton_to_nodes_edges(dendrite_skeleton)
+
+            axon_skeleton_fp = Path(target_dir).joinpath(f'{meshwork_id}_axon_skeleton.npz')
+            dendrite_skeleton_fp = Path(target_dir).joinpath(f'{meshwork_id}_dendrite_skeleton.npz')
+
+            np.savez(axon_skeleton_fp, vertices=axon_vertices, edges=axon_edges)
+            np.savez(dendrite_skeleton_fp, vertices=dendrite_vertices, edges=dendrite_edges)
+
+            return {
+                'meshwork_id': meshwork_id,
+                'make_method': params['make_method'],
+                'axon_skeleton': axon_skeleton_fp,
+                'dendrite_skeleton': dendrite_skeleton_fp,
+                'split_score': score
+            }
+
+
 class Materialization(m65mat.Materialization):
     class Info(m65mat.Materialization.Info): pass
@@ -581,6 +667,8 @@ class Synapse(m65mat.Synapse):
     class Info(m65mat.Synapse.Info): pass

+    class Info2(m65mat.Synapse.Info2): pass
+
     class MatV1(m65mat.Synapse.MatV1):
         @classmethod
         def fill(cls):
@@ -595,7 +683,7 @@ class SegmentExclude(m65mat.Synapse.SegmentExclude): pass
     class CAVE(m65mat.Synapse.CAVE):
         @property
         def key_source(self):
-            return (Segment.proj(primary_seg_id='segment_id') - Synapse.Info - Synapse.SegmentExclude) * ImportMethod.Synapse2
+            return dj.U('primary_seg_id', 'import_method') & ((Segment.Nucleus.proj(primary_seg_id='segment_id') - Synapse.Info - Synapse.SegmentExclude) * ImportMethod.Synapse2)

         def make(self, key):
             df = ImportMethod.run(key)['df']
@@ -606,6 +694,20 @@ def make(self, key):
             else:
                 self.master.SegmentExclude.insert1({'primary_seg_id': key['primary_seg_id'], 'synapse_id': 0, Exclusion.hash_name: Exclusion.hash1({'reason': 'no synapse data'})}, skip_duplicates=True)

+    class CAVE2(m65mat.Synapse.CAVE2):
+        @property
+        def key_source(self):
+            return dj.U('ver', 'primary_seg_id', 'import_method') & ((Segment.Nucleus.proj(primary_seg_id='segment_id') - Synapse.SegmentExclude) * ImportMethod.Synapse3)
+
+        def make(self, key):
+            df = ImportMethod.run(key)['df']
+            if len(df) > 0:
+                self.master.insert(df, ignore_extra_fields=True, skip_duplicates=True)
+                self.master.Info2.insert(df, ignore_extra_fields=True, skip_duplicates=True)
+                self.insert(df, ignore_extra_fields=True)
+            else:
+                self.master.SegmentExclude.insert1({'primary_seg_id': key['primary_seg_id'], 'synapse_id': 0, Exclusion.hash_name: Exclusion.hash1({'reason': 'no synapse data'})}, skip_duplicates=True)
+

 class Mesh(m65mat.Mesh):
@@ -662,6 +764,44 @@ def make(self, key):
             self.master.PCGSkeleton.insert1(result, ignore_extra_fields=True, skip_duplicates=True)
             self.insert1(result, insert_to_master=True, skip_hashing=True, ignore_extra_fields=True, skip_duplicates=True, insert_to_master_kws={'ignore_extra_fields': True, 'skip_duplicates': True})

+    class MeshworkAxonDendriteSkeletonError(m65mat.Skeleton.MeshworkAxonDendriteSkeletonError):
+        pass
+
+    class MeshworkAxonDendriteSkeleton(m65mat.Skeleton.MeshworkAxonDendriteSkeleton):
+        pass
+
+    class MeshworkAxonDendriteSkeletonMaker(m65mat.Skeleton.MeshworkAxonDendriteSkeletonMaker):
+        @property
+        def key_source(self):
+            return Meshwork * (MakeMethod & MakeMethod.MeshworkAxonDendriteSkeleton) - self.master.MeshworkAxonDendriteSkeletonError
+
+        def make(self, key):
+            try:
+                result = MakeMethod.run(key)
+                result_hash = self.hash1(result)
+                result[self.master.hash_name] = result_hash
+                result[self.hash_name] = result_hash
+                self.master.MeshworkAxonDendriteSkeleton.insert1(result, ignore_extra_fields=True, insert_to_master=True)
+                self.insert1(result, ignore_extra_fields=True, skip_hashing=True)
+
+            except Exception as e:
+                error_table = self.master.MeshworkAxonDendriteSkeletonError
+                self.Log('error', f'errored on key {key}')
+                key.update({
+                    self.master.hash_name: error_table.error_code,
+                    'error_msg': str(e)
+                })
+                error_table.insert1(
+                    key,
+                    insert_to_master=True,
+                    skip_duplicates=True,
+                    insert_to_master_kws = {
+                        'skip_duplicates': True,
+                        'ignore_extra_fields': True
+                    }
+                )
+
+
 class Queue(m65mat.Queue):
@@ -712,6 +852,7 @@ def download_materialization(ver=None, download_synapses=False, download_meshes=

     methods = [
         ImportMethod.MaterializationVer,
+        ImportMethod.NucleusSegment,
     ]

     makers = [
@@ -728,14 +869,12 @@ def download_materialization(ver=None, download_synapses=False, download_meshes=
         methods += [ImportMethod.MeshPartyMesh2]
         makers += [Mesh.MeshParty]

-    for m in methods:
-        logger.info(f'Updating methods for {m.class_name}.')
+    for m, mk in zip(methods, makers):
+        logger.info(f'Updating method for {m.class_name}.')
         m.update_method(ver=ver)
-
-    for mk in makers:
         logger.info(f'Populating {mk.class_name}.')
-        mk.populate(m.master & m.get_latest_entries(), reserve_jobs=True, order='random', suppress_errors=True)
-
+        mk.populate(m.master & m.get_latest_entries(), reserve_jobs=True, order='random', suppress_errors=True)
+

 def download_meshwork_objects(restriction={}, loglevel=None, update_root_level=True):
     """

From 6563f38342544aef5706e3ff177250e5912efbd7 Mon Sep 17 00:00:00 2001
From: spapa013
Date: Mon, 18 Nov 2024 15:52:56 -0800
Subject: [PATCH 6/6] update version

---
 python/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/version.py b/python/version.py
index 1f658a4..f18e5d0 100644
--- a/python/version.py
+++ b/python/version.py
@@ -1 +1 @@
-__version__ = "0.0.17"
+__version__ = "0.0.18"
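
---

Notes on the series

Patch 1 threads the CAVE token into the image as a build arg (e.g. docker build --build-arg CLOUDVOLUME_TOKEN=...), and patch 5 makes the Python module assert the same variable at import time. A minimal sketch of what a caller now has to do; the token value below is a placeholder, not a real credential:

    import os

    # The module asserts CLOUDVOLUME_TOKEN at import time, so it must be set
    # before the import runs.
    os.environ.setdefault('CLOUDVOLUME_TOKEN', 'placeholder-token')

    from microns_materialization.minnie_materialization import minnie65_materialization as m65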
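
Patch 2's convert_skeleton_to_nodes_edges collapses an (N, 2, 3) array of edge segments into a deduplicated vertex array plus integer edge indices via np.unique(..., return_inverse=True). A toy run showing a shared endpoint being merged:

    import numpy as np
    from microns_materialization_api.utils.skeleton_utils import convert_skeleton_to_nodes_edges

    # Two connected segments sharing the vertex (1, 0, 0).
    segments = np.array([
        [[0., 0., 0.], [1., 0., 0.]],
        [[1., 0., 0.], [1., 1., 0.]],
    ])

    vertices, edges = convert_skeleton_to_nodes_edges(segments)
    print(vertices)  # 3 unique vertices; the shared endpoint appears once
    print(edges)     # [[0 1] [1 2]] -- indices into the deduplicated vertices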
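
Patches 3 and 4 register a NumpyAdapter-backed external store for the split skeletons, and patch 5 writes each compartment as an .npz file holding 'vertices' and 'edges' arrays. A round-trip sketch under an assumed temporary store location (the real one comes from externals.py; '12345' stands in for a meshwork_id):

    import tempfile
    from pathlib import Path
    import numpy as np

    target_dir = Path(tempfile.mkdtemp())  # stand-in for the configured store

    vertices = np.random.rand(10, 3)                            # xyz positions
    edges = np.stack([np.arange(9), np.arange(1, 10)], axis=1)  # simple chain

    fp = target_dir / '12345_axon_skeleton.npz'
    np.savez(fp, vertices=vertices, edges=edges)

    with np.load(fp) as data:  # presumably what the adapter does on fetch
        assert data['vertices'].shape == (10, 3)
        assert data['edges'].shape == (9, 2)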
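
ImportMethod.Synapse3 hashes the package tag (via the new Tag lookup) into import_method alongside the caveclient version, datastack, and materialization version, and run() re-checks all four against the live environment. A minimal sketch of that drift guard, assuming validate_method's behavior from its call sites:

    def validate_method(names, method_values, current_values):
        # Refuse to run a method row whose environment has drifted.
        for name, stored, current in zip(names, method_values, current_values):
            if stored != current:
                raise RuntimeError(
                    f'{name} mismatch: method row was hashed with {stored!r}, '
                    f'but the environment now has {current!r}; '
                    f'run update_method() to register a new method.'
                )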
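
The new df_pre.attrs = {} line works around a pandas quirk: query_table can attach metadata to DataFrame.attrs, and in some pandas versions pd.concat compares the inputs' attrs dicts, which raises if those dicts contain numpy arrays (ambiguous truth value). A small reproduction of the workaround:

    import numpy as np
    import pandas as pd

    a = pd.DataFrame({'x': [1]})
    b = pd.DataFrame({'x': [2]})
    a.attrs['meta'] = np.array([1, 2])  # array-valued metadata, as a query can attach
    b.attrs['meta'] = np.array([1, 3])

    # In affected pandas versions, pd.concat([a, b]) raises ValueError while
    # comparing the two attrs dicts. Clearing attrs sidesteps the comparison:
    a.attrs = {}
    b.attrs = {}
    combined = pd.concat([a, b])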
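
Both CAVE key_sources now wrap the join in dj.U(...), which promotes the named attributes to the primary key of the resulting relation, so populate() issues one make() call per distinct tuple rather than inheriting the join's key. Sketch, assuming a configured DataJoint connection and the populated schema:

    import datajoint as dj

    # One job per (ver, primary_seg_id, import_method) triple:
    ks = dj.U('ver', 'primary_seg_id', 'import_method') & (
        (Segment.Nucleus.proj(primary_seg_id='segment_id') - Synapse.SegmentExclude)
        * ImportMethod.Synapse3
    )
    print(len(ks))  # number of (ver, segment, method) combinations to compute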
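
The MeshworkAxonDendriteSkeleton make method labels skeleton vertices with pcg_skel's split_axon_by_synapses and then keeps only edges whose endpoints fall in the same compartment; as the in-code note says, any edge bridging axon and dendrite is dropped. The same filtering on a toy chain:

    import numpy as np

    # 5-vertex chain; vertices 0-2 labeled axon.
    edges = np.array([[0, 1], [1, 2], [2, 3], [3, 4]])
    axon_index_set = {0, 1, 2}

    axon_edges = np.array([e for e in edges if e[0] in axon_index_set and e[1] in axon_index_set])
    dendrite_edges = np.array([e for e in edges if e[0] not in axon_index_set and e[1] not in axon_index_set])

    print(axon_edges)      # [[0 1] [1 2]]
    print(dendrite_edges)  # [[3 4]] -- the bridging edge [2 3] is dropped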
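
A hypothetical end-to-end session with the new tables (the materialization version below is made up, and database access is assumed to be configured):

    # Synapses, re-imported per materialization version:
    ImportMethod.Synapse3.update_method(ver=943)
    Synapse.CAVE2.populate(reserve_jobs=True, order='random', suppress_errors=True)

    # Axon/dendrite skeletons derived from existing meshwork objects:
    MakeMethod.MeshworkAxonDendriteSkeleton.update_method()
    Skeleton.MeshworkAxonDendriteSkeletonMaker.populate(reserve_jobs=True)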
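
Finally, download_materialization now updates each method and populates its maker in a single pass. zip() pairs the two lists positionally and silently truncates to the shorter one, so the methods and makers lists must stay index-aligned whenever entries are appended:

    # Illustrative names only; the real lists hold the table classes.
    methods = ['MaterializationVer', 'NucleusSegment']
    makers = ['Materialization.CAVE', 'Segment.Nucleus']  # hypothetical pairing
    for m, mk in zip(methods, makers):
        print(f'update {m} -> populate {mk}')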