Skip to content

Commit

Permalink
black and tutorial
Browse files Browse the repository at this point in the history
  • Loading branch information
tathey1 committed Jan 3, 2024
1 parent 41d6284 commit 7b6b8ab
Show file tree
Hide file tree
Showing 18 changed files with 180 additions and 184 deletions.
47 changes: 13 additions & 34 deletions brainlit/BrainLine/analyze_results.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,13 @@ class BrainDistribution:
brain_ids (list): List of brain IDs (keys of data json file).
"""

def __init__(self, brain_ids: list, data_file: str, ontology_file: str, fixes_file: str = None):
def __init__(
self,
brain_ids: list,
data_file: str,
ontology_file: str,
fixes_file: str = None,
):
self.brain_ids = brain_ids
with open(data_file) as f:
data = json.load(f)
Expand Down Expand Up @@ -358,34 +364,6 @@ def napari_coronal_section(
heatmap[:, :, depth_radius, :], scale=[10, 10], name=f"Heatmap"
) # , rgb=True)

for subtype1, subtype2 in zip(
["tph2 gad2", "tph2 gad2", "tph2 vglut3"],
["tph2 vglut3", "gad2 vgat", "gad2 vgat"],
):
diffpos = (
heatmap[
:, :, depth_radius, channel_map[subtype_colors[subtype1]]
]
- heatmap[
:, :, depth_radius, channel_map[subtype_colors[subtype2]]
]
)
diffneg = np.copy(diffpos)
diffpos[diffpos < 0] = 0
diffneg[diffneg > 0] = 0
diffneg = np.abs(diffneg)
heatdiff = 0 * heatmap
heatdiff[
:, :, depth_radius, channel_map[subtype_colors[subtype1]]
] = diffpos
heatdiff[
:, :, depth_radius, channel_map[subtype_colors[subtype2]]
] = diffneg
v.add_image(
heatdiff[:, :, depth_radius, :],
scale=[10, 10],
name=f"{subtype1} - {subtype2}",
) # , rgb=True)
v.add_labels(borders * 2, scale=[10, 10], name=f"z={z}")

v.scale_bar.unit = "um"
Expand Down Expand Up @@ -578,7 +556,6 @@ def _setup_regiongraph(self):
region_graph.nodes[region][brain_id]
+ id_to_regioncounts[brain_id][region]
)


# propagate counts up the hierarchy
for brain_id in brain_ids:
Expand Down Expand Up @@ -858,7 +835,7 @@ def _compute_composition_corner(corners, outdir, dir_base_mask, dir_base_s3):
return

dir = dir_base_mask + "axon_mask"
vol_mask = CloudVolume(dir, parallel=1, mip=0, fill_missing=False)
vol_mask = CloudVolume(dir, parallel=1, mip=0, fill_missing=True)

dir = dir_base_s3 + "atlas_to_target"
vol_reg = CloudVolume(dir, parallel=1, mip=0, fill_missing=True)
Expand Down Expand Up @@ -1044,9 +1021,11 @@ def _setup_regiongraph(self, regional_distribution_dir):
region_idx = region

if region_idx in region_graph.nodes:
region_graph.nodes[region_idx][brain_id + " axon"] = region_graph.nodes[
region_idx
][brain_id + " axon"] + float(quantification_dict[region_idx][1])
region_graph.nodes[region_idx][
brain_id + " axon"
] = region_graph.nodes[region_idx][brain_id + " axon"] + float(
quantification_dict[region_idx][1]
)
region_graph.nodes[region_idx][
brain_id + " total"
] = region_graph.nodes[region_idx][brain_id + " total"] + float(
Expand Down
4 changes: 2 additions & 2 deletions brainlit/BrainLine/apply_ilastik.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,7 @@ def plot_results(
cntr = [s // 2 for s in mask.shape]

if object_type == "soma":
if filename.split("/")[-1] in doubles:
if filename.split("/")[-1].split("_Probabilities")[0] in doubles:
newpos = 2
else:
newpos = 1
Expand Down Expand Up @@ -352,7 +352,7 @@ def examine_threshold(
cntr = [s // 2 for s in mask.shape]

if object_type == "soma":
if filename.split("/")[-1] in doubles:
if filename.split("/")[-1].split("_Probabilities")[0] in doubles:
newpos = 2
else:
newpos = 1
Expand Down
10 changes: 5 additions & 5 deletions brainlit/BrainLine/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -349,7 +349,9 @@ def dir_to_atlas_pts(dir, outname, atlas_file):
atlas_file (str): Name of downloaded atlas parcellation image.
"""
vol = io.imread(atlas_file)
files = [Path(dir) / f for f in os.listdir(dir) if os.path.splitext(f)[1] == ".json"]
files = [
Path(dir) / f for f in os.listdir(dir) if os.path.splitext(f)[1] == ".json"
]

coords = []
for file in tqdm(files, "Processing point files..."):
Expand All @@ -359,12 +361,10 @@ def dir_to_atlas_pts(dir, outname, atlas_file):
for pt in tqdm(json_file, "Finding interior points..."):
coord = pt["point"]
try:
if vol[int(coord[0]),int(coord[1]),int(coord[2])] != 0:
if vol[int(coord[0]), int(coord[1]), int(coord[2])] != 0:
coords.append(str(coord))
except IndexError:
pass

with open(outname, 'a') as f:
with open(outname, "a") as f:
f.write("\n".join(coords))


60 changes: 25 additions & 35 deletions docs/notebooks/pipelines/BrainLine/axon_analysis.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -68,34 +68,22 @@
"Data should be stored in the ``brain2paths`` dictionary, with entries like:\n",
"\n",
" \"<sample ID>\": {\n",
" \"base\": \"<Path to directory with layers with CloudVolume prependings (ending with forward slash)>\",\n",
" \"base_local\": \"<Path to directory with layers with CloudVolume prependings (ending with forward slash)>\",\n",
" \"base_s3\": \"<Path to directory with layers with CloudVolume prependings (ending with forward slash)>\",\n",
" \"val_info\": {\n",
" \"url\": \"<neuroglancer URL>\",\n",
" \"somas_layer\": \"<name of layer with coordinates on somas>\",\n",
" \"nonsomas_layer\": \"<name of layer with coordinates on non-somas>\",\n",
" },\n",
" \"somas_atlas_url\": \"<neuroglancer URL with a single annotation layer which contains points of soma detections>\",\n",
" \"subtype\": \"<subtype>\"\n",
" #Optional:\n",
" \"train_info\": {\n",
" \"url\": \"<neuroglancer URL>\",\n",
" \"somas_layer\": \"<name of layer with coordinates on somas>\",\n",
" \"nonsomas_layer\": \"<name of layer with coordinates on non-somas>\",\n",
" },\n",
" },\n",
"\n",
"e.g.\n",
" \n",
" \"test\": {\n",
" \"base\": \"precomputed://file:///Users/thomasathey/Documents/mimlab/mouselight/brainlit_parent/brainlit/brainlit/BrainLine/data/example/\",\n",
" \"val_info\": {\n",
" \"url\": \"https://viz.neurodata.io/?json_url=https://json.neurodata.io/v1?NGStateID=15e9owS_Hr51fg\",\n",
" \"somas_layer\": \"soma_val\",\n",
" \"nonsomas_layer\": \"nonsoma_val\",\n",
" },\n",
" \"somas_atlas_url\": \"https://ara.viz.neurodata.io/?json_url=https://json.neurodata.io/v1?NGStateID=LTWdmg7lYf1nbA\",\n",
" \"subtype\": \"test\"\n",
" },"
" \"transformed mask\": \"<cloudvolume path to transformed axon mask>\"\n",
" }"
]
},
{
Expand All @@ -111,6 +99,20 @@
" brainlit_path / \"docs\" / \"notebooks\" / \"pipelines\" / \"BrainLine\" / \"axon_data.json\"\n",
")\n",
"\n",
"brain = \"test\" # brain ID\n",
"axon_data_dir = (\n",
" str(\n",
" brainlit_path\n",
" / \"docs\"\n",
" / \"notebooks\"\n",
" / \"pipelines\"\n",
" / \"BrainLine\"\n",
" / \"validation-data\"\n",
" / \"axon\"\n",
" )\n",
" + \"/\"\n",
") # path to directory where training/validation data should be stored\n",
"\n",
"# Modify base path of test sample according to your system\n",
"with open(data_file, \"r\") as f:\n",
" data = json.load(f)\n",
Expand Down Expand Up @@ -170,19 +172,6 @@
"background_layer = \"background\"\n",
"endogenous_layer = \"endogenous\"\n",
"\n",
"brain = \"test\" # brain ID\n",
"axon_data_dir = (\n",
" str(\n",
" brainlit_path\n",
" / \"docs\"\n",
" / \"notebooks\"\n",
" / \"pipelines\"\n",
" / \"BrainLine\"\n",
" / \"validation-data\"\n",
" / \"axon\"\n",
" )\n",
" + \"/\"\n",
") # path to directory where training/validation data should be stored\n",
"dataset_to_save = \"val\" # train or val\n",
"\n",
"layer_names = [antibody_layer, background_layer, endogenous_layer]"
Expand Down Expand Up @@ -325,7 +314,8 @@
"source": [
"plot_results(\n",
" data_dir=axon_data_dir, brain_ids=[brain], positive_channel=1, object_type=\"axon\"\n",
")"
")\n",
"plt.show()"
]
},
{
Expand Down Expand Up @@ -355,7 +345,7 @@
"examine_threshold(\n",
" data_dir=axon_data_dir,\n",
" brain_id=brain,\n",
" threshold=0.5,\n",
" threshold=0.52,\n",
" object_type=\"axon\",\n",
" positive_channel=1,\n",
")"
Expand All @@ -367,7 +357,7 @@
"metadata": {},
"source": [
"## 6. Apply ilastik to whole image:\n",
"## This can be done alternatively via a script with: `brainlit/BrainLine/soma_detect_image`"
"## This can be done alternatively via a script with: `brainlit/BrainLine/axon_segment_image`"
]
},
{
Expand All @@ -384,7 +374,7 @@
"metadata": {},
"outputs": [],
"source": [
"threshold = 0.32 # threshold to use for ilastik\n",
"threshold = 0.52 # threshold to use for ilastik\n",
"data_dir = (\n",
" axon_data_dir + \"brain_temp/\"\n",
") # directory to store temporary subvolumes for segmentation\n",
Expand Down Expand Up @@ -569,7 +559,7 @@
" outdir=axon_data_dir,\n",
" max_coords=max_coords,\n",
" min_coords=min_coords,\n",
" ncpu=6,\n",
" ncpu=2,\n",
")"
]
},
Expand Down Expand Up @@ -715,7 +705,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.0"
"version": "3.8.10"
},
"metadata": {
"interpreter": {
Expand Down
10 changes: 6 additions & 4 deletions docs/notebooks/pipelines/BrainLine/axon_data.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,16 +8,18 @@
},
"test": {
        "comment": "Sample used for demos, need to download precomputed data and put in brainlit/BrainLine/data/example. Data should be served via neuroglancer's cors_webserver.py",
"base": "precomputed://file:///Users/thomasathey/Documents/mimlab/mouselight/brainlit_parent/brainlit/docs/notebooks/pipelines/BrainLine/example-data/",
"base_s3": "precomputed://file:///Users/thomasathey/Documents/mimlab/mouselight/brainlit_parent/brainlit/docs/notebooks/pipelines/BrainLine/example-data/",
"base_local": "precomputed://file:///Users/thomasathey/Documents/mimlab/mouselight/brainlit_parent/brainlit/docs/notebooks/pipelines/BrainLine/example-data/",
"transformed_mask": "precomputed://file:///Users/thomasathey/Documents/mimlab/mouselight/brainlit_parent/brainlit/docs/notebooks/pipelines/BrainLine/example-data/axon_mask_transformed/",
"val_info": {
"url": "https://viz.neurodata.io/?json_url=https://json.neurodata.io/v1?NGStateID=QSzxzDKIRWgvmw",
"url": "https://viz.neurodata.io/?json_url=https://json.neurodata.io/v1?NGStateID=Ap9ZkdUyOAZqMg",
"layer": "val"
},
"subtype": "test_type"
"subtype": "test_type",
"base": "precomputed://file:///Users/thomasathey/Documents/mimlab/mouselight/brainlit_parent/brainlit/docs/notebooks/pipelines/BrainLine/example-data/"
},
"pytest": {
"base": "precomputed://https://open-neurodata.s3.amazonaws.com/ara_2016/sagittal_10um/",
"base_s3": "precomputed://https://open-neurodata.s3.amazonaws.com/ara_2016/sagittal_10um/",
"val_info": {
"url": "https://ara.viz.neurodata.io/?json_url=https://json.neurodata.io/v1?NGStateID=_7H3OM-IAEgp0Q",
"layer": "val"
Expand Down
Binary file not shown.

0 comments on commit 7b6b8ab

Please sign in to comment.