Skip to content

Commit

Permalink
manual ome
Browse files Browse the repository at this point in the history
  • Loading branch information
tathey1 committed Jan 10, 2024
1 parent 5ce27bf commit e2ebf47
Show file tree
Hide file tree
Showing 3 changed files with 152 additions and 107 deletions.
96 changes: 96 additions & 0 deletions brainlit/utils/write.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
import os
from cloudvolume import CloudVolume
import json
from skimage.measure import block_reduce


def _read_czi_slice(czi, C, Z):
Expand Down Expand Up @@ -140,6 +141,101 @@ def zarr_to_omezarr(zarr_path: str, out_path: str, res: list):
_edit_ome_metadata(out_path, res)


def _write_slice_ome(z: int, lvl: int, z_in_path: str, zgr_path: str):
    """Copy a single z-slice from a source zarr into one level of an ome-zarr group.

    The slice is downsampled in x/y by ``block_reduce`` with block size
    ``2**lvl`` before writing; level 0 is copied unchanged.

    Args:
        z (int): Index of the z-slice to write.
        lvl (int): Pyramid level (array named ``str(lvl)`` in the group).
        z_in_path (str): Path of the source zarr.
        zgr_path (str): Path of the destination ome-zarr group.
    """
    source = zarr.open(z_in_path)
    level_arr = zarr.open_group(zgr_path)[str(lvl)]

    plane = np.squeeze(source[z, :, :])
    # NOTE(review): block_reduce defaults to a block *sum* — confirm this
    # (rather than a mean/max) is the intended downsampling.
    downsampled = block_reduce(plane, block_size=2**lvl) if lvl > 0 else plane

    level_arr[z, :, :] = downsampled


def zarr_to_omezarr_single(zarr_path: str, out_path: str, res: list, parallel: int = 1):
    """Convert 3D zarr to ome-zarr manually. Chunk size in z is 1.

    Writes five pyramid levels; level ``lvl`` is downsampled by ``2**lvl`` in
    x and y (z stays at full resolution). Resolution metadata is written in
    micrometers, converted from the nanometer input (matching
    ``_edit_ome_metadata`` used by ``zarr_to_omezarr``).

    Args:
        zarr_path (str): Path to zarr.
        out_path (str): Path of ome-zarr to be created.
        res (list): List of xyz resolution values in nanometers.
        parallel (int): Number of cores to use.

    Raises:
        ValueError: If zarr to be written already exists.
        ValueError: If conversion is not 3D array.
    """
    if os.path.exists(out_path):
        raise ValueError(
            f"{out_path} already exists, please delete the existing file or change the name of the ome-zarr to be created."
        )

    zra = zarr.open(zarr_path)
    sz0 = zra.shape

    if len(sz0) != 3:
        raise ValueError("Conversion only supported for 3D arrays")

    zgr = zarr.group(out_path)

    for lvl in tqdm(range(5), desc="Writing different levels..."):
        # Downsample one sample slice only to discover this level's xy shape.
        im_slice = np.squeeze(zra[0, :, :])
        if lvl > 0:
            im_ds = block_reduce(im_slice, block_size=2**lvl)
        else:
            im_ds = im_slice
        chunk_size = [1, np.amin((200, im_ds.shape[0])), np.amin((200, im_ds.shape[1]))]

        # Create the level array; slices are filled below by _write_slice_ome.
        zgr.create(
            str(lvl),
            shape=(sz0[0], im_ds.shape[0], im_ds.shape[1]),
            chunks=chunk_size,
            dtype=zra.dtype,
            dimension_separator="/",
        )

        if parallel == 1:
            for z in tqdm(range(sz0[0]), desc="Writing slices...", leave=False):
                _write_slice_ome(z, lvl, zarr_path, out_path)
        else:
            Parallel(n_jobs=parallel, backend="threading")(
                delayed(_write_slice_ome)(
                    z, lvl, z_in_path=zarr_path, zgr_path=out_path
                )
                for z in tqdm(range(sz0[0]), desc="Writing slices...", leave=False)
            )

    axes = []
    for dim in ["z", "x", "y"]:
        axes.append({"name": dim, "type": "space", "unit": "micrometer"})

    # Axes are declared in micrometers, so convert the nanometer input before
    # writing scale factors (bug fix: previously nanometer values were written
    # unconverted, disagreeing with the unit and with _edit_ome_metadata).
    res_um = [r / 1000 for r in res]

    datasets = []
    for lvl in range(5):
        datasets.append(
            {
                "path": str(lvl),
                "coordinateTransformations": [
                    {
                        "type": "scale",
                        # zxy order to match the axes list above.
                        "scale": [res_um[2], res_um[0] * 2**lvl, res_um[1] * 2**lvl],
                    }
                ],
            }
        )

    json_data = {
        "multiscales": [
            {"axes": axes, "datasets": datasets, "name": "/", "version": "0.4"}
        ]
    }

    with open(Path(out_path) / ".zattrs", "w") as f:
        json.dump(json_data, f, indent=4)


def _edit_ome_metadata(out_path: str, res: list):
res = np.divide([res[-1], res[0], res[1]], 1000)
ome_zarr = zarr.open(
Expand Down
150 changes: 46 additions & 104 deletions experiments/sriram/scratch.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
Expand All @@ -24,147 +24,89 @@
]
},
{
"cell_type": "code",
"execution_count": null,
"cell_type": "markdown",
"metadata": {},
"outputs": [],
"source": [
"project_path = \"/Users/thomasathey/Documents/mimlab/mouselight/brainlit_parent/brainlit/experiments/sriram/data/\" # \"C:\\\\Users\\\\Sriram Sudarsanam\\\\Desktop\\\\NeuroglancerTrial\\\\\"\n",
"czi_path = f\"{project_path}test.czi\" # path to czi image\n",
"out_dir = f\"{project_path}\" # path to directory where zarr should be made, should end in slash"
"create zgroup"
]
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"czi = aicspylibczi.CziFile(czi_path)\n",
"slice1 = _read_czi_slice(czi, C=0, Z=0)\n",
"C = czi.get_dims_shape()[0][\"C\"][1]\n",
"H = slice1.shape[0]\n",
"W = slice1.shape[1]\n",
"Z = czi.get_dims_shape()[0][\"Z\"][1]\n",
"\n",
"print(\n",
" f\"Writing {C} zarrs of shape {H}x{W}x{Z} from czi with dims {czi.get_dims_shape()}\"\n",
"dir = Path(\n",
" \"/Users/thomasathey/Documents/mimlab/mouselight/brainlit_parent/brainlit/experiments/sriram/test-write-ome\"\n",
")\n",
"sz = np.array([H, W, Z], dtype=\"int\")\n",
"zgr_path = dir / \"group-test\"\n",
"\n",
"fg_path = out_dir + \"fg.zarr\"\n",
"zarr_fg = zarr.open(fg_path, mode=\"w\", shape=sz, chunks=(200, 200, 10), dtype=\"uint16\")"
"zgr = zarr.group(zgr_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"cell_type": "markdown",
"metadata": {},
"outputs": [],
"source": [
"def _write_zrange_thread(zarr_path, czi_path, channel, zs):\n",
" czi = aicspylibczi.CziFile(czi_path)\n",
"\n",
" zarr_fg = zarr.open(zarr_path)\n",
" for z in zs:\n",
" zarr_fg[:, :, z] = _read_czi_slice(czi, C=channel, Z=z)"
"create array"
]
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"from joblib import Parallel, delayed\n",
"\n",
"z_blocks = [np.arange(i, i + 10) for i in [0, 10, 20, 30]]\n",
"sz = 64\n",
"\n",
"Parallel(n_jobs=4)(\n",
" delayed(_write_zrange_thread)(fg_path, czi_path, 1, zs) for zs in z_blocks\n",
")"
"for lvl in range(5):\n",
" xysz = sz / 2**lvl\n",
" zgr.zeros(\n",
" str(lvl),\n",
" shape=(sz, xysz, xysz),\n",
" chunks=(4, np.amin((4, xysz)), np.amin((4, xysz))),\n",
" dtype=\"<u2\",\n",
" dimension_separator=\"/\",\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"\n",
"fname = \"/Users/thomasathey/Documents/mimlab/mouselight/brainlit_parent/brainlit/experiments/sriram/traces/skeletons/info\"\n",
"with open(fname) as f:\n",
" data = json.load(f)\n",
" print(data[\"vertex_attributes\"])\n",
" for i, attr in enumerate(data[\"vertex_attributes\"]):\n",
" if attr[\"id\"] == \"vertex_types\":\n",
" data[\"vertex_attributes\"].pop(i)\n",
" break\n",
"\n",
"with open(fname, \"w\") as f:\n",
" json.dump(data, f)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"store = parse_url(\n",
" \"/Users/thomasathey/Documents/jovolab/neurodata_infrastructure_tips/neuroglancer-zarr/test_ngff.zarr\",\n",
" mode=\"w\",\n",
").store\n",
"root = zarr.group(store=store)\n",
"write_image(image=dra, group=root, axes=\"xyz\")"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"python cors_webserver.py -d \"/Users/thomasathey/Documents/jovolab/neurodata_infrastructure_tips/neuroglancer-zarr/\" -p 9010"
"create .zattrs"
]
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 13,
"metadata": {},
"outputs": [],
"source": [
"ng_path = \"precomputed://file:///Users/thomasathey/Documents/mimlab/mouselight/brainlit_parent/brainlit/experiments/sriram/sample/ng/frags\"\n",
"axes = []\n",
"for dim in [\"z\", \"x\", \"y\"]:\n",
" axes.append({\"name\": dim, \"type\": \"space\", \"unit\": \"micrometer\"})\n",
"\n",
"vol_im = CloudVolume(ng_path)\n",
"vol_im.chunk_size\n",
"vol_im.resolution"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ng_path = \"precomputed://file:///Users/thomasathey/Documents/mimlab/mouselight/brainlit_parent/brainlit/experiments/sriram/sample/ng/traces\"\n",
"datasets = []\n",
"for lvl in range(5):\n",
" datasets.append(\n",
" {\n",
" \"path\": str(lvl),\n",
" \"coordinateTransformations\": [\n",
" {\"type\": \"scale\", \"scale\": [1.0, 2.0**lvl, 2.0**lvl]}\n",
" ],\n",
" }\n",
" )\n",
"\n",
"info = CloudVolume.create_new_info(\n",
" num_channels=1,\n",
" layer_type=\"segmentation\",\n",
" data_type=\"uint16\", # Channel images might be 'uint8'\n",
" # raw, png, jpeg, compressed_segmentation, fpzip, kempressed, zfpc, compresso\n",
" encoding=\"raw\",\n",
" resolution=vol_im.resolution, # Voxel scaling, units are in nanometers\n",
" voxel_offset=[0, 0, 0], # x,y,z offset in voxels from the origin\n",
" # Pick a convenient size for your underlying chunk representation\n",
" # Powers of two are recommended, doesn't need to cover image exactly\n",
" chunk_size=vol_im.chunk_size, # units are voxels\n",
" volume_size=vol_im.shape[:3], # e.g. a cubic millimeter dataset\n",
" skeletons=\"skeletons\",\n",
")\n",
"vol = CloudVolume(ng_path, info=info, compress=False)\n",
"vol.commit_info()"
"json_data = {\n",
" \"multiscales\": [{\"axes\": axes, \"datasets\": datasets, \"name\": \"/\", \"version\": \"0.4\"}]\n",
"}\n",
"\n",
"with open(\n",
" \"/Users/thomasathey/Documents/mimlab/mouselight/brainlit_parent/brainlit/experiments/sriram/test-write-ome/group-test/.zattrs\",\n",
" \"w\",\n",
") as f:\n",
" json.dump(json_data, f, indent=4)"
]
},
{
Expand Down Expand Up @@ -279,7 +221,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.10"
"version": "3.8.18"
},
"orig_nbformat": 4,
"vscode": {
Expand Down
13 changes: 10 additions & 3 deletions experiments/sriram/visualization.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,12 @@
"metadata": {},
"outputs": [],
"source": [
"from brainlit.utils.write import czi_to_zarr, zarr_to_omezarr, write_trace_layer\n",
"from brainlit.utils.write import (\n",
" czi_to_zarr,\n",
" zarr_to_omezarr,\n",
" write_trace_layer,\n",
" zarr_to_omezarr_single,\n",
")\n",
"import zarr\n",
"from cloudvolume import CloudVolume\n",
"import json\n",
Expand Down Expand Up @@ -89,7 +94,9 @@
"metadata": {},
"outputs": [],
"source": [
"zarr_to_omezarr(zarr_path=zarr_paths[0], out_path=ome_path, res=resolution)\n",
"zarr_to_omezarr_single(\n",
" zarr_path=zarr_paths[0], out_path=ome_path, res=resolution, parallel=2\n",
")\n",
"write_trace_layer(parent_dir=project_path, res=resolution)"
]
},
Expand Down Expand Up @@ -405,7 +412,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.10"
"version": "3.8.18"
},
"orig_nbformat": 4,
"vscode": {
Expand Down

0 comments on commit e2ebf47

Please sign in to comment.