Commit

test_util

tathey1 committed Jan 4, 2024
1 parent 780987a commit fbf05b6
Showing 2 changed files with 67 additions and 5 deletions.
64 changes: 59 additions & 5 deletions brainlit/BrainLine/tests/test_util.py
@@ -15,11 +15,16 @@ def make_data_dir(tmp_path_factory):
    return data_dir


# Makes a data file with invalid object_type, and a sample where info files can be written
@pytest.fixture(scope="session")
def make_bad_datafile(make_data_dir):
    data_dir = make_data_dir
    bad_data_file = data_dir / "bad_data.json"
    bad_type = {"object_type": "invalid", "brain2paths": {}}
    base_s3 = f"precomputed://file://{str(data_dir)}"
    bad_type = {
        "object_type": "invalid",
        "brain2paths": {"write_info": {"base_s3": base_s3}},
    }
    with open(bad_data_file, "w") as f:
        json.dump(bad_type, f)

@@ -65,14 +70,37 @@ def test_download_subvolumes(make_data_dir, make_bad_datafile):

    # Data file with bad object_type
    bad_data_file = make_bad_datafile
    with pytest.raises(ValueError):
    with pytest.raises(ValueError) as e_info:
        util.download_subvolumes(
            data_dir=data_dir,
            brain_id="pytest",
            layer_names=layer_names,
            dataset_to_save="val",
            data_file=bad_data_file,
        )
    assert e_info.value.args[0] == "object_type must be soma or axon, not invalid"

    # Sample with no base_s3 path
    data_file = (
        Path(os.path.abspath(__file__)).parents[3]
        / "docs"
        / "notebooks"
        / "pipelines"
        / "BrainLine"
        / "axon_data.json"
    )
    with pytest.raises(ValueError) as e_info:
        util.download_subvolumes(
            data_dir=data_dir,
            brain_id="pytest_nobases3",
            layer_names=layer_names,
            dataset_to_save="val",
            data_file=data_file,
        )
    assert (
        e_info.value.args[0]
        == "base_s3 not an entry in brain2paths for brain pytest_nobases3"
    )

    # Axon
    data_file = (
@@ -95,6 +123,13 @@ def test_download_subvolumes(make_data_dir, make_bad_datafile):
    assert len(files) == 2

    # Soma
    # data_dir is passed as a string, and the data folder has already been made

    output_dir = data_dir / "brainpytest_download"
    output_dir.mkdir()
    output_dir = output_dir / "val"
    output_dir.mkdir()

    data_file = (
        Path(os.path.abspath(__file__)).parents[3]
        / "docs"
@@ -104,18 +139,32 @@ def test_download_subvolumes(make_data_dir, make_bad_datafile):
/ "soma_data.json"
)
util.download_subvolumes(
data_dir=data_dir,
data_dir=str(data_dir),
brain_id="pytest_download",
layer_names=layer_names,
dataset_to_save="val",
data_file=data_file,
)
output_dir = data_dir / "brainpytest_download" / "val"
files = os.listdir(output_dir)
assert len(files) == 2


def test_json_to_points():
def test_json_to_points(make_data_dir):
data_dir = make_data_dir
json_data = {
"layers": [
{
"type": "annotation",
"name": "points",
"annotations": [{"point": [0, 1, 2]}],
}
]
}
json_path = data_dir / "json_file.json"
with open(json_path, "w") as f:
json.dump(json_data, f)
point_layers = util.json_to_points(str(json_path))

url = "https://ara.viz.neurodata.io/?json_url=https://json.neurodata.io/v1?NGStateID=ki9d3Hsk5jcsJg"
point_layers = util.json_to_points(url)
keys = point_layers.keys()
@@ -220,6 +269,11 @@ def test_fold():
    assert_array_equal(true_fold, test_fold)


def test_create_transformed_mask_info(make_bad_datafile):
    bad_data_file = make_bad_datafile
    util.create_transformed_mask_info(brain="write_info", data_file=bad_data_file)


def test_dir_to_atlas_pts(tmp_path):
    json_dir = tmp_path / "json_data"
    json_dir.mkdir()
8 changes: 8 additions & 0 deletions docs/notebooks/pipelines/BrainLine/axon_data.json
@@ -24,6 +24,14 @@
"layer": "val"
},
"subtype": "test_type"
},
"pytest_nobases3": {
"base": "precomputed://https://open-neurodata.s3.amazonaws.com/ara_2016/sagittal_10um/",
"val_info": {
"url": "https://ara.viz.neurodata.io/?json_url=https://json.neurodata.io/v1?NGStateID=_7H3OM-IAEgp0Q",
"layer": "val"
},
"subtype": "test_type"
}
}
}
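
For reference, a minimal way to exercise the tests touched by this commit, shown as a sketch: it assumes pytest and the brainlit test dependencies are installed, and the -k expression below is illustrative rather than part of the commit.

import pytest

# Run only the tests updated in this commit; -k selects tests by name substring match.
pytest.main([
    "brainlit/BrainLine/tests/test_util.py",
    "-k", "download_subvolumes or json_to_points or create_transformed_mask_info",
])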
