Skip to content

Commit

Permalink
MRG: Merge pull request #163 from octue/release/0.1.18
Browse files Browse the repository at this point in the history
Release/0.1.18
  • Loading branch information
cortadocodes committed May 12, 2021
2 parents 8924d88 + 1d92a2b commit d095788
Show file tree
Hide file tree
Showing 15 changed files with 85 additions and 85 deletions.
4 changes: 2 additions & 2 deletions docs/source/datafile.rst
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ For creating new data in a new local file:
tags = {"cleaned:True", "type:linear"}
with Datafile(path="path/to/local/file.dat", timestamp=None, sequence=sequence, tags=tags, mode="w") as datafile, f:
with Datafile(path="path/to/local/file.dat", sequence=sequence, tags=tags, mode="w") as datafile, f:
f.write("This is some cleaned data.")
datafile.to_cloud(project_name="my-project", bucket_name="my-bucket", path_in_bucket="path/to/data.dat")
Expand All @@ -141,5 +141,5 @@ For existing data in an existing local file:
sequence = 2
tags = {"cleaned:True", "type:linear"}
datafile = Datafile(path="path/to/local/file.dat", timestamp=None, sequence=sequence, tags=tags)
datafile = Datafile(path="path/to/local/file.dat", sequence=sequence, tags=tags)
datafile.to_cloud(project_name="my-project", bucket_name="my-bucket", path_in_bucket="path/to/data.dat")
8 changes: 4 additions & 4 deletions octue/resources/datafile.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ class Datafile(Taggable, Serialisable, Pathable, Loggable, Identifiable, Hashabl
def __init__(
self,
path,
timestamp,
timestamp=None,
id=ID_DEFAULT,
logger=None,
path_from=None,
Expand Down Expand Up @@ -184,7 +184,7 @@ def from_cloud(
the datafile when the context is exited
:return Datafile:
"""
datafile = cls(timestamp=None, path=storage.path.generate_gs_path(bucket_name, datafile_path))
datafile = cls(path=storage.path.generate_gs_path(bucket_name, datafile_path))
datafile.get_cloud_metadata(project_name, bucket_name, datafile_path)
custom_metadata = datafile._cloud_metadata.get("custom_metadata", {})

Expand Down Expand Up @@ -499,14 +499,14 @@ class _DatafileContextManager:
Usage:
```
my_datafile = Datafile(timestamp=None, path='subfolder/subsubfolder/my_datafile.json)
my_datafile = Datafile(path='subfolder/subsubfolder/my_datafile.json')
with my_datafile.open('w') as fp:
fp.write("{}")
```
This is equivalent to the standard python:
```
my_datafile = Datafile(timestamp=None, path='subfolder/subsubfolder/my_datafile.json)
my_datafile = Datafile(path='subfolder/subsubfolder/my_datafile.json')
os.makedirs(os.path.split(my_datafile.absolute_path)[0], exist_ok=True)
with open(my_datafile.absolute_path, 'w') as fp:
fp.write("{}")
Expand Down
6 changes: 3 additions & 3 deletions octue/resources/tag.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from octue.utils.encoders import OctueJSONEncoder


TAG_PATTERN = re.compile(r"^$|^[A-Za-z0-9][A-Za-z0-9:\-/]*(?<![/:-])$")
TAG_PATTERN = re.compile(r"^$|^[A-Za-z0-9][A-Za-z0-9:.\-/]*(?<![./:-])$")


class Tag(Filterable):
Expand Down Expand Up @@ -82,8 +82,8 @@ def _clean(name):

if not re.match(TAG_PATTERN, cleaned_name):
raise InvalidTagException(
f"Invalid tag '{cleaned_name}'. Tags must contain only characters 'a-z', '0-9', ':' and '-'. They must "
f"not start with '-' or ':'."
f"Invalid tag '{cleaned_name}'. Tags must contain only characters 'a-z', 'A-Z', '0-9', ':', '.', '/' "
f"and '-'. They must not start with '-', ':', '/' or '.'"
)

return cleaned_name
Expand Down
Original file line number Diff line number Diff line change
@@ -1 +1 @@
octue==0.1.17
octue==0.1.18
Original file line number Diff line number Diff line change
@@ -1 +1 @@
octue==0.1.17
octue==0.1.18
Original file line number Diff line number Diff line change
@@ -1 +1 @@
octue==0.1.17
octue==0.1.18
2 changes: 1 addition & 1 deletion octue/templates/template-python-fractal/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
octue==0.1.17
octue==0.1.18


# ----------- Some common libraries -----------------------------------------------------------------------------------
Expand Down
2 changes: 1 addition & 1 deletion octue/templates/template-using-manifests/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
octue==0.1.17
octue==0.1.18


# ----------- Some common libraries -----------------------------------------------------------------------------------
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

setup(
name="octue",
version="0.1.17", # Ensure all requirements files containing octue are updated, too (e.g. docs build).
version="0.1.18", # Ensure all requirements files containing octue are updated, too (e.g. docs build).
py_modules=["cli"],
install_requires=[
"click>=7.1.2",
Expand Down
4 changes: 2 additions & 2 deletions tests/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,8 @@ def create_valid_dataset(self):
path = os.path.join("path-within-dataset", "a_test_file.csv")

files = [
Datafile(timestamp=None, path_from=path_from, path=path, skip_checks=False),
Datafile(timestamp=None, path_from=path_from, path=path, skip_checks=False),
Datafile(path_from=path_from, path=path, skip_checks=False),
Datafile(path_from=path_from, path=path, skip_checks=False),
]

return Dataset(files=files)
Expand Down
4 changes: 2 additions & 2 deletions tests/cloud/pub_sub/test_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,8 +119,8 @@ def test_ask_with_input_manifest(self):
asking_service = MockService(backend=self.BACKEND, children={responding_service.id: responding_service})

files = [
Datafile(timestamp=None, path="gs://my-dataset/hello.txt"),
Datafile(timestamp=None, path="gs://my-dataset/goodbye.csv"),
Datafile(path="gs://my-dataset/hello.txt"),
Datafile(path="gs://my-dataset/goodbye.csv"),
]

input_manifest = Manifest(datasets=[Dataset(files=files)], path="gs://my-dataset", keys={"my_dataset": 0})
Expand Down
30 changes: 15 additions & 15 deletions tests/resources/test_datafile.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ def tearDown(self):
TEMPORARY_LOCAL_FILE_CACHE.clear()

def create_valid_datafile(self):
return Datafile(timestamp=None, path_from=self.path_from, path=self.path, skip_checks=False)
return Datafile(path_from=self.path_from, path=self.path, skip_checks=False)

def create_datafile_in_cloud(
self,
Expand Down Expand Up @@ -57,7 +57,7 @@ def create_datafile_in_cloud(

def test_instantiates(self):
"""Ensures a Datafile instantiates using only a path and generates a uuid ID"""
df = Datafile(timestamp=None, path="a_path")
df = Datafile(path="a_path")
self.assertTrue(isinstance(df.id, str))
self.assertEqual(type(uuid.UUID(df.id)), uuid.UUID)
self.assertIsNone(df.sequence)
Expand All @@ -82,35 +82,35 @@ def test_setting_timestamp(self):

def test_gt(self):
"""Test that datafiles can be ordered using the greater-than operator."""
a = Datafile(timestamp=None, path="a_path")
b = Datafile(timestamp=None, path="b_path")
a = Datafile(path="a_path")
b = Datafile(path="b_path")
self.assertTrue(a < b)

def test_gt_with_wrong_type(self):
"""Test that datafiles cannot be ordered compared to other types."""
with self.assertRaises(TypeError):
Datafile(timestamp=None, path="a_path") < "hello"
Datafile(path="a_path") < "hello"

def test_lt(self):
"""Test that datafiles can be ordered using the less-than operator."""
a = Datafile(timestamp=None, path="a_path")
b = Datafile(timestamp=None, path="b_path")
a = Datafile(path="a_path")
b = Datafile(path="b_path")
self.assertTrue(b > a)

def test_lt_with_wrong_type(self):
"""Test that datafiles cannot be ordered compared to other types."""
with self.assertRaises(TypeError):
Datafile(timestamp=None, path="a_path") > "hello"
Datafile(path="a_path") > "hello"

def test_checks_fail_when_file_doesnt_exist(self):
path = "not_a_real_file.csv"
with self.assertRaises(exceptions.FileNotFoundException) as error:
Datafile(timestamp=None, path=path, skip_checks=False)
Datafile(path=path, skip_checks=False)
self.assertIn("No file found at", error.exception.args[0])

def test_conflicting_extension_fails_check(self):
with self.assertRaises(exceptions.InvalidInputException) as error:
Datafile(timestamp=None, path_from=self.path_from, path=self.path, skip_checks=False, extension="notcsv")
Datafile(path_from=self.path_from, path=self.path, skip_checks=False, extension="notcsv")

self.assertIn("Extension provided (notcsv) does not match file extension", error.exception.args[0])

Expand Down Expand Up @@ -171,7 +171,7 @@ def test_hashes_for_the_same_datafile_are_the_same(self):
def test_is_in_cloud(self):
"""Test whether a file is in the cloud or not can be determined."""
self.assertFalse(self.create_valid_datafile().is_in_cloud)
self.assertTrue(Datafile(timestamp=None, path="gs://hello/file.txt").is_in_cloud)
self.assertTrue(Datafile(path="gs://hello/file.txt").is_in_cloud)

def test_from_cloud_with_bare_file(self):
"""Test that a Datafile can be constructed from a file on Google Cloud storage with no custom metadata."""
Expand Down Expand Up @@ -311,7 +311,7 @@ def test_update_cloud_metadata(self):
"""Test that a cloud datafile's metadata can be updated."""
_, project_name, bucket_name, path_in_bucket, _ = self.create_datafile_in_cloud()

new_datafile = Datafile(path="glib.txt", timestamp=None, cluster=32)
new_datafile = Datafile(path="glib.txt", cluster=32)
new_datafile.update_cloud_metadata(project_name, bucket_name, path_in_bucket)

self.assertEqual(Datafile.from_cloud(project_name, bucket_name, path_in_bucket).cluster, 32)
Expand Down Expand Up @@ -365,7 +365,7 @@ def test_open_with_reading_local_file(self):
with tempfile.NamedTemporaryFile("w", delete=False) as temporary_file:
temporary_file.write(file_contents)

datafile = Datafile(timestamp=None, path=temporary_file.name)
datafile = Datafile(path=temporary_file.name)

with datafile.open() as f:
self.assertEqual(f.read(), file_contents)
Expand All @@ -377,7 +377,7 @@ def test_open_with_writing_local_file(self):
with tempfile.NamedTemporaryFile("w", delete=False) as temporary_file:
temporary_file.write(file_contents)

datafile = Datafile(timestamp=None, path=temporary_file.name)
datafile = Datafile(path=temporary_file.name)

with datafile.open("w") as f:
f.write("hello")
Expand Down Expand Up @@ -486,7 +486,7 @@ def test_datafile_as_context_manager(self):
temporary_file = tempfile.NamedTemporaryFile("w", delete=False)
contents = "Here is the content."

with Datafile(path=temporary_file.name, timestamp=None, mode="w") as (datafile, f):
with Datafile(path=temporary_file.name, mode="w") as (datafile, f):
f.write(contents)

# Check that the cloud file has been updated.
Expand Down

0 comments on commit d095788

Please sign in to comment.