Merge pull request #2587 from activeloopai/indra_0_0_75
bump libdeeplake version to 0.0.75
levongh committed Sep 7, 2023
2 parents ea1a2a3 + c63f680 commit 78109ea
Showing 9 changed files with 19 additions and 8 deletions.
3 changes: 2 additions & 1 deletion deeplake/api/dataset.py
@@ -383,7 +383,8 @@ def empty(
org_id (str, Optional): Organization id to be used for enabling enterprise features. Only applicable for local datasets.
verbose (bool): If True, logs will be printed. Defaults to True.
lock_timeout (int): Number of seconds to wait before throwing a LockException. If None, wait indefinitely
- lock_enabled (bool): If true, the dataset manages a write lock. NOTE: Only set to False if you are managing concurrent access externally
+ lock_enabled (bool): If true, the dataset manages a write lock. NOTE: Only set to False if you are managing concurrent access externally. Defaults to ``True``.
Returns:
Dataset: Dataset created using the arguments provided.
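For reference, a minimal usage sketch of the two lock parameters documented above (paths and the timeout value are illustrative, not from this PR):

import deeplake

# The dataset manages its own write lock by default (lock_enabled=True);
# wait at most 10 seconds for it instead of indefinitely.
ds = deeplake.empty("./my_dataset", lock_timeout=10)

# Disable locking only when concurrent writers are coordinated externally.
ds2 = deeplake.empty("./my_other_dataset", lock_enabled=False)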
1 change: 1 addition & 0 deletions deeplake/core/dataset/dataset.py
@@ -1792,6 +1792,7 @@ def _squash_main(self) -> None:
Raises:
ReadOnlyModeError: If branch deletion is attempted in read-only mode.
VersionControlError: If the branch cannot be squashed.
+ Exception: If the dataset is a filtered view.
"""
if self._is_filtered_view:
raise Exception(
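A standalone sketch of the guard the new Raises entry documents (the class and the exception message are illustrative; the real method lives on deeplake.core.dataset.Dataset):

class DatasetLike:
    def __init__(self, is_filtered_view: bool):
        self._is_filtered_view = is_filtered_view

    def _squash_main(self) -> None:
        # A filtered view references a subset of samples, so collapsing the
        # version history underneath it would invalidate the view.
        if self._is_filtered_view:
            raise Exception("Cannot squash the main branch of a filtered view.")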
2 changes: 1 addition & 1 deletion deeplake/core/seed.py
@@ -13,7 +13,7 @@ def __new__(cls):
def seed(self, seed: Optional[int] = None):
if seed is None or isinstance(seed, int):
self.internal_seed = seed
- if self.indra_api is None:
+ if self.indra_api is None:  # type: ignore
from deeplake.enterprise.convert_to_libdeeplake import (
import_indra_api_silent,
)
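The surrounding class is a singleton that defers the indra import until a seed is actually set. A minimal sketch of that pattern (names simplified from the diff; the module name in the import is hypothetical):

from typing import Optional

class DeeplakeRandom:
    _instance = None

    def __new__(cls):
        # Singleton: every call returns the same shared instance.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            cls._instance.internal_seed = None
            cls._instance.indra_api = None
        return cls._instance

    def seed(self, seed: Optional[int] = None):
        if seed is None or isinstance(seed, int):
            self.internal_seed = seed
            if self.indra_api is None:
                # Deferred, failure-tolerant import of the optional native API.
                try:
                    import importlib
                    self.indra_api = importlib.import_module("indra.api")  # hypothetical name
                except ImportError:
                    pass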
1 change: 1 addition & 0 deletions deeplake/core/vectorstore/deeplake_vectorstore.py
@@ -630,6 +630,7 @@ def delete_by_path(
Args:
path (str, pathlib.Path): The full path to the Deep Lake Vector Store.
token (str, optional): Activeloop token, used for fetching user credentials. This is optional, as tokens are normally autogenerated. Defaults to ``None``.
+ force (bool): If True, delete the path forcibly, without raising an exception. Defaults to ``True``.
Danger:
This method permanently deletes all of your data if the Vector Store exists! Be very careful when using this method.
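A hedged usage sketch (the class name VectorStore and the import path are assumed from the file being edited, not stated in this diff):

from deeplake.core.vectorstore.deeplake_vectorstore import VectorStore

# Danger: permanently removes all data at this path.
VectorStore.delete_by_path("./my_vector_store", force=True)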
4 changes: 4 additions & 0 deletions deeplake/core/vectorstore/vector_search/dataset/dataset.py
@@ -406,6 +406,10 @@ def extend(
processed_tensors (Dict[str, List[Any]]): Dictionary of tensors to be added to the dataset.
dataset (deeplake.core.dataset.Dataset): Dataset to be extended.
+ Raises:
+ IncorrectEmbeddingShapeError: If the embedding function's output shape is incorrect.
+ ValueError: If the embedding function returned an empty list.
"""
if embedding_function:
for func, data, tensor in zip(
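A standalone sketch of the two failure modes the new Raises entries document (the embedding function and its dimension are illustrative; ValueError stands in for IncorrectEmbeddingShapeError to keep the snippet self-contained):

import numpy as np

def embed(texts):  # stand-in embedding function
    return [np.random.rand(384) for _ in texts]

embeddings = embed(["a", "b", "c"])

if len(embeddings) == 0:
    raise ValueError("embedding function returned an empty list")
if len({np.asarray(e).shape for e in embeddings}) != 1:
    # corresponds to IncorrectEmbeddingShapeError in deeplake
    raise ValueError("embedding function returned inconsistent shapes")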
@@ -35,6 +35,7 @@ def search(
Raises:
ValueError: If both tql_string and tql_filter are specified.
+ raise_indra_installation_error: If indra is not installed.
Returns:
Union[Dict, DeepLakeDataset]: Dictionary where keys are tensor names and values are the results of the search, or a Deep Lake dataset view.
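A minimal sketch of the mutual-exclusion check described by the first Raises entry (parameter names taken from the docstring; the body is elided):

def search(tql_string=None, tql_filter=None):
    # Either a full TQL query or a filter clause may be given, never both.
    if tql_string and tql_filter:
        raise ValueError("Specify tql_string or tql_filter, not both.")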
10 changes: 5 additions & 5 deletions deeplake/enterprise/dataloader.py
@@ -54,7 +54,7 @@ def deeplake_islice(iterable, *args, **kwargs):


# Loaded lazily to avoid a cyclic import.
- INDRA_LOADER = None
+ INDRA_LOADER = None  # type: ignore
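The `# type: ignore` is needed because the global starts as None and is rebound to a loader class at runtime, which mypy cannot infer. A sketch of that lazy-global pattern (the import path is an assumption, not from this diff):

INDRA_LOADER = None  # rebound on first use; hence the "type: ignore" for mypy

def _ensure_indra_loader():
    global INDRA_LOADER
    if INDRA_LOADER is None:
        try:
            from indra.pytorch.loader import Loader  # assumed import path
            INDRA_LOADER = Loader
        except ImportError:
            pass  # native extension not installed
    return INDRA_LOADER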


def indra_available() -> bool:
@@ -205,7 +205,7 @@ def sampler(self):
def batch_sampler(self):
return (
BatchSampler(self.sampler, self.batch_size, self.drop_last)
- if BatchSampler
+ if BatchSampler is not None
else None
)
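Since a class object is always truthy, both spellings behave the same here; the explicit `is not None` simply reads as the optional-import guard it is. A self-contained sketch of the pattern (function name is illustrative):

try:
    from torch.utils.data import BatchSampler
except ImportError:  # torch is an optional dependency
    BatchSampler = None

def make_batch_sampler(sampler, batch_size, drop_last):
    return (
        BatchSampler(sampler, batch_size, drop_last)
        if BatchSampler is not None
        else None
    )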

@@ -640,8 +640,8 @@ def _get_suboptimal_thread_count(self) -> Optional[int]:
)

num_suboptimal_threads = (
- int(INDRA_API.num_available_threads() / num_devices)
- if INDRA_API is not None and num_devices is not None
+ int(INDRA_API.num_available_threads() / num_devices)  # type: ignore [name-defined]
+ if INDRA_API is not None and num_devices is not None  # type: ignore [name-defined]
else None
)
return num_suboptimal_threads
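The division just splits the available worker threads evenly across devices; concretely (numbers illustrative):

num_available_threads = 16  # e.g. as reported by the native API
num_devices = 2             # e.g. visible GPUs
print(int(num_available_threads / num_devices))  # -> 8 threads per device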
@@ -697,7 +697,7 @@ def __get_indra_dataloader(
"preserving the offset for resuming iteration at a predictable index and order, please set a random seed using deeplake.random()"
)

- return INDRA_LOADER(
+ return INDRA_LOADER(  # type: ignore [misc]
indra_dataset,
batch_size=self._batch_size,
num_threads=num_threads,
3 changes: 3 additions & 0 deletions deeplake/util/connect_dataset.py
@@ -80,6 +80,9 @@ def __init__(
def validate(self, allow_local: bool = False) -> None:
"""Validates the attributes to make that dataset at ``src_path`` can be connected.
+ Args:
+ allow_local (bool): Whether connecting a local path is allowed. Defaults to ``False``.
Raises:
InvalidSourcePathError: If the ``src_path`` is not a valid s3, gcs or azure path.
"""
2 changes: 1 addition & 1 deletion setup.py
@@ -70,7 +70,7 @@ def libdeeplake_availabe():
extras_require["all"] = [req_map[r] for r in all_extras]

if libdeeplake_availabe():
libdeeplake = "libdeeplake==0.0.74"
libdeeplake = "libdeeplake==0.0.75"
extras_require["enterprise"] = [libdeeplake, "pyjwt"]
extras_require["all"].append(libdeeplake)

