
Commit

Fix import (#2821)
Make indra import not global
khustup2 committed Apr 5, 2024
1 parent e75c56c commit 99af14c
Showing 10 changed files with 31 additions and 68 deletions.
11 changes: 8 additions & 3 deletions deeplake/core/index/index.py
@@ -198,10 +198,15 @@ def __getitem__(self, item: IndexValue):

     def subscriptable(self):
         """Returns whether an IndexEntry can be further subscripted."""
-        from indra import api  # type: ignore
-
-        if isinstance(self.value, api.core.IndexMappingInt64):
-            return self.value.subscriptable()
+        from deeplake.enterprise.util import INDRA_INSTALLED
+
+        if INDRA_INSTALLED:
+            from indra import api  # type: ignore
+
+            if isinstance(self.value, api.core.IndexMappingInt64):
+                return self.value.subscriptable()

         return not isinstance(self.value, int)

     def indices(self, length: int):
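
The pattern in this hunk — a cheap module-level availability flag plus an import deferred into the function body — is the core of the whole commit. A minimal, hypothetical sketch of the same idea (the `fancy_pkg` package and its `fast_sum` function are stand-ins, not part of deeplake):

```python
import importlib.util

# find_spec asks the import machinery whether the package exists without
# importing it, so evaluating this flag costs almost nothing at module load.
FANCY_PKG_INSTALLED = bool(importlib.util.find_spec("fancy_pkg"))


def total(values):
    """Use the optional accelerator when present, otherwise fall back."""
    if FANCY_PKG_INSTALLED:
        # Deferred import: the heavy dependency is only loaded on first use,
        # and code paths that never reach this branch never pay for it.
        import fancy_pkg

        return fancy_pkg.fast_sum(values)
    return sum(values)
```
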
5 changes: 3 additions & 2 deletions deeplake/core/storage/indra.py
@@ -1,7 +1,6 @@
from deeplake.core.storage.provider import StorageProvider
from deeplake.core.partial_reader import PartialReader
from deeplake.core.storage.deeplake_memory_object import DeepLakeMemoryObject
-from indra.api import storage  # type: ignore
from typing import Optional, Union, Dict


@@ -10,10 +9,12 @@ class IndraProvider(StorageProvider):

     def __init__(
         self,
-        root: Union[str, storage.provider],
+        root,  # Union[str, storage.provider],
         read_only: Optional[bool] = False,
         **kwargs,
     ):
+        from indra.api import storage  # type: ignore
+
         if isinstance(root, str):
             self.core = storage.create(root, read_only, **kwargs)
         else:
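
Because `from indra.api import storage` now happens inside `__init__`, the module imports cleanly even when indra is absent, but the `storage.provider` annotation on `root` can no longer be evaluated at import time — hence the commented-out type. An alternative sketch (an illustration, not what this commit does) keeps the annotation visible to type checkers via `typing.TYPE_CHECKING`; the `LazyProvider` class and its `else` branch are hypothetical:

```python
from __future__ import annotations  # annotations are not evaluated at runtime

from typing import TYPE_CHECKING, Optional, Union

if TYPE_CHECKING:
    # Seen only by static type checkers, never executed at runtime.
    from indra.api import storage  # type: ignore


class LazyProvider:
    def __init__(
        self,
        root: Union[str, "storage.provider"],
        read_only: Optional[bool] = False,
        **kwargs,
    ):
        # The real import is deferred until an instance is actually created.
        from indra.api import storage  # type: ignore

        if isinstance(root, str):
            self.core = storage.create(root, read_only, **kwargs)
        else:
            self.core = root  # assumed here; the original else branch is not shown above
```
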
@@ -40,13 +40,6 @@ def __init__(
         logger: logging.Logger,
         **kwargs: Any,
     ):
-        try:
-            from indra import api  # type: ignore
-
-            self.indra_installed = True
-        except Exception:  # pragma: no cover
-            self.indra_installed = False  # pragma: no cover
-
         self._exec_option = exec_option

         self.path: Optional[str] = None
@@ -104,9 +97,7 @@ def token(self):

     @property
     def exec_option(self) -> str:
-        return utils.parse_exec_option(
-            self.dataset, self._exec_option, self.indra_installed, self.username
-        )
+        return utils.parse_exec_option(self.dataset, self._exec_option, self.username)

     @property
     def username(self) -> str:
8 changes: 1 addition & 7 deletions deeplake/core/vectorstore/deep_memory/deep_memory.py
@@ -10,6 +10,7 @@
import numpy as np

import deeplake
+from deeplake.enterprise.util import INDRA_INSTALLED
from deeplake.util.exceptions import (
DeepMemoryAccessError,
IncorrectRelevanceTypeError,
@@ -460,13 +461,6 @@ def evaluate(
             token=self.token,
         )

-        try:
-            from indra import api  # type: ignore
-
-            INDRA_INSTALLED = True
-        except Exception:
-            INDRA_INSTALLED = False
-
         if not INDRA_INSTALLED:
             raise ImportError(
                 "indra is not installed. Please install indra to use this functionality with: pip install `deeplake[enterprise]`"
11 changes: 1 addition & 10 deletions deeplake/core/vectorstore/vector_search/dataset/dataset.py
@@ -38,16 +38,7 @@ def create_or_load_dataset(
branch="main",
**kwargs,
):
try:
from indra import api # type: ignore

_INDRA_INSTALLED = True # pragma: no cover
except ImportError: # pragma: no cover
_INDRA_INSTALLED = False # pragma: no cover

utils.check_indra_installation(
exec_option=exec_option, indra_installed=_INDRA_INSTALLED
)
utils.check_indra_installation(exec_option=exec_option)

if not overwrite and dataset_exists(dataset_path, token, creds, **kwargs):
if tensor_params is not None and tensor_params != DEFAULT_VECTORSTORE_TENSORS:
@@ -2,6 +2,7 @@
from abc import ABC, abstractmethod
from typing import Union, Dict, List, Optional

+from deeplake.enterprise.util import INDRA_INSTALLED
from deeplake.core.vectorstore.vector_search.indra import query
from deeplake.core.vectorstore.vector_search import utils
from deeplake.core.dataset import Dataset as DeepLakeDataset
@@ -110,14 +111,6 @@ def _get_view(self, tql_query, runtime: Optional[Dict] = None):
         return view

     def _get_indra_dataset(self):
-        try:
-            from indra import api  # type: ignore
-
-            INDRA_INSTALLED = True
-        except ImportError:
-            INDRA_INSTALLED = False
-            pass
-
         if not INDRA_INSTALLED:
             from deeplake.enterprise.util import raise_indra_installation_error

@@ -22,21 +22,14 @@ def vector_search(
     org_id,
     return_tql,
 ) -> Union[Dict, DeepLakeDataset]:
-    try:
-        from indra import api  # type: ignore
-
-        _INDRA_INSTALLED = True  # pragma: no cover
-    except ImportError:  # pragma: no cover
-        _INDRA_INSTALLED = False  # pragma: no cover
-
     runtime = utils.get_runtime_from_exec_option(exec_option)

     if callable(filter):
         raise ValueError(
             f"UDF filter functions are not supported with the current `exec_option`={exec_option}. "
         )

-    utils.check_indra_installation(exec_option, indra_installed=_INDRA_INSTALLED)
+    utils.check_indra_installation(exec_option)

     view, tql_filter = filter_utils.attribute_based_filtering_tql(
         view=dataset,
29 changes: 12 additions & 17 deletions deeplake/core/vectorstore/vector_search/utils.py
@@ -7,6 +7,7 @@

import deeplake
from deeplake.constants import MB, DEFAULT_VECTORSTORE_INDEX_PARAMS, TARGET_BYTE_SIZE
+from deeplake.enterprise.util import INDRA_INSTALLED
from deeplake.util.exceptions import TensorDoesNotExistError
from deeplake.util.warnings import always_warn
from deeplake.core.dataset import DeepLakeCloudDataset, Dataset
@@ -41,9 +42,8 @@ def get_exec_option(self):


 class ExecOptionCloudDataset(ExecOptionBase):
-    def __init__(self, dataset, indra_installed, username, path_type):
+    def __init__(self, dataset, username, path_type):
         self.dataset = dataset
-        self.indra_installed = indra_installed
         self.client = dataset.client
         self.token = self.dataset.token
         self.username = username
@@ -59,20 +59,15 @@ def get_exec_option(self):
return "tensor_db"
# option 2: dataset is created in a linked storage or locally,
# indra is installed user/org has access to indra
elif (
self.path_type == "hub"
and self.indra_installed
and self.username != "public"
):
elif self.path_type == "hub" and INDRA_INSTALLED and self.username != "public":
return "compute_engine"
else:
return "python"


class ExecOptionLocalDataset(ExecOptionBase):
def __init__(self, dataset, indra_installed, username):
def __init__(self, dataset, username):
self.dataset = dataset
self.indra_installed = indra_installed
self.token = self.dataset.token
self.username = username

@@ -83,21 +78,21 @@ def get_exec_option(self):
if "mem://" in self.dataset.path:
return "python"

if self.indra_installed and self.username != "public":
if INDRA_INSTALLED and self.username != "public":
return "compute_engine"
return "python"


def exec_option_factory(dataset, indra_installed, username):
def exec_option_factory(dataset, username):
path_type = get_path_type(dataset.path)
if path_type == "local":
return ExecOptionLocalDataset(dataset, indra_installed, username)
return ExecOptionCloudDataset(dataset, indra_installed, username, path_type)
return ExecOptionLocalDataset(dataset, username)
return ExecOptionCloudDataset(dataset, username, path_type)


def parse_exec_option(dataset, exec_option, indra_installed, username):
def parse_exec_option(dataset, exec_option, username):
if exec_option is None or exec_option == "auto":
exec_option = exec_option_factory(dataset, indra_installed, username)
exec_option = exec_option_factory(dataset, username)
return exec_option.get_exec_option()
return exec_option

@@ -136,8 +131,8 @@ def parse_return_tensors(dataset, return_tensors, embedding_tensor, return_view)
     return return_tensors


-def check_indra_installation(exec_option, indra_installed):
-    if exec_option == "compute_engine" and not indra_installed:
+def check_indra_installation(exec_option):
+    if exec_option == "compute_engine" and not INDRA_INSTALLED:
         from deeplake.enterprise.util import raise_indra_installation_error

         raise raise_indra_installation_error(
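
With the flag read directly from `deeplake.enterprise.util`, callers no longer thread an `indra_installed` argument through `parse_exec_option`, `exec_option_factory`, and `check_indra_installation`. A simplified, self-contained sketch of how the resolution behaves after the change (the `FakeLocalDataset` class and the hard-coded flag are illustrative stand-ins, not deeplake's real objects):

```python
from dataclasses import dataclass

# Stand-in for deeplake.enterprise.util.INDRA_INSTALLED in this sketch.
INDRA_INSTALLED = True


@dataclass
class FakeLocalDataset:
    path: str = "./my_dataset"
    username: str = "some_user"


def parse_exec_option_sketch(dataset, exec_option):
    """Mirrors the simplified flow: auto-resolve only when no explicit option is given."""
    if exec_option is None or exec_option == "auto":
        if "mem://" in dataset.path:
            return "python"
        # compute_engine requires indra to be importable and an authenticated user.
        if INDRA_INSTALLED and dataset.username != "public":
            return "compute_engine"
        return "python"
    return exec_option


print(parse_exec_option_sketch(FakeLocalDataset(), "auto"))                 # compute_engine
print(parse_exec_option_sketch(FakeLocalDataset(username="public"), None))  # python
print(parse_exec_option_sketch(FakeLocalDataset(), "tensor_db"))            # tensor_db
```
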
3 changes: 0 additions & 3 deletions deeplake/enterprise/convert_to_libdeeplake.py
@@ -43,9 +43,6 @@ def import_indra_api():
     return api


-INDRA_INSTALLED = bool(importlib.util.find_spec("indra"))
-
-
 def _get_indra_ds_from_native_provider(provider: IndraProvider):
     api = import_indra_api()
     return api.dataset(provider.core)
3 changes: 3 additions & 0 deletions deeplake/enterprise/util.py
@@ -1,3 +1,4 @@
+import importlib
from typing import Optional
from deeplake.integrations.pytorch.common import collate_fn as pytorch_collate_fn
from deeplake.integrations.tf.common import collate_fn as tf_collate_fn
@@ -6,6 +7,8 @@

import os

+INDRA_INSTALLED = bool(importlib.util.find_spec("indra"))


def raise_indra_installation_error(indra_import_error: Optional[Exception] = None):
if not indra_import_error:
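
The relocated module-level flag relies on `importlib.util.find_spec`, which asks the import system's finders whether a package can be found without actually loading it, so it avoids the cost and side effects of the scattered `try: from indra import api` probes this commit deletes. A small comparison sketch, assuming only that `indra` is the optional package being probed:

```python
import importlib
import importlib.util


def installed_via_find_spec(name: str) -> bool:
    # Consults the import finders only; the package itself is not loaded.
    return importlib.util.find_spec(name) is not None


def installed_via_import(name: str) -> bool:
    # The older probe style removed by this commit: import the package,
    # paying its full import cost just to learn whether it exists.
    try:
        importlib.import_module(name)
        return True
    except ImportError:
        return False


print(installed_via_find_spec("indra"), installed_via_import("indra"))
```
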
