Merge branch 'feature/files-search' into 'develop'
File search supported

See merge request core/sevenbridges-python!112
Goran Vlaovic committed Mar 18, 2024
2 parents 0310e36 + 4e30d13 commit f5c7d98
Showing 5 changed files with 191 additions and 12 deletions.
30 changes: 30 additions & 0 deletions docs/quickstart.rst
@@ -907,6 +907,36 @@ There are certain restrictions on using this parameter:
3. The continuation token pagination and offset pagination are mutually exclusive, so if both the
   :code:`cont_token` and :code:`offset` parameters are passed to :code:`query()`/:code:`list_files()`, an :code:`SbgError` will be raised.

Search files using the SBG query language
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Files can be searched using a query written in the SBG query language. The query syntax is explained in
`the documentation <https://docs.sevenbridges.com/reference/query-syntax>`_. Searching is done using the :code:`search()` method:

.. code:: python

    search_response = api.files.search(query='IN "user/example-project" WHERE type = "FILE"')
    search_response.count  # Gets the number of returned files/folders (files in this example)
    search_response.cont_token  # Gets the continuation token used to fetch the next page of data
    search_response.result_set  # Gets the list of resulting files/folders

Pagination parameters
^^^^^^^^^^^^^^^^^^^^^

Token-based pagination can be achieved in one of the following ways (a complete pagination loop is sketched after the list):

1. By using the :code:`cont_token` and :code:`limit` parameters in the :code:`search()` method:

   .. code:: python

       search_response = api.files.search(query='IN "user/example-project" WHERE type = "FILE"', limit=100, cont_token=start)

2. With :code:`TOKEN` and :code:`LIMIT` parameters in the provided query:

   .. code:: python

       search_response = api.files.search(query='IN "user/example-project" WHERE type = "FILE" LIMIT 100 TOKEN start')
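
Either approach fetches a single page of results. As an illustrative sketch (not taken verbatim from the library documentation), the continuation token from each response can be passed to the next :code:`search()` call to walk through all pages; this assumes that an empty :code:`cont_token` on the response marks the last page:

.. code:: python

    # Hypothetical pagination loop: collect every file/folder matched by the query.
    # Assumes an empty/None cont_token on the response means there are no more pages.
    all_items = []
    token = None
    while True:
        page = api.files.search(
            query='IN "user/example-project" WHERE type = "FILE"',
            limit=100,
            cont_token=token,
        )
        all_items.extend(page.result_set)
        token = page.cont_token
        if not token:
            break
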
Managing file upload and download
---------------------------------
54 changes: 46 additions & 8 deletions sevenbridges/models/file.py
@@ -1,7 +1,8 @@
import os
import logging
import os
import tempfile

from sevenbridges.decorators import inplace_reload
from sevenbridges.errors import (
    SbgError,
    ResourceNotModified,
@@ -12,16 +13,15 @@
    BasicListField
)
from sevenbridges.meta.resource import Resource
from sevenbridges.models.bulk import BulkRecord
from sevenbridges.transfer.upload import Upload
from sevenbridges.decorators import inplace_reload
from sevenbridges.transfer.download import Download
from sevenbridges.meta.transformer import Transform
from sevenbridges.models.bulk import BulkRecord
from sevenbridges.models.compound.files.download_info import DownloadInfo
from sevenbridges.models.compound.files.file_origin import FileOrigin
from sevenbridges.models.compound.files.file_storage import FileStorage
from sevenbridges.models.compound.files.metadata import Metadata
from sevenbridges.models.enums import PartSize, RequestParameters
from sevenbridges.models.compound.files.file_storage import FileStorage
from sevenbridges.models.compound.files.file_origin import FileOrigin
from sevenbridges.models.compound.files.download_info import DownloadInfo
from sevenbridges.transfer.download import Download
from sevenbridges.transfer.upload import Upload

logger = logging.getLogger(__name__)

@@ -52,6 +52,8 @@ class File(Resource):
        'scroll_folder': '/files/{id}/scroll',
        'copy_to_folder': '/files/{file_id}/actions/copy',
        'move_to_folder': '/files/{file_id}/actions/move',

        'search': '/files/search',
    }

    href = HrefField(read_only=True)
@@ -612,9 +614,45 @@ def move_to_folder(self, parent, name=None, api=None):
        ).json()
        return File(api=api, **response)

    @classmethod
    def search(cls, query, cont_token=None, limit=None, api=None):
        """
        Search files using a query written in the SBG query language.
        :param query: Query written in the SBG query language.
        :param cont_token: Continuation token used to fetch the next page.
        :param limit: Maximum number of items to return per page.
        :param api: Api instance.
        :return: SearchResponse object.
        """

        if not query:
            raise SbgError('Query must be provided.')

        if limit is not None and limit <= 0:
            raise SbgError('Limit must be greater than zero.')

        api = api or cls._API

        data = {'query': query}
        params = {
            'cont_token': cont_token,
            'limit': limit
        }

        response = api.post(url=cls._URL['search'],
                            data=data,
                            params=params).json()

        return SearchResponse(**response)


class FileBulkRecord(BulkRecord):
    resource = CompoundField(cls=File, read_only=False)

    def __str__(self):
        return f'<FileBulkRecord valid={self.valid}>'


class SearchResponse(Resource):
    count = IntegerField(read_only=True)
    cont_token = StringField(read_only=True)
    result_set = BasicListField(read_only=True)
35 changes: 31 additions & 4 deletions tests/providers.py
@@ -163,7 +163,7 @@ def paginated_projects(self, limit, num_of_projects):

if i > limit:
prev_url = (
f'/projects/?offset={i - limit}&limit={limit}&fields=_all'
f'/projects/?offset={i - limit}&limit={limit}&fields=_all'
)
prev = {
'method': 'GET',
@@ -286,6 +286,20 @@ def download_info():
'url': generator.url()
}

    @staticmethod
    def default_search_file():
        return {
            'id': generator.uuid4(),
            'name': generator.name(),
            'metadata': {
                'sample': generator.name()
            },
            'tags': [
                generator.name()
            ],
            'type': generator.slug()
        }

    def exists(self, **kwargs):
        file_ = FileProvider.default_file()
        file_.update(kwargs)
@@ -480,6 +494,19 @@ def can_move_to_folder(self, id=None, parent=None, name=None):

        self.request_mocker.post(f'/files/{id}/actions/move', json=result)

    def files_to_search(self, num_of_files):
        items = [
            FileProvider.default_search_file()
            for _ in range(num_of_files)
        ]
        href = f'{self.base_url}/files/search'
        response = {
            'count': num_of_files,
            'cont_token': generator.text(max_nb_chars=10),
            'result_set': items
        }
        self.request_mocker.post(href, json=response)


class AppProvider:
    def __init__(self, request_mocker, base_url):
@@ -834,7 +861,7 @@ def paginated_file_list(self, limit, num_of_files, volume_id, volume_data):
links = []
if i + limit < num_of_files:
next_page_link = {
'next': (
'next': (
f'{self.base_url}/storage/volumes/{volume_id}/list/'
f'?offset={i + limit}&limit={limit}&fields=_all'
)
@@ -944,13 +971,13 @@ def default_copy_result():
return copy_result

def feedback_set(self):
url = f'{self.base_url }/action/notifications/feedback'
url = f'{self.base_url}/action/notifications/feedback'
self.request_mocker.post(url)

def can_bulk_copy(self, **kwargs):
result = self.default_copy_result()
result.update(kwargs)
url = f'{self.base_url }/action/files/copy'
url = f'{self.base_url}/action/files/copy'
self.request_mocker.post(url, json=result)


62 changes: 62 additions & 0 deletions tests/test_files.py
@@ -427,3 +427,65 @@ def test_move_to_folder(api, given, verifier):

    # verification
    verifier.file.moved_to_folder(id=file_id)


def test_search_files(api, given, verifier):
    total = 10
    query = 'some query'
    given.file.files_to_search(total)

    # action
    response = api.files.search(query)

    # verification
    assert response.count == total

    verifier.file.searched(query)


def test_search_files_paginated(api, given, verifier):
    total = 10
    query = 'some query'
    given.file.files_to_search(total)

    # action
    response = api.files.search(query, cont_token='start', limit=10)

    # verification
    assert response.count == total

    verifier.file.searched_with_pagination(query, 'start', 10)


def test_search_files_paginated_limit(api, given, verifier):
    total = 10
    query = 'some query'
    given.file.files_to_search(total)

    # action
    response = api.files.search(query, limit=10)

    # verification
    assert response.count == total

    verifier.file.searched_with_limit(query, 10)


def test_search_files_with_no_query(api, given, verifier):
    given.file.files_to_search(1)

    with pytest.raises(SbgError):
        api.files.search(None)

    with pytest.raises(SbgError):
        api.files.search("")


def test_search_files_with_invalid_limit(api, given, verifier):
    given.file.files_to_search(1)

    with pytest.raises(SbgError):
        api.files.search(query='some query', limit=0)

    with pytest.raises(SbgError):
        api.files.search(query='some query', limit=-1)
22 changes: 22 additions & 0 deletions tests/verifiers.py
@@ -28,6 +28,12 @@ def check_post_data(self):
        for hist in self.request_mocker._adapter.request_history:
            print(hist)

    def check_body(self, body):
        for hist in self.request_mocker._adapter.request_history:
            if hist.json() == body:
                return True
        assert False, f'Body not matched \n{body} != \n{hist.body}'


class ProjectVerifier:
    def __init__(self, request_mocker):
@@ -204,6 +210,22 @@ def copied_to_folder(self, id):
    def moved_to_folder(self, id):
        self.checker.check_url(f'/files/{id}/actions/move')

    def searched(self, query):
        self.checker.check_url('/files/search')
        self.checker.check_body({'query': query})

    def searched_with_pagination(self, query, cont_token, limit):
        qs = {'cont_token': [cont_token], 'limit': [f'{limit}']}
        self.checker.check_url('/files/search')
        self.checker.check_query(qs)
        self.checker.check_body({'query': query})

    def searched_with_limit(self, query, limit):
        qs = {'limit': [f'{limit}']}
        self.checker.check_url('/files/search')
        self.checker.check_query(qs)
        self.checker.check_body({'query': query})


class AppVerifier:
    def __init__(self, request_mocker):
