add log level option for indexing tool (#601)
* add log level option for indexing tool

* fix moto mock

* fix moto mock

---------

Co-authored-by: Emma Ai <emma.ai@ga.gov.au>
emmaai and Emma Ai committed Feb 22, 2024
1 parent 717f1b1 commit 391c190
Showing 5 changed files with 38 additions and 28 deletions.
22 changes: 16 additions & 6 deletions apps/dc_tools/odc/apps/dc_tools/s3_to_dc.py
@@ -33,12 +33,6 @@
     publish_action,
 )
 
-logging.basicConfig(
-    level=logging.WARNING,
-    format="%(asctime)s: %(levelname)s: %(message)s",
-    datefmt="%m/%d/%Y %I:%M:%S",
-)
-
 
 def doc_error(uri, doc):
     """Log the internal errors parsing docs"""
@@ -96,6 +90,15 @@ def dump_to_odc(
 
 
 @click.command("s3-to-dc")
+@click.option(
+    "--log",
+    type=click.Choice(
+        ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], case_sensitive=False
+    ),
+    default="WARNING",
+    show_default=True,
+    help="control the log level, e.g., --log=error",
+)
 @skip_lineage
 @fail_on_missing_lineage
 @verify_lineage
@@ -113,6 +116,7 @@ def dump_to_odc(
 @click.argument("uris", nargs=-1)
 @click.argument("product", type=str, nargs=1, required=False)
 def cli(
+    log,
     skip_lineage,
     fail_on_missing_lineage,
     verify_lineage,
@@ -140,6 +144,12 @@ def cli(
     Can provide a single product name or a space separated list of multiple products
     (formatted as a single string).
     """
+    log_level = getattr(logging, log.upper())
+    logging.basicConfig(
+        level=log_level,
+        format="%(asctime)s: %(levelname)s: %(message)s",
+        datefmt="%m/%d/%Y %I:%M:%S",
+    )
 
     transform = None
     if stac:
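Moving logging.basicConfig from import time into cli() is what lets the new option take effect: configured at module import, the level was fixed at WARNING before the command line was ever parsed. The option string is resolved to one of logging's integer level constants with getattr before the root logger is configured. A minimal standalone sketch of the same pattern (hypothetical command name, not part of this commit; assumes click is installed):

import logging

import click


@click.command("example-indexer")
@click.option(
    "--log",
    type=click.Choice(
        ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], case_sensitive=False
    ),
    default="WARNING",
    show_default=True,
)
def cli(log):
    # getattr maps the option string onto logging's integer constants,
    # e.g. "ERROR" -> logging.ERROR; .upper() tolerates lower-case input
    logging.basicConfig(level=getattr(logging, log.upper()))
    logging.debug("only shown with --log=debug")
    logging.warning("shown at the default level and above")


if __name__ == "__main__":
    cli()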
4 changes: 2 additions & 2 deletions apps/dc_tools/tests/conftest.py
@@ -14,7 +14,7 @@
 from datacube.index import index_connect
 from datacube.model import MetadataType
 from datacube.utils import documents
-from moto import mock_s3
+from moto import mock_aws
 from moto.server import ThreadedMotoServer
 from odc.apps.dc_tools.add_update_products import add_update_products
 
@@ -66,7 +66,7 @@ def mocked_aws_s3_env():
 
 @pytest.fixture
 def mocked_s3_datasets(mocked_aws_s3_env):
-    with mock_s3():
+    with mock_aws():
         bucket = mocked_aws_s3_env.Bucket("odc-tools-test")
         bucket.create(
             ACL="public-read",
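The "fix moto mock" changes adapt the tests to moto's unified entry point: moto 5 replaces the per-service decorators (mock_s3, mock_sqs, mock_sns) with a single mock_aws that mocks every service and works both as a decorator and as a context manager. A minimal sketch of the context-manager form (arbitrary bucket name; assumes moto >= 5 and boto3 are installed):

import boto3
from moto import mock_aws


def test_bucket_roundtrip():
    # mock_aws intercepts all boto3 calls, so nothing real is created
    with mock_aws():
        s3 = boto3.resource("s3", region_name="us-east-1")
        bucket = s3.Bucket("example-bucket")
        bucket.create()
        bucket.put_object(Key="hello.txt", Body=b"hi")
        assert [obj.key for obj in bucket.objects.all()] == ["hello.txt"]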
4 changes: 2 additions & 2 deletions apps/dc_tools/tests/test_sns_publishing.py
@@ -3,7 +3,7 @@
 import os
 import pytest
 from click.testing import CliRunner
-from moto import mock_sns, mock_sqs
+from moto import mock_aws
 from pathlib import Path
 
 from odc.apps.dc_tools.fs_to_dc import cli as fs_cli
@@ -27,7 +27,7 @@ def sns_setup(aws_credentials, aws_env):
     Tests are structured as follows:
     input: [ STAC -> SNS -> SQS ] -> dc_tools -> output: [ STAC -> SNS -> SQS ]
     """
-    with mock_sqs(), mock_sns():
+    with mock_aws():
         sns = boto3.client("sns")
         sqs = boto3.client("sqs")
 
26 changes: 13 additions & 13 deletions apps/dc_tools/tests/test_sqs_to_dc.py
@@ -8,7 +8,7 @@
 import pytest
 from deepdiff import DeepDiff
 from functools import partial
-from moto import mock_sqs
+from moto import mock_aws
 from odc.aws.queue import get_messages
 from pathlib import Path
 from pprint import pformat
@@ -98,25 +98,25 @@ def aws_credentials():
     os.environ["AWS_SESSION_TOKEN"] = "testing"
 
 
-@mock_sqs
 def test_extract_metadata_from_message(aws_credentials, odc_test_db_with_products):
-    TEST_QUEUE_NAME = "a_test_queue"
-    sqs_resource = boto3.resource("sqs")
+    with mock_aws():
+        TEST_QUEUE_NAME = "a_test_queue"
+        sqs_resource = boto3.resource("sqs")
 
-    dc = Datacube()
+        dc = Datacube()
 
-    a_queue = sqs_resource.create_queue(QueueName=TEST_QUEUE_NAME)
-    assert int(a_queue.attributes.get("ApproximateNumberOfMessages")) == 0
+        a_queue = sqs_resource.create_queue(QueueName=TEST_QUEUE_NAME)
+        assert int(a_queue.attributes.get("ApproximateNumberOfMessages")) == 0
 
-    a_queue.send_message(MessageBody=json.dumps(sqs_message))
-    assert int(a_queue.attributes.get("ApproximateNumberOfMessages")) == 1
+        a_queue.send_message(MessageBody=json.dumps(sqs_message))
+        assert int(a_queue.attributes.get("ApproximateNumberOfMessages")) == 1
 
-    assert dc.index.datasets.get("69a6eca2-ca45-4808-a5b3-694029200c43") is None
+        assert dc.index.datasets.get("69a6eca2-ca45-4808-a5b3-694029200c43") is None
 
-    queue = sqs_resource.get_queue_by_name(QueueName=TEST_QUEUE_NAME)
+        queue = sqs_resource.get_queue_by_name(QueueName=TEST_QUEUE_NAME)
 
-    msg = next(get_messages(queue))
-    metadata = extract_metadata_from_message(msg)
+        msg = next(get_messages(queue))
+        metadata = extract_metadata_from_message(msg)
     data, uri = handle_bucket_notification_message(
         msg, metadata, "cemp_insar/insar/displacement/alos/*", True
     )
10 changes: 5 additions & 5 deletions libs/cloud/tests/test_aws.py
@@ -3,7 +3,7 @@
 import os
 import pytest
 from click.testing import CliRunner
-from moto import mock_sqs
+from moto import mock_aws
 from odc.aws._find import parse_query
 from odc.aws.queue import get_queue, get_queues, redrive_queue
 from types import SimpleNamespace
@@ -24,7 +24,7 @@ def aws_env(monkeypatch):
     monkeypatch.setenv("AWS_DEFAULT_REGION", "us-west-2")
 
 
-@mock_sqs
+@mock_aws
 def test_redrive_to_queue(aws_env):
     resource = boto3.resource("sqs")
 
@@ -66,7 +66,7 @@ def test_redrive_to_queue(aws_env):
     assert get_n_messages(dead_queue) == 0
 
 
-@mock_sqs
+@mock_aws
 def test_redrive_to_queue_cli(aws_env):
     resource = boto3.resource("sqs")
 
@@ -127,7 +127,7 @@ def test_redrive_to_queue_cli(aws_env):
     )
 
 
-@mock_sqs
+@mock_aws
 def test_get_queues(aws_env):
     resource = boto3.resource("sqs")
 
@@ -169,7 +169,7 @@ def test_get_queues(aws_env):
     assert len(list(queues)) == 0
 
 
-@mock_sqs
+@mock_aws
 def test_get_queues_empty(aws_env):
     queues = get_queues()
 
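mock_aws also drops in for the decorator usages above. A small sketch of that form (hypothetical queue name; assumes moto >= 5 and boto3 are installed):

import boto3
from moto import mock_aws


@mock_aws
def test_queue_is_mocked():
    # the decorator form scopes the mock to this test function
    sqs = boto3.resource("sqs", region_name="us-west-2")
    queue = sqs.create_queue(QueueName="example-queue")
    assert int(queue.attributes.get("ApproximateNumberOfMessages")) == 0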
