Add ZipInfo type annotations
tw4l committed Mar 5, 2024
1 parent 9f65146 commit 9955f70
Showing 2 changed files with 5 additions and 5 deletions.
1 change: 0 additions & 1 deletion (filename not shown)

@@ -35,7 +35,6 @@ async def migrate_up(self):
             return

         for crawl_id in crawl_ids_no_pages:
-            print(f"Adding pages for crawl {crawl_id}", flush=True)
             try:
                 await self.page_ops.add_crawl_pages_to_db_from_wacz(crawl_id)
                 print(
9 changes: 5 additions & 4 deletions backend/btrixcloud/storages.py
@@ -23,6 +23,7 @@
 import os

 from datetime import datetime
+from zipfile import ZipInfo

 from fastapi import Depends, HTTPException
 from stream_zip import stream_zip, NO_COMPRESSION_64
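For reference, ZipInfo is the metadata record the standard-library zipfile module keeps for each archive member; ZipFile.infolist() returns one ZipInfo per entry, exposing the attributes used below (filename, is_dir()). A minimal sketch, assuming a hypothetical local WACZ file:

from typing import List
from zipfile import ZipFile, ZipInfo

with ZipFile("example.wacz") as zf:  # hypothetical local archive
    members: List[ZipInfo] = zf.infolist()  # one ZipInfo per archive member
    for info in members:
        print(info.filename, info.file_size, info.is_dir())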
@@ -548,7 +549,7 @@ def _sync_get_logs(

         # pylint: disable=too-many-function-args
         def stream_log_lines(
-            log_zipinfo, wacz_url: str, wacz_filename: str
+            log_zipinfo: ZipInfo, wacz_url: str, wacz_filename: str
         ) -> Iterator[dict]:
             """Pass lines as json objects"""
             filename = log_zipinfo.filename
@@ -599,7 +600,7 @@ def organize_based_on_instance_number(
                 wacz_url = f"http://host.docker.internal:30870{wacz_url}"

             with RemoteZip(wacz_url) as remote_zip:
-                log_files = [
+                log_files: List[ZipInfo] = [
                     f
                     for f in remote_zip.infolist()
                     if f.filename.startswith("logs/") and not f.is_dir()
@@ -625,7 +626,7 @@ def _sync_get_pages(

         # pylint: disable=too-many-function-args
         def stream_page_lines(
-            pagefile_zipinfo, wacz_url: str, wacz_filename: str
+            pagefile_zipinfo: ZipInfo, wacz_url: str, wacz_filename: str
         ) -> Iterator[Dict[Any, Any]]:
             """Pass lines as json objects"""
             filename = pagefile_zipinfo.filename
@@ -647,7 +648,7 @@ def stream_page_lines(
                 wacz_url = f"http://host.docker.internal:30870{wacz_url}"

             with RemoteZip(wacz_url) as remote_zip:
-                page_files = [
+                page_files: List[ZipInfo] = [
                     f
                     for f in remote_zip.infolist()
                     if f.filename.startswith("pages/")
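The annotated pattern in isolation, as a sketch (assuming the remotezip package and a made-up WACZ URL): RemoteZip subclasses zipfile.ZipFile, so its infolist() also yields ZipInfo entries, which is what lets the filtered lists be typed as List[ZipInfo].

from typing import List
from zipfile import ZipInfo

from remotezip import RemoteZip

wacz_url = "https://example.com/archive.wacz"  # hypothetical URL

with RemoteZip(wacz_url) as remote_zip:
    # Same filter as _sync_get_logs: keep only real files under logs/
    log_files: List[ZipInfo] = [
        f
        for f in remote_zip.infolist()
        if f.filename.startswith("logs/") and not f.is_dir()
    ]
    for zipinfo in log_files:
        print(zipinfo.filename, zipinfo.file_size)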

0 comments on commit 9955f70