Skip to content

Commit

Permalink
combine set_crawl_exec_last_update_time into inc_crawl_exec_time
Browse files Browse the repository at this point in the history
  • Loading branch information
ikreymer committed Mar 3, 2024
1 parent 97f2903 commit d501761
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 13 deletions.
11 changes: 3 additions & 8 deletions backend/btrixcloud/crawls.py
Expand Up @@ -431,12 +431,13 @@ async def update_running_crawl_stats(self, crawl_id, stats):
query = {"_id": crawl_id, "type": "crawl", "state": "running"}
return await self.crawls.find_one_and_update(query, {"$set": {"stats": stats}})

async def inc_crawl_exec_time(self, crawl_id, exec_time, last_updated_time):
    """Atomically add *exec_time* seconds to the crawl's ``crawlExecSeconds``.

    The filter includes ``"_lut": {"$ne": last_updated_time}`` so the update
    only matches when the stored last-update marker differs from
    *last_updated_time* — a repeated call with the same timestamp matches no
    document and the exec time is not double-counted (idempotent increment).
    On a successful match the marker is advanced to *last_updated_time*.

    :param crawl_id: id of the crawl document (``_id``)
    :param exec_time: seconds to add to ``crawlExecSeconds`` (0 just sets the marker)
    :param last_updated_time: new value for the ``_lut`` marker field
    :returns: the matched document, or None if no document matched
    """
    return await self.crawls.find_one_and_update(
        {"_id": crawl_id, "type": "crawl", "_lut": {"$ne": last_updated_time}},
        {
            "$inc": {"crawlExecSeconds": exec_time},
            "$set": {"_lut": last_updated_time},
        },
    )

Expand All @@ -447,12 +448,6 @@ async def get_crawl_exec_last_update_time(self, crawl_id):
)
return res and res.get("_lut")

async def set_crawl_exec_last_update_time(self, crawl_id, last_update_time):
    """Store *last_update_time* as the crawl's ``_lut`` update marker."""
    query = {"_id": crawl_id, "type": "crawl"}
    update = {"$set": {"_lut": last_update_time}}
    return await self.crawls.find_one_and_update(query, update)

async def get_crawl_state(self, crawl_id):
"""return current crawl state of a crawl"""
res = await self.crawls.find_one(
Expand Down
8 changes: 3 additions & 5 deletions backend/btrixcloud/operator/crawls.py
Expand Up @@ -233,7 +233,7 @@ async def sync_crawls(self, data: MCSyncData):
else:
status.scale = crawl.scale
now = dt_now()
await self.crawl_ops.set_crawl_exec_last_update_time(crawl_id, now)
await self.crawl_ops.inc_crawl_exec_time(crawl_id, 0, now)
status.lastUpdatedTime = to_k8s_date(now)

children = self._load_redis(params, status, data.children)
Expand Down Expand Up @@ -835,7 +835,7 @@ async def increment_pod_exec_time(
)

if not update_start_time:
await self.crawl_ops.set_crawl_exec_last_update_time(crawl_id, now)
await self.crawl_ops.inc_crawl_exec_time(crawl_id, 0, now)
status.lastUpdatedTime = to_k8s_date(now)
return

Expand Down Expand Up @@ -912,8 +912,6 @@ async def increment_pod_exec_time(
max_duration = max(duration, max_duration)

if exec_time:
await self.crawl_ops.inc_crawl_exec_time(crawl_id, exec_time)

await self.org_ops.inc_org_time_stats(oid, exec_time, True)
status.crawlExecTime += exec_time
status.elapsedCrawlTime += max_duration
Expand All @@ -923,7 +921,7 @@ async def increment_pod_exec_time(
flush=True,
)

await self.crawl_ops.set_crawl_exec_last_update_time(crawl_id, now)
await self.crawl_ops.inc_crawl_exec_time(crawl_id, exec_time, now)
status.lastUpdatedTime = to_k8s_date(now)

def should_mark_waiting(self, state, started):
Expand Down

0 comments on commit d501761

Please sign in to comment.