Minor logging cleanup (#3598)
* Minor logging cleanup

Minimize cache logging: details were useful when cache management first went
in, but are now disruptive during ops review.
dbutenhof authored and webbnh committed Jan 19, 2024
1 parent 9be7689 commit aac53df
Showing 3 changed files with 24 additions and 28 deletions.
6 changes: 0 additions & 6 deletions lib/pbench/cli/server/tree_manage.py
@@ -2,7 +2,6 @@

import click
import humanfriendly
-import humanize

from pbench.cli import pass_cli_context
from pbench.cli.server import config_setup
@@ -90,12 +89,7 @@ def tree_manage(
if reclaim_percent or reclaim_size:
target_size = humanfriendly.parse_size(reclaim_size) if reclaim_size else 0
target_pct = reclaim_percent if reclaim_percent else 20.0
-click.echo(
-f"Reclaiming {target_pct}% or {humanize.naturalsize(target_size)}"
-)
outcome = cache_m.reclaim_cache(goal_pct=target_pct, goal_bytes=target_size)
-un = "" if outcome else "un"
-click.echo(f"The cache manager was {un}able to free the requested space")
rv = 0 if outcome else 1
except Exception as exc:
if logger:
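For context on the remaining CLI logic above: humanfriendly.parse_size() converts the human-readable reclaim_size value into a byte count before it is handed to reclaim_cache(). A minimal sketch of that behavior (the example values are mine, not from the commit; to my understanding parse_size() defaults to decimal multipliers and honors binary units when asked):

import humanfriendly

# Decimal (SI) multipliers are the default.
print(humanfriendly.parse_size("20MB"))              # 20000000
# Explicit binary units are honored...
print(humanfriendly.parse_size("1GiB"))              # 1073741824
# ...and binary=True treats ambiguous units as powers of two.
print(humanfriendly.parse_size("1GB", binary=True))  # 1073741824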
44 changes: 23 additions & 21 deletions lib/pbench/server/cache_manager.py
@@ -916,7 +916,6 @@ def get_unpacked_size(self) -> int:
except (ValueError, TypeError):
source = None
size = 0
-self.logger.info("{} unpacked size (from {}) = {}", self.name, source, size)
self.unpacked_size = size
return size

@@ -1007,7 +1006,6 @@ def get_results(self, lock: LockManager) -> Path:
if process.returncode == 0:
size = int(process.stdout.split("\t", maxsplit=1)[0])
self.unpacked_size = size
-self.logger.info("actual unpacked {} size = {}", self.name, size)
Metadata.setvalue(self.dataset, Metadata.SERVER_UNPACKED, size)
except Exception as e:
self.logger.warning("usage check failed: {}", e)
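The size computation retained above splits the subprocess output on the first tab, which matches du-style "<bytes>\t<path>" output; a tiny illustration with a made-up output string:

# Hypothetical "du -s -B1 <dir>" output: byte count, a tab, then the path.
stdout = "2147483648\t/srv/pbench/.cache/ABC123\n"
size = int(stdout.split("\t", maxsplit=1)[0])
print(size)  # 2147483648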
@@ -1555,10 +1553,17 @@ class Candidate:
last_ref: float # last reference timestamp
cache: Path # Unpacked artifact directory

-def reached_goal():
+@dataclass
+class GoalCheck:
+"""Report goal check"""
+
+reached: bool
+usage: shutil._ntuple_diskusage
+
+def reached_goal() -> GoalCheck:
"""Check whether we've freed enough space"""
usage = shutil.disk_usage(self.cache_root)
-return usage.free >= goal
+return GoalCheck(usage.free >= goal, usage)

# Our reclamation goal can be expressed as % of total, absolute bytes,
# or both. We normalize to a single "bytes free" goal.
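Pulled out of the diff, the new goal-check helper amounts to the following standalone sketch (the goal and cache_root values are hypothetical; shutil.disk_usage() returns a named tuple with total, used, and free byte counts):

import shutil
from dataclasses import dataclass

goal = 10 * 1024**3    # hypothetical target: keep 10 GiB free
cache_root = "/tmp"    # hypothetical cache directory

@dataclass
class GoalCheck:
    """Report goal check"""

    reached: bool
    usage: shutil._ntuple_diskusage

def reached_goal() -> GoalCheck:
    """Check whether we've freed enough space"""
    usage = shutil.disk_usage(cache_root)
    return GoalCheck(usage.free >= goal, usage)

# Callers now get both the verdict and the usage snapshot for reporting.
check = reached_goal()
print(check.reached, check.usage.free)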
@@ -1567,25 +1572,20 @@ def reached_goal():
bytes_rounded = ((goal_bytes + MB_BYTES) // MB_BYTES) * MB_BYTES
goal = max(pct_as_bytes, bytes_rounded)

-if reached_goal():
+if usage.free >= goal:
return True

total_count = 0
reclaimed = 0
reclaim_failed = 0
-self.logger.info(
-"RECLAIM: looking for {} free (based on {}% of {} or {})",
-humanize.naturalsize(goal),
-goal_pct,
-humanize.naturalsize(usage.total),
-humanize.naturalsize(bytes_rounded),
-)

# Identify cached datasets by examining the cache directory tree
candidates = deque()
for d in self.cache_root.iterdir():
if not d.is_dir():
-self.logger.warning("unexpected file in cache root: {}", d)
+self.logger.warning(
+"RECLAIM: found unexpected file in cache root: {}", d
+)
continue
total_count += 1
last_ref = 0.0
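As a worked example of the normalization above (numbers are illustrative; pct_as_bytes is my reconstruction of a line outside the visible hunk, and MB_BYTES is assumed here to be 10**6):

MB_BYTES = 1_000_000             # assumed value for illustration
total = 500_000_000_000          # hypothetical filesystem size (500 GB)
goal_pct = 20.0                  # goal: 20% of the filesystem free
goal_bytes = 50_000_000_000      # or an absolute 50 GB free

pct_as_bytes = int(total * goal_pct / 100.0)                      # 100_000_000_000
bytes_rounded = ((goal_bytes + MB_BYTES) // MB_BYTES) * MB_BYTES  # 50_001_000_000
goal = max(pct_as_bytes, bytes_rounded)                           # 100_000_000_000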
@@ -1632,12 +1632,12 @@ def reached_goal():
error = e
else:
reclaimed += 1
-if reached_goal():
+if reached_goal().reached:
break
except OSError as e:
if e.errno in (errno.EAGAIN, errno.EACCES):
self.logger.info(
"RECLAIM {}: skipping because cache is locked", name
"RECLAIM: skipping {} because cache is locked", name
)
# Don't reclaim a cache that's in use
continue
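The EAGAIN/EACCES branch above treats "cache locked by another process" as a reason to skip, not an error. A rough standalone illustration of that pattern using flock (this is not the cache manager's LockManager, just the general idea):

import errno
import fcntl

def try_reclaim(lock_path: str) -> bool:
    """Return True if we got the lock (and could reclaim), False if it is busy."""
    with open(lock_path, "w") as fp:
        try:
            fcntl.flock(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except OSError as e:
            if e.errno in (errno.EAGAIN, errno.EACCES):
                return False  # someone else holds the lock: skip this cache
            raise
        # ... reclamation would happen here while the lock is held ...
        return True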
@@ -1647,17 +1647,19 @@
reclaim_failed += 1
error = e
if error:
-self.logger.error("RECLAIM {} failed with '{}'", name, error)
-reached = reached_goal()
+self.logger.error("RECLAIM: {} failed with '{}'", name, error)
+goal_check = reached_goal()
+free_pct = goal_check.usage.free * 100.0 / goal_check.usage.total
self.logger.info(
"RECLAIM summary: goal {}%, {} {}: "
"{} datasets, {} had cache: {} reclaimed and {} errors",
"RECLAIM {} (goal {}%, {}): {} datasets, "
"{} had cache: {} reclaimed and {} errors: {:.1f}% free",
"achieved" if goal_check.reached else "partial",
goal_pct,
humanize.naturalsize(goal_bytes),
"achieved" if reached else "failed",
total_count,
has_cache,
reclaimed,
reclaim_failed,
+free_pct,
)
-return reached
+return goal_check.reached
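To show what the reworked summary message renders to, here is the same template filled in with made-up numbers via str.format() (the server logger's "{}" placeholders appear to use brace-style formatting, as elsewhere in this diff; humanize.naturalsize() produces the human-readable size):

import humanize

print(
    "RECLAIM {} (goal {}%, {}): {} datasets, "
    "{} had cache: {} reclaimed and {} errors: {:.1f}% free".format(
        "achieved", 20.0, humanize.naturalsize(50_000_000_000), 40, 12, 5, 0, 37.2
    )
)
# RECLAIM achieved (goal 20.0%, 50.0 GB): 40 datasets, 12 had cache: 5 reclaimed and 0 errors: 37.2% free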
2 changes: 1 addition & 1 deletion lib/pbench/server/indexer.py
@@ -637,7 +637,7 @@ def _make_user_benchmark_json(self):
# The csv_reader encountered the end-of-file on the first
# iteration (i.e., the file was empty), so skip it.
self.ptb.idxctx.logger.warning(
"CSV file {csv_name!r} is empty ({self.ptb._tbctx})"
"CSV file {!r} is empty ({})", csv_name, self.ptb._tbctx
)
continue
except OSError as exc:
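The single-line indexer fix is worth a note: the old message was a plain string literal, so the {csv_name!r} and {self.ptb._tbctx} placeholders were logged verbatim rather than interpolated (it reads like an f-string but has no f prefix). The replacement passes the values as arguments to the logger's brace-style template; roughly equivalent to:

csv_name = "result.csv"    # hypothetical values for illustration
tbctx = "run-2024-01-19"

# Old behavior: braces come out literally because this is not an f-string.
print("CSV file {csv_name!r} is empty ({self.ptb._tbctx})")

# New behavior: the template and arguments are separate; {!r} applies repr().
print("CSV file {!r} is empty ({})".format(csv_name, tbctx))
# CSV file 'result.csv' is empty (run-2024-01-19)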
