-
-
Notifications
You must be signed in to change notification settings - Fork 118
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Customizable Performance Report #166
base: master
Are you sure you want to change the base?
Conversation
Add two performance stats from libpcap to NFStreamer, and wrap them in a new class `PerformanceStats`
Currently, my implementation stores each performance stat in a dictionary. Edit: after a little bit of testing, I noticed that using a class instead of a dictionary makes it easier for people to implement custom stats (editor hints from class members vs. dictionary keys), with very little performance impact. By the way, are there better names for this class? class Perf:
    def __init__(self, context) -> None:
        """Allocate one process-shared unsigned-int ("I") counter per stat, initialized to 0."""
        self._received = context.Value("I", 0)
        self._dropped = context.Value("I", 0)
        self._dropped_if = context.Value("I", 0)
        self._processed = context.Value("I", 0)
        self._ignored = context.Value("I", 0)

    # Each property pair below proxies the .value field of the corresponding
    # shared Value, so callers read and write plain ints across processes.
    @property
    def received(self) -> int:
        return self._received.value

    @received.setter
    def received(self, value: int):
        self._received.value = value

    @property
    def dropped(self) -> int:
        return self._dropped.value

    @dropped.setter
    def dropped(self, value: int):
        self._dropped.value = value

    @property
    def dropped_if(self) -> int:
        return self._dropped_if.value

    @dropped_if.setter
    def dropped_if(self, value: int):
        self._dropped_if.value = value

    @property
    def processed(self) -> int:
        return self._processed.value

    @processed.setter
    def processed(self, value: int):
        self._processed.value = value

    @property
    def ignored(self) -> int:
        return self._ignored.value

    @ignored.setter
    def ignored(self, value: int):
        self._ignored.value = value
class PerformanceStats:
def __init__(self, n_meters, context, is_linux, flows_count) -> None:
self.is_linux = is_linux
self.flows_count = flows_count
self.perfs = [Perf(context) for _ in range(n_meters)]
def __getitem__(self, idx) -> Perf:
return self.perfs[idx] test codefrom time import time_ns
from functools import wraps
from multiprocessing import get_context

from nfstream.utils import RepeatedTimer
class Perf:
    """Process-shared performance counters backed by multiprocessing Values.

    Each counter is an unsigned-int shared Value; the class exposes them as
    plain-int properties so readers and writers in different processes see
    the same numbers.
    """

    def __init__(self, context) -> None:
        self._received = context.Value("I", 0)
        self._dropped = context.Value("I", 0)
        self._dropped_if = context.Value("I", 0)
        self._processed = context.Value("I", 0)
        self._ignored = context.Value("I", 0)

    def _shared(attr_name):  # class-body helper, removed after use below
        def _get(self):
            return getattr(self, attr_name).value

        def _set(self, value):
            getattr(self, attr_name).value = value

        return property(_get, _set)

    # One int-valued property per shared counter.
    received = _shared("_received")
    dropped = _shared("_dropped")
    dropped_if = _shared("_dropped_if")
    processed = _shared("_processed")
    ignored = _shared("_ignored")

    del _shared
class PerformanceStatsWithClass:
def __init__(self, n_meters, context, is_linux, flows_count) -> None:
self.is_linux = is_linux
self.flows_count = flows_count
self.perfs = [Perf(context) for _ in range(n_meters)]
def __getitem__(self, idx) -> Perf:
return self.perfs[idx]
class PerformanceStatsWithDict:
    """Benchmark variant: stores each meter's counters as a dict of shared Values."""

    def __init__(self, n_meters, context, is_linux, flows_count) -> None:
        self.flows_count = flows_count
        self.is_linux = is_linux
        # One mapping of counter-name -> shared unsigned int per meter.
        # Key order matches the class-based variant's attribute order.
        counter_names = ("received", "dropped", "dropped_if", "processed", "ignored")
        self.performances = [
            {name: context.Value("I", 0) for name in counter_names}
            for _ in range(n_meters)
        ]

    def __getitem__(self, idx) -> dict:
        return self.performances[idx]
def update_perf(perf: Perf | dict):
    """Writer workload: bump every counter of one meter 1,000,000 times, then print totals.

    Accepts either a dict-of-Values (dict variant) or a Perf object (class
    variant); increments are received+1, dropped+2, dropped_if+3,
    processed+4, ignored+5 per iteration.
    """
    steps = (
        ("received", 1),
        ("dropped", 2),
        ("dropped_if", 3),
        ("processed", 4),
        ("ignored", 5),
    )
    if isinstance(perf, dict):
        for i in range(1_000_000):
            print(i, end="\r")  # same per-iteration progress output as before
            for key, step in steps:
                perf[key].value += step
        print(*(perf[key].value for key, _ in steps))
    elif isinstance(perf, Perf):
        for i in range(1_000_000):
            print(i, end="\r")
            for name, step in steps:
                setattr(perf, name, getattr(perf, name) + step)
        print(*(getattr(perf, name) for name, _ in steps))
def watch(perfs: list[Perf | dict]):
    """Reader workload: fetch every counter of every meter once (values are discarded)."""
    names = ("received", "dropped", "dropped_if", "processed", "ignored")
    if isinstance(perfs[0], dict):
        for entry in perfs:
            for key in names:
                a = entry[key].value
    elif isinstance(perfs[0], Perf):
        for entry in perfs:
            for name in names:
                a = getattr(entry, name)
def time_func(func):
    """Decorator that prints the wrapped call's wall-clock duration in seconds.

    Fixes over the original: the wrapper (previously misspelled "wapper")
    forwards arbitrary positional/keyword arguments instead of exactly one,
    returns the wrapped function's result instead of discarding it, and
    preserves the wrapped function's metadata via functools.wraps. Existing
    callers (single positional arg, ignored return) are unaffected.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        start = time_ns()
        result = func(*args, **kwargs)
        print(f"{(time_ns() - start) / (10 ** 9)} seconds")
        return result

    return wrapper
@time_func
def test_class(n_meters):
    """Benchmark the class-based stats: n_meters writer processes plus a periodic reader.

    Fix: join every process unconditionally -- join() returns immediately for
    an already-finished process, while the previous is_alive() guard could
    skip reaping a process that exited between the check and the join.
    """
    print("class")
    ctx = get_context()
    p = PerformanceStatsWithClass(n_meters, ctx, True, 0)
    meters = [ctx.Process(target=update_perf, args=(p[i],)) for i in range(n_meters)]
    # NOTE(review): assumes RepeatedTimer begins firing on construction -- confirm.
    rt = RepeatedTimer(0.001, watch, p)
    for meter in meters:
        meter.start()
    for meter in meters:
        meter.join()
    rt.stop()
@time_func
def test_dict(n_meters):
    """Benchmark the dict-based stats: n_meters writer processes plus a periodic reader.

    Fix: join every process unconditionally -- join() returns immediately for
    an already-finished process, while the previous is_alive() guard could
    skip reaping a process that exited between the check and the join.
    """
    print("dict")
    ctx = get_context()
    p = PerformanceStatsWithDict(n_meters, ctx, True, 0)
    meters = [ctx.Process(target=update_perf, args=(p[i],)) for i in range(n_meters)]
    # NOTE(review): assumes RepeatedTimer begins firing on construction -- confirm.
    rt = RepeatedTimer(0.001, watch, p)
    for meter in meters:
        meter.start()
    for meter in meters:
        meter.join()
    rt.stop()
# Entry point: run each benchmark with 4 writer processes.
if __name__ == "__main__":
    test_class(4)
test_dict(4) Results of writing to context.Value 1,000,000 times while reading from it every 0.001 seconds: class
1000000 2000000 3000000 4000000 5000000
1000000 2000000 3000000 4000000 5000000
1000000 2000000 3000000 4000000 5000000
1000000 2000000 3000000 4000000 5000000
11.975047249 seconds
dict
1000000 2000000 3000000 4000000 5000000
1000000 2000000 3000000 4000000 5000000
1000000 2000000 3000000 4000000 5000000
1000000 2000000 3000000 4000000 5000000
11.891510533 seconds
Customizable Performance Report
Description
Add `received` and `dropped_by_interface` from `struct nf_stat` to `NFStreamer`'s performance report.
Move the performance report into a `PerformanceStats` class in `utils.py` and make `update_performances()` into its own method.
Customize the performance report by inheriting from `PerformanceStats` and overriding `update_performances()`.
Related #147
Type of change
How Has This Been Tested?
Test Configuration:
Example code:
default log output:
custom log output:
Checklist: