Add report summary CLI flag #1168

Merged 5 commits on Nov 13, 2019
20 changes: 15 additions & 5 deletions cmd/config.go
@@ -56,17 +56,23 @@ func configFlagSet() *pflag.FlagSet {
flags.Bool("no-usage-report", false, "don't send anonymous stats to the developers")
flags.Bool("no-thresholds", false, "don't run thresholds")
flags.Bool("no-summary", false, "don't show the summary at the end of the test")
flags.String(
"summary-export",
"",
"output the end-of-test summary report to JSON file",
)
return flags
}

type Config struct {
lib.Options

Out []string `json:"out" envconfig:"K6_OUT"`
Linger null.Bool `json:"linger" envconfig:"K6_LINGER"`
NoUsageReport null.Bool `json:"noUsageReport" envconfig:"K6_NO_USAGE_REPORT"`
NoThresholds null.Bool `json:"noThresholds" envconfig:"K6_NO_THRESHOLDS"`
NoSummary null.Bool `json:"noSummary" envconfig:"K6_NO_SUMMARY"`
SummaryExport null.String `json:"summaryExport" envconfig:"K6_SUMMARY_EXPORT"`

Collectors struct {
InfluxDB influxdb.Config `json:"influxdb"`
@@ -95,6 +101,9 @@ func (c Config) Apply(cfg Config) Config {
if cfg.NoSummary.Valid {
c.NoSummary = cfg.NoSummary
}
if cfg.SummaryExport.Valid {
c.SummaryExport = cfg.SummaryExport
}
c.Collectors.InfluxDB = c.Collectors.InfluxDB.Apply(cfg.Collectors.InfluxDB)
c.Collectors.Cloud = c.Collectors.Cloud.Apply(cfg.Collectors.Cloud)
c.Collectors.Kafka = c.Collectors.Kafka.Apply(cfg.Collectors.Kafka)
@@ -121,6 +130,7 @@ func getConfig(flags *pflag.FlagSet) (Config, error) {
NoUsageReport: getNullBool(flags, "no-usage-report"),
NoThresholds: getNullBool(flags, "no-thresholds"),
NoSummary: getNullBool(flags, "no-summary"),
SummaryExport: getNullString(flags, "summary-export"),
}, nil
}

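The new SummaryExport option follows the same null-wrapper pattern as the existing flags: the value only counts as set when the flag was actually passed, so Apply will not clobber a config-file or environment value with an empty default. The getNullString helper used in getConfig above is not part of this diff; a minimal sketch of what it could look like, assuming it mirrors getNullBool on top of pflag and guregu/null, is:

// Sketch only, not shown in this PR: report the flag value as "valid"
// only when the flag was explicitly set on the command line.
func getNullString(flags *pflag.FlagSet, key string) null.String {
	v, err := flags.GetString(key)
	if err != nil {
		panic(err) // only possible if the flag was never registered
	}
	return null.NewString(v, flags.Changed(key))
}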
33 changes: 27 additions & 6 deletions cmd/run.go
@@ -205,6 +205,9 @@ a commandline interface for interacting with it.`,
if conf.NoSummary.Valid {
engine.NoSummary = conf.NoSummary.Bool
}
if conf.SummaryExport.Valid {
engine.SummaryExport = conf.SummaryExport.String != ""
}

// Create a collector and assign it to the engine if requested.
fprintf(stdout, "%s collector\r", initBar.String())
@@ -437,21 +440,39 @@ a commandline interface for interacting with it.`,
logrus.Warn("No data generated, because no script iterations finished, consider making the test duration longer")
}

data := ui.SummaryData{
Metrics: engine.Metrics,
RootGroup: engine.Executor.GetRunner().GetDefaultGroup(),
Time: engine.Executor.GetTime(),
TimeUnit: conf.Options.SummaryTimeUnit.String,
}
// Print the end-of-test summary.
if !conf.NoSummary.Bool {
fprintf(stdout, "\n")

s := ui.NewSummary(conf.SummaryTrendStats)
s.SummarizeMetrics(stdout, "", data)

fprintf(stdout, "\n")
}

if conf.SummaryExport.ValueOrZero() != "" {
f, err := os.Create(conf.SummaryExport.String)
if err != nil {
logrus.WithError(err).Error("failed to create summary export file")
} else {
defer func() {
if err := f.Close(); err != nil {
logrus.WithError(err).Error("failed to close summary export file")
}
}()
s := ui.NewSummary(conf.SummaryTrendStats)
if err := s.SummarizeMetricsJSON(f, data); err != nil {
logrus.WithError(err).Error("failed to make summary export file")
}
}
}

if conf.Linger.Bool {
logrus.Info("Linger set; waiting for Ctrl+C...")
<-sigC
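The run command only writes the export file; consuming it is left to external tooling. As an illustration (a hypothetical consumer, not part of this PR; the file name and metric name are placeholders), a small Go program could read the file written via --summary-export and fail a CI step when a threshold on http_reqs is reported as failed:

package main

import (
	"encoding/json"
	"io/ioutil"
	"log"
)

func main() {
	// "summary.json" stands in for whatever path was passed to --summary-export.
	raw, err := ioutil.ReadFile("summary.json")
	if err != nil {
		log.Fatal(err)
	}
	var summary struct {
		Metrics map[string]map[string]interface{} `json:"metrics"`
	}
	if err := json.Unmarshal(raw, &summary); err != nil {
		log.Fatal(err)
	}
	// SummarizeMetricsJSON maps each threshold source string to its LastFailed flag.
	if th, ok := summary.Metrics["http_reqs"]["thresholds"].(map[string]interface{}); ok {
		for source, failed := range th {
			if f, _ := failed.(bool); f {
				log.Fatalf("threshold %q failed", source)
			}
		}
	}
}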
13 changes: 7 additions & 6 deletions core/engine.go
@@ -50,11 +50,12 @@ const (
type Engine struct {
runLock sync.Mutex

Executor lib.Executor
Options lib.Options
Collectors []lib.Collector
NoThresholds bool
NoSummary bool
SummaryExport bool

logger *logrus.Logger

@@ -386,7 +387,7 @@ func (e *Engine) processSamples(sampleContainers []stats.SampleContainer) {
defer e.MetricsLock.Unlock()

// TODO: run this and the below code in goroutines?
if !(e.NoSummary && e.NoThresholds && !e.SummaryExport) {
e.processSamplesForMetrics(sampleContainers)
}

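The extra term in the guard above ensures metrics are still aggregated when only the JSON export is requested, even with both the console summary and thresholds disabled. By De Morgan's law the condition is equivalent to the arguably more readable form below (a sketch, not part of the PR):

// Aggregate metrics if anything will consume them: the console summary,
// thresholds, or the end-of-test summary export.
needMetrics := !e.NoSummary || !e.NoThresholds || e.SummaryExport
if needMetrics {
	e.processSamplesForMetrics(sampleContainers)
}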
70 changes: 70 additions & 0 deletions ui/summary.go
@@ -21,6 +21,7 @@
package ui

import (
"encoding/json"
"fmt"
"io"
"sort"
@@ -242,6 +243,25 @@ func nonTrendMetricValueForSum(t time.Duration, timeUnit string, m *stats.Metric
}
}

func nonTrendMetricValueForSumJSON(t time.Duration, m *stats.Metric) map[string]interface{} {
data := make(map[string]interface{})
switch sink := m.Sink.(type) {
case *stats.CounterSink:
rate := 0.0
if t > 0 {
rate = sink.Value / (float64(t) / float64(time.Second))
}
data["rate"] = rate
case *stats.GaugeSink:
data["min"] = sink.Min
data["max"] = sink.Max
case *stats.RateSink:
data["passes"] = sink.Trues
data["fails"] = sink.Total - sink.Trues
}
return data
}

func displayNameForMetric(m *stats.Metric) string {
if m.Sub.Parent != "" {
return "{ " + m.Sub.Suffix + " }"
@@ -384,3 +404,53 @@ func (s *Summary) SummarizeMetrics(w io.Writer, indent string, data SummaryData)

s.summarizeMetrics(w, indent+" ", data.Time, data.TimeUnit, data.Metrics)
}

// SummarizeMetricsJSON summarizes a dataset in JSON format.
func (s *Summary) SummarizeMetricsJSON(w io.Writer, data SummaryData) error {
m := make(map[string]interface{})
m["root_group"] = data.RootGroup

metricsData := make(map[string]interface{})
for name, m := range data.Metrics {
m.Sink.Calc()

sinkData := m.Sink.Format(data.Time)
metricsData[name] = sinkData

var thresholds map[string]interface{}
if len(m.Thresholds.Thresholds) > 0 {
sinkDataWithThreshold := make(map[string]interface{})
for k, v := range sinkData {
sinkDataWithThreshold[k] = v
}
thresholds = make(map[string]interface{})
for _, threshold := range m.Thresholds.Thresholds {
thresholds[threshold.Source] = threshold.LastFailed
}
sinkDataWithThreshold["thresholds"] = thresholds
metricsData[name] = sinkDataWithThreshold
}

if _, ok := m.Sink.(*stats.TrendSink); ok {
continue
}

extra := nonTrendMetricValueForSumJSON(data.Time, m)
if len(extra) > 1 {
extraData := make(map[string]interface{})
extraData["value"] = sinkData["value"]
if thresholds != nil {
extraData["thresholds"] = thresholds
}
for k, v := range extra {
extraData[k] = v
}
metricsData[name] = extraData
}
}
m["metrics"] = metricsData
encoder := json.NewEncoder(w)
encoder.SetIndent("", " ")

return encoder.Encode(m)
}
109 changes: 103 additions & 6 deletions ui/summary_test.go
@@ -26,10 +26,12 @@ import (
"testing"
"time"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gopkg.in/guregu/null.v3"

"github.com/loadimpact/k6/lib"
"github.com/loadimpact/k6/stats"
)

func TestSummary(t *testing.T) {
@@ -39,7 +41,7 @@
" ✓ checks......: 100.00% ✓ 3 ✗ 0 \n"
countOut = " ✗ http_reqs...: 3 3/s\n"
gaugeOut = " vus.........: 1 min=1 max=1\n"
trendOut = " my_trend....: avg=15ms min=10ms med=15ms max=20ms p(90)=19ms " +
"p(95)=19.5ms p(99.9)=19.99ms\n"
)

@@ -50,8 +52,8 @@
}{
{[]string{"avg", "min", "med", "max", "p(90)", "p(95)", "p(99.9)"},
checksOut + countOut + trendOut + gaugeOut},
{[]string{"count"}, checksOut + countOut + " my_trend....: count=3\n" + gaugeOut},
{[]string{"avg", "count"}, checksOut + countOut + " my_trend....: avg=15ms count=3\n" + gaugeOut},
}

rootG, _ := lib.NewGroup("", nil)
@@ -146,6 +148,8 @@ func createTestMetrics() map[string]*stats.Metric {

countMetric := stats.New("http_reqs", stats.Counter)
countMetric.Tainted = null.BoolFrom(true)
countMetric.Thresholds = stats.Thresholds{Thresholds: []*stats.Threshold{{Source: "rate<100"}}}

checksMetric := stats.New("checks", stats.Rate)
checksMetric.Tainted = null.BoolFrom(false)
sink := &stats.TrendSink{}
@@ -160,7 +164,100 @@ func createTestMetrics() map[string]*stats.Metric {
metrics["vus"] = gaugeMetric
metrics["http_reqs"] = countMetric
metrics["checks"] = checksMetric
metrics["my_trend"] = &stats.Metric{
Name: "my_trend",
Type: stats.Trend,
Contains: stats.Time,
Sink: sink,
Tainted: null.BoolFrom(true),
Thresholds: stats.Thresholds{
Thresholds: []*stats.Threshold{
{
Source: "my_trend<1000",
LastFailed: true,
},
},
},
}

return metrics
}

func TestSummarizeMetricsJSON(t *testing.T) {
metrics := createTestMetrics()
expected := `{
"root_group": {
"name": "",
"path": "",
"id": "d41d8cd98f00b204e9800998ecf8427e",
"groups": {
"child": {
"name": "child",
"path": "::child",
"id": "f41cbb53a398ec1c9fb3d33e20c9b040",
"groups": {},
"checks": {
"check1": {
"name": "check1",
"path": "::child::check1",
"id": "6289a7a06253a1c3f6137dfb25695563",
"passes": 5,
"fails": 10
}
}
}
},
"checks": {}
},
"metrics": {
"checks": {
"value": 0,
"passes": 3,
"fails": 0
},
"http_reqs": {
"count": 3,
"rate": 3,
"thresholds": {
"rate<100": false
}
},
"my_trend": {
"avg": 15,
"max": 20,
"med": 15,
"min": 10,
"p(90)": 19,
"p(95)": 19.5,
"thresholds": {
"my_trend<1000": true
}
},
"vus": {
"value": 1,
"min": 1,
"max": 1
}
}
}
`
rootG, _ := lib.NewGroup("", nil)
childG, _ := rootG.Group("child")
check, _ := lib.NewCheck("check1", childG)
check.Passes = 5
check.Fails = 10
childG.Checks["check1"] = check

s := NewSummary([]string{"avg", "min", "med", "max", "p(90)", "p(95)", "p(99.9)"})
data := SummaryData{
Metrics: metrics,
RootGroup: rootG,
Time: time.Second,
TimeUnit: "",
}

var w bytes.Buffer
err := s.SummarizeMetricsJSON(&w, data)
require.Nil(t, err)
require.JSONEq(t, expected, w.String())
}