diff --git a/inputs/mtail/internal/exporter/export.go b/inputs/mtail/internal/exporter/export.go
index f5102744..43784c40 100644
--- a/inputs/mtail/internal/exporter/export.go
+++ b/inputs/mtail/internal/exporter/export.go
@@ -18,8 +18,9 @@ import (
 	"sync"
 	"time"
 
-	"flashcat.cloud/categraf/inputs/mtail/internal/metrics"
 	"github.com/pkg/errors"
+
+	"flashcat.cloud/categraf/inputs/mtail/internal/metrics"
 )
 
 // Commandline Flags.
@@ -29,15 +30,18 @@ var (
 
 // Exporter manages the export of metrics to passive and active collectors.
 type Exporter struct {
-	ctx           context.Context
-	wg            sync.WaitGroup
-	store         *metrics.Store
-	pushInterval  time.Duration
-	hostname      string
-	omitProgLabel bool
-	emitTimestamp bool
-	pushTargets   []pushOptions
-	initDone      chan struct{}
+	ctx            context.Context
+	cancelFunc     context.CancelFunc
+	wg             sync.WaitGroup
+	store          *metrics.Store
+	pushInterval   time.Duration
+	hostname       string
+	omitProgLabel  bool
+	emitTimestamp  bool
+	exportDisabled bool
+	pushTargets    []pushOptions
+	initDone       chan struct{}
+	shutdownDone   chan struct{}
 }
 
 // Option configures a new Exporter.
@@ -74,16 +78,26 @@ func PushInterval(opt time.Duration) Option {
 	}
 }
 
+func DisableExport() Option {
+	return func(e *Exporter) error {
+		e.exportDisabled = true
+		return nil
+	}
+}
+
+var ErrNeedsStore = errors.New("exporter needs a Store")
+
 // New creates a new Exporter.
-func New(ctx context.Context, wg *sync.WaitGroup, store *metrics.Store, options ...Option) (*Exporter, error) {
+func New(ctx context.Context, store *metrics.Store, options ...Option) (*Exporter, error) {
 	if store == nil {
-		return nil, errors.New("exporter needs a Store")
+		return nil, ErrNeedsStore
 	}
 	e := &Exporter{
-		ctx:      ctx,
-		store:    store,
-		initDone: make(chan struct{}),
+		store:        store,
+		initDone:     make(chan struct{}),
+		shutdownDone: make(chan struct{}),
 	}
+	e.ctx, e.cancelFunc = context.WithCancel(ctx)
 	defer close(e.initDone)
 	if err := e.SetOption(options...); err != nil {
 		return nil, err
@@ -111,18 +125,25 @@ func New(ctx context.Context, wg *sync.WaitGroup, store *metrics.Store, options
 	}
 	e.StartMetricPush()
 
-	// This routine manages shutdown of the Exporter. TODO(jaq): This doesn't
-	// happen before mtail returns because of how context cancellation is set
-	// up.. How can we tie this shutdown in before mtail exits? Should
-	// exporter be merged with httpserver?
+	// This routine manages shutdown of the Exporter.
 	go func() {
 		<-e.initDone
-		<-e.ctx.Done()
+		// Wait for the context to be completed before waiting for subroutines.
+		if !e.exportDisabled {
+			<-e.ctx.Done()
+		}
 		e.wg.Wait()
+		close(e.shutdownDone)
 	}()
 	return e, nil
 }
+
+// Stop instructs the exporter to shut down. The function returns once the exporter has finished.
+func (e *Exporter) Stop() {
+	e.cancelFunc()
+	<-e.shutdownDone
+}
+
 // SetOption takes one or more option functions and applies them in order to Exporter.
 func (e *Exporter) SetOption(options ...Option) error {
 	for _, option := range options {
@@ -178,7 +199,7 @@ func (e *Exporter) writeSocketMetrics(c io.Writer, f formatter, exportTotal *exp
 		if err == nil {
 			exportSuccess.Add(1)
 		} else {
-			return errors.Errorf("write error: %s\n", err)
+			return errors.Errorf("write error: %s", err)
 		}
 	}
 	m.RUnlock()
@@ -212,6 +233,10 @@ func (e *Exporter) PushMetrics() {
 
 // StartMetricPush pushes metrics to the configured services each interval.
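For orientation, the reworked exporter lifecycle above can be used roughly as follows. This is a minimal sketch, not code from this change: it assumes the vendored categraf import paths shown in the diff (internal packages, so it only compiles inside the categraf module) and a caller that owns the parent context.

```go
package main

import (
	"context"
	"log"

	"flashcat.cloud/categraf/inputs/mtail/internal/exporter"
	"flashcat.cloud/categraf/inputs/mtail/internal/metrics"
)

func main() {
	store := metrics.NewStore()

	// New no longer takes a *sync.WaitGroup: the Exporter owns its own
	// WaitGroup and derives a cancellable context from the one passed in.
	e, err := exporter.New(context.Background(), store, exporter.DisableExport())
	if err != nil {
		log.Fatal(err) // e.g. exporter.ErrNeedsStore when store is nil
	}

	// ... collect metrics into store ...

	// Stop cancels the internal context and blocks until the exporter's
	// goroutines have finished (i.e. until shutdownDone is closed).
	e.Stop()
}
```

With DisableExport the push loop never starts and the shutdown goroutine skips waiting on the context, so Stop returns as soon as any remaining goroutines finish.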
func (e *Exporter) StartMetricPush() { + if e.exportDisabled { + log.Printf("Export loop disabled.") + return + } if len(e.pushTargets) == 0 { return } @@ -222,7 +247,7 @@ func (e *Exporter) StartMetricPush() { go func() { defer e.wg.Done() <-e.initDone - log.Println("Started metric push.") + log.Printf("Started metric push.") ticker := time.NewTicker(e.pushInterval) defer ticker.Stop() for { diff --git a/inputs/mtail/internal/exporter/export_test.go b/inputs/mtail/internal/exporter/export_test.go deleted file mode 100644 index a2a728d5..00000000 --- a/inputs/mtail/internal/exporter/export_test.go +++ /dev/null @@ -1,222 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package exporter - -import ( - "context" - "errors" - "reflect" - "sort" - "strings" - "sync" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/metrics" - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -const prefix = "prefix" - -func TestCreateExporter(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - var wg sync.WaitGroup - _, err := New(ctx, &wg, nil) - if err == nil { - t.Error("expecting error, got nil") - } - cancel() - wg.Wait() - ctx, cancel = context.WithCancel(context.Background()) - store := metrics.NewStore() - _, err = New(ctx, &wg, store) - if err != nil { - t.Errorf("unexpected error:%s", err) - } - cancel() - wg.Wait() - ctx, cancel = context.WithCancel(context.Background()) - failopt := func(*Exporter) error { - return errors.New("busted") // nolint:goerr113 - } - _, err = New(ctx, &wg, store, failopt) - if err == nil { - t.Errorf("unexpected success") - } - cancel() - wg.Wait() -} - -func FakeSocketWrite(f formatter, m *metrics.Metric) []string { - ret := make([]string, 0) - lc := make(chan *metrics.LabelSet) - d := 60 * time.Second - go m.EmitLabelSets(lc) - for l := range lc { - ret = append(ret, f("gunstar", m, l, d)) - } - sort.Strings(ret) - return ret -} - -func TestMetricToCollectd(t *testing.T) { - collectdPrefix = "" - ts, terr := time.Parse("2006/01/02 15:04:05", "2012/07/24 10:14:00") - if terr != nil { - t.Errorf("time parse error: %s", terr) - } - ms := metrics.NewStore() - - scalarMetric := metrics.NewMetric("foo", "prog", metrics.Counter, metrics.Int) - d, _ := scalarMetric.GetDatum() - datum.SetInt(d, 37, ts) - testutil.FatalIfErr(t, ms.Add(scalarMetric)) - - r := FakeSocketWrite(metricToCollectd, scalarMetric) - expected := []string{"PUTVAL \"gunstar/mtail-prog/counter-foo\" interval=60 1343124840:37\n"} - testutil.ExpectNoDiff(t, expected, r) - - dimensionedMetric := metrics.NewMetric("bar", "prog", metrics.Gauge, metrics.Int, "label") - d, _ = dimensionedMetric.GetDatum("quux") - datum.SetInt(d, 37, ts) - d, _ = dimensionedMetric.GetDatum("snuh") - datum.SetInt(d, 37, ts) - ms.ClearMetrics() - testutil.FatalIfErr(t, ms.Add(dimensionedMetric)) - - r = FakeSocketWrite(metricToCollectd, dimensionedMetric) - expected = []string{ - "PUTVAL \"gunstar/mtail-prog/gauge-bar-label-quux\" interval=60 1343124840:37\n", - "PUTVAL \"gunstar/mtail-prog/gauge-bar-label-snuh\" interval=60 1343124840:37\n", - } - testutil.ExpectNoDiff(t, expected, r) - - timingMetric := metrics.NewMetric("foo", "prog", metrics.Timer, metrics.Int) - d, _ = timingMetric.GetDatum() - datum.SetInt(d, 123, ts) - testutil.FatalIfErr(t, ms.Add(timingMetric)) - - r = FakeSocketWrite(metricToCollectd, timingMetric) - expected = 
[]string{"PUTVAL \"gunstar/mtail-prog/gauge-foo\" interval=60 1343124840:123\n"} - testutil.ExpectNoDiff(t, expected, r) - - collectdPrefix = prefix - r = FakeSocketWrite(metricToCollectd, timingMetric) - expected = []string{"PUTVAL \"gunstar/prefixmtail-prog/gauge-foo\" interval=60 1343124840:123\n"} - testutil.ExpectNoDiff(t, expected, r) -} - -func TestMetricToGraphite(t *testing.T) { - graphitePrefix = "" - ts, terr := time.Parse("2006/01/02 15:04:05", "2012/07/24 10:14:00") - if terr != nil { - t.Errorf("time parse error: %s", terr) - } - - scalarMetric := metrics.NewMetric("foo", "prog", metrics.Counter, metrics.Int) - d, _ := scalarMetric.GetDatum() - datum.SetInt(d, 37, ts) - r := FakeSocketWrite(metricToGraphite, scalarMetric) - expected := []string{"prog.foo 37 1343124840\n"} - testutil.ExpectNoDiff(t, expected, r) - - dimensionedMetric := metrics.NewMetric("bar", "prog", metrics.Gauge, metrics.Int, "host") - d, _ = dimensionedMetric.GetDatum("quux.com") - datum.SetInt(d, 37, ts) - d, _ = dimensionedMetric.GetDatum("snuh.teevee") - datum.SetInt(d, 37, ts) - r = FakeSocketWrite(metricToGraphite, dimensionedMetric) - expected = []string{ - "prog.bar.host.quux_com 37 1343124840\n", - "prog.bar.host.snuh_teevee 37 1343124840\n", - } - testutil.ExpectNoDiff(t, expected, r) - - histogramMetric := metrics.NewMetric("hist", "prog", metrics.Histogram, metrics.Buckets, "xxx") - lv := &metrics.LabelValue{Labels: []string{"bar"}, Value: datum.MakeBuckets([]datum.Range{{Min: 0, Max: 10}, {Min: 10, Max: 20}}, time.Unix(0, 0))} - histogramMetric.AppendLabelValue(lv) - d, _ = histogramMetric.GetDatum("bar") - datum.SetFloat(d, 1, ts) - datum.SetFloat(d, 5, ts) - datum.SetFloat(d, 15, ts) - datum.SetFloat(d, 12, ts) - datum.SetFloat(d, 19, ts) - datum.SetFloat(d, 1000, ts) - r = FakeSocketWrite(metricToGraphite, histogramMetric) - r = strings.Split(strings.TrimSuffix(r[0], "\n"), "\n") - sort.Strings(r) - expected = []string{ - "prog.hist.xxx.bar 1052 1343124840", - "prog.hist.xxx.bar.bin_10 2 1343124840", - "prog.hist.xxx.bar.bin_20 3 1343124840", - "prog.hist.xxx.bar.bin_inf 1 1343124840", - "prog.hist.xxx.bar.count 6 1343124840", - } - testutil.ExpectNoDiff(t, expected, r) - - graphitePrefix = prefix - r = FakeSocketWrite(metricToGraphite, dimensionedMetric) - expected = []string{ - "prefixprog.bar.host.quux_com 37 1343124840\n", - "prefixprog.bar.host.snuh_teevee 37 1343124840\n", - } - testutil.ExpectNoDiff(t, expected, r) -} - -func TestMetricToStatsd(t *testing.T) { - statsdPrefix = "" - ts, terr := time.Parse("2006/01/02 15:04:05", "2012/07/24 10:14:00") - if terr != nil { - t.Errorf("time parse error: %s", terr) - } - - scalarMetric := metrics.NewMetric("foo", "prog", metrics.Counter, metrics.Int) - d, _ := scalarMetric.GetDatum() - datum.SetInt(d, 37, ts) - r := FakeSocketWrite(metricToStatsd, scalarMetric) - expected := []string{"prog.foo:37|c"} - if !reflect.DeepEqual(expected, r) { - t.Errorf("String didn't match:\n\texpected: %v\n\treceived: %v", expected, r) - } - - dimensionedMetric := metrics.NewMetric("bar", "prog", metrics.Gauge, metrics.Int, "l") - d, _ = dimensionedMetric.GetDatum("quux") - datum.SetInt(d, 37, ts) - d, _ = dimensionedMetric.GetDatum("snuh") - datum.SetInt(d, 42, ts) - r = FakeSocketWrite(metricToStatsd, dimensionedMetric) - expected = []string{ - "prog.bar.l.quux:37|g", - "prog.bar.l.snuh:42|g", - } - if !reflect.DeepEqual(expected, r) { - t.Errorf("String didn't match:\n\texpected: %v\n\treceived: %v", expected, r) - } - - multiLabelMetric := 
metrics.NewMetric("bar", "prog", metrics.Gauge, metrics.Int, "c", "a", "b") - d, _ = multiLabelMetric.GetDatum("x", "z", "y") - datum.SetInt(d, 37, ts) - r = FakeSocketWrite(metricToStatsd, multiLabelMetric) - expected = []string{"prog.bar.a.z.b.y.c.x:37|g"} - if !reflect.DeepEqual(expected, r) { - t.Errorf("String didn't match:\n\texpected: %v\n\treceived: %v", expected, r) - } - - timingMetric := metrics.NewMetric("foo", "prog", metrics.Timer, metrics.Int) - d, _ = timingMetric.GetDatum() - datum.SetInt(d, 37, ts) - r = FakeSocketWrite(metricToStatsd, timingMetric) - expected = []string{"prog.foo:37|ms"} - if !reflect.DeepEqual(expected, r) { - t.Errorf("String didn't match:\n\texpected: %v\n\treceived: %v", expected, r) - } - - statsdPrefix = prefix - r = FakeSocketWrite(metricToStatsd, timingMetric) - expected = []string{"prefixprog.foo:37|ms"} - if !reflect.DeepEqual(expected, r) { - t.Errorf("prefixed string didn't match:\n\texpected: %v\n\treceived: %v", expected, r) - } -} diff --git a/inputs/mtail/internal/exporter/graphite_test.go b/inputs/mtail/internal/exporter/graphite_test.go deleted file mode 100644 index 32c84596..00000000 --- a/inputs/mtail/internal/exporter/graphite_test.go +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2021 Adam Romanek -// This file is available under the Apache license. - -package exporter - -import ( - "context" - "io" - "net/http" - "net/http/httptest" - "sync" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/metrics" - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -var handleGraphiteTests = []struct { - name string - metrics []*metrics.Metric - expected string -}{ - { - "empty", - []*metrics.Metric{}, - "", - }, - { - "single", - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Counter, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - }, - }, - "foobar.test.foo 1 0\n", - }, -} - -func TestHandleGraphite(t *testing.T) { - graphitePrefix = "foobar." - for _, tc := range handleGraphiteTests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - var wg sync.WaitGroup - ms := metrics.NewStore() - for _, metric := range tc.metrics { - testutil.FatalIfErr(t, ms.Add(metric)) - } - e, err := New(ctx, &wg, ms, Hostname("gunstar")) - testutil.FatalIfErr(t, err) - response := httptest.NewRecorder() - e.HandleGraphite(response, &http.Request{}) - if response.Code != 200 { - t.Errorf("response code not 200: %d", response.Code) - } - b, err := io.ReadAll(response.Body) - if err != nil { - t.Errorf("failed to read response %s", err) - } - testutil.ExpectNoDiff(t, tc.expected, string(b), testutil.IgnoreUnexported(sync.RWMutex{})) - cancel() - wg.Wait() - }) - } -} diff --git a/inputs/mtail/internal/exporter/json_test.go b/inputs/mtail/internal/exporter/json_test.go deleted file mode 100644 index 6d586f57..00000000 --- a/inputs/mtail/internal/exporter/json_test.go +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright 2015 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
- -package exporter - -import ( - "context" - "io" - "math" - "net/http" - "net/http/httptest" - "sync" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/metrics" - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -var handleJSONTests = []struct { - name string - metrics []*metrics.Metric - expected string -}{ - { - "empty", - []*metrics.Metric{}, - "[]", - }, - { - "single", - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Counter, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - }, - }, - `[ - { - "Name": "foo", - "Program": "test", - "Kind": 1, - "Type": 0, - "LabelValues": [ - { - "Value": { - "Value": 1, - "Time": 0 - } - } - ] - } -]`, - }, - { - "dimensioned", - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Counter, - Keys: []string{"a", "b"}, - LabelValues: []*metrics.LabelValue{{Labels: []string{"1", "2"}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - }, - }, - `[ - { - "Name": "foo", - "Program": "test", - "Kind": 1, - "Type": 0, - "Keys": [ - "a", - "b" - ], - "LabelValues": [ - { - "Labels": [ - "1", - "2" - ], - "Value": { - "Value": 1, - "Time": 0 - } - } - ] - } -]`, - }, - { - "histogram", - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Histogram, - Keys: []string{"a", "b"}, - LabelValues: []*metrics.LabelValue{{Labels: []string{"1", "2"}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - Buckets: []datum.Range{{Min: 0, Max: math.Inf(1)}}, - }, - }, - `[ - { - "Name": "foo", - "Program": "test", - "Kind": 5, - "Type": 0, - "Keys": [ - "a", - "b" - ], - "LabelValues": [ - { - "Labels": [ - "1", - "2" - ], - "Value": { - "Value": 1, - "Time": 0 - } - } - ], - "Buckets": [ - { - "Min": "0", - "Max": "+Inf" - } - ] - } -]`, - }, -} - -func TestHandleJSON(t *testing.T) { - for _, tc := range handleJSONTests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - var wg sync.WaitGroup - ms := metrics.NewStore() - for _, metric := range tc.metrics { - testutil.FatalIfErr(t, ms.Add(metric)) - } - e, err := New(ctx, &wg, ms, Hostname("gunstar")) - testutil.FatalIfErr(t, err) - response := httptest.NewRecorder() - e.HandleJSON(response, &http.Request{}) - if response.Code != 200 { - t.Errorf("response code not 200: %d", response.Code) - } - b, err := io.ReadAll(response.Body) - if err != nil { - t.Errorf("failed to read response: %s", err) - } - testutil.ExpectNoDiff(t, tc.expected, string(b), testutil.IgnoreUnexported(sync.RWMutex{})) - cancel() - wg.Wait() - }) - } -} diff --git a/inputs/mtail/internal/exporter/prometheus_test.go b/inputs/mtail/internal/exporter/prometheus_test.go deleted file mode 100644 index 28cc8cf2..00000000 --- a/inputs/mtail/internal/exporter/prometheus_test.go +++ /dev/null @@ -1,359 +0,0 @@ -// Copyright 2015 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
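The deleted prometheus_test.go below verified the Exporter through promtest.CollectAndCompare, i.e. the Exporter satisfies prometheus.Collector, and exercised Write for the text exposition format. A rough statement-level sketch of those two entry points, assuming e is a *exporter.Exporter and that bytes, fmt, log and client_golang's prometheus package are imported:

```go
// Register the Exporter as a prometheus.Collector, then dump the same
// metrics as text with Write. Sketch only, error handling trimmed.
reg := prometheus.NewRegistry()
if err := reg.Register(e); err != nil {
	log.Fatal(err)
}

var buf bytes.Buffer
if err := e.Write(&buf); err != nil {
	log.Fatal(err)
}
fmt.Print(buf.String())
```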
- -package exporter - -import ( - "bytes" - "context" - "math" - "strings" - "sync" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/metrics" - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" - promtest "github.com/prometheus/client_golang/prometheus/testutil" -) - -var handlePrometheusTests = []struct { - name string - progLabel bool - metrics []*metrics.Metric - expected string -}{ - { - "empty", - false, - []*metrics.Metric{}, - "", - }, - { - "single", - false, - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Counter, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - }, - }, - `# HELP foo defined at -# TYPE foo counter -foo{} 1 -`, - }, - { - "with prog label", - true, - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Counter, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - }, - }, - `# HELP foo defined at -# TYPE foo counter -foo{prog="test"} 1 -`, - }, - { - "dimensioned", - false, - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Counter, - Keys: []string{"a", "b"}, - LabelValues: []*metrics.LabelValue{{Labels: []string{"1", "2"}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - }, - }, - `# HELP foo defined at -# TYPE foo counter -foo{a="1",b="2"} 1 -`, - }, - { - "gauge", - false, - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Gauge, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - }, - }, - `# HELP foo defined at -# TYPE foo gauge -foo{} 1 -`, - }, - { - "timer", - false, - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Timer, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - }, - }, - `# HELP foo defined at -# TYPE foo gauge -foo{} 1 -`, - }, - { - "text", - false, - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Text, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeString("hi", time.Unix(0, 0))}}, - }, - }, - "", - }, - { - "quotes", - false, - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Counter, - Keys: []string{"a"}, - LabelValues: []*metrics.LabelValue{{Labels: []string{"str\"bang\"blah"}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - }, - }, - `# HELP foo defined at -# TYPE foo counter -foo{a="str\"bang\"blah"} 1 -`, - }, - { - "help", - false, - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Counter, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - Source: "location.mtail:37", - }, - }, - `# HELP foo defined at location.mtail:37 -# TYPE foo counter -foo{} 1 -`, - }, - { - "2 help with label", - true, - []*metrics.Metric{ - { - Name: "foo", - Program: "test2", - Kind: metrics.Counter, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - Source: "location.mtail:37", - }, - { - Name: "foo", - Program: "test1", - Kind: metrics.Counter, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - Source: "different.mtail:37", - }, - }, - `# HELP foo defined at location.mtail:37 -# TYPE foo counter -foo{prog="test2"} 1 -foo{prog="test1"} 1 -`, - }, - { - "histo", - true, - []*metrics.Metric{ - 
{ - Name: "foo", - Program: "test", - Kind: metrics.Histogram, - Keys: []string{"a"}, - LabelValues: []*metrics.LabelValue{{Labels: []string{"bar"}, Value: datum.MakeBuckets([]datum.Range{{Min: 0, Max: 1}, {Min: 1, Max: 2}}, time.Unix(0, 0))}}, - Source: "location.mtail:37", - }, - }, - `# HELP foo defined at location.mtail:37 -# TYPE foo histogram -foo_bucket{a="bar",prog="test",le="1"} 0 -foo_bucket{a="bar",prog="test",le="2"} 0 -foo_bucket{a="bar",prog="test",le="+Inf"} 0 -foo_sum{a="bar",prog="test"} 0 -foo_count{a="bar",prog="test"} 0 -`, - }, - { - "histo-count-eq-inf", - true, - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Histogram, - Keys: []string{"a"}, - LabelValues: []*metrics.LabelValue{ - { - Labels: []string{"bar"}, - Value: &datum.Buckets{ - Buckets: []datum.BucketCount{ - { - Range: datum.Range{Min: 0, Max: 1}, - Count: 1, - }, - { - Range: datum.Range{Min: 1, Max: 2}, - Count: 1, - }, - { - Range: datum.Range{Min: 2, Max: math.Inf(+1)}, - Count: 2, - }, - }, - Count: 4, - Sum: 5, - }, - }, - }, - Source: "location.mtail:37", - }, - }, - `# HELP foo defined at location.mtail:37 -# TYPE foo histogram -foo_bucket{a="bar",prog="test",le="1"} 1 -foo_bucket{a="bar",prog="test",le="2"} 2 -foo_bucket{a="bar",prog="test",le="+Inf"} 4 -foo_sum{a="bar",prog="test"} 5 -foo_count{a="bar",prog="test"} 4 -`, - }, -} - -func TestHandlePrometheus(t *testing.T) { - for _, tc := range handlePrometheusTests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - var wg sync.WaitGroup - ctx, cancel := context.WithCancel(context.Background()) - ms := metrics.NewStore() - for _, metric := range tc.metrics { - testutil.FatalIfErr(t, ms.Add(metric)) - } - opts := []Option{ - Hostname("gunstar"), - } - if !tc.progLabel { - opts = append(opts, OmitProgLabel()) - } - e, err := New(ctx, &wg, ms, opts...) - testutil.FatalIfErr(t, err) - r := strings.NewReader(tc.expected) - if err = promtest.CollectAndCompare(e, r); err != nil { - t.Error(err) - } - cancel() - wg.Wait() - }) - } -} - -var writePrometheusTests = []struct { - name string - metrics []*metrics.Metric - expected string -}{ - { - "empty", - []*metrics.Metric{}, - "", - }, - { - "single", - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Counter, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - }, - }, - `# HELP foo defined at -# TYPE foo counter -foo 1 -`, - }, - { - "multi", - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Counter, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeInt(1, time.Unix(0, 0))}}, - }, - { - Name: "bar", - Program: "test", - Kind: metrics.Counter, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeInt(2, time.Unix(0, 0))}}, - }, - }, - `# HELP bar defined at -# TYPE bar counter -bar 2 -# HELP foo defined at -# TYPE foo counter -foo 1 -`, - }, -} - -func TestWritePrometheus(t *testing.T) { - for _, tc := range writePrometheusTests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - var wg sync.WaitGroup - ctx, cancel := context.WithCancel(context.Background()) - ms := metrics.NewStore() - for _, metric := range tc.metrics { - testutil.FatalIfErr(t, ms.Add(metric)) - } - opts := []Option{ - Hostname("gunstar"), - OmitProgLabel(), - } - e, err := New(ctx, &wg, ms, opts...) 
- testutil.FatalIfErr(t, err) - - var buf bytes.Buffer - err = e.Write(&buf) - testutil.FatalIfErr(t, err) - testutil.ExpectNoDiff(t, tc.expected, buf.String()) - - cancel() - wg.Wait() - }) - } -} diff --git a/inputs/mtail/internal/exporter/varz_test.go b/inputs/mtail/internal/exporter/varz_test.go deleted file mode 100644 index 6da29747..00000000 --- a/inputs/mtail/internal/exporter/varz_test.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright 2015 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package exporter - -import ( - "context" - "io" - "net/http" - "net/http/httptest" - "sync" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/metrics" - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -var handleVarzTests = []struct { - name string - metrics []*metrics.Metric - expected string -}{ - { - "empty", - []*metrics.Metric{}, - "", - }, - { - "single", - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Counter, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeInt(1, time.Unix(1397586900, 0))}}, - }, - }, - `foo{prog=test,instance=gunstar} 1 -`, - }, - { - "dimensioned", - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Counter, - Keys: []string{"a", "b"}, - LabelValues: []*metrics.LabelValue{{Labels: []string{"1", "2"}, Value: datum.MakeInt(1, time.Unix(1397586900, 0))}}, - }, - }, - `foo{a=1,b=2,prog=test,instance=gunstar} 1 -`, - }, - { - "text", - []*metrics.Metric{ - { - Name: "foo", - Program: "test", - Kind: metrics.Text, - LabelValues: []*metrics.LabelValue{{Labels: []string{}, Value: datum.MakeString("hi", time.Unix(1397586900, 0))}}, - }, - }, - `foo{prog=test,instance=gunstar} hi -`, - }, -} - -func TestHandleVarz(t *testing.T) { - for _, tc := range handleVarzTests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - var wg sync.WaitGroup - ctx, cancel := context.WithCancel(context.Background()) - ms := metrics.NewStore() - for _, metric := range tc.metrics { - testutil.FatalIfErr(t, ms.Add(metric)) - } - e, err := New(ctx, &wg, ms, Hostname("gunstar")) - testutil.FatalIfErr(t, err) - response := httptest.NewRecorder() - e.HandleVarz(response, &http.Request{}) - if response.Code != 200 { - t.Errorf("response code not 200: %d", response.Code) - } - b, err := io.ReadAll(response.Body) - if err != nil { - t.Errorf("failed to read response: %s", err) - } - testutil.ExpectNoDiff(t, tc.expected, string(b)) - cancel() - wg.Wait() - }) - } -} diff --git a/inputs/mtail/internal/metrics/datum/buckets.go b/inputs/mtail/internal/metrics/datum/buckets.go index 4d4a1173..ef15f0cb 100644 --- a/inputs/mtail/internal/metrics/datum/buckets.go +++ b/inputs/mtail/internal/metrics/datum/buckets.go @@ -44,7 +44,7 @@ func (d *Buckets) Observe(v float64, ts time.Time) { defer d.Unlock() for i, b := range d.Buckets { - if b.Range.Contains(v) { + if v <= b.Range.Max { d.Buckets[i].Count++ break } diff --git a/inputs/mtail/internal/metrics/datum/buckets_test.go b/inputs/mtail/internal/metrics/datum/buckets_test.go deleted file mode 100644 index cdd52535..00000000 --- a/inputs/mtail/internal/metrics/datum/buckets_test.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
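The Observe change above swaps Range.Contains (which required Min <= v < Max) for a bare upper-bound check: a value is now counted in the first bucket whose Max it does not exceed, so an observation equal to a boundary lands in the lower bucket and values below the first bucket's Min are no longer dropped. A toy sketch of the new selection rule, not the exporter's code:

```go
// bucketFor mirrors the updated selection in (*Buckets).Observe: pick the
// first bucket whose upper bound is >= v. MakeBuckets always ensures a +Inf
// bucket, so every observation finds a bucket. Illustrative only.
func bucketFor(maxes []float64, v float64) int {
	for i, max := range maxes {
		if v <= max {
			return i
		}
	}
	return len(maxes) - 1 // only reachable if no +Inf bucket is present
}
```

For example, an observation of exactly 2 against upper bounds {1, 2, +Inf} now lands in the second bucket rather than falling through to the +Inf bucket.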
- -package datum_test - -import ( - "math" - "testing" - "testing/quick" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" -) - -func TestBucketContains(t *testing.T) { - if err := quick.Check(func(min, max, val float64) bool { - r := &datum.Range{Min: min, Max: max} - truth := val < max && val >= min - return truth == r.Contains(val) - }, nil); err != nil { - t.Error(err) - } -} - -func TestMakeBucket(t *testing.T) { - r := []datum.Range{ - {0, 1}, - {1, 2}, - {2, 4}, - } - b := datum.MakeBuckets(r, time.Unix(37, 42)) - ts := time.Unix(37, 31) - datum.Observe(b, 2, ts) - if r := datum.GetBucketsSum(b); r != 2 { - t.Errorf("sum not 2, got %v", r) - } - if r := datum.GetBucketsCount(b); r != 1 { - t.Errorf("count not 1, got %v", r) - } - bs := datum.GetBucketsCumByMax(b) - if r := datum.GetBucketsCount(b); r != bs[math.Inf(+1)] { - t.Errorf("Inf bucket des not equal total observation count: %v vs %v", bs[math.Inf(+1)], r) - } - if len(bs) != len(r)+1 { - t.Errorf("missing buckets from BucketsByMax: expected %d, got %v", len(r)+1, len(bs)) - } -} diff --git a/inputs/mtail/internal/metrics/datum/datum.go b/inputs/mtail/internal/metrics/datum/datum.go index 90d68dbc..cf13c1a4 100644 --- a/inputs/mtail/internal/metrics/datum/datum.go +++ b/inputs/mtail/internal/metrics/datum/datum.go @@ -94,7 +94,7 @@ func MakeString(v string, ts time.Time) Datum { // MakeBuckets creates a new bucket datum with the provided list of ranges and // timestamp. If no +inf bucket is provided, one is created. -func MakeBuckets(buckets []Range, ts time.Time) Datum { +func MakeBuckets(buckets []Range, _ time.Time) Datum { d := &Buckets{} seenInf := false highest := 0.0 diff --git a/inputs/mtail/internal/metrics/datum/datum_test.go b/inputs/mtail/internal/metrics/datum/datum_test.go deleted file mode 100644 index a8a64934..00000000 --- a/inputs/mtail/internal/metrics/datum/datum_test.go +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package datum - -import ( - "encoding/json" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestDatumSetAndValue(t *testing.T) { - d := MakeInt(12, time.Unix(37, 42)) - if r := GetInt(d); r != 12 { - t.Errorf("d ditn't return 12, got %v", r) - } - if r := d.ValueString(); r != "12" { - t.Errorf("d value is not 12, got %v", r) - } - if r := d.TimeString(); r != "37" { - t.Errorf("d Time not correct, got %v", r) - } - d = MakeFloat(1.2, time.Unix(37, 42)) - if r := GetFloat(d); r != 1.2 { - t.Errorf("d ditn't return 12, got %v", r) - } - if r := d.ValueString(); r != "1.2" { - t.Errorf("d value is not 12, got %v", r) - } - if r := d.TimeString(); r != "37" { - t.Errorf("d Time not correct, got %v", r) - } -} - -var datumJSONTests = []struct { - datum Datum - expected string -}{ - { - MakeInt(37, time.Unix(42, 12)), - `{"Value":37,"Time":42000000012}`, - }, - { - MakeFloat(37.1, time.Unix(42, 12)), - `{"Value":37.1,"Time":42000000012}`, - }, -} - -func TestMarshalJSON(t *testing.T) { - // This is not a round trip test because only the LabelValue knows how to unmarshal a Datum. 
- for i, tc := range datumJSONTests { - b, err := json.Marshal(tc.datum) - if err != nil { - t.Errorf("%d: Marshal failed: %v", i, err) - } - testutil.ExpectNoDiff(t, tc.expected, string(b)) - } -} diff --git a/inputs/mtail/internal/metrics/datum/int_test.go b/inputs/mtail/internal/metrics/datum/int_test.go deleted file mode 100644 index bc59bd33..00000000 --- a/inputs/mtail/internal/metrics/datum/int_test.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package datum - -import ( - "testing" - "time" -) - -func BenchmarkIncrementScalarInt(b *testing.B) { - d := &Int{} - ts := time.Now().UTC() - for i := 0; i < b.N; i++ { - d.IncBy(1, ts) - } -} - -func BenchmarkDecrementScalarInt(b *testing.B) { - d := &Int{} - ts := time.Now().UTC() - for i := 0; i < b.N; i++ { - d.DecBy(1, ts) - } -} - -func TestDecrementScalarInt(t *testing.T) { - d := &Int{} - ts := time.Now().UTC() - d.IncBy(1, ts) - r := d.Get() - if r != 1 { - t.Errorf("expected 1, got %d", r) - } - d.DecBy(1, ts) - r = d.Get() - if r != 0 { - t.Errorf("expected 0, got %d", r) - } -} diff --git a/inputs/mtail/internal/metrics/metric.go b/inputs/mtail/internal/metrics/metric.go index 7ad0b90e..f929d3c4 100644 --- a/inputs/mtail/internal/metrics/metric.go +++ b/inputs/mtail/internal/metrics/metric.go @@ -8,15 +8,16 @@ package metrics import ( "encoding/json" "fmt" + "log" "math/rand" "reflect" "strings" "sync" "time" - // "github.com/golang/glog" - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" "github.com/pkg/errors" + + "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" ) // Kind enumerates the types of metrics supported. @@ -64,7 +65,7 @@ func (m Kind) String() string { } // Generate implements the quick.Generator interface for Kind. -func (Kind) Generate(rand *rand.Rand, size int) reflect.Value { +func (Kind) Generate(rand *rand.Rand, _ int) reflect.Value { return reflect.ValueOf(Kind(rand.Intn(int(endKind)))) } @@ -190,7 +191,10 @@ func (m *Metric) RemoveOldestDatum() { } if oldestLV != nil { // glog.V(1).Infof("removeOldest: removing oldest LV: %v", oldestLV) - m.RemoveDatum(oldestLV.Labels...) + err := m.RemoveDatum(oldestLV.Labels...) + if err != nil { + log.Println("W!", err) + } } } diff --git a/inputs/mtail/internal/metrics/metric_test.go b/inputs/mtail/internal/metrics/metric_test.go deleted file mode 100644 index e5459baf..00000000 --- a/inputs/mtail/internal/metrics/metric_test.go +++ /dev/null @@ -1,274 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
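metric_test.go, removed below, doubled as usage documentation for the Metric API (including RemoveOldestDatum, whose errors are now logged rather than silently dropped). A condensed statement-level sketch of the basic pattern those tests exercised, assuming the categraf metrics and datum packages plus fmt, log and time are imported:

```go
// Create a counter with one label key, bump a labelled datum, read it back.
m := metrics.NewMetric("requests", "prog", metrics.Counter, metrics.Int, "host")

d, err := m.GetDatum("web01") // creates the datum for this label combination
if err != nil {
	log.Fatal(err)
}
datum.IncIntBy(d, 1, time.Now().UTC())

if lv := m.FindLabelValueOrNil([]string{"web01"}); lv != nil {
	fmt.Println(lv.Value.ValueString()) // "1"
}
```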
- -package metrics - -import ( - "encoding/json" - "fmt" - "math/rand" - "reflect" - "sync" - "testing" - "testing/quick" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestKindType(t *testing.T) { - v := Kind(0) - if s := v.String(); s != "Unknown" { - t.Errorf("Kind.String() returned %q not Unknown", s) - } - v = Counter - if s := v.String(); s != "Counter" { - t.Errorf("Kind.String() returned %q not Counter", s) - } - v = Gauge - if s := v.String(); s != "Gauge" { - t.Errorf("Kind.String() returned %q not Gauge", s) - } - v = Timer - if s := v.String(); s != "Timer" { - t.Errorf("Kind.String() returned %q not Timer", s) - } -} - -func TestScalarMetric(t *testing.T) { - v := NewMetric("test", "prog", Counter, Int) - d, err := v.GetDatum() - if err != nil { - t.Errorf("no datum: %s", err) - } - datum.IncIntBy(d, 1, time.Now().UTC()) - lv := v.FindLabelValueOrNil([]string{}) - if lv == nil { - t.Fatal("couldn't find labelvalue") - } - newD := lv.Value - if newD == nil { - t.Error("new_d is nil") - } - if newD.ValueString() != "1" { - t.Error("value not 1") - } - d2, err := v.GetDatum("a", "b") - if err == nil { - t.Errorf("datum with keys sohuld have returned no value, got %v", d2) - } -} - -func TestDimensionedMetric(t *testing.T) { - v := NewMetric("test", "prog", Counter, Int, "foo") - d, _ := v.GetDatum("a") - datum.IncIntBy(d, 1, time.Now().UTC()) - if v.FindLabelValueOrNil([]string{"a"}).Value.ValueString() != "1" { - t.Errorf("fail") - } - - v = NewMetric("test", "prog", Counter, Int, "foo", "bar") - d, _ = v.GetDatum("a", "b") - datum.IncIntBy(d, 1, time.Now().UTC()) - if v.FindLabelValueOrNil([]string{"a", "b"}).Value.ValueString() != "1" { - t.Errorf("fail") - } - - v = NewMetric("test", "prog", Counter, Int, "foo", "bar", "quux") - d, _ = v.GetDatum("a", "b", "c") - datum.IncIntBy(d, 1, time.Now().UTC()) - if v.FindLabelValueOrNil([]string{"a", "b", "c"}).Value.ValueString() != "1" { - t.Errorf("fail") - } -} - -var labelSetTests = []struct { - values []string - expectedLabels map[string]string -}{ - { - []string{"a", "b", "c"}, - map[string]string{"foo": "a", "bar": "b", "quux": "c"}, - }, - { - []string{"a", "b", "d"}, - map[string]string{"foo": "a", "bar": "b", "quux": "d"}, - }, -} - -func TestEmitLabelSet(t *testing.T) { - ts := time.Now().UTC() - for _, tc := range labelSetTests { - tc := tc - t.Run(fmt.Sprintf("%v", tc.values), func(t *testing.T) { - m := NewMetric("test", "prog", Gauge, Int, "foo", "bar", "quux") - d, _ := m.GetDatum(tc.values...) 
- datum.SetInt(d, 37, ts) - - c := make(chan *LabelSet) - - go m.EmitLabelSets(c) - - ls := <-c - - testutil.ExpectNoDiff(t, tc.expectedLabels, ls.Labels) - }) - } -} - -func TestFindLabelValueOrNil(t *testing.T) { - m0 := NewMetric("foo", "prog", Counter, Int) - if r0 := m0.FindLabelValueOrNil([]string{}); r0 != nil { - t.Errorf("m0 should be nil: %v", r0) - } - d, err := m0.GetDatum() - if err != nil { - t.Errorf("Bad datum %v: %v\n", d, err) - } - if r1 := m0.FindLabelValueOrNil([]string{}); r1 == nil { - t.Errorf("m0 should not be nil: %v", r1) - } - m1 := NewMetric("bar", "prog", Counter, Int, "a") - d1, err1 := m1.GetDatum("1") - if err1 != nil { - t.Errorf("err1 %v: %v\n", d1, err1) - } - if r2 := m1.FindLabelValueOrNil([]string{"0"}); r2 != nil { - t.Errorf("r2 should be nil") - } - if r3 := m1.FindLabelValueOrNil([]string{"1"}); r3 == nil { - t.Errorf("r3 should be non nil") - } -} - -func TestAppendLabelValue(t *testing.T) { - m := NewMetric("foo", "prog", Counter, Int, "bar") - l := []string{"test"} - d0 := datum.MakeInt(66, time.Unix(0, 0)) - lv := &LabelValue{Labels: l, Value: d0} - err := m.AppendLabelValue(lv) - if err != nil { - t.Errorf("Bad append %v: %v\n", d0, err) - } - d1, err := m.GetDatum(l...) - if err != nil { - t.Errorf("Bad datum %v: %v\n", d1, err) - } - testutil.ExpectNoDiff(t, d0, d1) -} - -func timeGenerator(rand *rand.Rand) time.Time { - months := []time.Month{ - time.January, time.February, time.March, - time.April, time.May, time.June, - time.July, time.August, time.September, - time.October, time.November, time.December, - } - - return time.Date( - rand.Intn(9999), - months[rand.Intn(len(months))], - rand.Intn(31), - rand.Intn(24), - rand.Intn(60), - rand.Intn(60), - int(rand.Int31()), - time.UTC, - ) -} - -func TestMetricJSONRoundTrip(t *testing.T) { - rand := rand.New(rand.NewSource(0)) - f := func(name, prog string, kind Kind, keys []string, val, ti, tns int64) bool { - m := NewMetric(name, prog, kind, Int, keys...) - labels := make([]string, 0) - for range keys { - if l, ok := quick.Value(reflect.TypeOf(name), rand); ok { - labels = append(labels, l.String()) - } else { - t.Errorf("failed to create value for labels") - break - } - } - d, _ := m.GetDatum(labels...) 
- datum.SetInt(d, val, timeGenerator(rand)) - - j, e := json.Marshal(m) - if e != nil { - t.Errorf("json.Marshal failed: %s\n", e) - return false - } - - r := newMetric(0) - e = json.Unmarshal(j, &r) - if e != nil { - t.Errorf("json.Unmarshal failed: %s\n", e) - return false - } - - return testutil.ExpectNoDiff(t, m, r, testutil.IgnoreUnexported(sync.RWMutex{}, Metric{})) - } - if err := quick.Check(f, nil); err != nil { - t.Error(err) - } -} - -func TestTimer(t *testing.T) { - m := NewMetric("test", "prog", Timer, Int) - n := NewMetric("test", "prog", Timer, Int) - testutil.ExpectNoDiff(t, m, n, testutil.IgnoreUnexported(sync.RWMutex{}, Metric{})) - d, _ := m.GetDatum() - datum.IncIntBy(d, 1, time.Now().UTC()) - lv := m.FindLabelValueOrNil([]string{}) - if lv == nil { - t.Fatal("couldn't find labelvalue") - } - newD := lv.Value - if newD == nil { - t.Errorf("new_d is nil") - } - if newD.ValueString() != "1" { - t.Errorf("value not 1") - } -} - -func TestRemoveMetricLabelValue(t *testing.T) { - m := NewMetric("test", "prog", Counter, Int, "a", "b", "c") - _, e := m.GetDatum("a", "a", "a") - if e != nil { - t.Errorf("Getdatum failed: %s", e) - } - lv := m.FindLabelValueOrNil([]string{"a", "a", "a"}) - if lv == nil { - t.Errorf("coidln't find labelvalue") - } - e = m.RemoveDatum("a", "a", "a") - if e != nil { - t.Errorf("couldn't remove datum: %s", e) - } - lv = m.FindLabelValueOrNil([]string{"a", "a", "a"}) - if lv != nil { - t.Errorf("label value still exists") - } -} - -func TestMetricLabelValueRemovePastLimit(t *testing.T) { - m := NewMetric("test", "prog", Counter, Int, "foo") - m.Limit = 1 - _, err := m.GetDatum("a") - testutil.FatalIfErr(t, err) - m.RemoveOldestDatum() - _, err = m.GetDatum("b") - testutil.FatalIfErr(t, err) - m.RemoveOldestDatum() - _, err = m.GetDatum("c") - testutil.FatalIfErr(t, err) - m.RemoveOldestDatum() - if len(m.LabelValues) > 2 { - t.Errorf("Expected 2 labelvalues got %#v", m.LabelValues) - } - if x := m.FindLabelValueOrNil([]string{"a"}); x != nil { - t.Errorf("found label a which is unexpected: %#v", x) - } -} diff --git a/inputs/mtail/internal/metrics/store_bench_test.go b/inputs/mtail/internal/metrics/store_bench_test.go deleted file mode 100644 index 73a82724..00000000 --- a/inputs/mtail/internal/metrics/store_bench_test.go +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package metrics - -import ( - "fmt" - "math" - "math/rand" - "reflect" - "testing" - "testing/quick" -) - -const ( - maxItemsLog2 = 10 - maxLabelsLog2 = 13 -) - -// newRandMetric makes a new, randomly filled Metric. -func newRandMetric(tb testing.TB, rand *rand.Rand, i int) *Metric { - tb.Helper() - nameVal, ok := quick.Value(reflect.TypeOf(""), rand) - if !ok { - tb.Fatalf("%d: can't make a name", i) - } - progVal, ok := quick.Value(reflect.TypeOf(""), rand) - if !ok { - tb.Fatalf("%d: can't make a prog", i) - } - kindVal, ok := quick.Value(reflect.TypeOf(Counter), rand) - if !ok { - tb.Fatalf("%d: can't make a kind", i) - } - typeVal, ok := quick.Value(reflect.TypeOf(Int), rand) - if !ok { - tb.Fatalf("%d: can't make a type", i) - } - keysVal, ok := quick.Value(reflect.TypeOf([]string{}), rand) - if !ok { - tb.Fatalf("%d: can't make a key list", i) - } - return NewMetric(nameVal.Interface().(string), - progVal.Interface().(string), - kindVal.Interface().(Kind), - typeVal.Interface().(Type), - keysVal.Interface().([]string)...) 
-} - -type bench struct { - name string - setup func(b *testing.B, rand *rand.Rand, items int, m *[]*Metric, s *Store) - b func(b *testing.B, items int, m []*Metric, s *Store) -} - -func fillMetric(b *testing.B, rand *rand.Rand, items int, m *[]*Metric, _ *Store) { - b.Helper() - for i := 0; i < items; i++ { - (*m)[i] = newRandMetric(b, rand, i) - } -} - -func addToStore(b *testing.B, items int, m []*Metric, s *Store) { - b.Helper() - for j := 0; j < items; j++ { - s.Add(m[j]) - } -} - -func BenchmarkStore(b *testing.B) { - benches := []bench{ - { - name: "Add", - setup: fillMetric, - b: addToStore, - }, - { - name: "Iterate", - setup: func(b *testing.B, rand *rand.Rand, items int, m *[]*Metric, s *Store) { - b.Helper() - fillMetric(b, rand, items, m, s) - addToStore(b, items, *m, s) - }, - b: func(b *testing.B, items int, m []*Metric, s *Store) { - b.Helper() - s.Range(func(*Metric) error { - return nil - }) - }, - }, - } - rand := rand.New(rand.NewSource(99)) - for _, bench := range benches { - bench := bench - for _, gc := range []bool{false, true} { - gc := gc - gcStr := "" - if gc { - gcStr = "WithGc" - } - for _, parallel := range []bool{false, true} { - parallel := parallel - parallelStr := "" - if parallel { - parallelStr = "Parallel" - } - - for i := 0.; i <= maxItemsLog2; i++ { - items := int(math.Pow(2, i)) - b.Run(fmt.Sprintf("%s%s%s-%d", bench.name, gcStr, parallelStr, items), func(b *testing.B) { - s := NewStore() - m := make([]*Metric, items) - if bench.setup != nil { - bench.setup(b, rand, items, &m, s) - } - b.ResetTimer() - if parallel { - b.RunParallel(func(pb *testing.PB) { - for pb.Next() { - bench.b(b, items, m, s) - } - }) - } else { - for n := 0; n < b.N; n++ { - bench.b(b, items, m, s) - if gc { - s.Gc() - } - } - } - }) - } - } - } - } -} - -func newRandLabels(tb testing.TB, rand *rand.Rand, i int) []string { - tb.Helper() - lv := make([]string, i) - for j := 0; j < i; j++ { - val, ok := quick.Value(reflect.TypeOf(""), rand) - if !ok { - tb.Fatalf("%d-%d: can't make a label", i, j) - } - lv[j] = val.Interface().(string) - } - return lv -} - -func fillLabel(b *testing.B, rand *rand.Rand, items, keys int, lvs *[][]string, _ *Metric) { - b.Helper() - for i := 0; i < items; i++ { - (*lvs)[i] = newRandLabels(b, rand, keys) - } -} - -func getDatum(b *testing.B, items int, lvs *[][]string, m *Metric) { - b.Helper() - for j := 0; j < items; j++ { - lv := (*lvs)[j] - m.GetDatum(lv...) - } -} - -type metricBench struct { - name string - setup func(b *testing.B, rand *rand.Rand, items, keys int, lvs *[][]string, m *Metric) - b func(b *testing.B, items int, lv *[][]string, m *Metric) -} - -func BenchmarkMetric(b *testing.B) { - maxKeys := 4 - benches := []metricBench{ - { - name: "GetDatum", - setup: fillLabel, - b: getDatum, - }, - } - rand := rand.New(rand.NewSource(99)) - for _, bench := range benches { - bench := bench - for _, parallel := range []bool{false, true} { - parallel := parallel - parallelStr := "" - if parallel { - parallelStr = "Parallel" - } - - for i := 1; i <= maxLabelsLog2; i++ { - items := int(math.Pow(2, float64(i))) - lv := newRandLabels(b, rand, maxKeys) - b.Run(fmt.Sprintf("%s%s-%d", bench.name, parallelStr, items), func(b *testing.B) { - m := NewMetric("test", "prog", Counter, Int, lv...) 
- lvs := make([][]string, items) - if bench.setup != nil { - bench.setup(b, rand, items, maxKeys, &lvs, m) - } - b.ResetTimer() - if parallel { - b.RunParallel(func(pb *testing.PB) { - for pb.Next() { - bench.b(b, items, &lvs, m) - } - }) - } else { - for n := 0; n < b.N; n++ { - bench.b(b, items, &lvs, m) - } - } - }) - } - } - } -} diff --git a/inputs/mtail/internal/metrics/store_test.go b/inputs/mtail/internal/metrics/store_test.go deleted file mode 100644 index 8911c4eb..00000000 --- a/inputs/mtail/internal/metrics/store_test.go +++ /dev/null @@ -1,195 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package metrics - -import ( - "log" - "strconv" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestMatchingKind(t *testing.T) { - s := NewStore() - m1 := NewMetric("foo", "prog", Counter, Int) - err := s.Add(m1) - testutil.FatalIfErr(t, err) - m2 := NewMetric("foo", "prog1", Gauge, Int) - err = s.Add(m2) - if err == nil { - t.Fatal("should be err") - } -} - -func TestDuplicateMetric(t *testing.T) { - expectedMetrics := 0 - s := NewStore() - _ = s.Add(NewMetric("foo", "prog", Counter, Int, "user", "host")) - _ = s.Add(NewMetric("foo", "prog", Counter, Int)) - expectedMetrics++ - if len(s.Metrics["foo"]) != expectedMetrics { - t.Fatalf("should not add duplicate metric. Store: %v", s) - } - - _ = s.Add(NewMetric("foo", "prog", Counter, Float)) - log.Println("Store: ", s) - expectedMetrics++ - if len(s.Metrics["foo"]) != expectedMetrics { - t.Fatalf("should add metric of a different type: %v", s) - } - - _ = s.Add(NewMetric("foo", "prog", Counter, Int, "user", "host", "zone", "domain")) - log.Printf("Store: %v", s) - if len(s.Metrics["foo"]) != expectedMetrics { - t.Fatalf("should not add duplicate metric, but replace the old one. Store: %v", s) - } - - _ = s.Add(NewMetric("foo", "prog1", Counter, Int)) - log.Printf("Store: %v", s) - expectedMetrics++ - if len(s.Metrics["foo"]) != expectedMetrics { - t.Fatalf("should add metric with a different prog: %v", s) - } - - _ = s.Add(NewMetric("foo", "prog1", Counter, Float)) - log.Printf("Store: %v", s) - expectedMetrics++ - if len(s.Metrics["foo"]) != expectedMetrics { - t.Fatalf("should add metric of a different type: %v", s) - } -} - -/* -A program can add a metric with the same name and - - of different type. - Prometheus behavior in this case is undefined. 
- @see https://github.com/google/mtail/issues/130 -*/ -func TestAddMetricDifferentType(t *testing.T) { - expected := 2 - s := NewStore() - err := s.Add(NewMetric("foo", "prog", Counter, Int)) - testutil.FatalIfErr(t, err) - // Duplicate metric of different type from *the same program - err = s.Add(NewMetric("foo", "prog", Counter, Float)) - testutil.FatalIfErr(t, err) - if len(s.Metrics["foo"]) != expected { - t.Fatalf("should have %d metrics of different Type: %v", expected, s.Metrics) - } - - // Duplicate metric of different type from a different program - err = s.Add(NewMetric("foo", "prog1", Counter, Float)) - expected++ - testutil.FatalIfErr(t, err) - if len(s.Metrics["foo"]) != expected { - t.Fatalf("should have %d metrics of different Type: %v", expected, s.Metrics) - } -} - -func TestExpireOldDatum(t *testing.T) { - s := NewStore() - m := NewMetric("foo", "prog", Counter, Int, "a", "b", "c") - testutil.FatalIfErr(t, s.Add(m)) - d, err := m.GetDatum("1", "2", "3") - if err != nil { - t.Error(err) - } - datum.SetInt(d, 1, time.Now().Add(-time.Hour)) - lv := m.FindLabelValueOrNil([]string{"1", "2", "3"}) - if lv == nil { - t.Fatal("couldn't find lv") - } - lv.Expiry = time.Minute - d, err = m.GetDatum("4", "5", "6") - if err != nil { - t.Error(err) - } - datum.SetInt(d, 1, time.Now().Add(-time.Hour)) - lv = m.FindLabelValueOrNil([]string{"4", "5", "6"}) - if lv == nil { - t.Errorf("couldn't find lv") - } - - testutil.FatalIfErr(t, s.Gc()) - lv = m.FindLabelValueOrNil([]string{"1", "2", "3"}) - if lv != nil { - t.Errorf("lv not expired: %#v", lv) - t.Logf("Store: %#v", s) - } - lv = m.FindLabelValueOrNil([]string{"4", "5", "6"}) - if lv == nil { - t.Errorf("lv expired") - t.Logf("Store: %#v", s) - } -} - -func TestExpireOversizeDatum(t *testing.T) { - s := NewStore() - m := NewMetric("foo", "prog", Counter, Int, "foo") - m.Limit = 1 - testutil.FatalIfErr(t, s.Add(m)) - - _, err := m.GetDatum("a") - testutil.FatalIfErr(t, err) - testutil.FatalIfErr(t, s.Gc()) - - _, err = m.GetDatum("b") - testutil.FatalIfErr(t, err) - testutil.FatalIfErr(t, s.Gc()) - - _, err = m.GetDatum("c") - testutil.FatalIfErr(t, err) - testutil.FatalIfErr(t, s.Gc()) - - if len(m.LabelValues) > 2 { - t.Errorf("Expected 2 labelvalues got %#v", m.LabelValues) - } - if x := m.FindLabelValueOrNil([]string{"a"}); x != nil { - t.Errorf("found label a which is unexpected: %#v", x) - } -} - -func TestExpireManyMetrics(t *testing.T) { - s := NewStore() - m := NewMetric("foo", "prog", Counter, Int, "id") - testutil.FatalIfErr(t, s.Add(m)) - d, err := m.GetDatum("0") - if err != nil { - t.Error(err) - } - datum.SetInt(d, 1, time.Now().Add(-time.Hour)) - lv := m.FindLabelValueOrNil([]string{"0"}) - if lv == nil { - t.Fatal("couldn't find lv") - } - - for i := 1; i < 10; i++ { - d, err := m.GetDatum(strconv.Itoa(i)) - if err != nil { - t.Error(err) - } - datum.SetInt(d, 1, time.Now().Add(-time.Hour)) - lv = m.FindLabelValueOrNil([]string{strconv.Itoa(i)}) - if lv == nil { - t.Fatal("couldn't find lv") - } - lv.Expiry = time.Minute - } - - testutil.FatalIfErr(t, s.Gc()) - lv = m.FindLabelValueOrNil([]string{"8"}) - if lv != nil { - t.Errorf("lv not expired: %#v", lv) - t.Logf("Store: %#v", s) - } - lv = m.FindLabelValueOrNil([]string{"0"}) - if lv == nil { - t.Errorf("lv expired") - t.Logf("Store: %#v", s) - } -} diff --git a/inputs/mtail/internal/metrics/type.go b/inputs/mtail/internal/metrics/type.go index f767fa97..56a85a78 100644 --- a/inputs/mtail/internal/metrics/type.go +++ b/inputs/mtail/internal/metrics/type.go @@ 
-39,6 +39,6 @@ func (t Type) String() string { } // Generate implements the quick.Generator interface for Type. -func (Type) Generate(rand *rand.Rand, size int) reflect.Value { +func (Type) Generate(rand *rand.Rand, _ int) reflect.Value { return reflect.ValueOf(Type(rand.Intn(int(endType)))) } diff --git a/inputs/mtail/internal/mtail/basic_tail_integration_test.go b/inputs/mtail/internal/mtail/basic_tail_integration_test.go deleted file mode 100644 index 1162faf5..00000000 --- a/inputs/mtail/internal/mtail/basic_tail_integration_test.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package mtail_test - -import ( - "fmt" - "os" - "path/filepath" - "sync" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestBasicTail(t *testing.T) { - testutil.SkipIfShort(t) - if testing.Verbose() { - testutil.SetFlag(t, "vmodule", "tail=2,log_watcher=2") - } - logDir := testutil.TestTempDir(t) - - m, stopM := mtail.TestStartServer(t, 1, mtail.LogPathPatterns(logDir+"/*"), mtail.ProgramPath("../../examples/linecount.mtail")) - defer stopM() - - logFile := filepath.Join(logDir, "log") - - lineCountCheck := m.ExpectMapExpvarDeltaWithDeadline("log_lines_total", logFile, 3) - logCountCheck := m.ExpectExpvarDeltaWithDeadline("log_count", 1) - - f := testutil.TestOpenFile(t, logFile) - defer f.Close() - m.PollWatched(1) // Force sync to EOF - - for i := 1; i <= 3; i++ { - testutil.WriteString(t, f, fmt.Sprintf("%d\n", i)) - } - m.PollWatched(1) // Expect to read 3 lines here. - - var wg sync.WaitGroup - wg.Add(2) - go func() { - defer wg.Done() - lineCountCheck() - }() - go func() { - defer wg.Done() - logCountCheck() - }() - wg.Wait() -} - -func TestNewLogDoesNotMatchIsIgnored(t *testing.T) { - testutil.SkipIfShort(t) - workdir := testutil.TestTempDir(t) - - // Start mtail - logFilepath := filepath.Join(workdir, "log") - m, stopM := mtail.TestStartServer(t, 0, mtail.LogPathPatterns(logFilepath)) - defer stopM() - - logCountCheck := m.ExpectExpvarDeltaWithDeadline("log_count", 0) - - // touch log file - newLogFilepath := filepath.Join(workdir, "log1") - - logFile, err := os.Create(newLogFilepath) - testutil.FatalIfErr(t, err) - defer logFile.Close() - m.PollWatched(0) // No streams so don't wait for any. - - logCountCheck() -} diff --git a/inputs/mtail/internal/mtail/buildinfo.go b/inputs/mtail/internal/mtail/buildinfo.go index e06b4a5c..eb5e9f1d 100644 --- a/inputs/mtail/internal/mtail/buildinfo.go +++ b/inputs/mtail/internal/mtail/buildinfo.go @@ -10,13 +10,16 @@ import ( // BuildInfo records the compile-time information for use when reporting the mtail version. type BuildInfo struct { - Version string + Branch string + Version string + Revision string } func (b BuildInfo) String() string { return fmt.Sprintf( - "mtail version %s go version %s go arch %s go os %s", + "mtail version %s git revision %s go version %s go arch %s go os %s", b.Version, + b.Revision, runtime.Version(), runtime.GOARCH, runtime.GOOS, diff --git a/inputs/mtail/internal/mtail/compile_only_integration_test.go b/inputs/mtail/internal/mtail/compile_only_integration_test.go deleted file mode 100644 index 3d327436..00000000 --- a/inputs/mtail/internal/mtail/compile_only_integration_test.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
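The BuildInfo change earlier in this diff adds Branch and Revision fields and folds the revision into the version banner. A small sketch of populating it; the literal values are placeholders (in practice they would be injected at build time, e.g. via -ldflags), and the mtail package import is assumed.

```go
bi := mtail.BuildInfo{
	Branch:   "main",    // placeholder
	Version:  "v1.0.0",  // placeholder
	Revision: "abc1234", // placeholder
}
// String() now reports the revision too, e.g.:
// "mtail version v1.0.0 git revision abc1234 go version go1.21 go arch amd64 go os linux"
fmt.Println(bi.String())
```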
- -package mtail_test - -import ( - "context" - "os" - "path/filepath" - "strings" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/metrics" - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestBadProgramFailsCompilation(t *testing.T) { - testutil.SkipIfShort(t) - progDir := testutil.TestTempDir(t) - - err := os.WriteFile(filepath.Join(progDir, "bad.mtail"), []byte("asdfasdf\n"), 0o666) - testutil.FatalIfErr(t, err) - - ctx := context.Background() - // Compile-only fails program compilation at server start, not after it's running. - _, err = mtail.New(ctx, metrics.NewStore(), mtail.ProgramPath(progDir), mtail.CompileOnly) - if err == nil { - t.Error("expected error from mtail") - } - if !strings.Contains(err.Error(), "compile failed") { - t.Error("compile failed not reported") - } -} diff --git a/inputs/mtail/internal/mtail/examples_integration_test.go b/inputs/mtail/internal/mtail/examples_integration_test.go deleted file mode 100644 index 4b065737..00000000 --- a/inputs/mtail/internal/mtail/examples_integration_test.go +++ /dev/null @@ -1,187 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package mtail_test - -import ( - "context" - "fmt" - "io" - "os" - "path/filepath" - "sync" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/metrics" - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/mtail/golden" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" - "flashcat.cloud/categraf/inputs/mtail/internal/waker" -) - -const exampleTimeout = 10 * time.Second - -var exampleProgramTests = []struct { - programfile string // Example program file. - logfile string // Sample log input. - goldenfile string // Expected metrics after processing. 
-}{ - { - "examples/rsyncd.mtail", - "testdata/rsyncd.log", - "testdata/rsyncd.golden", - }, - { - "examples/sftp.mtail", - "testdata/sftp_chroot.log", - "testdata/sftp_chroot.golden", - }, - { - "examples/dhcpd.mtail", - "testdata/anonymised_dhcpd_log", - "testdata/anonymised_dhcpd_log.golden", - }, - { - "examples/ntpd.mtail", - "testdata/ntp4", - "testdata/ntp4.golden", - }, - { - "examples/ntpd_peerstats.mtail", - "testdata/xntp3_peerstats", - "testdata/xntp3_peerstats.golden", - }, - { - "examples/apache_combined.mtail", - "testdata/apache-combined.log", - "testdata/apache-combined.golden", - }, - { - "examples/apache_common.mtail", - "testdata/apache-common.log", - "testdata/apache-common.golden", - }, - { - "examples/vsftpd.mtail", - "testdata/vsftpd_log", - "testdata/vsftpd_log.golden", - }, - { - "examples/vsftpd.mtail", - "testdata/vsftpd_xferlog", - "testdata/vsftpd_xferlog.golden", - }, - { - "examples/lighttpd.mtail", - "testdata/lighttpd_access.log", - "testdata/lighttpd_accesslog.golden", - }, - { - "examples/mysql_slowqueries.mtail", - "testdata/mysql_slowqueries.log", - "testdata/mysql_slowqueries.golden", - }, -} - -func TestExamplePrograms(t *testing.T) { - testutil.SkipIfShort(t) - for _, tc := range exampleProgramTests { - tc := tc - t.Run(fmt.Sprintf("%s on %s", tc.programfile, tc.logfile), - testutil.TimeoutTest(exampleTimeout, func(t *testing.T) { //nolint:thelper - ctx, cancel := context.WithCancel(context.Background()) - waker, _ := waker.NewTest(ctx, 0) // oneshot means we should never need to wake the stream - store := metrics.NewStore() - programFile := filepath.Join("../..", tc.programfile) - mtail, err := mtail.New(ctx, store, mtail.ProgramPath(programFile), mtail.LogPathPatterns(tc.logfile), mtail.OneShot, mtail.OmitMetricSource, mtail.DumpAstTypes, mtail.DumpBytecode, mtail.LogPatternPollWaker(waker), mtail.LogstreamPollWaker(waker)) - testutil.FatalIfErr(t, err) - - var wg sync.WaitGroup - wg.Add(1) - go func() { - defer wg.Done() - testutil.FatalIfErr(t, mtail.Run()) - }() - // Oneshot mode means we can wait for shutdown before cancelling. - wg.Wait() - cancel() - - g, err := os.Open(tc.goldenfile) - testutil.FatalIfErr(t, err) - defer g.Close() - - goldenStore := golden.ReadTestData(g, tc.programfile) - - var storeList metrics.MetricSlice - store.Range(func(m *metrics.Metric) error { - storeList = append(storeList, m) - return nil - }) - - testutil.ExpectNoDiff(t, goldenStore, storeList, testutil.SortSlices(metrics.Less), testutil.IgnoreUnexported(metrics.Metric{}, sync.RWMutex{}, datum.String{})) - })) - } -} - -// This test only compiles examples, but has coverage over all examples -// provided. This ensures we ship at least syntactically correct examples. 
-func TestCompileExamplePrograms(t *testing.T) { - testutil.SkipIfShort(t) - matches, err := filepath.Glob("../../examples/*.mtail") - testutil.FatalIfErr(t, err) - for _, tc := range matches { - tc := tc - name := filepath.Base(tc) - t.Run(name, func(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - s := metrics.NewStore() - mtail, err := mtail.New(ctx, s, mtail.ProgramPath(tc), mtail.CompileOnly, mtail.OmitMetricSource, mtail.DumpAstTypes, mtail.DumpBytecode) - testutil.FatalIfErr(t, err) - // Ensure that run shuts down for CompileOnly - testutil.FatalIfErr(t, mtail.Run()) - cancel() - }) - } -} - -func BenchmarkProgram(b *testing.B) { - for _, bm := range exampleProgramTests { - bm := bm - b.Run(fmt.Sprintf("%s on %s", bm.programfile, bm.logfile), func(b *testing.B) { - b.ReportAllocs() - logDir := testutil.TestTempDir(b) - logFile := filepath.Join(logDir, "test.log") - log := testutil.TestOpenFile(b, logFile) - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, 1) - store := metrics.NewStore() - programFile := filepath.Join("../..", bm.programfile) - mtail, err := mtail.New(ctx, store, mtail.ProgramPath(programFile), mtail.LogPathPatterns(log.Name()), mtail.LogstreamPollWaker(waker)) - testutil.FatalIfErr(b, err) - - var wg sync.WaitGroup - wg.Add(1) - go func() { - defer wg.Done() - testutil.FatalIfErr(b, mtail.Run()) - }() - - var total int64 - b.ResetTimer() - for i := 0; i < b.N; i++ { - l, err := os.Open(bm.logfile) - testutil.FatalIfErr(b, err) - count, err := io.Copy(log, l) - testutil.FatalIfErr(b, err) - total += count - awaken(1) - } - cancel() - wg.Wait() - b.StopTimer() - b.SetBytes(total) - }) - } -} diff --git a/inputs/mtail/internal/mtail/examples_integration_unix_test.go b/inputs/mtail/internal/mtail/examples_integration_unix_test.go deleted file mode 100644 index 7da8e8f1..00000000 --- a/inputs/mtail/internal/mtail/examples_integration_unix_test.go +++ /dev/null @@ -1,196 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -//go:build unix -// +build unix - -package mtail_test - -import ( - "context" - "errors" - "fmt" - "io" - "log" - "net" - "os" - "path/filepath" - "sync" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/metrics" - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" - "flashcat.cloud/categraf/inputs/mtail/internal/waker" - "golang.org/x/sys/unix" -) - -// TestFilePipeStreamComparison is a unix-specific test since unix.Mkfifo is not defined on Windows. -// Two mtails both alike in dignity. 
-func TestFilePipeStreamComparison(t *testing.T) { - testutil.SkipIfShort(t) - - for _, tc := range exampleProgramTests { - tc := tc - t.Run(fmt.Sprintf("%s on %s", tc.programfile, tc.logfile), - testutil.TimeoutTest(exampleTimeout, func(t *testing.T) { //nolint:thelper - ctx, cancel := context.WithCancel(context.Background()) - waker := waker.NewTestAlways() - fileStore, pipeStore := metrics.NewStore(), metrics.NewStore() - programFile := filepath.Join("../..", tc.programfile) - - // Set up the pipe - tmpDir := testutil.TestTempDir(t) - - pipeName := filepath.Join(tmpDir, filepath.Base(tc.logfile)) - testutil.FatalIfErr(t, unix.Mkfifo(pipeName, 0o600)) - - var wg sync.WaitGroup - wg.Add(3) - go func() { - defer wg.Done() - source, err := os.OpenFile(tc.logfile, os.O_RDONLY, 0) - testutil.FatalIfErr(t, err) - // not NONBLOCK to wait for pipeMtail to start reading the pipe - pipe, err := os.OpenFile(pipeName, os.O_WRONLY, os.ModeNamedPipe) - testutil.FatalIfErr(t, err) - n, err := io.Copy(pipe, source) - testutil.FatalIfErr(t, err) - log.Printf("Copied %d bytes into pipe", n) - source.Close() - pipe.Close() - }() - - go func() { - defer wg.Done() - fileMtail, err := mtail.New(ctx, fileStore, mtail.ProgramPath(programFile), mtail.LogPathPatterns(tc.logfile), mtail.OneShot, mtail.OmitMetricSource, mtail.LogPatternPollWaker(waker), mtail.LogstreamPollWaker(waker)) - if err != nil { - t.Error(err) - } - if err := fileMtail.Run(); err != nil { - t.Error(err) - } - }() - pipeMtail, err := mtail.New(ctx, pipeStore, mtail.ProgramPath(programFile), mtail.LogPathPatterns(pipeName), mtail.OneShot, mtail.OmitMetricSource, mtail.LogPatternPollWaker(waker), mtail.LogstreamPollWaker(waker)) - testutil.FatalIfErr(t, err) - go func() { - defer wg.Done() - if err := pipeMtail.Run(); err != nil { - t.Error(err) - } - }() - - // Oneshot mode means we can wait for shutdown before cancelling. - wg.Wait() - cancel() - - var pipeMetrics, fileMetrics metrics.MetricSlice - pipeStore.Range(func(m *metrics.Metric) error { - pipeMetrics = append(pipeMetrics, m) - return nil - }) - fileStore.Range(func(m *metrics.Metric) error { - fileMetrics = append(fileMetrics, m) - return nil - }) - - // Ignore the datum.Time field as well, as the results will be unstable otherwise. - testutil.ExpectNoDiff(t, fileMetrics, pipeMetrics, testutil.SortSlices(metrics.Less), testutil.IgnoreUnexported(metrics.Metric{}, sync.RWMutex{}, datum.String{}), testutil.IgnoreFields(datum.BaseDatum{}, "Time")) - })) - } -} - -// TestFileSocketStreamComparison is a unix-specific test currently because on Windows, the constructed URL will -// be of the form unix://C:\\path, and this will be interpreted as protocol unix on host C and port \\path. 
-func TestFileSocketStreamComparison(t *testing.T) { - testutil.SkipIfShort(t) - - for _, scheme := range []string{"unixgram", "unix"} { - scheme := scheme - for _, tc := range exampleProgramTests { - tc := tc - t.Run(fmt.Sprintf("%s on %s://%s", tc.programfile, scheme, tc.logfile), - testutil.TimeoutTest(exampleTimeout, func(t *testing.T) { //nolint:thelper - ctx, cancel := context.WithCancel(context.Background()) - waker := waker.NewTestAlways() - fileStore, sockStore := metrics.NewStore(), metrics.NewStore() - programFile := filepath.Join("../..", tc.programfile) - - // Set up the socket - tmpDir := testutil.TestTempDir(t) - - sockName := filepath.Join(tmpDir, filepath.Base(tc.logfile)) - - var wg sync.WaitGroup - wg.Add(3) - go func() { - defer wg.Done() - fileMtail, err := mtail.New(ctx, fileStore, mtail.ProgramPath(programFile), mtail.LogPathPatterns(tc.logfile), mtail.OneShot, mtail.OmitMetricSource, mtail.LogPatternPollWaker(waker), mtail.LogstreamPollWaker(waker)) - if err != nil { - t.Error(err) - } - if err := fileMtail.Run(); err != nil { - t.Error(err) - } - }() - sockMtail, err := mtail.New(ctx, sockStore, mtail.ProgramPath(programFile), mtail.LogPathPatterns(scheme+"://"+sockName), mtail.OneShot, mtail.OmitMetricSource, mtail.LogPatternPollWaker(waker), mtail.LogstreamPollWaker(waker)) - testutil.FatalIfErr(t, err) - go func() { - defer wg.Done() - if err := sockMtail.Run(); err != nil { - t.Error(err) - } - }() - - go func() { - defer wg.Done() - source, err := os.OpenFile(tc.logfile, os.O_RDONLY, 0) - testutil.FatalIfErr(t, err) - s, err := net.DialUnix(scheme, nil, &net.UnixAddr{Name: sockName, Net: scheme}) - testutil.FatalIfErr(t, err) - n, err := io.Copy(s, source) - testutil.FatalIfErr(t, err) - log.Printf("Copied %d bytes into socket", n) - if scheme == "unixgram" { - // Write zero bytes after Stop is called to signal that this is the "end of the stream". - for { - _, err = s.Write([]byte{}) - if err == nil { - log.Printf("Zero bytes written to socket to signal EOF") - break - } - var netErr net.Error - if errors.As(err, &netErr) && netErr.Timeout() { - log.Printf("Write timeout") - time.Sleep(1 * time.Second) - } else { - testutil.FatalIfErr(t, err) - } - } - } - source.Close() - s.Close() - }() - - // Oneshot mode means we can wait for shutdown before cancelling. - wg.Wait() - cancel() - - var sockMetrics, fileMetrics metrics.MetricSlice - sockStore.Range(func(m *metrics.Metric) error { - sockMetrics = append(sockMetrics, m) - return nil - }) - fileStore.Range(func(m *metrics.Metric) error { - fileMetrics = append(fileMetrics, m) - return nil - }) - - // Ignore the datum.Time field as well, as the results will be unstable otherwise. 
- testutil.ExpectNoDiff(t, fileMetrics, sockMetrics, testutil.SortSlices(metrics.Less), testutil.IgnoreUnexported(metrics.Metric{}, sync.RWMutex{}, datum.String{}), testutil.IgnoreFields(datum.BaseDatum{}, "Time")) - })) - } - } -} diff --git a/inputs/mtail/internal/mtail/golden/reader.go b/inputs/mtail/internal/mtail/golden/reader.go index d2c7a920..734b244d 100644 --- a/inputs/mtail/internal/mtail/golden/reader.go +++ b/inputs/mtail/internal/mtail/golden/reader.go @@ -25,9 +25,7 @@ func ReadTestData(file io.Reader, programfile string) metrics.MetricSlice { prog := filepath.Base(programfile) scanner := bufio.NewScanner(file) for scanner.Scan() { - // log.Printf("'%s'\n", scanner.Text()) match := varRe.FindStringSubmatch(scanner.Text()) - // log.Printf("len match: %d\n", len(match)) if len(match) == 0 { continue } @@ -35,7 +33,6 @@ func ReadTestData(file io.Reader, programfile string) metrics.MetricSlice { vals := make([]string, 0) if match[3] != "" { for _, pair := range strings.Split(match[3], ",") { - // log.Printf("pair: %s\n", pair) kv := strings.Split(pair, "=") keys = append(keys, kv[0]) if kv[1] != "" { @@ -60,7 +57,6 @@ func ReadTestData(file io.Reader, programfile string) metrics.MetricSlice { case "histogram": kind = metrics.Histogram } - // log.Printf("match[4]: %q", match[4]) typ := metrics.Int var ( ival int64 @@ -78,10 +74,8 @@ func ReadTestData(file io.Reader, programfile string) metrics.MetricSlice { typ = metrics.String } } - // log.Printf("type is %q", typ) } var timestamp time.Time - // log.Printf("match 5: %q\n", match[5]) if match[5] != "" { timestamp, err = time.Parse(time.RFC3339, match[5]) if err != nil { @@ -93,13 +87,12 @@ func ReadTestData(file io.Reader, programfile string) metrics.MetricSlice { } } } - // log.Printf("timestamp is %s which is %v in unix", timestamp.Format(time.RFC3339), timestamp.Unix()) // Now we have enough information to get or create a metric. m := store.FindMetricOrNil(match[2], prog) if m != nil { if m.Type != typ { - // glog.V(2).Infof("The type of the fetched metric is not %s: %s", typ, m) + log.Printf("The type of the fetched metric is not %s: %s", typ, m) continue } } else { @@ -107,7 +100,7 @@ func ReadTestData(file io.Reader, programfile string) metrics.MetricSlice { if kind == metrics.Counter && len(keys) == 0 { d, err := m.GetDatum() if err != nil { - log.Println(err) + log.Fatal(err) } // Initialize to zero at the zero time. switch typ { @@ -117,7 +110,6 @@ func ReadTestData(file io.Reader, programfile string) metrics.MetricSlice { datum.SetFloat(d, 0, time.Unix(0, 0)) } } - // glog.V(2).Infof("making a new %v\n", m) if err := store.Add(m); err != nil { log.Printf("Failed to add metric %v to store: %s", m, err) } @@ -126,24 +118,19 @@ func ReadTestData(file io.Reader, programfile string) metrics.MetricSlice { if match[4] != "" { d, err := m.GetDatum(vals...) 
if err != nil { - // glog.V(2).Infof("Failed to get datum: %s", err) + log.Printf("Failed to get datum: %s", err) continue } - // glog.V(2).Infof("got datum %v", d) switch typ { case metrics.Int: - // glog.V(2).Infof("setting %v with vals %v to %v at %v\n", d, vals, ival, timestamp) datum.SetInt(d, ival, timestamp) case metrics.Float: - // glog.V(2).Infof("setting %v with vals %v to %v at %v\n", d, vals, fval, timestamp) datum.SetFloat(d, fval, timestamp) case metrics.String: - // glog.V(2).Infof("setting %v with vals %v to %v at %v\n", d, vals, sval, timestamp) datum.SetString(d, sval, timestamp) } } - // glog.V(2).Infof("Metric is now %s", m) } storeList := make([]*metrics.Metric, 0) diff --git a/inputs/mtail/internal/mtail/golden/reader_test.go b/inputs/mtail/internal/mtail/golden/reader_test.go deleted file mode 100644 index a6dc7831..00000000 --- a/inputs/mtail/internal/mtail/golden/reader_test.go +++ /dev/null @@ -1,121 +0,0 @@ -package golden - -import ( - "os" - "sync" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/metrics" - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -var expectedMetrics = metrics.MetricSlice{ - { - Name: "bytes_total", - Program: "reader_test", - Kind: metrics.Counter, - Keys: []string{"operation"}, - LabelValues: []*metrics.LabelValue{ - { - Labels: []string{"sent"}, - Value: datum.MakeInt(62793673, time.Date(2011, 2, 23, 5, 54, 10, 0, time.UTC)), - }, - { - Labels: []string{"received"}, - Value: datum.MakeInt(975017, time.Date(2011, 2, 23, 5, 54, 10, 0, time.UTC)), - }, - }, - }, - { - Name: "connections_total", - Program: "reader_test", - Kind: metrics.Counter, - Keys: []string{}, - LabelValues: []*metrics.LabelValue{ - { - Value: datum.MakeInt(52, time.Date(2011, 2, 22, 21, 54, 13, 0, time.UTC)), - }, - }, - }, - { - Name: "connection-time_total", - Program: "reader_test", - Kind: metrics.Counter, - Keys: []string{}, - LabelValues: []*metrics.LabelValue{ - { - Value: datum.MakeInt(1181011, time.Date(2011, 2, 23, 5, 54, 10, 0, time.UTC)), - }, - }, - }, - { - Name: "transfers_total", - Program: "reader_test", - Kind: metrics.Counter, - Keys: []string{"operation", "module"}, - LabelValues: []*metrics.LabelValue{ - { - Labels: []string{"send", "module"}, - Value: datum.MakeInt(2, time.Date(2011, 2, 23, 5, 50, 32, 0, time.UTC)), - }, - { - Labels: []string{"send", "repo"}, - Value: datum.MakeInt(25, time.Date(2011, 2, 23, 5, 51, 14, 0, time.UTC)), - }, - }, - }, - { - Name: "foo", - Program: "reader_test", - Kind: metrics.Gauge, - Keys: []string{"label"}, - LabelValues: []*metrics.LabelValue{}, - }, - { - Name: "bar", - Program: "reader_test", - Kind: metrics.Counter, - Keys: []string{}, - LabelValues: []*metrics.LabelValue{ - { - Value: datum.MakeInt(0, time.Unix(0, 0)), - }, - }, - }, - { - Name: "floaty", - Program: "reader_test", - Kind: metrics.Gauge, - Type: metrics.Float, - Keys: []string{}, - LabelValues: []*metrics.LabelValue{ - { - Labels: []string{}, - Value: datum.MakeFloat(37.1, time.Date(2017, 6, 15, 18, 9, 37, 0, time.UTC)), - }, - }, - }, - { - Name: "stringy", - Program: "reader_test", - Kind: metrics.Text, - Type: metrics.String, - Keys: []string{}, - LabelValues: []*metrics.LabelValue{ - { - Labels: []string{}, - Value: datum.MakeString("hi", time.Date(2018, 6, 16, 18, 4, 0, 0, time.UTC)), - }, - }, - }, -} - -func TestReadTestData(t *testing.T) { - f, err := os.Open("reader_test.golden") - testutil.FatalIfErr(t, err) - defer f.Close() - 
readMetrics := ReadTestData(f, "reader_test") - testutil.ExpectNoDiff(t, expectedMetrics, readMetrics, testutil.SortSlices(metrics.Less), testutil.IgnoreUnexported(metrics.Metric{}, sync.RWMutex{}, datum.String{})) -} diff --git a/inputs/mtail/internal/mtail/log_deletion_integration_unix_test.go b/inputs/mtail/internal/mtail/log_deletion_integration_unix_test.go deleted file mode 100644 index b5850350..00000000 --- a/inputs/mtail/internal/mtail/log_deletion_integration_unix_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2020 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -//go:build unix -// +build unix - -package mtail_test - -import ( - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" - "log" - "os" - "path/filepath" - "testing" -) - -// TestLogDeletion is a unix-only test because on Windows files with open read handles cannot be deleted. -func TestLogDeletion(t *testing.T) { - testutil.SkipIfShort(t) - workdir := testutil.TestTempDir(t) - - // touch log file - logFilepath := filepath.Join(workdir, "log") - logFile := testutil.TestOpenFile(t, logFilepath) - defer logFile.Close() - - m, stopM := mtail.TestStartServer(t, 1, mtail.LogPathPatterns(logFilepath)) - defer stopM() - - logCloseCheck := m.ExpectMapExpvarDeltaWithDeadline("log_closes_total", logFilepath, 1) - logCountCheck := m.ExpectExpvarDeltaWithDeadline("log_count", -1) - - m.PollWatched(1) // Force sync to EOF - log.Println("remove") - testutil.FatalIfErr(t, os.Remove(logFilepath)) - - m.PollWatched(0) // one pass to stop - logCloseCheck() - m.PollWatched(0) // one pass to remove completed stream - logCountCheck() -} diff --git a/inputs/mtail/internal/mtail/log_glob_integration_test.go b/inputs/mtail/internal/mtail/log_glob_integration_test.go deleted file mode 100644 index d0d96baa..00000000 --- a/inputs/mtail/internal/mtail/log_glob_integration_test.go +++ /dev/null @@ -1,257 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
- -package mtail_test - -import ( - "expvar" - "log" - "os" - "path/filepath" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestGlobBeforeStart(t *testing.T) { - testutil.SkipIfShort(t) - - workdir := testutil.TestTempDir(t) - - globTests := []struct { - name string - expected bool - }{ - { - filepath.Join(workdir, "log1"), - true, - }, - { - filepath.Join(workdir, "log2"), - true, - }, - { - filepath.Join(workdir, "1log"), - false, - }, - } - var count int64 - for _, tt := range globTests { - log := testutil.TestOpenFile(t, tt.name) - if tt.expected { - count++ - } - testutil.WriteString(t, log, "\n") - log.Close() - } - m, stopM := mtail.TestStartServer(t, 0, mtail.LogPathPatterns(filepath.Join(workdir, "log*"))) - stopM() - - if r := m.GetExpvar("log_count"); r.(*expvar.Int).Value() != count { - t.Errorf("Expecting log count of %d, received %d", count, r) - } -} - -func TestGlobAfterStart(t *testing.T) { - testutil.SkipIfShort(t) - - workdir := testutil.TestTempDir(t) - - globTests := []struct { - name string - expected bool - }{ - { - filepath.Join(workdir, "log1"), - true, - }, - { - filepath.Join(workdir, "log2"), - true, - }, - { - filepath.Join(workdir, "1log"), - false, - }, - } - m, stopM := mtail.TestStartServer(t, 0, mtail.LogPathPatterns(filepath.Join(workdir, "log*"))) - defer stopM() - - m.PollWatched(0) // Force sync to EOF - - var count int64 - for _, tt := range globTests { - if tt.expected { - count++ - } - } - logCountCheck := m.ExpectExpvarDeltaWithDeadline("log_count", count) - for _, tt := range globTests { - log := testutil.TestOpenFile(t, tt.name) - defer log.Close() - m.PollWatched(0) // Force sync to EOF - } - // m.PollWatched(2) - logCountCheck() -} - -func TestGlobIgnoreFolder(t *testing.T) { - testutil.SkipIfShort(t) - - workdir := testutil.TestTempDir(t) - - globTests := []struct { - name string - isFolder bool - expected bool - }{ - { - filepath.Join(workdir, "log1"), - false, - true, - }, - { - filepath.Join(workdir, "logarchive"), - true, - false, - }, - { - filepath.Join(workdir, "log2.gz"), - false, - false, - }, - } - var count int64 - for _, tt := range globTests { - var err error - var log *os.File - - if tt.isFolder { - err = os.Mkdir(tt.name, 0o700) - testutil.FatalIfErr(t, err) - continue - } else { - log, err = os.Create(tt.name) - } - - if !tt.isFolder && tt.expected { - count++ - } - defer log.Close() - testutil.FatalIfErr(t, err) - testutil.WriteString(t, log, "\n") - } - - m, stopM := mtail.TestStartServer(t, 0, mtail.LogPathPatterns(filepath.Join(workdir, "log*")), mtail.IgnoreRegexPattern("\\.gz")) - - stopM() - - if r := m.GetExpvar("log_count"); r.(*expvar.Int).Value() != count { - t.Errorf("Expecting log count of %d, received %v", count, r) - } -} - -func TestFilenameRegexIgnore(t *testing.T) { - testutil.SkipIfShort(t) - - workdir := testutil.TestTempDir(t) - - globTests := []struct { - name string - expected bool - }{ - { - filepath.Join(workdir, "log1"), - true, - }, - { - filepath.Join(workdir, "log1.gz"), - false, - }, - { - filepath.Join(workdir, "log2gz"), - true, - }, - } - var count int64 - for _, tt := range globTests { - log, err := os.Create(tt.name) - testutil.FatalIfErr(t, err) - defer log.Close() - if tt.expected { - count++ - } - testutil.WriteString(t, log, "\n") - } - - m, stopM := mtail.TestStartServer(t, 0, mtail.LogPathPatterns(filepath.Join(workdir, "log*")), mtail.IgnoreRegexPattern("\\.gz")) - - stopM() - - if r := 
m.GetExpvar("log_count"); r.(*expvar.Int).Value() != count { - t.Errorf("Log count not matching, expected: %d received: %v", count, r) - } -} - -func TestGlobRelativeAfterStart(t *testing.T) { - testutil.SkipIfShort(t) - tmpDir := testutil.TestTempDir(t) - - logDir := filepath.Join(tmpDir, "logs") - progDir := filepath.Join(tmpDir, "progs") - err := os.Mkdir(logDir, 0o700) - testutil.FatalIfErr(t, err) - err = os.Mkdir(progDir, 0o700) - testutil.FatalIfErr(t, err) - - // Move to logdir to make relative paths - testutil.Chdir(t, logDir) - - m, stopM := mtail.TestStartServer(t, 1, mtail.ProgramPath(progDir), mtail.LogPathPatterns("log.*")) - defer stopM() - - { - logCountCheck := m.ExpectExpvarDeltaWithDeadline("log_count", 1) - - logFile := filepath.Join(logDir, "log.1.txt") - f := testutil.TestOpenFile(t, logFile) - defer f.Close() - - m.PollWatched(1) // Force sync to EOF - testutil.WriteString(t, f, "line 1\n") - m.PollWatched(1) - - logCountCheck() - } - - { - - logCountCheck := m.ExpectExpvarDeltaWithDeadline("log_count", 1) - - logFile := filepath.Join(logDir, "log.2.txt") - f := testutil.TestOpenFile(t, logFile) - defer f.Close() - - m.PollWatched(2) - testutil.WriteString(t, f, "line 1\n") - m.PollWatched(2) - - logCountCheck() - } - { - logCountCheck := m.ExpectExpvarDeltaWithDeadline("log_count", 0) - - logFile := filepath.Join(logDir, "log.2.txt") - f := testutil.TestOpenFile(t, logFile) - defer f.Close() - - m.PollWatched(2) - testutil.WriteString(t, f, "line 2\n") - m.PollWatched(2) - - logCountCheck() - } - - log.Printf("end") -} diff --git a/inputs/mtail/internal/mtail/log_rotation_integration_test.go b/inputs/mtail/internal/mtail/log_rotation_integration_test.go deleted file mode 100644 index 017f2876..00000000 --- a/inputs/mtail/internal/mtail/log_rotation_integration_test.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package mtail_test - -import ( - "fmt" - "log" - "os" - "path/filepath" - "sync" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestLogSoftLinkChange(t *testing.T) { - testutil.SkipIfShort(t) - - for _, tc := range []bool{false, true} { - tc := tc - name := "disabled" - if tc { - name = "enabled" - } - t.Run(fmt.Sprintf("race simulation %s", name), func(t *testing.T) { - workdir := testutil.TestTempDir(t) - - logFilepath := filepath.Join(workdir, "log") - - m, stopM := mtail.TestStartServer(t, 1, mtail.LogPathPatterns(logFilepath)) - defer stopM() - - logCountCheck := m.ExpectExpvarDeltaWithDeadline("log_count", 1) - logOpensTotalCheck := m.ExpectMapExpvarDeltaWithDeadline("log_opens_total", logFilepath, 2) - - trueLog1 := testutil.TestOpenFile(t, logFilepath+".true1") - defer trueLog1.Close() - - testutil.FatalIfErr(t, os.Symlink(logFilepath+".true1", logFilepath)) - log.Printf("symlinked") - m.PollWatched(1) - - inputLines := []string{"hi1", "hi2", "hi3"} - for _, x := range inputLines { - testutil.WriteString(t, trueLog1, x+"\n") - } - m.PollWatched(1) - - trueLog2 := testutil.TestOpenFile(t, logFilepath+".true2") - defer trueLog2.Close() - m.PollWatched(1) - logClosedCheck := m.ExpectMapExpvarDeltaWithDeadline("log_closes_total", logFilepath, 1) - logCompletedCheck := m.ExpectExpvarDeltaWithDeadline("log_count", -1) - testutil.FatalIfErr(t, os.Remove(logFilepath)) - if tc { - m.PollWatched(0) // simulate race condition with this poll. 
- logClosedCheck() // sync when filestream closes fd - m.PollWatched(0) // invoke the GC - logCompletedCheck() // sync to when the logstream is removed from tailer - } - testutil.FatalIfErr(t, os.Symlink(logFilepath+".true2", logFilepath)) - m.PollWatched(1) - - for _, x := range inputLines { - testutil.WriteString(t, trueLog2, x+"\n") - } - m.PollWatched(1) - - var wg sync.WaitGroup - wg.Add(2) - go func() { - defer wg.Done() - logCountCheck() - }() - go func() { - defer wg.Done() - logOpensTotalCheck() - }() - wg.Wait() - - _, err := os.Stat(logFilepath + ".true1") - testutil.FatalIfErr(t, err) - _, err = os.Stat(logFilepath + ".true2") - testutil.FatalIfErr(t, err) - }) - } -} diff --git a/inputs/mtail/internal/mtail/log_rotation_integration_unix_test.go b/inputs/mtail/internal/mtail/log_rotation_integration_unix_test.go deleted file mode 100644 index c01b33ca..00000000 --- a/inputs/mtail/internal/mtail/log_rotation_integration_unix_test.go +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -//go:build unix -// +build unix - -package mtail_test - -import ( - "fmt" - "log" - "os" - "path/filepath" - "sync" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -// TestLogRotation is a unix-specific test because on Windows, files cannot be removed -// or renamed while there is an open read handle on them. Instead, log rotation would -// have to be implemented by copying and then truncating the original file. That test -// case is already covered by TestLogTruncation. -func TestLogRotation(t *testing.T) { - testutil.SkipIfShort(t) - - for _, tc := range []bool{false, true} { - tc := tc - name := "disabled" - if tc { - name = "enabled" - } - t.Run(fmt.Sprintf("race simulation %s", name), func(t *testing.T) { - tmpDir := testutil.TestTempDir(t) - - logDir := filepath.Join(tmpDir, "logs") - progDir := filepath.Join(tmpDir, "progs") - err := os.Mkdir(logDir, 0o700) - testutil.FatalIfErr(t, err) - err = os.Mkdir(progDir, 0o700) - testutil.FatalIfErr(t, err) - - logFile := filepath.Join(logDir, "log") - - f := testutil.TestOpenFile(t, logFile) - defer f.Close() - - m, stopM := mtail.TestStartServer(t, 1, mtail.ProgramPath(progDir), mtail.LogPathPatterns(logDir+"/log")) - defer stopM() - - logOpensTotalCheck := m.ExpectMapExpvarDeltaWithDeadline("log_opens_total", logFile, 1) - logLinesTotalCheck := m.ExpectMapExpvarDeltaWithDeadline("log_lines_total", logFile, 3) - - testutil.WriteString(t, f, "line 1\n") - m.PollWatched(1) - - testutil.WriteString(t, f, "line 2\n") - m.PollWatched(1) - - logClosedCheck := m.ExpectMapExpvarDeltaWithDeadline("log_closes_total", logFile, 1) - logCompletedCheck := m.ExpectExpvarDeltaWithDeadline("log_count", -1) - log.Println("rename") - err = os.Rename(logFile, logFile+".1") - testutil.FatalIfErr(t, err) - if tc { - m.PollWatched(0) // simulate race condition with this poll. 
- logClosedCheck() // sync when filestream closes fd - m.PollWatched(0) // invoke the GC - logCompletedCheck() // sync to when the logstream is removed from tailer - } - log.Println("create") - f = testutil.TestOpenFile(t, logFile) - m.PollWatched(1) - testutil.WriteString(t, f, "line 1\n") - m.PollWatched(1) - - var wg sync.WaitGroup - wg.Add(2) - go func() { - defer wg.Done() - logLinesTotalCheck() - }() - go func() { - defer wg.Done() - - logOpensTotalCheck() - }() - wg.Wait() - }) - } -} diff --git a/inputs/mtail/internal/mtail/log_truncation_integration_test.go b/inputs/mtail/internal/mtail/log_truncation_integration_test.go deleted file mode 100644 index b197025f..00000000 --- a/inputs/mtail/internal/mtail/log_truncation_integration_test.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package mtail_test - -import ( - "log" - "os" - "path/filepath" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestLogTruncation(t *testing.T) { - testutil.SkipIfShort(t) - tmpDir := testutil.TestTempDir(t) - - logDir := filepath.Join(tmpDir, "logs") - progDir := filepath.Join(tmpDir, "progs") - testutil.FatalIfErr(t, os.Mkdir(logDir, 0o700)) - testutil.FatalIfErr(t, os.Mkdir(progDir, 0o700)) - - m, stopM := mtail.TestStartServer(t, 1, mtail.ProgramPath(progDir), mtail.LogPathPatterns(logDir+"/log")) - defer stopM() - - logCountCheck := m.ExpectExpvarDeltaWithDeadline("log_count", 1) - linesCountCheck := m.ExpectExpvarDeltaWithDeadline("lines_total", 2) - - logFile := filepath.Join(logDir, "log") - f := testutil.TestOpenFile(t, logFile) - defer f.Close() - m.PollWatched(1) - - testutil.WriteString(t, f, "line 1\n") - m.PollWatched(1) - // After the last barrier, the filestream may not race ahead of the test - // here, so we need to ensure that a whole filestream loop occurs and that - // the file offset advances for this test to succeed, hence the second - // barrier here. - m.PollWatched(1) - - err := f.Close() - testutil.FatalIfErr(t, err) - - log.Println("truncate") - f, err = os.OpenFile(logFile, os.O_TRUNC|os.O_WRONLY, 0o600) - testutil.FatalIfErr(t, err) - defer f.Close() - m.PollWatched(1) - - testutil.WriteString(t, f, "2\n") - m.PollWatched(1) - - linesCountCheck() - logCountCheck() -} diff --git a/inputs/mtail/internal/mtail/mtail.go b/inputs/mtail/internal/mtail/mtail.go index edc66db3..6b884156 100644 --- a/inputs/mtail/internal/mtail/mtail.go +++ b/inputs/mtail/internal/mtail/mtail.go @@ -5,29 +5,27 @@ package mtail import ( "context" - "errors" - "expvar" "log" "net" - "net/http" - "net/http/pprof" "sync" - "time" + + "github.com/prometheus/client_golang/prometheus" "flashcat.cloud/categraf/inputs/mtail/internal/exporter" "flashcat.cloud/categraf/inputs/mtail/internal/logline" "flashcat.cloud/categraf/inputs/mtail/internal/metrics" "flashcat.cloud/categraf/inputs/mtail/internal/runtime" "flashcat.cloud/categraf/inputs/mtail/internal/tailer" - "github.com/prometheus/client_golang/prometheus" - "github.com/prometheus/client_golang/prometheus/promhttp" ) // Server contains the state of the main mtail program. 
type Server struct { - ctx context.Context + ctx context.Context + cancel context.CancelFunc + + wg sync.WaitGroup // wait for main processes to shutdown + store *metrics.Store // Metrics storage - wg sync.WaitGroup // wait for main processes to shutdown tOpts []tailer.Option // options for constructing `t` t *tailer.Tailer // t manages log patterns and log streams, which sends lines to the VMs @@ -46,17 +44,16 @@ type Server struct { programPath string // path to programs to load oneShot bool // if set, mtail reads log files from the beginning, once, then exits - compileOnly bool // if set, mtail compiles programs then exits -} - -func (m *Server) Wait() { - m.wg.Wait() + compileOnly bool // if set, mtail compiles programs then exit } func (m *Server) GetRegistry() *prometheus.Registry { return m.reg } +// We can only copy the build info once to the version library. Protects tests from data races. +var buildInfoOnce sync.Once + // initRuntime constructs a new runtime and performs the initial load of program files in the program directory. func (m *Server) initRuntime() (err error) { m.r, err = runtime.New(m.lines, &m.wg, m.programPath, m.store, m.rOpts...) @@ -65,17 +62,11 @@ func (m *Server) initRuntime() (err error) { // initExporter sets up an Exporter for this Server. func (m *Server) initExporter() (err error) { - if m.oneShot { - // This is a hack to avoid a race in test, but assume that in oneshot - // mode we don't want to export anything. - return nil - } - m.e, err = exporter.New(m.ctx, &m.wg, m.store, m.eOpts...) + m.e, err = exporter.New(m.ctx, m.store, m.eOpts...) if err != nil { return err } m.reg.MustRegister(m.e) - return nil } @@ -85,71 +76,6 @@ func (m *Server) initTailer() (err error) { return } -// initHTTPServer begins the http server. -func (m *Server) initHTTPServer() error { - initDone := make(chan struct{}) - defer close(initDone) - - if m.listener == nil { - log.Println("no listen address configured, not starting http server") - return nil - } - - mux := http.NewServeMux() - mux.HandleFunc("/favicon.ico", FaviconHandler) - mux.Handle("/", m) - mux.Handle("/progz", http.HandlerFunc(m.r.ProgzHandler)) - mux.HandleFunc("/json", http.HandlerFunc(m.e.HandleJSON)) - mux.Handle("/metrics", promhttp.HandlerFor(m.reg, promhttp.HandlerOpts{})) - mux.HandleFunc("/graphite", http.HandlerFunc(m.e.HandleGraphite)) - mux.HandleFunc("/varz", http.HandlerFunc(m.e.HandleVarz)) - mux.Handle("/debug/vars", expvar.Handler()) - mux.HandleFunc("/debug/pprof/", pprof.Index) - mux.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline) - mux.HandleFunc("/debug/pprof/profile", pprof.Profile) - mux.HandleFunc("/debug/pprof/symbol", pprof.Symbol) - mux.HandleFunc("/debug/pprof/trace", pprof.Trace) - - srv := &http.Server{ - Handler: mux, - } - - var wg sync.WaitGroup - errc := make(chan error, 1) - - // This goroutine runs the http server. - wg.Add(1) - go func() { - defer wg.Done() - <-initDone - log.Printf("Listening on %s", m.listener.Addr()) - if err := srv.Serve(m.listener); err != nil && !errors.Is(err, http.ErrServerClosed) { - errc <- err - } - }() - - // This goroutine manages http server shutdown. - go func() { - <-initDone - select { - case err := <-errc: - log.Println(err) - case <-m.ctx.Done(): - log.Println("Shutdown requested.") - ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) - defer cancel() - srv.SetKeepAlivesEnabled(false) - if err := srv.Shutdown(ctx); err != nil { - log.Println(err) - } - } - // Wait for the Serve routine to exit. 
- wg.Wait() - }() - - return nil -} - // New creates a Server from the supplied Options. The Server is started by // the time New returns, it watches the LogPatterns for files, starts tailing // their changes and sends any new lines found to the virtual machines loaded @@ -159,27 +85,27 @@ func (m *Server) initHTTPServer() error { // block until quit, once TestServer.PollWatched is addressed. func New(ctx context.Context, store *metrics.Store, options ...Option) (*Server, error) { m := &Server{ - ctx: ctx, store: store, lines: make(chan *logline.LogLine), // Using a non-pedantic registry means we can be looser with metrics that // are not fully specified at startup. reg: prometheus.NewRegistry(), } - + m.ctx, m.cancel = context.WithCancel(ctx) + m.rOpts = append(m.rOpts, runtime.PrometheusRegisterer(m.reg)) if err := m.SetOption(options...); err != nil { return nil, err } if err := m.initExporter(); err != nil { return nil, err } + //nolint:contextcheck // TODO if err := m.initRuntime(); err != nil { return nil, err } if err := m.initTailer(); err != nil { return nil, err } - return m, nil } @@ -197,6 +123,7 @@ func (m *Server) SetOption(options ...Option) error { // TODO(jaq): remove this once the test server is able to trigger polls on the components. func (m *Server) Run() error { m.wg.Wait() + m.cancel() if m.compileOnly { log.Println("compile-only is set, exiting") return nil diff --git a/inputs/mtail/internal/mtail/multiple_levels_directory_integration_test.go b/inputs/mtail/internal/mtail/multiple_levels_directory_integration_test.go deleted file mode 100644 index 471d23ad..00000000 --- a/inputs/mtail/internal/mtail/multiple_levels_directory_integration_test.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package mtail_test - -import ( - "os" - "path/filepath" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestPollLogPathPatterns(t *testing.T) { - testutil.SkipIfShort(t) - tmpDir := testutil.TestTempDir(t) - - logDir := filepath.Join(tmpDir, "logs") - testutil.FatalIfErr(t, os.Mkdir(logDir, 0o700)) - testutil.Chdir(t, logDir) - - m, stopM := mtail.TestStartServer(t, 0, mtail.LogPathPatterns(logDir+"/files/*/log/*log")) - defer stopM() - - logCountCheck := m.ExpectExpvarDeltaWithDeadline("log_count", 1) - lineCountCheck := m.ExpectExpvarDeltaWithDeadline("lines_total", 1) - - logFile := filepath.Join(logDir, "files", "a", "log", "a.log") - testutil.FatalIfErr(t, os.MkdirAll(filepath.Dir(logFile), 0o700)) - - f := testutil.TestOpenFile(t, logFile) - defer f.Close() - m.PollWatched(1) - - logCountCheck() - - testutil.WriteString(t, f, "line 1\n") - m.PollWatched(1) - lineCountCheck() -} diff --git a/inputs/mtail/internal/mtail/multiple_lines_integration_test.go b/inputs/mtail/internal/mtail/multiple_lines_integration_test.go deleted file mode 100644 index f4107ac6..00000000 --- a/inputs/mtail/internal/mtail/multiple_lines_integration_test.go +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
- -package mtail_test - -import ( - "log" - "os" - "path/filepath" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestMultipleLinesInOneWrite(t *testing.T) { - testutil.SkipIfShort(t) - tmpDir := testutil.TestTempDir(t) - - logDir := filepath.Join(tmpDir, "logs") - progDir := filepath.Join(tmpDir, "progs") - err := os.Mkdir(logDir, 0o700) - testutil.FatalIfErr(t, err) - err = os.Mkdir(progDir, 0o700) - testutil.FatalIfErr(t, err) - - logFile := filepath.Join(logDir, "log") - - f := testutil.TestOpenFile(t, logFile) - defer f.Close() - - m, stopM := mtail.TestStartServer(t, 1, mtail.ProgramPath(progDir), mtail.LogPathPatterns(logDir+"/log")) - defer stopM() - - m.PollWatched(1) // Force sync to EOF - - { - lineCountCheck := m.ExpectExpvarDeltaWithDeadline("lines_total", 1) - n, err := f.WriteString("line 1\n") - testutil.FatalIfErr(t, err) - log.Printf("Wrote %d bytes", n) - m.PollWatched(1) - lineCountCheck() - } - - { - lineCountCheck := m.ExpectExpvarDeltaWithDeadline("lines_total", 2) - n, err := f.WriteString("line 2\nline 3\n") - testutil.FatalIfErr(t, err) - log.Printf("Wrote %d bytes", n) - m.PollWatched(1) - lineCountCheck() - } -} diff --git a/inputs/mtail/internal/mtail/options.go b/inputs/mtail/internal/mtail/options.go index 5beecd3b..8f9eb810 100644 --- a/inputs/mtail/internal/mtail/options.go +++ b/inputs/mtail/internal/mtail/options.go @@ -10,12 +10,10 @@ import ( "path/filepath" "time" - // "contrib.go.opencensus.io/exporter/jaeger" "flashcat.cloud/categraf/inputs/mtail/internal/exporter" "flashcat.cloud/categraf/inputs/mtail/internal/runtime" "flashcat.cloud/categraf/inputs/mtail/internal/tailer" "flashcat.cloud/categraf/inputs/mtail/internal/waker" - // "go.opencensus.io/trace" ) // Option configures mtail.Server. @@ -116,20 +114,6 @@ func (opt overrideLocation) apply(m *Server) error { return nil } -// StaleLogGcWaker triggers garbage collection runs for stale logs in the tailer. -func StaleLogGcWaker(w waker.Waker) Option { - return &staleLogGcWaker{w} -} - -type staleLogGcWaker struct { - waker.Waker -} - -func (opt staleLogGcWaker) apply(m *Server) error { - m.tOpts = append(m.tOpts, tailer.StaleLogGcWaker(opt.Waker)) - return nil -} - // LogPatternPollWaker triggers polls on the filesystem for new logs that match the log glob patterns. func LogPatternPollWaker(w waker.Waker) Option { return &logPatternPollWaker{w} @@ -171,6 +155,7 @@ var OneShot = &niladicOption{ func(m *Server) error { m.rOpts = append(m.rOpts, runtime.ErrorsAbort()) m.tOpts = append(m.tOpts, tailer.OneShot) + m.eOpts = append(m.eOpts, exporter.DisableExport()) m.oneShot = true return nil }, @@ -180,6 +165,7 @@ var OneShot = &niladicOption{ var CompileOnly = &niladicOption{ func(m *Server) error { m.rOpts = append(m.rOpts, runtime.CompileOnly()) + m.eOpts = append(m.eOpts, exporter.DisableExport()) m.compileOnly = true return nil }, @@ -193,7 +179,7 @@ var DumpAst = &niladicOption{ }, } -// DumpAstTypes instructs the Server's copmiler to print the AST after type checking. +// DumpAstTypes instructs the Server's compiler to print the AST after type checking. var DumpAstTypes = &niladicOption{ func(m *Server) error { m.rOpts = append(m.rOpts, runtime.DumpAstTypes()) @@ -249,22 +235,24 @@ var LogRuntimeErrors = &niladicOption{ }, } +/* // JaegerReporter creates a new jaeger reporter that sends to the given Jaeger endpoint address. 
type JaegerReporter string -func (opt JaegerReporter) apply(m *Server) error { - // je, err := jaeger.NewExporter(jaeger.Options{ - // CollectorEndpoint: string(opt), - // Process: jaeger.Process{ - // ServiceName: "mtail", - // }, - // }) - // if err != nil { - // return err - // } - // trace.RegisterExporter(je) +func (opt JaegerReporter) apply(_ *Server) error { + je, err := jaeger.NewExporter(jaeger.Options{ + CollectorEndpoint: string(opt), + Process: jaeger.Process{ + ServiceName: "mtail", + }, + }) + if err != nil { + return err + } + trace.RegisterExporter(je) return nil } +*/ // MetricPushInterval sets the interval between metrics pushes to passive collectors. type MetricPushInterval time.Duration diff --git a/inputs/mtail/internal/mtail/partial_line_integration_test.go b/inputs/mtail/internal/mtail/partial_line_integration_test.go deleted file mode 100644 index 85e68dbb..00000000 --- a/inputs/mtail/internal/mtail/partial_line_integration_test.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package mtail_test - -import ( - "os" - "path/filepath" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestPartialLineRead(t *testing.T) { - testutil.SkipIfShort(t) - - tmpDir := testutil.TestTempDir(t) - - logDir := filepath.Join(tmpDir, "logs") - progDir := filepath.Join(tmpDir, "progs") - err := os.Mkdir(logDir, 0o700) - testutil.FatalIfErr(t, err) - err = os.Mkdir(progDir, 0o700) - testutil.FatalIfErr(t, err) - - logFile := filepath.Join(logDir, "log") - - f := testutil.TestOpenFile(t, logFile) - defer f.Close() - - m, stopM := mtail.TestStartServer(t, 1, mtail.ProgramPath(progDir), mtail.LogPathPatterns(logDir+"/log")) - defer stopM() - - lineCountCheck := m.ExpectExpvarDeltaWithDeadline("lines_total", 2) - - testutil.WriteString(t, f, "line 1\n") - m.PollWatched(1) - - testutil.WriteString(t, f, "line ") - m.PollWatched(1) - - testutil.WriteString(t, f, "2\n") - m.PollWatched(1) - - lineCountCheck() -} diff --git a/inputs/mtail/internal/mtail/permission_denied_integration_unix_test.go b/inputs/mtail/internal/mtail/permission_denied_integration_unix_test.go deleted file mode 100644 index 040140e4..00000000 --- a/inputs/mtail/internal/mtail/permission_denied_integration_unix_test.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -//go:build unix -// +build unix - -package mtail_test - -import ( - "os" - "path/filepath" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -// TestPermissionDeniedOnLog is a unix-specific test because on Windows, it is not possible to create a file -// that you yourself cannot read (minimum permissions are 0222). -func TestPermissionDeniedOnLog(t *testing.T) { - testutil.SkipIfShort(t) - // Can't force a permission denied error if run as root. - testutil.SkipIfRoot(t) - - tmpDir := testutil.TestTempDir(t) - - logDir := filepath.Join(tmpDir, "logs") - progDir := filepath.Join(tmpDir, "progs") - err := os.Mkdir(logDir, 0o700) - testutil.FatalIfErr(t, err) - err = os.Mkdir(progDir, 0o700) - testutil.FatalIfErr(t, err) - - logFile := filepath.Join(logDir, "log") - - // Hide the error from stdout during test. 
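A minimal sketch of how the reworked server is driven in compile-only mode, mirroring the compile-only test kept earlier in this diff: CompileOnly now also appends exporter.DisableExport() to the exporter options, and Run returns once compilation finishes. This sketch is illustrative only and not part of the patch; mtail.New, metrics.NewStore, mtail.ProgramPath and mtail.CompileOnly are the identifiers used elsewhere in this diff, while compileOnlySketch and the progDir argument are hypothetical.

package mtail_test

import (
	"context"
	"testing"

	"flashcat.cloud/categraf/inputs/mtail/internal/metrics"
	"flashcat.cloud/categraf/inputs/mtail/internal/mtail"
)

// compileOnlySketch is a hypothetical helper, not part of the patch.
func compileOnlySketch(t *testing.T, progDir string) {
	t.Helper()
	ctx := context.Background()
	// CompileOnly: load and compile the programs under progDir, then exit.
	// The export loop is disabled, so nothing is pushed while compiling.
	m, err := mtail.New(ctx, metrics.NewStore(), mtail.ProgramPath(progDir), mtail.CompileOnly)
	if err != nil {
		t.Fatal(err)
	}
	// Run waits on the main goroutines and returns nil once compile-only finishes.
	if err := m.Run(); err != nil {
		t.Fatal(err)
	}
}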
- testutil.SetFlag(t, "stderrthreshold", "FATAL") - - m, stopM := mtail.TestStartServer(t, 0, mtail.ProgramPath(progDir), mtail.LogPathPatterns(logDir+"/log")) - defer stopM() - - errorsTotalCheck := m.ExpectMapExpvarDeltaWithDeadline("log_errors_total", logFile, 1) - - f, err := os.OpenFile(logFile, os.O_CREATE, 0) - testutil.FatalIfErr(t, err) - defer f.Close() - - // Nothing to await on, we expect to get a Permission Denied in the - // synchronous logstream.New path. - m.PollWatched(0) - - errorsTotalCheck() -} diff --git a/inputs/mtail/internal/mtail/prog_load_integration_test.go b/inputs/mtail/internal/mtail/prog_load_integration_test.go deleted file mode 100644 index 7e074d16..00000000 --- a/inputs/mtail/internal/mtail/prog_load_integration_test.go +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright 2020 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package mtail_test - -import ( - "os" - "path/filepath" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestNewProg(t *testing.T) { - testutil.SkipIfShort(t) - - tmpDir := testutil.TestTempDir(t) - - logDir := filepath.Join(tmpDir, "logs") - progDir := filepath.Join(tmpDir, "progs") - err := os.Mkdir(logDir, 0o700) - testutil.FatalIfErr(t, err) - err = os.Mkdir(progDir, 0o700) - testutil.FatalIfErr(t, err) - - m, stopM := mtail.TestStartServer(t, 0, mtail.ProgramPath(progDir), mtail.LogPathPatterns(logDir+"/*")) - defer stopM() - - progLoadsTotalCheck := m.ExpectMapExpvarDeltaWithDeadline("prog_loads_total", "nocode.mtail", 1) - - f := testutil.TestOpenFile(t, progDir+"/nocode.mtail") - defer f.Close() - // No logs get watched here. - m.PollWatched(0) - - progLoadsTotalCheck() -} - -func TestProgramReloadNoDuplicateMetrics(t *testing.T) { - testutil.SkipIfShort(t) - - workdir := testutil.TestTempDir(t) - - logDir := filepath.Join(workdir, "logs") - testutil.FatalIfErr(t, os.Mkdir(logDir, 0o777)) - progDir := filepath.Join(workdir, "progs") - testutil.FatalIfErr(t, os.Mkdir(progDir, 0o777)) - - logFilepath := filepath.Join(logDir, "log") - logFile := testutil.TestOpenFile(t, logFilepath) - defer logFile.Close() - - m, stopM := mtail.TestStartServer(t, 0, mtail.ProgramPath(progDir), mtail.LogPathPatterns(logDir+"/*")) - defer stopM() - - progLoadsTotalCheck := m.ExpectMapExpvarDeltaWithDeadline("prog_loads_total", "program.mtail", 1) - - progpath := filepath.Join(progDir, "program.mtail") - p := testutil.TestOpenFile(t, progpath) - testutil.WriteString(t, p, "counter foo\n/^foo$/ {\n foo++\n }\n") - testutil.FatalIfErr(t, p.Close()) - m.PollWatched(0) - - progLoadsTotalCheck() - - fooIncreaseCheck := m.ExpectProgMetricDeltaWithDeadline("foo", "program.mtail", 1) - - testutil.WriteString(t, logFile, "foo\n") - m.PollWatched(1) - - fooIncreaseCheck() - progLoadsTotalCheck = m.ExpectMapExpvarDeltaWithDeadline("prog_loads_total", "program.mtail", 1) - - p = testutil.TestOpenFile(t, progpath) // opens in append mode - testutil.WriteString(t, p, "#\n") // append just enough to change but still valid - testutil.FatalIfErr(t, p.Close()) - m.PollWatched(1) - - progLoadsTotalCheck() - - // Should still be 1. 
- fooIncreaseCheck() -} - -func TestProgramUnloadIfDeleted(t *testing.T) { - testutil.SkipIfShort(t) - - workdir := testutil.TestTempDir(t) - - logDir := filepath.Join(workdir, "logs") - testutil.FatalIfErr(t, os.Mkdir(logDir, 0o777)) - progDir := filepath.Join(workdir, "progs") - testutil.FatalIfErr(t, os.Mkdir(progDir, 0o777)) - - logFilepath := filepath.Join(logDir, "log") - logFile := testutil.TestOpenFile(t, logFilepath) - defer logFile.Close() - - m, stopM := mtail.TestStartServer(t, 0, mtail.ProgramPath(progDir), mtail.LogPathPatterns(logDir+"/*")) - defer stopM() - - progLoadsTotalCheck := m.ExpectMapExpvarDeltaWithDeadline("prog_loads_total", "program.mtail", 1) - - progpath := filepath.Join(progDir, "program.mtail") - p := testutil.TestOpenFile(t, progpath) - testutil.WriteString(t, p, "counter foo\n/^foo$/ {\n foo++\n }\n") - testutil.FatalIfErr(t, p.Close()) - m.PollWatched(0) - - progLoadsTotalCheck() - - progUnloadsTotalCheck := m.ExpectMapExpvarDeltaWithDeadline("prog_unloads_total", "program.mtail", 1) - - testutil.FatalIfErr(t, os.Remove(progpath)) - - m.PollWatched(1) - - progUnloadsTotalCheck() -} diff --git a/inputs/mtail/internal/mtail/read_pipe_integration_unix_test.go b/inputs/mtail/internal/mtail/read_pipe_integration_unix_test.go deleted file mode 100644 index 9bd2dfa3..00000000 --- a/inputs/mtail/internal/mtail/read_pipe_integration_unix_test.go +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -//go:build unix -// +build unix - -package mtail_test - -import ( - "net" - "os" - "path/filepath" - "syscall" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" - "golang.org/x/sys/unix" -) - -func TestReadFromPipe(t *testing.T) { - testutil.SkipIfShort(t) - tmpDir := testutil.TestTempDir(t) - - logDir := filepath.Join(tmpDir, "logs") - progDir := filepath.Join(tmpDir, "progs") - testutil.FatalIfErr(t, os.Mkdir(logDir, 0o700)) - testutil.FatalIfErr(t, os.Mkdir(progDir, 0o700)) - testutil.Chdir(t, logDir) - - logFile := filepath.Join(logDir, "logpipe") - - testutil.FatalIfErr(t, unix.Mkfifo(logFile, 0o600)) - - // TODO: race if this openfile happens after teststartserver. 
- f, err := os.OpenFile(logFile, os.O_RDWR|syscall.O_NONBLOCK, 0o600) - testutil.FatalIfErr(t, err) - defer func() { - testutil.FatalIfErr(t, f.Close()) - }() - - m, stopM := mtail.TestStartServer(t, 1, mtail.LogPathPatterns(logDir+"/*"), mtail.ProgramPath(progDir)) - defer stopM() - - lineCountCheck := m.ExpectExpvarDeltaWithDeadline("lines_total", 3) - - testutil.WriteString(t, f, "1\n2\n3\n") - m.PollWatched(0) - - lineCountCheck() -} - -func TestReadFromSocket(t *testing.T) { - testutil.SkipIfShort(t) - - for _, scheme := range []string{"unix", "unixgram"} { - scheme := scheme - t.Run(scheme, func(t *testing.T) { - tmpDir := testutil.TestTempDir(t) - - logDir := filepath.Join(tmpDir, "logs") - progDir := filepath.Join(tmpDir, "progs") - testutil.FatalIfErr(t, os.Mkdir(logDir, 0o700)) - testutil.FatalIfErr(t, os.Mkdir(progDir, 0o700)) - testutil.Chdir(t, logDir) - - logFile := filepath.Join(logDir, "sock") - - m, stopM := mtail.TestStartServer(t, 1, mtail.LogPathPatterns(scheme+"://"+logDir+"/sock"), mtail.ProgramPath(progDir)) - defer stopM() - - lineCountCheck := m.ExpectExpvarDeltaWithDeadline("lines_total", 3) - time.Sleep(10 * time.Millisecond) - - s, err := net.DialUnix(scheme, nil, &net.UnixAddr{Name: logFile, Net: scheme}) - testutil.FatalIfErr(t, err) - defer func() { - testutil.FatalIfErr(t, s.Close()) - }() - - _, err = s.Write([]byte("1\n2\n3\n")) - testutil.FatalIfErr(t, err) - - m.PollWatched(0) - - lineCountCheck() - }) - } -} diff --git a/inputs/mtail/internal/mtail/relative_path_pattern_integration_test.go b/inputs/mtail/internal/mtail/relative_path_pattern_integration_test.go deleted file mode 100644 index f6926a46..00000000 --- a/inputs/mtail/internal/mtail/relative_path_pattern_integration_test.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2020 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package mtail_test - -import ( - "log" - "os" - "path/filepath" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestRelativeLog(t *testing.T) { - testutil.SkipIfShort(t) - workdir := testutil.TestTempDir(t) - - cwd, err := os.Getwd() - testutil.FatalIfErr(t, err) - log.Printf("cwd is %q", cwd) - - testutil.FatalIfErr(t, os.Chdir(workdir)) - defer func() { - testutil.FatalIfErr(t, os.Chdir(cwd)) - }() - - // touch log file - logFilepath := filepath.Join(workdir, "log") - logFile, err := os.Create(logFilepath) - testutil.FatalIfErr(t, err) - defer logFile.Close() - pathnames := []string{"log"} - m, stopM := mtail.TestStartServer(t, 1, mtail.LogPathPatterns(pathnames...)) - defer stopM() - - m.PollWatched(1) // Force sync to EOF - - inputLines := []string{"hi", "hi2", "hi3"} - lineCountCheck := m.ExpectExpvarDeltaWithDeadline("lines_total", int64(len(inputLines))) - - for _, x := range inputLines { - // write to log file - testutil.WriteString(t, logFile, x+"\n") - } - m.PollWatched(1) - - lineCountCheck() -} diff --git a/inputs/mtail/internal/mtail/testing.go b/inputs/mtail/internal/mtail/testing.go deleted file mode 100644 index 609947ca..00000000 --- a/inputs/mtail/internal/mtail/testing.go +++ /dev/null @@ -1,172 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
- -package mtail - -import ( - "context" - "expvar" - "fmt" - "log" - "os" - "runtime" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/metrics" - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" - "flashcat.cloud/categraf/inputs/mtail/internal/waker" -) - -const defaultDoOrTimeoutDeadline = 10 * time.Second - -type TestServer struct { - *Server - - waker waker.Waker // for idle logstreams; others are polled explicitly in PollWatched - awaken func(int) - - tb testing.TB - - cancel context.CancelFunc - - // Set this to change the poll deadline when using DoOrTimeout within this TestServer. - DoOrTimeoutDeadline time.Duration -} - -// TestMakeServer makes a new TestServer for use in tests, but does not start -// the server. If an error occurs during creation, a testing.Fatal is issued. -func TestMakeServer(tb testing.TB, wakers int, options ...Option) *TestServer { - tb.Helper() - - // Reset counters when running multiple tests. Tests that use expvar - // helpers cannot be made parallel. - log.Println("resetting counters") - expvar.Get("lines_total").(*expvar.Int).Set(0) - expvar.Get("log_count").(*expvar.Int).Set(0) - expvar.Get("log_lines_total").(*expvar.Map).Init() - expvar.Get("log_opens_total").(*expvar.Map).Init() - expvar.Get("log_closes_total").(*expvar.Map).Init() - expvar.Get("file_truncates_total").(*expvar.Map).Init() - expvar.Get("prog_loads_total").(*expvar.Map).Init() - - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, wakers) - options = append(options, - LogstreamPollWaker(waker), - ) - m, err := New(ctx, metrics.NewStore(), options...) - testutil.FatalIfErr(tb, err) - return &TestServer{Server: m, waker: waker, awaken: awaken, tb: tb, cancel: cancel} -} - -// TestStartServer creates a new TestServer and starts it running. It -// returns the server, and a stop function. -func TestStartServer(tb testing.TB, wakers int, options ...Option) (*TestServer, func()) { - tb.Helper() - ts := TestMakeServer(tb, wakers, options...) - return ts, ts.Start() -} - -// Start starts the TestServer and returns a stop function. -func (ts *TestServer) Start() func() { - ts.tb.Helper() - errc := make(chan error, 1) - go func() { - err := ts.Run() - errc <- err - }() - - return func() { - ts.cancel() - - select { - case err := <-errc: - testutil.FatalIfErr(ts.tb, err) - case <-time.After(6 * time.Second): - buf := make([]byte, 1<<16) - n := runtime.Stack(buf, true) - fmt.Fprintf(os.Stderr, "%s", buf[0:n]) - ts.tb.Fatal("timeout waiting for shutdown") - } - } -} - -// Poll all watched objects for updates. The parameter n indicates how many logstreams to wait on before waking them. -func (ts *TestServer) PollWatched(n int) { - log.Println("Testserver starting poll") - log.Println("TestServer polling filesystem patterns") - if err := ts.t.Poll(); err != nil { - log.Println(err) - } - log.Println("TestServer reloading programs") - if err := ts.r.LoadAllPrograms(); err != nil { - log.Println(err) - } - log.Println("TestServer tailer gcing") - if err := ts.t.ExpireStaleLogstreams(); err != nil { - log.Println(err) - } - log.Println("TestServer waking idle routines") - ts.awaken(n) - log.Println("Testserver finishing poll") -} - -// GetExpvar is a helper function on TestServer that acts like TestGetExpvar. 
-func (ts *TestServer) GetExpvar(name string) expvar.Var { - ts.tb.Helper() - return testutil.TestGetExpvar(ts.tb, name) -} - -// ExpectExpvarDeltaWithDeadline returns a deferrable function which tests if the expvar metric with name has changed by delta within the given deadline, once the function begins. Before returning, it fetches the original value for comparison. -func (ts *TestServer) ExpectExpvarDeltaWithDeadline(name string, want int64) func() { - ts.tb.Helper() - return testutil.ExpectExpvarDeltaWithDeadline(ts.tb, name, want) -} - -// ExpectMapExpvarMetricDeltaWithDeadline returns a deferrable function which tests if the expvar map metric with name and key has changed by delta within the given deadline, once the function begins. Before returning, it fetches the original value for comparison. -func (ts *TestServer) ExpectMapExpvarDeltaWithDeadline(name, key string, want int64) func() { - ts.tb.Helper() - return testutil.ExpectMapExpvarDeltaWithDeadline(ts.tb, name, key, want) -} - -// GetProgramMetric fetches the datum of the program metric name. -func (ts *TestServer) GetProgramMetric(name, prog string) datum.Datum { - ts.tb.Helper() - m := ts.store.FindMetricOrNil(name, prog) - if m == nil { - ts.tb.Fatalf("Unexpected metric store content, got nil instead of %s %s", name, prog) - return nil - } - d, derr := m.GetDatum() - testutil.FatalIfErr(ts.tb, derr) - return d -} - -// ExpectProgMetricDeltaWithDeadline tests that a given program metric increases by want within the deadline. It assumes that the named metric is an Int type datum.Datum. -func (ts *TestServer) ExpectProgMetricDeltaWithDeadline(name, prog string, want int64) func() { - ts.tb.Helper() - deadline := ts.DoOrTimeoutDeadline - if deadline == 0 { - deadline = defaultDoOrTimeoutDeadline - } - start := datum.GetInt(ts.GetProgramMetric(name, prog)) - check := func() (bool, error) { - ts.tb.Helper() - now := datum.GetInt(ts.GetProgramMetric(name, prog)) - return now-start == want, nil - } - return func() { - ts.tb.Helper() - ok, err := testutil.DoOrTimeout(check, deadline, 10*time.Millisecond) - if err != nil { - ts.tb.Fatal(err) - } - if !ok { - now := datum.GetInt(ts.GetProgramMetric(name, prog)) - delta := now - start - ts.tb.Errorf("Did not see %s have delta by deadline: got %v - %v = %d, want %d", name, now, start, delta, want) - } - } -} diff --git a/inputs/mtail/internal/mtail/unix_socket_export_integration_test.go b/inputs/mtail/internal/mtail/unix_socket_export_integration_test.go deleted file mode 100644 index 4fd9707b..00000000 --- a/inputs/mtail/internal/mtail/unix_socket_export_integration_test.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
- -package mtail_test - -import ( - "log" - "net" - "path/filepath" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/mtail" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestBasicUNIXSockets(t *testing.T) { - testutil.SkipIfShort(t) - tmpDir := testutil.TestTempDir(t) - sockListenAddr := filepath.Join(tmpDir, "mtail_test.sock") - - _, stopM := mtail.TestStartServer(t, 1, mtail.LogPathPatterns(tmpDir+"/*"), mtail.ProgramPath("../../examples/linecount.mtail"), mtail.BindUnixSocket(sockListenAddr)) - defer stopM() - - log.Println("check that server is listening") - - addr, err := net.ResolveUnixAddr("unix", sockListenAddr) - testutil.FatalIfErr(t, err) - _, err = net.DialUnix("unix", nil, addr) - testutil.FatalIfErr(t, err) -} diff --git a/inputs/mtail/internal/runtime/code/instr_test.go b/inputs/mtail/internal/runtime/code/instr_test.go deleted file mode 100644 index 3ec668b1..00000000 --- a/inputs/mtail/internal/runtime/code/instr_test.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2018 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package code_test - -import ( - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/code" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestInstrString(t *testing.T) { - expected := "{match 0 0}" - - testutil.ExpectNoDiff(t, code.Instr{Opcode: code.Match, Operand: 0}.String(), expected) -} diff --git a/inputs/mtail/internal/runtime/code/opcodes_test.go b/inputs/mtail/internal/runtime/code/opcodes_test.go deleted file mode 100644 index de0f4178..00000000 --- a/inputs/mtail/internal/runtime/code/opcodes_test.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2018 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package code - -import "testing" - -func TestOpcodeHasString(t *testing.T) { - for o := Bad; o < lastOpcode; o++ { - if o.String() != opNames[o] { - t.Errorf("opcode string not match. Expected %s, received %s", opNames[o], o.String()) - } - } -} diff --git a/inputs/mtail/internal/runtime/compiler/ast/ast.go b/inputs/mtail/internal/runtime/compiler/ast/ast.go index b80c19c9..f011187d 100644 --- a/inputs/mtail/internal/runtime/compiler/ast/ast.go +++ b/inputs/mtail/internal/runtime/compiler/ast/ast.go @@ -435,14 +435,15 @@ func (n *StopStmt) Type() types.Type { // mergepositionlist is a helper that merges the positions of all the nodes in a list. func mergepositionlist(l []Node) *position.Position { - if len(l) == 0 { + switch len(l) { + case 0: return nil - } - if len(l) == 1 { - if l[0] != nil { - return l[0].Pos() + case 1: + if l[0] == nil { + return nil } - return nil + return l[0].Pos() + default: + return position.Merge(l[0].Pos(), mergepositionlist(l[1:])) } - return position.Merge(l[0].Pos(), mergepositionlist(l[1:])) } diff --git a/inputs/mtail/internal/runtime/compiler/ast/walk_test.go b/inputs/mtail/internal/runtime/compiler/ast/walk_test.go deleted file mode 100644 index eec6b249..00000000 --- a/inputs/mtail/internal/runtime/compiler/ast/walk_test.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2018 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
- -package ast_test - -import ( - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/ast" - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/parser" - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/position" - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/types" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -type testNode struct{} - -func (t testNode) Pos() *position.Position { - return &position.Position{} -} - -func (t testNode) Type() types.Type { - return types.None -} - -type testVisitor struct{} - -func (v testVisitor) VisitBefore(n ast.Node) (ast.Visitor, ast.Node) { - return v, n -} - -func (v testVisitor) VisitAfter(n ast.Node) ast.Node { - return n -} - -func TestWalkPanicsOnUnknown(t *testing.T) { - defer func() { - s := recover() - if s == nil { - t.Errorf("No panic received") - } - }() - ast.Walk(testVisitor{}, testNode{}) -} - -type testWalker struct{} - -func (t *testWalker) VisitBefore(n ast.Node) (ast.Visitor, ast.Node) { - if v, ok := n.(*ast.BinaryExpr); ok { - if v.Op == parser.DIV { - n = &ast.IntLit{I: 4} - } - } - return t, n -} - -func (t *testWalker) VisitAfter(n ast.Node) ast.Node { - if v, ok := n.(*ast.BinaryExpr); ok { - if v.Op == parser.MINUS { - n = &ast.IntLit{I: 5} - } - } - return n -} - -func TestAstReplacement(t *testing.T) { - var a ast.Node = &ast.BinaryExpr{ - LHS: &ast.BinaryExpr{LHS: &ast.IntLit{I: 0}, RHS: &ast.IntLit{I: 1}, Op: parser.DIV}, - RHS: &ast.BinaryExpr{LHS: &ast.IntLit{I: 2}, RHS: &ast.IntLit{I: 3}, Op: parser.MINUS}, - Op: parser.PLUS, - } - tw := &testWalker{} - a = ast.Walk(tw, a) - expected := &ast.BinaryExpr{ - LHS: &ast.IntLit{I: 4}, - RHS: &ast.IntLit{I: 5}, - Op: parser.PLUS, - } - if !testutil.ExpectNoDiff(t, expected, a, testutil.IgnoreUnexported(ast.BinaryExpr{})) { - s := parser.Sexp{} - t.Log("AST:\n" + s.Dump(a)) - } -} diff --git a/inputs/mtail/internal/runtime/compiler/checker/checker_test.go b/inputs/mtail/internal/runtime/compiler/checker/checker_test.go deleted file mode 100644 index 15ba00a8..00000000 --- a/inputs/mtail/internal/runtime/compiler/checker/checker_test.go +++ /dev/null @@ -1,642 +0,0 @@ -// Copyright 2016 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
-
-package checker_test
-
-import (
-	"flag"
-	"strings"
-	"testing"
-
-	"flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/ast"
-	"flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/checker"
-	"flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/parser"
-	"flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/symbol"
-	"flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/types"
-	"flashcat.cloud/categraf/inputs/mtail/internal/testutil"
-	"github.com/google/go-cmp/cmp/cmpopts"
-)
-
-var checkerTestDebug = flag.Bool("checker_test_debug", false, "Turn on to log AST in tests")
-
-var checkerInvalidPrograms = []struct {
-	name    string
-	program string
-	errors  []string
-}{
-	{
-		"undefined named capture group",
-		"/blurgh/ { $undef++\n }\n",
-		[]string{"undefined named capture group:1:12-17: Capture group `$undef' was not defined by a regular expression visible to this scope.", "\tTry using `(?P<undef>...)' to name the capture group."},
-	},
-
-	{
-		"out of bounds capref",
-		"/(blyurg)/ { $2++ \n}\n",
-		[]string{"out of bounds capref:1:14-15: Capture group `$2' was not defined by a regular expression " +
-			"visible to this scope.", "\tCheck that there are at least 2 pairs of parentheses."},
-	},
-
-	{
-		"undefined decorator",
-		"@foo {}\n",
-		[]string{"undefined decorator:1:1-4: Decorator `@foo' is not defined.", "\tTry adding a definition `def foo {}' earlier in the program."},
-	},
-
-	{
-		"undefined identifier",
-		"// { x++ \n}\n",
-		[]string{"undefined identifier:1:6: Identifier `x' not declared.", "\tTry adding `counter x' to the top of the program."},
-	},
-
-	{
-		"invalid regex 1",
-		"/foo(/ {}\n",
-		[]string{"invalid regex 1:1:1-6: error parsing regexp: missing closing ): `foo(`"},
-	},
-
-	{
-		"invalid regex 3",
-		"/blurg(?P<x>[[:alph:]])/ {}\n",
-		[]string{"invalid regex 3:1:1-24: error parsing regexp: invalid character class range: `[:alph:]`"},
-	},
-
-	{
-		"duplicate declaration",
-		"counter foo\ncounter foo\n",
-		[]string{
-			"duplicate declaration:2:9-11: Redeclaration of metric `foo' previously declared at duplicate declaration:1:9-11",
-			"duplicate declaration:1:9-11: Declaration of variable `foo' here is never used.",
-		},
-	},
-
-	{
-		"indexedExpr parameter count",
-		`counter n
-      counter foo by a, b
-      counter bar by a, b
-      counter quux by a
-      /(\d+)/ {
-      n[$1]++
-      foo[$1]++
-      bar[$1][0]++
-      quux[$1][0]++
-      }
-      `,
-		[]string{
-			// n[$1] is syntactically valid, but n is not indexable
-			"indexedExpr parameter count:6:7-10: Index taken on unindexable expression",
-			// foo[$1] is short one key
-			"indexedExpr parameter count:7:7-12: Not enough keys for indexed expression: expecting 2, received 1",
-			// bar[$1][0] is ok
-			// quux[$1][0] has too many keys
-			"indexedExpr parameter count:9:7-16: Too many keys for indexed expression: expecting 1, received 2.",
-		},
-	},
-
-	{
-		"indexedExpr binary expression",
-		`counter foo by a, b
-counter bar by a, b
-/(\d+)/ {
-  foo[$1]+=$1
-}
-/(.*)/ {
-  foo = bar[$1] + 1
-}
-`,
-		[]string{
-			"indexedExpr binary expression:4:3-8: Not enough keys for indexed expression: expecting 2, received 1",
-			"indexedExpr binary expression:7:3-5: Not enough keys for indexed expression: expecting 2, received 0",
-			"indexedExpr binary expression:7:9-14: Not enough keys for indexed expression: expecting 2, received 1",
-		},
-	},
-
-	{
-		"builtin parameter mismatch",
-		`/\d+/ {
-	  strptime()
-	}
-	/\d+/ {
-	  timestamp()
-	}
-	`,
-		[]string{"builtin parameter mismatch:2:4-13: call to `strptime': type mismatch; expected 
String→String→None received incomplete type"}, - }, - - { - "bad strptime format", - `strptime("2017-10-16 06:50:25", "2017-10-16 06:50:25") -`, - []string{ - "bad strptime format:1:33-53: invalid time format string \"2017-10-16 06:50:25\"", "\tRefer to the documentation at https://golang.org/pkg/time/#pkg-constants for advice.", - }, - }, - - { - "undefined const regex", - "/foo / + X + / bar/ {}\n", - []string{"undefined const regex:1:10: Identifier `X' not declared.", "\tTry adding `const X /.../' earlier in the program."}, - }, - - { - "unused symbols", - `counter foo -const ID /bar/ -/asdf/ { -} -`, - []string{ - "unused symbols:1:9-11: Declaration of variable `foo' here is never used.", - "unused symbols:2:7-8: Declaration of named pattern constant `ID' here is never used.", - }, - }, - { - "invalid del index count", - `gauge t by x, y -/.*/ { - del t["x"] - t["x"]["y"] -} -`, - []string{"invalid del index count:3:7-11: Not enough keys for indexed expression: expecting 2, received 1"}, - }, - // TODO(jaq): is it an error to make a counter of type string? - // {"counter as string", - // `counter foo - - // /(?P.*)/ { - // foo = $v - // } - // `, - // []string{"counter as string:4:4-11: Can't assign rhs of type String to lhs of type Int"}}, - { - "def without usage", - `def x{next}`, - []string{"def without usage:1:1-10: Declaration of decorator `x' here is never used."}, - }, - { - "def without next", - `def x{} -@x { -}`, - []string{"def without next:1:1-3: No symbols found in decorator `@x'.", "\tTry adding a `next' statement inside the `{}' block."}, - }, - { - "def with two nexts", - `def x{ - /a/ { - next - } - /b/ { - next - } -} -@x { -}`, - []string{"def with two nexts:6:5-8: Can't use `next' statement twice in a decorator."}, - }, - - { - "counter with buckets", - `counter foo buckets 1, 2, 3 -/(\d)/ { -foo = $1 -}`, - []string{"counter with buckets:1:9-11: Can't specify buckets for non-histogram metric `foo'."}, - }, - - { - "next outside of decorator", - `def x{ -next -} -@x { -next -} -`, - []string{"next outside of decorator:5:1-4: Can't use `next' outside of a decorator."}, - }, - - { - "use decorator in decorator", - `def x { -@x {} -}`, - []string{"use decorator in decorator:2:1-2: Decorator `@x' is not completely defined yet.", "\tTry removing @x from here.", "use decorator in decorator:2:1-2: No symbols found in decorator `@x'.", "\tTry adding a `next' statement inside the `{}' block."}, - }, - - { - "delete incorrect object", - `/(.*)/ { -del $0 -}`, - []string{"delete incorrect object:2:5-6: Cannot delete this.", "\tTry deleting from a dimensioned metric with this as an index."}, - }, - - { - "pattern fragment plus anything", - `gauge e -// + e { -} -`, - []string{"pattern fragment plus anything:2:6: Can't append variable `e' to this pattern.", "\tTry using a `const'-defined pattern fragment."}, - }, - - { - "recursive pattern fragment", - `const P//+P`, - []string{"recursive pattern fragment:1:11: Can't evaluate pattern fragment `P' here.", "\tTry defining it earlier in the program."}, - }, - - { - "delete a histogram", - `histogram# -m del# -m`, - []string{"delete a histogram:3:7: Cannot delete this.", "\tTry deleting an index from this dimensioned metric."}, - }, - - { - "int as bool", - `1 {}`, - []string{"int as bool:1:1: Can't interpret Int as a boolean expression here.", "\tTry using comparison operators to make the condition explicit."}, - }, - - { - "regexp too long", - "/" + strings.Repeat("c", 1025) + "/ {}", - []string{"regexp too long:1:1-1027: 
Exceeded maximum regular expression pattern length of 1024 bytes with 1025.", "\tExcessively long patterns are likely to cause compilation and runtime performance problems."}, - }, - - { - "strptime invalid args", - `strptime("",8) -`, - []string{"strptime invalid args:1:13: Expecting a format string for argument 2 of strptime(), not Int."}, - }, - - { - "inc invalid args", - `text l -l++ -`, - []string{"inc invalid args:2:1: type mismatch: expecting an Int for INC, not String."}, - }, - - { - "mod by zero", - `2=9%0 -`, - []string{"mod by zero:1:3-5: Can't divide by zero."}, - }, - - { - "assign to rvalue", - `gauge l -l++=l -`, - []string{"assign to rvalue:2:1-3: Can't assign to expression on left; expecting a variable here."}, - }, - - { - "tolower non string", - `tolower(2) -`, - []string{"tolower non string:1:9: Expecting a String for argument 1 of tolower(), not Int."}, - }, - - { - "dec non var", - `strptime("", "")-- -`, - []string{"dec non var:1:1-16: Can't assign to expression; expecting a variable here."}, - }, - - // TODO(jaq): This is an instance of bug #190, the capref is ambiguous. - // {"regexp with no zero capref", - // `//||/;0/ {$0||// {}} - // `, []string{"regexp with no zero capref:1:5-6: Nonexistent capref =."}}, - - { - "cmp to None", - `strptime("","")<5{} -`, - []string{"cmp to None:1:1-17: type mismatch: can't apply LT to LHS of type \"None\" with RHS of type \"Int\"."}, - }, - - { - "negate None", - `~strptime("", "") {} -`, - []string{"negate None:1:2-17: type mismatch; expected Int received None for `~' operator."}, - }, -} - -func TestCheckInvalidPrograms(t *testing.T) { - for _, tc := range checkerInvalidPrograms { - tc := tc - t.Run(tc.name, func(t *testing.T) { - ast, err := parser.Parse(tc.name, strings.NewReader(tc.program)) - testutil.FatalIfErr(t, err) - ast, err = checker.Check(ast, 0, 0) - if err == nil { - s := parser.Sexp{} - s.EmitTypes = true - t.Log(s.Dump(ast)) - t.Fatal("check didn't fail") - } - - if !testutil.ExpectNoDiff(t, - tc.errors, // want - strings.Split(err.Error(), "\n"), // got - cmpopts.SortSlices(func(x, y string) bool { return x < y })) { - t.Logf("Got: %s", err.Error()) - s := parser.Sexp{} - s.EmitTypes = true - t.Log(s.Dump(ast)) - } - }) - } -} - -var checkerValidPrograms = []struct { - name string - program string -}{ - { - "capture group", - `counter foo -/(.*)/ { - foo += $1 -} -`, - }, - { - "shadowed positionals", - `counter foo -/(.*)/ { - foo += $1 - /bar(\d+)/ { - foo += $1 - } -} -`, - }, - { - "sibling positionals", - `counter foo -/(.*)/ { - foo += $1 -} -/bar(\d+)/ { - foo += $1 -} -`, - }, - - { - "index expression", - `counter foo by a, b -/(\d)/ { - foo[1,$1] = 3 -}`, - }, - { - "odd indexes", - `counter foo by a,b,c - /(\d) (\d)/ { - foo[$1,$2][0]++ - } - `, - }, - { - "implicit int", - `counter foo -/$/ { - foo++ -}`, - }, - { - "function return value", - `len("foo") > 0 {}`, - }, - { - "conversions", - `counter i - counter f - /(.*)/ { - i = int($1) - f = float($1) - } - `, - }, - - { - "logical operators", - `0 || 1 { -} -1 && 0 { -} -`, - }, - { - "nested binary conditional", - `1 != 0 && 0 == 1 { -} -`, - }, - {"paren expr", ` -(0) || (1 && 3) { -}`}, - - {"strptime format", ` -strptime("2006-01-02 15:04:05", "2006-01-02 15:04:05") -`}, - - {"string concat", ` -counter f by s -/(.*), (.*)/ { - f[$1 + $2]++ -} -`}, - {"namespace", ` -counter test - -/(?P.*)/ { - test++ -} -`}, - {"match expr 1", ` -/(?P.*)/ { - $foo =~ /bar/ { - } -}`}, - - {"capref used in def", ` -/(?P\d+)/ && $x > 0 { -}`}, - {"binop 
compare type conversion", ` -gauge var -/(?P\d+) (\d+\.\d+)/ { - var = $x + $2 -}`}, - {"binop arith type conversion", ` -gauge var -/(?P\d+) (\d+\.\d+)/ { - var = $x + $2 -}`}, - - {"concat expr 1", ` -const X /foo/ -/bar/ + X { -}`}, - {"concat expr 2", ` -const X /foo/ -X { -}`}, - {"match expression 3", ` -const X /foo/ -"a" =~ X { -} -`}, - {"match expr 4", ` -/(?P.{6}) (?P.*)/ { - $foo =~ $bar { - } -}`}, - {"decorator scopes", ` -counter a -def decorator { - /(.).*/ { - next - } -} -@decorator { - $1 == "A" { - a++ - } -} -`}, - {"concat with add_assign", ` -text foo -/(?P.*)/ { - foo += $v -} -`}, - - {"decrement", ` -counter i -/.*/ { - i-- -}`}, - {"stop", ` -stop -// { - stop -}`}, - - {"declare histogram", ` -histogram foo buckets 1, 2, 3 -/(\d+)/ { - foo = $1 -}`}, - - {"match a pattern in cond", ` -const N /n/ -N { -}`}, - - {"match a pattern in a binary expr in cond", ` -const N /n/ -N && 1 { -}`}, - {"negative numbers in capture groups", ` -gauge foo -/(?P-?\d+)/ { - foo += $value_ms / 1000.0 -}`}, - {"substitution", ` -gauge foo -/(\d,\d)/ { -foo = subst(",", "", $1) -}`}, - {"regexp subst", ` -subst(/\d+/, "d", "1234") -`}, -} - -func TestCheckValidPrograms(t *testing.T) { - for _, tc := range checkerValidPrograms { - tc := tc - t.Run(tc.name, func(t *testing.T) { - ast, err := parser.Parse(tc.name, strings.NewReader(tc.program)) - testutil.FatalIfErr(t, err) - ast, err = checker.Check(ast, 0, 0) - if *checkerTestDebug { - s := parser.Sexp{} - s.EmitTypes = true - t.Log("Typed AST:\n" + s.Dump(ast)) - } - if err != nil { - t.Errorf("check failed: %s", err) - } - }) - } -} - -var checkerTypeExpressionTests = []struct { - name string - expr ast.Node - expected types.Type -}{ - { - "Int + Int -> Int", - &ast.BinaryExpr{ - LHS: &ast.IntLit{I: 1}, - RHS: &ast.IntLit{I: 1}, - Op: parser.PLUS, - }, - types.Int, - }, - { - "Int + Float -> Float", - &ast.BinaryExpr{ - LHS: &ast.IntLit{I: 1}, - RHS: &ast.FloatLit{F: 1.0}, - Op: parser.PLUS, - }, - types.Float, - }, - { - "⍺ + Float -> Float", - &ast.BinaryExpr{ - LHS: &ast.IDTerm{Symbol: &symbol.Symbol{Name: "i", Kind: symbol.VarSymbol, Type: types.NewVariable()}}, - RHS: &ast.CaprefTerm{Symbol: &symbol.Symbol{Kind: symbol.CaprefSymbol, Type: types.Float}}, - Op: parser.PLUS, - }, - types.Float, - }, -} - -func TestCheckTypeExpressions(t *testing.T) { - for _, tc := range checkerTypeExpressionTests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - ast, err := checker.Check(tc.expr, 0, 0) - testutil.FatalIfErr(t, err) - - if !testutil.ExpectNoDiff(t, tc.expected, ast.Type().Root()) { - s := parser.Sexp{} - s.EmitTypes = true - t.Log("Typed AST:\n" + s.Dump(ast)) - } - }) - } -} diff --git a/inputs/mtail/internal/runtime/compiler/codegen/codegen_test.go b/inputs/mtail/internal/runtime/compiler/codegen/codegen_test.go deleted file mode 100644 index 8e6e16bc..00000000 --- a/inputs/mtail/internal/runtime/compiler/codegen/codegen_test.go +++ /dev/null @@ -1,1140 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
- -package codegen_test - -import ( - "flag" - "strings" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/code" - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/ast" - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/checker" - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/codegen" - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/parser" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -var codegenTestDebug = flag.Bool("codegen_test_debug", false, "Log ASTs and debugging information ") - -var testCodeGenPrograms = []struct { - name string - source string - prog []code.Instr // expected bytecode -}{ - // Composite literals require too many explicit conversions. - { - name: "simple line counter", - source: "counter lines_total\n/$/ { lines_total++\n }\n", - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 7, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Mload, Operand: 0, SourceLine: 1}, - {Opcode: code.Dload, Operand: 0, SourceLine: 1}, - {Opcode: code.Inc, Operand: nil, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "count a", - source: "counter a_count\n/a$/ { a_count++\n }\n", - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 7, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Mload, Operand: 0, SourceLine: 1}, - {Opcode: code.Dload, Operand: 0, SourceLine: 1}, - {Opcode: code.Inc, Operand: nil, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "strptime and capref", - source: "counter foo\n" + - "/(.*)/ { strptime($1, \"2006-01-02T15:04:05\")\n" + - "foo++\n}\n", - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 11, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Push, Operand: 0, SourceLine: 1}, - {Opcode: code.Capref, Operand: 1, SourceLine: 1}, - {Opcode: code.Str, Operand: 0, SourceLine: 1}, - {Opcode: code.Strptime, Operand: 2, SourceLine: 1}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 0, SourceLine: 2}, - {Opcode: code.Inc, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "strptime and named capref", - source: "counter foo\n" + - "/(?P.*)/ { strptime($date, \"2006-01-02T15:04:05\")\n" + - "foo++\n }\n", - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 11, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Push, Operand: 0, SourceLine: 1}, - {Opcode: code.Capref, Operand: 1, SourceLine: 1}, - {Opcode: code.Str, Operand: 0, SourceLine: 1}, - {Opcode: code.Strptime, Operand: 2, SourceLine: 1}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 0, SourceLine: 2}, - {Opcode: code.Inc, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "inc by and set", - source: "counter foo\ncounter bar\n" + - "/([0-9]+)/ {\n" + - "foo += $1\n" + - "bar = $1\n" + - "}\n", - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 16, SourceLine: 2}, - {Opcode: code.Setmatched, 
Operand: false, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 1, SourceLine: 3}, - {Opcode: code.S2i, Operand: nil, SourceLine: 3}, - {Opcode: code.Inc, Operand: 0, SourceLine: 3}, - {Opcode: code.Mload, Operand: 1, SourceLine: 4}, - {Opcode: code.Dload, Operand: 0, SourceLine: 4}, - {Opcode: code.Push, Operand: 0, SourceLine: 4}, - {Opcode: code.Capref, Operand: 1, SourceLine: 4}, - {Opcode: code.S2i, Operand: nil, SourceLine: 4}, - {Opcode: code.Iset, Operand: nil, SourceLine: 4}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }, - }, - { - name: "cond expr gt", - source: "counter foo\n" + - "1 > 0 {\n" + - " foo++\n" + - "}\n", - prog: []code.Instr{ - {Opcode: code.Push, Operand: int64(1), SourceLine: 1}, - {Opcode: code.Push, Operand: int64(0), SourceLine: 1}, - {Opcode: code.Icmp, Operand: 1, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 6, SourceLine: 1}, - {Opcode: code.Push, Operand: true, SourceLine: 1}, - {Opcode: code.Jmp, Operand: 7, SourceLine: 1}, - {Opcode: code.Push, Operand: false, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 13, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 0, SourceLine: 2}, - {Opcode: code.Inc, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "cond expr lt", - source: "counter foo\n" + - "1 < 0 {\n" + - " foo++\n" + - "}\n", - prog: []code.Instr{ - {Opcode: code.Push, Operand: int64(1), SourceLine: 1}, - {Opcode: code.Push, Operand: int64(0), SourceLine: 1}, - {Opcode: code.Icmp, Operand: -1, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 6, SourceLine: 1}, - {Opcode: code.Push, Operand: true, SourceLine: 1}, - {Opcode: code.Jmp, Operand: 7, SourceLine: 1}, - {Opcode: code.Push, Operand: false, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 13, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 0, SourceLine: 2}, - {Opcode: code.Inc, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "cond expr eq", - source: "counter foo\n" + - "1 == 0 {\n" + - " foo++\n" + - "}\n", - prog: []code.Instr{ - {Opcode: code.Push, Operand: int64(1), SourceLine: 1}, - {Opcode: code.Push, Operand: int64(0), SourceLine: 1}, - {Opcode: code.Icmp, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 6, SourceLine: 1}, - {Opcode: code.Push, Operand: true, SourceLine: 1}, - {Opcode: code.Jmp, Operand: 7, SourceLine: 1}, - {Opcode: code.Push, Operand: false, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 13, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 0, SourceLine: 2}, - {Opcode: code.Inc, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "cond expr le", - source: "counter foo\n" + - "1 <= 0 {\n" + - " foo++\n" + - "}\n", - prog: []code.Instr{ - {Opcode: code.Push, Operand: int64(1), SourceLine: 1}, - {Opcode: code.Push, Operand: int64(0), SourceLine: 1}, - {Opcode: code.Icmp, Operand: 1, SourceLine: 1}, - {Opcode: code.Jm, Operand: 6, SourceLine: 1}, - {Opcode: code.Push, Operand: true, SourceLine: 1}, 
- {Opcode: code.Jmp, Operand: 7, SourceLine: 1}, - {Opcode: code.Push, Operand: false, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 13, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 0, SourceLine: 2}, - {Opcode: code.Inc, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "cond expr ge", - source: "counter foo\n" + - "1 >= 0 {\n" + - " foo++\n" + - "}\n", - prog: []code.Instr{ - {Opcode: code.Push, Operand: int64(1), SourceLine: 1}, - {Opcode: code.Push, Operand: int64(0), SourceLine: 1}, - {Opcode: code.Icmp, Operand: -1, SourceLine: 1}, - {Opcode: code.Jm, Operand: 6, SourceLine: 1}, - {Opcode: code.Push, Operand: true, SourceLine: 1}, - {Opcode: code.Jmp, Operand: 7, SourceLine: 1}, - {Opcode: code.Push, Operand: false, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 13, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 0, SourceLine: 2}, - {Opcode: code.Inc, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "cond expr ne", - source: "counter foo\n" + - "1 != 0 {\n" + - " foo++\n" + - "}\n", - prog: []code.Instr{ - {Opcode: code.Push, Operand: int64(1), SourceLine: 1}, - {Opcode: code.Push, Operand: int64(0), SourceLine: 1}, - {Opcode: code.Icmp, Operand: 0, SourceLine: 1}, - {Opcode: code.Jm, Operand: 6, SourceLine: 1}, - {Opcode: code.Push, Operand: true, SourceLine: 1}, - {Opcode: code.Jmp, Operand: 7, SourceLine: 1}, - {Opcode: code.Push, Operand: false, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 13, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 0, SourceLine: 2}, - {Opcode: code.Inc, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "nested cond", - source: "counter foo\n" + - "/(\\d+)/ {\n" + - " $1 <= 1 {\n" + - " foo++\n" + - " }\n" + - "}\n", - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 19, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Push, Operand: 0, SourceLine: 2}, - {Opcode: code.Capref, Operand: 1, SourceLine: 2}, - {Opcode: code.S2i, Operand: nil, SourceLine: 2}, - {Opcode: code.Push, Operand: int64(1), SourceLine: 2}, - {Opcode: code.Icmp, Operand: 1, SourceLine: 2}, - {Opcode: code.Jm, Operand: 11, SourceLine: 2}, - {Opcode: code.Push, Operand: true, SourceLine: 2}, - {Opcode: code.Jmp, Operand: 12, SourceLine: 2}, - {Opcode: code.Push, Operand: false, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 18, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Inc, Operand: nil, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "deco", - source: "counter foo\n" + - "counter bar\n" + - "def fooWrap {\n" + - " /.*/ {\n" + - " foo++\n" + - " next\n" + - " }\n" + - "}\n" + - "" + - "@fooWrap { bar++\n }\n", - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 3}, - {Opcode: code.Jnm, Operand: 10, SourceLine: 
3}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 3}, - {Opcode: code.Mload, Operand: 0, SourceLine: 4}, - {Opcode: code.Dload, Operand: 0, SourceLine: 4}, - {Opcode: code.Inc, Operand: nil, SourceLine: 4}, - {Opcode: code.Mload, Operand: 1, SourceLine: 8}, - {Opcode: code.Dload, Operand: 0, SourceLine: 8}, - {Opcode: code.Inc, Operand: nil, SourceLine: 8}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 3}, - }, - }, - { - name: "length", - source: "len(\"foo\") > 0 {\n" + - "}\n", - prog: []code.Instr{ - {Opcode: code.Str, Operand: 0, SourceLine: 0}, - {Opcode: code.Length, Operand: 1, SourceLine: 0}, - {Opcode: code.Push, Operand: int64(0), SourceLine: 0}, - {Opcode: code.Cmp, Operand: 1, SourceLine: 0}, - {Opcode: code.Jnm, Operand: 7, SourceLine: 0}, - {Opcode: code.Push, Operand: true, SourceLine: 0}, - {Opcode: code.Jmp, Operand: 8, SourceLine: 0}, - {Opcode: code.Push, Operand: false, SourceLine: 0}, - {Opcode: code.Jnm, Operand: 11, SourceLine: 0}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 0}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 0}, - }, - }, - { - name: "bitwise", source: ` -gauge a - -a = 1 & 7 ^ 15 | 8 -a = ~ 16 << 2 -a = 1 >> 20 -`, - prog: []code.Instr{ - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Push, Operand: int64(1), SourceLine: 3}, - {Opcode: code.Push, Operand: int64(7), SourceLine: 3}, - {Opcode: code.And, Operand: nil, SourceLine: 3}, - {Opcode: code.Push, Operand: int64(15), SourceLine: 3}, - {Opcode: code.Xor, Operand: nil, SourceLine: 3}, - {Opcode: code.Push, Operand: int64(8), SourceLine: 3}, - {Opcode: code.Or, Operand: nil, SourceLine: 3}, - {Opcode: code.Iset, Operand: nil, SourceLine: 3}, - {Opcode: code.Mload, Operand: 0, SourceLine: 4}, - {Opcode: code.Dload, Operand: 0, SourceLine: 4}, - {Opcode: code.Push, Operand: int64(16), SourceLine: 4}, - {Opcode: code.Neg, Operand: nil, SourceLine: 4}, - {Opcode: code.Push, Operand: int64(2), SourceLine: 4}, - {Opcode: code.Shl, Operand: nil, SourceLine: 4}, - {Opcode: code.Iset, Operand: nil, SourceLine: 4}, - {Opcode: code.Mload, Operand: 0, SourceLine: 5}, - {Opcode: code.Dload, Operand: 0, SourceLine: 5}, - {Opcode: code.Push, Operand: int64(1), SourceLine: 5}, - {Opcode: code.Push, Operand: int64(20), SourceLine: 5}, - {Opcode: code.Shr, Operand: nil, SourceLine: 5}, - {Opcode: code.Iset, Operand: nil, SourceLine: 5}, - }, - }, - { - name: "pow", source: ` -gauge a -/(\d+) (\d+)/ { - a = $1 ** $2 -} -`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 14, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 1, SourceLine: 3}, - {Opcode: code.S2i, Operand: nil, SourceLine: 3}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 2, SourceLine: 3}, - {Opcode: code.S2i, Operand: nil, SourceLine: 3}, - {Opcode: code.Ipow, Operand: nil, SourceLine: 3}, - {Opcode: code.Iset, Operand: nil, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }, - }, - { - name: "indexed expr", source: ` -counter a by b -a["string"]++ -`, - prog: []code.Instr{ - {Opcode: code.Str, Operand: 0, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 1, SourceLine: 
2}, - {Opcode: code.Inc, Operand: nil, SourceLine: 2}, - }, - }, - { - name: "strtol", source: ` -strtol("deadbeef", 16) -`, - prog: []code.Instr{ - {Opcode: code.Str, Operand: 0, SourceLine: 1}, - {Opcode: code.Push, Operand: int64(16), SourceLine: 1}, - {Opcode: code.S2i, Operand: 2, SourceLine: 1}, - }, - }, - { - name: "float", source: ` -20.0 -`, - prog: []code.Instr{ - {Opcode: code.Push, Operand: 20.0, SourceLine: 1}, - }, - }, - { - name: "otherwise", source: ` -counter a -otherwise { - a++ -} -`, - prog: []code.Instr{ - {Opcode: code.Otherwise, Operand: nil, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 7, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Inc, Operand: nil, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }, - }, - { - name: "cond else", - source: `counter foo -counter bar -1 > 0 { - foo++ -} else { - bar++ -}`, - prog: []code.Instr{ - {Opcode: code.Push, Operand: int64(1), SourceLine: 2}, - {Opcode: code.Push, Operand: int64(0), SourceLine: 2}, - {Opcode: code.Icmp, Operand: 1, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 6, SourceLine: 2}, - {Opcode: code.Push, Operand: true, SourceLine: 2}, - {Opcode: code.Jmp, Operand: 7, SourceLine: 2}, - {Opcode: code.Push, Operand: false, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 14, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Inc, Operand: nil, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - {Opcode: code.Jmp, Operand: 17, SourceLine: 2}, - {Opcode: code.Mload, Operand: 1, SourceLine: 5}, - {Opcode: code.Dload, Operand: 0, SourceLine: 5}, - {Opcode: code.Inc, Operand: nil, SourceLine: 5}, - }, - }, - { - name: "mod", - source: ` -gauge a -a = 3 % 1 -`, - prog: []code.Instr{ - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 0, SourceLine: 2}, - {Opcode: code.Push, Operand: int64(3), SourceLine: 2}, - {Opcode: code.Push, Operand: int64(1), SourceLine: 2}, - {Opcode: code.Imod, Operand: nil, SourceLine: 2}, - {Opcode: code.Iset, Operand: nil, SourceLine: 2}, - }, - }, - { - name: "del", source: ` -counter a by b -del a["string"] -`, - prog: []code.Instr{ - {Opcode: code.Str, Operand: 0, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Del, Operand: 1, SourceLine: 2}, - }, - }, - { - name: "del after", source: ` -counter a by b -del a["string"] after 1h -`, - prog: []code.Instr{ - {Opcode: code.Push, Operand: time.Hour, SourceLine: 2}, - {Opcode: code.Str, Operand: 0, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Expire, Operand: 1, SourceLine: 2}, - }, - }, - { - name: "types", source: ` -gauge i -gauge f -/(\d+)/ { - i = $1 -} -/(\d+\.\d+)/ { - f = $1 -} -`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 3}, - {Opcode: code.Jnm, Operand: 10, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 3}, - {Opcode: code.Mload, Operand: 0, SourceLine: 4}, - {Opcode: code.Dload, Operand: 0, SourceLine: 4}, - {Opcode: code.Push, Operand: 0, SourceLine: 4}, - {Opcode: code.Capref, Operand: 1, SourceLine: 4}, - {Opcode: code.S2i, Operand: nil, SourceLine: 4}, - {Opcode: code.Iset, Operand: nil, SourceLine: 4}, - {Opcode: code.Setmatched, 
Operand: true, SourceLine: 3}, - {Opcode: code.Match, Operand: 1, SourceLine: 6}, - {Opcode: code.Jnm, Operand: 20, SourceLine: 6}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 6}, - {Opcode: code.Mload, Operand: 1, SourceLine: 7}, - {Opcode: code.Dload, Operand: 0, SourceLine: 7}, - {Opcode: code.Push, Operand: 1, SourceLine: 7}, - {Opcode: code.Capref, Operand: 1, SourceLine: 7}, - {Opcode: code.S2f, Operand: nil, SourceLine: 7}, - {Opcode: code.Fset, Operand: nil, SourceLine: 7}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 6}, - }, - }, - - { - name: "getfilename", source: ` -getfilename() -`, - prog: []code.Instr{ - {Opcode: code.Getfilename, Operand: 0, SourceLine: 1}, - }, - }, - - { - name: "dimensioned counter", - source: `counter c by a,b,c -/(\d) (\d) (\d)/ { - c[$1,$2][$3]++ -} -`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 13, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Push, Operand: 0, SourceLine: 2}, - {Opcode: code.Capref, Operand: 1, SourceLine: 2}, - {Opcode: code.Push, Operand: 0, SourceLine: 2}, - {Opcode: code.Capref, Operand: 2, SourceLine: 2}, - {Opcode: code.Push, Operand: 0, SourceLine: 2}, - {Opcode: code.Capref, Operand: 3, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 3, SourceLine: 2}, - {Opcode: code.Inc, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "string to int", - source: `counter c -/(.*)/ { - c = int($1) -} -`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 10, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 0, SourceLine: 2}, - {Opcode: code.Push, Operand: 0, SourceLine: 2}, - {Opcode: code.Capref, Operand: 1, SourceLine: 2}, - {Opcode: code.S2i, Operand: nil, SourceLine: 2}, - {Opcode: code.Iset, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "int to float", - source: `counter c -/(\d)/ { - c = float($1) -} -`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 10, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 0, SourceLine: 2}, - {Opcode: code.Push, Operand: 0, SourceLine: 2}, - {Opcode: code.Capref, Operand: 1, SourceLine: 2}, - {Opcode: code.S2f, Operand: nil, SourceLine: 2}, - {Opcode: code.Fset, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "string to float", - source: `counter c -/(.*)/ { - c = float($1) -} -`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 10, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 0, SourceLine: 2}, - {Opcode: code.Push, Operand: 0, SourceLine: 2}, - {Opcode: code.Capref, Operand: 1, SourceLine: 2}, - {Opcode: code.S2f, Operand: nil, SourceLine: 2}, - {Opcode: code.Fset, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "float to string", - source: `counter c by a -/(\d+\.\d+)/ { - c[string($1)] ++ 
-} -`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 11, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Push, Operand: 0, SourceLine: 2}, - {Opcode: code.Capref, Operand: 1, SourceLine: 2}, - {Opcode: code.S2f, Operand: nil, SourceLine: 2}, - {Opcode: code.F2s, Operand: nil, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 1, SourceLine: 2}, - {Opcode: code.Inc, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "int to string", - source: `counter c by a -/(\d+)/ { - c[string($1)] ++ -} -`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 11, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Push, Operand: 0, SourceLine: 2}, - {Opcode: code.Capref, Operand: 1, SourceLine: 2}, - {Opcode: code.S2i, Operand: nil, SourceLine: 2}, - {Opcode: code.I2s, Operand: nil, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 2}, - {Opcode: code.Dload, Operand: 1, SourceLine: 2}, - {Opcode: code.Inc, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "nested comparisons", - source: `counter foo -/(.*)/ { - $1 == "foo" || $1 == "bar" { - foo++ - } -} -`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 31, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Push, Operand: 0, SourceLine: 2}, - {Opcode: code.Capref, Operand: 1, SourceLine: 2}, - {Opcode: code.Str, Operand: 0, SourceLine: 2}, - {Opcode: code.Scmp, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 10, SourceLine: 2}, - {Opcode: code.Push, Operand: true, SourceLine: 2}, - {Opcode: code.Jmp, Operand: 11, SourceLine: 2}, - {Opcode: code.Push, Operand: false, SourceLine: 2}, - {Opcode: code.Jm, Operand: 23, SourceLine: 2}, - {Opcode: code.Push, Operand: 0, SourceLine: 2}, - {Opcode: code.Capref, Operand: 1, SourceLine: 2}, - {Opcode: code.Str, Operand: 1, SourceLine: 2}, - {Opcode: code.Scmp, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 19, SourceLine: 2}, - {Opcode: code.Push, Operand: true, SourceLine: 2}, - {Opcode: code.Jmp, Operand: 20, SourceLine: 2}, - {Opcode: code.Push, Operand: false, SourceLine: 2}, - {Opcode: code.Jm, Operand: 23, SourceLine: 2}, - {Opcode: code.Push, Operand: false, SourceLine: 2}, - {Opcode: code.Jmp, Operand: 24, SourceLine: 2}, - {Opcode: code.Push, Operand: true, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 30, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Inc, Operand: nil, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "string concat", source: ` -counter f by s -/(.*), (.*)/ { - f[$1 + $2]++ -} -`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 12, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 1, SourceLine: 3}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 2, SourceLine: 
3}, - {Opcode: code.Cat, Operand: nil, SourceLine: 3}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 1, SourceLine: 3}, - {Opcode: code.Inc, Operand: nil, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }, - }, - { - name: "add assign float", source: ` -gauge foo -/(\d+\.\d+)/ { - foo += $1 -} -`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 13, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 1, SourceLine: 3}, - {Opcode: code.S2f, Operand: nil, SourceLine: 3}, - {Opcode: code.Fadd, Operand: nil, SourceLine: 3}, - {Opcode: code.Fset, Operand: nil, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }, - }, - { - name: "match expression", source: ` - counter foo - /(.*)/ { - $1 =~ /asdf/ { - foo++ - } - }`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 13, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 1, SourceLine: 3}, - {Opcode: code.Smatch, Operand: 1, SourceLine: 3}, - {Opcode: code.Jnm, Operand: 12, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 3}, - {Opcode: code.Mload, Operand: 0, SourceLine: 4}, - {Opcode: code.Dload, Operand: 0, SourceLine: 4}, - {Opcode: code.Inc, Operand: nil, SourceLine: 4}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }, - }, - { - name: "negative match expression", source: ` - counter foo - /(.*)/ { - $1 !~ /asdf/ { - foo++ - } - }`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 14, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 1, SourceLine: 3}, - {Opcode: code.Smatch, Operand: 1, SourceLine: 3}, - {Opcode: code.Not, Operand: nil, SourceLine: 3}, - {Opcode: code.Jnm, Operand: 13, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 3}, - {Opcode: code.Mload, Operand: 0, SourceLine: 4}, - {Opcode: code.Dload, Operand: 0, SourceLine: 4}, - {Opcode: code.Inc, Operand: nil, SourceLine: 4}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }, - }, - { - name: "capref used in def", source: ` -/(?P\d+)/ && $x > 5 { -}`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 14, SourceLine: 1}, - {Opcode: code.Push, Operand: 0, SourceLine: 1}, - {Opcode: code.Capref, Operand: 1, SourceLine: 1}, - {Opcode: code.S2i, Operand: nil, SourceLine: 1}, - {Opcode: code.Push, Operand: int64(5), SourceLine: 1}, - {Opcode: code.Icmp, Operand: 1, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 10, SourceLine: 1}, - {Opcode: code.Push, Operand: true, SourceLine: 1}, - {Opcode: code.Jmp, Operand: 11, SourceLine: 1}, - {Opcode: code.Push, Operand: false, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 14, SourceLine: 1}, - {Opcode: code.Push, Operand: true, 
SourceLine: 1}, - {Opcode: code.Jmp, Operand: 15, SourceLine: 1}, - {Opcode: code.Push, Operand: false, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 18, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }, - }, - { - name: "binop arith type conversion", source: ` -gauge var -/(?P\d+) (\d+\.\d+)/ { - var = $x + $2 -}`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 15, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 1, SourceLine: 3}, - {Opcode: code.S2i, Operand: nil, SourceLine: 3}, - {Opcode: code.I2f, Operand: nil, SourceLine: 3}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 2, SourceLine: 3}, - {Opcode: code.S2f, Operand: nil, SourceLine: 3}, - {Opcode: code.Fadd, Operand: nil, SourceLine: 3}, - {Opcode: code.Fset, Operand: nil, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }, - }, - { - name: "binop compare type conversion", source: ` -counter var -/(?P\d+) (\d+\.\d+)/ { - $x > $2 { - var++ - } -}`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 22, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 1, SourceLine: 3}, - {Opcode: code.S2i, Operand: nil, SourceLine: 3}, - {Opcode: code.I2f, Operand: nil, SourceLine: 3}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 2, SourceLine: 3}, - {Opcode: code.S2f, Operand: nil, SourceLine: 3}, - {Opcode: code.Fcmp, Operand: 1, SourceLine: 3}, - {Opcode: code.Jnm, Operand: 14, SourceLine: 3}, - {Opcode: code.Push, Operand: true, SourceLine: 3}, - {Opcode: code.Jmp, Operand: 15, SourceLine: 3}, - {Opcode: code.Push, Operand: false, SourceLine: 3}, - {Opcode: code.Jnm, Operand: 21, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 3}, - {Opcode: code.Mload, Operand: 0, SourceLine: 4}, - {Opcode: code.Dload, Operand: 0, SourceLine: 4}, - {Opcode: code.Inc, Operand: nil, SourceLine: 4}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }, - }, - { - name: "set string", source: ` -text foo -/(.*)/ { - foo = $1 -} -`, prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 9, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 1, SourceLine: 3}, - {Opcode: code.Sset, Operand: nil, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }}, - { - name: "concat to text", source: ` -text foo -/(?P.*)/ { - foo += $v -}`, - prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 12, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 
0, SourceLine: 3}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 1, SourceLine: 3}, - {Opcode: code.Cat, Operand: nil, SourceLine: 3}, - {Opcode: code.Sset, Operand: nil, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }, - }, - { - name: "decrement", source: ` -counter i -// { - i-- -}`, prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 7, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dec, Operand: nil, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }}, - {name: "capref and settime", source: ` -/(\d+)/ { - settime($1) -}`, prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 8, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Push, Operand: 0, SourceLine: 2}, - {Opcode: code.Capref, Operand: 1, SourceLine: 2}, - {Opcode: code.S2i, Operand: nil, SourceLine: 2}, - {Opcode: code.Settime, Operand: 1, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }}, - {name: "cast to self", source: ` -/(\d+)/ { -settime(int($1)) -}`, prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 8, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Push, Operand: 0, SourceLine: 2}, - {Opcode: code.Capref, Operand: 1, SourceLine: 2}, - {Opcode: code.S2i, Operand: nil, SourceLine: 2}, - {Opcode: code.Settime, Operand: 1, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }}, - {name: "stop", source: ` -stop -`, prog: []code.Instr{ - {Opcode: code.Stop, Operand: nil, SourceLine: 1}, - }}, - {name: "stop inside", source: ` -// { -stop -} -`, prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 1}, - {Opcode: code.Jnm, Operand: 5, SourceLine: 1}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 1}, - {Opcode: code.Stop, Operand: nil, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 1}, - }}, - - { - name: "nested decorators", - source: `def b { - def b { - next - } - @b { - next - } -} -@b { -}`, prog: nil, - }, - {name: "negative numbers in capture groups", source: ` -gauge foo -/(?P-?\d+)/ { -foo += $value_ms / 1000.0 -}`, prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, Operand: 16, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 1, SourceLine: 3}, - {Opcode: code.S2i, Operand: nil, SourceLine: 3}, - {Opcode: code.I2f, Operand: nil, SourceLine: 3}, - {Opcode: code.Push, Operand: 1000.0, SourceLine: 3}, - {Opcode: code.Fdiv, Operand: nil, SourceLine: 3}, - {Opcode: code.Fadd, Operand: nil, SourceLine: 3}, - {Opcode: code.Fset, Operand: nil, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }}, - {name: "substitution", source: ` -gauge foo -/(\d+,\d)/ { - foo = int(subst(",", "", $1)) -}`, prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 2}, - {Opcode: code.Jnm, 
Operand: 13, SourceLine: 2}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 2}, - {Opcode: code.Mload, Operand: 0, SourceLine: 3}, - {Opcode: code.Dload, Operand: 0, SourceLine: 3}, - {Opcode: code.Str, Operand: 0, SourceLine: 3}, - {Opcode: code.Str, Operand: 1, SourceLine: 3}, - {Opcode: code.Push, Operand: 0, SourceLine: 3}, - {Opcode: code.Capref, Operand: 1, SourceLine: 3}, - {Opcode: code.Subst, Operand: 3, SourceLine: 3}, - {Opcode: code.S2i, Operand: nil, SourceLine: 3}, - {Opcode: code.Iset, Operand: nil, SourceLine: 3}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 2}, - }}, - {name: "const term as pattern", source: ` -const A /n/ -A && 1 { -} -`, prog: []code.Instr{ - {Opcode: code.Match, Operand: 0, SourceLine: 0}, - {Opcode: code.Jnm, Operand: 6, SourceLine: 0}, - {Opcode: code.Push, Operand: int64(1), SourceLine: 2}, - {Opcode: code.Jnm, Operand: 6, SourceLine: 0}, - {Opcode: code.Push, Operand: true, SourceLine: 0}, - {Opcode: code.Jmp, Operand: 7, SourceLine: 0}, - {Opcode: code.Push, Operand: false, SourceLine: 0}, - {Opcode: code.Jnm, Operand: 10, SourceLine: 0}, - {Opcode: code.Setmatched, Operand: false, SourceLine: 0}, - {Opcode: code.Setmatched, Operand: true, SourceLine: 0}, - }}, -} - -func TestCodeGenFromSource(t *testing.T) { - for _, tc := range testCodeGenPrograms { - tc := tc - t.Run(tc.name, func(t *testing.T) { - ast, err := parser.Parse(tc.name, strings.NewReader(tc.source)) - testutil.FatalIfErr(t, err) - ast, err = checker.Check(ast, 0, 0) - if *codegenTestDebug { - s := parser.Sexp{} - s.EmitTypes = true - t.Log("Typed AST:\n" + s.Dump(ast)) - } - testutil.FatalIfErr(t, err) - obj, err := codegen.CodeGen(tc.name, ast) - testutil.FatalIfErr(t, err) - - testutil.ExpectNoDiff(t, tc.prog, obj.Program, testutil.AllowUnexported(code.Instr{})) - }) - } -} - -var testCodeGenASTs = []struct { - name string - ast ast.Node // partial AST to be converted to bytecode - prog []code.Instr // expected bytecode -}{ - { - name: "subst", - ast: &ast.BuiltinExpr{ - Name: "subst", - Args: &ast.ExprList{ - Children: []ast.Node{ - &ast.StringLit{ - Text: "old", - }, - &ast.StringLit{ - Text: "new", - }, - &ast.StringLit{ - Text: "value", - }, - }, - }, - }, - prog: []code.Instr{ - {Opcode: code.Str, Operand: 0, SourceLine: 0}, - {Opcode: code.Str, Operand: 1, SourceLine: 0}, - {Opcode: code.Str, Operand: 2, SourceLine: 0}, - {Opcode: code.Subst, Operand: 3, SourceLine: 0}, - }, - }, - { - name: "regexp subst", - ast: &ast.BuiltinExpr{ - Name: "subst", - Args: &ast.ExprList{ - Children: []ast.Node{ - &ast.PatternExpr{ - Pattern: "a+", - Expr: &ast.PatternLit{ - Pattern: "a+", - }, - }, - &ast.StringLit{ - Text: "b", - }, - &ast.StringLit{ - Text: "aaaaaa", - }, - }, - }, - }, - prog: []code.Instr{ - {Opcode: code.Str, Operand: 0, SourceLine: 0}, - {Opcode: code.Str, Operand: 1, SourceLine: 0}, - {Opcode: code.Push, Operand: 0, SourceLine: 0}, - {Opcode: code.Rsubst, Operand: 3, SourceLine: 0}, - }, - }, -} - -func TestCodeGenFromAST(t *testing.T) { - for _, tc := range testCodeGenASTs { - tc := tc - t.Run(tc.name, func(t *testing.T) { - obj, err := codegen.CodeGen(tc.name, tc.ast) - testutil.FatalIfErr(t, err) - testutil.ExpectNoDiff(t, tc.prog, obj.Program, testutil.AllowUnexported(code.Instr{})) - }) - } -} diff --git a/inputs/mtail/internal/runtime/compiler/compiler_test.go b/inputs/mtail/internal/runtime/compiler/compiler_test.go deleted file mode 100644 index 30562fa4..00000000 --- a/inputs/mtail/internal/runtime/compiler/compiler_test.go +++ /dev/null @@ 
-1,51 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package compiler_test - -import ( - "strings" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func makeCompiler(t *testing.T) *compiler.Compiler { - t.Helper() - c, err := compiler.New(compiler.EmitAst(), compiler.EmitAstTypes()) - testutil.FatalIfErr(t, err) - return c -} - -func TestCompileParserError(t *testing.T) { - c := makeCompiler(t) - r := strings.NewReader("bad program") - _, err := c.Compile("test", r) - if err == nil { - t.Errorf("expected error, got nil") - } -} - -func TestCompileCheckerError(t *testing.T) { - c := makeCompiler(t) - r := strings.NewReader(`// { -i++ -}`) - _, err := c.Compile("test", r) - if err == nil { - t.Error("expected error, got nil") - } -} - -func TestCompileCodegen(t *testing.T) { - c := makeCompiler(t) - r := strings.NewReader(`counter i -// { - i++ -}`) - _, err := c.Compile("test", r) - if err != nil { - t.Error(err) - } -} diff --git a/inputs/mtail/internal/runtime/compiler/errors/errors.go b/inputs/mtail/internal/runtime/compiler/errors/errors.go index 409c58a4..76882472 100644 --- a/inputs/mtail/internal/runtime/compiler/errors/errors.go +++ b/inputs/mtail/internal/runtime/compiler/errors/errors.go @@ -26,7 +26,7 @@ type ErrorList []*compileError // Add appends an error at a position to the list of errors. func (p *ErrorList) Add(pos *position.Position, msg string) { if pos == nil { - pos = &position.Position{Line: -1, Startcol: -1, Endcol: -1} + pos = &position.Position{"", -1, -1, -1} } *p = append(*p, &compileError{*pos, msg}) } diff --git a/inputs/mtail/internal/runtime/compiler/errors/errors_test.go b/inputs/mtail/internal/runtime/compiler/errors/errors_test.go deleted file mode 100644 index 234d1a75..00000000 --- a/inputs/mtail/internal/runtime/compiler/errors/errors_test.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package errors_test - -import ( - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/errors" -) - -func TestNilErrorPosition(t *testing.T) { - e := errors.ErrorList{} - e.Add(nil, "error") - r := e.Error() - expected := ":0:0: error" - if r != expected { - t.Errorf("want %q, got %q", expected, r) - } -} diff --git a/inputs/mtail/internal/runtime/compiler/opt/opt_test.go b/inputs/mtail/internal/runtime/compiler/opt/opt_test.go deleted file mode 100644 index 80d8ebf8..00000000 --- a/inputs/mtail/internal/runtime/compiler/opt/opt_test.go +++ /dev/null @@ -1,502 +0,0 @@ -// Copyright 2021 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
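For reference while reading the opt_test.go removal that starts here: the deleted file exercised the optimiser's constant folding through opt.Optimise. Below is a minimal sketch of the same call, with field names, constants, and import paths taken from the deleted tests themselves; it is illustrative only and builds only inside this module, since the packages live under internal/.

package main

import (
	"fmt"

	"flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/ast"
	"flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/opt"
	"flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/parser"
)

func main() {
	// Fold the constant expression 1 + 2 into a single literal node.
	folded, err := opt.Optimise(&ast.BinaryExpr{
		LHS: &ast.IntLit{I: 1},
		RHS: &ast.IntLit{I: 2},
		Op:  parser.PLUS,
	})
	if err != nil {
		panic(err)
	}
	// The deleted "int add" case expected this to be &ast.IntLit{I: 3}.
	fmt.Printf("%#v\n", folded)
}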
- -package opt_test - -import ( - "math" - "math/rand" - "reflect" - "strings" - "testing" - "testing/quick" - - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/ast" - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/opt" - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/parser" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" - "github.com/google/go-cmp/cmp" -) - -var optimiserTests = []struct { - name string - ast ast.Node - want ast.Node -}{ - { - "int add", - &ast.BinaryExpr{ - LHS: &ast.IntLit{I: 1}, - RHS: &ast.IntLit{I: 2}, - Op: parser.PLUS, - }, - &ast.IntLit{I: 3}, - }, - { - "float mul", - &ast.BinaryExpr{ - LHS: &ast.FloatLit{F: 2}, - RHS: &ast.FloatLit{F: 3}, - Op: parser.MUL, - }, - &ast.FloatLit{F: 6}, - }, - { - "int float pow", - &ast.BinaryExpr{ - LHS: &ast.IntLit{I: 2}, - RHS: &ast.FloatLit{F: 3}, - Op: parser.POW, - }, - &ast.FloatLit{F: 8}, - }, - { - "float int mod", - &ast.BinaryExpr{ - LHS: &ast.FloatLit{F: 3}, - RHS: &ast.IntLit{I: 2}, - Op: parser.MOD, - }, - &ast.FloatLit{F: 1}, - }, - { - "nested ops", - &ast.BinaryExpr{ - LHS: &ast.BinaryExpr{ - LHS: &ast.IntLit{I: 2}, - RHS: &ast.IntLit{I: 4}, - Op: parser.POW, - }, - RHS: &ast.IntLit{I: 1}, - Op: parser.MINUS, - }, - &ast.IntLit{I: 15}, - }, -} - -func TestOptimiser(t *testing.T) { - for _, tc := range optimiserTests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - got, err := opt.Optimise(tc.ast) - testutil.FatalIfErr(t, err) - testutil.ExpectNoDiff(t, tc.want, got) - }) - } -} - -var optimiserErrorTests = []struct { - name string - ast ast.Node - want []string -}{ - { - "integer divide by zero", - &ast.BinaryExpr{ - LHS: &ast.IntLit{I: 4}, - RHS: &ast.IntLit{I: 0}, - Op: parser.DIV, - }, - []string{":1:1: divide by zero"}, - }, - { - "float divide by zero", - &ast.BinaryExpr{ - LHS: &ast.FloatLit{F: 4}, - RHS: &ast.FloatLit{F: 0}, - Op: parser.DIV, - }, - []string{":1:1: divide by zero"}, - }, - { - "integer mod by zero", - &ast.BinaryExpr{ - LHS: &ast.IntLit{I: 4}, - RHS: &ast.IntLit{I: 0}, - Op: parser.MOD, - }, - []string{":1:1: mod by zero"}, - }, - { - "float mod by zero", - &ast.BinaryExpr{ - LHS: &ast.FloatLit{F: 4}, - RHS: &ast.FloatLit{F: 0}, - Op: parser.MOD, - }, - []string{":1:1: mod by zero"}, - }, -} - -func TestOptimiserErrors(t *testing.T) { - for _, tc := range optimiserErrorTests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - _, err := opt.Optimise(tc.ast) - testutil.ExpectNoDiff(t, tc.want, strings.Split(err.Error(), "\n")) - }) - } -} - -var commOps = map[int]string{parser.PLUS: "add", parser.MUL: "mul"} - -func TestConstFoldQuickIntComm(t *testing.T) { - for op, name := range commOps { - op := op - t.Run(name, func(t *testing.T) { - if err := quick.Check(func(x, y int32) bool { - a, aErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.IntLit{I: int64(x)}, - RHS: &ast.IntLit{I: int64(y)}, - Op: op, - }) - if aErr != nil { - t.Fatal(aErr) - } - b, bErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.IntLit{I: int64(y)}, - RHS: &ast.IntLit{I: int64(x)}, - Op: op, - }) - if bErr != nil { - t.Fatal(bErr) - } - return cmp.Equal(a, b) - }, nil); err != nil { - t.Error(err) - } - }) - } -} - -func TestConstFoldQuickFloatComm(t *testing.T) { - for op, name := range commOps { - op := op - t.Run(name, func(t *testing.T) { - if err := quick.Check(func(x, y float32) bool { - a, aErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.FloatLit{F: float64(x)}, - RHS: &ast.FloatLit{F: float64(y)}, - Op: op, - }) - if aErr != nil { - t.Fatal(aErr) - 
} - b, bErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.FloatLit{F: float64(y)}, - RHS: &ast.FloatLit{F: float64(x)}, - Op: op, - }) - if bErr != nil { - t.Fatal(bErr) - } - return cmp.Equal(a, b) - }, nil); err != nil { - t.Error(err) - } - }) - } -} - -func TestConstFoldQuickMixedComm(t *testing.T) { - for op, name := range commOps { - op := op - t.Run(name, func(t *testing.T) { - if err := quick.Check(func(x int32, y float32) bool { - a, aErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.IntLit{I: int64(x)}, - RHS: &ast.FloatLit{F: float64(y)}, - Op: op, - }) - if aErr != nil { - t.Fatal(aErr) - } - b, bErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.FloatLit{F: float64(y)}, - RHS: &ast.IntLit{I: int64(x)}, - Op: op, - }) - if bErr != nil { - t.Fatal(bErr) - } - return cmp.Equal(a, b) - }, nil); err != nil { - t.Error(err) - } - }) - } -} - -func TestConstFoldQuickIntAddSub(t *testing.T) { - if err := quick.Check(func(x, y int32) bool { - a, aErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.IntLit{I: int64(x)}, - RHS: &ast.IntLit{I: int64(y)}, - Op: parser.MINUS, - }) - if aErr != nil { - t.Fatal(aErr) - } - b, bErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.BinaryExpr{ - LHS: &ast.IntLit{I: 0}, - RHS: &ast.IntLit{I: int64(y)}, - Op: parser.MINUS, - }, - RHS: &ast.IntLit{I: int64(x)}, - Op: parser.PLUS, - }) - if bErr != nil { - t.Fatal(bErr) - } - return cmp.Equal(a, b) - }, nil); err != nil { - t.Error(err) - } -} - -func TestConstFoldQuickFloatAddSub(t *testing.T) { - if err := quick.Check(func(x, y float32) bool { - a, aErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.FloatLit{F: float64(x)}, - RHS: &ast.FloatLit{F: float64(y)}, - Op: parser.MINUS, - }) - if aErr != nil { - t.Fatal(aErr) - } - b, bErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.BinaryExpr{ - LHS: &ast.FloatLit{F: 0}, - RHS: &ast.FloatLit{F: float64(y)}, - Op: parser.MINUS, - }, - RHS: &ast.FloatLit{F: float64(x)}, - Op: parser.PLUS, - }) - if bErr != nil { - t.Fatal(bErr) - } - return cmp.Equal(a, b) - }, nil); err != nil { - t.Error(err) - } -} - -func TestConstFoldQuickMixedAddSub(t *testing.T) { - if err := quick.Check(func(x int32, y float32) bool { - a, aErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.IntLit{I: int64(x)}, - RHS: &ast.FloatLit{F: float64(y)}, - Op: parser.MINUS, - }) - if aErr != nil { - t.Fatal(aErr) - } - b, bErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.BinaryExpr{ - LHS: &ast.FloatLit{F: 0}, - RHS: &ast.FloatLit{F: float64(y)}, - Op: parser.MINUS, - }, - RHS: &ast.IntLit{I: int64(x)}, - Op: parser.PLUS, - }) - if bErr != nil { - t.Fatal(bErr) - } - return cmp.Equal(a, b) - }, nil); err != nil { - t.Error(err) - } -} - -var cmpFloat = cmp.Comparer(func(x, y float64) bool { - delta := math.Abs(x - y) - mean := math.Abs(x+y) / 2.0 - return delta/mean < 0.00001 -}) - -func TestConstFoldQuickFloatMulDiv(t *testing.T) { - if err := quick.Check(func(x, y float32) bool { - a, aErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.FloatLit{F: float64(x)}, - RHS: &ast.FloatLit{F: float64(y)}, - Op: parser.DIV, - }) - if aErr != nil { - t.Fatal(aErr) - } - b, bErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.BinaryExpr{ - LHS: &ast.FloatLit{F: 1}, - RHS: &ast.FloatLit{F: float64(y)}, - Op: parser.DIV, - }, - RHS: &ast.FloatLit{F: float64(x)}, - Op: parser.MUL, - }) - if bErr != nil { - t.Fatal(bErr) - } - return cmp.Equal(a, b, cmpFloat) - }, nil); err != nil { - t.Error(err) - } -} - -func positiveInt(r *rand.Rand) int32 { - v := r.Int31() - if v == 0 { - return 1 - } - return v -} - -func 
TestConstFoldQuickIntModAddition(t *testing.T) { - values := func(args []reflect.Value, r *rand.Rand) { - args[0] = reflect.ValueOf(positiveInt(r)) - args[1] = reflect.ValueOf(positiveInt(r)) - args[2] = reflect.ValueOf(positiveInt(r)) - } - cfg := &quick.Config{ - Values: values, - } - if err := quick.Check(func(x, y, z int32) bool { - a, aErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.BinaryExpr{ - LHS: &ast.IntLit{I: int64(x)}, - RHS: &ast.IntLit{I: int64(y)}, - Op: parser.PLUS, - }, - RHS: &ast.IntLit{I: int64(z)}, - Op: parser.MOD, - }) - if aErr != nil { - t.Fatal(aErr) - } - b, bErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.BinaryExpr{ - LHS: &ast.BinaryExpr{ - LHS: &ast.IntLit{I: int64(x)}, - RHS: &ast.IntLit{I: int64(z)}, - Op: parser.MOD, - }, - RHS: &ast.BinaryExpr{ - LHS: &ast.IntLit{I: int64(y)}, - RHS: &ast.IntLit{I: int64(z)}, - Op: parser.MOD, - }, - Op: parser.PLUS, - }, - RHS: &ast.IntLit{I: int64(z)}, - Op: parser.MOD, - }) - if bErr != nil { - t.Fatal(bErr) - } - return cmp.Equal(a, b) - }, cfg); err != nil { - t.Error(err) - } -} - -func positiveFloat(r *rand.Rand) float32 { - v := r.Float32() - if v == 0.0 { - return 1.0 - } - return v -} - -func TestConstFoldQuickFloatModAddition(t *testing.T) { - values := func(args []reflect.Value, r *rand.Rand) { - args[0] = reflect.ValueOf(positiveFloat(r)) - args[1] = reflect.ValueOf(positiveFloat(r)) - args[2] = reflect.ValueOf(positiveFloat(r)) - } - cfg := &quick.Config{ - Values: values, - } - if err := quick.Check(func(x, y, z float32) bool { - a, aErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.BinaryExpr{ - LHS: &ast.FloatLit{F: float64(x)}, - RHS: &ast.FloatLit{F: float64(y)}, - Op: parser.PLUS, - }, - RHS: &ast.FloatLit{F: float64(z)}, - Op: parser.MOD, - }) - if aErr != nil { - t.Fatal(aErr) - } - b, bErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.BinaryExpr{ - LHS: &ast.BinaryExpr{ - LHS: &ast.FloatLit{F: float64(x)}, - RHS: &ast.FloatLit{F: float64(z)}, - Op: parser.MOD, - }, - RHS: &ast.BinaryExpr{ - LHS: &ast.FloatLit{F: float64(y)}, - RHS: &ast.FloatLit{F: float64(z)}, - Op: parser.MOD, - }, - Op: parser.PLUS, - }, - RHS: &ast.FloatLit{F: float64(z)}, - Op: parser.MOD, - }) - if bErr != nil { - t.Fatal(bErr) - } - return cmp.Equal(a, b) - }, cfg); err != nil { - t.Error(err) - } -} - -func TestConstFoldQuickMixedPowProduct(t *testing.T) { - values := func(args []reflect.Value, r *rand.Rand) { - args[0] = reflect.ValueOf(positiveFloat(r)) - args[1] = reflect.ValueOf(positiveInt(r)) - args[2] = reflect.ValueOf(positiveInt(r)) - } - cfg := &quick.Config{ - Values: values, - } - if err := quick.Check(func(x float32, y, z int32) bool { - a, aErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.FloatLit{F: float64(x)}, - RHS: &ast.BinaryExpr{ - LHS: &ast.IntLit{I: int64(y)}, - RHS: &ast.IntLit{I: int64(z)}, - Op: parser.PLUS, - }, - Op: parser.POW, - }) - if aErr != nil { - t.Fatal(aErr) - } - b, bErr := opt.Optimise(&ast.BinaryExpr{ - LHS: &ast.BinaryExpr{ - LHS: &ast.FloatLit{F: float64(x)}, - RHS: &ast.IntLit{I: int64(y)}, - Op: parser.POW, - }, - RHS: &ast.BinaryExpr{ - LHS: &ast.FloatLit{F: float64(x)}, - RHS: &ast.IntLit{I: int64(z)}, - Op: parser.POW, - }, - Op: parser.MUL, - }) - if bErr != nil { - t.Fatal(bErr) - } - return cmp.Equal(a, b) - }, cfg); err != nil { - t.Error(err) - } -} diff --git a/inputs/mtail/internal/runtime/compiler/parser/lexer_test.go b/inputs/mtail/internal/runtime/compiler/parser/lexer_test.go deleted file mode 100644 index d07368e2..00000000 --- 
a/inputs/mtail/internal/runtime/compiler/parser/lexer_test.go +++ /dev/null @@ -1,303 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package parser - -import ( - "strings" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/position" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -type lexerTest struct { - name string - input string - tokens []Token -} - -var lexerTests = []lexerTest{ - {name: "empty", tokens: []Token{ - {Kind: EOF, Pos: position.Position{Filename: "empty"}}, - }}, - {name: "spaces", input: " \t", tokens: []Token{ - {Kind: EOF, Pos: position.Position{Filename: "spaces", Startcol: 2, Endcol: 2}}, - }}, - {name: "newlines", input: "\n", tokens: []Token{ - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "newlines", Line: 1, Endcol: -1}}, - {Kind: EOF, Pos: position.Position{Filename: "newlines", Line: 1}}, - }}, - {name: "comment", input: "# comment", tokens: []Token{ - {Kind: EOF, Pos: position.Position{Filename: "comment", Startcol: 9, Endcol: 9}}, - }}, - {name: "comment not at col 1", input: " # comment", tokens: []Token{ - {Kind: EOF, Pos: position.Position{Filename: "comment not at col 1", Startcol: 11, Endcol: 11}}, - }}, - {name: "punctuation", input: "{}()[],", tokens: []Token{ - {Kind: LCURLY, Spelling: "{", Pos: position.Position{Filename: "punctuation"}}, - {Kind: RCURLY, Spelling: "}", Pos: position.Position{Filename: "punctuation", Startcol: 1, Endcol: 1}}, - {Kind: LPAREN, Spelling: "(", Pos: position.Position{Filename: "punctuation", Startcol: 2, Endcol: 2}}, - {Kind: RPAREN, Spelling: ")", Pos: position.Position{Filename: "punctuation", Startcol: 3, Endcol: 3}}, - {Kind: LSQUARE, Spelling: "[", Pos: position.Position{Filename: "punctuation", Startcol: 4, Endcol: 4}}, - {Kind: RSQUARE, Spelling: "]", Pos: position.Position{Filename: "punctuation", Startcol: 5, Endcol: 5}}, - {Kind: COMMA, Spelling: ",", Pos: position.Position{Filename: "punctuation", Startcol: 6, Endcol: 6}}, - {Kind: EOF, Pos: position.Position{Filename: "punctuation", Startcol: 7, Endcol: 7}}, - }}, - {name: "operators", input: "- + = ++ += < > <= >= == != * / << >> & | ^ ~ ** % || && =~ !~ --", tokens: []Token{ - {Kind: MINUS, Spelling: "-", Pos: position.Position{Filename: "operators"}}, - {Kind: PLUS, Spelling: "+", Pos: position.Position{Filename: "operators", Startcol: 2, Endcol: 2}}, - {Kind: ASSIGN, Spelling: "=", Pos: position.Position{Filename: "operators", Startcol: 4, Endcol: 4}}, - {Kind: INC, Spelling: "++", Pos: position.Position{Filename: "operators", Startcol: 6, Endcol: 7}}, - {Kind: ADD_ASSIGN, Spelling: "+=", Pos: position.Position{Filename: "operators", Startcol: 9, Endcol: 10}}, - {Kind: LT, Spelling: "<", Pos: position.Position{Filename: "operators", Startcol: 12, Endcol: 12}}, - {Kind: GT, Spelling: ">", Pos: position.Position{Filename: "operators", Startcol: 14, Endcol: 14}}, - {Kind: LE, Spelling: "<=", Pos: position.Position{Filename: "operators", Startcol: 16, Endcol: 17}}, - {Kind: GE, Spelling: ">=", Pos: position.Position{Filename: "operators", Startcol: 19, Endcol: 20}}, - {Kind: EQ, Spelling: "==", Pos: position.Position{Filename: "operators", Startcol: 22, Endcol: 23}}, - {Kind: NE, Spelling: "!=", Pos: position.Position{Filename: "operators", Startcol: 25, Endcol: 26}}, - {Kind: MUL, Spelling: "*", Pos: position.Position{Filename: "operators", Startcol: 28, Endcol: 28}}, - {Kind: DIV, Spelling: "/", Pos: position.Position{Filename: 
"operators", Startcol: 30, Endcol: 30}}, - {Kind: SHL, Spelling: "<<", Pos: position.Position{Filename: "operators", Startcol: 32, Endcol: 33}}, - {Kind: SHR, Spelling: ">>", Pos: position.Position{Filename: "operators", Startcol: 35, Endcol: 36}}, - {Kind: BITAND, Spelling: "&", Pos: position.Position{Filename: "operators", Startcol: 38, Endcol: 38}}, - {Kind: BITOR, Spelling: "|", Pos: position.Position{Filename: "operators", Startcol: 40, Endcol: 40}}, - {Kind: XOR, Spelling: "^", Pos: position.Position{Filename: "operators", Startcol: 42, Endcol: 42}}, - {Kind: NOT, Spelling: "~", Pos: position.Position{Filename: "operators", Startcol: 44, Endcol: 44}}, - {Kind: POW, Spelling: "**", Pos: position.Position{Filename: "operators", Startcol: 46, Endcol: 47}}, - {Kind: MOD, Spelling: "%", Pos: position.Position{Filename: "operators", Startcol: 49, Endcol: 49}}, - {Kind: OR, Spelling: "||", Pos: position.Position{Filename: "operators", Startcol: 51, Endcol: 52}}, - {Kind: AND, Spelling: "&&", Pos: position.Position{Filename: "operators", Startcol: 54, Endcol: 55}}, - {Kind: MATCH, Spelling: "=~", Pos: position.Position{Filename: "operators", Startcol: 57, Endcol: 58}}, - {Kind: NOT_MATCH, Spelling: "!~", Pos: position.Position{Filename: "operators", Startcol: 60, Endcol: 61}}, - {Kind: DEC, Spelling: "--", Pos: position.Position{Filename: "operators", Startcol: 63, Endcol: 64}}, - {Kind: EOF, Pos: position.Position{Filename: "operators", Startcol: 65, Endcol: 65}}, - }}, - { - name: "keywords", - input: "counter\ngauge\nas\nby\nhidden\ndef\nnext\nconst\ntimer\notherwise\nelse\ndel\ntext\nafter\nstop\nhistogram\nbuckets\n", - tokens: []Token{ - {Kind: COUNTER, Spelling: "counter", Pos: position.Position{Filename: "keywords", Endcol: 6}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 1, Startcol: 7, Endcol: -1}}, - {Kind: GAUGE, Spelling: "gauge", Pos: position.Position{Filename: "keywords", Line: 1, Endcol: 4}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 2, Startcol: 5, Endcol: -1}}, - {Kind: AS, Spelling: "as", Pos: position.Position{Filename: "keywords", Line: 2, Endcol: 1}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 3, Startcol: 2, Endcol: -1}}, - {Kind: BY, Spelling: "by", Pos: position.Position{Filename: "keywords", Line: 3, Endcol: 1}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 4, Startcol: 2, Endcol: -1}}, - {Kind: HIDDEN, Spelling: "hidden", Pos: position.Position{Filename: "keywords", Line: 4, Endcol: 5}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 5, Startcol: 6, Endcol: -1}}, - {Kind: DEF, Spelling: "def", Pos: position.Position{Filename: "keywords", Line: 5, Endcol: 2}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 6, Startcol: 3, Endcol: -1}}, - {Kind: NEXT, Spelling: "next", Pos: position.Position{Filename: "keywords", Line: 6, Endcol: 3}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 7, Startcol: 4, Endcol: -1}}, - {Kind: CONST, Spelling: "const", Pos: position.Position{Filename: "keywords", Line: 7, Endcol: 4}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 8, Startcol: 5, Endcol: -1}}, - {Kind: TIMER, Spelling: "timer", Pos: position.Position{Filename: "keywords", Line: 8, Endcol: 4}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 9, Startcol: 5, Endcol: -1}}, 
- {Kind: OTHERWISE, Spelling: "otherwise", Pos: position.Position{Filename: "keywords", Line: 9, Endcol: 8}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 10, Startcol: 9, Endcol: -1}}, - {Kind: ELSE, Spelling: "else", Pos: position.Position{Filename: "keywords", Line: 10, Endcol: 3}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 11, Startcol: 4, Endcol: -1}}, - {Kind: DEL, Spelling: "del", Pos: position.Position{Filename: "keywords", Line: 11, Endcol: 2}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 12, Startcol: 3, Endcol: -1}}, - {Kind: TEXT, Spelling: "text", Pos: position.Position{Filename: "keywords", Line: 12, Endcol: 3}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 13, Startcol: 4, Endcol: -1}}, - {Kind: AFTER, Spelling: "after", Pos: position.Position{Filename: "keywords", Line: 13, Endcol: 4}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 14, Startcol: 5, Endcol: -1}}, - {Kind: STOP, Spelling: "stop", Pos: position.Position{Filename: "keywords", Line: 14, Endcol: 3}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 15, Startcol: 4, Endcol: -1}}, - {Kind: HISTOGRAM, Spelling: "histogram", Pos: position.Position{Filename: "keywords", Line: 15, Endcol: 8}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 16, Startcol: 9, Endcol: -1}}, - {Kind: BUCKETS, Spelling: "buckets", Pos: position.Position{Filename: "keywords", Line: 16, Endcol: 6}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "keywords", Line: 17, Startcol: 7, Endcol: -1}}, - {Kind: EOF, Pos: position.Position{Filename: "keywords", Line: 17}}, - }, - }, - { - name: "builtins", - input: "strptime\ntimestamp\ntolower\nlen\nstrtol\nsettime\ngetfilename\nint\nbool\nfloat\nstring\nsubst\n", - tokens: []Token{ - {Kind: BUILTIN, Spelling: "strptime", Pos: position.Position{Filename: "builtins", Endcol: 7}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "builtins", Line: 1, Startcol: 8, Endcol: -1}}, - {Kind: BUILTIN, Spelling: "timestamp", Pos: position.Position{Filename: "builtins", Line: 1, Endcol: 8}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "builtins", Line: 2, Startcol: 9, Endcol: -1}}, - {Kind: BUILTIN, Spelling: "tolower", Pos: position.Position{Filename: "builtins", Line: 2, Endcol: 6}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "builtins", Line: 3, Startcol: 7, Endcol: -1}}, - {Kind: BUILTIN, Spelling: "len", Pos: position.Position{Filename: "builtins", Line: 3, Endcol: 2}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "builtins", Line: 4, Startcol: 3, Endcol: -1}}, - {Kind: BUILTIN, Spelling: "strtol", Pos: position.Position{Filename: "builtins", Line: 4, Endcol: 5}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "builtins", Line: 5, Startcol: 6, Endcol: -1}}, - {Kind: BUILTIN, Spelling: "settime", Pos: position.Position{Filename: "builtins", Line: 5, Endcol: 6}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "builtins", Line: 6, Startcol: 7, Endcol: -1}}, - {Kind: BUILTIN, Spelling: "getfilename", Pos: position.Position{Filename: "builtins", Line: 6, Endcol: 10}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "builtins", Line: 7, Startcol: 11, Endcol: -1}}, - {Kind: BUILTIN, Spelling: "int", Pos: position.Position{Filename: "builtins", Line: 7, 
Endcol: 2}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "builtins", Line: 8, Startcol: 3, Endcol: -1}}, - {Kind: BUILTIN, Spelling: "bool", Pos: position.Position{Filename: "builtins", Line: 8, Endcol: 3}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "builtins", Line: 9, Startcol: 4, Endcol: -1}}, - {Kind: BUILTIN, Spelling: "float", Pos: position.Position{Filename: "builtins", Line: 9, Endcol: 4}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "builtins", Line: 10, Startcol: 5, Endcol: -1}}, - {Kind: BUILTIN, Spelling: "string", Pos: position.Position{Filename: "builtins", Line: 10, Endcol: 5}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "builtins", Line: 11, Startcol: 6, Endcol: -1}}, - {Kind: BUILTIN, Spelling: "subst", Pos: position.Position{Filename: "builtins", Line: 11, Endcol: 4}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "builtins", Line: 12, Startcol: 5, Endcol: -1}}, - {Kind: EOF, Pos: position.Position{Filename: "builtins", Line: 12}}, - }, - }, - {name: "numbers", input: "1 23 3.14 1.61.1 -1 -1.0 1h 0d 3d -1.5h 15m 24h0m0s 1e3 1e-3 .11 123.456e7", tokens: []Token{ - {Kind: INTLITERAL, Spelling: "1", Pos: position.Position{Filename: "numbers"}}, - {Kind: INTLITERAL, Spelling: "23", Pos: position.Position{Filename: "numbers", Startcol: 2, Endcol: 3}}, - {Kind: FLOATLITERAL, Spelling: "3.14", Pos: position.Position{Filename: "numbers", Startcol: 5, Endcol: 8}}, - {Kind: FLOATLITERAL, Spelling: "1.61", Pos: position.Position{Filename: "numbers", Startcol: 10, Endcol: 13}}, - {Kind: FLOATLITERAL, Spelling: ".1", Pos: position.Position{Filename: "numbers", Startcol: 14, Endcol: 15}}, - {Kind: INTLITERAL, Spelling: "-1", Pos: position.Position{Filename: "numbers", Startcol: 17, Endcol: 18}}, - {Kind: FLOATLITERAL, Spelling: "-1.0", Pos: position.Position{Filename: "numbers", Startcol: 20, Endcol: 23}}, - {Kind: DURATIONLITERAL, Spelling: "1h", Pos: position.Position{Filename: "numbers", Startcol: 25, Endcol: 26}}, - {Kind: DURATIONLITERAL, Spelling: "0d", Pos: position.Position{Filename: "numbers", Startcol: 28, Endcol: 29}}, - {Kind: DURATIONLITERAL, Spelling: "3d", Pos: position.Position{Filename: "numbers", Startcol: 31, Endcol: 32}}, - {Kind: DURATIONLITERAL, Spelling: "-1.5h", Pos: position.Position{Filename: "numbers", Startcol: 34, Endcol: 38}}, - {Kind: DURATIONLITERAL, Spelling: "15m", Pos: position.Position{Filename: "numbers", Startcol: 40, Endcol: 42}}, - {Kind: DURATIONLITERAL, Spelling: "24h0m0s", Pos: position.Position{Filename: "numbers", Startcol: 44, Endcol: 50}}, - {Kind: FLOATLITERAL, Spelling: "1e3", Pos: position.Position{Filename: "numbers", Startcol: 52, Endcol: 54}}, - {Kind: FLOATLITERAL, Spelling: "1e-3", Pos: position.Position{Filename: "numbers", Startcol: 56, Endcol: 59}}, - {Kind: FLOATLITERAL, Spelling: ".11", Pos: position.Position{Filename: "numbers", Startcol: 61, Endcol: 63}}, - {Kind: FLOATLITERAL, Spelling: "123.456e7", Pos: position.Position{Filename: "numbers", Startcol: 65, Endcol: 73}}, - {Kind: EOF, Pos: position.Position{Filename: "numbers", Startcol: 74, Endcol: 74}}, - }}, - {name: "identifier", input: "a be foo\nquux lines_total", tokens: []Token{ - {Kind: ID, Spelling: "a", Pos: position.Position{Filename: "identifier"}}, - {Kind: ID, Spelling: "be", Pos: position.Position{Filename: "identifier", Startcol: 2, Endcol: 3}}, - {Kind: ID, Spelling: "foo", Pos: position.Position{Filename: "identifier", Startcol: 5, Endcol: 7}}, - {Kind: NL, Spelling: 
"\n", Pos: position.Position{Filename: "identifier", Line: 1, Startcol: 8, Endcol: -1}}, - {Kind: ID, Spelling: "quux", Pos: position.Position{Filename: "identifier", Line: 1, Endcol: 3}}, - {Kind: ID, Spelling: "lines_total", Pos: position.Position{Filename: "identifier", Line: 1, Startcol: 5, Endcol: 15}}, - {Kind: EOF, Pos: position.Position{Filename: "identifier", Line: 1, Startcol: 16, Endcol: 16}}, - }}, - {name: "regex", input: "/asdf/", tokens: []Token{ - {Kind: DIV, Spelling: "/", Pos: position.Position{Filename: "regex"}}, - {Kind: REGEX, Spelling: "asdf", Pos: position.Position{Filename: "regex", Startcol: 1, Endcol: 4}}, - {Kind: DIV, Spelling: "/", Pos: position.Position{Filename: "regex", Startcol: 5, Endcol: 5}}, - {Kind: EOF, Pos: position.Position{Filename: "regex", Startcol: 6, Endcol: 6}}, - }}, - {name: "regex with escape", input: `/asdf\//`, tokens: []Token{ - {Kind: DIV, Spelling: "/", Pos: position.Position{Filename: "regex with escape"}}, - {Kind: REGEX, Spelling: `asdf/`, Pos: position.Position{Filename: "regex with escape", Startcol: 1, Endcol: 6}}, - {Kind: DIV, Spelling: "/", Pos: position.Position{Filename: "regex with escape", Startcol: 7, Endcol: 7}}, - {Kind: EOF, Pos: position.Position{Filename: "regex with escape", Startcol: 8, Endcol: 8}}, - }}, - {name: "regex with escape and special char", input: `/foo\d\//`, tokens: []Token{ - {Kind: DIV, Spelling: "/", Pos: position.Position{Filename: "regex with escape and special char"}}, - {Kind: REGEX, Spelling: `foo\d/`, Pos: position.Position{Filename: "regex with escape and special char", Startcol: 1, Endcol: 7}}, - {Kind: DIV, Spelling: "/", Pos: position.Position{Filename: "regex with escape and special char", Startcol: 8, Endcol: 8}}, - {Kind: EOF, Pos: position.Position{Filename: "regex with escape and special char", Startcol: 9, Endcol: 9}}, - }}, - {name: "capref", input: "$foo $1", tokens: []Token{ - {Kind: CAPREF_NAMED, Spelling: "foo", Pos: position.Position{Filename: "capref", Endcol: 3}}, - {Kind: CAPREF, Spelling: "1", Pos: position.Position{Filename: "capref", Startcol: 5, Endcol: 6}}, - {Kind: EOF, Pos: position.Position{Filename: "capref", Startcol: 7, Endcol: 7}}, - }}, - {name: "numerical capref", input: "$1", tokens: []Token{ - {Kind: CAPREF, Spelling: "1", Pos: position.Position{Filename: "numerical capref", Endcol: 1}}, - {Kind: EOF, Pos: position.Position{Filename: "numerical capref", Startcol: 2, Endcol: 2}}, - }}, - {name: "capref with trailing punc", input: "$foo,", tokens: []Token{ - {Kind: CAPREF_NAMED, Spelling: "foo", Pos: position.Position{Filename: "capref with trailing punc", Endcol: 3}}, - {Kind: COMMA, Spelling: ",", Pos: position.Position{Filename: "capref with trailing punc", Startcol: 4, Endcol: 4}}, - {Kind: EOF, Pos: position.Position{Filename: "capref with trailing punc", Startcol: 5, Endcol: 5}}, - }}, - {name: "quoted string", input: `"asdf"`, tokens: []Token{ - {Kind: STRING, Spelling: `asdf`, Pos: position.Position{Filename: "quoted string", Endcol: 5}}, - {Kind: EOF, Pos: position.Position{Filename: "quoted string", Startcol: 6, Endcol: 6}}, - }}, - {name: "escaped quote in quoted string", input: `"\""`, tokens: []Token{ - {Kind: STRING, Spelling: `"`, Pos: position.Position{Filename: "escaped quote in quoted string", Endcol: 3}}, - {Kind: EOF, Pos: position.Position{Filename: "escaped quote in quoted string", Startcol: 4, Endcol: 4}}, - }}, - {name: "decorator", input: `@foo`, tokens: []Token{ - {Kind: DECO, Spelling: "foo", Pos: position.Position{Filename: 
"decorator", Endcol: 3}}, - {Kind: EOF, Pos: position.Position{Filename: "decorator", Startcol: 4, Endcol: 4}}, - }}, - { - name: "large program", - input: "/(?P[[:digit:]-\\/ ])/ {\n" + - " strptime($date, \"%Y/%m/%d %H:%M:%S\")\n" + - " foo++\n" + - "}", - tokens: []Token{ - {Kind: DIV, Spelling: "/", Pos: position.Position{Filename: "large program"}}, - {Kind: REGEX, Spelling: "(?P[[:digit:]-/ ])", Pos: position.Position{Filename: "large program", Startcol: 1, Endcol: 25}}, - {Kind: DIV, Spelling: "/", Pos: position.Position{Filename: "large program", Startcol: 26, Endcol: 26}}, - {Kind: LCURLY, Spelling: "{", Pos: position.Position{Filename: "large program", Startcol: 28, Endcol: 28}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "large program", Line: 1, Startcol: 29, Endcol: -1}}, - {Kind: BUILTIN, Spelling: "strptime", Pos: position.Position{Filename: "large program", Line: 1, Startcol: 2, Endcol: 9}}, - {Kind: LPAREN, Spelling: "(", Pos: position.Position{Filename: "large program", Line: 1, Startcol: 10, Endcol: 10}}, - {Kind: CAPREF_NAMED, Spelling: "date", Pos: position.Position{Filename: "large program", Line: 1, Startcol: 11, Endcol: 15}}, - {Kind: COMMA, Spelling: ",", Pos: position.Position{Filename: "large program", Line: 1, Startcol: 16, Endcol: 16}}, - {Kind: STRING, Spelling: "%Y/%m/%d %H:%M:%S", Pos: position.Position{Filename: "large program", Line: 1, Startcol: 18, Endcol: 36}}, - {Kind: RPAREN, Spelling: ")", Pos: position.Position{Filename: "large program", Line: 1, Startcol: 37, Endcol: 37}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "large program", Line: 2, Startcol: 38, Endcol: -1}}, - {Kind: ID, Spelling: "foo", Pos: position.Position{Filename: "large program", Line: 2, Startcol: 2, Endcol: 4}}, - {Kind: INC, Spelling: "++", Pos: position.Position{Filename: "large program", Line: 2, Startcol: 5, Endcol: 6}}, - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "large program", Line: 3, Startcol: 7, Endcol: -1}}, - {Kind: RCURLY, Spelling: "}", Pos: position.Position{Filename: "large program", Line: 3}}, - {Kind: EOF, Pos: position.Position{Filename: "large program", Line: 3, Startcol: 1, Endcol: 1}}, - }, - }, - { - name: "linecount", - input: "# comment\n" + - "# blank line\n" + - "\n" + - "foo", - tokens: []Token{ - {Kind: NL, Spelling: "\n", Pos: position.Position{Filename: "linecount", Line: 3, Startcol: 12, Endcol: -1}}, - {Kind: ID, Spelling: "foo", Pos: position.Position{Filename: "linecount", Line: 3, Endcol: 2}}, - {Kind: EOF, Pos: position.Position{Filename: "linecount", Line: 3, Startcol: 3, Endcol: 3}}, - }, - }, - // errors - {name: "unexpected char", input: "?", tokens: []Token{ - {Kind: INVALID, Spelling: "Unexpected input: '?'", Pos: position.Position{Filename: "unexpected char"}}, - {Kind: EOF, Pos: position.Position{Filename: "unexpected char", Startcol: 1, Endcol: 1}}, - }}, - {name: "unterminated regex", input: "/foo\n", tokens: []Token{ - {Kind: DIV, Spelling: "/", Pos: position.Position{Filename: "unterminated regex"}}, - {Kind: INVALID, Spelling: "Unterminated regular expression: \"/foo\"", Pos: position.Position{Filename: "unterminated regex", Startcol: 1, Endcol: 3}}, - {Kind: EOF, Pos: position.Position{Filename: "unterminated regex", Startcol: 4, Endcol: 4}}, - }}, - {name: "unterminated quoted string", input: "\"foo\n", tokens: []Token{ - {Kind: INVALID, Spelling: "Unterminated quoted string: \"\\\"foo\"", Pos: position.Position{Filename: "unterminated quoted string", Endcol: 3}}, - {Kind: 
EOF, Pos: position.Position{Filename: "unterminated quoted string", Startcol: 4, Endcol: 4}}, - }}, -} - -// collect gathers the emitted items into a slice. -func collect(t *lexerTest) (tokens []Token) { - // Hack to count divs seen for regex tests. - inRegexSet := false - l := NewLexer(t.name, strings.NewReader(t.input)) - for { - tok := l.NextToken() - // Hack to simulate context signal from parser. - if tok.Kind == DIV && (strings.Contains(t.name, "regex") || strings.HasPrefix(t.name, "large program")) && !inRegexSet { - l.InRegex = true - inRegexSet = true - } - tokens = append(tokens, tok) - if tok.Kind == EOF { - return - } - } -} - -func TestLex(t *testing.T) { - for _, tc := range lexerTests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - tokens := collect(&tc) - - testutil.ExpectNoDiff(t, tc.tokens, tokens, testutil.AllowUnexported(Token{}, position.Position{})) - }) - } -} diff --git a/inputs/mtail/internal/runtime/compiler/parser/parser_test.go b/inputs/mtail/internal/runtime/compiler/parser/parser_test.go deleted file mode 100644 index e1c6eb51..00000000 --- a/inputs/mtail/internal/runtime/compiler/parser/parser_test.go +++ /dev/null @@ -1,672 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package parser - -import ( - "flag" - "strings" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/ast" - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/position" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -var parserTestDebug = flag.Bool("parser_test_debug", false, "Turn on parser debug output if set.") - -var parserTests = []struct { - name string - program string -}{ - { - "empty", - "", - }, - - { - "newline", - "\n", - }, - - { - "declare counter", - "counter lines_total\n", - }, - - { - "declare counter string name", - "counter lines_total as \"line-count\"\n", - }, - - { - "declare dimensioned counter", - "counter foo by bar\n", - }, - - { - "declare dimensioned metric with limit", - "counter foo by a, b limit 100", - }, - - { - "declare multi-dimensioned counter", - "counter foo by bar, baz, quux\n", - }, - - { - "declare hidden counter", - "hidden counter foo\n", - }, - - { - "declare gauge", - "gauge foo\n", - }, - - { - "declare timer", - "timer foo\n", - }, - - { - "declare text", - "text stringy\n", - }, - - { - "declare histogram", - "histogram foo buckets 0, 1, 2\n", - }, - { - "declare histogram float", - "histogram foo buckets 0, 0.01, 0.1, 1, 10\n", - }, - { - "declare histogram by ", - "histogram foo by code buckets 0, 1, 2\n", - }, - { - "declare histogram reversed syntax ", - "histogram foo buckets 0, 1, 2 by code\n", - }, - - { - "simple pattern action", - "/foo/ {}\n", - }, - - { - "increment counter", - "counter lines_total\n" + - "/foo/ {\n" + - " lines_total++\n" + - "}\n", - }, - - { - "decrement counter", - `counter i -/foo/ { - i-- -} -`, - }, - - { - "regex match includes escaped slashes", - "counter foo\n" + - "/foo\\// { foo++\n}\n", - }, - - { - "numeric capture group reference", - "/(foo)/ {\n" + - " $1++\n" + - "}\n", - }, - - { - "strptime and capref", - "/(.*)/ {\n" + - "strptime($1, \"2006-01-02T15:04:05Z07:00\")\n" + - " }\n", - }, - - { - "named capture group reference", - "/(?P[[:digit:]-\\/ ])/ {\n" + - " strptime($date, \"%Y/%m/%d %H:%M:%S\")\n" + - "}\n", - }, - - { - "nested match conditions", - "counter foo\n" + - "counter bar\n" + - "/match(\\d+)/ {\n" + - " foo += $1\n" + - " /^bleh (\\S+)/ {\n" + - " 
bar++\n" + - " $1++\n" + - " }\n" + - "}\n", - }, - - { - "nested scope", - "counter foo\n" + - "/fo(o)/ {\n" + - " $1++\n" + - " /bar(xxx)/ {\n" + - " $1 += $1\n" + - " foo = $1\n" + - " }\n" + - "}\n", - }, - - { - "comment then code", - "# %d [%p]\n" + - "/^(?P\\d+\\/\\d+\\/\\d+ \\d+:\\d+:\\d+) \\[(?P\\d+)\\] / {\n" + - " strptime($1, \"2006/01/02 15:04:05\")\n" + - "}\n", - }, - - { - "assignment", - "counter variable\n" + - "/(?P.*)/ {\n" + - "variable = $foo\n" + - "}\n", - }, - - { - "increment operator", - "counter var\n" + - "/foo/ {\n" + - " var++\n" + - "}\n", - }, - - { - "incby operator", - "counter var\n" + - "/foo/ {\n var += 2\n}\n", - }, - - { - "additive", - "counter time_total\n" + - "/(?P.*)/ {\n" + - " time_total = timestamp() - time_total\n" + - "}\n", - }, - - { - "multiplicative", - "counter a\n" + - "counter b\n" + - " /foo/ {\n a = a * b\n" + - " a = a ** b\n" + - "}\n", - }, - - { - "additive and mem storage", - "counter time_total\n" + - "counter variable by foo\n" + - "/(?P.*)/ {\n" + - " time_total += timestamp() - variable[$foo]\n" + - "}\n", - }, - - { - "conditional expressions", - "counter foo\n" + - "/(?P.*)/ {\n" + - " $foo > 0 {\n" + - " foo += $foo\n" + - " }\n" + - " $foo >= 0 {\n" + - " foo += $foo\n" + - " }\n" + - " $foo < 0 {\n" + - " foo += $foo\n" + - " }\n" + - " $foo <= 0 {\n" + - " foo += $foo\n" + - " }\n" + - " $foo == 0 {\n" + - " foo += $foo\n" + - " }\n" + - " $foo != 0 {\n" + - " foo += $foo\n" + - " }\n" + - "}\n", - }, - - { - "decorator definition and invocation", - "def foo { next\n }\n" + - "@foo { }\n", - }, - - { - "const regex", - "const X /foo/\n" + - "/foo / + X + / bar/ {\n" + - "}\n", - }, - - { - "multiline regex starting with newline", - "const FOO\n" + - "/some regex here/ +\n" + - "/plus some other things/", - }, - - { - "multiline regex", - "/foo / +\n" + - "/barrr/ {\n" + - "}\n", - }, - - { - "len", - "/(?Pfoo)/ {\n" + - "len($foo) > 0 {\n" + - "}\n" + - "}\n", - }, - - { - "def and next", - "def foobar {/(?P.*)/ {" + - " next" + - "}" + - "}", - }, - - { - "const", - `const IP /\d+(\.\d+){3}/`, - }, - - { - "bitwise", - `gauge a -/foo(\d)/ { - a = $1 & 7 - a = $1 | 8 - a = $1 << 4 - a = $1 >> 20 - a = $1 ^ 15 - a = ~ 1 -}`, - }, - - { - "logical", - `0 || 1 && 0 { -} -`, - }, - - { - "floats", - `gauge foo -/foo/ { -foo = 3.14 -}`, - }, - - { - "simple otherwise action", - "otherwise {}\n", - }, - - { - "pattern action then otherwise action", - `counter lines_total by type - /foo/ { - lines_total["foo"]++ - } - otherwise { - lines_total["misc"] += 10 - }`, - }, - - { - "simple else clause", - "/foo/ {} else {}", - }, - - { - "nested else clause", - "/foo/ { / bar/ {} } else { /quux/ {} else {} }", - }, - - { - "mod operator", - `gauge a -/foo/ { - a = 3 % 1 -}`, - }, - - { - "delete", - `counter foo by bar -/foo/ { - del foo[$1] -}`, - }, - - { - "delete after", - `counter foo by bar -/foo/ { - del foo[$1] after 168h -}`, - }, - - {"getfilename", ` -getfilename() -`}, - - {"indexed expression arg list", ` -counter foo by a,b -/(\d) (\d+)/ { - foo[$1,$2]++ -}`}, - - {"paren expr", ` -(0) || (1 && 3) { -}`}, - - {"regex cond expr", ` -/(\d)/ && 1 { -} -`}, - - {"concat expr 1", ` -const X /foo/ -/bar/ + X { -}`}, - {"concat expr 2", ` -const X /foo/ -X { -}`}, - - {"match expression 1", ` -$foo =~ /bar/ { -} -$foo !~ /bar/ { -} -`}, - {"match expression 2", ` -$foo =~ /bar/ + X { -}`}, - {"match expression 3", ` -const X /foo/ -$foo =~ X { -}`}, - - {"capref used in def", ` -/(?P.*)/ && $x > 0 { -}`}, - - {"match expr 
4", ` -/(?P.{6}) (?P.*)/ { - $foo =~ $bar { - } -}`}, - - {"stop", ` -// { - stop -}`}, - - {"substitution", ` -/(\d,\d)/ { - subst(",", ",", $1) -}`}, - - {"pattern in arg expr list", ` - /(\d,\d)/ { - subst(/,/, "", $1) - }`}, -} - -func TestParserRoundTrip(t *testing.T) { - if *parserTestDebug { - mtailDebug = 3 - } - for _, tc := range parserTests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - p := newParser(tc.name, strings.NewReader(tc.program)) - r := mtailParse(p) - - if r != 0 || p.root == nil || len(p.errors) > 0 { - t.Error("1st pass parse errors:\n") - for _, e := range p.errors { - t.Errorf("\t%s\n", e) - } - t.Fatal() - } - - if *parserTestDebug { - s := Sexp{} - t.Log("AST:\n" + s.Dump(p.root)) - } - - u := Unparser{} - output := u.Unparse(p.root) - - p2 := newParser(tc.name+" 2", strings.NewReader(output)) - r = mtailParse(p2) - if r != 0 || p2.root == nil || len(p2.errors) > 0 { - t.Errorf("2nd pass parse errors:\n") - for _, e := range p2.errors { - t.Errorf("\t%s\n", e) - } - t.Logf("2nd pass input was:\n%s", output) - t.Logf("2nd pass diff:\n%s", testutil.Diff(tc.program, output)) - t.Fatal() - } - - u = Unparser{} - output2 := u.Unparse(p2.root) - - testutil.ExpectNoDiff(t, output2, output) - }) - } -} - -type parserInvalidProgram struct { - name string - program string - errors []string -} - -var parserInvalidPrograms = []parserInvalidProgram{ - { - "unknown character", - "?\n", - []string{"unknown character:1:1: Unexpected input: '?'"}, - }, - - { - "unterminated regex", - "/foo\n", - []string{ - "unterminated regex:1:2-4: Unterminated regular expression: \"/foo\"", - "unterminated regex:1:2-4: syntax error: unexpected end of file, expecting '/' to end regex", - }, - }, - - { - "unterminated string", - " \"foo }\n", - []string{"unterminated string:1:2-7: Unterminated quoted string: \"\\\"foo }\""}, - }, - - { - "unterminated const regex", - "const X /(?P", - []string{ - "unterminated const regex:1:10-17: Unterminated regular expression: \"/(?P\"", - "unterminated const regex:1:10-17: syntax error: unexpected end of file, expecting '/' to end regex", - }, - }, - - { - "unbalanced {", - "/foo/ {\n", - []string{"unbalanced {:2:1: syntax error: unexpected end of file, expecting '}' to end block"}, - }, - { - "unbalanced else {", - "/foo/ { } else {\n", - []string{"unbalanced else {:2:1: syntax error: unexpected end of file, expecting '}' to end block"}, - }, - { - "unbalanced otherwise {", - "otherwise {\n", - []string{"unbalanced otherwise {:2:1: syntax error: unexpected end of file, expecting '}' to end block"}, - }, - - { - "index of non-terminal 1", - `// { - foo++[$1]++ - }`, - []string{"index of non-terminal 1:2:7: syntax error: unexpected indexing of an expression"}, - }, - { - "index of non-terminal 2", - `// { - 0[$1]++ - }`, - []string{"index of non-terminal 2:2:3: syntax error: unexpected indexing of an expression"}, - }, - { - "index of pattern", - `/foo/[0] -`, - []string{"index of pattern:1:6: syntax error: unexpected indexing of an expression"}, - }, - - { - "statement with no effect", - `/(\d)foo/ { - timestamp() - $1 -}`, - []string{"statement with no effect:3:18: syntax error: statement with no effect, missing an assignment, `+' concatenation, or `{}' block?"}, - }, - - { - "pattern without block", - `/(?P.)/ -`, - []string{"pattern without block:2:11: syntax error: statement with no effect, missing an assignment, `+' concatenation, or `{}' block?"}, - }, - - { - "paired pattern without block", - `/(?P.)/ - /(?P.)/ {} - `, - []string{"paired 
pattern without block:2:11: syntax error: statement with no effect, missing an assignment, `+' concatenation, or `{}' block?"}, - }, - - { - "dimensioned limit per dimension", - "counter foo by a limit 10, b", - []string{"dimensioned limit per dimension:1:26: syntax error: unexpected COMMA"}, - }, -} - -func TestParseInvalidPrograms(t *testing.T) { - if *parserTestDebug { - mtailDebug = 3 - } - for _, tc := range parserInvalidPrograms { - tc := tc - t.Run(tc.name, func(t *testing.T) { - p := newParser(tc.name, strings.NewReader(tc.program)) - mtailParse(p) - - testutil.ExpectNoDiff(t, - strings.Join(tc.errors, "\n"), // want - strings.TrimRight(p.errors.Error(), "\n")) // got - if p.errors.Error() == "no errors" && *parserTestDebug { - s := Sexp{} - t.Log("AST:\n" + s.Dump(p.root)) - } - }) - } -} - -var parsePositionTests = []struct { - name string - program string - positions []*position.Position -}{ - { - name: "empty", - program: "", - positions: nil, - }, - { - name: "variable", - program: `counter foo`, - positions: []*position.Position{{Filename: "variable", Line: 0, Startcol: 8, Endcol: 10}}, - }, - { - name: "pattern", - program: `const ID /foo/`, - positions: []*position.Position{{Filename: "pattern", Line: 0, Startcol: 9, Endcol: 13}}, - }, - { - name: "multiline regex", - program: "const ID\n" + - "/foo/ +\n" + - "/bar/", - // TODO: Update position for the first token to `1, 0, 4` when position tracking is fixed - positions: []*position.Position{{Filename: "multiline regex", Line: 1, Startcol: 4, Endcol: 4}, {Filename: "multiline regex", Line: 2, Startcol: 0, Endcol: 4}}, - }, -} - -func TestParsePositionTests(t *testing.T) { - for _, tc := range parsePositionTests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - // Not t.Parallel() because the parser is not reentrant, and mtailDebug is a global. - root, err := Parse(tc.name, strings.NewReader(tc.program)) - testutil.FatalIfErr(t, err) - p := &positionCollector{} - ast.Walk(p, root) - testutil.ExpectNoDiff(t, tc.positions, p.positions, testutil.AllowUnexported(position.Position{})) - }) - } -} - -type positionCollector struct { - positions []*position.Position -} - -func (p *positionCollector) VisitBefore(node ast.Node) (ast.Visitor, ast.Node) { - switch n := node.(type) { - case *ast.VarDecl, *ast.PatternLit: - p.positions = append(p.positions, n.Pos()) - } - return p, node -} - -func (p *positionCollector) VisitAfter(node ast.Node) ast.Node { - return node -} diff --git a/inputs/mtail/internal/runtime/compiler/parser/tokens_test.go b/inputs/mtail/internal/runtime/compiler/parser/tokens_test.go deleted file mode 100644 index c6da73be..00000000 --- a/inputs/mtail/internal/runtime/compiler/parser/tokens_test.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2018 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package parser - -import ( - "fmt" - "testing" - "testing/quick" - - "flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/position" -) - -func TestKindHasString(t *testing.T) { - for k := INVALID; k <= NL; k++ { - if Kind(k).String() != mtailToknames[k-INVALID+3] { - t.Errorf("kind string not match. 
expected %s, received %s", mtailToknames[k-INVALID], Kind(k).String()) - } - } -} - -func TestTokenString(t *testing.T) { - if err := quick.Check(func(kind Kind, spelling string, pos position.Position) bool { - tok := Token{Kind: kind, Spelling: spelling, Pos: pos} - return tok.String() == fmt.Sprintf("%s(%q,%s)", kind.String(), spelling, pos.String()) - }, nil); err != nil { - t.Error(err) - } -} diff --git a/inputs/mtail/internal/runtime/compiler/symbol/symtab_test.go b/inputs/mtail/internal/runtime/compiler/symbol/symtab_test.go deleted file mode 100644 index 21c5999f..00000000 --- a/inputs/mtail/internal/runtime/compiler/symbol/symtab_test.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2016 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package symbol - -import ( - "math/rand" - "reflect" - "testing" - "testing/quick" - - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestInsertLookup(t *testing.T) { - s := NewScope(nil) - - sym1 := NewSymbol("foo", VarSymbol, nil) - if r := s.Insert(sym1); r != nil { - t.Errorf("Insert already had sym1: %v", r) - } - - r1 := s.Lookup("foo", VarSymbol) - testutil.ExpectNoDiff(t, r1, sym1) -} - -// Generate implements the quick.Generator interface for SymbolKind. -func (Kind) Generate(rand *rand.Rand, size int) reflect.Value { - return reflect.ValueOf(Kind(rand.Intn(int(endSymbol)))) -} - -func TestInsertLookupQuick(t *testing.T) { - testutil.SkipIfShort(t) - - check := func(name string, kind Kind) bool { - // Create a new scope each run because scope doesn't overwrite on insert. - scope := NewScope(nil) - sym := NewSymbol(name, kind, nil) - a := scope.Insert(sym) - if a != nil { - return false - } - b := scope.Lookup(name, kind) - diff := testutil.Diff(a, b) - return diff != "" - } - q := &quick.Config{MaxCount: 100000} - if err := quick.Check(check, q); err != nil { - t.Error(err) - } -} - -func TestNestedScope(t *testing.T) { - s := NewScope(nil) - s1 := NewScope(s) - - sym1 := NewSymbol("bar", VarSymbol, nil) - if r := s.Insert(sym1); r != nil { - t.Errorf("Insert already had sym1: %v", r) - } - - sym2 := NewSymbol("foo", VarSymbol, nil) - if r1 := s1.Insert(sym2); r1 != nil { - t.Errorf("Insert already had sym2: %v", r1) - } - - if s1.Lookup("foo", VarSymbol) == nil { - t.Errorf("foo not found in s1") - } - - if s.Lookup("foo", VarSymbol) != nil { - t.Errorf("foo found in s") - } - - if s1.Lookup("bar", VarSymbol) == nil { - t.Errorf("bar not found from s1") - } -} diff --git a/inputs/mtail/internal/runtime/compiler/types/types.go b/inputs/mtail/internal/runtime/compiler/types/types.go index 281c2136..152ae099 100644 --- a/inputs/mtail/internal/runtime/compiler/types/types.go +++ b/inputs/mtail/internal/runtime/compiler/types/types.go @@ -10,14 +10,14 @@ import ( "regexp/syntax" "strings" "sync" - // "github.com/golang/glog" ) // Type represents a type in the mtail program. type Type interface { // Root returns an exemplar Type after unification occurs. If the type - // system is complete after unification, Root will be a TypeOperator. + // system is complete after unification, Root will be a TypeOperator. Root + // is the equivalent of Find in the union-find algorithm. Root() Type // String returns a string representation of a Type. 
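A note on the comment added in the hunk above: Root plays the role of Find in a union-find structure, and SetInstance (annotated the same way in a later hunk) plays the role of Union. The sketch below shows that behaviour through the package's exported API; names are taken from this file and from the tests removed later in this diff, and it only builds inside this module because of the internal import path.

package main

import (
	"fmt"

	"flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/types"
)

func main() {
	tv := types.NewVariable() // a fresh, unbound type variable

	// Unify acts like Union: it binds tv's Instance to Int via SetInstance.
	types.Unify(tv, types.Int)

	// Root acts like Find: it follows Instance links to the representative
	// type, so the variable now compares equal to Int.
	fmt.Println(types.Equals(tv.Root(), types.Int)) // true
}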
@@ -43,7 +43,7 @@ func (e *TypeError) Root() Type { func (e *TypeError) String() string { if e == nil || e.error == nil { - return fmt.Sprintf("type error") + return "type error" } var estr, rstr string if IsComplete(e.expected) { @@ -90,6 +90,7 @@ var ( type Variable struct { ID int + // Instance is set if this variable has been bound to a type. instanceMu sync.RWMutex Instance Type } @@ -125,7 +126,8 @@ func (t *Variable) String() string { } // SetInstance sets the exemplar instance of this TypeVariable, during -// unification. +// unification. SetInstance is the equivalent of Union in the Union-Find +// algorithm. func (t *Variable) SetInstance(t1 Type) { t.instanceMu.Lock() defer t.instanceMu.Unlock() @@ -164,16 +166,22 @@ func (t *Operator) String() (s string) { return s } +const ( + functionName = "→" + dimensionName = "⨯" + alternateName = "|" +) + // Function is a convenience method, which instantiates a new Function type // scheme, with the given args as parameters. func Function(args ...Type) *Operator { - return &Operator{"→", args} + return &Operator{functionName, args} } // IsFunction returns true if the given type is a Function type. func IsFunction(t Type) bool { if v, ok := t.(*Operator); ok { - return v.Name == "→" + return v.Name == functionName } return false } @@ -182,13 +190,28 @@ func IsFunction(t Type) bool { // scheme, with the given args as the dimensions of the type. (This type looks // a lot like a Product type.) func Dimension(args ...Type) *Operator { - return &Operator{"⨯", args} + return &Operator{dimensionName, args} } // IsDimension returns true if the given type is a Dimension type. func IsDimension(t Type) bool { if v, ok := t.(*Operator); ok { - return v.Name == "⨯" + return v.Name == dimensionName + } + return false +} + +// Alternate is a convenience method which instantiates a new Alternate type +// scheme, with the given args as the possible types this type may take. (You +// might know this sort of type by the name Sum type.) +func Alternate(args ...Type) *Operator { + return &Operator{alternateName, args} +} + +// IsAlternate returns true if the given type is an Alternate type. +func IsAlternate(t Type) bool { + if v, ok := t.(*Operator); ok { + return v.Name == alternateName } return false } @@ -222,6 +245,9 @@ var ( Pattern = &Operator{"Pattern", []Type{}} // TODO(jaq): use composite type so we can typecheck the bucket directly, e.g. hist[j] = i. Buckets = &Operator{"Buckets", []Type{}} + + // Numeric types can be either Int or Float. + Numeric = Alternate(Int, Float) ) // Builtins is a mapping of the builtin language functions to their type definitions. @@ -314,7 +340,7 @@ func Equals(t1, t2 Type) bool { return false } for i := range t1.Args { - if !Equals(t1.Args[i], t2.Args[2]) { + if !Equals(t1.Args[i], t2.Args[i]) { return false } } @@ -326,35 +352,35 @@ func Equals(t1, t2 Type) bool { } // Unify performs type unification of both parameter Types. It returns the -// least upper bound of both types, the smallest type that is capable of +// least upper bound of both types, the most general type that is capable of // representing both parameters. If either type is a type variable, then that // variable is unified with the LUB. In reporting errors, it is assumed that a // is the expected type and b is the type observed. 
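Before the reworked Unify body that follows, a short illustration of how the new Alternate handling and the Numeric = Alternate(Int, Float) shorthand resolve. The expected results are read directly off the branches added in this diff rather than from any separate documentation; this is a sketch against the exported API and builds only inside this module.

package main

import (
	"fmt"

	"flashcat.cloud/categraf/inputs/mtail/internal/runtime/compiler/types"
)

func main() {
	// An Alternate narrows to whichever of its members it is unified with.
	fmt.Println(types.Unify(types.Numeric, types.Int))   // Int
	fmt.Println(types.Unify(types.Numeric, types.Float)) // Float

	// A type outside the alternation is a mismatch.
	fmt.Println(types.Unify(types.Numeric, types.String)) // *TypeError (type mismatch)

	// Two Alternates unify to the intersection of their members.
	fmt.Println(types.Unify(types.Numeric, types.Alternate(types.Float, types.String))) // Float

	// LeastUpperBound also learns about Numeric (cases added further down):
	// it narrows to the concrete member, never widens.
	fmt.Println(types.LeastUpperBound(types.Numeric, types.Float)) // Float
}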
func Unify(a, b Type) Type { // glog.V(2).Infof("Unifying %v and %v", a, b) - a1, b1 := a.Root(), b.Root() - switch a2 := a1.(type) { + aR, bR := a.Root(), b.Root() + switch aT := aR.(type) { case *Variable: - switch b2 := b1.(type) { + switch bT := bR.(type) { case *Variable: - if a2.ID != b2.ID { - // glog.V(2).Infof("Making %q type %q", a2, b1) - a2.SetInstance(b1) - return b1 + if aT.ID != bT.ID { + // glog.V(2).Infof("Making %q type %q", aT, bR) + aT.SetInstance(bR) + return bR } - return a1 + return aT case *Operator: - if occursInType(a2, b2) { - return &TypeError{ErrRecursiveUnification, a2, b2} + if occursInType(aT, bT) { + return &TypeError{ErrRecursiveUnification, aT, bT} } - // glog.V(2).Infof("Making %q type %q", a2, b1) - a2.SetInstance(b1) - return b1 + // glog.V(2).Infof("Making %q type %q", aT, bR) + aT.SetInstance(bR) + return bR } case *Operator: - switch b2 := b1.(type) { + switch bT := bR.(type) { case *Variable: - // reverse args to call above + // reverse args, to recurse the pattern above t := Unify(b, a) var e *TypeError if AsTypeError(t, &e) { @@ -363,34 +389,68 @@ func Unify(a, b Type) Type { return t case *Operator: - if len(a2.Args) != len(b2.Args) { - return &TypeError{ErrTypeMismatch, a2, b2} - } - var rType *Operator - if a2.Name != b2.Name { - t := LeastUpperBound(a, b) - // glog.V(2).Infof("Got LUB = %#v", t) + switch { + case IsAlternate(aT) && !IsAlternate(bT): + if OccursIn(bT, aT.Args) { + return bT + } + return &TypeError{ErrTypeMismatch, aT, bT} + + case IsAlternate(bT) && !IsAlternate(aT): + t := Unify(b, a) var e *TypeError if AsTypeError(t, &e) { - return e + // We flipped the args, flip them back. + return &TypeError{e.error, e.received, e.expected} } - var ok bool - if rType, ok = t.(*Operator); !ok { - return &TypeError{ErrInternal, a2, b2} + return t + + case IsAlternate(aT) && IsAlternate(bT): + // Both are Alternates, find intersection of type arguments. + var args []Type + for _, arg := range bT.Args { + if OccursIn(arg, aT.Args) { + args = append(args, arg) + } } - } else { - rType = &Operator{a2.Name, []Type{}} - } - rType.Args = make([]Type, len(a2.Args)) - for i, argA := range a2.Args { - t := Unify(argA, b2.Args[i]) - var e *TypeError - if AsTypeError(t, &e) { - return e + if len(args) == 0 { + return &TypeError{ErrTypeMismatch, aT, bT} + } + if len(args) == 1 { + return args[0] + } + return &Operator{alternateName, args} + + default: + if len(aT.Args) != len(bT.Args) { + return &TypeError{ErrTypeMismatch, aT, bT} + } + var rType *Operator + if aT.Name != bT.Name { + t := LeastUpperBound(a, b) + // glog.V(2).Infof("Got LUB = %#v", t) + var e *TypeError + if AsTypeError(t, &e) { + return e + } + var ok bool + if rType, ok = t.(*Operator); !ok { + return &TypeError{ErrRecursiveUnification, aT, bT} + } + } else { + rType = &Operator{aT.Name, []Type{}} + } + rType.Args = make([]Type, len(aT.Args)) + for i, argA := range aT.Args { + t := Unify(argA, bT.Args[i]) + var e *TypeError + if AsTypeError(t, &e) { + return e + } + rType.Args[i] = t } - rType.Args[i] = t + return rType } - return rType } } return &TypeError{ErrInternal, a, b} @@ -448,6 +508,24 @@ func LeastUpperBound(a, b Type) Type { (Equals(a1, Bool) && Equals(b1, Pattern)) { return Bool } + if (Equals(a1, Bool) && Equals(b1, Int)) || + (Equals(a1, Int) && Equals(b1, Bool)) { + return Int + } + // A Numeric can be an Int, or a Float, but not vice versa. 
+ if (Equals(a1, Numeric) && Equals(b1, Int)) || + (Equals(a1, Int) && Equals(b1, Numeric)) { + return Int + } + if (Equals(a1, Numeric) && Equals(b1, Float)) || + (Equals(a1, Float) && Equals(b1, Numeric)) { + return Float + } + // A string can be a pattern, but not vice versa. + if (Equals(a1, String) && Equals(b1, Pattern)) || + (Equals(a1, Pattern) && Equals(b1, String)) { + return Pattern + } // A pattern and an Int are Bool if (Equals(a1, Pattern) && Equals(b1, Int)) || (Equals(a1, Int) && Equals(b1, Pattern)) { diff --git a/inputs/mtail/internal/runtime/compiler/types/types_test.go b/inputs/mtail/internal/runtime/compiler/types/types_test.go deleted file mode 100644 index c1c20bf6..00000000 --- a/inputs/mtail/internal/runtime/compiler/types/types_test.go +++ /dev/null @@ -1,364 +0,0 @@ -// Copyright 2016 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package types - -import ( - "errors" - "fmt" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -var typeUnificationTests = []struct { - a, b Type - expected Type -}{ - // The unification of None with None is still None. - { - None, None, - None, - }, - // The unification of a type T with itself is T. - { - String, String, - String, - }, - { - Int, Int, - Int, - }, - { - Float, Float, - Float, - }, - { - &Variable{ID: 0}, &Variable{ID: 0}, - &Variable{ID: 0}, - }, - // The unification of any type operator with a type variable is the type operator - { - &Variable{}, None, - None, - }, - { - &Variable{}, Float, - Float, - }, - { - &Variable{}, Int, - Int, - }, - { - &Variable{}, String, - String, - }, - { - None, &Variable{}, - None, - }, - { - Float, &Variable{}, - Float, - }, - { - Int, &Variable{}, - Int, - }, - { - String, &Variable{}, - String, - }, - // The lub of Int and Float is Float. - { - Int, Float, - Float, - }, - { - Float, Int, - Float, - }, - // The lub of Int and String is String. - { - Int, String, - String, - }, - { - String, Int, - String, - }, - // The lub of Float and String is String. - { - Float, String, - String, - }, - { - String, Float, - String, - }, - // lub of Bool and Int is an Int. - { - Bool, Int, - Int, - }, - { - Int, Bool, - Int, - }, - // Strings can be Patterns. - { - Pattern, String, - Pattern, - }, - { - String, Pattern, - Pattern, - }, - // Patterns and Ints can both be Bool. - { - Pattern, Int, - Bool, - }, - // Undef secedes to other - { - Undef, Int, - Int, - }, - { - String, Undef, - String, - }, - { - Undef, Undef, - Undef, - }, - // TypeError supercedes other. - { - Pattern, &TypeError{}, - &TypeError{}, - }, - { - &TypeError{}, Float, - &TypeError{}, - }, -} - -func TestTypeUnification(t *testing.T) { - for _, tc := range typeUnificationTests { - tc := tc - t.Run(fmt.Sprintf("%s %s", tc.a, tc.b), func(t *testing.T) { - tU := Unify(tc.a, tc.b) - /* Type Errors never equal. 
*/ - if IsTypeError(tc.expected) && IsTypeError(tU) { - return - } - if !Equals(tc.expected, tU) { - t.Errorf("want %q, got %q", tc.expected, tU) - } - }) - } -} - -var groupOnlyMatchesTests = []struct { - pattern string - check string - expected bool -}{ - { - `\d+`, - "0123456789", - true, - }, - { - `[0123456789]`, - "0123456789", - true, - }, - { - `(0|1|2|3|4|5|6|7|8|9)`, - "0123456789", - true, - }, - { - `(\+|-)?\d+(\.\d+)?`, - "0123456789", - false, - }, - { - `(\d+\.\d+)`, - "0123456789.eE+-", - true, - }, - { - `(\+|-)?\d+(\.\d+)?`, - "0123456789.eE+-", - true, - }, - { - `(?P-?\d+\.\d+)`, - "0123456789.eE+-", - true, - }, - { - `(?P-)`, - "+-", - true, - }, - { - `(?P-)`, - "+-0123456789", - true, - }, - { - `\-`, - "+-", - true, - }, - { - `\-`, - "+-0123456789", - true, - }, - { - `\-|[0-9]`, - "+-", - false, - }, -} - -func TestGroupOnlyMatches(t *testing.T) { - for _, tc := range groupOnlyMatchesTests { - r, err := ParseRegexp(tc.pattern) - testutil.FatalIfErr(t, err) - result := groupOnlyMatches(r, tc.check) - if result != tc.expected { - t.Errorf("Pattern %q didn't only match check %q: expected %+v, received %+v", tc.pattern, tc.check, tc.expected, result) - } - } -} - -var inferCaprefTypeTests = []struct { - pattern string - typ Type -}{ - { - `\d+`, - Int, - }, - { - `-?\d+`, - Int, - }, - { - `[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?`, - Float, - }, - { - `-?\d+\.\d+`, - Float, - }, - { - `(\d+\.\d+)`, - Float, - }, - { - `\d+\.\d+\.\d+\.\d+`, - String, - }, - { - `-`, - String, - }, - { - `\-`, - String, - }, - // A single - is not an Int, so the whole class cannot be Int. - { - `[-0-9]`, - String, - }, - // Fun fact! This test gets simplified into `[\-0-9]` because the character - // class is also an alternation. - { - `-|[0-9]`, - String, - }, - { - `\d+\.\d+|\-`, - String, - }, - { - `\-|\d+\.\d+`, - String, - }, -} - -func TestInferCaprefType(t *testing.T) { - for _, tc := range inferCaprefTypeTests { - tc := tc - t.Run(tc.pattern, func(t *testing.T) { - re, err := ParseRegexp(`(` + tc.pattern + `)`) - testutil.FatalIfErr(t, err) - r := InferCaprefType(re, 1) - if !Equals(tc.typ, r) { - t.Errorf("Types don't match: %q inferred %v, not %v", tc.pattern, r, tc.typ) - } - }) - } -} - -func TestTypeEquals(t *testing.T) { - if Equals(NewVariable(), NewVariable()) { - t.Error("Type variables are not same") - } - - var e *TypeError - - t1 := NewVariable() - t2 := NewVariable() - ty := Unify(t1, t2) - if AsTypeError(ty, &e) { - t.Fatal(e) - } - if !Equals(t1, t2) { - t.Errorf("Unified variables should be same: %v %v", t1, t2) - } - if !Equals(Int, Int) { - t.Errorf("type constants not same") - } - - t3 := NewVariable() - if Equals(t3, Int) { - t.Error("ununified type const and var") - } - ty = Unify(Int, t3) - if AsTypeError(ty, &e) { - t.Fatal(e) - } - if !Equals(t3, Int) { - t.Error("unified variable and const not same") - } - - typeErr := &TypeError{} - if Equals(typeErr, typeErr) { - t.Error("error type equals itself") - } -} - -func TestAsTypeError(t *testing.T) { - e := &TypeError{ErrTypeMismatch, Int, Bool} - - var e1 *TypeError - if !AsTypeError(e, &e1) { - t.Errorf("want type error, got: %#v", e1) - } - if !errors.Is(e1.error, ErrTypeMismatch) { - t.Errorf("want ErrTypeMismatch, got: %#v", e1.error) - } - if e.expected != e1.expected || e.received != e1.received { - t.Errorf("want %#v, got: %#v", e.expected, e1.expected) - t.Errorf("want %#v, got: %#v", e.received, e1.received) - } -} diff --git a/inputs/mtail/internal/runtime/fuzz.go 
b/inputs/mtail/internal/runtime/fuzz.go index 7d2af957..fd5a7918 100644 --- a/inputs/mtail/internal/runtime/fuzz.go +++ b/inputs/mtail/internal/runtime/fuzz.go @@ -2,7 +2,6 @@ // This file is available under the Apache license. //go:build gofuzz -// +build gofuzz package runtime diff --git a/inputs/mtail/internal/runtime/httpstatus.go b/inputs/mtail/internal/runtime/httpstatus.go index f3a2bffb..2ecaf026 100644 --- a/inputs/mtail/internal/runtime/httpstatus.go +++ b/inputs/mtail/internal/runtime/httpstatus.go @@ -14,7 +14,7 @@ import ( const loaderTemplate = `

[loaderTemplate markup lost in extraction; only the visible heading "Program Loader" and a one-line -/+ change to the template survive]
diff --git a/inputs/mtail/internal/runtime/runtime.go b/inputs/mtail/internal/runtime/runtime.go index e0301eb7..2b8a0fb9 100644 --- a/inputs/mtail/internal/runtime/runtime.go +++ b/inputs/mtail/internal/runtime/runtime.go @@ -264,10 +264,18 @@ type Runtime struct { signalQuit chan struct{} // When closed stops the signal handler goroutine. } +var ( + ErrNeedsStore = errors.New("loader needs a store") + ErrNeedsWaitgroup = errors.New("loader needs a WaitGroup") +) + // New creates a new program loader that reads programs from programPath. func New(lines <-chan *logline.LogLine, wg *sync.WaitGroup, programPath string, store *metrics.Store, options ...Option) (*Runtime, error) { if store == nil { - return nil, errors.New("loader needs a store") + return nil, ErrNeedsStore + } + if wg == nil { + return nil, ErrNeedsWaitgroup } r := &Runtime{ ms: store, diff --git a/inputs/mtail/internal/runtime/runtime_integration_test.go b/inputs/mtail/internal/runtime/runtime_integration_test.go deleted file mode 100644 index ca23d8ed..00000000 --- a/inputs/mtail/internal/runtime/runtime_integration_test.go +++ /dev/null @@ -1,1105 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package runtime - -import ( - "bufio" - "context" - "math" - "strings" - "sync" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/logline" - "flashcat.cloud/categraf/inputs/mtail/internal/metrics" - "flashcat.cloud/categraf/inputs/mtail/internal/metrics/datum" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -var vmTests = []struct { - name string - prog string - log string - errs int64 - metrics metrics.MetricSlice -}{ - { - "single-dash-parseint", - `counter c - -/(?P-)/ { - $x == "-" { - c++ - } -} -`, `123 a -- b -`, - 0, - metrics.MetricSlice{ - { - Name: "c", - Program: "single-dash-parseint", - Kind: metrics.Counter, - Type: metrics.Int, - Hidden: false, - Keys: []string{}, - LabelValues: []*metrics.LabelValue{ - { - Value: &datum.Int{Value: 1}, - }, - }, - }, - }, - }, - { - "histogram", - `histogram hist1 buckets 1, 2, 4, 8 -histogram hist2 by code buckets 0, 1, 2, 4, 8 -histogram hist3 by f buckets -1, 0, 1 - -/^(.) (\d+)/ { - hist1 = $2 - hist2[$1] = $2 -} - -/^(?P[a-z]+) (?P
[remainder of the deleted runtime_integration_test.go and the start of the diff to inputs/mtail/internal/tailer/httpstatus.go lost in extraction; only the visible status-table text "program name" / "errors" survives. The hunk below removes one line from the tailer status template:]
@@ -38,7 +38,6 @@ const tailerTemplate = ` {{end}}
[tailer status-table markup lost in extraction; visible column headers: "pathname" / "errors"]
- ` // WriteStatusHTML emits the Tailer's state in HTML format to the io.Writer w. diff --git a/inputs/mtail/internal/tailer/logstream/base.go b/inputs/mtail/internal/tailer/logstream/base.go new file mode 100644 index 00000000..a0d1ebb5 --- /dev/null +++ b/inputs/mtail/internal/tailer/logstream/base.go @@ -0,0 +1,20 @@ +// Copyright 2024 Google Inc. All Rights Reserved. +// This file is available under the Apache license. + +package logstream + +import ( + "flashcat.cloud/categraf/inputs/mtail/internal/logline" +) + +type streamBase struct { + sourcename string // human readable name of the logstream source + + lines chan *logline.LogLine // outbound channel for lines +} + +// Lines returns the output log line channel for this stream. The stream is +// completed when this channel closes. +func (s *streamBase) Lines() <-chan *logline.LogLine { + return s.lines +} diff --git a/inputs/mtail/internal/tailer/logstream/cancel.go b/inputs/mtail/internal/tailer/logstream/cancel.go index 34af7e31..05dd9e3a 100644 --- a/inputs/mtail/internal/tailer/logstream/cancel.go +++ b/inputs/mtail/internal/tailer/logstream/cancel.go @@ -10,24 +10,35 @@ import ( "time" ) +// ReadDeadliner has a SetReadDeadline function to be used for interrupting reads. type ReadDeadliner interface { SetReadDeadline(t time.Time) error } +// SetReadDeadlineOnDone waits for the context to be done, and then sets an +// immediate read deadline on the flie descriptor `d`. This causes any blocked +// reads on that descriptor to return with an i/o timeout error. func SetReadDeadlineOnDone(ctx context.Context, d ReadDeadliner) { go func() { <-ctx.Done() log.Println("cancelled, setting read deadline to interrupt read") if err := d.SetReadDeadline(time.Now()); err != nil { - log.Println(err) + log.Printf("SetReadDeadline() -> %v", err) } }() } -func IsEndOrCancel(err error) bool { +// IsExitableError returns true if a stream should exit because of this error. +func IsExitableError(err error) bool { + if err == nil { + return false + } if errors.Is(err, io.EOF) { return true } + if errors.Is(err, os.ErrClosed) { + return true + } if os.IsTimeout(err) { return true } diff --git a/inputs/mtail/internal/tailer/logstream/decode.go b/inputs/mtail/internal/tailer/logstream/decode.go deleted file mode 100644 index 3b1a1feb..00000000 --- a/inputs/mtail/internal/tailer/logstream/decode.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2020 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package logstream - -import ( - "bytes" - "context" - "expvar" - "unicode/utf8" - - "flashcat.cloud/categraf/inputs/mtail/internal/logline" -) - -// logLines counts the number of lines read per log file. -var logLines = expvar.NewMap("log_lines_total") - -// decodeAndSend transforms the byte array `b` into unicode in `partial`, sending to the llp as each newline is decoded. -func decodeAndSend(ctx context.Context, lines chan<- *logline.LogLine, pathname string, n int, b []byte, partial *bytes.Buffer) int { - var ( - r rune - width int - count int - ) - for i := 0; i < len(b) && i < n; i += width { - r, width = utf8.DecodeRune(b[i:]) - if r == utf8.RuneError { - if len(b)-i > 10 { - count += width - continue - } - return count - } - // Most file-based log sources will end with \n on Unixlike systems. - // On Windows they appear to be both \r\n. syslog disallows \r (and \t - // and others) and writes them escaped, per syslog(7). 
[RFC - // 3164](https://www.ietf.org/rfc/rfc3164.txt) disallows newlines in - // the message: "The MSG part of the syslog packet MUST contain visible - // (printing) characters." So for now let's assume that a \r only - // occurs at the end of a line anyway, and we can just eat it. - switch { - case r == '\r': - // nom - case r == '\n': - sendLine(ctx, pathname, partial, lines) - default: - partial.WriteRune(r) - } - count += width - } - return count -} - -func sendLine(ctx context.Context, pathname string, partial *bytes.Buffer, lines chan<- *logline.LogLine) { - // glog.V(2).Infof("sendline") - logLines.Add(pathname, 1) - lines <- logline.New(ctx, pathname, partial.String()) - partial.Reset() -} diff --git a/inputs/mtail/internal/tailer/logstream/dgramstream.go b/inputs/mtail/internal/tailer/logstream/dgramstream.go index 4130c1da..7dec7f36 100644 --- a/inputs/mtail/internal/tailer/logstream/dgramstream.go +++ b/inputs/mtail/internal/tailer/logstream/dgramstream.go @@ -4,94 +4,92 @@ package logstream import ( - "bytes" "context" + "fmt" "log" "net" "sync" - "time" - // "github.com/golang/glog" "flashcat.cloud/categraf/inputs/mtail/internal/logline" "flashcat.cloud/categraf/inputs/mtail/internal/waker" ) type dgramStream struct { - ctx context.Context - lines chan<- *logline.LogLine + streamBase + + cancel context.CancelFunc scheme string // Datagram scheme, either "unixgram" or "udp". address string // Given name for the underlying socket path on the filesystem or hostport. - - mu sync.RWMutex // protects following fields - completed bool // This pipestream is completed and can no longer be used. - lastReadTime time.Time // Last time a log line was read from this named pipe - - stopOnce sync.Once // Ensure stopChan only closed once. - stopChan chan struct{} // Close to start graceful shutdown. } -func newDgramStream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, scheme, address string, lines chan<- *logline.LogLine) (LogStream, error) { +func newDgramStream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, scheme, address string, oneShot OneShotMode) (LogStream, error) { if address == "" { return nil, ErrEmptySocketAddress } - ss := &dgramStream{ctx: ctx, scheme: scheme, address: address, lastReadTime: time.Now(), lines: lines, stopChan: make(chan struct{})} - if err := ss.stream(ctx, wg, waker); err != nil { + ctx, cancel := context.WithCancel(ctx) + ss := &dgramStream{ + cancel: cancel, + scheme: scheme, + address: address, + streamBase: streamBase{ + sourcename: fmt.Sprintf("%s://%s", scheme, address), + lines: make(chan *logline.LogLine), + }, + } + if err := ss.stream(ctx, wg, waker, oneShot); err != nil { return nil, err } return ss, nil } -func (ss *dgramStream) LastReadTime() time.Time { - ss.mu.RLock() - defer ss.mu.RUnlock() - return ss.lastReadTime -} - -const datagramReadBufferSize = 131071 +// The read buffer size for datagrams. 
+const datagramReadBufferSize = 131072 -func (ss *dgramStream) stream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker) error { - c, err := net.ListenPacket(ss.scheme, ss.address) +func (ds *dgramStream) stream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, oneShot OneShotMode) error { + c, err := net.ListenPacket(ds.scheme, ds.address) if err != nil { - logErrors.Add(ss.address, 1) + logErrors.Add(ds.address, 1) return err } - // glog.V(2).Infof("opened new datagram socket %v", c) - b := make([]byte, datagramReadBufferSize) - partial := bytes.NewBufferString("") + // glog.V(2).Infof("stream(%s): opened new datagram socket %v", ds.sourcename, c) + lr := NewLineReader(ds.sourcename, ds.lines, &dgramConn{c}, datagramReadBufferSize, ds.cancel) var total int wg.Add(1) go func() { defer wg.Done() defer func() { - // glog.V(2).Infof("%v: read total %d bytes from %s", c, total, ss.address) - // glog.V(2).Infof("%v: closing connection", c) + // glog.V(2).Infof("stream(%s): read total %d bytes", ds.sourcename, total) + // glog.V(2).Infof("stream(%s): closing connection", ds.sourcename) err := c.Close() if err != nil { - logErrors.Add(ss.address, 1) + logErrors.Add(ds.address, 1) log.Println(err) } - logCloses.Add(ss.address, 1) - ss.mu.Lock() - ss.completed = true - ss.mu.Unlock() - ss.Stop() + logCloses.Add(ds.address, 1) + lr.Finish(ctx) + close(ds.lines) + ds.cancel() }() ctx, cancel := context.WithCancel(ctx) defer cancel() SetReadDeadlineOnDone(ctx, c) for { - n, _, err := c.ReadFrom(b) - // glog.V(2).Infof("%v: read %d bytes, err is %v", c, n, err) + n, err := lr.ReadAndSend(ctx) + // glog.V(2).Infof("stream(%s): read %d bytes, err is %v", ds.sourcename, n, err) // This is a test-only trick that says if we've already put this // logstream in graceful shutdown, then a zero-byte read is // equivalent to an "EOF" in connection and file oriented streams. if n == 0 { + if oneShot { + // glog.V(2).Infof("stream(%s): exiting because zero byte read and one shot", ds.sourcename) + return + } select { - case <-ss.stopChan: - // glog.V(2).Infof("%v: exiting because zero byte read after Stop", c) + case <-ctx.Done(): + // glog.V(2).Infof("stream(%s): exiting because zero byte read after cancellation", ds.sourcename) return default: } @@ -99,53 +97,45 @@ func (ss *dgramStream) stream(ctx context.Context, wg *sync.WaitGroup, waker wak if n > 0 { total += n - decodeAndSend(ss.ctx, ss.lines, ss.address, n, b[:n], partial) - ss.mu.Lock() - ss.lastReadTime = time.Now() - ss.mu.Unlock() - } - if err != nil && IsEndOrCancel(err) { - if partial.Len() > 0 { - sendLine(ctx, ss.address, partial, ss.lines) + // No error implies more to read, so restart the loop. + if err == nil && ctx.Err() == nil { + continue } - // glog.V(2).Infof("%v: exiting, stream has error %s", c, err) + } + + if IsExitableError(err) { + // glog.V(2).Infof("stream(%s): exiting, stream has error %s", ds.sourcename, err) return } // Yield and wait - // glog.V(2).Infof("%v: waiting", c) + // glog.V(2).Infof("stream(%s): waiting", ds.sourcename) select { - case <-ss.stopChan: + case <-ctx.Done(): + // Exit after next read attempt. // We may have started waiting here when the stop signal // arrives, but since that wait the file may have been // written to. The file is not technically yet at EOF so // we need to go back and try one more read. We'll exit // the stream in the zero byte handler above. 
- // glog.V(2).Infof("%v: Stopping after next zero byte read", c) - case <-ctx.Done(): - // Exit immediately; a cancelled context will set an immediate - // deadline on the next read which will cause us to exit then, - // so don't bother going around the loop again. - return + // glog.V(2).Infof("stream(%s): context cancelled, exiting after next zero byte read", ds.scheme, ds.address) case <-waker.Wake(): // sleep until next Wake() - // glog.V(2).Infof("%v: Wake received", c) + // glog.V(2).Infof("stream(%s): Wake received", ds.sourcename) } } }() return nil } -func (ss *dgramStream) IsComplete() bool { - ss.mu.RLock() - defer ss.mu.RUnlock() - return ss.completed +// dgramConn wraps a PacketConn to add a Read method. +type dgramConn struct { + net.PacketConn } -func (ss *dgramStream) Stop() { - // glog.V(2).Infof("Stop received on datagram stream.") - ss.stopOnce.Do(func() { - close(ss.stopChan) - }) +// Read satisfies io.Reader +func (d *dgramConn) Read(p []byte) (count int, err error) { + count, _, err = d.ReadFrom(p) + return } diff --git a/inputs/mtail/internal/tailer/logstream/dgramstream_unix_test.go b/inputs/mtail/internal/tailer/logstream/dgramstream_unix_test.go deleted file mode 100644 index 37c82875..00000000 --- a/inputs/mtail/internal/tailer/logstream/dgramstream_unix_test.go +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright 2020 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -//go:build unix -// +build unix - -package logstream_test - -import ( - "context" - "fmt" - "net" - "path/filepath" - "sync" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/logline" - "flashcat.cloud/categraf/inputs/mtail/internal/tailer/logstream" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" - "flashcat.cloud/categraf/inputs/mtail/internal/waker" -) - -const dgramTimeout = 1 * time.Second - -func TestDgramStreamReadCompletedBecauseSocketClosed(t *testing.T) { - for _, scheme := range []string{"unixgram", "udp"} { - scheme := scheme - t.Run(scheme, testutil.TimeoutTest(dgramTimeout, func(t *testing.T) { //nolint:thelper - var wg sync.WaitGroup - - var addr string - switch scheme { - case "unixgram": - tmpDir := testutil.TestTempDir(t) - addr = filepath.Join(tmpDir, "sock") - case "udp": - addr = fmt.Sprintf("[::]:%d", testutil.FreePort(t)) - default: - t.Fatalf("bad scheme %s", scheme) - } - lines := make(chan *logline.LogLine, 1) - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, 1) - - sockName := scheme + "://" + addr - ss, err := logstream.New(ctx, &wg, waker, sockName, lines, false) - testutil.FatalIfErr(t, err) - - s, err := net.Dial(scheme, addr) - testutil.FatalIfErr(t, err) - - _, err = s.Write([]byte("1\n")) - testutil.FatalIfErr(t, err) - - awaken(0) // sync past read - - ss.Stop() - - // "Close" the socket by sending zero bytes, which after Stop tells the stream to act as if we're done. 
- _, err = s.Write([]byte{}) - testutil.FatalIfErr(t, err) - - wg.Wait() - close(lines) - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: addr, Line: "1"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - cancel() - wg.Wait() - - if !ss.IsComplete() { - t.Errorf("expecting dgramstream to be complete because socket closed") - } - })) - } -} - -func TestDgramStreamReadCompletedBecauseCancel(t *testing.T) { - for _, scheme := range []string{"unixgram", "udp"} { - scheme := scheme - t.Run(scheme, testutil.TimeoutTest(dgramTimeout, func(t *testing.T) { //nolint:thelper - var wg sync.WaitGroup - - var addr string - switch scheme { - case "unixgram": - tmpDir := testutil.TestTempDir(t) - addr = filepath.Join(tmpDir, "sock") - case "udp": - addr = fmt.Sprintf("[::]:%d", testutil.FreePort(t)) - default: - t.Fatalf("bad scheme %s", scheme) - } - lines := make(chan *logline.LogLine, 1) - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, 1) - - sockName := scheme + "://" + addr - ss, err := logstream.New(ctx, &wg, waker, sockName, lines, false) - testutil.FatalIfErr(t, err) - - s, err := net.Dial(scheme, addr) - testutil.FatalIfErr(t, err) - - _, err = s.Write([]byte("1\n")) - testutil.FatalIfErr(t, err) - - awaken(0) // Synchronise past read. - - cancel() // This cancellation should cause the stream to shut down. - - wg.Wait() - close(lines) - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: addr, Line: "1"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - if !ss.IsComplete() { - t.Errorf("expecting dgramstream to be complete because cancel") - } - })) - } -} diff --git a/inputs/mtail/internal/tailer/logstream/fifostream.go b/inputs/mtail/internal/tailer/logstream/fifostream.go new file mode 100644 index 00000000..a121b5a7 --- /dev/null +++ b/inputs/mtail/internal/tailer/logstream/fifostream.go @@ -0,0 +1,144 @@ +// Copyright 2020 Google Inc. All Rights Reserved. +// This file is available under the Apache license. + +package logstream + +import ( + "context" + "errors" + "io" + "log" + "os" + "sync" + "syscall" + + "flashcat.cloud/categraf/inputs/mtail/internal/logline" + "flashcat.cloud/categraf/inputs/mtail/internal/waker" +) + +type fifoStream struct { + streamBase + + cancel context.CancelFunc + + pathname string // Given name for the underlying named pipe on the filesystem +} + +// newFifoStream creates a new stream reader for Unix Fifos. +// `pathname` must already be verified as clean. +func newFifoStream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, pathname string, fi os.FileInfo) (LogStream, error) { + ctx, cancel := context.WithCancel(ctx) + ps := &fifoStream{ + cancel: cancel, + pathname: pathname, + streamBase: streamBase{ + sourcename: pathname, + lines: make(chan *logline.LogLine), + }, + } + if err := ps.stream(ctx, wg, waker, fi); err != nil { + return nil, err + } + return ps, nil +} + +func fifoOpen(pathname string) (*os.File, error) { + if IsStdinPattern(pathname) { + return os.Stdin, nil + } + // Open in nonblocking mode because the write end of the fifo may not have started yet; this also gives us the ability to set a read deadline when the context is cancelled. 
https://github.com/golang/go/issues/24842 + fd, err := os.OpenFile(pathname, os.O_RDONLY|syscall.O_NONBLOCK, 0o600) // #nosec G304 -- path already validated by caller + if err != nil { + log.Printf("fifoOpen(%s): open failed: %v", pathname, err) + logErrors.Add(pathname, 1) + return nil, err + } + // glog.V(2).Infof("fifoOpen(%s): opened new fifo %v", pathname, fd) + return fd, nil +} + +// The read buffer size for fifos. +// +// Before Linux 2.6.11, the capacity of a fifo was the same as the +// system page size (e.g., 4096 bytes on i386). Since Linux 2.6.11, +// the fifo capacity is 16 pages (i.e., 65,536 bytes in a system +// with a page size of 4096 bytes). Since Linux 2.6.35, the default +// fifo capacity is 16 pages, but the capacity can be queried and +// set using the fcntl(2) F_GETPIPE_SZ and F_SETPIPE_SZ operations. +// See fcntl(2) for more information. +// +// https://man7.org/linux/man-pages/man7/pipe.7.html +const defaultFifoReadBufferSize = 131072 + +func (ps *fifoStream) stream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, _ os.FileInfo) error { + fd, err := fifoOpen(ps.pathname) + if err != nil { + return err + } + lr := NewLineReader(ps.sourcename, ps.lines, fd, defaultFifoReadBufferSize, ps.cancel) + var total int + wg.Add(1) + go func() { + defer wg.Done() + defer func() { + // glog.V(2).Infof("stream(%s): read total %d bytes", ps.sourcename, total) + // glog.V(2).Infof("stream(%s): closing file descriptor %v", ps.sourcename, fd) + err := fd.Close() + if err != nil { + logErrors.Add(ps.pathname, 1) + log.Println(err) + } + logCloses.Add(ps.pathname, 1) + lr.Finish(ctx) + close(ps.lines) + ps.cancel() + }() + SetReadDeadlineOnDone(ctx, fd) + + for { + n, err := lr.ReadAndSend(ctx) + + if n > 0 { + total += n + + // No error implies there is more to read so restart the loop. + if err == nil && ctx.Err() == nil { + continue + } + } else if n == 0 && total > 0 { + // `pipe(7)` tells us "If all file descriptors referring to the + // write end of a fifo have been closed, then an attempt to + // read(2) from the fifo will see end-of-file (read(2) will + // return 0)." To avoid shutting down the stream at startup + // before any writer has connected to the fifo, condition on + // having read any bytes previously. + // glog.V(2).Infof("stream(%s): exiting, 0 bytes read", ps.sourcename) + return + } + + // Test to see if we should exit. + if IsExitableError(err) { + // Because we've opened in nonblocking mode, this Read can return + // straight away. If there are no writers, it'll return EOF (per + // `pipe(7)` and `read(2)`.) This is expected when `mtail` is + // starting at system init as the writer may not be ready yet. + if !(errors.Is(err, io.EOF) && total == 0) { + // glog.V(2).Infof("stream(%s): exiting, stream has error %s", ps.sourcename, err) + return + } + } + + // Wait for wakeup or termination. + // glog.V(2).Infof("stream(%s): waiting", ps.sourcename) + select { + case <-ctx.Done(): + // Exit after next read attempt. 
+ // glog.V(2).Infof("stream(%s): context cancelled, exiting after next read timeout", ps.pathname) + case <-waker.Wake(): + // sleep until next Wake() + // glog.V(2).Infof("stream(%s): Wake received", ps.sourcename) + } + } + }() + return nil +} diff --git a/inputs/mtail/internal/tailer/logstream/filestream.go b/inputs/mtail/internal/tailer/logstream/filestream.go index cb1fa6b2..9b358d6f 100644 --- a/inputs/mtail/internal/tailer/logstream/filestream.go +++ b/inputs/mtail/internal/tailer/logstream/filestream.go @@ -4,7 +4,6 @@ package logstream import ( - "bytes" "context" "errors" "expvar" @@ -13,8 +12,8 @@ import ( "os" "sync" "syscall" - "time" + // "github.com/golang/glog" "flashcat.cloud/categraf/inputs/mtail/internal/logline" "flashcat.cloud/categraf/inputs/mtail/internal/waker" ) @@ -34,153 +33,143 @@ var fileTruncates = expvar.NewMap("file_truncates_total") // a new goroutine and closes itself down. The shared context is used for // cancellation. type fileStream struct { - ctx context.Context - lines chan<- *logline.LogLine + streamBase - pathname string // Given name for the underlying file on the filesystem - - mu sync.RWMutex // protects following fields. - lastReadTime time.Time // Last time a log line was read from this file - completed bool // The filestream is completed and can no longer be used. + cancel context.CancelFunc - stopOnce sync.Once // Ensure stopChan only closed once. - stopChan chan struct{} // Close to start graceful shutdown. + pathname string // Given name for the underlying file on the filesystem } // newFileStream creates a new log stream from a regular file. -func newFileStream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, pathname string, fi os.FileInfo, lines chan<- *logline.LogLine, streamFromStart bool) (LogStream, error) { - fs := &fileStream{ctx: ctx, pathname: pathname, lastReadTime: time.Now(), lines: lines, stopChan: make(chan struct{})} - if err := fs.stream(ctx, wg, waker, fi, streamFromStart); err != nil { +func newFileStream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, pathname string, fi os.FileInfo, oneShot OneShotMode) (LogStream, error) { + ctx, cancel := context.WithCancel(ctx) + fs := &fileStream{ + cancel: cancel, + pathname: pathname, + streamBase: streamBase{ + sourcename: pathname, + lines: make(chan *logline.LogLine), + }, + } + // Stream from the start of the file when in one shot mode. + streamFromStart := oneShot == OneShotEnabled + if err := fs.stream(ctx, wg, waker, fi, oneShot, streamFromStart); err != nil { return nil, err } return fs, nil } -func (fs *fileStream) LastReadTime() time.Time { - fs.mu.RLock() - defer fs.mu.RUnlock() - return fs.lastReadTime -} - -func (fs *fileStream) stream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, fi os.FileInfo, streamFromStart bool) error { +func (fs *fileStream) stream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, fi os.FileInfo, oneShot OneShotMode, streamFromStart bool) error { fd, err := os.OpenFile(fs.pathname, os.O_RDONLY, 0o600) if err != nil { - logErrors.Add(fs.pathname, 1) + logErrors.Add(fs.sourcename, 1) return err } - logOpens.Add(fs.pathname, 1) - // glog.V(2).Infof("%v: opened new file", fd) + logOpens.Add(fs.sourcename, 1) + // glog.V(2).Infof("stream(%s): opened new file", fs.sourcename) if !streamFromStart { + // Normal operation for first stream is to ignore the past, and seek to + // EOF immediately to start tailing. 
if _, err := fd.Seek(0, io.SeekEnd); err != nil { - logErrors.Add(fs.pathname, 1) + logErrors.Add(fs.sourcename, 1) if err := fd.Close(); err != nil { - logErrors.Add(fs.pathname, 1) - log.Println(err) + logErrors.Add(fs.sourcename, 1) + log.Printf("stream(%s): closing file: %v", fs.sourcename, err) } return err } - // glog.V(2).Infof("%v: seeked to end", fd) + // glog.V(2).Infof("stream(%s): seeked to end", fs.sourcename) } - b := make([]byte, defaultReadBufferSize) - var lastBytes []byte - partial := bytes.NewBufferString("") + + lr := NewLineReader(fs.sourcename, fs.lines, fd, defaultReadBufferSize, fs.cancel) + started := make(chan struct{}) var total int wg.Add(1) go func() { defer wg.Done() defer func() { - // glog.V(2).Infof("%v: read total %d bytes from %s", fd, total, fs.pathname) - // glog.V(2).Infof("%v: closing file descriptor", fd) + // glog.V(2).Infof("stream(%s): read total %d bytes", fs.sourcename, total) + // glog.V(2).Infof("stream(%s): closing file descriptor", fs.sourcename) if err := fd.Close(); err != nil { - logErrors.Add(fs.pathname, 1) - log.Println(err) + logErrors.Add(fs.sourcename, 1) + log.Printf("stream(%s): closing file: %v", fs.sourcename, err) } - logCloses.Add(fs.pathname, 1) + logCloses.Add(fs.sourcename, 1) }() close(started) for { // Blocking read but regular files will return EOF straight away. - count, err := fd.Read(b) - // glog.V(2).Infof("%v: read %d bytes, err is %v", fd, count, err) + count, err := lr.ReadAndSend(ctx) + // glog.V(2).Infof("stream(%s): read %d bytes, err is %v", fs.sourcename, count, err) if count > 0 { total += count - // glog.V(2).Infof("%v: decode and send", fd) - needSend := lastBytes - needSend = append(needSend, b[:count]...) - sendCount := decodeAndSend(ctx, fs.lines, fs.pathname, len(needSend), needSend, partial) - if sendCount < len(needSend) { - lastBytes = append([]byte{}, needSend[sendCount:]...) - } else { - lastBytes = []byte{} + + // No error implies there is more to read so restart the loop. + if err == nil && ctx.Err() == nil { + continue } - fs.mu.Lock() - fs.lastReadTime = time.Now() - fs.mu.Unlock() } - if err != nil && err != io.EOF { - logErrors.Add(fs.pathname, 1) + if err != nil && !errors.Is(err, io.EOF) { + logErrors.Add(fs.sourcename, 1) // TODO: This could be generalised to check for any retryable // errors, and end on unretriables; e.g. ESTALE looks // retryable. if errors.Is(err, syscall.ESTALE) { - log.Printf("%v: reopening stream due to %s", fd, err) - if nerr := fs.stream(ctx, wg, waker, fi, true); nerr != nil { - log.Println(nerr) + log.Printf("stream(%s): reopening stream due to %s", fs.sourcename, err) + // streamFromStart always true on a stream reopen + if nerr := fs.stream(ctx, wg, waker, fi, oneShot, true); nerr != nil { + log.Printf("stream(%s): new stream: %v", fs.sourcename, nerr) } // Close this stream. return } - log.Println(err) + log.Printf("stream(%s): read error: %v", fs.sourcename, err) } // If we have read no bytes and are at EOF, check for truncation and rotation. - if err == io.EOF && count == 0 { - // glog.V(2).Infof("%v: eof an no bytes", fd) + if errors.Is(err, io.EOF) && count == 0 { + // glog.V(2).Infof("stream(%s): eof an no bytes", fs.sourcename) // Both rotation and truncation need to stat, so check for // rotation first. It is assumed that rotation is the more // common change pattern anyway. 
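				// Illustrative sketch (not part of this change), condensing the checks that
				// follow; fi is the FileInfo captured when the file was opened:
				//
				//   newfi, _ := os.Stat(fs.pathname)         // stat the pathname, not the open fd
				//   if !os.SameFile(fi, newfi) { ... }       // rotated: stream the new file from the start
				//   offset, _ := fd.Seek(0, io.SeekCurrent)
				//   if newfi.Size() < offset { ... }         // truncated: flush the partial line, seek back to 0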
newfi, serr := os.Stat(fs.pathname) if serr != nil { - log.Println(serr) + log.Printf("stream(%s): stat error: %v", serr) // If this is a NotExist error, then we should wrap up this // goroutine. The Tailer will create a new logstream if the // file is in the middle of a rotation and gets recreated // in the next moment. We can't rely on the Tailer to tell // us we're deleted because the tailer can only tell us to - // Stop, which ends up causing us to race here against - // detection of IsCompleted. + // cancel. if os.IsNotExist(serr) { - // glog.V(2).Infof("%v: source no longer exists, exiting", fd) - if partial.Len() > 0 { - sendLine(ctx, fs.pathname, partial, fs.lines) - } - fs.mu.Lock() - fs.completed = true - fs.mu.Unlock() + // glog.V(2).Infof("stream(%s): source no longer exists, exiting", fs.sourcename) + lr.Finish(ctx) + close(fs.lines) return } - logErrors.Add(fs.pathname, 1) + logErrors.Add(fs.sourcename, 1) goto Sleep } if !os.SameFile(fi, newfi) { - // glog.V(2).Infof("%v: adding a new file routine", fd) - if err := fs.stream(ctx, wg, waker, newfi, true); err != nil { - log.Println(err) + // glog.V(2).Infof("stream(%s): adding a new file routine", fs.sourcename) + // Stream from start always true on a stream reopen + if err := fs.stream(ctx, wg, waker, newfi, oneShot, true); err != nil { + log.Printf("stream(%s): new stream: %v", fs.sourcename, err) } // We're at EOF so there's nothing left to read here. return } currentOffset, serr := fd.Seek(0, io.SeekCurrent) if serr != nil { - logErrors.Add(fs.pathname, 1) + logErrors.Add(fs.sourcename, 1) log.Println(serr) continue } - // glog.V(2).Infof("%v: current seek is %d", fd, currentOffset) - // glog.V(2).Infof("%v: new size is %d", fd, newfi.Size()) + // glog.V(2).Infof("stream(%s): current seek is %d", fs.sourcename, currentOffset) + // glog.V(2).Infof("stream(%s): new size is %d", fs.sourcename, newfi.Size()) // We know that newfi is from the current file. Truncation can // only be detected if the new file is currently shorter than // the current seek offset. In test this can be a race, but in @@ -188,50 +177,36 @@ func (fs *fileStream) stream(ctx context.Context, wg *sync.WaitGroup, waker wake // than the previous after rotation in the time it takes for // mtail to notice. if newfi.Size() < currentOffset { - // glog.V(2).Infof("%v: truncate? currentoffset is %d and size is %d", fd, currentOffset, newfi.Size()) + // glog.V(2).Infof("stream(%s): truncate? currentoffset is %d and size is %d", fs.sourcename, currentOffset, newfi.Size()) // About to lose all remaining data because of the truncate so flush the accumulator. - if partial.Len() > 0 { - sendLine(ctx, fs.pathname, partial, fs.lines) - } + lr.Finish(ctx) _, serr := fd.Seek(0, io.SeekStart) if serr != nil { - logErrors.Add(fs.pathname, 1) - log.Println(serr) + logErrors.Add(fs.sourcename, 1) + log.Printf("stream(%s): seek: %v", fs.sourcename, serr) } - // glog.V(2).Infof("%v: Seeked to %d", fd, p) - fileTruncates.Add(fs.pathname, 1) + // glog.V(2).Infof("stream(%s): Seeked to %d", fs.sourcename, p) + fileTruncates.Add(fs.sourcename, 1) continue } } - // No error implies there is more to read in this file so go - // straight back to read unless it looks like context is Done. - if err == nil && ctx.Err() == nil { - continue - } - Sleep: // If we get here it's because we've stalled. First test to see if it's // time to exit. 
- if err == io.EOF || ctx.Err() != nil { - select { - case <-fs.stopChan: - // glog.V(2).Infof("%v: stream has been stopped, exiting", fd) - if partial.Len() > 0 { - sendLine(ctx, fs.pathname, partial, fs.lines) - } - fs.mu.Lock() - fs.completed = true - fs.mu.Unlock() + if errors.Is(err, io.EOF) { + if oneShot == OneShotEnabled { + // Exit now, because oneShot means read only to EOF. + // glog.V(2).Infof("stream(%s): EOF in one shot mode, exiting", fs.sourcename) + lr.Finish(ctx) + close(fs.lines) return + } + select { case <-ctx.Done(): - // glog.V(2).Infof("%v: stream has been cancelled, exiting", fd) - if partial.Len() > 0 { - sendLine(ctx, fs.pathname, partial, fs.lines) - } - fs.mu.Lock() - fs.completed = true - fs.mu.Unlock() + // glog.V(2).Infof("stream(%s): context has been cancelled, exiting", fs.sourcename) + lr.Finish(ctx) + close(fs.lines) return default: // keep going @@ -240,24 +215,22 @@ func (fs *fileStream) stream(ctx context.Context, wg *sync.WaitGroup, waker wake // Don't exit, instead yield and wait for a termination signal or // wakeup. - // glog.V(2).Infof("%v: waiting", fd) + // glog.V(2).Infof("stream(%s): waiting", fs.sourcename) select { - case <-fs.stopChan: - // We may have started waiting here when the stop signal + case <-ctx.Done(): + // Exit after next read attempt. + // We may have started waiting here when the cancellation // arrives, but since that wait the file may have been // written to. The file is not technically yet at EOF so // we need to go back and try one more read. We'll exit - // the stream in the select stanza above. - // glog.V(2).Infof("%v: Stopping after next read", fd) - case <-ctx.Done(): - // Same for cancellation; this makes tests stable, but + // the stream in the select stanza above. This makes tests stable, but // could argue exiting immediately is less surprising. // Assumption is that this doesn't make a difference in // production. - // glog.V(2).Infof("%v: Cancelled after next read", fd) + // glog.V(2).Infof("stream(%s): context cancelled, exiting after next read timeout", fs.pathname) case <-waker.Wake(): // sleep until next Wake() - // glog.V(2).Infof("%v: Wake received", fd) + // glog.V(2).Infof("stream(%s): Wake received", fs.sourcename) } } }() @@ -265,17 +238,3 @@ func (fs *fileStream) stream(ctx context.Context, wg *sync.WaitGroup, waker wake <-started return nil } - -func (fs *fileStream) IsComplete() bool { - fs.mu.RLock() - defer fs.mu.RUnlock() - return fs.completed -} - -// Stop implements the LogStream interface. -func (fs *fileStream) Stop() { - fs.stopOnce.Do(func() { - log.Println("signalling stop at next EOF") - close(fs.stopChan) - }) -} diff --git a/inputs/mtail/internal/tailer/logstream/filestream_test.go b/inputs/mtail/internal/tailer/logstream/filestream_test.go deleted file mode 100644 index 65a3c2f2..00000000 --- a/inputs/mtail/internal/tailer/logstream/filestream_test.go +++ /dev/null @@ -1,218 +0,0 @@ -// Copyright 2020 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
- -package logstream_test - -import ( - "context" - "path/filepath" - "sync" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/logline" - "flashcat.cloud/categraf/inputs/mtail/internal/tailer/logstream" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" - "flashcat.cloud/categraf/inputs/mtail/internal/waker" -) - -func TestFileStreamRead(t *testing.T) { - var wg sync.WaitGroup - - tmpDir := testutil.TestTempDir(t) - - name := filepath.Join(tmpDir, "log") - f := testutil.TestOpenFile(t, name) - defer f.Close() - - lines := make(chan *logline.LogLine, 1) - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, 1) - fs, err := logstream.New(ctx, &wg, waker, name, lines, true) - testutil.FatalIfErr(t, err) - awaken(1) - - testutil.WriteString(t, f, "yo\n") - awaken(1) - - fs.Stop() - wg.Wait() - close(lines) - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: name, Line: "yo"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - if !fs.IsComplete() { - t.Errorf("expecting filestream to be complete because stopped") - } - cancel() - wg.Wait() -} - -func TestFileStreamReadNonSingleByteEnd(t *testing.T) { - var wg sync.WaitGroup - - tmpDir := testutil.TestTempDir(t) - - name := filepath.Join(tmpDir, "log") - f := testutil.TestOpenFile(t, name) - defer f.Close() - - lines := make(chan *logline.LogLine, 1) - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, 1) - fs, err := logstream.New(ctx, &wg, waker, name, lines, true) - testutil.FatalIfErr(t, err) - awaken(1) - - s := "a" - for i := 0; i < 4094; i++ { - s += "a" - } - - s += "中" - testutil.WriteString(t, f, s+"\n") - awaken(1) - - fs.Stop() - wg.Wait() - close(lines) - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: name, Line: s}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - if !fs.IsComplete() { - t.Errorf("expecting filestream to be complete because stopped") - } - cancel() - wg.Wait() -} - -func TestFileStreamTruncation(t *testing.T) { - var wg sync.WaitGroup - - tmpDir := testutil.TestTempDir(t) - - name := filepath.Join(tmpDir, "log") - f := testutil.OpenLogFile(t, name) - defer f.Close() - - lines := make(chan *logline.LogLine, 3) - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, 1) - fs, err := logstream.New(ctx, &wg, waker, name, lines, true) - // fs.Stop() is also called explicitly further down but a failed test - // and early return would lead to the handle staying open - defer fs.Stop() - - testutil.FatalIfErr(t, err) - awaken(1) // Synchronise past first read after seekToEnd - - testutil.WriteString(t, f, "1\n2\n") - awaken(1) - testutil.FatalIfErr(t, f.Close()) - awaken(1) - f = testutil.OpenLogFile(t, name) - defer f.Close() - - testutil.WriteString(t, f, "3\n") - awaken(1) - - fs.Stop() - wg.Wait() - close(lines) - - received := testutil.LinesReceived(lines) - - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: name, Line: "1"}, - {Context: context.TODO(), Filename: name, Line: "2"}, - {Context: context.TODO(), Filename: name, Line: "3"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - cancel() - wg.Wait() -} - -func TestFileStreamFinishedBecauseCancel(t 
*testing.T) { - var wg sync.WaitGroup - - tmpDir := testutil.TestTempDir(t) - - name := filepath.Join(tmpDir, "log") - f := testutil.TestOpenFile(t, name) - defer f.Close() - - lines := make(chan *logline.LogLine, 1) - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, 1) - - fs, err := logstream.New(ctx, &wg, waker, name, lines, true) - testutil.FatalIfErr(t, err) - awaken(1) // Synchronise past first read after seekToEnd - - testutil.WriteString(t, f, "yo\n") - awaken(1) - - cancel() - wg.Wait() - close(lines) // Signal it's time to go. - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: name, Line: "yo"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - if !fs.IsComplete() { - t.Errorf("expecting filestream to be complete because stream was cancelled") - } -} - -func TestFileStreamPartialRead(t *testing.T) { - var wg sync.WaitGroup - - tmpDir := testutil.TestTempDir(t) - - name := filepath.Join(tmpDir, "log") - f := testutil.TestOpenFile(t, name) - defer f.Close() - - lines := make(chan *logline.LogLine, 1) - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, 1) - - fs, err := logstream.New(ctx, &wg, waker, name, lines, true) - testutil.FatalIfErr(t, err) - awaken(1) - - testutil.WriteString(t, f, "yo") - awaken(1) - - // received := testutil.LinesReceived(lines) - // expected := []*logline.LogLine{} - // testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - testutil.WriteString(t, f, "\n") - awaken(1) - - fs.Stop() - wg.Wait() - close(lines) - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: name, Line: "yo"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - if !fs.IsComplete() { - t.Errorf("expecting filestream to be complete because cancellation") - } - - cancel() - wg.Wait() -} diff --git a/inputs/mtail/internal/tailer/logstream/filestream_unix_test.go b/inputs/mtail/internal/tailer/logstream/filestream_unix_test.go deleted file mode 100644 index 6be978f3..00000000 --- a/inputs/mtail/internal/tailer/logstream/filestream_unix_test.go +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright 2020 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -//go:build unix -// +build unix - -package logstream_test - -import ( - "context" - "log" - "os" - "path/filepath" - "sync" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/logline" - "flashcat.cloud/categraf/inputs/mtail/internal/tailer/logstream" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" - "flashcat.cloud/categraf/inputs/mtail/internal/waker" -) - -// TestFileStreamRotation is a unix-specific test because on Windows, files cannot be removed -// or renamed while there is an open read handle on them. Instead, log rotation would -// have to be implemented by copying and then truncating the original file. That test -// case is already covered by TestFileStreamTruncation. 
-func TestFileStreamRotation(t *testing.T) { - var wg sync.WaitGroup - - tmpDir := testutil.TestTempDir(t) - - name := filepath.Join(tmpDir, "log") - f := testutil.TestOpenFile(t, name) - defer f.Close() - - lines := make(chan *logline.LogLine, 2) - - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, 1) - - fs, err := logstream.New(ctx, &wg, waker, name, lines, true) - // fs.Stop() is also called explicitly further down but a failed test - // and early return would lead to the handle staying open - defer fs.Stop() - - testutil.FatalIfErr(t, err) - awaken(1) - - log.Println("write 1") - testutil.WriteString(t, f, "1\n") - awaken(1) - - log.Println("rename") - testutil.FatalIfErr(t, os.Rename(name, name+".1")) - // filestream won't notice if there's a synchronisation point between - // rename and create, that path relies on the tailer - f = testutil.TestOpenFile(t, name) - defer f.Close() - - awaken(1) - log.Println("write 2") - testutil.WriteString(t, f, "2\n") - awaken(1) - - fs.Stop() - wg.Wait() - close(lines) - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: name, Line: "1"}, - {Context: context.TODO(), Filename: name, Line: "2"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - cancel() - wg.Wait() -} - -func TestFileStreamURL(t *testing.T) { - var wg sync.WaitGroup - - tmpDir := testutil.TestTempDir(t) - - name := filepath.Join(tmpDir, "log") - f := testutil.TestOpenFile(t, name) - defer f.Close() - - lines := make(chan *logline.LogLine, 1) - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, 1) - fs, err := logstream.New(ctx, &wg, waker, "file://"+name, lines, true) - testutil.FatalIfErr(t, err) - awaken(1) - - testutil.WriteString(t, f, "yo\n") - awaken(1) - - fs.Stop() - wg.Wait() - close(lines) - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: name, Line: "yo"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - if !fs.IsComplete() { - t.Errorf("expecting filestream to be complete because stopped") - } - cancel() - wg.Wait() -} - -// TestFileStreamOpenFailure is a unix-specific test because on Windows, it is not possible to create a file -// that you yourself cannot read (minimum permissions are 0222). 
-func TestFileStreamOpenFailure(t *testing.T) { - // can't force a permission denied if run as root - testutil.SkipIfRoot(t) - var wg sync.WaitGroup - - tmpDir := testutil.TestTempDir(t) - - name := filepath.Join(tmpDir, "log") - f, err := os.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0) - defer f.Close() - - testutil.FatalIfErr(t, err) - - lines := make(chan *logline.LogLine, 1) - ctx, cancel := context.WithCancel(context.Background()) - waker, _ := waker.NewTest(ctx, 0) - - _, err = logstream.New(ctx, &wg, waker, name, lines, true) - if err == nil || !os.IsPermission(err) { - t.Errorf("Expected a permission denied error, got: %v", err) - } - cancel() -} diff --git a/inputs/mtail/internal/tailer/logstream/logstream.go b/inputs/mtail/internal/tailer/logstream/logstream.go index bd6e9970..59837d36 100644 --- a/inputs/mtail/internal/tailer/logstream/logstream.go +++ b/inputs/mtail/internal/tailer/logstream/logstream.go @@ -13,14 +13,13 @@ import ( "errors" "expvar" "fmt" + "log" "net/url" "os" "sync" - "time" "flashcat.cloud/categraf/inputs/mtail/internal/logline" "flashcat.cloud/categraf/inputs/mtail/internal/waker" - // "github.com/golang/glog" ) var ( @@ -34,47 +33,67 @@ var ( // LogStream. type LogStream interface { - LastReadTime() time.Time // Return the time when the last log line was read from the source - Stop() // Ask to gracefully stop the stream; e.g. stream keeps reading until EOF and then completes work. - IsComplete() bool // True if the logstream has completed work and cannot recover. The caller should clean up this logstream, creating a new logstream on a pathname if necessary. + Lines() <-chan *logline.LogLine // Returns the output channel of this LogStream. } -// defaultReadBufferSize the size of the buffer for reading bytes into. -const defaultReadBufferSize = 4096 +// defaultReadBufferSize the size of the buffer for reading bytes for files. +// +// Anecdotally the maximum file read buffer is 4GiB, but thats way too massive. +const defaultReadBufferSize = 131072 + +const stdinPattern = "-" var ( ErrUnsupportedURLScheme = errors.New("unsupported URL scheme") ErrUnsupportedFileType = errors.New("unsupported file type") ErrEmptySocketAddress = errors.New("socket address cannot be empty, please provide a unix domain socket filename or host:port") + ErrNeedsWaitgroup = errors.New("logstream needs a waitgroup") +) + +type OneShotMode bool + +const ( + OneShotDisabled OneShotMode = false + OneShotEnabled OneShotMode = true ) // New creates a LogStream from the file object located at the absolute path // `pathname`. The LogStream will watch `ctx` for a cancellation signal, and -// notify the `wg` when it is Done. Log lines will be sent to the `lines` -// channel. `seekToStart` is only used for testing and only works for regular -// files that can be seeked. -func New(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, pathname string, lines chan<- *logline.LogLine, oneShot bool) (LogStream, error) { +// notify the `wg` when it is Done. `oneShot` is used for testing and only +// works for regular files that can be seeked. 
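For reference, a minimal sketch (not part of this change; tailOnce is a hypothetical name and the waker is assumed to be supplied by the caller, as the tailer does) of how a consumer drives the reworked constructor and ranges over Lines() instead of the old Stop/IsComplete pair:

package example

import (
	"context"
	"log"
	"sync"

	"flashcat.cloud/categraf/inputs/mtail/internal/tailer/logstream"
	"flashcat.cloud/categraf/inputs/mtail/internal/waker"
)

func tailOnce(ctx context.Context, wg *sync.WaitGroup, w waker.Waker, pathname string) error {
	ls, err := logstream.New(ctx, wg, w, pathname, logstream.OneShotEnabled)
	if err != nil {
		return err
	}
	// The stream owns its output channel; it is closed when the stream finishes.
	for line := range ls.Lines() {
		log.Printf("%s: %s", line.Filename, line.Line)
	}
	return nil
}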
+func New(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, pathname string, oneShot OneShotMode) (LogStream, error) { + if wg == nil { + return nil, ErrNeedsWaitgroup + } u, err := url.Parse(pathname) if err != nil { return nil, err } - // glog.Infof("Parsed url as %v", u) + log.Println("Parsed url as %v", u) path := pathname switch u.Scheme { default: // glog.V(2).Infof("%v: %q in path pattern %q, treating as path", ErrUnsupportedURLScheme, u.Scheme, pathname) case "unixgram": - return newDgramStream(ctx, wg, waker, u.Scheme, u.Path, lines) + return newDgramStream(ctx, wg, waker, u.Scheme, u.Path, oneShot) case "unix": - return newSocketStream(ctx, wg, waker, u.Scheme, u.Path, lines, oneShot) + return newSocketStream(ctx, wg, waker, u.Scheme, u.Path, oneShot) case "tcp": - return newSocketStream(ctx, wg, waker, u.Scheme, u.Host, lines, oneShot) + return newSocketStream(ctx, wg, waker, u.Scheme, u.Host, oneShot) case "udp": - return newDgramStream(ctx, wg, waker, u.Scheme, u.Host, lines) + return newDgramStream(ctx, wg, waker, u.Scheme, u.Host, oneShot) case "", "file": path = u.Path } + if IsStdinPattern(path) { + fi, err := os.Stdin.Stat() + if err != nil { + logErrors.Add(path, 1) + return nil, err + } + return newFifoStream(ctx, wg, waker, path, fi) + } fi, err := os.Stat(path) if err != nil { logErrors.Add(path, 1) @@ -82,13 +101,23 @@ func New(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, pathname st } switch m := fi.Mode(); { case m.IsRegular(): - return newFileStream(ctx, wg, waker, path, fi, lines, oneShot) + return newFileStream(ctx, wg, waker, path, fi, oneShot) case m&os.ModeType == os.ModeNamedPipe: - return newPipeStream(ctx, wg, waker, path, fi, lines) + return newFifoStream(ctx, wg, waker, path, fi) // TODO(jaq): in order to listen on an existing socket filepath, we must unlink and recreate it // case m&os.ModeType == os.ModeSocket: - // return newSocketStream(ctx, wg, waker, pathname, lines) + // return newSocketStream(ctx, wg, waker, pathname) default: return nil, fmt.Errorf("%w: %q", ErrUnsupportedFileType, pathname) } } + +func IsStdinPattern(pattern string) bool { + if pattern == stdinPattern { + return true + } + if pattern == "/dev/stdin" { + return true + } + return false +} diff --git a/inputs/mtail/internal/tailer/logstream/pipestream.go b/inputs/mtail/internal/tailer/logstream/pipestream.go deleted file mode 100644 index 63283da9..00000000 --- a/inputs/mtail/internal/tailer/logstream/pipestream.go +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright 2020 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package logstream - -import ( - "bytes" - "context" - "log" - "os" - "sync" - "syscall" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/logline" - "flashcat.cloud/categraf/inputs/mtail/internal/waker" - // "github.com/golang/glog" -) - -type pipeStream struct { - ctx context.Context - lines chan<- *logline.LogLine - - pathname string // Given name for the underlying named pipe on the filesystem - - mu sync.RWMutex // protects following fields - completed bool // This pipestream is completed and can no longer be used. 
- lastReadTime time.Time // Last time a log line was read from this named pipe -} - -func newPipeStream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, pathname string, fi os.FileInfo, lines chan<- *logline.LogLine) (LogStream, error) { - ps := &pipeStream{ctx: ctx, pathname: pathname, lastReadTime: time.Now(), lines: lines} - if err := ps.stream(ctx, wg, waker, fi); err != nil { - return nil, err - } - return ps, nil -} - -func (ps *pipeStream) LastReadTime() time.Time { - ps.mu.RLock() - defer ps.mu.RUnlock() - return ps.lastReadTime -} - -func (ps *pipeStream) stream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, fi os.FileInfo) error { - // Open in nonblocking mode because the write end of the pipe may not have started yet. - fd, err := os.OpenFile(ps.pathname, os.O_RDONLY|syscall.O_NONBLOCK, 0o600) - if err != nil { - logErrors.Add(ps.pathname, 1) - return err - } - // glog.V(2).Infof("opened new pipe %v", fd) - b := make([]byte, defaultReadBufferSize) - partial := bytes.NewBufferString("") - var total int - wg.Add(1) - go func() { - defer wg.Done() - defer func() { - // glog.V(2).Infof("%v: read total %d bytes from %s", fd, total, ps.pathname) - // glog.V(2).Infof("%v: closing file descriptor", fd) - err := fd.Close() - if err != nil { - logErrors.Add(ps.pathname, 1) - log.Println(err) - } - logCloses.Add(ps.pathname, 1) - ps.mu.Lock() - ps.completed = true - ps.mu.Unlock() - }() - ctx, cancel := context.WithCancel(ctx) - defer cancel() - SetReadDeadlineOnDone(ctx, fd) - - for { - n, err := fd.Read(b) - // glog.V(2).Infof("%v: read %d bytes, err is %v", fd, n, err) - - if n > 0 { - total += n - decodeAndSend(ps.ctx, ps.lines, ps.pathname, n, b[:n], partial) - // Update the last read time if we were able to read anything. - ps.mu.Lock() - ps.lastReadTime = time.Now() - ps.mu.Unlock() - } - - // Test to see if we should exit. - if err != nil && IsEndOrCancel(err) { - if partial.Len() > 0 { - sendLine(ctx, ps.pathname, partial, ps.lines) - } - // glog.V(2).Infof("%v: exiting, stream has error %s", fd, err) - return - } - - // Wait for wakeup or termination. - // glog.V(2).Infof("%v: waiting", fd) - select { - case <-ctx.Done(): - // Exit immediately; cancelled context is going to cause the - // next read to be interrupted and exit, so don't bother going - // around the loop again. - return - case <-waker.Wake(): - // sleep until next Wake() - // glog.V(2).Infof("%v: Wake received", fd) - } - } - }() - return nil -} - -func (ps *pipeStream) IsComplete() bool { - ps.mu.RLock() - defer ps.mu.RUnlock() - return ps.completed -} - -// Stop implements the Logstream interface. -// Calling Stop on a PipeStream is a no-op; PipeStreams always read until the pipe is closed, which is what calling Stop means on a Logstream. -func (ps *pipeStream) Stop() { -} diff --git a/inputs/mtail/internal/tailer/logstream/pipestream_unix_test.go b/inputs/mtail/internal/tailer/logstream/pipestream_unix_test.go deleted file mode 100644 index 46ea0021..00000000 --- a/inputs/mtail/internal/tailer/logstream/pipestream_unix_test.go +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2020 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
- -//go:build unix -// +build unix - -package logstream_test - -import ( - "context" - "os" - "path/filepath" - "sync" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/logline" - "flashcat.cloud/categraf/inputs/mtail/internal/tailer/logstream" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" - "flashcat.cloud/categraf/inputs/mtail/internal/waker" - "golang.org/x/sys/unix" -) - -func TestPipeStreamReadCompletedBecauseClosed(t *testing.T) { - testutil.TimeoutTest(1*time.Second, func(t *testing.T) { //nolint:thelper - var wg sync.WaitGroup - - tmpDir := testutil.TestTempDir(t) - - name := filepath.Join(tmpDir, "fifo") - testutil.FatalIfErr(t, unix.Mkfifo(name, 0o666)) - - lines := make(chan *logline.LogLine, 1) - ctx, cancel := context.WithCancel(context.Background()) - waker := waker.NewTestAlways() - - // In this and the following test, open RDWR so as to not block this thread - // from proceeding. If we open the logstream first, there is a race before - // the write end opens that can sometimes lead to the logstream reading an - // EOF (because the write end is not yet open) and the test fails. - f, err := os.OpenFile(name, os.O_RDWR, os.ModeNamedPipe) - testutil.FatalIfErr(t, err) - - ps, err := logstream.New(ctx, &wg, waker, name, lines, false) - testutil.FatalIfErr(t, err) - - testutil.WriteString(t, f, "1\n") - - // Pipes need to be closed to signal to the pipeStream to finish up. - testutil.FatalIfErr(t, f.Close()) - - ps.Stop() // no-op for pipes - wg.Wait() - close(lines) - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: name, Line: "1"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - cancel() - - if !ps.IsComplete() { - t.Errorf("expecting pipestream to be complete because fifo closed") - } - })(t) -} - -func TestPipeStreamReadCompletedBecauseCancel(t *testing.T) { - testutil.TimeoutTest(1*time.Second, func(t *testing.T) { // nolint:thelper - var wg sync.WaitGroup - - tmpDir := testutil.TestTempDir(t) - - name := filepath.Join(tmpDir, "fifo") - testutil.FatalIfErr(t, unix.Mkfifo(name, 0o666)) - - lines := make(chan *logline.LogLine, 1) - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, 1) - - f, err := os.OpenFile(name, os.O_RDWR, os.ModeNamedPipe) - testutil.FatalIfErr(t, err) - - ps, err := logstream.New(ctx, &wg, waker, name, lines, false) - testutil.FatalIfErr(t, err) - - testutil.WriteString(t, f, "1\n") - - // Avoid a race with cancellation if we can synchronise with waker.Wake() - awaken(0) - - cancel() // Cancellation here should cause the stream to shut down. 
- - wg.Wait() - close(lines) - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: name, Line: "1"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - if !ps.IsComplete() { - t.Errorf("expecting pipestream to be complete because cancelled") - } - })(t) -} - -func TestPipeStreamReadURL(t *testing.T) { - var wg sync.WaitGroup - - tmpDir := testutil.TestTempDir(t) - - name := filepath.Join(tmpDir, "fifo") - testutil.FatalIfErr(t, unix.Mkfifo(name, 0o666)) - - lines := make(chan *logline.LogLine, 1) - ctx, cancel := context.WithCancel(context.Background()) - waker := waker.NewTestAlways() - - ps, err := logstream.New(ctx, &wg, waker, "file://"+name, lines, false) - testutil.FatalIfErr(t, err) - - f, err := os.OpenFile(name, os.O_WRONLY, os.ModeNamedPipe) - testutil.FatalIfErr(t, err) - testutil.WriteString(t, f, "1\n") - - // Pipes need to be closed to signal to the pipeStream to finish up. - testutil.FatalIfErr(t, f.Close()) - - ps.Stop() // no-op for pipes - wg.Wait() - close(lines) - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: name, Line: "1"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - cancel() - - if !ps.IsComplete() { - t.Errorf("expecting pipestream to be complete because fifo closed") - } -} diff --git a/inputs/mtail/internal/tailer/logstream/reader.go b/inputs/mtail/internal/tailer/logstream/reader.go new file mode 100644 index 00000000..4aa9f96e --- /dev/null +++ b/inputs/mtail/internal/tailer/logstream/reader.go @@ -0,0 +1,106 @@ +// Copyright 2024 Google Inc. All Rights Reserved. +// This file is available under the Apache license. + +package logstream + +import ( + "bytes" + "context" + "expvar" + "io" + "time" + + "flashcat.cloud/categraf/inputs/mtail/internal/logline" +) + +// logLines counts the number of lines read per log file. +var logLines = expvar.NewMap("log_lines_total") + +// LineReader reads lines from input and sends lines through the channel +type LineReader struct { + sourcename string // name of owner, for sending loglines + lines chan<- *logline.LogLine // not owned + f io.Reader // not owned + cancel context.CancelFunc + staleTimer *time.Timer // call CancelFunc if no read in 24h + + size int + buf []byte + off int // tracks the start of the next line in buf +} + +// NewLineReader creates a new LineReader +func NewLineReader(sourcename string, lines chan<- *logline.LogLine, f io.Reader, size int, cancel context.CancelFunc) *LineReader { + return &LineReader{ + sourcename: sourcename, + lines: lines, + f: f, + cancel: cancel, + size: size, + buf: make([]byte, 0, size), + } +} + +// ReadAndSend reads bytes from f, attempts to find line endings in the bytes read, and sends them to the lines channel. It manages the read buffer size to make sure we can always read size bytes. +func (lr *LineReader) ReadAndSend(ctx context.Context) (count int, err error) { + if cap(lr.buf)-len(lr.buf) < lr.size { + lr.buf = append(make([]byte, 0, len(lr.buf)+lr.size), lr.buf...) 
+ } + count, err = lr.f.Read(lr.buf[len(lr.buf):cap(lr.buf)]) + if lr.staleTimer != nil { + lr.staleTimer.Stop() + } + lr.buf = lr.buf[:len(lr.buf)+count] // reslice to set len + if count > 0 { + lr.staleTimer = time.AfterFunc(time.Hour*24, lr.cancel) + ok := true + for ok { + ok = lr.send(ctx) + } + // reslice to drop earlier bytes + lr.buf = lr.buf[lr.off:len(lr.buf)] + lr.off = 0 + } + return +} + +// send sends the line and resets the buffer offset +func (lr *LineReader) send(ctx context.Context) bool { + lim := min(len(lr.buf), cap(lr.buf)) + i := bytes.IndexByte(lr.buf[lr.off:lim], '\n') + // No newlines in the latest bytes, wait for next read + if i < 0 { + return false + } + end := lr.off + i // excluding delim + skip := 1 // len of delim char + + // Most file-based log sources will end with \n on Unixlike systems. On + // Windows they appear to be both \r\n. syslog disallows \r (and \t and + // others) and writes them escaped, per syslog(7). [RFC + // 3164](https://www.ietf.org/rfc/rfc3164.txt) disallows newlines in the + // message: "The MSG part of the syslog packet MUST contain visible + // (printing) characters." Thus if the previous char was a \r then ignore + // it as well. + if end > 0 && lr.buf[end-1] == '\r' { + end-- + skip = 2 + } + + line := string(lr.buf[lr.off:end]) + logLines.Add(lr.sourcename, 1) + lr.lines <- logline.New(ctx, lr.sourcename, line) + lr.off = end + skip // move past delim + return true +} + +// Finish sends the current accumulated line to the end of the buffer, despite +// there being no closing newline. +func (lr *LineReader) Finish(ctx context.Context) { + line := string(lr.buf[lr.off:]) + if len(line) == 0 { + return + } + logLines.Add(lr.sourcename, 1) + lr.lines <- logline.New(ctx, lr.sourcename, line) +} diff --git a/inputs/mtail/internal/tailer/logstream/socketstream.go b/inputs/mtail/internal/tailer/logstream/socketstream.go index 5d504f11..169cf92f 100644 --- a/inputs/mtail/internal/tailer/logstream/socketstream.go +++ b/inputs/mtail/internal/tailer/logstream/socketstream.go @@ -4,50 +4,48 @@ package logstream import ( - "bytes" "context" + "fmt" "log" "net" "sync" - "time" "flashcat.cloud/categraf/inputs/mtail/internal/logline" "flashcat.cloud/categraf/inputs/mtail/internal/waker" ) type socketStream struct { - ctx context.Context - lines chan<- *logline.LogLine + streamBase - oneShot bool + cancel context.CancelFunc + + oneShot OneShotMode scheme string // URL Scheme to listen with, either tcp or unix address string // Given name for the underlying socket path on the filesystem or host/port. - - mu sync.RWMutex // protects following fields - completed bool // This socketStream is completed and can no longer be used. - lastReadTime time.Time // Last time a log line was read from this socket - - stopOnce sync.Once // Ensure stopChan only closed once. - stopChan chan struct{} // Close to start graceful shutdown. 
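A minimal sketch of exercising the new LineReader above directly over an in-memory reader, showing how ReadAndSend splits on \n, strips a trailing \r, and how Finish flushes a final unterminated line; the source name, buffer size and sample input are illustrative assumptions.

package main

import (
	"context"
	"fmt"
	"strings"

	"flashcat.cloud/categraf/inputs/mtail/internal/logline"
	"flashcat.cloud/categraf/inputs/mtail/internal/tailer/logstream"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	lines := make(chan *logline.LogLine, 4) // buffered so the sends below don't block
	r := strings.NewReader("a\r\nb\npartial")
	lr := logstream.NewLineReader("example", lines, r, 4096, cancel)

	for {
		if _, err := lr.ReadAndSend(ctx); err != nil {
			break // strings.Reader reports io.EOF once drained
		}
	}
	lr.Finish(ctx) // flush the trailing line that has no newline
	close(lines)

	for l := range lines {
		fmt.Printf("%q\n", l.Line) // "a", "b", "partial"
	}
}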
} -func newSocketStream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, scheme, address string, lines chan<- *logline.LogLine, oneShot bool) (LogStream, error) { +func newSocketStream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, scheme, address string, oneShot OneShotMode) (LogStream, error) { if address == "" { return nil, ErrEmptySocketAddress } - ss := &socketStream{ctx: ctx, oneShot: oneShot, scheme: scheme, address: address, lastReadTime: time.Now(), lines: lines, stopChan: make(chan struct{})} + ctx, cancel := context.WithCancel(ctx) + ss := &socketStream{ + cancel: cancel, + oneShot: oneShot, + scheme: scheme, + address: address, + streamBase: streamBase{ + sourcename: fmt.Sprintf("%s://%s", scheme, address), + lines: make(chan *logline.LogLine), + }, + } + if err := ss.stream(ctx, wg, waker); err != nil { return nil, err } return ss, nil } -func (ss *socketStream) LastReadTime() time.Time { - ss.mu.RLock() - defer ss.mu.RUnlock() - return ss.lastReadTime -} - // stream starts goroutines to read data from the stream socket, until Stop is called or the context is cancelled. func (ss *socketStream) stream(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker) error { l, err := net.Listen(ss.scheme, ss.address) @@ -55,82 +53,71 @@ func (ss *socketStream) stream(ctx context.Context, wg *sync.WaitGroup, waker wa logErrors.Add(ss.address, 1) return err } - // glog.V(2).Infof("opened new socket listener %v", l) + // glog.V(2).Infof("stream(%s): opened new socket listener %+v", ss.sourcename, l) + + // signals when a connection has been opened + started := make(chan struct{}) + // tracks connection handling routines + var connWg sync.WaitGroup - initDone := make(chan struct{}) // Set up for shutdown wg.Add(1) go func() { defer wg.Done() - // If oneshot, wait only for the one conn handler to start, otherwise wait for context Done or stopChan. - <-initDone + // If oneshot, wait only for the one conn handler to start, otherwise + // wait for context Done or stopChan. 
+ <-started if !ss.oneShot { - select { - case <-ctx.Done(): - case <-ss.stopChan: - } + <-ctx.Done() } - // glog.V(2).Infof("%v: closing listener", l) + // glog.V(2).Infof("stream(%s): closing listener", ss.sourcename) err := l.Close() if err != nil { log.Println(err) } - ss.mu.Lock() - ss.completed = true - ss.mu.Unlock() + connWg.Wait() + close(ss.lines) }() - acceptConn := func() error { - c, err := l.Accept() - if err != nil { - log.Println(err) - return err - } - // glog.V(2).Infof("%v: got new conn %v", l, c) - wg.Add(1) - go ss.handleConn(ctx, wg, waker, c) - return nil - } - - if ss.oneShot { - wg.Add(1) - go func() { - defer wg.Done() - if err := acceptConn(); err != nil { - log.Println(err) - } - log.Println("oneshot mode, retuning") - close(initDone) - }() - return nil - } + var connOnce sync.Once wg.Add(1) go func() { defer wg.Done() for { - if err := acceptConn(); err != nil { + c, err := l.Accept() + if err != nil { + log.Println(err) + return + } + // glog.V(2).Infof("stream(%s): got new conn %v", ss.sourcename, c) + connWg.Add(1) + go ss.handleConn(ctx, &connWg, waker, c) + connOnce.Do(func() { close(started) }) + if ss.oneShot { + // glog.Infof("stream(%s): oneshot mode, exiting accept loop", ss.sourcename) return } } }() - close(initDone) + return nil } func (ss *socketStream) handleConn(ctx context.Context, wg *sync.WaitGroup, waker waker.Waker, c net.Conn) { defer wg.Done() - b := make([]byte, defaultReadBufferSize) - partial := bytes.NewBufferString("") + + lr := NewLineReader(ss.sourcename, ss.lines, c, defaultReadBufferSize, ss.cancel) var total int defer func() { - // glog.V(2).Infof("%v: read total %d bytes from %s", c, total, ss.address) - // glog.V(2).Infof("%v: closing connection", c) + // glog.V(2).Infof("stream(%s): read total %d bytes from %s", ss.sourcename, c, total) + // glog.V(2).Infof("stream(%s): closing connection, %v", ss.sourcename, c) err := c.Close() if err != nil { logErrors.Add(ss.address, 1) log.Println(err) } + lr.Finish(ctx) logCloses.Add(ss.address, 1) }() ctx, cancel := context.WithCancel(ctx) @@ -138,53 +125,32 @@ func (ss *socketStream) handleConn(ctx context.Context, wg *sync.WaitGroup, wake SetReadDeadlineOnDone(ctx, c) for { - n, err := c.Read(b) - // glog.V(2).Infof("%v: read %d bytes, err is %v", c, n, err) + n, err := lr.ReadAndSend(ctx) + // glog.V(2).Infof("stream(%s): read %d bytes, err is %v", ss.sourcename, n, err) if n > 0 { total += n - decodeAndSend(ss.ctx, ss.lines, ss.address, n, b[:n], partial) - ss.mu.Lock() - ss.lastReadTime = time.Now() - ss.mu.Unlock() - } - if err != nil && IsEndOrCancel(err) { - if partial.Len() > 0 { - sendLine(ctx, ss.address, partial, ss.lines) + // No error implies more to read, so restart the loop. + if err == nil && ctx.Err() == nil { + continue } - // glog.V(2).Infof("%v: exiting, conn has error %s", c, err) + } + if IsExitableError(err) { + // glog.V(2).Infof("stream(%s): exiting, conn has error %s", ss.sourcename, err) return } // Yield and wait - // glog.V(2).Infof("%v: waiting", c) + // glog.V(2).Infof("stream(%s): waiting", ss.sourcename) select { case <-ctx.Done(): - // Exit immediately; cancelled context will cause the next read to be interrupted and exit anyway, so no point waiting to loop. - return - case <-ss.stopChan: - // Stop after connection is closed. - // glog.V(2).Infof("%v: stopchan closed, exiting after next read timeout", c) + // Exit after next read attempt. 
+ // glog.V(2).Infof("stream(%s:%s): context cancelled, exiting after next read timeout", ss.scheme, ss.address) case <-waker.Wake(): // sleep until next Wake() - // glog.V(2).Infof("%v: Wake received", c) + // glog.V(2).Infof("stream(%s): Wake received", ss.sourcename) } } } - -func (ss *socketStream) IsComplete() bool { - ss.mu.RLock() - defer ss.mu.RUnlock() - return ss.completed -} - -// Stop implements the Logstream interface. -// Stop will close the listener so no new connections will be accepted, and close all current connections once they have been closed by their peers. -func (ss *socketStream) Stop() { - ss.stopOnce.Do(func() { - log.Println("signalling stop at next EOF") - close(ss.stopChan) - }) -} diff --git a/inputs/mtail/internal/tailer/logstream/socketstream_unix_test.go b/inputs/mtail/internal/tailer/logstream/socketstream_unix_test.go deleted file mode 100644 index c91a509c..00000000 --- a/inputs/mtail/internal/tailer/logstream/socketstream_unix_test.go +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright 2020 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -//go:build unix -// +build unix - -package logstream_test - -import ( - "context" - "fmt" - "net" - "path/filepath" - "sync" - "testing" - "time" - - "flashcat.cloud/categraf/inputs/mtail/internal/logline" - "flashcat.cloud/categraf/inputs/mtail/internal/tailer/logstream" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" - "flashcat.cloud/categraf/inputs/mtail/internal/waker" -) - -func TestSocketStreamReadCompletedBecauseSocketClosed(t *testing.T) { - for _, scheme := range []string{"unix", "tcp"} { - scheme := scheme - t.Run(scheme, testutil.TimeoutTest(time.Second, func(t *testing.T) { //nolint:thelper - var wg sync.WaitGroup - - var addr string - switch scheme { - case "unix": - tmpDir := testutil.TestTempDir(t) - addr = filepath.Join(tmpDir, "sock") - case "tcp": - addr = fmt.Sprintf("[::]:%d", testutil.FreePort(t)) - default: - t.Fatalf("bad scheme %s", scheme) - } - lines := make(chan *logline.LogLine, 1) - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, 1) - - sockName := scheme + "://" + addr - ss, err := logstream.New(ctx, &wg, waker, sockName, lines, false) - testutil.FatalIfErr(t, err) - - s, err := net.Dial(scheme, addr) - testutil.FatalIfErr(t, err) - - _, err = s.Write([]byte("1\n")) - testutil.FatalIfErr(t, err) - - awaken(0) // Sync past read - - // Close the socket to signal to the socketStream to shut down. 
- testutil.FatalIfErr(t, s.Close()) - - ss.Stop() // stop after connection closes - - wg.Wait() - close(lines) - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: addr, Line: "1"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - cancel() - - if !ss.IsComplete() { - t.Errorf("expecting socketstream to be complete because socket closed") - } - })) - } -} - -func TestSocketStreamReadCompletedBecauseCancel(t *testing.T) { - for _, scheme := range []string{"unix", "tcp"} { - scheme := scheme - t.Run(scheme, testutil.TimeoutTest(time.Second, func(t *testing.T) { //nolint:thelper - var wg sync.WaitGroup - - var addr string - switch scheme { - case "unix": - tmpDir := testutil.TestTempDir(t) - addr = filepath.Join(tmpDir, "sock") - case "tcp": - addr = fmt.Sprintf("[::]:%d", testutil.FreePort(t)) - default: - t.Fatalf("bad scheme %s", scheme) - } - lines := make(chan *logline.LogLine, 1) - ctx, cancel := context.WithCancel(context.Background()) - waker, awaken := waker.NewTest(ctx, 1) - - sockName := scheme + "://" + addr - ss, err := logstream.New(ctx, &wg, waker, sockName, lines, false) - testutil.FatalIfErr(t, err) - - s, err := net.Dial(scheme, addr) - testutil.FatalIfErr(t, err) - - _, err = s.Write([]byte("1\n")) - testutil.FatalIfErr(t, err) - - awaken(0) // Sync past read to ensure we read - - cancel() // This cancellation should cause the stream to shut down immediately. - - wg.Wait() - close(lines) - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.TODO(), Filename: addr, Line: "1"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - if !ss.IsComplete() { - t.Errorf("expecting socketstream to be complete because cancel") - } - })) - } -} diff --git a/inputs/mtail/internal/tailer/tail.go b/inputs/mtail/internal/tailer/tail.go index fd0a08b3..d82d92c6 100644 --- a/inputs/mtail/internal/tailer/tail.go +++ b/inputs/mtail/internal/tailer/tail.go @@ -16,12 +16,10 @@ import ( "path/filepath" "regexp" "sync" - "time" "flashcat.cloud/categraf/inputs/mtail/internal/logline" "flashcat.cloud/categraf/inputs/mtail/internal/tailer/logstream" "flashcat.cloud/categraf/inputs/mtail/internal/waker" - // "github.com/golang/glog" ) // logCount records the number of logs that are being tailed. @@ -30,19 +28,21 @@ var logCount = expvar.NewInt("log_count") // Tailer polls the filesystem for log sources that match given // `LogPathPatterns` and creates `LogStream`s to tail them. 
type Tailer struct { - ctx context.Context - wg sync.WaitGroup // Wait for our subroutines to finish - lines chan<- *logline.LogLine + ctx context.Context + cancel context.CancelFunc + + wg sync.WaitGroup // Wait for our subroutines to finish - globPatternsMu sync.RWMutex // protects `globPatterns' - globPatterns map[string]struct{} // glob patterns to match newly created logs in dir paths against - ignoreRegexPattern *regexp.Regexp + lines chan<- *logline.LogLine - socketPaths []string + logPatterns []string - oneShot bool + logPatternPollWaker waker.Waker // Used to poll for new logs + globPatternsMu sync.RWMutex // protects `globPatterns' + globPatterns map[string]struct{} // glob patterns to match newly created logs in dir paths against + ignoreRegexPattern *regexp.Regexp - pollMu sync.Mutex // protects Poll() + oneShot logstream.OneShotMode logstreamPollWaker waker.Waker // Used for waking idle logstreams logstreamsMu sync.RWMutex // protects `logstreams`. @@ -65,17 +65,13 @@ func (n *niladicOption) apply(t *Tailer) error { } // OneShot puts the tailer in one-shot mode, where sources are read once from the start and then closed. -var OneShot = &niladicOption{func(t *Tailer) error { t.oneShot = true; return nil }} +var OneShot = &niladicOption{func(t *Tailer) error { t.oneShot = logstream.OneShotEnabled; return nil }} // LogPatterns sets the glob patterns to use to match pathnames. type LogPatterns []string func (opt LogPatterns) apply(t *Tailer) error { - for _, p := range opt { - if err := t.AddPattern(p); err != nil { - return err - } - } + t.logPatterns = opt return nil } @@ -86,20 +82,6 @@ func (opt IgnoreRegex) apply(t *Tailer) error { return t.SetIgnorePattern(string(opt)) } -// StaleLogGcWaker triggers garbage collection runs for stale logs in the tailer. -func StaleLogGcWaker(w waker.Waker) Option { - return &staleLogGcWaker{w} -} - -type staleLogGcWaker struct { - waker.Waker -} - -func (opt staleLogGcWaker) apply(t *Tailer) error { - t.StartStaleLogstreamExpirationLoop(opt.Waker) - return nil -} - // LogPatternPollWaker triggers polls on the filesystem for new logs that match the log glob patterns. func LogPatternPollWaker(w waker.Waker) Option { return &logPatternPollWaker{w} @@ -110,7 +92,7 @@ type logPatternPollWaker struct { } func (opt logPatternPollWaker) apply(t *Tailer) error { - t.StartLogPatternPollLoop(opt.Waker) + t.logPatternPollWaker = opt.Waker return nil } @@ -128,58 +110,58 @@ func (opt logstreamPollWaker) apply(t *Tailer) error { return nil } -var ErrNoLinesChannel = errors.New("Tailer needs a lines channel") +var ( + ErrNoLinesChannel = errors.New("Tailer needs a lines channel") + ErrNeedsWaitgroup = errors.New("tailer needs a WaitGroup") +) // New creates a new Tailer. func New(ctx context.Context, wg *sync.WaitGroup, lines chan<- *logline.LogLine, options ...Option) (*Tailer, error) { if lines == nil { return nil, ErrNoLinesChannel } + if wg == nil { + return nil, ErrNeedsWaitgroup + } t := &Tailer{ - ctx: ctx, lines: lines, initDone: make(chan struct{}), globPatterns: make(map[string]struct{}), logstreams: make(map[string]logstream.LogStream), } + t.ctx, t.cancel = context.WithCancel(ctx) defer close(t.initDone) if err := t.SetOption(options...); err != nil { return nil, err } - if len(t.globPatterns) == 0 && len(t.socketPaths) == 0 { - log.Println("No patterns or sockets to tail, tailer done.") - close(t.lines) - return t, nil - } - // Set up listeners on every socket. 
- for _, pattern := range t.socketPaths { - if err := t.TailPath(pattern); err != nil { + // After processing options, we can add patterns. We need to ensure any Wakers were provided. + for _, p := range t.logPatterns { + if err := t.AddPattern(p); err != nil { return nil, err } } - // Guarantee all existing logs get tailed before we leave. Also necessary - // in case oneshot mode is active, the logs get read! - if err := t.PollLogPatterns(); err != nil { - return nil, err - } - // Setup for shutdown, once all routines are finished. + + // This goroutine cancels the Tailer if all of our dependent subroutines are done. + // These are any live logstreams, and any log pattern pollers. wg.Add(1) go func() { defer wg.Done() <-t.initDone - // We need to wait for context.Done() before we wait for the subbies - // because we don't know how many are running at any point -- as soon - // as t.wg.Wait begins the number of waited-on goroutines is fixed, and - // we may end up leaking a LogStream goroutine and it'll try to send on - // a closed channel as a result. But in tests and oneshot, we want to - // make sure the whole log gets read so we can't wait on context.Done - // here. - if !t.oneShot { - <-t.ctx.Done() - } t.wg.Wait() + t.cancel() + }() + + // This goroutine awaits cancellation, then cleans up the tailer. + wg.Add(1) + go func() { + defer wg.Done() + <-t.initDone + <-t.ctx.Done() + t.wg.Wait() + // glog.V(1).InfoContextf(ctx, "tailer finished") close(t.lines) }() + return t, nil } @@ -210,40 +192,46 @@ func (t *Tailer) AddPattern(pattern string) error { path := pattern switch u.Scheme { default: - // glog.V(2).Infof("%v: %q in path pattern %q, treating as path", ErrUnsupportedURLScheme, u.Scheme, pattern) + // glog.V(2).Infof("AddPattern(%v): %v in path pattern %q, treating as path", pattern, ErrUnsupportedURLScheme, u.Scheme) + // Leave path alone per log message case "unix", "unixgram", "tcp", "udp": // Keep the scheme. - // glog.V(2).Infof("AddPattern: socket %q", pattern) - t.socketPaths = append(t.socketPaths, pattern) - return nil + // glog.V(2).Infof("AddPattern(%v): is a socket", path) + return t.TailPath(path) case "", "file": - path = u.Path + // Leave path alone; may contain globs } - absPath, err := filepath.Abs(path) + if logstream.IsStdinPattern(pattern) { + // stdin is not really a socket, but it is handled by this codepath and should not be in the globs. 
+ // glog.V(2).Infof("AddPattern(%v): is stdin", pattern) + return t.TailPath(pattern) + } + path, err = filepath.Abs(path) if err != nil { - // glog.V(2).Infof("Couldn't canonicalize path %q: %s", u.Path, err) + // glog.V(2).Infof("AddPattern(%v): couldn't canonicalize path: %v", path, err) return err } - // glog.V(2).Infof("AddPattern: file %q", absPath) + // glog.V(2).Infof("AddPattern(%v): is a file-like pattern", path) t.globPatternsMu.Lock() - t.globPatterns[absPath] = struct{}{} + t.globPatterns[path] = struct{}{} t.globPatternsMu.Unlock() + t.pollLogPattern(path) return nil } func (t *Tailer) Ignore(pathname string) bool { absPath, err := filepath.Abs(pathname) if err != nil { - // glog.V(2).Infof("Couldn't get absolute path for %q: %s", pathname, err) + // glog.V(2).Infof("Ignore(%v): couldn't get absolute path: %v", pathname, err) return true } fi, err := os.Stat(absPath) if err != nil { - // glog.V(2).Infof("Couldn't stat path %q: %s", pathname, err) + // glog.V(2).Infof("Ignore(%v): couldn't stat: %v", pathname, err) return true } if fi.Mode().IsDir() { - // glog.V(2).Infof("ignore path %q because it is a folder", pathname) + // glog.V(2).Infof("Ignore(%v): is a folder", pathname) return true } return t.ignoreRegexPattern != nil && t.ignoreRegexPattern.MatchString(fi.Name()) @@ -268,72 +256,42 @@ func (t *Tailer) SetIgnorePattern(pattern string) error { func (t *Tailer) TailPath(pathname string) error { t.logstreamsMu.Lock() defer t.logstreamsMu.Unlock() - if l, ok := t.logstreams[pathname]; ok { - if !l.IsComplete() { - // glog.V(2).Infof("already got a logstream on %q", pathname) - return nil - } - logCount.Add(-1) // Removing the current entry before re-adding. - // glog.V(2).Infof("Existing logstream is finished, creating a new one.") + if _, ok := t.logstreams[pathname]; ok { + // glog.V(2).Infof("already got a logstream on %q", pathname) + return nil } - l, err := logstream.New(t.ctx, &t.wg, t.logstreamPollWaker, pathname, t.lines, t.oneShot) + l, err := logstream.New(t.ctx, &t.wg, t.logstreamPollWaker, pathname, t.oneShot) if err != nil { return err } - if t.oneShot { - // glog.V(2).Infof("Starting oneshot read at startup of %q", pathname) - l.Stop() - } t.logstreams[pathname] = l - log.Printf("Tailing %s", pathname) - logCount.Add(1) - return nil -} - -// ExpireStaleLogstreams removes logstreams that have had no reads for 1h or more. -func (t *Tailer) ExpireStaleLogstreams() error { - t.logstreamsMu.Lock() - defer t.logstreamsMu.Unlock() - for _, v := range t.logstreams { - if time.Since(v.LastReadTime()) > (time.Hour * 24) { - v.Stop() - } - } - return nil -} - -// StartStaleLogstreamExpirationLoop runs a permanent goroutine to expire stale logstreams. -func (t *Tailer) StartStaleLogstreamExpirationLoop(waker waker.Waker) { - if waker == nil { - log.Printf("Log handle expiration disabled") - return - } t.wg.Add(1) + // Start a goroutine to move lines from the logstream to the main Tailer + // output and remove the stream from the map when the channel closes. 
go func() { defer t.wg.Done() - <-t.initDone - if t.oneShot { - log.Println("No gc loop in oneshot mode.") - return - } - // glog.Infof("Starting log handle expiry loop every %s", duration.String()) - for { - select { - case <-t.ctx.Done(): - return - case <-waker.Wake(): - if err := t.ExpireStaleLogstreams(); err != nil { - log.Println(err) - } - } + for line := range l.Lines() { + t.lines <- line } + t.logstreamsMu.Lock() + delete(t.logstreams, pathname) + logCount.Add(-1) + t.logstreamsMu.Unlock() }() + // glog.Infof("Tailing %s", pathname) + logCount.Add(1) + return nil } -// StartLogPatternPollLoop runs a permanent goroutine to poll for new log files. -func (t *Tailer) StartLogPatternPollLoop(waker waker.Waker) { - if waker == nil { - log.Println("Log pattern polling disabled") +// pollLogPattern runs a permanent goroutine to poll for new log files that +// match `pattern`. It is on the subroutine waitgroup as we do not want to +// shut down the tailer when there are outstanding patterns to poll for. +func (t *Tailer) pollLogPattern(pattern string) { + if err := t.doPatternGlob(pattern); err != nil { + log.Printf("pollPattern(%v): glob failed: %v", pattern, err) + } + if t.logPatternPollWaker == nil { + log.Printf("pollPattern(%v): log pattern polling disabled by no waker", pattern) return } t.wg.Add(1) @@ -341,72 +299,43 @@ func (t *Tailer) StartLogPatternPollLoop(waker waker.Waker) { defer t.wg.Done() <-t.initDone if t.oneShot { - log.Println("No polling loop in oneshot mode.") + log.Printf("pollPattern(%v): no polling loop in oneshot mode", pattern) return } - // glog.Infof("Starting log pattern poll loop every %s", duration.String()) + // glog.V(1).Infof("pollPattern(%v): starting log pattern poll loop", pattern) for { select { case <-t.ctx.Done(): return - case <-waker.Wake(): - if err := t.Poll(); err != nil { - log.Println(err) + case <-t.logPatternPollWaker.Wake(): + if err := t.doPatternGlob(pattern); err != nil { + log.Printf("pollPattern(%v): glob failed: %v", pattern, err) } } } }() } -func (t *Tailer) PollLogPatterns() error { - t.globPatternsMu.RLock() - defer t.globPatternsMu.RUnlock() - for pattern := range t.globPatterns { - matches, err := filepath.Glob(pattern) - if err != nil { - return err - } - // log.Printf("glob matches: %v", matches) - for _, pathname := range matches { - if t.Ignore(pathname) { - continue - } - absPath, err := filepath.Abs(pathname) - if err != nil { - // glog.V(2).Infof("Couldn't get absolute path for %q: %s", pathname, err) - continue - } - // glog.V(2).Infof("watched path is %q", absPath) - if err := t.TailPath(absPath); err != nil { - log.Println(err) - } - } +// doPatternGlob matches a glob-style pattern against the filesystem and issues +// a TailPath for any files that match. +func (t *Tailer) doPatternGlob(pattern string) error { + matches, err := filepath.Glob(pattern) + if err != nil { + return err } - return nil -} - -// PollLogStreamsForCompletion looks at the existing paths and checks if they're already -// complete, removing it from the map if so. 
-func (t *Tailer) PollLogStreamsForCompletion() error { - t.logstreamsMu.Lock() - defer t.logstreamsMu.Unlock() - for name, l := range t.logstreams { - if l.IsComplete() { - log.Printf("%s is complete", name) - delete(t.logstreams, name) - logCount.Add(-1) + // glog.V(1).Infof("doPatternGlob(%v): glob matches: %v", pattern, matches) + for _, pathname := range matches { + if t.Ignore(pathname) { continue } - } - return nil -} - -func (t *Tailer) Poll() error { - t.pollMu.Lock() - defer t.pollMu.Unlock() - for _, f := range []func() error{t.PollLogPatterns, t.PollLogStreamsForCompletion} { - if err := f(); err != nil { - return err + absPath, err := filepath.Abs(pathname) + if err != nil { + // glog.V(2).Infof("doPatternGlob(%v): couldn't get absolute path for %q: %s", pattern, pathname, err) + continue + } + // glog.V(2).Infof("doPatternGlob(%v): tailable path is %q", pattern, absPath) + if err := t.TailPath(absPath); err != nil { + log.Println(err) } } return nil diff --git a/inputs/mtail/internal/tailer/tail_test.go b/inputs/mtail/internal/tailer/tail_test.go deleted file mode 100644 index 2a5d5ffa..00000000 --- a/inputs/mtail/internal/tailer/tail_test.go +++ /dev/null @@ -1,271 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package tailer - -import ( - "context" - "log" - "os" - "path/filepath" - "sync" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/logline" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" - "flashcat.cloud/categraf/inputs/mtail/internal/waker" -) - -func makeTestTail(t *testing.T, options ...Option) (*Tailer, chan *logline.LogLine, func(int), string, func()) { - t.Helper() - tmpDir := testutil.TestTempDir(t) - - ctx, cancel := context.WithCancel(context.Background()) - lines := make(chan *logline.LogLine, 5) // 5 loglines ought to be enough for any test - var wg sync.WaitGroup - waker, awaken := waker.NewTest(ctx, 1) - options = append(options, LogPatterns([]string{tmpDir}), LogstreamPollWaker(waker)) - ta, err := New(ctx, &wg, lines, options...) - testutil.FatalIfErr(t, err) - return ta, lines, awaken, tmpDir, func() { cancel(); wg.Wait() } -} - -func TestTail(t *testing.T) { - ta, _, _, dir, stop := makeTestTail(t) - - logfile := filepath.Join(dir, "log") - f := testutil.TestOpenFile(t, logfile) - defer f.Close() - - err := ta.TailPath(logfile) - testutil.FatalIfErr(t, err) - - if _, ok := ta.logstreams[logfile]; !ok { - t.Errorf("path not found in files map: %+#v", ta.logstreams) - } - - stop() -} - -func TestHandleLogUpdate(t *testing.T) { - ta, lines, awaken, dir, stop := makeTestTail(t) - - logfile := filepath.Join(dir, "log") - f := testutil.TestOpenFile(t, logfile) - defer f.Close() - - testutil.FatalIfErr(t, ta.TailPath(logfile)) - awaken(1) - - testutil.WriteString(t, f, "a\nb\nc\nd\n") - awaken(1) - - stop() - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.Background(), Filename: logfile, Line: "a"}, - {Context: context.Background(), Filename: logfile, Line: "b"}, - {Context: context.Background(), Filename: logfile, Line: "c"}, - {Context: context.Background(), Filename: logfile, Line: "d"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) -} - -// TestHandleLogTruncate writes to a file, waits for those -// writes to be seen, then truncates the file and writes some more. -// At the end all lines written must be reported by the tailer. 
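A minimal sketch of wiring up the reworked Tailer, assuming an illustrative glob and poll intervals: patterns now arrive via the LogPatterns option, per-pattern pollers start inside New, and cancelling the context closes the lines channel once the streams drain.

package main

import (
	"context"
	"fmt"
	"sync"
	"time"

	"flashcat.cloud/categraf/inputs/mtail/internal/logline"
	"flashcat.cloud/categraf/inputs/mtail/internal/tailer"
	"flashcat.cloud/categraf/inputs/mtail/internal/waker"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	var wg sync.WaitGroup
	lines := make(chan *logline.LogLine)

	_, err := tailer.New(ctx, &wg, lines,
		tailer.LogPatterns([]string{"/var/log/*.log"}), // illustrative glob
		tailer.LogPatternPollWaker(waker.NewTimed(ctx, time.Minute)),
		tailer.LogstreamPollWaker(waker.NewTimed(ctx, 250*time.Millisecond)),
	)
	if err != nil {
		panic(err)
	}

	go func() {
		time.Sleep(10 * time.Second)
		cancel() // cancellation drains the streams and closes `lines`
	}()
	for line := range lines {
		fmt.Println(line.Filename, line.Line)
	}
	wg.Wait()
}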
-func TestHandleLogTruncate(t *testing.T) { - ta, lines, awaken, dir, stop := makeTestTail(t) - - logfile := filepath.Join(dir, "log") - f := testutil.OpenLogFile(t, logfile) - defer f.Close() - - testutil.FatalIfErr(t, ta.TailPath(logfile)) - // Expect to wake 1 wakee, the logstream reading `logfile`. - awaken(1) - - testutil.WriteString(t, f, "a\nb\nc\n") - awaken(1) - - if err := f.Truncate(0); err != nil { - t.Fatal(err) - } - - // "File.Truncate" does not change the file offset, force a seek to start. - _, err := f.Seek(0, 0) - testutil.FatalIfErr(t, err) - awaken(1) - - testutil.WriteString(t, f, "d\ne\n") - awaken(1) - - stop() - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.Background(), Filename: logfile, Line: "a"}, - {Context: context.Background(), Filename: logfile, Line: "b"}, - {Context: context.Background(), Filename: logfile, Line: "c"}, - {Context: context.Background(), Filename: logfile, Line: "d"}, - {Context: context.Background(), Filename: logfile, Line: "e"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) -} - -func TestHandleLogUpdatePartialLine(t *testing.T) { - ta, lines, awaken, dir, stop := makeTestTail(t) - - logfile := filepath.Join(dir, "log") - f := testutil.TestOpenFile(t, logfile) - defer f.Close() - - testutil.FatalIfErr(t, ta.TailPath(logfile)) - awaken(1) // ensure we've hit an EOF before writing starts - - testutil.WriteString(t, f, "a") - awaken(1) - - testutil.WriteString(t, f, "b") - awaken(1) - - testutil.WriteString(t, f, "\n") - awaken(1) - - stop() - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.Background(), Filename: logfile, Line: "ab"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) -} - -func TestTailerUnreadableFile(t *testing.T) { - // Test broken files are skipped. 
- ta, lines, awaken, dir, stop := makeTestTail(t) - - brokenfile := filepath.Join(dir, "brokenlog") - logfile := filepath.Join(dir, "log") - testutil.FatalIfErr(t, ta.AddPattern(brokenfile)) - testutil.FatalIfErr(t, ta.AddPattern(logfile)) - - log.Println("create logs") - testutil.FatalIfErr(t, os.Symlink("/nonexistent", brokenfile)) - f := testutil.TestOpenFile(t, logfile) - defer f.Close() - - testutil.FatalIfErr(t, ta.PollLogPatterns()) - testutil.FatalIfErr(t, ta.PollLogStreamsForCompletion()) - - log.Println("write string") - testutil.WriteString(t, f, "\n") - awaken(1) - - stop() - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.Background(), Filename: logfile, Line: ""}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) -} - -func TestTailerInitErrors(t *testing.T) { - var wg sync.WaitGroup - _, err := New(context.TODO(), &wg, nil) - if err == nil { - t.Error("expected error") - } - ctx, cancel := context.WithCancel(context.Background()) - _, err = New(ctx, &wg, nil, nil) - if err == nil { - t.Error("expected error") - } - lines := make(chan *logline.LogLine, 1) - _, err = New(ctx, &wg, lines, nil) - if err == nil { - t.Error("expected error") - } - cancel() - wg.Wait() - lines = make(chan *logline.LogLine, 1) - ctx, cancel = context.WithCancel(context.Background()) - _, err = New(ctx, &wg, lines) - if err != nil { - t.Errorf("unexpected error %s", err) - } - cancel() - wg.Wait() - lines = make(chan *logline.LogLine, 1) - ctx, cancel = context.WithCancel(context.Background()) - _, err = New(ctx, &wg, lines, OneShot) - if err != nil { - t.Errorf("unexpected error %s", err) - } - cancel() - wg.Wait() -} - -func TestTailExpireStaleHandles(t *testing.T) { - t.Skip("need to set lastRead on logstream to inject condition") - ta, lines, awaken, dir, stop := makeTestTail(t) - - log1 := filepath.Join(dir, "log1") - f1 := testutil.TestOpenFile(t, log1) - log2 := filepath.Join(dir, "log2") - f2 := testutil.TestOpenFile(t, log2) - - if err := ta.TailPath(log1); err != nil { - t.Fatal(err) - } - if err := ta.TailPath(log2); err != nil { - t.Fatal(err) - } - testutil.WriteString(t, f1, "1\n") - testutil.WriteString(t, f2, "2\n") - - awaken(1) - - stop() - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.Background(), Filename: log1, Line: "1"}, - {Context: context.Background(), Filename: log2, Line: "2"}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) - - if err := ta.ExpireStaleLogstreams(); err != nil { - t.Fatal(err) - } - ta.logstreamsMu.RLock() - if len(ta.logstreams) != 2 { - t.Errorf("expecting 2 handles, got %v", ta.logstreams) - } - ta.logstreamsMu.RUnlock() - // ta.logstreamsMu.Lock() - // ta.logstreams[log1].(*File).lastRead = time.Now().Add(-time.Hour*24 + time.Minute) - // ta.logstreamsMu.Unlock() - if err := ta.ExpireStaleLogstreams(); err != nil { - t.Fatal(err) - } - ta.logstreamsMu.RLock() - if len(ta.logstreams) != 2 { - t.Errorf("expecting 2 handles, got %v", ta.logstreams) - } - ta.logstreamsMu.RUnlock() - // ta.logstreamsMu.Lock() - // ta.logstreams[log1].(*File).lastRead = time.Now().Add(-time.Hour*24 - time.Minute) - // ta.logstreamsMu.Unlock() - if err := ta.ExpireStaleLogstreams(); err != nil { - t.Fatal(err) - } - ta.logstreamsMu.RLock() - if len(ta.logstreams) != 1 { - t.Errorf("expecting 1 logstreams, got %v", ta.logstreams) - } - ta.logstreamsMu.RUnlock() - 
log.Println("good") -} diff --git a/inputs/mtail/internal/tailer/tail_unix_test.go b/inputs/mtail/internal/tailer/tail_unix_test.go deleted file mode 100644 index 551008e9..00000000 --- a/inputs/mtail/internal/tailer/tail_unix_test.go +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -//go:build unix -// +build unix - -package tailer - -import ( - "context" - "os" - "path/filepath" - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/logline" - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -// TestTailerOpenRetries is a unix-specific test because on Windows, it is not possible to create a file -// that you yourself cannot read (minimum permissions are 0222). -func TestTailerOpenRetries(t *testing.T) { - // Can't force a permission denied error if run as root. - testutil.SkipIfRoot(t) - - ta, lines, awaken, dir, stop := makeTestTail(t) - - logfile := filepath.Join(dir, "log") - if _, err := os.OpenFile(logfile, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0); err != nil { - t.Fatal(err) - } - - testutil.FatalIfErr(t, ta.AddPattern(logfile)) - - if err := ta.TailPath(logfile); err == nil || !os.IsPermission(err) { - t.Fatalf("Expected a permission denied error here: %s", err) - } - testutil.FatalIfErr(t, ta.PollLogPatterns()) - testutil.FatalIfErr(t, ta.PollLogStreamsForCompletion()) - // .Info("remove") - if err := os.Remove(logfile); err != nil { - t.Fatal(err) - } - testutil.FatalIfErr(t, ta.PollLogPatterns()) - testutil.FatalIfErr(t, ta.PollLogStreamsForCompletion()) - // glog.Info("openfile") - f, err := os.OpenFile(logfile, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0) - defer f.Close() - testutil.FatalIfErr(t, err) - testutil.FatalIfErr(t, ta.PollLogPatterns()) - testutil.FatalIfErr(t, ta.PollLogStreamsForCompletion()) - // glog.Info("chmod") - if err := os.Chmod(logfile, 0o666); err != nil { - t.Fatal(err) - } - testutil.FatalIfErr(t, ta.PollLogPatterns()) - testutil.FatalIfErr(t, ta.PollLogStreamsForCompletion()) - awaken(1) // force sync to EOF - // glog.Info("write string") - testutil.WriteString(t, f, "\n") - awaken(1) - - stop() - - received := testutil.LinesReceived(lines) - expected := []*logline.LogLine{ - {Context: context.Background(), Filename: logfile, Line: ""}, - } - testutil.ExpectNoDiff(t, expected, received, testutil.IgnoreFields(logline.LogLine{}, "Context")) -} diff --git a/inputs/mtail/internal/tailer/tail_windows_test.go b/inputs/mtail/internal/tailer/tail_windows_test.go deleted file mode 100644 index 8d7f1e66..00000000 --- a/inputs/mtail/internal/tailer/tail_windows_test.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -//go:build windows -// +build windows - -package tailer - -import ( - "testing" - - "flashcat.cloud/categraf/inputs/mtail/internal/testutil" -) - -func TestWindowsPath(t *testing.T) { - ta, _, _, _, stop := makeTestTail(t) - - testutil.FatalIfErr(t, ta.AddPattern("C:\\somefile")) - - if _, ok := ta.globPatterns["C:\\somefile"]; !ok { - t.Errorf("path not found in files map: %+#v", ta.globPatterns) - } - - stop() -} diff --git a/inputs/mtail/internal/testutil/diff.go b/inputs/mtail/internal/testutil/diff.go deleted file mode 100644 index 7feb810f..00000000 --- a/inputs/mtail/internal/testutil/diff.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2018 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
- -// Reimport the go-cmp package as the name 'cmp' conflicts with the cmp -// instruction in the vm. -package testutil - -import ( - "testing" - - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" -) - -func Diff(a, b interface{}, opts ...cmp.Option) string { - return cmp.Diff(a, b, opts...) -} - -func IgnoreUnexported(types ...interface{}) cmp.Option { - return cmpopts.IgnoreUnexported(types...) -} - -func AllowUnexported(types ...interface{}) cmp.Option { - return cmp.AllowUnexported(types...) -} - -func IgnoreFields(typ interface{}, names ...string) cmp.Option { - return cmpopts.IgnoreFields(typ, names...) -} - -func SortSlices(lessFunc interface{}) cmp.Option { - return cmpopts.SortSlices(lessFunc) -} - -// ExpectNoDiff tests to see if the two interfaces have no diff. -// If there is no diff, the retrun value is true. -// If there is a diff, it is logged to tb and an error is flagged, and the return value is false. -func ExpectNoDiff(tb testing.TB, a, b interface{}, opts ...cmp.Option) bool { - tb.Helper() - if diff := Diff(a, b, opts...); diff != "" { - tb.Errorf("Unexpected diff, -want +got:\n%s", diff) - tb.Logf("expected:\n%#v", a) - tb.Logf("received:\n%#v", b) - return false - } - return true -} diff --git a/inputs/mtail/internal/testutil/err.go b/inputs/mtail/internal/testutil/err.go deleted file mode 100644 index bfeb9cda..00000000 --- a/inputs/mtail/internal/testutil/err.go +++ /dev/null @@ -1,11 +0,0 @@ -package testutil - -import "testing" - -// FatalIfErr fails the test with a fatal error if err is not nil. -func FatalIfErr(tb testing.TB, err error) { - tb.Helper() - if err != nil { - tb.Fatal(err) - } -} diff --git a/inputs/mtail/internal/testutil/expvar.go b/inputs/mtail/internal/testutil/expvar.go deleted file mode 100644 index 7b460966..00000000 --- a/inputs/mtail/internal/testutil/expvar.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2021 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package testutil - -import ( - "expvar" - "log" - "testing" - "time" -) - -// TestGetExpvar fetches the expvar metric `name`, and returns the expvar. -// Callers are responsible for type assertions on the returned value. -func TestGetExpvar(tb testing.TB, name string) expvar.Var { - tb.Helper() - v := expvar.Get(name) - log.Printf("Var %s is %v", name, v) - return v -} - -const defaultDoOrTimeoutDeadline = 10 * time.Second - -// ExpectExpvarDeltaWithDeadline returns a deferrable function which tests if the expvar metric with name has changed by delta within the given deadline, once the function begins. Before returning, it fetches the original value for comparison. 
-func ExpectExpvarDeltaWithDeadline(tb testing.TB, name string, want int64) func() { - tb.Helper() - deadline := defaultDoOrTimeoutDeadline - start := TestGetExpvar(tb, name).(*expvar.Int).Value() - check := func() (bool, error) { - tb.Helper() - now := TestGetExpvar(tb, name).(*expvar.Int).Value() - log.Printf("now is %v", now) - return now-start == want, nil - } - return func() { - tb.Helper() - ok, err := DoOrTimeout(check, deadline, 10*time.Millisecond) - FatalIfErr(tb, err) - if !ok { - now := TestGetExpvar(tb, name).(*expvar.Int).Value() - tb.Errorf("Did not see %s have delta by deadline: got %v - %v = %d, want %d", name, now, start, now-start, want) - } - } -} - -// ExpectMapExpvarMetricDeltaWithDeadline returns a deferrable function which tests if the expvar map metric with name and key has changed by delta within the given deadline, once the function begins. Before returning, it fetches the original value for comparison. -func ExpectMapExpvarDeltaWithDeadline(tb testing.TB, name, key string, want int64) func() { - tb.Helper() - deadline := defaultDoOrTimeoutDeadline - startVar := TestGetExpvar(tb, name).(*expvar.Map).Get(key) - var start int64 - if startVar != nil { - start = startVar.(*expvar.Int).Value() - } - check := func() (bool, error) { - tb.Helper() - nowVar := TestGetExpvar(tb, name).(*expvar.Map).Get(key) - var now int64 - if nowVar != nil { - now = nowVar.(*expvar.Int).Value() - } - return now-start == want, nil - } - return func() { - tb.Helper() - ok, err := DoOrTimeout(check, deadline, 10*time.Millisecond) - FatalIfErr(tb, err) - if !ok { - nowVar := TestGetExpvar(tb, name).(*expvar.Map).Get(key) - var now int64 - if nowVar != nil { - now = nowVar.(*expvar.Int).Value() - } - tb.Errorf("Did not see %s[%s] have delta by deadline: got %v - %v = %d, want %d", name, key, now, start, now-start, want) - } - } -} diff --git a/inputs/mtail/internal/testutil/file.go b/inputs/mtail/internal/testutil/file.go deleted file mode 100644 index 1ca5ca30..00000000 --- a/inputs/mtail/internal/testutil/file.go +++ /dev/null @@ -1,27 +0,0 @@ -package testutil - -import ( - "io" - "log" - "os" - "testing" -) - -func WriteString(tb testing.TB, f io.StringWriter, str string) int { - tb.Helper() - n, err := f.WriteString(str) - FatalIfErr(tb, err) - log.Printf("Wrote %d bytes", n) - // If this is a regular file (not a pipe or other StringWriter) then ensure - // it's flushed to disk, to guarantee the write happens-before this - // returns. - if v, ok := f.(*os.File); ok { - fi, err := v.Stat() - FatalIfErr(tb, err) - if fi.Mode().IsRegular() { - log.Printf("This is a regular file, doing a sync.") - FatalIfErr(tb, v.Sync()) - } - } - return n -} diff --git a/inputs/mtail/internal/testutil/flag.go b/inputs/mtail/internal/testutil/flag.go deleted file mode 100644 index 68d1172c..00000000 --- a/inputs/mtail/internal/testutil/flag.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package testutil - -import ( - "flag" - "testing" -) - -// SetFlag sets the value of the commandline flag, and registers a cleanup function that restores the flag value. 
-func SetFlag(tb testing.TB, name, value string) { - tb.Helper() - val := flag.Lookup(name) - - if err := flag.Set(name, value); err != nil { - tb.Fatal(err) - } - - tb.Cleanup(func() { - if val != nil { - if err := flag.Set(name, val.Value.String()); err != nil { - tb.Fatal(err) - } - } - }) -} diff --git a/inputs/mtail/internal/testutil/fs.go b/inputs/mtail/internal/testutil/fs.go deleted file mode 100644 index 0d634881..00000000 --- a/inputs/mtail/internal/testutil/fs.go +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package testutil - -import ( - "os" - "path/filepath" - "testing" -) - -// TestTempDir creates a temporary directory for use during tests, returning the pathname. -func TestTempDir(tb testing.TB) string { - tb.Helper() - name, err := os.MkdirTemp("", "mtail-test") - if err != nil { - tb.Fatal(err) - } - tb.Cleanup(func() { - if err := os.RemoveAll(name); err != nil { - tb.Fatalf("os.RemoveAll(%s): %s", name, err) - } - }) - return name -} - -// TestOpenFile creates a new file called name and returns the opened file. -func TestOpenFile(tb testing.TB, name string) *os.File { - tb.Helper() - f, err := os.OpenFile(filepath.Clean(name), os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o600) - if err != nil { - tb.Fatal(err) - } - return f -} - -// OpenLogFile creates a new file that emulates being a log. -func OpenLogFile(tb testing.TB, name string) *os.File { - tb.Helper() - f, err := os.OpenFile(filepath.Clean(name), os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0o600) - if err != nil { - tb.Fatal(err) - } - return f -} - -// Chdir changes current working directory, and registers a cleanup function -// to return to the previous directory. -func Chdir(tb testing.TB, dir string) { - tb.Helper() - cwd, err := os.Getwd() - if err != nil { - tb.Fatal(err) - } - err = os.Chdir(dir) - if err != nil { - tb.Fatal(err) - } - tb.Cleanup(func() { - err := os.Chdir(cwd) - if err != nil { - tb.Fatal(err) - } - }) -} diff --git a/inputs/mtail/internal/testutil/lines.go b/inputs/mtail/internal/testutil/lines.go deleted file mode 100644 index d6d8e0e5..00000000 --- a/inputs/mtail/internal/testutil/lines.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2021 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package testutil - -import ( - "flashcat.cloud/categraf/inputs/mtail/internal/logline" -) - -func LinesReceived(lines <-chan *logline.LogLine) (r []*logline.LogLine) { - r = make([]*logline.LogLine, 0) - for line := range lines { - r = append(r, line) - } - return -} diff --git a/inputs/mtail/internal/testutil/norace.go b/inputs/mtail/internal/testutil/norace.go deleted file mode 100644 index c453f666..00000000 --- a/inputs/mtail/internal/testutil/norace.go +++ /dev/null @@ -1,8 +0,0 @@ -// Copyright 2021 Google Inc. All Rights Reserved. -// This file is available under the Apache license. -//go:build !race -// +build !race - -package testutil - -const RaceDetectorMultiplier = 1.0 diff --git a/inputs/mtail/internal/testutil/port.go b/inputs/mtail/internal/testutil/port.go deleted file mode 100644 index 9d928b78..00000000 --- a/inputs/mtail/internal/testutil/port.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2021 Google Inc. All Rights Reserved. -// This file is available under the Apache license. 
-package testutil - -import ( - "net" - "testing" -) - -func FreePort(tb testing.TB) int { - tb.Helper() - addr, err := net.ResolveTCPAddr("tcp", "[::]:0") - if err != nil { - tb.Fatal(err) - } - l, err := net.ListenTCP("tcp", addr) - if err != nil { - tb.Fatal(err) - } - defer l.Close() - return l.Addr().(*net.TCPAddr).Port -} diff --git a/inputs/mtail/internal/testutil/race.go b/inputs/mtail/internal/testutil/race.go deleted file mode 100644 index 7643a6d0..00000000 --- a/inputs/mtail/internal/testutil/race.go +++ /dev/null @@ -1,8 +0,0 @@ -// Copyright 2021 Google Inc. All Rights Reserved. -// This file is available under the Apache license. -//go:build race -// +build race - -package testutil - -const RaceDetectorMultiplier = 30.0 diff --git a/inputs/mtail/internal/testutil/root.go b/inputs/mtail/internal/testutil/root.go deleted file mode 100644 index 84a30944..00000000 --- a/inputs/mtail/internal/testutil/root.go +++ /dev/null @@ -1,18 +0,0 @@ -package testutil - -import ( - "fmt" - "os/user" - "testing" -) - -func SkipIfRoot(tb testing.TB) { - tb.Helper() - u, err := user.Current() - if err != nil { - tb.Skip(fmt.Sprintf("Couldn't determine current user id: %s", err)) - } - if u.Uid == "0" { - tb.Skip("Skipping test when run as root") - } -} diff --git a/inputs/mtail/internal/testutil/short.go b/inputs/mtail/internal/testutil/short.go deleted file mode 100644 index 676cc03a..00000000 --- a/inputs/mtail/internal/testutil/short.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2020 Google Inc. All Rights Reserved. -// This file is available under the Apache license. - -package testutil - -import ( - "testing" -) - -func SkipIfShort(tb testing.TB) { - tb.Helper() - if testing.Short() { - tb.Skip("skipping test in -short mode") - } -} diff --git a/inputs/mtail/internal/testutil/timeout.go b/inputs/mtail/internal/testutil/timeout.go deleted file mode 100644 index e76a94e1..00000000 --- a/inputs/mtail/internal/testutil/timeout.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2019 Google Inc. All Rights Reserved. -// This file is available under the Apache license. -package testutil - -import ( - "os" - "runtime" - "testing" - "time" - // "github.com/golang/glog" -) - -// DoOrTimeout runs a check function every interval until deadline, unless the -// check returns true. The check should return false otherwise. If the check -// returns an error the check is immediately failed. -func DoOrTimeout(do func() (bool, error), deadline, interval time.Duration) (bool, error) { - timeout := time.After(deadline) - ticker := time.NewTicker(interval) - defer ticker.Stop() - for { - select { - case <-timeout: - return false, nil - case <-ticker.C: - // glog.V(2).Infof("tick") - ok, err := do() - // glog.V(2).Infof("ok, err: %v %v", ok, err) - if err != nil { - return false, err - } - if ok { - return true, nil - } - // otherwise wait and retry - } - } -} - -// TimeoutTest returns a test function that executes f with a timeout, If the -// test does not complete in time the test is failed. This lets us set a -// per-test timeout instead of the global `go test -timeout` coarse timeout. -func TimeoutTest(timeout time.Duration, f func(t *testing.T)) func(t *testing.T) { - // Raise the timeout if we're run under the race detector. - timeout *= RaceDetectorMultiplier - // If we're in a CI environment, raise the timeout by 10x. This mimics the - // timeout global flag set in the Makefile. 
diff --git a/inputs/mtail/internal/testutil/timeout.go b/inputs/mtail/internal/testutil/timeout.go
deleted file mode 100644
index e76a94e1..00000000
--- a/inputs/mtail/internal/testutil/timeout.go
+++ /dev/null
@@ -1,66 +0,0 @@
-// Copyright 2019 Google Inc. All Rights Reserved.
-// This file is available under the Apache license.
-package testutil
-
-import (
-	"os"
-	"runtime"
-	"testing"
-	"time"
-	// "github.com/golang/glog"
-)
-
-// DoOrTimeout runs a check function every interval until deadline, unless the
-// check returns true. The check should return false otherwise. If the check
-// returns an error the check is immediately failed.
-func DoOrTimeout(do func() (bool, error), deadline, interval time.Duration) (bool, error) {
-	timeout := time.After(deadline)
-	ticker := time.NewTicker(interval)
-	defer ticker.Stop()
-	for {
-		select {
-		case <-timeout:
-			return false, nil
-		case <-ticker.C:
-			// glog.V(2).Infof("tick")
-			ok, err := do()
-			// glog.V(2).Infof("ok, err: %v %v", ok, err)
-			if err != nil {
-				return false, err
-			}
-			if ok {
-				return true, nil
-			}
-			// otherwise wait and retry
-		}
-	}
-}
-
-// TimeoutTest returns a test function that executes f with a timeout, If the
-// test does not complete in time the test is failed. This lets us set a
-// per-test timeout instead of the global `go test -timeout` coarse timeout.
-func TimeoutTest(timeout time.Duration, f func(t *testing.T)) func(t *testing.T) {
-	// Raise the timeout if we're run under the race detector.
-	timeout *= RaceDetectorMultiplier
-	// If we're in a CI environment, raise the timeout by 10x. This mimics the
-	// timeout global flag set in the Makefile.
-	if os.Getenv("CI") == "true" {
-		timeout *= 10
-	}
-	return func(t *testing.T) {
-		t.Helper()
-		done := make(chan bool)
-		go func() {
-			t.Helper()
-			defer close(done)
-			f(t)
-		}()
-		select {
-		case <-time.After(timeout):
-			buf := make([]byte, 1<<20)
-			stacklen := runtime.Stack(buf, true)
-			t.Fatalf("timed out after %s\n%s", timeout, buf[:stacklen])
-		case <-done:
-		}
-	}
-}
diff --git a/inputs/mtail/internal/testutil/timeout_test.go b/inputs/mtail/internal/testutil/timeout_test.go
deleted file mode 100644
index 5e1b81e0..00000000
--- a/inputs/mtail/internal/testutil/timeout_test.go
+++ /dev/null
@@ -1,56 +0,0 @@
-package testutil
-
-import (
-	"fmt"
-	"testing"
-	"time"
-)
-
-func TestDoOrTimeoutNeverOK(t *testing.T) {
-	SkipIfShort(t)
-
-	// Never return OK so timeout at 10ms.
-	ok, err := DoOrTimeout(func() (bool, error) {
-		return false, nil
-	}, 10*time.Millisecond, time.Millisecond)
-	if ok || err != nil {
-		t.Errorf("Expected timeout (false, nil), got %v, %v", ok, err)
-	}
-}
-
-func TestDoOrTimeoutAlwaysOK(t *testing.T) {
-	// Always return OK.
-	ok, err := DoOrTimeout(func() (bool, error) {
-		return true, nil
-	}, 10*time.Millisecond, time.Millisecond)
-	if !ok || err != nil {
-		t.Errorf("Expected OK, got %v, %v", ok, err)
-	}
-}
-
-func TestDoOrTimeoutStallThenOK(t *testing.T) {
-	SkipIfShort(t)
-
-	// Stall for 5 ticks (50ms) and then return OK; timeout at 1s.
-	i := 5
-	ok, err := DoOrTimeout(func() (bool, error) {
-		i--
-		if i > 0 {
-			return false, nil
-		}
-		return true, nil
-	}, time.Second, 10*time.Millisecond)
-	if !ok || err != nil {
-		t.Errorf("Expected OK, got %v, %v", ok, err)
-	}
-}
-
-func TestDoOrTimeoutAlwaysErr(t *testing.T) {
-	// Return an error, should return false,err
-	ok, err := DoOrTimeout(func() (bool, error) {
-		return false, fmt.Errorf("oh no") // nolint:goerr113
-	}, 1*time.Second, time.Millisecond)
-	if ok || err == nil {
-		t.Errorf("Expected error (false,!nil), got %v %v", ok, err)
-	}
-}
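The removed timeout.go/timeout_test.go pair above shows the intended shape of polling-style assertions: wrap the body in a per-test deadline with TimeoutTest and poll a condition with DoOrTimeout instead of sleeping a fixed amount. The caller below is a hypothetical sketch in the same package as the deleted tests, not code from this change; serverReady is a stand-in for whatever predicate a real test would poll.

package testutil

import (
	"testing"
	"time"
)

// Hypothetical example of the removed helpers used together.
func TestBecomesReady(t *testing.T) {
	serverReady := func() bool { return true } // placeholder for the real readiness check
	t.Run("ready", TimeoutTest(time.Second, func(t *testing.T) {
		ok, err := DoOrTimeout(func() (bool, error) {
			return serverReady(), nil
		}, 500*time.Millisecond, 10*time.Millisecond)
		if err != nil || !ok {
			t.Fatalf("condition never became true: ok=%v err=%v", ok, err)
		}
	}))
}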
diff --git a/inputs/mtail/internal/waker/testwaker_test.go b/inputs/mtail/internal/waker/testwaker_test.go
deleted file mode 100644
index 8a001085..00000000
--- a/inputs/mtail/internal/waker/testwaker_test.go
+++ /dev/null
@@ -1,109 +0,0 @@
-// Copyright 2020 Google Inc. All Rights Reserved.
-// This file is available under the Apache license.
-
-package waker_test
-
-import (
-	"context"
-	"sync"
-	"testing"
-
-	"flashcat.cloud/categraf/inputs/mtail/internal/waker"
-)
-
-func TestTestWakerWakes(t *testing.T) {
-	ctx, cancel := context.WithCancel(context.Background())
-	defer cancel()
-	w, awaken := waker.NewTest(ctx, 1)
-	c := w.Wake()
-	select {
-	case x := <-c:
-		t.Errorf("<-w.Wake() == %v, expected nothing (should block)", x)
-	default:
-	}
-	awaken(0)
-	select {
-	case <-c:
-		// Luke Luck likes lakes. Luke's duck likes lakes.
-	default:
-		t.Errorf("<-w.Wake() blocked, expected close")
-	}
-}
-
-func TestTestWakerTwoWakees(t *testing.T) {
-	ctx, cancel := context.WithCancel(context.Background())
-	defer cancel()
-	w, awaken := waker.NewTest(ctx, 2)
-	var wg1, wg2, wg3 sync.WaitGroup
-	wg1.Add(1)
-	wg2.Add(1)
-	wg3.Add(1)
-	go func() {
-		defer wg3.Done()
-		c := w.Wake()
-		select {
-		case x := <-c:
-			t.Errorf("<-w.Wake() == %v, expected nothing (should block)", x)
-		default:
-		}
-		wg1.Wait()
-		select {
-		case x := <-c:
-			t.Errorf("<-w.Wake() == %v, expected nothing (should block)", x)
-		default:
-		}
-		d := w.Wake()
-		wg2.Wait()
-		select {
-		case <-c:
-			// Luke Luck likes lakes.
-		default:
-			t.Errorf("c<-w.Wake() blocked, expected close")
-		}
-		select {
-		case <-d:
-			// Luke's duck likes lakes.
-		default:
-			t.Errorf("d<-w.Wake() blocked, expected close")
-		}
-	}()
-	wg1.Done()
-	awaken(0) // wake 2, and await none
-	wg2.Done()
-	wg3.Wait()
-}
-
-func TestTestWakerTwoWakeups(t *testing.T) {
-	ctx, cancel := context.WithCancel(context.Background())
-	defer cancel()
-	w, awaken := waker.NewTest(ctx, 1)
-	s := make(chan struct{})
-	begin := make(chan struct{})
-	var wg sync.WaitGroup
-	wg.Add(2)
-	go func() {
-		defer wg.Done()
-		<-begin
-		for i := 0; i < 2; i++ {
-			c := w.Wake()
-			select {
-			case x := <-c:
-				t.Errorf("<-w.Wake() == %v, expected nothing (should block), pass %d", x, i)
-			default:
-			}
-			s <- struct{}{}
-			<-c // wait to receive the wake
-		}
-	}()
-	go func() {
-		defer wg.Done()
-		<-begin
-		<-s
-		awaken(1)
-		<-s
-		// we don't expect anyone to call Wake() after this
-		awaken(0)
-	}()
-	close(begin)
-	wg.Wait()
-}
diff --git a/inputs/mtail/internal/waker/timedwaker_test.go b/inputs/mtail/internal/waker/timedwaker_test.go
deleted file mode 100644
index e370ae69..00000000
--- a/inputs/mtail/internal/waker/timedwaker_test.go
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2020 Google Inc. All Rights Reserved.
-// This file is available under the Apache license.
-
-package waker_test
-
-import (
-	"context"
-	"testing"
-	"time"
-
-	"flashcat.cloud/categraf/inputs/mtail/internal/waker"
-)
-
-func TestTimedWakerWakes(t *testing.T) {
-	ctx, cancel := context.WithCancel(context.Background())
-	defer cancel()
-	w := waker.NewTimed(ctx, 10*time.Millisecond)
-
-	timer := time.NewTimer(100 * time.Millisecond)
-	defer timer.Stop()
-	select {
-	case <-timer.C:
-		t.Errorf("no wake before deadline")
-	case <-w.Wake():
-		// Luke Luck licks lakes. Luke's duck licks lakes.
-	}
-}
diff --git a/inputs/mtail/mtail.go b/inputs/mtail/mtail.go
index 76aee03f..de51a2e9 100644
--- a/inputs/mtail/mtail.go
+++ b/inputs/mtail/mtail.go
@@ -111,8 +111,8 @@ func (ins *Instance) Init() error {
 	} else {
 		ins.ctx, ins.cancel = context.WithCancel(context.Background())
 	}
-	staleLogGcWaker := waker.NewTimed(ins.ctx, time.Hour)
-	opts = append(opts, mtail.StaleLogGcWaker(staleLogGcWaker))
+	// staleLogGcWaker := waker.NewTimed(ins.ctx, time.Hour)
+	// opts = append(opts, mtail.StaleLogGcWaker(staleLogGcWaker))
 	if ins.PollInterval > 0 {
 		logStreamPollWaker := waker.NewTimed(ins.ctx, ins.PollInterval)