Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add etl.Metadata for passing task metadata to parsers #1117

Merged
merged 4 commits into from
Jun 2, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 14 additions & 4 deletions etl/etl.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ import (
"errors"
"time"

"cloud.google.com/go/bigquery"
"cloud.google.com/go/civil"
)

Expand All @@ -22,7 +21,8 @@ type ProcessingError interface {
// RowStats interface defines some useful Inserter stats that will also be
// implemented by Parser.
// RowStats implementations should provide the invariants:
// Accepted == Failed + Committed + RowsInBuffer
//
// Accepted == Failed + Committed + RowsInBuffer
type RowStats interface {
// RowsInBuffer returns the count of rows currently in the buffer.
RowsInBuffer() int
Expand All @@ -36,7 +36,8 @@ type RowStats interface {

// Inserter is a data sink that writes to BigQuery tables.
// Inserters should provide the invariants:
// After Flush() returns, RowsInBuffer == 0
//
// After Flush() returns, RowsInBuffer == 0
type Inserter interface {
// Put synchronously sends a slice of rows to BigQuery
// This is THREADSAFE
Expand Down Expand Up @@ -102,6 +103,15 @@ type InserterParams struct {
MaxRetryDelay time.Duration // Maximum backoff time for Put retries.
}

// Metadata provides metadata about the parser and archive files.
// It is the typed replacement for the map[string]bigquery.Value previously
// passed to Parser.ParseAndInsert.
type Metadata struct {
Version string // Parser version, e.g. from parser.Version().
ArchiveURL string // URL of the source archive (formerly meta["filename"]).
GitCommit string // Git commit of the parser build, e.g. from parser.GitCommit().
Date civil.Date // Archive date used for row partitioning (formerly meta["date"]); per callers, interpreted relative to UTC.
Start time.Time // NOTE(review): not populated in any visible call site — presumably task start time; confirm before relying on it.
}

// ErrHighInsertionFailureRate should be returned by TaskError when there are more than 10% BQ insertion errors.
var ErrHighInsertionFailureRate = errors.New("too many insertion failures")

Expand All @@ -116,7 +126,7 @@ type Parser interface {
// meta - metadata, e.g. from the original tar file name.
// testName - Name of test file (typically extracted from a tar file)
// test - binary test data
ParseAndInsert(meta map[string]bigquery.Value, testName string, test []byte) error
ParseAndInsert(meta Metadata, testName string, test []byte) error

// Flush flushes any pending rows.
Flush() error
Expand Down
13 changes: 5 additions & 8 deletions parser/annotation2.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,6 @@ import (
"strings"
"time"

"cloud.google.com/go/bigquery"

"cloud.google.com/go/civil"
"github.com/m-lab/etl/etl"
"github.com/m-lab/etl/metrics"
"github.com/m-lab/etl/row"
Expand Down Expand Up @@ -60,17 +57,17 @@ func (ap *Annotation2Parser) IsParsable(testName string, data []byte) (string, b
}

// ParseAndInsert decodes the data.Annotation2 JSON and inserts it into BQ.
func (ap *Annotation2Parser) ParseAndInsert(meta map[string]bigquery.Value, testName string, test []byte) error {
func (ap *Annotation2Parser) ParseAndInsert(meta etl.Metadata, testName string, test []byte) error {
metrics.WorkerState.WithLabelValues(ap.TableName(), "annotation2").Inc()
defer metrics.WorkerState.WithLabelValues(ap.TableName(), "annotation2").Dec()

row := schema.Annotation2Row{
Parser: schema.ParseInfo{
Version: Version(),
Version: meta.Version,
Time: time.Now(),
ArchiveURL: meta["filename"].(string),
ArchiveURL: meta.ArchiveURL,
Filename: testName,
GitCommit: GitCommit(),
GitCommit: meta.GitCommit,
},
}

Expand Down Expand Up @@ -105,7 +102,7 @@ func (ap *Annotation2Parser) ParseAndInsert(meta map[string]bigquery.Value, test
// the given timestamp, regardless of the timestamp's timezone. Since we
// run our systems in UTC, all timestamps will be relative to UTC and as
// will these dates.
row.Date = meta["date"].(civil.Date)
row.Date = meta.Date

// Estimate the row size based on the input JSON size.
metrics.RowSizeHistogram.WithLabelValues(ap.TableName()).Observe(float64(len(test)))
Expand Down
11 changes: 7 additions & 4 deletions parser/annotation2_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@ import (
"strings"
"testing"

"cloud.google.com/go/bigquery"
"github.com/m-lab/etl/etl"

"cloud.google.com/go/civil"
"github.com/go-test/deep"
"github.com/m-lab/etl/parser"
Expand Down Expand Up @@ -45,9 +46,11 @@ func TestAnnotation2Parser_ParseAndInsert(t *testing.T) {
t.Fatal("IsParsable() failed; got false, want true")
}

meta := map[string]bigquery.Value{
"filename": "gs://mlab-test-bucket/ndt/ndt7/2020/03/18/" + tt.file,
"date": civil.Date{Year: 2020, Month: 3, Day: 18},
meta := etl.Metadata{
ArchiveURL: "gs://mlab-test-bucket/ndt/ndt7/2020/03/18/" + tt.file,
Date: civil.Date{Year: 2020, Month: 3, Day: 18},
Version: parser.Version(),
GitCommit: parser.GitCommit(),
}

if err := n.ParseAndInsert(meta, tt.file, data); (err != nil) != tt.wantErr {
Expand Down
12 changes: 5 additions & 7 deletions parser/hopannotation2.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@ import (
"strings"
"time"

"cloud.google.com/go/bigquery"
"cloud.google.com/go/civil"
"github.com/m-lab/etl/etl"
"github.com/m-lab/etl/metrics"
"github.com/m-lab/etl/row"
Expand Down Expand Up @@ -44,17 +42,17 @@ func (p *HopAnnotation2Parser) IsParsable(testName string, data []byte) (string,
}

// ParseAndInsert decodes the HopAnnotation2 data and inserts it into BQ.
func (p *HopAnnotation2Parser) ParseAndInsert(fileMetadata map[string]bigquery.Value, testName string, rawContent []byte) error {
func (p *HopAnnotation2Parser) ParseAndInsert(meta etl.Metadata, testName string, rawContent []byte) error {
metrics.WorkerState.WithLabelValues(p.TableName(), "hopannotation2").Inc()
defer metrics.WorkerState.WithLabelValues(p.TableName(), "hopannotation2").Dec()

row := schema.HopAnnotation2Row{
Parser: schema.ParseInfo{
Version: Version(),
Version: meta.Version,
Time: time.Now(),
ArchiveURL: fileMetadata["filename"].(string),
ArchiveURL: meta.ArchiveURL,
Filename: testName,
GitCommit: GitCommit(),
GitCommit: meta.GitCommit,
},
}

Expand All @@ -73,7 +71,7 @@ func (p *HopAnnotation2Parser) ParseAndInsert(fileMetadata map[string]bigquery.V
// the given timestamp, regardless of the timestamp's timezone. Since we
// run our systems in UTC, all timestamps will be relative to UTC and as
// will these dates.
row.Date = fileMetadata["date"].(civil.Date)
row.Date = meta.Date

// Estimate the row size based on the input JSON size.
metrics.RowSizeHistogram.WithLabelValues(p.TableName()).Observe(float64(len(rawContent)))
Expand Down
10 changes: 6 additions & 4 deletions parser/hopannotation2_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@ import (
"testing"
"time"

"cloud.google.com/go/bigquery"
"cloud.google.com/go/civil"
"github.com/go-test/deep"
"github.com/m-lab/etl/etl"
"github.com/m-lab/etl/parser"
"github.com/m-lab/etl/schema"
"github.com/m-lab/go/rtx"
Expand All @@ -31,9 +31,11 @@ func TestHopAnnotation2Parser_ParseAndInsert(t *testing.T) {

date := civil.Date{Year: 2021, Month: 07, Day: 30}

meta := map[string]bigquery.Value{
"filename": path.Join(hopAnnotation2GCSPath, hopAnnotation2Filename),
"date": date,
meta := etl.Metadata{
ArchiveURL: path.Join(hopAnnotation2GCSPath, hopAnnotation2Filename),
Date: date,
Version: parser.Version(),
GitCommit: parser.GitCommit(),
}

if err := n.ParseAndInsert(meta, hopAnnotation2Filename, data); err != nil {
Expand Down
11 changes: 5 additions & 6 deletions parser/ndt5_result.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ import (
"strings"
"time"

"cloud.google.com/go/bigquery"
"cloud.google.com/go/civil"

"github.com/m-lab/etl/etl"
Expand Down Expand Up @@ -66,7 +65,7 @@ func (dp *NDT5ResultParser) IsParsable(testName string, data []byte) (string, bo
// backend and to eventually rely on the schema inference in m-lab/go/cloud/bqx.CreateTable().

// ParseAndInsert decodes the data.NDT5Result JSON and inserts it into BQ.
func (dp *NDT5ResultParser) ParseAndInsert(meta map[string]bigquery.Value, testName string, test []byte) error {
func (dp *NDT5ResultParser) ParseAndInsert(meta etl.Metadata, testName string, test []byte) error {
metrics.WorkerState.WithLabelValues(dp.TableName(), "ndt5_result").Inc()
defer metrics.WorkerState.WithLabelValues(dp.TableName(), "ndt5_result").Dec()

Expand All @@ -85,13 +84,13 @@ func (dp *NDT5ResultParser) ParseAndInsert(meta map[string]bigquery.Value, testN
}

parser := schema.ParseInfo{
Version: Version(),
Version: meta.Version,
Time: time.Now(),
ArchiveURL: meta["filename"].(string),
ArchiveURL: meta.ArchiveURL,
Filename: testName,
GitCommit: GitCommit(),
GitCommit: meta.GitCommit,
}
date := meta["date"].(civil.Date)
date := meta.Date

// Since ndt5 rows can include both download (S2C) and upload (C2S)
// measurements (or neither), check and write independent rows for either
Expand Down
10 changes: 6 additions & 4 deletions parser/ndt5_result_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import (

"cloud.google.com/go/civil"

"cloud.google.com/go/bigquery"
"github.com/m-lab/etl/etl"
"github.com/m-lab/etl/parser"
"github.com/m-lab/etl/schema"
)
Expand Down Expand Up @@ -60,9 +60,11 @@ func TestNDT5ResultParser_ParseAndInsert(t *testing.T) {
if err != nil {
t.Fatalf(err.Error())
}
meta := map[string]bigquery.Value{
"filename": "gs://mlab-test-bucket/ndt/ndt5/2019/08/22/ndt_ndt5_2019_08_22_20190822T194819.568936Z-ndt5-mlab1-lga0t-ndt.tgz",
"date": d,
meta := etl.Metadata{
ArchiveURL: "gs://mlab-test-bucket/ndt/ndt5/2019/08/22/ndt_ndt5_2019_08_22_20190822T194819.568936Z-ndt5-mlab1-lga0t-ndt.tgz",
Date: d,
Version: parser.Version(),
GitCommit: parser.GitCommit(),
}

if err := n.ParseAndInsert(meta, tt.testName, resultData); (err != nil) != tt.wantErr {
Expand Down
15 changes: 6 additions & 9 deletions parser/ndt7_result.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,6 @@ import (
"strings"
"time"

"cloud.google.com/go/bigquery"

"cloud.google.com/go/civil"
"github.com/m-lab/etl/etl"
"github.com/m-lab/etl/metrics"
"github.com/m-lab/etl/row"
Expand Down Expand Up @@ -65,25 +62,25 @@ func (dp *NDT7ResultParser) IsParsable(testName string, data []byte) (string, bo
}

// ParseAndInsert decodes the data.NDT7Result JSON and inserts it into BQ.
func (dp *NDT7ResultParser) ParseAndInsert(meta map[string]bigquery.Value, testName string, test []byte) error {
func (dp *NDT7ResultParser) ParseAndInsert(meta etl.Metadata, testName string, test []byte) error {
// TODO: derive 'ndt5' (or 'ndt7') labels from testName.
metrics.WorkerState.WithLabelValues(dp.TableName(), "ndt7_result").Inc()
defer metrics.WorkerState.WithLabelValues(dp.TableName(), "ndt7_result").Dec()

row := schema.NDT7ResultRow{
Parser: schema.ParseInfo{
Version: Version(),
Version: meta.Version,
Time: time.Now(),
ArchiveURL: meta["filename"].(string),
ArchiveURL: meta.ArchiveURL,
Filename: testName,
GitCommit: GitCommit(),
GitCommit: meta.GitCommit,
},
}

// Parse the test.
err := json.Unmarshal(test, &row.Raw)
if err != nil {
log.Println(meta["filename"].(string), testName, err)
log.Println(meta.ArchiveURL, testName, err)
metrics.TestTotal.WithLabelValues(dp.TableName(), "ndt7_result", "Unmarshal").Inc()
return err
}
Expand All @@ -106,7 +103,7 @@ func (dp *NDT7ResultParser) ParseAndInsert(meta map[string]bigquery.Value, testN
// the given timestamp, regardless of the timestamp's timezone. Since we
// run our systems in UTC, all timestamps will be relative to UTC and as
// will these dates.
row.Date = meta["date"].(civil.Date)
row.Date = meta.Date
if row.Raw.Download != nil {
row.A = downSummary(row.Raw.Download)
} else if row.Raw.Upload != nil {
Expand Down
10 changes: 6 additions & 4 deletions parser/ndt7_result_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,10 @@ import (
"strings"
"testing"

"cloud.google.com/go/bigquery"
"cloud.google.com/go/civil"
"github.com/go-test/deep"

"github.com/m-lab/etl/etl"
"github.com/m-lab/etl/parser"
"github.com/m-lab/etl/schema"
"github.com/m-lab/go/pretty"
Expand All @@ -23,9 +23,11 @@ func setupNDT7InMemoryParser(t *testing.T, testName string) (*schema.NDT7ResultR
if err != nil {
t.Fatalf(err.Error())
}
meta := map[string]bigquery.Value{
"filename": "gs://mlab-test-bucket/ndt/ndt7/2020/03/18/ndt_ndt7_2020_03_18_20200318T003853.425987Z-ndt7-mlab3-syd03-ndt.tgz",
"date": civil.Date{Year: 2020, Month: 3, Day: 18},
meta := etl.Metadata{
ArchiveURL: "gs://mlab-test-bucket/ndt/ndt7/2020/03/18/ndt_ndt7_2020_03_18_20200318T003853.425987Z-ndt7-mlab3-syd03-ndt.tgz",
Date: civil.Date{Year: 2020, Month: 3, Day: 18},
Version: parser.Version(),
GitCommit: parser.GitCommit(),
}
err = n.ParseAndInsert(meta, testName, resultData)
if err != nil {
Expand Down
18 changes: 9 additions & 9 deletions parser/parser_test.go
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
// TODO(soon) Implement good tests for the existing parsers.
//
package parser_test

import (
Expand Down Expand Up @@ -125,7 +124,7 @@ func TestGetHopID(t *testing.T) {
}
}

//------------------------------------------------------------------------------------
// ------------------------------------------------------------------------------------
// TestParser ignores the content, returns a MapSaver containing meta data and
// "testname":"..."
// TODO add tests
Expand All @@ -144,14 +143,15 @@ func (tp *TestParser) IsParsable(testName string, test []byte) (string, bool) {
return "ext", true
}

func (tp *TestParser) ParseAndInsert(meta map[string]bigquery.Value, testName string, test []byte) error {
func (tp *TestParser) ParseAndInsert(meta etl.Metadata, testName string, test []byte) error {
metrics.TestTotal.WithLabelValues("table", "test", "ok").Inc()
values := make(map[string]bigquery.Value, len(meta)+1)
// TODO is there a better way to do this?
for k, v := range meta {
values[k] = v
values := map[string]bigquery.Value{
"filename": meta.ArchiveURL,
"date": meta.Date,
"version": meta.Version,
"git_commit": meta.GitCommit,
"testname": testName,
}
values["testname"] = testName
return tp.inserter.InsertRow(values)
}

Expand All @@ -174,7 +174,7 @@ func TestPlumbing(t *testing.T) {
tci := countingInserter{}
var ti etl.Inserter = &tci
var p etl.Parser = NewTestParser(ti)
err := p.ParseAndInsert(nil, "foo", foo[:])
err := p.ParseAndInsert(etl.Metadata{}, "foo", foo[:])
if err != nil {
fmt.Println(err)
}
Expand Down
Loading