vendor: update all dependencies
414 vendor/cloud.google.com/go/bigquery/integration_test.go (generated, vendored)
@@ -15,6 +15,7 @@
package bigquery

import (
"errors"
"flag"
"fmt"
"log"
@@ -26,7 +27,10 @@ import (
"testing"
"time"

gax "github.com/googleapis/gax-go"

"cloud.google.com/go/civil"
"cloud.google.com/go/internal"
"cloud.google.com/go/internal/pretty"
"cloud.google.com/go/internal/testutil"
"golang.org/x/net/context"
@@ -42,7 +46,7 @@ var (
{Name: "name", Type: StringFieldType},
{Name: "num", Type: IntegerFieldType},
}
fiveMinutesFromNow time.Time
testTableExpiration time.Time
)

func TestMain(m *testing.M) {
@@ -79,6 +83,7 @@ func initIntegrationTest() {
if err := dataset.Create(ctx); err != nil && !hasStatusCode(err, http.StatusConflict) { // AlreadyExists is 409
log.Fatalf("creating dataset: %v", err)
}
testTableExpiration = time.Now().Add(10 * time.Minute).Round(time.Second)
}

func TestIntegration_Create(t *testing.T) {
@@ -136,7 +141,7 @@ func TestIntegration_TableMetadata(t *testing.T) {
if got, want := md.Type, RegularTable; got != want {
t.Errorf("metadata.Type: got %v, want %v", got, want)
}
if got, want := md.ExpirationTime, fiveMinutesFromNow; !got.Equal(want) {
if got, want := md.ExpirationTime, testTableExpiration; !got.Equal(want) {
t.Errorf("metadata.Type: got %v, want %v", got, want)
}

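Editorial aside, not part of the upstream diff: the check above relies on time.Time.Equal and on the shared expiration being rounded to whole seconds in initIntegrationTest. Below is a minimal self-contained sketch of why Equal, rather than ==, is the right comparison once a timestamp has made a round trip; the UTC conversion only simulates a value echoed back by a service.

package main

import (
	"fmt"
	"time"
)

func main() {
	// Shared expiration, rounded to whole seconds as in initIntegrationTest.
	testTableExpiration := time.Now().Add(10 * time.Minute).Round(time.Second)

	// Simulate a service echoing the same instant back, normalized to UTC.
	echoed := testTableExpiration.UTC()

	fmt.Println(echoed == testTableExpiration)     // may be false: == also compares locations
	fmt.Println(echoed.Equal(testTableExpiration)) // true: same instant on the timeline
}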
@@ -221,32 +226,37 @@ func TestIntegration_Tables(t *testing.T) {
ctx := context.Background()
table := newTable(t, schema)
defer table.Delete(ctx)
wantName := table.FullyQualifiedName()

// Iterate over tables in the dataset.
it := dataset.Tables(ctx)
var tables []*Table
for {
tbl, err := it.Next()
if err == iterator.Done {
break
// This test is flaky due to eventual consistency.
ctx, cancel := context.WithTimeout(ctx, 10*time.Second)
defer cancel()
err := internal.Retry(ctx, gax.Backoff{}, func() (stop bool, err error) {
// Iterate over tables in the dataset.
it := dataset.Tables(ctx)
var tableNames []string
for {
tbl, err := it.Next()
if err == iterator.Done {
break
}
if err != nil {
return false, err
}
tableNames = append(tableNames, tbl.FullyQualifiedName())
}
if err != nil {
t.Fatal(err)
// Other tests may be running with this dataset, so there might be more
// than just our table in the list. So don't try for an exact match; just
// make sure that our table is there somewhere.
for _, tn := range tableNames {
if tn == wantName {
return true, nil
}
}
tables = append(tables, tbl)
}
// Other tests may be running with this dataset, so there might be more
// than just our table in the list. So don't try for an exact match; just
// make sure that our table is there somewhere.
found := false
for _, tbl := range tables {
if reflect.DeepEqual(tbl, table) {
found = true
break
}
}
if !found {
t.Errorf("Tables: got %v\nshould see %v in the list", pretty.Value(tables), pretty.Value(table))
return false, fmt.Errorf("got %v\nwant %s in the list", tableNames, wantName)
})
if err != nil {
t.Fatal(err)
}
}

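Aside, separate from the vendored change: the hunk above wraps the table listing in internal.Retry with a gax.Backoff, and the callback signals its outcome through (stop bool, err error). The helper below is a rough self-contained sketch of that contract under an assumed doubling delay; it is not the library's implementation, and the timings are placeholders.

package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

// retry keeps calling f until it reports stop=true or the context expires.
// This mirrors the (stop, err) contract used by internal.Retry in the diff;
// the doubling delay is an assumption, not the gax.Backoff defaults.
func retry(ctx context.Context, initial time.Duration, f func() (stop bool, err error)) error {
	delay := initial
	for {
		stop, err := f()
		if stop {
			return err // final answer: success (nil) or a permanent error
		}
		// Transient failure: wait and try again, unless the deadline passed.
		select {
		case <-ctx.Done():
			return fmt.Errorf("retry gave up: %v (last error: %v)", ctx.Err(), err)
		case <-time.After(delay):
		}
		delay *= 2
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	attempts := 0
	err := retry(ctx, 100*time.Millisecond, func() (bool, error) {
		attempts++
		if attempts < 3 {
			return false, errors.New("not consistent yet") // keep polling
		}
		return true, nil // the expected table finally showed up
	})
	fmt.Println(attempts, err) // 3 <nil>
}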
@@ -302,6 +312,7 @@ func TestIntegration_UploadAndRead(t *testing.T) {
if err != nil {
t.Fatal(err)
}

job2, err := client.JobFromID(ctx, job1.ID())
if err != nil {
t.Fatal(err)
@@ -312,6 +323,18 @@ func TestIntegration_UploadAndRead(t *testing.T) {
}
checkRead(t, "job.Read", rit, wantRows)

// Get statistics.
jobStatus, err := job2.Status(ctx)
if err != nil {
t.Fatal(err)
}
if jobStatus.Statistics == nil {
t.Fatal("jobStatus missing statistics")
}
if _, ok := jobStatus.Statistics.Details.(*QueryStatistics); !ok {
t.Errorf("expected QueryStatistics, got %T", jobStatus.Statistics.Details)
}

// Test reading directly into a []Value.
valueLists, err := readAll(table.Read(ctx))
if err != nil {
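Editorial aside, not from the diff: the new statistics check uses a comma-ok type assertion because Statistics.Details is an interface value whose concrete type depends on the job kind. A standalone illustration of that idiom follows; the Details, QueryStats, and LoadStats types are simplified stand-ins, not the bigquery package's types.

package main

import "fmt"

// Details plays the role of an interface-typed statistics field.
type Details interface{ isDetails() }

type QueryStats struct{ CacheHit bool }
type LoadStats struct{ OutputRows int64 }

func (*QueryStats) isDetails() {}
func (*LoadStats) isDetails()  {}

func describe(d Details) string {
	// A type switch handles every known concrete type plus a fallback.
	switch s := d.(type) {
	case *QueryStats:
		return fmt.Sprintf("query (cache hit: %t)", s.CacheHit)
	case *LoadStats:
		return fmt.Sprintf("load (%d output rows)", s.OutputRows)
	default:
		return "unknown statistics type"
	}
}

func main() {
	var d Details = &QueryStats{CacheHit: true}

	// Comma-ok assertion, as in the test: no panic if the type is different.
	if qs, ok := d.(*QueryStats); ok {
		fmt.Println("cache hit:", qs.CacheHit)
	}
	fmt.Println(describe(&LoadStats{OutputRows: 3}))
}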
@@ -348,20 +371,39 @@ func TestIntegration_UploadAndRead(t *testing.T) {
}
}

type SubSubTestStruct struct {
Integer int64
}

type SubTestStruct struct {
String string
Record SubSubTestStruct
RecordArray []SubSubTestStruct
}

type TestStruct struct {
Name string
Nums []int
Sub Sub
Subs []*Sub
}
Name string
Bytes []byte
Integer int64
Float float64
Boolean bool
Timestamp time.Time
Date civil.Date
Time civil.Time
DateTime civil.DateTime

type Sub struct {
B bool
SubSub SubSub
SubSubs []*SubSub
}
StringArray []string
IntegerArray []int64
FloatArray []float64
BooleanArray []bool
TimestampArray []time.Time
DateArray []civil.Date
TimeArray []civil.Time
DateTimeArray []civil.DateTime

type SubSub struct{ Count int }
Record SubTestStruct
RecordArray []SubTestStruct
}

func TestIntegration_UploadAndReadStructs(t *testing.T) {
if client == nil {
@@ -376,16 +418,61 @@ func TestIntegration_UploadAndReadStructs(t *testing.T) {
table := newTable(t, schema)
defer table.Delete(ctx)

d := civil.Date{2016, 3, 20}
tm := civil.Time{15, 4, 5, 0}
ts := time.Date(2016, 3, 20, 15, 4, 5, 0, time.UTC)
dtm := civil.DateTime{d, tm}

d2 := civil.Date{1994, 5, 15}
tm2 := civil.Time{1, 2, 4, 0}
ts2 := time.Date(1994, 5, 15, 1, 2, 4, 0, time.UTC)
dtm2 := civil.DateTime{d2, tm2}

// Populate the table.
upl := table.Uploader()
want := []*TestStruct{
{Name: "a", Nums: []int{1, 2}, Sub: Sub{B: true}, Subs: []*Sub{{B: false}, {B: true}}},
{Name: "b", Nums: []int{1}, Subs: []*Sub{{B: false}, {B: false}, {B: true}}},
{Name: "c", Sub: Sub{B: true}},
{
Name: "d",
Sub: Sub{SubSub: SubSub{12}, SubSubs: []*SubSub{{1}, {2}, {3}}},
Subs: []*Sub{{B: false, SubSub: SubSub{4}}, {B: true, SubSubs: []*SubSub{{5}, {6}}}},
"a",
[]byte("byte"),
42,
3.14,
true,
ts,
d,
tm,
dtm,
[]string{"a", "b"},
[]int64{1, 2},
[]float64{1, 1.41},
[]bool{true, false},
[]time.Time{ts, ts2},
[]civil.Date{d, d2},
[]civil.Time{tm, tm2},
[]civil.DateTime{dtm, dtm2},
SubTestStruct{
"string",
SubSubTestStruct{24},
[]SubSubTestStruct{{1}, {2}},
},
[]SubTestStruct{
{String: "empty"},
{
"full",
SubSubTestStruct{1},
[]SubSubTestStruct{{1}, {2}},
},
},
},
{
Name: "b",
Bytes: []byte("byte2"),
Integer: 24,
Float: 4.13,
Boolean: false,
Timestamp: ts,
Date: d,
Time: tm,
DateTime: dtm,
},
}
var savers []*StructSaver
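Aside, not part of the upstream diff: the literals above (civil.Date{2016, 3, 20} and friends) use the cloud.google.com/go/civil types, which carry calendar and wall-clock values with no time zone attached. The sketch below spells out the same values with keyed fields; the field names are those of the civil package, everything else is illustrative.

package main

import (
	"fmt"
	"time"

	"cloud.google.com/go/civil"
)

func main() {
	// Keyed equivalents of the unkeyed literals in the test.
	d := civil.Date{Year: 2016, Month: time.March, Day: 20} // civil.Date{2016, 3, 20}
	tm := civil.Time{Hour: 15, Minute: 4, Second: 5}        // civil.Time{15, 4, 5, 0}
	dtm := civil.DateTime{Date: d, Time: tm}                // civil.DateTime{d, tm}

	// The matching absolute instant, pinned to UTC, as used for TIMESTAMP columns.
	ts := time.Date(2016, 3, 20, 15, 4, 5, 0, time.UTC)

	fmt.Println(d, tm, dtm, ts)
}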
@@ -573,28 +660,38 @@ func TestIntegration_DML(t *testing.T) {
t.Skip("Integration tests skipped")
}
ctx := context.Background()
table := newTable(t, schema)
defer table.Delete(ctx)
// Retry insert; sometimes it fails with INTERNAL.
err := internal.Retry(ctx, gax.Backoff{}, func() (bool, error) {
table := newTable(t, schema)
defer table.Delete(ctx)

// Use DML to insert.
wantRows := [][]Value{
[]Value{"a", int64(0)},
[]Value{"b", int64(1)},
[]Value{"c", int64(2)},
}
query := fmt.Sprintf("INSERT bigquery_integration_test.%s (name, num) "+
"VALUES ('a', 0), ('b', 1), ('c', 2)",
table.TableID)
q := client.Query(query)
q.UseStandardSQL = true // necessary for DML
job, err := q.Run(ctx)
// Use DML to insert.
wantRows := [][]Value{
[]Value{"a", int64(0)},
[]Value{"b", int64(1)},
[]Value{"c", int64(2)},
}
query := fmt.Sprintf("INSERT bigquery_integration_test.%s (name, num) "+
"VALUES ('a', 0), ('b', 1), ('c', 2)",
table.TableID)
q := client.Query(query)
q.UseStandardSQL = true // necessary for DML
job, err := q.Run(ctx)
if err != nil {
return false, err
}
if err := wait(ctx, job); err != nil {
return false, err
}
if msg, ok := compareRead(table.Read(ctx), wantRows); !ok {
// Stop on read error, because that has never been flaky.
return true, errors.New(msg)
}
return true, nil
})
if err != nil {
t.Fatal(err)
}
if err := wait(ctx, job); err != nil {
t.Fatal(err)
}
checkRead(t, "INSERT", table.Read(ctx), wantRows)
}

func TestIntegration_TimeTypes(t *testing.T) {
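Editorial aside, not from the diff: in the retried DML body above, returning (false, err) asks for another attempt while (true, err) makes the error final, which is why INTERNAL insert failures are retried but a read mismatch stops immediately. A small standalone illustration of that split; the isTransient classifier and the error text are hypothetical.

package main

import (
	"errors"
	"fmt"
	"strings"
)

var errInternal = errors.New("rpc error: code = Internal") // hypothetical transient failure

// isTransient is a stand-in classifier; the real test simply retries any insert error.
func isTransient(err error) bool {
	return err != nil && strings.Contains(err.Error(), "Internal")
}

// attempt mimics one pass of the retried body: insert, then read back.
func attempt(insertErr, readErr error) (stop bool, err error) {
	if insertErr != nil {
		if isTransient(insertErr) {
			return false, insertErr // retry the whole body
		}
		return true, insertErr // permanent: give up now
	}
	if readErr != nil {
		return true, readErr // reads have never been flaky, so never retry them
	}
	return true, nil
}

func main() {
	fmt.Println(attempt(errInternal, nil)) // false ...  -> would be retried
	fmt.Println(attempt(nil, nil))         // true <nil> -> success
}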
@@ -645,12 +742,183 @@ func TestIntegration_TimeTypes(t *testing.T) {
checkRead(t, "TimeTypes", table.Read(ctx), wantRows)
}

func TestIntegration_StandardQuery(t *testing.T) {
if client == nil {
t.Skip("Integration tests skipped")
}
ctx := context.Background()

d := civil.Date{2016, 3, 20}
tm := civil.Time{15, 04, 05, 0}
ts := time.Date(2016, 3, 20, 15, 04, 05, 0, time.UTC)
dtm := ts.Format("2006-01-02 15:04:05")

// Constructs Value slices made up of int64s.
ints := func(args ...int) []Value {
vals := make([]Value, len(args))
for i, arg := range args {
vals[i] = int64(arg)
}
return vals
}

testCases := []struct {
query string
wantRow []Value
}{
{"SELECT 1", ints(1)},
{"SELECT 1.3", []Value{1.3}},
{"SELECT TRUE", []Value{true}},
{"SELECT 'ABC'", []Value{"ABC"}},
{"SELECT CAST('foo' AS BYTES)", []Value{[]byte("foo")}},
{fmt.Sprintf("SELECT TIMESTAMP '%s'", dtm), []Value{ts}},
{fmt.Sprintf("SELECT [TIMESTAMP '%s', TIMESTAMP '%s']", dtm, dtm), []Value{[]Value{ts, ts}}},
{fmt.Sprintf("SELECT ('hello', TIMESTAMP '%s')", dtm), []Value{[]Value{"hello", ts}}},
{fmt.Sprintf("SELECT DATETIME(TIMESTAMP '%s')", dtm), []Value{civil.DateTime{d, tm}}},
{fmt.Sprintf("SELECT DATE(TIMESTAMP '%s')", dtm), []Value{d}},
{fmt.Sprintf("SELECT TIME(TIMESTAMP '%s')", dtm), []Value{tm}},
{"SELECT (1, 2)", []Value{ints(1, 2)}},
{"SELECT [1, 2, 3]", []Value{ints(1, 2, 3)}},
{"SELECT ([1, 2], 3, [4, 5])", []Value{[]Value{ints(1, 2), int64(3), ints(4, 5)}}},
{"SELECT [(1, 2, 3), (4, 5, 6)]", []Value{[]Value{ints(1, 2, 3), ints(4, 5, 6)}}},
{"SELECT [([1, 2, 3], 4), ([5, 6], 7)]", []Value{[]Value{[]Value{ints(1, 2, 3), int64(4)}, []Value{ints(5, 6), int64(7)}}}},
{"SELECT ARRAY(SELECT STRUCT([1, 2]))", []Value{[]Value{[]Value{ints(1, 2)}}}},
}
for _, c := range testCases {
q := client.Query(c.query)
q.UseStandardSQL = true
it, err := q.Read(ctx)
if err != nil {
t.Fatal(err)
}
checkRead(t, "StandardQuery", it, [][]Value{c.wantRow})
}
}

func TestIntegration_LegacyQuery(t *testing.T) {
if client == nil {
t.Skip("Integration tests skipped")
}
ctx := context.Background()

ts := time.Date(2016, 3, 20, 15, 04, 05, 0, time.UTC)
dtm := ts.Format("2006-01-02 15:04:05")

testCases := []struct {
query string
wantRow []Value
}{
{"SELECT 1", []Value{int64(1)}},
{"SELECT 1.3", []Value{1.3}},
{"SELECT TRUE", []Value{true}},
{"SELECT 'ABC'", []Value{"ABC"}},
{"SELECT CAST('foo' AS BYTES)", []Value{[]byte("foo")}},
{fmt.Sprintf("SELECT TIMESTAMP('%s')", dtm), []Value{ts}},
{fmt.Sprintf("SELECT DATE(TIMESTAMP('%s'))", dtm), []Value{"2016-03-20"}},
{fmt.Sprintf("SELECT TIME(TIMESTAMP('%s'))", dtm), []Value{"15:04:05"}},
}
for _, c := range testCases {
q := client.Query(c.query)
it, err := q.Read(ctx)
if err != nil {
t.Fatal(err)
}
checkRead(t, "LegacyQuery", it, [][]Value{c.wantRow})
}
}

func TestIntegration_QueryParameters(t *testing.T) {
if client == nil {
t.Skip("Integration tests skipped")
}
ctx := context.Background()

d := civil.Date{2016, 3, 20}
tm := civil.Time{15, 04, 05, 0}
dtm := civil.DateTime{d, tm}
ts := time.Date(2016, 3, 20, 15, 04, 05, 0, time.UTC)

type ss struct {
String string
}

type s struct {
Timestamp time.Time
StringArray []string
SubStruct ss
SubStructArray []ss
}

testCases := []struct {
query string
parameters []QueryParameter
wantRow []Value
}{
{"SELECT @val", []QueryParameter{{"val", 1}}, []Value{int64(1)}},
{"SELECT @val", []QueryParameter{{"val", 1.3}}, []Value{1.3}},
{"SELECT @val", []QueryParameter{{"val", true}}, []Value{true}},
{"SELECT @val", []QueryParameter{{"val", "ABC"}}, []Value{"ABC"}},
{"SELECT @val", []QueryParameter{{"val", []byte("foo")}}, []Value{[]byte("foo")}},
{"SELECT @val", []QueryParameter{{"val", ts}}, []Value{ts}},
{"SELECT @val", []QueryParameter{{"val", []time.Time{ts, ts}}}, []Value{[]Value{ts, ts}}},
{"SELECT @val", []QueryParameter{{"val", dtm}}, []Value{dtm}},
{"SELECT @val", []QueryParameter{{"val", d}}, []Value{d}},
{"SELECT @val", []QueryParameter{{"val", tm}}, []Value{tm}},
{"SELECT @val", []QueryParameter{{"val", s{ts, []string{"a", "b"}, ss{"c"}, []ss{{"d"}, {"e"}}}}},
[]Value{[]Value{ts, []Value{"a", "b"}, []Value{"c"}, []Value{[]Value{"d"}, []Value{"e"}}}}},
{"SELECT @val.Timestamp, @val.SubStruct.String", []QueryParameter{{"val", s{Timestamp: ts, SubStruct: ss{"a"}}}}, []Value{ts, "a"}},
}
for _, c := range testCases {
q := client.Query(c.query)
q.Parameters = c.parameters
it, err := q.Read(ctx)
if err != nil {
t.Fatal(err)
}
checkRead(t, "QueryParameters", it, [][]Value{c.wantRow})
}
}

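Aside, separate from the vendored change: the unkeyed QueryParameter literals above rely on its Name and Value fields. Below is a rough standalone sketch of the same parameterized-query flow against this vendored API surface; the project ID is a placeholder, and the expected output comment is an assumption about how an INT64 result comes back.

package main

import (
	"fmt"
	"log"

	"cloud.google.com/go/bigquery"
	"golang.org/x/net/context"
	"google.golang.org/api/iterator"
)

func main() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "my-project") // placeholder project ID
	if err != nil {
		log.Fatal(err)
	}

	q := client.Query("SELECT @val + 1")
	q.UseStandardSQL = true // named parameters need standard SQL
	q.Parameters = []bigquery.QueryParameter{{Name: "val", Value: 41}}

	it, err := q.Read(ctx)
	if err != nil {
		log.Fatal(err)
	}
	for {
		var row []bigquery.Value
		err := it.Next(&row)
		if err == iterator.Done {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(row) // expected to print [42], with the INT64 as an int64
	}
}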
func TestIntegration_ReadNullIntoStruct(t *testing.T) {
// Reading a null into a struct field should return an error (not panic).
if client == nil {
t.Skip("Integration tests skipped")
}
ctx := context.Background()
table := newTable(t, schema)
defer table.Delete(ctx)

upl := table.Uploader()
row := &ValuesSaver{
Schema: schema,
Row: []Value{"name", nil},
}
if err := upl.Put(ctx, []*ValuesSaver{row}); err != nil {
t.Fatal(putError(err))
}
if err := waitForRow(ctx, table); err != nil {
t.Fatal(err)
}

q := client.Query(fmt.Sprintf("select name, num from %s", table.TableID))
q.DefaultProjectID = dataset.ProjectID
q.DefaultDatasetID = dataset.DatasetID
it, err := q.Read(ctx)
if err != nil {
t.Fatal(err)
}
type S struct{ Num int64 }
var s S
if err := it.Next(&s); err == nil {
t.Fatal("got nil, want error")
}
}

// Creates a new, temporary table with a unique name and the given schema.
func newTable(t *testing.T, s Schema) *Table {
fiveMinutesFromNow = time.Now().Add(5 * time.Minute).Round(time.Second)
name := fmt.Sprintf("t%d", time.Now().UnixNano())
table := dataset.Table(name)
err := table.Create(context.Background(), s, TableExpiration(fiveMinutesFromNow))
err := table.Create(context.Background(), s, TableExpiration(testTableExpiration))
if err != nil {
t.Fatal(err)
}
@@ -658,21 +926,28 @@ func newTable(t *testing.T, s Schema) *Table {
}

func checkRead(t *testing.T, msg string, it *RowIterator, want [][]Value) {
if msg2, ok := compareRead(it, want); !ok {
t.Errorf("%s: %s", msg, msg2)
}
}

func compareRead(it *RowIterator, want [][]Value) (msg string, ok bool) {
got, err := readAll(it)
if err != nil {
t.Fatalf("%s: %v", msg, err)
return err.Error(), false
}
if len(got) != len(want) {
t.Errorf("%s: got %d rows, want %d", msg, len(got), len(want))
return fmt.Sprintf("got %d rows, want %d", len(got), len(want)), false
}
sort.Sort(byCol0(got))
for i, r := range got {
gotRow := []Value(r)
wantRow := want[i]
if !reflect.DeepEqual(gotRow, wantRow) {
t.Errorf("%s #%d: got %v, want %v", msg, i, gotRow, wantRow)
return fmt.Sprintf("#%d: got %v, want %v", i, gotRow, wantRow), false
}
}
return "", true
}

func readAll(it *RowIterator) ([][]Value, error) {
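Editorial aside, not from the diff: the hunk above splits the old asserting helper into compareRead, which only reports (msg, ok), so the same comparison can fail a test via checkRead or feed a retry loop as in TestIntegration_DML. A miniature standalone version of that pattern, with simplified types:

package main

import (
	"errors"
	"fmt"
)

// compare reports a mismatch instead of failing, so the caller decides whether
// to fail the test immediately or to treat the message as a retryable error.
func compare(got, want []int) (msg string, ok bool) {
	if len(got) != len(want) {
		return fmt.Sprintf("got %d values, want %d", len(got), len(want)), false
	}
	for i := range got {
		if got[i] != want[i] {
			return fmt.Sprintf("#%d: got %d, want %d", i, got[i], want[i]), false
		}
	}
	return "", true
}

func main() {
	// Retry-style call site: a mismatch becomes an error value, not a test failure.
	if msg, ok := compare([]int{1, 2}, []int{1, 3}); !ok {
		fmt.Println(errors.New(msg))
	}
}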
@@ -721,6 +996,15 @@ func wait(ctx context.Context, job *Job) error {
if status.Err() != nil {
return fmt.Errorf("job status error: %#v", status.Err())
}
if status.Statistics == nil {
return errors.New("nil Statistics")
}
if status.Statistics.EndTime.IsZero() {
return errors.New("EndTime is zero")
}
if status.Statistics.Details == nil {
return errors.New("nil Statistics.Details")
}
return nil
}