120809: sql: add ImportType to descriptor r=dt a=stevendanna

This adds an ImportType field to table descriptors. We need this field
if we want to support disabling ImportEpoch writing via a cluster
setting, since we won't necessarily be able to trust that a non-zero
ImportEpoch is actually from the currently in-progress IMPORT.
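
For illustration, here is a minimal, self-contained sketch of the check this field enables. The standalone `ImportType` and `tableDesc` declarations are stand-ins for the generated `descpb` code, not the real API; the actual logic is the `epochBasedInProgressImport` change in `pkg/ccl/backupccl/restore_job.go`, shown in the diff below.

```go
package main

import "fmt"

// ImportType mirrors the new descriptor enum (illustrative stand-in for
// the generated descpb type).
type ImportType int32

const (
	// Zero value: the in-progress import only set ImportStartWallTime.
	ImportWithStartTimeOnly ImportType = 0
	// The in-progress import also bumped ImportEpoch.
	ImportWithImportEpoch ImportType = 1
)

// tableDesc stands in for the relevant TableDescriptor fields.
type tableDesc struct {
	ImportStartWallTime int64
	ImportEpoch         uint32
	ImportType          ImportType
}

// epochBasedInProgressImport reports whether the in-progress IMPORT can be
// rolled back with an ImportEpoch deletion predicate. A non-zero epoch by
// itself is not enough once epoch writing can be toggled off: the epoch may
// be left over from an earlier IMPORT, so the descriptor must also record
// that the current import bumped it.
func epochBasedInProgressImport(t tableDesc) bool {
	return t.ImportStartWallTime > 0 &&
		t.ImportEpoch > 0 &&
		t.ImportType == ImportWithImportEpoch
}

func main() {
	stale := tableDesc{ImportStartWallTime: 5, ImportEpoch: 2} // epoch left over from an older IMPORT
	fresh := tableDesc{ImportStartWallTime: 5, ImportEpoch: 3, ImportType: ImportWithImportEpoch}
	fmt.Println(epochBasedInProgressImport(stale), epochBasedInProgressImport(fresh)) // false true
}
```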

Epic: none
Release note: None



121220: sql/importer: deflake TestExportImportBank r=rytaft a=rytaft

This commit fixes an issue caused by running two tests on the same database. Each test creates a table `bank2` and drops it at the end of the test. However, if the first test fails after creating the table, the table might not be successfully dropped, so the second test fails when it attempts to create the table, since it already exists. The fix is to defer the `DROP TABLE` command immediately after the table is created.
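
A rough sketch of the pattern (the `execer` interface is a hypothetical stand-in for the test suite's SQL-runner helper; only the create/defer ordering matters):

```go
package example

import (
	"fmt"
	"testing"
)

// execer is a hypothetical stand-in for the test's SQL runner.
type execer interface {
	Exec(t testing.TB, query string, args ...interface{})
}

// runBank2Roundtrip creates bank2 and schedules its DROP immediately, so the
// table is removed on every exit path, including a failed assertion. A rerun
// against the same database therefore never hits "table already exists".
func runBank2Roundtrip(t *testing.T, db execer, schema string) {
	db.Exec(t, fmt.Sprintf("CREATE TABLE bank2 %s", schema))
	defer db.Exec(t, "DROP TABLE bank2")
	// ... IMPORT INTO bank2 and fingerprint assertions would go here ...
}
```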

Fixes #121176

Release note: None

121223: workflows: some small improvements r=rail a=rickystewart

1. Remember to use RBE for the build of `mirror` in `check-generated-code` (see the sketch after this list).
2. Use the `build.sh` script for Windows builds.
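
A condensed sketch of the first item, as it lands in `build/bazelutil/bazel-generate.sh` (surrounding script elided; `EXTRA_BAZEL_ARGS` is how CI passes the RBE flags through):

```sh
#!/usr/bin/env bash
set -euo pipefail

# Forward caller-provided bazel flags (e.g. the RBE config from CI) to the
# mirror build too, instead of only to generate-staticcheck. The
# default-empty expansion keeps local runs without the variable working.
CONTENTS=$(bazel run //pkg/cmd/mirror/go:mirror ${EXTRA_BAZEL_ARGS:-})
echo "$CONTENTS" > DEPS.bzl
```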

Epic: CRDB-8308
Release note: None

Co-authored-by: Steven Danna <danna@cockroachlabs.com>
Co-authored-by: Rebecca Taft <becca@cockroachlabs.com>
Co-authored-by: Ricky Stewart <ricky@cockroachlabs.com>
4 people committed Mar 27, 2024
4 parents 6b662e3 + 0347bef + 9228bdd + 280e1d5 commit 40cc14f
Showing 13 changed files with 180 additions and 137 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/github-actions-essential-ci.yml
@@ -284,7 +284,7 @@ jobs:
- run: ./build/github/get-engflow-keys.sh
- run: ./build/github/prepare-summarize-build.sh
- name: build
run: bazel build //pkg/cmd/cockroach //pkg/cmd/cockroach-short //pkg/cmd/cockroach-sql //pkg/cmd/cockroach-oss --config crosswindows --jobs 100 --remote_download_minimal --build_event_binary_file=bes.bin --enable_runfiles $(./build/github/engflow-args.sh)
run: ./build/github/build.sh crosswindows
- name: upload build results
run: ./build/github/summarize-build.sh bes.bin
if: always()
2 changes: 1 addition & 1 deletion build/bazelutil/bazel-generate.sh
@@ -50,7 +50,7 @@ if files_unchanged_from_upstream go.mod go.sum DEPS.bzl $(find_relevant ./pkg/cm
echo "Skipping //pkg/cmd/mirror/go:mirror (relevant files are unchanged from upstream)."
echo "Skipping //pkg/cmd/generate-staticcheck (relevant files are unchanged from upstream)."
else
CONTENTS=$(bazel run //pkg/cmd/mirror/go:mirror)
CONTENTS=$(bazel run //pkg/cmd/mirror/go:mirror ${EXTRA_BAZEL_ARGS:-})
echo "$CONTENTS" > DEPS.bzl
bazel run pkg/cmd/generate-staticcheck --run_under="cd $PWD && " ${EXTRA_BAZEL_ARGS:-}
fi
4 changes: 3 additions & 1 deletion pkg/ccl/backupccl/restore_job.go
@@ -830,7 +830,9 @@ func epochBasedInProgressImport(desc catalog.Descriptor) bool {
return false
}

return table.GetInProgressImportStartTime() > 0 && table.TableDesc().ImportEpoch > 0
return table.GetInProgressImportStartTime() > 0 &&
table.TableDesc().ImportEpoch > 0 &&
table.TableDesc().ImportType == descpb.ImportType_IMPORT_WITH_IMPORT_EPOCH
}

// createImportingDescriptors creates the tables that we will restore into and returns up to three
31 changes: 24 additions & 7 deletions pkg/ccl/backupccl/testdata/backup-restore/import-epoch
@@ -9,7 +9,6 @@ exec-sql
SET CLUSTER SETTING bulkio.import.write_import_epoch.enabled=true;
----


exec-sql
CREATE DATABASE d;
USE d;
@@ -19,11 +18,11 @@ INSERT INTO baz VALUES (1, 'x'),(2,'y'),(3,'z');
----

exec-sql
CREATE VIEW import_epoch (epoch)
CREATE VIEW import_epoch (epoch, type)
AS WITH tbls AS (
SELECT id, crdb_internal.pb_to_json('cockroach.sql.sqlbase.Descriptor', descriptor) AS orig FROM system.descriptor
)
SELECT orig->'table'->'importEpoch' FROM tbls WHERE id = '109';
SELECT orig->'table'->'importEpoch', orig->'table'->'importType' FROM tbls WHERE id = '109';
----

exec-sql
@@ -43,7 +42,7 @@ foo 109
query-sql
SELECT * FROM import_epoch
----
1
1 <nil>

exec-sql
EXPORT INTO CSV 'nodelocal://1/export2/' FROM SELECT * FROM baz WHERE i = 2;
@@ -57,7 +56,7 @@ IMPORT INTO foo (i,s) CSV DATA ('nodelocal://1/export2/export*-n*.0.csv')
query-sql
SELECT * FROM import_epoch
----
2
2 <nil>

exec-sql
SET CLUSTER SETTING jobs.debug.pausepoints = 'import.after_ingest';
@@ -78,13 +77,31 @@ job paused at pausepoint
query-sql
SELECT * FROM import_epoch
----
3
3 "IMPORT_WITH_IMPORT_EPOCH"

# Cancel the job so that the cleanup hook runs.
job cancel=a
----

# Ensure that the import type is cleared.
query-sql
SELECT * FROM import_epoch
----
3 <nil>


exec-sql
SET CLUSTER SETTING bulkio.import.write_import_epoch.enabled=false;
----

# Ensure that the correct ImportType is set when the cluster setting is disabled.
import expect-pausepoint tag=b
IMPORT INTO foo (i,s) CSV DATA ('nodelocal://1/export3/export*-n*.0.csv')
----
job paused at pausepoint

# We expect the zero-valued ImportType here.
query-sql
SELECT * FROM import_epoch
----
3
3 <nil>
12 changes: 6 additions & 6 deletions pkg/cli/testdata/doctor/test_recreate_zipdir

Large diffs are not rendered by default.

12 changes: 6 additions & 6 deletions pkg/cli/testdata/doctor/test_recreate_zipdir-json

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pkg/sql/catalog/bootstrap/bootstrap_test.go
@@ -154,7 +154,7 @@ func TestSystemDatabaseSchemaBootstrapVersionBumped(t *testing.T) {

// If you need to update this value (i.e. failed this test), check whether
// you need to bump systemschema.SystemDatabaseSchemaBootstrapVersion too.
const prevSystemHash = "2c6957d6e966665567ddf47ccc1f465b11b475b4341db308ed0c77524e6f5117"
const prevSystemHash = "104f55c10a62f501888678d7fbc987fbae51457307a3e9b23d66a5078e7d48a4"
_, curSystemHash := GetAndHashInitialValuesToString(0 /* tenantID */)

if prevSystemHash != curSystemHash {
212 changes: 106 additions & 106 deletions pkg/sql/catalog/bootstrap/testdata/testdata

Large diffs are not rendered by default.

23 changes: 22 additions & 1 deletion pkg/sql/catalog/descpb/structured.proto
@@ -1210,7 +1210,28 @@ message TableDescriptor {
// gets incremented while preparing the table for ingestion.
optional uint32 import_epoch = 59 [(gogoproto.nullable) = false, (gogoproto.customname) = "ImportEpoch"];

// Next ID: 60
// ImportType is the type of import that is running if
// ImportStartWallTime is set.
optional ImportType import_type = 60 [(gogoproto.nullable) = false, (gogoproto.customname) = "ImportType"];

// Next ID: 61
}

// ImportType indicates the type of IMPORT that is in progress for a
// TableDescriptor.
enum ImportType {
// IMPORT_WITH_START_TIME_ONLY indicates that this import set the
// ImportStartWallTime on the descriptor but did not increment
// the ImportEpoch. Such imports cannot be rolled back without MVCC
// timestamp filtering.
//
// Note this is purposefully 0, since in-progress imports from before
// this field was added won't have this set.
IMPORT_WITH_START_TIME_ONLY = 0;
// IMPORT_WITH_IMPORT_EPOCH indicates that the running import did
// increment the import epoch. Such imports can be rolled back using
// an ImportEpoch deletion predicate.
IMPORT_WITH_IMPORT_EPOCH = 1;
}

// SurvivalGoal is the survival goal for a database.
2 changes: 2 additions & 0 deletions pkg/sql/catalog/tabledesc/table_desc.go
@@ -210,6 +210,7 @@ func (desc *Mutable) SetPublicNonPrimaryIndex(indexOrdinal int, index descpb.Ind
// import, bumping the ImportEpoch.
func (desc *Mutable) OfflineForImport() {
desc.SetOffline(OfflineReasonImporting)
desc.ImportType = descpb.ImportType_IMPORT_WITH_IMPORT_EPOCH
desc.ImportEpoch++
}

@@ -224,6 +225,7 @@ func (desc *Mutable) InitializeImport(startWallTime int64) error {

// FinalizeImport removes in-progress import metadata from the descriptor.
func (desc *Mutable) FinalizeImport() {
desc.ImportType = 0
desc.ImportStartWallTime = 0
}

1 change: 1 addition & 0 deletions pkg/sql/catalog/tabledesc/validate_test.go
@@ -139,6 +139,7 @@ var validationMap = []struct {
"HistogramSamples": {status: thisFieldReferencesNoObjects},
"SchemaLocked": {status: thisFieldReferencesNoObjects},
"ImportEpoch": {status: thisFieldReferencesNoObjects},
"ImportType": {status: thisFieldReferencesNoObjects},
},
},
{
2 changes: 1 addition & 1 deletion pkg/sql/importer/exportcsv_test.go
@@ -131,6 +131,7 @@ func TestExportImportBank(t *testing.T) {
schema := bank.FromRows(1).Tables()[0].Schema
exportedFiles := filepath.Join(exportDir, "*")
db.Exec(t, fmt.Sprintf("CREATE TABLE bank2 %s", schema))
defer db.Exec(t, "DROP TABLE bank2")
db.Exec(t, fmt.Sprintf(`IMPORT INTO bank2 CSV DATA ($1) WITH delimiter = '|'%s`, nullIf), exportedFiles)

db.CheckQueryResults(t,
@@ -140,7 +141,6 @@
`SELECT fingerprint FROM [SHOW EXPERIMENTAL_FINGERPRINTS FROM TABLE bank2]`,
db.QueryStr(t, `SELECT fingerprint FROM [SHOW EXPERIMENTAL_FINGERPRINTS FROM TABLE bank]`),
)
db.Exec(t, "DROP TABLE bank2")
})
}
}
