Commit f56cad8

Cloud build test only
1 parent 79f2585 commit f56cad8

2 files changed: +95 lines, -93 lines

cloudbuild/cloudbuild.yaml

Lines changed: 93 additions & 93 deletions
@@ -4,100 +4,100 @@ steps:
   id: 'docker-build'
   args: ['build', '--tag=gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit', '-f', 'cloudbuild/Dockerfile', '.']
 
-# 2. Fetch maven and dependencies
+# 2. Fetch maven and dependencies
 - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
   id: 'init'
   waitFor: ['docker-build']
   entrypoint: 'bash'
   args: ['/workspace/cloudbuild/presubmit.sh', 'init']
   env:
-  - 'CODECOV_TOKEN=${_CODECOV_TOKEN}'
-
-# 3. Run unit tests
-- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-  id: 'unit-tests'
-  waitFor: ['init']
-  entrypoint: 'bash'
-  args: ['/workspace/cloudbuild/presubmit.sh', 'unittest']
-  env:
-  - 'CODECOV_TOKEN=${_CODECOV_TOKEN}'
-
-# 4a. Run integration tests concurrently with unit tests (DSv1, Scala 2.12)
-- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-  id: 'integration-tests-2.12'
-  waitFor: ['unit-tests']
-  entrypoint: 'bash'
-  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-2.12']
-  env:
-  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
-  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
-  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
-  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
-
-# 4b. Run integration tests concurrently with unit tests (DSv1, Scala 2.13)
-- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-  id: 'integration-tests-2.13'
-  waitFor: ['unit-tests']
-  entrypoint: 'bash'
-  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-2.13']
-  env:
-  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
-  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
-  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
-  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
-
-# 4c. Run integration tests concurrently with unit tests (DSv2, Spark 3.1)
-- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-  id: 'integration-tests-3.1'
-  waitFor: ['integration-tests-2.12']
-  entrypoint: 'bash'
-  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.1']
-  env:
-  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
-  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
-  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
-  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
-
-# 4d. Run integration tests concurrently with unit tests (DSv2, Spark 3.2)
-- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-  id: 'integration-tests-3.2'
-  waitFor: ['integration-tests-2.13']
-  entrypoint: 'bash'
-  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.2']
-  env:
-  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
-  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
-  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
-  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
-
-# 4e. Run integration tests concurrently with unit tests (DSv2, Spark 3.3)
-- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-  id: 'integration-tests-3.3'
-  waitFor: ['integration-tests-3.1']
-  entrypoint: 'bash'
-  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.3']
-  env:
-  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
-  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
-  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
-  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
+  - 'CODECOV_TOKEN=${_CODECOV_TOKEN}'
 
-# 4f. Run integration tests concurrently with unit tests (DSv2, Spark 3.4)
-- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-  id: 'integration-tests-3.4'
-  waitFor: ['integration-tests-3.2']
-  entrypoint: 'bash'
-  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.4']
-  env:
-  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
-  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
-  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
-  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
+## 3. Run unit tests
+# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+#   id: 'unit-tests'
+#   waitFor: ['init']
+#   entrypoint: 'bash'
+#   args: ['/workspace/cloudbuild/presubmit.sh', 'unittest']
+#   env:
+#   - 'CODECOV_TOKEN=${_CODECOV_TOKEN}'
+#
+## 4a. Run integration tests concurrently with unit tests (DSv1, Scala 2.12)
+# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+#   id: 'integration-tests-2.12'
+#   waitFor: ['unit-tests']
+#   entrypoint: 'bash'
+#   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-2.12']
+#   env:
+#   - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
+#   - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
+#   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
+#   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
+#
+## 4b. Run integration tests concurrently with unit tests (DSv1, Scala 2.13)
+# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+#   id: 'integration-tests-2.13'
+#   waitFor: ['unit-tests']
+#   entrypoint: 'bash'
+#   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-2.13']
+#   env:
+#   - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
+#   - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
+#   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
+#   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
+#
+## 4c. Run integration tests concurrently with unit tests (DSv2, Spark 3.1)
+# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+#   id: 'integration-tests-3.1'
+#   waitFor: ['integration-tests-2.12']
+#   entrypoint: 'bash'
+#   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.1']
+#   env:
+#   - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
+#   - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
+#   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
+#   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
+#
+## 4d. Run integration tests concurrently with unit tests (DSv2, Spark 3.2)
+# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+#   id: 'integration-tests-3.2'
+#   waitFor: ['integration-tests-2.13']
+#   entrypoint: 'bash'
+#   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.2']
+#   env:
+#   - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
+#   - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
+#   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
+#   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
+#
+## 4e. Run integration tests concurrently with unit tests (DSv2, Spark 3.3)
+# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+#   id: 'integration-tests-3.3'
+#   waitFor: ['integration-tests-3.1']
+#   entrypoint: 'bash'
+#   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.3']
+#   env:
+#   - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
+#   - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
+#   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
+#   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
+#
+## 4f. Run integration tests concurrently with unit tests (DSv2, Spark 3.4)
+# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+#   id: 'integration-tests-3.4'
+#   waitFor: ['integration-tests-3.2']
+#   entrypoint: 'bash'
+#   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.4']
+#   env:
+#   - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
+#   - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
+#   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
+#   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
 
 # 4g. Run integration tests concurrently with unit tests (DSv2, Spark 3.5)
 - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
   id: 'integration-tests-3.5'
-  waitFor: ['integration-tests-3.3']
+  waitFor: ['init'] # <-- FIXED
   entrypoint: 'bash'
   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.5']
   env:
@@ -109,7 +109,7 @@ steps:
 # 4h. Run integration tests concurrently with unit tests (DSv2, Spark 3.5)
 - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
   id: 'integration-tests-4.0'
-  waitFor: ['integration-tests-3.4']
+  waitFor: ['init'] # <-- FIXED
   entrypoint: 'bash'
   args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-4.0']
   env:
@@ -118,18 +118,18 @@ steps:
   - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
   - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'
 
-# 5. Upload coverage to CodeCov
-- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
-  id: 'upload-it-to-codecov'
-  waitFor: ['integration-tests-2.12','integration-tests-2.13','integration-tests-3.1','integration-tests-3.2','integration-tests-3.3', 'integration-tests-3.4', 'integration-tests-3.5', 'integration-tests-4.0']
-  entrypoint: 'bash'
-  args: ['/workspace/cloudbuild/presubmit.sh', 'upload-it-to-codecov']
-  env:
-  - 'CODECOV_TOKEN=${_CODECOV_TOKEN}'
+# # 5. Upload coverage to CodeCov
+# - name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
+#   id: 'upload-it-to-codecov'
+#   waitFor: ['integration-tests-2.12','integration-tests-2.13','integration-tests-3.1','integration-tests-3.2','integration-tests-3.3', 'integration-tests-3.4', 'integration-tests-3.5', 'integration-tests-4.0']
+#   entrypoint: 'bash'
+#   args: ['/workspace/cloudbuild/presubmit.sh', 'upload-it-to-codecov']
+#   env:
+#   - 'CODECOV_TOKEN=${_CODECOV_TOKEN}'
 
 
 # Tests take around 1 hr 15 mins in general.
 timeout: 9000s
 
 options:
-  machineType: 'E2_HIGHCPU_32'
+  machineType: 'E2_HIGHCPU_32'
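
Net effect of this commit on cloudbuild/cloudbuild.yaml: only the docker-build, init, integration-tests-3.5, and integration-tests-4.0 steps remain active, and the two surviving test steps now wait on 'init' directly, so they run in parallel once dependencies are fetched. Below is a minimal sketch of the resulting pipeline; the builder image on the docker-build step and the env lists of the two test steps are assumptions inferred from the surrounding context, not shown verbatim in this diff.

# Sketch only: reconstructed from the diff above, not the committed file itself.
steps:
# 1. Build the presubmit image (the 'name' here is an assumed standard builder;
#    the diff only shows this step's id and args).
- name: 'gcr.io/cloud-builders/docker'
  id: 'docker-build'
  args: ['build', '--tag=gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit', '-f', 'cloudbuild/Dockerfile', '.']

# 2. Fetch maven and dependencies
- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
  id: 'init'
  waitFor: ['docker-build']
  entrypoint: 'bash'
  args: ['/workspace/cloudbuild/presubmit.sh', 'init']
  env:
  - 'CODECOV_TOKEN=${_CODECOV_TOKEN}'

# 4g/4h. The two remaining integration-test steps hang off 'init' and run in parallel.
- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
  id: 'integration-tests-3.5'
  waitFor: ['init']
  entrypoint: 'bash'
  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-3.5']
  env:  # assumed to mirror the other integration steps
  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'

- name: 'gcr.io/$PROJECT_ID/dataproc-spark-bigquery-connector-presubmit'
  id: 'integration-tests-4.0'
  waitFor: ['init']
  entrypoint: 'bash'
  args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest-4.0']
  env:  # assumed to mirror the other integration steps
  - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
  - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
  - 'BIGLAKE_CONNECTION_ID=${_BIGLAKE_CONNECTION_ID}'
  - 'BIGQUERY_KMS_KEY_NAME=${_BIGQUERY_KMS_KEY_NAME}'

# Tests take around 1 hr 15 mins in general.
timeout: 9000s
options:
  machineType: 'E2_HIGHCPU_32'

With the _GOOGLE_CLOUD_PROJECT, _TEMPORARY_GCS_BUCKET, and related substitutions defined on the trigger, this trimmed pipeline could be exercised with something like `gcloud builds submit --config cloudbuild/cloudbuild.yaml .`.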

spark-bigquery-connector-common/src/test/java/com/google/cloud/spark/bigquery/integration/CatalogIntegrationTestBase.java

Lines changed: 2 additions & 0 deletions
@@ -259,6 +259,8 @@ public void testCatalogInitializationWithProject() {
     List<String> databaseNames =
         rows.stream().map(row -> row.getString(0)).collect(Collectors.toList());
     assertThat(databaseNames).contains("samples");
+    System.out.println(databaseNames);
+    spark.sql("SHOW TABLES IN public_catalog.samples").show();
     List<Row> data =
         spark.sql("SELECT * FROM public_catalog.samples.shakespeare LIMIT 10").collectAsList();
     assertThat(data).hasSize(10);
