
Commit 3171ef2

Merge pull request #1385 from cloudsufi/BigTableE2E
Bigtable E2E coverage as per ITN class.
2 parents e64d5e8 + 2b8165e

11 files changed: 509 additions & 3 deletions

.github/workflows/e2e.yml

Lines changed: 1 addition & 1 deletion
@@ -40,7 +40,7 @@ jobs:
       )
     strategy:
       matrix:
-        tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute, gcscopy, datastore]
+        tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute, gcscopy, datastore, bigtable]
       fail-fast: false
     steps:
       # Pinned 1.0.0 version

pom.xml

Lines changed: 6 additions & 0 deletions
@@ -854,6 +854,12 @@
       <version>0.2.0</version>
     </dependency>
     <!-- End: dependency used by the Dataplex connector -->
+    <!-- https://mvnrepository.com/artifact/com.google.cloud/google-cloud-bigtable -->
+    <dependency>
+      <groupId>com.google.cloud</groupId>
+      <artifactId>google-cloud-bigtable</artifactId>
+      <version>1.17.1</version>
+    </dependency>
   </dependencies>
 
   <build>
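
For orientation, google-cloud-bigtable is the native Java client for Cloud Bigtable. A minimal sketch of reading a row with it, assuming hypothetical project, instance, and table IDs (note that the step definitions added later in this commit go through the HBase-compatible Table API instead):

import com.google.cloud.bigtable.data.v2.BigtableDataClient;
import com.google.cloud.bigtable.data.v2.models.Row;
import com.google.cloud.bigtable.data.v2.models.RowCell;

public class BigtableReadSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder IDs; substitute a real project, instance, and table.
    try (BigtableDataClient client = BigtableDataClient.create("my-project", "my-instance")) {
      Row row = client.readRow("my-table", "r1"); // returns null if the row does not exist
      if (row != null) {
        for (RowCell cell : row.getCells()) {
          System.out.println(cell.getFamily() + ":" + cell.getQualifier().toStringUtf8()
              + " = " + cell.getValue().toStringUtf8());
        }
      }
    }
  }
}
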
Lines changed: 125 additions & 0 deletions
@@ -0,0 +1,125 @@
# Copyright © 2024 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

@BigTable @BIGTABLE_SOURCE_TEST
Feature: BigTable source - Verification of BigTable to BigTable Successful Data Transfer

  @BIGTABLE_SINK_TEST
  Scenario: To verify data is getting transferred from BigTable source table to BigTable sink table
    Given Open Datafusion Project to configure pipeline
    When Select plugin: "Bigtable" from the plugins list as: "Source"
    When Expand Plugin group in the LHS plugins list: "Sink"
    When Select plugin: "Bigtable" from the plugins list as: "Sink"
    Then Connect plugins: "Bigtable" and "Bigtable2" to establish connection
    Then Navigate to the properties page of plugin: "Bigtable"
    Then Enter input plugin property: "referenceName" with value: "CBTSourceReferenceName"
    Then Replace input plugin property: "project" with value: "projectId"
    Then Enter input plugin property: "instance" with value: "bigtableInstance"
    Then Enter input plugin property: "table" with value: "bigtableSourceTable"
    Then Replace input plugin property: "keyAlias" with value: "id"
    Then Enter key value pairs for plugin property: "columnMappings" with values from json: "cbtsourceMappings"
    Then Select Macro action of output schema property: "outputSchemaMacroInput" and set the value to "macroKeyString"
    Then Validate "Bigtable" plugin properties
    Then Close the Plugin Properties page
    Then Navigate to the properties page of plugin: "Bigtable2"
    Then Enter input plugin property: "referenceName" with value: "CBTSinkReferenceName"
    Then Replace input plugin property: "project" with value: "projectId"
    Then Enter input plugin property: "instance" with value: "bigtableTargetInstance"
    Then Enter input plugin property: "table" with value: "bigtableTargetTable"
    Then Replace input plugin property: "keyAlias" with value: "id"
    Then Enter key value pairs for plugin property: "columnMappings" with values from json: "cbtsinkMappings"
    Then Validate "Bigtable" plugin properties
    Then Close the Plugin Properties page
    Then Save the pipeline
    Then Deploy the pipeline
    Then Run the Pipeline in Runtime
    Then Enter runtime argument value "cbtSourceOutputSchema" for key "macroKeyString"
    Then Run the Pipeline in Runtime with runtime arguments
    Then Wait till pipeline is in running state
    Then Open and capture logs
    Then Verify the pipeline status is "Succeeded"
    Then Validate OUT record count is equal to IN record count
    Then Validate data transferred to target bigtable table with data of source bigtable table

  @EXISTING_BIGTABLE_SINK
  Scenario: To verify data is getting transferred from BigTable source table to existing BigTable sink
    Given Open Datafusion Project to configure pipeline
    When Select plugin: "Bigtable" from the plugins list as: "Source"
    When Expand Plugin group in the LHS plugins list: "Sink"
    When Select plugin: "Bigtable" from the plugins list as: "Sink"
    Then Connect plugins: "Bigtable" and "Bigtable2" to establish connection
    Then Navigate to the properties page of plugin: "Bigtable"
    Then Enter input plugin property: "referenceName" with value: "CBTSourceReferenceName"
    Then Replace input plugin property: "project" with value: "projectId"
    Then Enter input plugin property: "instance" with value: "bigtableInstance"
    Then Enter input plugin property: "table" with value: "bigtableSourceTable"
    Then Replace input plugin property: "keyAlias" with value: "id"
    Then Enter key value pairs for plugin property: "columnMappings" with values from json: "cbtsourceMappings"
    Then Select Macro action of output schema property: "outputSchemaMacroInput" and set the value to "macroKeyString"
    Then Validate "Bigtable" plugin properties
    Then Close the Plugin Properties page
    Then Navigate to the properties page of plugin: "Bigtable2"
    Then Enter input plugin property: "referenceName" with value: "CBTSinkReferenceName"
    Then Replace input plugin property: "project" with value: "projectId"
    Then Enter input plugin property: "instance" with value: "bigtableTargetInstance"
    Then Enter input plugin property: "table" with value: "bigtableTargetExistingTable"
    Then Replace input plugin property: "keyAlias" with value: "id"
    Then Enter key value pairs for plugin property: "columnMappings" with values from json: "cbtsinkMappings"
    Then Validate "Bigtable" plugin properties
    Then Close the Plugin Properties page
    Then Save the pipeline
    Then Deploy the pipeline
    Then Run the Pipeline in Runtime
    Then Enter runtime argument value "cbtSourceOutputSchema" for key "macroKeyString"
    Then Run the Pipeline in Runtime with runtime arguments
    Then Wait till pipeline is in running state
    Then Open and capture logs
    Then Verify the pipeline status is "Succeeded"
    Then Validate OUT record count is equal to IN record count
    Then Validate data transferred to existing target bigtable table with data of source bigtable table

  @BIGTABLE_SINK_TEST
  Scenario: To verify data is getting transferred from unvalidated BigTable source table to BigTable sink table
    Given Open Datafusion Project to configure pipeline
    When Select plugin: "Bigtable" from the plugins list as: "Source"
    When Expand Plugin group in the LHS plugins list: "Sink"
    When Select plugin: "Bigtable" from the plugins list as: "Sink"
    Then Connect plugins: "Bigtable" and "Bigtable2" to establish connection
    Then Navigate to the properties page of plugin: "Bigtable"
    Then Enter input plugin property: "referenceName" with value: "CBTSourceReferenceName"
    Then Replace input plugin property: "project" with value: "projectId"
    Then Enter input plugin property: "instance" with value: "bigtableInstance"
    Then Enter input plugin property: "table" with value: "bigtableSourceTable"
    Then Replace input plugin property: "keyAlias" with value: "id"
    Then Enter key value pairs for plugin property: "columnMappings" with values from json: "cbtsourceMappings"
    Then Select Macro action of output schema property: "outputSchemaMacroInput" and set the value to "macroKeyString"
    Then Close the Plugin Properties page
    Then Navigate to the properties page of plugin: "Bigtable2"
    Then Enter input plugin property: "referenceName" with value: "CBTSinkReferenceName"
    Then Replace input plugin property: "project" with value: "projectId"
    Then Enter input plugin property: "instance" with value: "bigtableTargetInstance"
    Then Enter input plugin property: "table" with value: "bigtableTargetTable"
    Then Replace input plugin property: "keyAlias" with value: "id"
    Then Enter key value pairs for plugin property: "columnMappings" with values from json: "cbtsinkMappings"
    Then Validate "Bigtable" plugin properties
    Then Close the Plugin Properties page
    Then Save the pipeline
    Then Deploy the pipeline
    Then Run the Pipeline in Runtime
    Then Enter runtime argument value "cbtSourceOutputSchema" for key "macroKeyString"
    Then Run the Pipeline in Runtime with runtime arguments
    Then Wait till pipeline is in running state
    Then Open and capture logs
    Then Verify the pipeline status is "Succeeded"
    Then Validate OUT record count is equal to IN record count
    Then Validate data transferred to target bigtable table with data of source bigtable table
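
These scenarios read from a source table that the test setup hooks populate; those hooks are among the changed files not captured in this extraction. A hedged sketch of what seeding row "r1" could look like through the HBase-compatible API: seedSourceRow is a hypothetical helper name, and the column families and values simply mirror the assertions in BigtableBase further down.

import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class BigtableSeedSketch {
  // Hypothetical helper: writes the single row that the validation steps later read back.
  static void seedSourceRow(Table table) throws IOException {
    Put put = new Put(Bytes.toBytes("r1"));
    put.addColumn(Bytes.toBytes("cf1"), Bytes.toBytes("boolean_column"), Bytes.toBytes(true));
    put.addColumn(Bytes.toBytes("cf2"), Bytes.toBytes("bytes_column"), Bytes.toBytes("bytes"));
    put.addColumn(Bytes.toBytes("cf1"), Bytes.toBytes("double_column"), Bytes.toBytes(10.5D));
    put.addColumn(Bytes.toBytes("cf2"), Bytes.toBytes("float_column"), Bytes.toBytes(10.5F));
    put.addColumn(Bytes.toBytes("cf1"), Bytes.toBytes("int_column"), Bytes.toBytes(1));
    put.addColumn(Bytes.toBytes("cf2"), Bytes.toBytes("long_column"), Bytes.toBytes(10L));
    put.addColumn(Bytes.toBytes("cf1"), Bytes.toBytes("string_column"), Bytes.toBytes("string"));
    table.put(put);
  }
}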

src/e2e-test/features/spanner/source/SpannerToSpanner_withConnections.feature

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 @Spanner_Source @SPANNER_TEST
-Feature: Spanner source - Verification of Spanner to Spanner successful data transfer using connections
+Feature: Spanner source - Verification Of Spanner to Spanner successful data transfer using connections
 
   @SPANNER_SOURCE_BASIC_TEST @SPANNER_SINK_TEST @SPANNER_CONNECTION @Spanner_Source_Required
   Scenario: To verify data transfer from Spanner to Spanner with pipeline connection created from wrangler
Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
/*
 * Copyright © 2024 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package io.cdap.plugin.bigtable.runners;

import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
import org.junit.runner.RunWith;

/**
 * Test Runner to execute Bigtable testcases.
 */
@RunWith(Cucumber.class)
@CucumberOptions(
  features = {"src/e2e-test/features"},
  glue = {"io.cdap.plugin.bigtable.stepsdesign", "io.cdap.plugin.common.stepsdesign", "stepsdesign"},
  tags = {"@BigTable"},
  monochrome = true,
  plugin = {"pretty", "html:target/cucumber-html-report/bigtable",
    "json:target/cucumber-reports/cucumber-bigtable.json",
    "junit:target/cucumber-reports/cucumber-bigtable.xml"}
)
public class TestRunner {
}

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
/**
 * Package contains the test runner for the Cloud BigTable features.
 */
package io.cdap.plugin.bigtable.runners;
Lines changed: 68 additions & 0 deletions
@@ -0,0 +1,68 @@
/*
 * Copyright © 2024 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package io.cdap.plugin.bigtable.stepsdesign;

import io.cdap.e2e.utils.PluginPropertyUtils;
import io.cdap.plugin.common.stepsdesign.TestSetupHooks;
import io.cdap.plugin.utils.BigTableClient;
import io.cucumber.java.en.Then;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;

import java.io.IOException;

/**
 * BigTable plugin related common test step definitions.
 */
public class BigtableBase {

  @Then("Validate data transferred to target bigtable table with data of source bigtable table")
  public void validateDataTransferToTargetBigtableTable() throws IOException {
    Table sinkTable = BigTableClient.getTable(TestSetupHooks.bigTableConnection,
                                              PluginPropertyUtils.pluginProp("bigtableTargetTable"));
    validateData(sinkTable);
  }

  @Then("Validate data transferred to existing target bigtable table with data of source bigtable table")
  public void validateDataTransferToTargetExistingBigtableTable() throws IOException {
    Table existingSinkTable = BigTableClient.getTable(TestSetupHooks.bigTableExistingTargetTableConnection,
                                                      PluginPropertyUtils.pluginProp("bigtableTargetExistingTable"));
    validateData(existingSinkTable);
  }

  public static void validateData(Table tableToBeValidated) throws IOException {
    Result result = tableToBeValidated.get(new Get(Bytes.toBytes("r1")));
    Assert.assertTrue(Bytes.toBoolean(result.getValue(Bytes.toBytes("cf1"), Bytes.toBytes("boolean_column"))));
    Assert.assertEquals("bytes",
                        Bytes.toString(result.getValue(Bytes.toBytes("cf2"), Bytes.toBytes("bytes_column"))));
    Assert.assertEquals(10.5D,
                        Bytes.toDouble(result.getValue(Bytes.toBytes("cf1"), Bytes.toBytes("double_column"))),
                        0.0000001);
    Assert.assertEquals(10.5F,
                        Bytes.toFloat(result.getValue(Bytes.toBytes("cf2"), Bytes.toBytes("float_column"))),
                        0.0000001);
    Assert.assertEquals(1,
                        Bytes.toInt(result.getValue(Bytes.toBytes("cf1"), Bytes.toBytes("int_column"))));
    Assert.assertEquals(10L,
                        Bytes.toLong(result.getValue(Bytes.toBytes("cf2"), Bytes.toBytes("long_column"))));
    Assert.assertEquals("string",
                        Bytes.toString(result.getValue(Bytes.toBytes("cf1"), Bytes.toBytes("string_column"))));
  }
}
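
io.cdap.plugin.utils.BigTableClient is referenced above but is one of the changed files not captured in this extraction. A minimal sketch of the shape such a helper would need, assuming TestSetupHooks.bigTableConnection is an HBase-API org.apache.hadoop.hbase.client.Connection (for Bigtable, one way to open such a connection is BigtableConfiguration.connect from the bigtable-hbase adapter):

import java.io.IOException;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Table;

public class BigTableClientSketch {
  // Hypothetical stand-in for BigTableClient.getTable: resolves a Table handle
  // from an already-open HBase-compatible connection.
  public static Table getTable(Connection connection, String tableName) throws IOException {
    return connection.getTable(TableName.valueOf(tableName));
  }
}
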
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
/**
 * Package contains the base for the Cloud BigTable features.
 */
package io.cdap.plugin.bigtable.stepsdesign;
