From 2f6ad0f40e834d1c9bf269b80909c4c10c561ee3 Mon Sep 17 00:00:00 2001 From: Praveena2607 Date: Wed, 11 Dec 2024 07:03:01 +0000 Subject: [PATCH 1/7] Additional Steps for BQ. --- .../source/BigQuerySourceError.feature | 22 ++ .../source/BigQueryToBigQuery.feature | 31 +++ .../source/BigQueryToGCS_WithMacro.feature | 219 ++++++++++++++++++ .../bigquery/stepsdesign/BigQueryBase.java | 134 ++++++++++- .../cdap/plugin/utils/E2ETestConstants.java | 5 + .../resources/errorMessage.properties | 5 +- .../resources/pluginParameters.properties | 9 + 7 files changed, 419 insertions(+), 6 deletions(-) diff --git a/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature b/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature index eb475837ee..b88ac85944 100644 --- a/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature +++ b/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature @@ -55,3 +55,25 @@ Feature: BigQuery source - Validate BigQuery source plugin error scenarios Then Enter BigQuery source property table name Then Enter BigQuery property temporary bucket name "bqInvalidTemporaryBucket" Then Verify the BigQuery validation error message for invalid property "bucket" + + @BQ_SOURCE_TEST + Scenario Outline:To verify error message when unsupported format is provided in Partition Start date and Partition end Date + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Replace input plugin property: "dataset" with value: "dataset" + Then Replace input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Enter BigQuery source properties partitionFrom and partitionTo + Then Validate BigQuery source incorrect property error for Partition Start date "" value "" + Then Validate BigQuery source incorrect property error for Partition End date "" value "" + Then Enter BigQuery source properties referenceName + Then Validate BigQuery source incorrect property error for reference name"" value "" + Then Enter BigQuery source properties filter + Examples: + | property | value | + | partitionFrom | bqIncorrectFormatStartDate | + | partitionTo | bqIncorrectFormatEndDate | + | referenceName | bqIncorrectReferenceName | diff --git a/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature b/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature index 299a48125b..31568109b5 100644 --- a/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature +++ b/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature @@ -354,3 +354,34 @@ Feature: BigQuery source - Verification of BigQuery to BigQuery successful data Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Validate the values of records transferred to BQ sink is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @BQ_SINK_TEST + Scenario:Validate that pipeline run gets failed when incorrect filter values and verify the log error message + Given Open Datafusion Project to configure pipeline + When Source is BigQuery + When Sink is BigQuery + Then Open BigQuery source properties + Then Enter BigQuery property reference name + Then Enter BigQuery property projectId "projectId" + Then Enter BigQuery property datasetProjectId "projectId" + Then 
Override Service account details if set in environment variables + Then Enter BigQuery property dataset "dataset" + Then Enter BigQuery source property table name + Then Enter input plugin property: "filter" with value: "incorrectFilter" + Then Validate output schema with expectedSchema "bqSourceSchema" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Open BigQuery sink properties + Then Override Service account details if set in environment variables + Then Enter the BigQuery sink mandatory properties + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Connect source as "BigQuery" and sink as "BigQuery" to establish connection + Then Save the pipeline + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Verify the pipeline status is "Failed" + Then Open Pipeline logs and verify Log entries having below listed Level and Message: + | Level | Message | + | ERROR | errorLogsMessageInvalidFilter | diff --git a/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature b/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature index 363fb7535e..ae5bcba216 100644 --- a/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature +++ b/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature @@ -69,3 +69,222 @@ Feature: BigQuery source - Verification of BigQuery to GCS successful data trans Then Verify the pipeline status is "Succeeded" Then Verify data is transferred to target GCS bucket Then Validate the cmek key "cmekGCS" of target GCS bucket if cmek is enabled + + @CMEK @BQ_SOURCE_TEST @GCS_SINK_TEST + Scenario:Validate successful records transfer from BigQuery to GCS with macro arguments for partition start date and partition end date + Given Open Datafusion Project to configure pipeline + When Source is BigQuery + When Sink is GCS + Then Open BigQuery source properties + Then Enter BigQuery property reference name + Then Enter BigQuery property "projectId" as macro argument "bqProjectId" + Then Enter BigQuery property "datasetProjectId" as macro argument "bqDatasetProjectId" + Then Enter BigQuery property "partitionFrom" as macro argument "bqStartDate" + Then Enter BigQuery property "partitionTo" as macro argument "bqEndDate" + Then Enter BigQuery property "serviceAccountType" as macro argument "serviceAccountType" + Then Enter BigQuery property "serviceAccountFilePath" as macro argument "serviceAccount" + Then Enter BigQuery property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter BigQuery property "dataset" as macro argument "bqDataset" + Then Enter BigQuery property "table" as macro argument "bqSourceTable" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Open GCS sink properties + Then Enter GCS property reference name + Then Enter GCS property "projectId" as macro argument "gcsProjectId" + Then Enter GCS property "serviceAccountType" as macro argument "serviceAccountType" + Then Enter GCS property "serviceAccountFilePath" as macro argument "serviceAccount" + Then Enter GCS property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter GCS property "path" as macro argument "gcsSinkPath" + Then Enter GCS sink property "pathSuffix" as macro argument "gcsPathSuffix" + Then Enter GCS property "format" as macro argument "gcsFormat" + Then Enter GCS sink cmek property "encryptionKeyName" as macro argument "cmekGCS" if cmek is enabled + 
Then Validate "GCS" plugin properties + Then Close the GCS properties + Then Connect source as "BigQuery" and sink as "GCS" to establish connection + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "partitionFrom" for key "bqStartDate" + Then Enter runtime argument value "partitionTo" for key "bqEndDate" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value "serviceAccount" for key "serviceAccount" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" + Then Enter runtime argument value "projectId" for key "gcsProjectId" + Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" + Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix" + Then Enter runtime argument value "csvFormat" for key "gcsFormat" + Then Enter runtime argument value "cmekGCS" for GCS cmek property key "cmekGCS" if GCS cmek is enabled + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Click on preview data for GCS sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "partitionFrom" for key "bqStartDate" + Then Enter runtime argument value "partitionTo" for key "bqEndDate" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value "serviceAccount" for key "serviceAccount" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" + Then Enter runtime argument value "projectId" for key "gcsProjectId" + Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" + Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix" + Then Enter runtime argument value "csvFormat" for key "gcsFormat" + Then Enter runtime argument value "cmekGCS" for GCS cmek property key "cmekGCS" if GCS cmek is enabled + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Verify data is transferred to target GCS bucket + Then Validate the cmek key "cmekGCS" of target GCS bucket if cmek is enabled + + @CMEK @BQ_SOURCE_TEST @GCS_SINK_TEST + Scenario:Validate successful records transfer from BigQuery to GCS with macro arguments for filter and outputschema + Given Open Datafusion Project to configure pipeline + When Source is BigQuery + When Sink is GCS + Then Open BigQuery source properties + Then Enter BigQuery property reference name + Then Enter BigQuery property "projectId" as macro argument "bqProjectId" + Then Enter BigQuery property "datasetProjectId" as macro argument "bqDatasetProjectId" + Then Enter BigQuery property "filter" as macro argument "bqFilter" + Then Enter BigQuery 
property "serviceAccountType" as macro argument "serviceAccountType" + Then Enter BigQuery property "serviceAccountFilePath" as macro argument "serviceAccount" + Then Enter BigQuery property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter BigQuery property "dataset" as macro argument "bqDataset" + Then Enter BigQuery property "table" as macro argument "bqSourceTable" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Open GCS sink properties + Then Enter GCS property reference name + Then Enter GCS property "projectId" as macro argument "gcsProjectId" + Then Enter GCS property "serviceAccountType" as macro argument "serviceAccountType" + Then Enter GCS property "serviceAccountFilePath" as macro argument "serviceAccount" + Then Enter GCS property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter GCS property "path" as macro argument "gcsSinkPath" + Then Enter GCS sink property "pathSuffix" as macro argument "gcsPathSuffix" + Then Enter GCS property "format" as macro argument "gcsFormat" + Then Enter GCS sink cmek property "encryptionKeyName" as macro argument "cmekGCS" if cmek is enabled + Then Validate "GCS" plugin properties + Then Close the GCS properties + Then Connect source as "BigQuery" and sink as "GCS" to establish connection + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "filter" for key "bqFilter" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value "serviceAccount" for key "serviceAccount" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" + Then Enter runtime argument value "projectId" for key "gcsProjectId" + Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" + Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix" + Then Enter runtime argument value "csvFormat" for key "gcsFormat" + Then Enter runtime argument value "cmekGCS" for GCS cmek property key "cmekGCS" if GCS cmek is enabled + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Click on preview data for GCS sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "filter" for key "bqFilter" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value "serviceAccount" for key "serviceAccount" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" + Then Enter runtime argument value "projectId" for key "gcsProjectId" + Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" + Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix" + Then Enter runtime argument value "csvFormat" for key "gcsFormat" + 
Then Enter runtime argument value "cmekGCS" for GCS cmek property key "cmekGCS" if GCS cmek is enabled + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Verify data is transferred to target GCS bucket + Then Validate the cmek key "cmekGCS" of target GCS bucket if cmek is enabled + + @CMEK @BQ_SOURCE_TEST @GCS_SINK_TEST + Scenario:Validate successful records transfer from BigQuery to GCS with macro arguments for output schema + Given Open Datafusion Project to configure pipeline + When Source is BigQuery + When Sink is GCS + Then Open BigQuery source properties + Then Enter BigQuery property reference name + Then Enter BigQuery property "projectId" as macro argument "bqProjectId" + Then Enter BigQuery property "datasetProjectId" as macro argument "bqDatasetProjectId" + Then Enter BigQuery property "serviceAccountType" as macro argument "serviceAccountType" + Then Enter BigQuery property "serviceAccountFilePath" as macro argument "serviceAccount" + Then Enter BigQuery property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter BigQuery property "dataset" as macro argument "bqDataset" + Then Enter BigQuery property "table" as macro argument "bqSourceTable" + Then Enter BigQuery source property output schema "outputSchema" as macro argument "bqOutputSchema" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Open GCS sink properties + Then Enter GCS property reference name + Then Enter GCS property "projectId" as macro argument "gcsProjectId" + Then Enter GCS property "serviceAccountType" as macro argument "serviceAccountType" + Then Enter GCS property "serviceAccountFilePath" as macro argument "serviceAccount" + Then Enter GCS property "serviceAccountJSON" as macro argument "serviceAccount" + Then Enter GCS property "path" as macro argument "gcsSinkPath" + Then Enter GCS sink property "pathSuffix" as macro argument "gcsPathSuffix" + Then Enter GCS property "format" as macro argument "gcsFormat" + Then Enter GCS sink cmek property "encryptionKeyName" as macro argument "cmekGCS" if cmek is enabled + Then Validate "GCS" plugin properties + Then Close the GCS properties + Then Connect source as "BigQuery" and sink as "GCS" to establish connection + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value "serviceAccount" for key "serviceAccount" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" + Then Enter runtime argument value "OutputSchema" for key "bqOutputSchema" + Then Enter runtime argument value "projectId" for key "gcsProjectId" + Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" + Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix" + Then Enter runtime argument value "csvFormat" for key "gcsFormat" + Then Enter runtime argument value "cmekGCS" for GCS cmek property key "cmekGCS" if GCS cmek is enabled + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the 
preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Click on preview data for GCS sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" + Then Enter runtime argument value "serviceAccount" for key "serviceAccount" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" + Then Enter runtime argument value "OutputSchema" for key "bqOutputSchema" + Then Enter runtime argument value "projectId" for key "gcsProjectId" + Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" + Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix" + Then Enter runtime argument value "csvFormat" for key "gcsFormat" + Then Enter runtime argument value "cmekGCS" for GCS cmek property key "cmekGCS" if GCS cmek is enabled + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Verify data is transferred to target GCS bucket + Then Validate the cmek key "cmekGCS" of target GCS bucket if cmek is enabled diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java index d4ae865c8a..86417a44c6 100644 --- a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java +++ b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java @@ -17,17 +17,16 @@ import io.cdap.e2e.pages.actions.CdfBigQueryPropertiesActions; import io.cdap.e2e.pages.actions.CdfStudioActions; +import io.cdap.e2e.pages.locators.CdfBigQueryPropertiesLocators; import io.cdap.e2e.pages.locators.CdfStudioLocators; -import io.cdap.e2e.utils.BigQueryClient; -import io.cdap.e2e.utils.ConstantsUtil; -import io.cdap.e2e.utils.ElementHelper; -import io.cdap.e2e.utils.PluginPropertyUtils; -import io.cdap.e2e.utils.SeleniumHelper; +import io.cdap.e2e.utils.*; import io.cdap.plugin.common.stepsdesign.TestSetupHooks; +import io.cdap.plugin.utils.CdfPluginPropertyLocator; import io.cdap.plugin.utils.E2EHelper; import io.cdap.plugin.utils.E2ETestConstants; import io.cucumber.java.en.Then; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.time.DateUtils; import org.junit.Assert; import stepsdesign.BeforeActions; @@ -260,4 +259,129 @@ public void validateRecordsTransferredToTargetTableIsEqualToNumberOfRecordsFromS BeforeActions.scenario.write("Number of records transferred from source table to target table:" + count); Assert.assertEquals(count, countRecordsTarget); } + + @Then("Enter BigQuery source properties partitionFrom and partitionTo") + public void enterBigQuerySourcePropertiespartitionFromandpartitionTo() throws IOException { + CdfBigQueryPropertiesActions.enterPartitionStartDate(new SimpleDateFormat("dd-MM-yyyy").format(new Date())); + CdfBigQueryPropertiesActions.enterPartitionEndDate(new SimpleDateFormat("dd-MM-yyyy") + .format(DateUtils.addDays(new Date(), 1))); + } + @Then("Validate BigQuery source incorrect property error for Partition Start date {string} value {string}") + public void 
validateBigQuerySourceIncorrectErrorFor(String property, String value) { + CdfBigQueryPropertiesActions.getSchema(); + + + SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 5L); + String tableFullName = StringUtils.EMPTY; + if (property.equalsIgnoreCase("dataset")) { + tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + PluginPropertyUtils.pluginProp(value) + + "." + TestSetupHooks.bqSourceTable; + } else if (property.equalsIgnoreCase("table")) { + tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + + PluginPropertyUtils.pluginProp("dataset") + + "." + PluginPropertyUtils.pluginProp(value); + } else if (property.equalsIgnoreCase("datasetProject")) { + tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset") + + "." + TestSetupHooks.bqSourceTable; + + }else if (property.equalsIgnoreCase("partitionFrom")) { + tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + + PluginPropertyUtils.pluginProp("dataset") + + "." + PluginPropertyUtils.pluginProp(value);} + + String expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_PARTITIONSTARTDATE) + .replaceAll("TABLENAME", tableFullName); + String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("partitionFrom").getText(); + System.out.println(actualErrorMessage); + Assert.assertEquals("Error message mismatch for Partition Start Date", expectedErrorMessage, actualErrorMessage); + String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement("partitionFrom")); + String expectedColor = ConstantsUtil.ERROR_MSG_COLOR; + Assert.assertEquals(expectedColor, actualColor); + } + + @Then("Validate BigQuery source incorrect property error for Partition End date {string} value {string}") + public void validateBigQuerySourceIncorrectPartitionenddateErrorFor(String property, String value) { + CdfBigQueryPropertiesActions.getSchema(); + SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 5L); + String tableFullName = StringUtils.EMPTY; + if (property.equalsIgnoreCase("dataset")) { + tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + PluginPropertyUtils.pluginProp(value) + + "." + TestSetupHooks.bqSourceTable; + } else if (property.equalsIgnoreCase("table")) { + tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + + PluginPropertyUtils.pluginProp("dataset") + + "." + PluginPropertyUtils.pluginProp(value); + } else if (property.equalsIgnoreCase("datasetProjectId")) { + tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset") + + "." + TestSetupHooks.bqSourceTable; + }else if (property.equalsIgnoreCase("partitionEndDate")) { + tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + + PluginPropertyUtils.pluginProp("partitionTo") + + "." 
+ TestSetupHooks.bqSourceTable; + } + + String expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_PARTITIONENDDATE) + .replaceAll("TABLENAME", tableFullName); + String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("partitionTo").getText(); + System.out.println(actualErrorMessage); + Assert.assertEquals("Error message mismatch for Partition End Date", expectedErrorMessage, actualErrorMessage); + String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement("partitionTo")); + String expectedColor = ConstantsUtil.ERROR_MSG_COLOR; + Assert.assertEquals(expectedColor, actualColor); + } + + @Then("Enter BigQuery source properties referenceName") + public void EnterBigQuerysourcepropertiesreferenceName() throws IOException { + CdfBigQueryPropertiesActions.enterBigQueryReferenceName("invalidRef&^*&&*"); + + } + + @Then("Validate BigQuery source incorrect property error for reference name{string} value {string}") + public void validateBigQuerySourceIncorrectPropertyErrorForreferncename(String property, String value) { + CdfBigQueryPropertiesActions.getSchema(); + SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 5L); + String tableFullName = StringUtils.EMPTY; + if (property.equalsIgnoreCase("dataset")) { + tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + PluginPropertyUtils.pluginProp(value) + + "." + TestSetupHooks.bqSourceTable; + } else if (property.equalsIgnoreCase("table")) { + tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + + PluginPropertyUtils.pluginProp("dataset") + + "." + PluginPropertyUtils.pluginProp(value); + } else if (property.equalsIgnoreCase("datasetProject")) { + tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset") + + "." + TestSetupHooks.bqSourceTable; + } + else if (property.equalsIgnoreCase("referenceName")) { + tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("reference") + + "." 
+ TestSetupHooks.bqSourceTable; + } + String expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_REFERENCENAME) + .replaceAll("TABLENAME", tableFullName); + String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("referenceName").getText(); + + Assert.assertEquals(expectedErrorMessage, actualErrorMessage); + String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement("referenceName")); + String expectedColor = ConstantsUtil.ERROR_MSG_COLOR; + Assert.assertEquals(expectedColor, actualColor); + + } + + @Then("Enter BigQuery source properties filter") + public void EnterBigQuerysourcepropertiesfilter() throws IOException { + CdfBigQueryPropertiesActions.enterFilter("%%%%"); + + } + + @Then("Enter BigQuery source property output schema {string} as macro argument {string}") + public void enterBigQueryPropertyAsMacroArgumentoutputschema(String pluginProperty, String macroArgument) { + SCHEMA_LOCATORS.schemaActions.click(); + SCHEMA_LOCATORS.schemaActionType("macro").click(); + WaitHelper.waitForElementToBeHidden(SCHEMA_LOCATORS.schemaActionType("macro"), 5); + try { + enterMacro(CdfPluginPropertyLocator.fromPropertyString(pluginProperty).pluginProperty, macroArgument); + } catch (NullPointerException e) { + Assert.fail("CDF_PLUGIN_PROPERTY_MAPPING for '" + pluginProperty + "' not present in CdfPluginPropertyLocator."); + } + } } diff --git a/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java b/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java index 4fb86da3b4..676a6b5650 100644 --- a/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java +++ b/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java @@ -17,4 +17,9 @@ public class E2ETestConstants { public static final String ERROR_MSG_BQ_INCORRECT_CHUNKSIZE = "errorMessageIncorrectBQChunkSize"; public static final String ERROR_MSG_BQ_INCORRECT_TEMPORARY_BUCKET = "errorMessageIncorrectBQBucketName"; public static final String ERROR_MSG_BQ_INCORRECT_PROPERTY = "errorMessageIncorrectBQProperty"; + public static final String ERROR_MSG_INCORRECT_PARTITIONSTARTDATE= "errorMessageIncorrectPartitionStartDate"; + public static final String ERROR_MSG_INCORRECT_PARTITIONENDDATE= "errorMessageIncorrectPartitionEndDate"; + public static final String ERROR_MSG_INCORRECT_REFERENCENAME= "errorMessageIncorrectReferenceName"; + public static final String ERROR_MSG_INCORRECT_FILTER= "errorMessageIncorrectRegexPathFilter"; + } diff --git a/src/e2e-test/resources/errorMessage.properties b/src/e2e-test/resources/errorMessage.properties index bd8a1610b9..8171c80cef 100644 --- a/src/e2e-test/resources/errorMessage.properties +++ b/src/e2e-test/resources/errorMessage.properties @@ -33,4 +33,7 @@ errorMessageMultipleFileWithoutClearDefaultSchema=Found a row with 4 fields when errorMessageInvalidSourcePath=Invalid bucket name in path 'abc@'. Bucket name should errorMessageInvalidDestPath=Invalid bucket name in path 'abc@'. Bucket name should errorMessageInvalidEncryptionKey=CryptoKeyName.parse: formattedString not in valid format: Parameter "abc@" must be - +errorMessageIncorrectPartitionStartDate=11-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd +errorMessageIncorrectPartitionEndDate=12-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd +errorMessageIncorrectReferenceName=Invalid reference name 'invalidRef&^*&&*'. Supported characters are: letters, numbers, and '_', '-', '.', or '$'. 
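+# The keys above are resolved at assertion time by the step definitions in
+# BigQueryBase.java: the expected text comes from errorProp(..) and the actual
+# text from the plugin's in-line error element. A minimal sketch of that
+# pattern, built only from calls already used in this patch (illustrative
+# wiring, not the framework's exact implementation):
+#   String expected = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_REFERENCENAME);
+#   String actual = PluginPropertyUtils.findPropertyErrorElement("referenceName").getText();
+#   Assert.assertEquals(expected, actual);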
+errorLogsMessageInvalidFilter=Spark Program 'phase-1' failed.
diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties
index aae33e0e89..f4a17b1c25 100644
--- a/src/e2e-test/resources/pluginParameters.properties
+++ b/src/e2e-test/resources/pluginParameters.properties
@@ -354,6 +354,15 @@ bqTargetTable=dummy
 bqTargetTable2=dummy
 bqmtTargetTable=tabA
 bqmtTargetTable2=tabB
+bqStartDate=2024-12-11
+bqEndDate=2024-12-12
+partitionFrom=2024-12-11
+partitionTo=2024-12-12
+filter=Id=20
+bqIncorrectReferenceName=invalidRef&^*&&*
+OutputSchema={ "type": "record", "name": "text", "fields": [{ "name": "Id", "type": "long" }, { "name": "Value", "type": "long" }, \
+  { "name": "UID", "type": "string" } ] }
+incorrectFilter=%%%%
 ## BQMT-PLUGIN-PROPERTIES-END

 ##CLOUDBIGTABLE-PLUGIN-PROPERTIES-START

From 60fb70f5790ff3aadda44530885c361b822338a8 Mon Sep 17 00:00:00 2001
From: Praveena2607
Date: Thu, 12 Dec 2024 05:43:27 +0000
Subject: [PATCH 2/7] Updated CdfPluginPropertyLocator and associated files for BigQuery plugin: added new property locators and mappings.

---
 .../utils/CdfPluginPropertyLocator.java | 69 +++++++++++--------
 .../resources/errorMessage.properties | 4 +-
 .../resources/pluginParameters.properties | 8 +--
 3 files changed, 45 insertions(+), 36 deletions(-)

diff --git a/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java b/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java
index 297c623838..a9c3923b0b 100644
--- a/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java
+++ b/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java
@@ -36,7 +36,12 @@ public enum CdfPluginPropertyLocator {
 GCS_CREATE_OBJECTS_TO_CREATE("paths"),
 GCS_CREATE_FAIL_IF_OBJECT_EXISTS("failIfExists"),
 GCS_MOVE_SOURCE_PATH("sourcePath"),
- GCS_MOVE_DESTINATION_PATH("destPath");
+ GCS_MOVE_DESTINATION_PATH("destPath"),
+ PARTITION_START_DATE("partitionFrom"),
+ PARTITION_END_DATE("partitionTo"),
+ FILTER("filter"),
+ OUTPUT_SCHEMA("Output Schema-macro-input");
+
 public String pluginProperty;
 CdfPluginPropertyLocator(String property) {
@@ -46,35 +51,39 @@ public enum CdfPluginPropertyLocator {
 private static final Map CDF_PLUGIN_PROPERTY_MAPPING;
 static {
 CDF_PLUGIN_PROPERTY_MAPPING = new ImmutableMap.Builder()
- .put("projectId", CdfPluginPropertyLocator.PROJECT_ID)
- .put("datasetProjectId", CdfPluginPropertyLocator.DATASET_PROJECT_ID)
- .put("dataset", CdfPluginPropertyLocator.DATASET)
- .put("table", CdfPluginPropertyLocator.TABLE)
- .put("format", CdfPluginPropertyLocator.FORMAT)
- .put("path", CdfPluginPropertyLocator.PATH)
- .put("sampleSize", CdfPluginPropertyLocator.SAMPLE_SIZE)
- .put("delimiter", CdfPluginPropertyLocator.DELIMITER)
- .put("skipHeader", CdfPluginPropertyLocator.SKIP_HEADER)
- .put("pathSuffix", CdfPluginPropertyLocator.SUFFIX)
- .put("encryptionKeyName", CdfPluginPropertyLocator.CMEK_KEY)
- .put("serviceAccountType", CdfPluginPropertyLocator.SERVICE_ACCOUNT_TYPE)
- .put("serviceAccountFilePath", CdfPluginPropertyLocator.SERVICE_ACCOUNT_PATH)
- .put("serviceAccountJSON", CdfPluginPropertyLocator.SERVICE_ACCOUNT_JSON)
- .put("truncateTable", CdfPluginPropertyLocator.TRUNCATE_TABLE)
- .put("updateTableSchema", CdfPluginPropertyLocator.UPDATE_TABLE_SCHEMA)
- .put("topic", CdfPluginPropertyLocator.PUBSUB_TOPIC)
- .put("maximumBatchCount", CdfPluginPropertyLocator.PUBSUB_MAXIMUM_BATCH_COUNT)
- .put("maximumBatchSize", CdfPluginPropertyLocator.PUBSUB_MAXIMUM_BATCH_SIZE)
- .put("publishDelayThreshold",
CdfPluginPropertyLocator.PUBSUB_PUBLISH_DELAY_THRESHOLD) - .put("retryTimeout", CdfPluginPropertyLocator.PUBSUB_RETRY_TIMEOUT) - .put("errorThreshold", CdfPluginPropertyLocator.PUBSUB_ERROR_THRESHOLD) - .put("outputSchema", CdfPluginPropertyLocator.OUTPUT_SCHEMA_MACRO_INPUT) - .put("objectsToDelete", CdfPluginPropertyLocator.GCS_DELETE_OBJECTS_TO_DELETE) - .put("objectsToCreate", CdfPluginPropertyLocator.GCS_CREATE_OBJECTS_TO_CREATE) - .put("createFailIfObjectExists", CdfPluginPropertyLocator.GCS_CREATE_FAIL_IF_OBJECT_EXISTS) - .put("gcsMoveSourcePath", CdfPluginPropertyLocator.GCS_MOVE_SOURCE_PATH) - .put("gcsMoveDestinationPath", CdfPluginPropertyLocator.GCS_MOVE_DESTINATION_PATH) - .build(); + .put("projectId", CdfPluginPropertyLocator.PROJECT_ID) + .put("datasetProjectId", CdfPluginPropertyLocator.DATASET_PROJECT_ID) + .put("dataset", CdfPluginPropertyLocator.DATASET) + .put("table", CdfPluginPropertyLocator.TABLE) + .put("format", CdfPluginPropertyLocator.FORMAT) + .put("path", CdfPluginPropertyLocator.PATH) + .put("sampleSize", CdfPluginPropertyLocator.SAMPLE_SIZE) + .put("delimiter", CdfPluginPropertyLocator.DELIMITER) + .put("skipHeader", CdfPluginPropertyLocator.SKIP_HEADER) + .put("pathSuffix", CdfPluginPropertyLocator.SUFFIX) + .put("encryptionKeyName", CdfPluginPropertyLocator.CMEK_KEY) + .put("serviceAccountType", CdfPluginPropertyLocator.SERVICE_ACCOUNT_TYPE) + .put("serviceAccountFilePath", CdfPluginPropertyLocator.SERVICE_ACCOUNT_PATH) + .put("serviceAccountJSON", CdfPluginPropertyLocator.SERVICE_ACCOUNT_JSON) + .put("truncateTable", CdfPluginPropertyLocator.TRUNCATE_TABLE) + .put("updateTableSchema", CdfPluginPropertyLocator.UPDATE_TABLE_SCHEMA) + .put("topic", CdfPluginPropertyLocator.PUBSUB_TOPIC) + .put("maximumBatchCount", CdfPluginPropertyLocator.PUBSUB_MAXIMUM_BATCH_COUNT) + .put("maximumBatchSize", CdfPluginPropertyLocator.PUBSUB_MAXIMUM_BATCH_SIZE) + .put("publishDelayThreshold", CdfPluginPropertyLocator.PUBSUB_PUBLISH_DELAY_THRESHOLD) + .put("retryTimeout", CdfPluginPropertyLocator.PUBSUB_RETRY_TIMEOUT) + .put("errorThreshold", CdfPluginPropertyLocator.PUBSUB_ERROR_THRESHOLD) + .put("outputSchema", CdfPluginPropertyLocator.OUTPUT_SCHEMA_MACRO_INPUT) + .put("objectsToDelete", CdfPluginPropertyLocator.GCS_DELETE_OBJECTS_TO_DELETE) + .put("objectsToCreate", CdfPluginPropertyLocator.GCS_CREATE_OBJECTS_TO_CREATE) + .put("createFailIfObjectExists", CdfPluginPropertyLocator.GCS_CREATE_FAIL_IF_OBJECT_EXISTS) + .put("gcsMoveSourcePath", CdfPluginPropertyLocator.GCS_MOVE_SOURCE_PATH) + .put("gcsMoveDestinationPath", CdfPluginPropertyLocator.GCS_MOVE_DESTINATION_PATH) + .put("partitionFrom", CdfPluginPropertyLocator.PARTITION_START_DATE) + .put("partitionTo", CdfPluginPropertyLocator.PARTITION_END_DATE) + .put("filter", CdfPluginPropertyLocator.FILTER) + .put("Output Schema-macro-input", CdfPluginPropertyLocator.OUTPUT_SCHEMA) + .build(); } @Nullable diff --git a/src/e2e-test/resources/errorMessage.properties b/src/e2e-test/resources/errorMessage.properties index 8171c80cef..5d65f2bea9 100644 --- a/src/e2e-test/resources/errorMessage.properties +++ b/src/e2e-test/resources/errorMessage.properties @@ -33,7 +33,7 @@ errorMessageMultipleFileWithoutClearDefaultSchema=Found a row with 4 fields when errorMessageInvalidSourcePath=Invalid bucket name in path 'abc@'. Bucket name should errorMessageInvalidDestPath=Invalid bucket name in path 'abc@'. 
Bucket name should
errorMessageInvalidEncryptionKey=CryptoKeyName.parse: formattedString not in valid format: Parameter "abc@" must be
-errorMessageIncorrectPartitionStartDate=11-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
-errorMessageIncorrectPartitionEndDate=12-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
+errorMessageIncorrectPartitionStartDate=12-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
+errorMessageIncorrectPartitionEndDate=13-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
 errorMessageIncorrectReferenceName=Invalid reference name 'invalidRef&^*&&*'. Supported characters are: letters, numbers, and '_', '-', '.', or '$'.
 errorLogsMessageInvalidFilter=Spark Program 'phase-1' failed.
diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties
index f4a17b1c25..3a80e5a11c 100644
--- a/src/e2e-test/resources/pluginParameters.properties
+++ b/src/e2e-test/resources/pluginParameters.properties
@@ -354,10 +354,10 @@ bqTargetTable=dummy
 bqTargetTable2=dummy
 bqmtTargetTable=tabA
 bqmtTargetTable2=tabB
-bqStartDate=2024-12-11
-bqEndDate=2024-12-12
-partitionFrom=2024-12-11
-partitionTo=2024-12-12
+bqStartDate=2024-12-12
+bqEndDate=2024-12-13
+partitionFrom=2024-12-12
+partitionTo=2024-12-13
 filter=Id=20
 bqIncorrectReferenceName=invalidRef&^*&&*
 OutputSchema={ "type": "record", "name": "text", "fields": [{ "name": "Id", "type": "long" }, { "name": "Value", "type": "long" }, \

From 7e71bc2a2339c06b6118ab0bed87227a3dfac898 Mon Sep 17 00:00:00 2001
From: Praveena2607
Date: Sat, 14 Dec 2024 04:48:48 +0000
Subject: [PATCH 3/7] Fixed Checkstyle issues and ensured all scenarios are running successfully.
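The fixes below follow two recurring patterns: the wildcard import of
io.cdap.e2e.utils.* is expanded back into explicit single-class imports, and
"}else if" chains are reformatted so that the closing brace, the keyword, and
the assignment each sit on their own line. A condensed illustration of the
target brace style (the method body is simplified here; the real logic lives
in BigQueryBase.java):

    // Simplified sketch of the brace style this patch enforces; before the
    // patch the else-if was written as "}else if (...) { ...; }" on one line.
    static String tableFullName(String property, String value) {
      String tableFullName = "";
      if (property.equalsIgnoreCase("dataset")) {
        tableFullName = value;
      } else if (property.equalsIgnoreCase("partitionFrom")) {
        tableFullName = value;
      }
      return tableFullName;
    }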
--- pom.xml | 2 +- .../source/BigQuerySourceError.feature | 3 +- .../bigquery/stepsdesign/BigQueryBase.java | 32 +++++++++---------- .../cdap/plugin/utils/E2ETestConstants.java | 9 +++--- .../resources/errorMessage.properties | 4 +-- .../resources/pluginParameters.properties | 8 ++--- 6 files changed, 28 insertions(+), 30 deletions(-) diff --git a/pom.xml b/pom.xml index f3b98ceb3d..c3b1ff9463 100644 --- a/pom.xml +++ b/pom.xml @@ -86,7 +86,7 @@ 2.3.0 1.105.1 3.19.4 - 1.3.0-rc3 + 1.5.0 27.0.1-jre 3.3.6 1.4.13 diff --git a/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature b/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature index b88ac85944..e80adc0cdd 100644 --- a/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature +++ b/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature @@ -69,9 +69,8 @@ Feature: BigQuery source - Validate BigQuery source plugin error scenarios Then Enter BigQuery source properties partitionFrom and partitionTo Then Validate BigQuery source incorrect property error for Partition Start date "" value "" Then Validate BigQuery source incorrect property error for Partition End date "" value "" - Then Enter BigQuery source properties referenceName + And Enter input plugin property: "referenceName" with value: "bqIncorrectReferenceName" Then Validate BigQuery source incorrect property error for reference name"" value "" - Then Enter BigQuery source properties filter Examples: | property | value | | partitionFrom | bqIncorrectFormatStartDate | diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java index 86417a44c6..1dc05c7ef2 100644 --- a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java +++ b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java @@ -19,7 +19,12 @@ import io.cdap.e2e.pages.actions.CdfStudioActions; import io.cdap.e2e.pages.locators.CdfBigQueryPropertiesLocators; import io.cdap.e2e.pages.locators.CdfStudioLocators; -import io.cdap.e2e.utils.*; +import io.cdap.e2e.utils.BigQueryClient; +import io.cdap.e2e.utils.ConstantsUtil; +import io.cdap.e2e.utils.ElementHelper; +import io.cdap.e2e.utils.PluginPropertyUtils; +import io.cdap.e2e.utils.SeleniumHelper; +import io.cdap.e2e.utils.WaitHelper; import io.cdap.plugin.common.stepsdesign.TestSetupHooks; import io.cdap.plugin.utils.CdfPluginPropertyLocator; import io.cdap.plugin.utils.E2EHelper; @@ -284,17 +289,19 @@ public void validateBigQuerySourceIncorrectErrorFor(String property, String valu tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset") + "." + TestSetupHooks.bqSourceTable; - }else if (property.equalsIgnoreCase("partitionFrom")) { + } else if (property.equalsIgnoreCase("partitionFrom")) { tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + PluginPropertyUtils.pluginProp("dataset") - + "." + PluginPropertyUtils.pluginProp(value);} + + "." 
+ PluginPropertyUtils.pluginProp(value); + } String expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_PARTITIONSTARTDATE) .replaceAll("TABLENAME", tableFullName); String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("partitionFrom").getText(); System.out.println(actualErrorMessage); Assert.assertEquals("Error message mismatch for Partition Start Date", expectedErrorMessage, actualErrorMessage); - String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement("partitionFrom")); + String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement + ("partitionFrom")); String expectedColor = ConstantsUtil.ERROR_MSG_COLOR; Assert.assertEquals(expectedColor, actualColor); } @@ -314,7 +321,7 @@ public void validateBigQuerySourceIncorrectPartitionenddateErrorFor(String prope } else if (property.equalsIgnoreCase("datasetProjectId")) { tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset") + "." + TestSetupHooks.bqSourceTable; - }else if (property.equalsIgnoreCase("partitionEndDate")) { + } else if (property.equalsIgnoreCase("partitionEndDate")) { tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("partitionTo") + "." + TestSetupHooks.bqSourceTable; @@ -330,12 +337,6 @@ public void validateBigQuerySourceIncorrectPartitionenddateErrorFor(String prope Assert.assertEquals(expectedColor, actualColor); } - @Then("Enter BigQuery source properties referenceName") - public void EnterBigQuerysourcepropertiesreferenceName() throws IOException { - CdfBigQueryPropertiesActions.enterBigQueryReferenceName("invalidRef&^*&&*"); - - } - @Then("Validate BigQuery source incorrect property error for reference name{string} value {string}") public void validateBigQuerySourceIncorrectPropertyErrorForreferncename(String property, String value) { CdfBigQueryPropertiesActions.getSchema(); @@ -351,8 +352,7 @@ public void validateBigQuerySourceIncorrectPropertyErrorForreferncename(String p } else if (property.equalsIgnoreCase("datasetProject")) { tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset") + "." + TestSetupHooks.bqSourceTable; - } - else if (property.equalsIgnoreCase("referenceName")) { + } else if (property.equalsIgnoreCase("referenceName")) { tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("reference") + "." 
+ TestSetupHooks.bqSourceTable; } @@ -361,16 +361,16 @@ else if (property.equalsIgnoreCase("referenceName")) { String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("referenceName").getText(); Assert.assertEquals(expectedErrorMessage, actualErrorMessage); - String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement("referenceName")); + String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement + ("referenceName")); String expectedColor = ConstantsUtil.ERROR_MSG_COLOR; Assert.assertEquals(expectedColor, actualColor); } @Then("Enter BigQuery source properties filter") - public void EnterBigQuerysourcepropertiesfilter() throws IOException { + public void enterBigQuerysourcePropertiesfilter() throws IOException { CdfBigQueryPropertiesActions.enterFilter("%%%%"); - } @Then("Enter BigQuery source property output schema {string} as macro argument {string}") diff --git a/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java b/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java index 676a6b5650..1e8ea9ba81 100644 --- a/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java +++ b/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java @@ -17,9 +17,8 @@ public class E2ETestConstants { public static final String ERROR_MSG_BQ_INCORRECT_CHUNKSIZE = "errorMessageIncorrectBQChunkSize"; public static final String ERROR_MSG_BQ_INCORRECT_TEMPORARY_BUCKET = "errorMessageIncorrectBQBucketName"; public static final String ERROR_MSG_BQ_INCORRECT_PROPERTY = "errorMessageIncorrectBQProperty"; - public static final String ERROR_MSG_INCORRECT_PARTITIONSTARTDATE= "errorMessageIncorrectPartitionStartDate"; - public static final String ERROR_MSG_INCORRECT_PARTITIONENDDATE= "errorMessageIncorrectPartitionEndDate"; - public static final String ERROR_MSG_INCORRECT_REFERENCENAME= "errorMessageIncorrectReferenceName"; - public static final String ERROR_MSG_INCORRECT_FILTER= "errorMessageIncorrectRegexPathFilter"; - + public static final String ERROR_MSG_INCORRECT_PARTITIONSTARTDATE = "errorMessageIncorrectPartitionStartDate"; + public static final String ERROR_MSG_INCORRECT_PARTITIONENDDATE = "errorMessageIncorrectPartitionEndDate"; + public static final String ERROR_MSG_INCORRECT_REFERENCENAME = "errorMessageIncorrectReferenceName"; + public static final String ERROR_MSG_INCORRECT_FILTER = "errorMessageIncorrectRegexPathFilter"; } diff --git a/src/e2e-test/resources/errorMessage.properties b/src/e2e-test/resources/errorMessage.properties index 5d65f2bea9..f474d65fe2 100644 --- a/src/e2e-test/resources/errorMessage.properties +++ b/src/e2e-test/resources/errorMessage.properties @@ -33,7 +33,7 @@ errorMessageMultipleFileWithoutClearDefaultSchema=Found a row with 4 fields when errorMessageInvalidSourcePath=Invalid bucket name in path 'abc@'. Bucket name should errorMessageInvalidDestPath=Invalid bucket name in path 'abc@'. Bucket name should errorMessageInvalidEncryptionKey=CryptoKeyName.parse: formattedString not in valid format: Parameter "abc@" must be -errorMessageIncorrectPartitionStartDate=12-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd -errorMessageIncorrectPartitionEndDate=13-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd +errorMessageIncorrectPartitionStartDate=16-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd +errorMessageIncorrectPartitionEndDate=17-12-2024 is not in a valid format. 
Enter valid date in format: yyyy-MM-dd
 errorMessageIncorrectReferenceName=Invalid reference name 'invalidRef&^*&&*'. Supported characters are: letters, numbers, and '_', '-', '.', or '$'.
 errorLogsMessageInvalidFilter=Spark Program 'phase-1' failed.
diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties
index 3a80e5a11c..694e628126 100644
--- a/src/e2e-test/resources/pluginParameters.properties
+++ b/src/e2e-test/resources/pluginParameters.properties
@@ -354,10 +354,10 @@ bqTargetTable=dummy
 bqTargetTable2=dummy
 bqmtTargetTable=tabA
 bqmtTargetTable2=tabB
-bqStartDate=2024-12-12
-bqEndDate=2024-12-13
-partitionFrom=2024-12-12
-partitionTo=2024-12-13
+bqStartDate=2024-12-16
+bqEndDate=2024-12-17
+partitionFrom=2024-12-16
+partitionTo=2024-12-17
 filter=Id=20
 bqIncorrectReferenceName=invalidRef&^*&&*
 OutputSchema={ "type": "record", "name": "text", "fields": [{ "name": "Id", "type": "long" }, { "name": "Value", "type": "long" }, \

From 08705764e55db45077d46ae251b0af308c955b6a Mon Sep 17 00:00:00 2001
From: Praveena2607
Date: Mon, 16 Dec 2024 08:34:17 +0000
Subject: [PATCH 4/7] Refactor CdfPluginPropertyLocator alignment and update build dependencies.

-Fixed indentation alignment for partitionFrom, partitionTo, filter and Output Schema.
-Reverted the pom.xml version change so the build file stays as it was.
---
 pom.xml | 2 +-
 .../utils/CdfPluginPropertyLocator.java | 67 +++++++++----------
 2 files changed, 34 insertions(+), 35 deletions(-)

diff --git a/pom.xml b/pom.xml
index c3b1ff9463..a58dbcd995 100644
--- a/pom.xml
+++ b/pom.xml
@@ -86,7 +86,7 @@
 2.3.0
 1.105.1
 3.19.4
- 1.5.0
+ 1.3.0
 27.0.1-jre
 3.3.6
 1.4.13
diff --git a/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java b/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java
index a9c3923b0b..8d3199b7d0 100644
--- a/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java
+++ b/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java
@@ -42,7 +42,6 @@ public enum CdfPluginPropertyLocator {
 FILTER("filter"),
 OUTPUT_SCHEMA("Output Schema-macro-input");
-
 public String pluginProperty;
 CdfPluginPropertyLocator(String property) {
 this.pluginProperty = property;
@@ -51,39 +50,39 @@ public enum CdfPluginPropertyLocator {
 private static final Map CDF_PLUGIN_PROPERTY_MAPPING;
 static {
 CDF_PLUGIN_PROPERTY_MAPPING = new ImmutableMap.Builder()
- .put("projectId", CdfPluginPropertyLocator.PROJECT_ID)
- .put("datasetProjectId", CdfPluginPropertyLocator.DATASET_PROJECT_ID)
- .put("dataset", CdfPluginPropertyLocator.DATASET)
- .put("table", CdfPluginPropertyLocator.TABLE)
- .put("format", CdfPluginPropertyLocator.FORMAT)
- .put("path", CdfPluginPropertyLocator.PATH)
- .put("sampleSize", CdfPluginPropertyLocator.SAMPLE_SIZE)
- .put("delimiter", CdfPluginPropertyLocator.DELIMITER)
- .put("skipHeader", CdfPluginPropertyLocator.SKIP_HEADER)
- .put("pathSuffix", CdfPluginPropertyLocator.SUFFIX)
- .put("encryptionKeyName", CdfPluginPropertyLocator.CMEK_KEY)
- .put("serviceAccountType", CdfPluginPropertyLocator.SERVICE_ACCOUNT_TYPE)
- .put("serviceAccountFilePath", CdfPluginPropertyLocator.SERVICE_ACCOUNT_PATH)
- .put("serviceAccountJSON", CdfPluginPropertyLocator.SERVICE_ACCOUNT_JSON)
- .put("truncateTable", CdfPluginPropertyLocator.TRUNCATE_TABLE)
- .put("updateTableSchema", CdfPluginPropertyLocator.UPDATE_TABLE_SCHEMA)
- .put("topic", CdfPluginPropertyLocator.PUBSUB_TOPIC)
- .put("maximumBatchCount", CdfPluginPropertyLocator.PUBSUB_MAXIMUM_BATCH_COUNT)
- .put("maximumBatchSize",
CdfPluginPropertyLocator.PUBSUB_MAXIMUM_BATCH_SIZE)
- .put("publishDelayThreshold", CdfPluginPropertyLocator.PUBSUB_PUBLISH_DELAY_THRESHOLD)
- .put("retryTimeout", CdfPluginPropertyLocator.PUBSUB_RETRY_TIMEOUT)
- .put("errorThreshold", CdfPluginPropertyLocator.PUBSUB_ERROR_THRESHOLD)
- .put("outputSchema", CdfPluginPropertyLocator.OUTPUT_SCHEMA_MACRO_INPUT)
- .put("objectsToDelete", CdfPluginPropertyLocator.GCS_DELETE_OBJECTS_TO_DELETE)
- .put("objectsToCreate", CdfPluginPropertyLocator.GCS_CREATE_OBJECTS_TO_CREATE)
- .put("createFailIfObjectExists", CdfPluginPropertyLocator.GCS_CREATE_FAIL_IF_OBJECT_EXISTS)
- .put("gcsMoveSourcePath", CdfPluginPropertyLocator.GCS_MOVE_SOURCE_PATH)
- .put("gcsMoveDestinationPath", CdfPluginPropertyLocator.GCS_MOVE_DESTINATION_PATH)
- .put("partitionFrom", CdfPluginPropertyLocator.PARTITION_START_DATE)
- .put("partitionTo", CdfPluginPropertyLocator.PARTITION_END_DATE)
- .put("filter", CdfPluginPropertyLocator.FILTER)
- .put("Output Schema-macro-input", CdfPluginPropertyLocator.OUTPUT_SCHEMA)
- .build();
+ .put("projectId", CdfPluginPropertyLocator.PROJECT_ID)
+ .put("datasetProjectId", CdfPluginPropertyLocator.DATASET_PROJECT_ID)
+ .put("dataset", CdfPluginPropertyLocator.DATASET)
+ .put("table", CdfPluginPropertyLocator.TABLE)
+ .put("format", CdfPluginPropertyLocator.FORMAT)
+ .put("path", CdfPluginPropertyLocator.PATH)
+ .put("sampleSize", CdfPluginPropertyLocator.SAMPLE_SIZE)
+ .put("delimiter", CdfPluginPropertyLocator.DELIMITER)
+ .put("skipHeader", CdfPluginPropertyLocator.SKIP_HEADER)
+ .put("pathSuffix", CdfPluginPropertyLocator.SUFFIX)
+ .put("encryptionKeyName", CdfPluginPropertyLocator.CMEK_KEY)
+ .put("serviceAccountType", CdfPluginPropertyLocator.SERVICE_ACCOUNT_TYPE)
+ .put("serviceAccountFilePath", CdfPluginPropertyLocator.SERVICE_ACCOUNT_PATH)
+ .put("serviceAccountJSON", CdfPluginPropertyLocator.SERVICE_ACCOUNT_JSON)
+ .put("truncateTable", CdfPluginPropertyLocator.TRUNCATE_TABLE)
+ .put("updateTableSchema", CdfPluginPropertyLocator.UPDATE_TABLE_SCHEMA)
+ .put("topic", CdfPluginPropertyLocator.PUBSUB_TOPIC)
+ .put("maximumBatchCount", CdfPluginPropertyLocator.PUBSUB_MAXIMUM_BATCH_COUNT)
+ .put("maximumBatchSize", CdfPluginPropertyLocator.PUBSUB_MAXIMUM_BATCH_SIZE)
+ .put("publishDelayThreshold", CdfPluginPropertyLocator.PUBSUB_PUBLISH_DELAY_THRESHOLD)
+ .put("retryTimeout", CdfPluginPropertyLocator.PUBSUB_RETRY_TIMEOUT)
+ .put("errorThreshold", CdfPluginPropertyLocator.PUBSUB_ERROR_THRESHOLD)
+ .put("outputSchema", CdfPluginPropertyLocator.OUTPUT_SCHEMA_MACRO_INPUT)
+ .put("objectsToDelete", CdfPluginPropertyLocator.GCS_DELETE_OBJECTS_TO_DELETE)
+ .put("objectsToCreate", CdfPluginPropertyLocator.GCS_CREATE_OBJECTS_TO_CREATE)
+ .put("createFailIfObjectExists", CdfPluginPropertyLocator.GCS_CREATE_FAIL_IF_OBJECT_EXISTS)
+ .put("gcsMoveSourcePath", CdfPluginPropertyLocator.GCS_MOVE_SOURCE_PATH)
+ .put("gcsMoveDestinationPath", CdfPluginPropertyLocator.GCS_MOVE_DESTINATION_PATH)
+ .put("partitionFrom", CdfPluginPropertyLocator.PARTITION_START_DATE)
+ .put("partitionTo", CdfPluginPropertyLocator.PARTITION_END_DATE)
+ .put("filter", CdfPluginPropertyLocator.FILTER)
+ .put("Output Schema-macro-input", CdfPluginPropertyLocator.OUTPUT_SCHEMA)
+ .build();
 }
 @Nullable

From 06ac469243e8b9e08e29e8822017a4f0f330943a Mon Sep 17 00:00:00 2001
From: Praveena2607
Date: Thu, 19 Dec 2024 06:28:15 +0000
Subject: [PATCH 5/7] All the requested changes are made. Please check.
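This revision drops the bespoke partition-date and reference-name validation
steps added in PATCH 1; BigQuerySourceError.feature now binds to the
framework's generic in-line error steps instead. For reference, a step of
roughly this shape is what the feature relies on. The body below is a sketch
assembled from helper calls already used in BigQueryBase.java; it is
illustrative, not the shared e2e framework's actual implementation:

    import io.cdap.e2e.utils.PluginPropertyUtils;
    import io.cucumber.java.en.Then;
    import org.junit.Assert;

    public class InLineErrorSteps {
      // Resolve the expected message from errorMessage.properties and compare
      // it with the text shown under the named plugin property.
      @Then("Verify that the Plugin Property: {string} is displaying an in-line error message: {string}")
      public void verifyInLineErrorMessage(String property, String errorMessageKey) {
        String expected = PluginPropertyUtils.errorProp(errorMessageKey);
        String actual = PluginPropertyUtils.findPropertyErrorElement(property).getText();
        Assert.assertEquals("In-line error mismatch for " + property, expected, actual);
      }
    }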
--- .../source/BigQuerySourceError.feature | 19 +- .../source/BigQueryToGCS_WithMacro.feature | 75 +------- .../bigquery/stepsdesign/BigQueryBase.java | 176 ++++-------------- .../utils/CdfPluginPropertyLocator.java | 4 +- .../resources/errorMessage.properties | 4 +- .../resources/pluginParameters.properties | 15 +- 6 files changed, 56 insertions(+), 237 deletions(-) diff --git a/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature b/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature index e80adc0cdd..71626aff0c 100644 --- a/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature +++ b/src/e2e-test/features/bigquery/source/BigQuerySourceError.feature @@ -57,22 +57,19 @@ Feature: BigQuery source - Validate BigQuery source plugin error scenarios Then Verify the BigQuery validation error message for invalid property "bucket" @BQ_SOURCE_TEST - Scenario Outline:To verify error message when unsupported format is provided in Partition Start date and Partition end Date + Scenario:To verify error message when unsupported format is provided in Partition Start date and Partition end Date Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "BigQuery" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "BigQuery" Then Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "referenceName" with value: "bqInvalidReferenceName" Then Replace input plugin property: "dataset" with value: "dataset" Then Replace input plugin property: "table" with value: "bqSourceTable" + And Enter input plugin property: "partitionFrom" with value: "bqIncorrectFormatStartDate" + And Enter input plugin property: "partitionTo" with value: "bqIncorrectFormatEndDate" Then Click on the Get Schema button - Then Enter BigQuery source properties partitionFrom and partitionTo - Then Validate BigQuery source incorrect property error for Partition Start date "" value "" - Then Validate BigQuery source incorrect property error for Partition End date "" value "" - And Enter input plugin property: "referenceName" with value: "bqIncorrectReferenceName" - Then Validate BigQuery source incorrect property error for reference name"" value "" - Examples: - | property | value | - | partitionFrom | bqIncorrectFormatStartDate | - | partitionTo | bqIncorrectFormatEndDate | - | referenceName | bqIncorrectReferenceName | + And Click on the Validate button + Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageIncorrectReferenceName" + Then Verify that the Plugin Property: "partitionFrom" is displaying an in-line error message: "errorMessageIncorrectPartitionStartDate" + Then Verify that the Plugin Property: "partitionTo" is displaying an in-line error message: "errorMessageIncorrectPartitionEndDate" diff --git a/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature b/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature index ae5bcba216..3cd7f31963 100644 --- a/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature +++ b/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature @@ -146,7 +146,7 @@ Feature: BigQuery source - Verification of BigQuery to GCS successful data trans Then Validate the cmek key "cmekGCS" of target GCS bucket if cmek is enabled @CMEK @BQ_SOURCE_TEST @GCS_SINK_TEST - Scenario:Validate successful records transfer from BigQuery to 
+  Scenario:Validate successful records transfer from BigQuery to GCS with macro arguments for filter and Output Schema
     Given Open Datafusion Project to configure pipeline
     When Source is BigQuery
     When Sink is GCS
@@ -160,6 +160,7 @@ Feature: BigQuery source - Verification of BigQuery to GCS successful data trans
     Then Enter BigQuery property "serviceAccountJSON" as macro argument "serviceAccount"
     Then Enter BigQuery property "dataset" as macro argument "bqDataset"
     Then Enter BigQuery property "table" as macro argument "bqSourceTable"
+    Then Select Macro action of output schema property: "Output Schema-macro-input" and set the value to "bqOutputSchema"
     Then Validate "BigQuery" plugin properties
     Then Close the BigQuery properties
     Then Open GCS sink properties
@@ -184,77 +185,6 @@ Feature: BigQuery source - Verification of BigQuery to GCS successful data trans
     Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
     Then Enter runtime argument value "dataset" for key "bqDataset"
     Then Enter runtime argument value for BigQuery source table name key "bqSourceTable"
-    Then Enter runtime argument value "projectId" for key "gcsProjectId"
-    Then Enter runtime argument value for GCS sink property path key "gcsSinkPath"
-    Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix"
-    Then Enter runtime argument value "csvFormat" for key "gcsFormat"
-    Then Enter runtime argument value "cmekGCS" for GCS cmek property key "cmekGCS" if GCS cmek is enabled
-    Then Run the preview of pipeline with runtime arguments
-    Then Wait till pipeline preview is in running state
-    Then Open and capture pipeline preview logs
-    Then Verify the preview run status of pipeline in the logs is "succeeded"
-    Then Close the pipeline logs
-    Then Click on preview data for GCS sink
-    Then Close the preview data
-    Then Deploy the pipeline
-    Then Run the Pipeline in Runtime
-    Then Enter runtime argument value "projectId" for key "bqProjectId"
-    Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
-    Then Enter runtime argument value "filter" for key "bqFilter"
-    Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType"
-    Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
-    Then Enter runtime argument value "dataset" for key "bqDataset"
-    Then Enter runtime argument value for BigQuery source table name key "bqSourceTable"
-    Then Enter runtime argument value "projectId" for key "gcsProjectId"
-    Then Enter runtime argument value for GCS sink property path key "gcsSinkPath"
-    Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix"
-    Then Enter runtime argument value "csvFormat" for key "gcsFormat"
-    Then Enter runtime argument value "cmekGCS" for GCS cmek property key "cmekGCS" if GCS cmek is enabled
-    Then Run the Pipeline in Runtime with runtime arguments
-    Then Wait till pipeline is in running state
-    Then Open and capture logs
-    Then Verify the pipeline status is "Succeeded"
-    Then Verify data is transferred to target GCS bucket
-    Then Validate the cmek key "cmekGCS" of target GCS bucket if cmek is enabled
-
-  @CMEK @BQ_SOURCE_TEST @GCS_SINK_TEST
-  Scenario:Validate successful records transfer from BigQuery to GCS with macro arguments for output schema
-    Given Open Datafusion Project to configure pipeline
-    When Source is BigQuery
-    When Sink is GCS
-    Then Open BigQuery source properties
-    Then Enter BigQuery property reference name
-    Then Enter BigQuery property "projectId" as macro argument "bqProjectId"
property "projectId" as macro argument "bqProjectId" - Then Enter BigQuery property "datasetProjectId" as macro argument "bqDatasetProjectId" - Then Enter BigQuery property "serviceAccountType" as macro argument "serviceAccountType" - Then Enter BigQuery property "serviceAccountFilePath" as macro argument "serviceAccount" - Then Enter BigQuery property "serviceAccountJSON" as macro argument "serviceAccount" - Then Enter BigQuery property "dataset" as macro argument "bqDataset" - Then Enter BigQuery property "table" as macro argument "bqSourceTable" - Then Enter BigQuery source property output schema "outputSchema" as macro argument "bqOutputSchema" - Then Validate "BigQuery" plugin properties - Then Close the BigQuery properties - Then Open GCS sink properties - Then Enter GCS property reference name - Then Enter GCS property "projectId" as macro argument "gcsProjectId" - Then Enter GCS property "serviceAccountType" as macro argument "serviceAccountType" - Then Enter GCS property "serviceAccountFilePath" as macro argument "serviceAccount" - Then Enter GCS property "serviceAccountJSON" as macro argument "serviceAccount" - Then Enter GCS property "path" as macro argument "gcsSinkPath" - Then Enter GCS sink property "pathSuffix" as macro argument "gcsPathSuffix" - Then Enter GCS property "format" as macro argument "gcsFormat" - Then Enter GCS sink cmek property "encryptionKeyName" as macro argument "cmekGCS" if cmek is enabled - Then Validate "GCS" plugin properties - Then Close the GCS properties - Then Connect source as "BigQuery" and sink as "GCS" to establish connection - Then Save the pipeline - Then Preview and run the pipeline - Then Enter runtime argument value "projectId" for key "bqProjectId" - Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" - Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" - Then Enter runtime argument value "serviceAccount" for key "serviceAccount" - Then Enter runtime argument value "dataset" for key "bqDataset" - Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" Then Enter runtime argument value "OutputSchema" for key "bqOutputSchema" Then Enter runtime argument value "projectId" for key "gcsProjectId" Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" @@ -272,6 +202,7 @@ Feature: BigQuery source - Verification of BigQuery to GCS successful data trans Then Run the Pipeline in Runtime Then Enter runtime argument value "projectId" for key "bqProjectId" Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "filter" for key "bqFilter" Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType" Then Enter runtime argument value "serviceAccount" for key "serviceAccount" Then Enter runtime argument value "dataset" for key "bqDataset" diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java index 1dc05c7ef2..390fd78013 100644 --- a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java +++ b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java @@ -16,6 +16,7 @@ package io.cdap.plugin.bigquery.stepsdesign; import io.cdap.e2e.pages.actions.CdfBigQueryPropertiesActions; +import io.cdap.e2e.pages.actions.CdfPluginPropertiesActions; import io.cdap.e2e.pages.actions.CdfStudioActions; import 
 import io.cdap.e2e.pages.locators.CdfStudioLocators;
@@ -39,6 +40,7 @@
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.Optional;
+import java.util.Properties;
 import java.util.UUID;
 
 /**
@@ -94,18 +96,18 @@ public void getCountOfNoOfRecordsTransferredToTargetBigQueryTable() throws IOExc
     int countRecords = BigQueryClient.countBqQuery(TestSetupHooks.bqTargetTable);
     BeforeActions.scenario.write("**********No of Records Transferred******************:" + countRecords);
     Assert.assertEquals("Number of records transferred should be equal to records out ",
-                        countRecords, recordOut());
+      countRecords, recordOut());
   }
 
   @Then("Validate records transferred to target table is equal to number of records from source table " +
-          "with filter {string}")
+    "with filter {string}")
   public void validateRecordsTransferredToTargetTableIsEqualToNumberOfRecordsFromSourceTableWithFilter(String filter)
-          throws IOException, InterruptedException {
+    throws IOException, InterruptedException {
     String projectId = (PluginPropertyUtils.pluginProp("projectId"));
     String datasetName = (PluginPropertyUtils.pluginProp("dataset"));
     int countRecordsTarget = BigQueryClient.countBqQuery(TestSetupHooks.bqTargetTable);
     String selectQuery = "SELECT count(*) FROM `" + projectId + "." + datasetName + "." +
-            TestSetupHooks.bqTargetTable + "` WHERE " + PluginPropertyUtils.pluginProp(filter);
+      TestSetupHooks.bqTargetTable + "` WHERE " + PluginPropertyUtils.pluginProp(filter);
     Optional<String> result = BigQueryClient.getSoleQueryResult(selectQuery);
     int count = result.map(Integer::parseInt).orElse(0);
     BeforeActions.scenario.write("Number of records transferred with respect to filter:" + count);
@@ -114,13 +116,13 @@
 
   @Then("Validate partition date in output partitioned table")
   public void validatePartitionDateInOutputPartitionedTable()
-          throws IOException, InterruptedException {
+    throws IOException, InterruptedException {
     Optional<String> result = BigQueryClient
-            .getSoleQueryResult("SELECT distinct _PARTITIONDATE as pt FROM `" +
-                    (PluginPropertyUtils.pluginProp("projectId")) + "." +
-                    (PluginPropertyUtils.pluginProp("dataset")) + "." +
-                    TestSetupHooks.bqTargetTable +
-                    "` WHERE _PARTITION_LOAD_TIME IS Not NULL ORDER BY _PARTITIONDATE DESC ");
+      .getSoleQueryResult("SELECT distinct _PARTITIONDATE as pt FROM `" +
+        (PluginPropertyUtils.pluginProp("projectId")) + "." +
+        (PluginPropertyUtils.pluginProp("dataset")) + "." +
+        TestSetupHooks.bqTargetTable +
+        "` WHERE _PARTITION_LOAD_TIME IS Not NULL ORDER BY _PARTITIONDATE DESC ");
     String outputDate = StringUtils.EMPTY;
     if (result.isPresent()) {
       outputDate = result.get();
@@ -140,10 +142,10 @@ public void validateTheRecordsAreNotCreatedInOutputTable() throws IOException, I
   public void validatePartitioningIsNotDoneOnTheOutputTable() {
     try {
       BigQueryClient.getSoleQueryResult("SELECT distinct _PARTITIONDATE as pt FROM `" +
-              (PluginPropertyUtils.pluginProp("projectId"))
-              + "." + (PluginPropertyUtils.pluginProp("dataset")) + "." +
-              TestSetupHooks.bqTargetTable
-              + "` WHERE _PARTITION_LOAD_TIME IS Not NULL ");
+        (PluginPropertyUtils.pluginProp("projectId"))
+        + "." + (PluginPropertyUtils.pluginProp("dataset")) + "."
+        + TestSetupHooks.bqTargetTable
+        + "` WHERE _PARTITION_LOAD_TIME IS Not NULL ");
     } catch (Exception e) {
       String partitionException = e.toString();
       Assert.assertTrue(partitionException.contains("Unrecognized name: _PARTITION_LOAD_TIME"));
@@ -172,8 +174,8 @@ public void validateTheCmekKeyOfTargetBigQueryTableIfCmekIsEnabled(String cmek)
     String cmekBQ = PluginPropertyUtils.pluginProp(cmek);
     if (cmekBQ != null) {
       Assert.assertTrue("Cmek key of target BigQuery table should be equal to " +
-                          "cmek key provided in config file",
-                        BigQueryClient.verifyCmekKey(TestSetupHooks.bqTargetTable, cmekBQ));
+        "cmek key provided in config file",
+        BigQueryClient.verifyCmekKey(TestSetupHooks.bqTargetTable, cmekBQ));
       return;
     }
     BeforeActions.scenario.write("CMEK not enabled");
@@ -208,13 +210,13 @@ public void enterRuntimeArgumentValueForBigQueryCmekPropertyKeyIfBQCmekIsEnabled
 
   @Then("Verify the partition table is created with partitioned on field {string}")
   public void verifyThePartitionTableIsCreatedWithPartitionedOnField(String partitioningField) throws IOException,
-          InterruptedException {
+    InterruptedException {
     Optional<String> result = BigQueryClient
-            .getSoleQueryResult("SELECT IS_PARTITIONING_COLUMN FROM `" +
-                    (PluginPropertyUtils.pluginProp("projectId")) + "."
-                    + (PluginPropertyUtils.pluginProp("dataset")) + ".INFORMATION_SCHEMA.COLUMNS` " +
-                    "WHERE table_name = '" + TestSetupHooks.bqTargetTable
-                    + "' and column_name = '" + PluginPropertyUtils.pluginProp(partitioningField) + "' ");
+      .getSoleQueryResult("SELECT IS_PARTITIONING_COLUMN FROM `" +
+        (PluginPropertyUtils.pluginProp("projectId")) + "." +
+        (PluginPropertyUtils.pluginProp("dataset")) + ".INFORMATION_SCHEMA.COLUMNS` " +
+        "WHERE table_name = '" + TestSetupHooks.bqTargetTable +
+        "' and column_name = '" + PluginPropertyUtils.pluginProp(partitioningField) + "' ");
     String isPartitioningDoneOnField = StringUtils.EMPTY;
     if (result.isPresent()) {
       isPartitioningDoneOnField = result.get();
@@ -234,16 +236,16 @@
     String expectedErrorMessage;
     if (property.equalsIgnoreCase("gcsChunkSize")) {
       expectedErrorMessage = PluginPropertyUtils
-              .errorProp(E2ETestConstants.ERROR_MSG_BQ_INCORRECT_CHUNKSIZE);
+        .errorProp(E2ETestConstants.ERROR_MSG_BQ_INCORRECT_CHUNKSIZE);
     } else if (property.equalsIgnoreCase("bucket")) {
       expectedErrorMessage = PluginPropertyUtils
-              .errorProp(E2ETestConstants.ERROR_MSG_BQ_INCORRECT_TEMPORARY_BUCKET);
+        .errorProp(E2ETestConstants.ERROR_MSG_BQ_INCORRECT_TEMPORARY_BUCKET);
     } else if (property.equalsIgnoreCase("table")) {
       expectedErrorMessage = PluginPropertyUtils
-              .errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_TABLE_NAME);
+        .errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_TABLE_NAME);
     } else {
       expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_BQ_INCORRECT_PROPERTY).
- replaceAll("PROPERTY", property.substring(0, 1).toUpperCase() + property.substring(1)); + replaceAll("PROPERTY", property.substring(0, 1).toUpperCase() + property.substring(1)); } String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement(property).getText(); Assert.assertEquals(expectedErrorMessage, actualErrorMessage); @@ -254,134 +256,20 @@ public void verifyTheBigQueryValidationErrorMessageForInvalidProperty(String pro @Then("Validate records transferred to target table is equal to number of records from source table") public void validateRecordsTransferredToTargetTableIsEqualToNumberOfRecordsFromSourceTable() - throws IOException, InterruptedException { + throws IOException, InterruptedException { int countRecordsTarget = BigQueryClient.countBqQuery(TestSetupHooks.bqTargetTable); Optional result = BigQueryClient.getSoleQueryResult("SELECT count(*) FROM `" + - (PluginPropertyUtils.pluginProp("projectId")) - + "." + (PluginPropertyUtils.pluginProp - ("dataset")) + "." + TestSetupHooks.bqTargetTable + "` "); + (PluginPropertyUtils.pluginProp("projectId")) + + "." + (PluginPropertyUtils.pluginProp + ("dataset")) + "." + TestSetupHooks.bqTargetTable + "` "); int count = result.map(Integer::parseInt).orElse(0); BeforeActions.scenario.write("Number of records transferred from source table to target table:" + count); Assert.assertEquals(count, countRecordsTarget); } - @Then("Enter BigQuery source properties partitionFrom and partitionTo") - public void enterBigQuerySourcePropertiespartitionFromandpartitionTo() throws IOException { - CdfBigQueryPropertiesActions.enterPartitionStartDate(new SimpleDateFormat("dd-MM-yyyy").format(new Date())); - CdfBigQueryPropertiesActions.enterPartitionEndDate(new SimpleDateFormat("dd-MM-yyyy") - .format(DateUtils.addDays(new Date(), 1))); - } - @Then("Validate BigQuery source incorrect property error for Partition Start date {string} value {string}") - public void validateBigQuerySourceIncorrectErrorFor(String property, String value) { - CdfBigQueryPropertiesActions.getSchema(); - - - SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 5L); - String tableFullName = StringUtils.EMPTY; - if (property.equalsIgnoreCase("dataset")) { - tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + PluginPropertyUtils.pluginProp(value) - + "." + TestSetupHooks.bqSourceTable; - } else if (property.equalsIgnoreCase("table")) { - tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" - + PluginPropertyUtils.pluginProp("dataset") - + "." + PluginPropertyUtils.pluginProp(value); - } else if (property.equalsIgnoreCase("datasetProject")) { - tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset") - + "." + TestSetupHooks.bqSourceTable; - - } else if (property.equalsIgnoreCase("partitionFrom")) { - tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" - + PluginPropertyUtils.pluginProp("dataset") - + "." 
-    }
-
-    String expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_PARTITIONSTARTDATE)
-      .replaceAll("TABLENAME", tableFullName);
-    String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("partitionFrom").getText();
-    System.out.println(actualErrorMessage);
-    Assert.assertEquals("Error message mismatch for Partition Start Date", expectedErrorMessage, actualErrorMessage);
-    String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement
-      ("partitionFrom"));
-    String expectedColor = ConstantsUtil.ERROR_MSG_COLOR;
-    Assert.assertEquals(expectedColor, actualColor);
-  }
-
-  @Then("Validate BigQuery source incorrect property error for Partition End date {string} value {string}")
-  public void validateBigQuerySourceIncorrectPartitionenddateErrorFor(String property, String value) {
-    CdfBigQueryPropertiesActions.getSchema();
-    SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 5L);
-    String tableFullName = StringUtils.EMPTY;
-    if (property.equalsIgnoreCase("dataset")) {
-      tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + PluginPropertyUtils.pluginProp(value)
-        + "." + TestSetupHooks.bqSourceTable;
-    } else if (property.equalsIgnoreCase("table")) {
-      tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":"
-        + PluginPropertyUtils.pluginProp("dataset")
-        + "." + PluginPropertyUtils.pluginProp(value);
-    } else if (property.equalsIgnoreCase("datasetProjectId")) {
-      tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset")
-        + "." + TestSetupHooks.bqSourceTable;
-    } else if (property.equalsIgnoreCase("partitionEndDate")) {
-      tableFullName = PluginPropertyUtils.pluginProp(value) + ":"
-        + PluginPropertyUtils.pluginProp("partitionTo")
-        + "." + TestSetupHooks.bqSourceTable;
-    }
-
-    String expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_PARTITIONENDDATE)
-      .replaceAll("TABLENAME", tableFullName);
-    String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("partitionTo").getText();
-    System.out.println(actualErrorMessage);
-    Assert.assertEquals("Error message mismatch for Partition End Date", expectedErrorMessage, actualErrorMessage);
-    String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement("partitionTo"));
-    String expectedColor = ConstantsUtil.ERROR_MSG_COLOR;
-    Assert.assertEquals(expectedColor, actualColor);
-  }
-
-  @Then("Validate BigQuery source incorrect property error for reference name{string} value {string}")
-  public void validateBigQuerySourceIncorrectPropertyErrorForreferncename(String property, String value) {
-    CdfBigQueryPropertiesActions.getSchema();
-    SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 5L);
-    String tableFullName = StringUtils.EMPTY;
-    if (property.equalsIgnoreCase("dataset")) {
-      tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":" + PluginPropertyUtils.pluginProp(value)
-        + "." + TestSetupHooks.bqSourceTable;
-    } else if (property.equalsIgnoreCase("table")) {
-      tableFullName = PluginPropertyUtils.pluginProp("projectId") + ":"
-        + PluginPropertyUtils.pluginProp("dataset")
-        + "." + PluginPropertyUtils.pluginProp(value);
-    } else if (property.equalsIgnoreCase("datasetProject")) {
-      tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("dataset")
-        + "." + TestSetupHooks.bqSourceTable;
-    } else if (property.equalsIgnoreCase("referenceName")) {
-      tableFullName = PluginPropertyUtils.pluginProp(value) + ":" + PluginPropertyUtils.pluginProp("reference")
-        + "." + TestSetupHooks.bqSourceTable;
-    }
-    String expectedErrorMessage = PluginPropertyUtils.errorProp(E2ETestConstants.ERROR_MSG_INCORRECT_REFERENCENAME)
-      .replaceAll("TABLENAME", tableFullName);
-    String actualErrorMessage = PluginPropertyUtils.findPropertyErrorElement("referenceName").getText();
-
-    Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
-    String actualColor = PluginPropertyUtils.getErrorColor(PluginPropertyUtils.findPropertyErrorElement
-      ("referenceName"));
-    String expectedColor = ConstantsUtil.ERROR_MSG_COLOR;
-    Assert.assertEquals(expectedColor, actualColor);
-
-  }
-
   @Then("Enter BigQuery source properties filter")
   public void enterBigQuerysourcePropertiesfilter() throws IOException {
     CdfBigQueryPropertiesActions.enterFilter("%%%%");
   }
-  @Then("Enter BigQuery source property output schema {string} as macro argument {string}")
-  public void enterBigQueryPropertyAsMacroArgumentoutputschema(String pluginProperty, String macroArgument) {
-    SCHEMA_LOCATORS.schemaActions.click();
-    SCHEMA_LOCATORS.schemaActionType("macro").click();
-    WaitHelper.waitForElementToBeHidden(SCHEMA_LOCATORS.schemaActionType("macro"), 5);
-    try {
-      enterMacro(CdfPluginPropertyLocator.fromPropertyString(pluginProperty).pluginProperty, macroArgument);
-    } catch (NullPointerException e) {
-      Assert.fail("CDF_PLUGIN_PROPERTY_MAPPING for '" + pluginProperty + "' not present in CdfPluginPropertyLocator.");
-    }
-  }
 }

diff --git a/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java b/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java
index 8d3199b7d0..10a848a9bd 100644
--- a/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java
+++ b/src/e2e-test/java/io/cdap/plugin/utils/CdfPluginPropertyLocator.java
@@ -78,10 +78,10 @@ public enum CdfPluginPropertyLocator {
     .put("createFailIfObjectExists", CdfPluginPropertyLocator.GCS_CREATE_FAIL_IF_OBJECT_EXISTS)
     .put("gcsMoveSourcePath", CdfPluginPropertyLocator.GCS_MOVE_SOURCE_PATH)
     .put("gcsMoveDestinationPath", CdfPluginPropertyLocator.GCS_MOVE_DESTINATION_PATH)
-    .put("partitionFrom", CdfPluginPropertyLocator.PARTITION_START_DATE)
-    .put("partitionTo", CdfPluginPropertyLocator.PARTITION_END_DATE)
     .put("filter", CdfPluginPropertyLocator.FILTER)
     .put("Output Schema-macro-input", CdfPluginPropertyLocator.OUTPUT_SCHEMA)
+    .put("partitionFrom", CdfPluginPropertyLocator.PARTITION_START_DATE)
+    .put("partitionTo", CdfPluginPropertyLocator.PARTITION_END_DATE)
     .build();
 }

diff --git a/src/e2e-test/resources/errorMessage.properties b/src/e2e-test/resources/errorMessage.properties
index f474d65fe2..19050e28b4 100644
--- a/src/e2e-test/resources/errorMessage.properties
+++ b/src/e2e-test/resources/errorMessage.properties
@@ -33,7 +33,7 @@ errorMessageMultipleFileWithoutClearDefaultSchema=Found a row with 4 fields when
 errorMessageInvalidSourcePath=Invalid bucket name in path 'abc@'. Bucket name should
 errorMessageInvalidDestPath=Invalid bucket name in path 'abc@'. Bucket name should
 errorMessageInvalidEncryptionKey=CryptoKeyName.parse: formattedString not in valid format: Parameter "abc@" must be
-errorMessageIncorrectPartitionStartDate=16-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
-errorMessageIncorrectPartitionEndDate=17-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
+errorMessageIncorrectPartitionStartDate=17-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
+errorMessageIncorrectPartitionEndDate=18-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
 errorMessageIncorrectReferenceName=Invalid reference name 'invalidRef&^*&&*'. Supported characters are: letters, numbers, and '_', '-', '.', or '$'.
 errorLogsMessageInvalidFilter=Spark Program 'phase-1' failed.

diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties
index 694e628126..0b3825c1ae 100644
--- a/src/e2e-test/resources/pluginParameters.properties
+++ b/src/e2e-test/resources/pluginParameters.properties
@@ -1,5 +1,6 @@
 projectId=cdf-athena
-datasetprojectId=cdf-athena
+datasetprojectId=testbq_bqmt
+//cdf-athena
 dataset=testbq_bqmt
 wrongSourcePath=gs://00000000-e2e-0014a44f-81be-4501-8360-0ddca192492
 serviceAccountType=filePath
@@ -354,15 +355,17 @@ bqTargetTable=dummy
 bqTargetTable2=dummy
 bqmtTargetTable=tabA
 bqmtTargetTable2=tabB
-bqStartDate=2024-12-16
-bqEndDate=2024-12-17
-partitionFrom=2024-12-16
-partitionTo=2024-12-17
+bqStartDate=2024-12-17
+bqEndDate=2024-12-18
+partitionFrom=2024-12-17
+partitionTo=2024-12-18
 filter=Id=20
-bqIncorrectReferenceName=invalidRef&^*&&*
+bqInvalidReferenceName=invalidRef&^*&&*
 OutputSchema={ "type": "record", "name": "text", "fields": [{ "name": "Id", "type": "long" }, { "name": "Value", "type": "long" }, \
   { "name": "UID", "type": "string" } ] }
 incorrectFilter=%%%%
+bqIncorrectFormatStartDate=17-12-2024
+bqIncorrectFormatEndDate=18-12-2024
 ## BQMT-PLUGIN-PROPERTIES-END
 
 ##CLOUDBIGTABLE-PLUGIN-PROPERTIES-START

From 840465f9d65729017b2f30f89f8a040c4ea0c3d8 Mon Sep 17 00:00:00 2001
From: Praveena2607
Date: Thu, 2 Jan 2025 07:59:37 +0000
Subject: [PATCH 6/7] All the changes are made.Please check and let me know.

---
 pom.xml                                            |  2 +-
 src/e2e-test/resources/errorMessage.properties     |  4 ++--
 src/e2e-test/resources/pluginParameters.properties | 12 ++++++------
 3 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/pom.xml b/pom.xml
index a58dbcd995..f3b98ceb3d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -86,7 +86,7 @@
     2.3.0
     1.105.1
     3.19.4
-    1.3.0
+    1.3.0-rc3
     27.0.1-jre
     3.3.6
     1.4.13

diff --git a/src/e2e-test/resources/errorMessage.properties b/src/e2e-test/resources/errorMessage.properties
index 19050e28b4..900e80d22a 100644
--- a/src/e2e-test/resources/errorMessage.properties
+++ b/src/e2e-test/resources/errorMessage.properties
@@ -33,7 +33,7 @@ errorMessageMultipleFileWithoutClearDefaultSchema=Found a row with 4 fields when
 errorMessageInvalidSourcePath=Invalid bucket name in path 'abc@'. Bucket name should
 errorMessageInvalidDestPath=Invalid bucket name in path 'abc@'. Bucket name should
 errorMessageInvalidEncryptionKey=CryptoKeyName.parse: formattedString not in valid format: Parameter "abc@" must be
-errorMessageIncorrectPartitionStartDate=17-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
-errorMessageIncorrectPartitionEndDate=18-12-2024 is not in a valid format. Enter valid date in format: yyyy-MM-dd
+errorMessageIncorrectPartitionStartDate=02-01-2025 is not in a valid format. Enter valid date in format: yyyy-MM-dd
+errorMessageIncorrectPartitionEndDate=03-01-2025 is not in a valid format. Enter valid date in format: yyyy-MM-dd
 errorMessageIncorrectReferenceName=Invalid reference name 'invalidRef&^*&&*'. Supported characters are: letters, numbers, and '_', '-', '.', or '$'.
 errorLogsMessageInvalidFilter=Spark Program 'phase-1' failed.

diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties
index 0b3825c1ae..9855607ed9 100644
--- a/src/e2e-test/resources/pluginParameters.properties
+++ b/src/e2e-test/resources/pluginParameters.properties
@@ -355,17 +355,17 @@ bqTargetTable=dummy
 bqTargetTable2=dummy
 bqmtTargetTable=tabA
 bqmtTargetTable2=tabB
-bqStartDate=2024-12-17
-bqEndDate=2024-12-18
-partitionFrom=2024-12-17
-partitionTo=2024-12-18
+bqStartDate=2025-01-02
+bqEndDate=2025-01-03
+partitionFrom=2025-01-02
+partitionTo=2025-01-03
 filter=Id=20
 bqInvalidReferenceName=invalidRef&^*&&*
 OutputSchema={ "type": "record", "name": "text", "fields": [{ "name": "Id", "type": "long" }, { "name": "Value", "type": "long" }, \
   { "name": "UID", "type": "string" } ] }
 incorrectFilter=%%%%
-bqIncorrectFormatStartDate=17-12-2024
-bqIncorrectFormatEndDate=18-12-2024
+bqIncorrectFormatStartDate=02-01-2025
+bqIncorrectFormatEndDate=03-01-2025
 ## BQMT-PLUGIN-PROPERTIES-END
 
 ##CLOUDBIGTABLE-PLUGIN-PROPERTIES-START

From 6aeccd485b7fad83db33c7f3832bec27bb8a804b Mon Sep 17 00:00:00 2001
From: Praveena2607
Date: Thu, 2 Jan 2025 08:40:46 +0000
Subject: [PATCH 7/7] I have now removed the unnecessary imports that are not in use.

---
 .../io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java
index 390fd78013..f12f6ff15c 100644
--- a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java
+++ b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQueryBase.java
@@ -16,23 +16,18 @@
 package io.cdap.plugin.bigquery.stepsdesign;
 
 import io.cdap.e2e.pages.actions.CdfBigQueryPropertiesActions;
-import io.cdap.e2e.pages.actions.CdfPluginPropertiesActions;
 import io.cdap.e2e.pages.actions.CdfStudioActions;
-import io.cdap.e2e.pages.locators.CdfBigQueryPropertiesLocators;
 import io.cdap.e2e.pages.locators.CdfStudioLocators;
 import io.cdap.e2e.utils.BigQueryClient;
 import io.cdap.e2e.utils.ConstantsUtil;
 import io.cdap.e2e.utils.ElementHelper;
 import io.cdap.e2e.utils.PluginPropertyUtils;
 import io.cdap.e2e.utils.SeleniumHelper;
-import io.cdap.e2e.utils.WaitHelper;
 import io.cdap.plugin.common.stepsdesign.TestSetupHooks;
-import io.cdap.plugin.utils.CdfPluginPropertyLocator;
 import io.cdap.plugin.utils.E2EHelper;
 import io.cdap.plugin.utils.E2ETestConstants;
 import io.cucumber.java.en.Then;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.time.DateUtils;
 import org.junit.Assert;
 import stepsdesign.BeforeActions;
 
@@ -40,7 +35,6 @@
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.Optional;
-import java.util.Properties;
 import java.util.UUID;
 
 /**