diff --git a/clients/venice-push-job/build.gradle b/clients/venice-push-job/build.gradle
index 1c8b4f98e6d..a03f0844b17 100644
--- a/clients/venice-push-job/build.gradle
+++ b/clients/venice-push-job/build.gradle
@@ -8,6 +8,8 @@ dependencies {
     exclude module: ':internal:alpini'
   }
 
+  implementation project(':internal:venice-hadoop-common')
+
   implementation (libraries.avro) {
     exclude group: 'org.mortbay.jetty' // jetty 6 conflicts with spark-java used in controller api
   }
diff --git a/internal/venice-common/build.gradle b/internal/venice-common/build.gradle
index b6478e9dddc..ee00f5b1a2c 100644
--- a/internal/venice-common/build.gradle
+++ b/internal/venice-common/build.gradle
@@ -30,12 +30,6 @@ dependencies {
     exclude group: 'com.linkedin.container', module: 'container-eventbus-factory' // Keeping' it clean.
   }
 
-  implementation (libraries.hadoopCommon) {
-    // Exclude transitive dependency
-    exclude group: 'org.apache.avro'
-    exclude group: 'javax.servlet'
-  }
-
   implementation project(':clients:venice-thin-client')
 
   implementation libraries.avroUtilCompatHelper
diff --git a/internal/venice-hadoop-common/build.gradle b/internal/venice-hadoop-common/build.gradle
new file mode 100644
index 00000000000..76598c157b6
--- /dev/null
+++ b/internal/venice-hadoop-common/build.gradle
@@ -0,0 +1,16 @@
+dependencies {
+  implementation project(':internal:venice-common')
+  implementation libraries.log4j2api
+  implementation (libraries.hadoopCommon) {
+    // Exclude transitive dependency
+    exclude group: 'org.apache.avro'
+    exclude group: 'javax.servlet'
+  }
+
+  testImplementation platform('org.junit:junit-bom:5.9.1')
+  testImplementation 'org.junit.jupiter:junit-jupiter'
+}
+
+ext {
+  jacocoCoverageThreshold = 0.33
+}
diff --git a/internal/venice-common/src/main/java/com/linkedin/venice/hadoop/schema/HDFSSchemaSource.java b/internal/venice-hadoop-common/src/main/java/com/linkedin/venice/hadoop/schema/HDFSSchemaSource.java
similarity index 100%
rename from internal/venice-common/src/main/java/com/linkedin/venice/hadoop/schema/HDFSSchemaSource.java
rename to internal/venice-hadoop-common/src/main/java/com/linkedin/venice/hadoop/schema/HDFSSchemaSource.java
diff --git a/internal/venice-common/src/main/java/com/linkedin/venice/hadoop/schema/SchemaSource.java b/internal/venice-hadoop-common/src/main/java/com/linkedin/venice/hadoop/schema/SchemaSource.java
similarity index 100%
rename from internal/venice-common/src/main/java/com/linkedin/venice/hadoop/schema/SchemaSource.java
rename to internal/venice-hadoop-common/src/main/java/com/linkedin/venice/hadoop/schema/SchemaSource.java
diff --git a/internal/venice-common/src/test/java/com/linkedin/venice/hadoop/schema/TestHDFSSchemaSource.java b/internal/venice-hadoop-common/src/test/java/com/linkedin/venice/hadoop/schema/TestHDFSSchemaSource.java
similarity index 91%
rename from internal/venice-common/src/test/java/com/linkedin/venice/hadoop/schema/TestHDFSSchemaSource.java
rename to internal/venice-hadoop-common/src/test/java/com/linkedin/venice/hadoop/schema/TestHDFSSchemaSource.java
index e4c3a117caf..17700d68004 100644
--- a/internal/venice-common/src/test/java/com/linkedin/venice/hadoop/schema/TestHDFSSchemaSource.java
+++ b/internal/venice-hadoop-common/src/test/java/com/linkedin/venice/hadoop/schema/TestHDFSSchemaSource.java
@@ -1,8 +1,6 @@
 package com.linkedin.venice.hadoop.schema;
 
 import static com.linkedin.venice.utils.TestWriteUtils.getTempDataDirectory;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 import com.linkedin.davinci.schema.SchemaUtils;
@@ -16,6 +14,7 @@
 import java.util.Map;
 import org.apache.avro.Schema;
 import org.apache.hadoop.fs.Path;
+import org.mockito.Mockito;
 import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
@@ -45,7 +44,7 @@ public void setUp() throws IOException {
     Path rmdInputDir = new Path(getTempDataDirectory().getAbsolutePath());
     Path valueInputDir = new Path(getTempDataDirectory().getAbsolutePath());
     Path keyInputDir = new Path(getTempDataDirectory().getAbsolutePath());
-    client = mock(ControllerClient.class);
+    client = Mockito.mock(ControllerClient.class);
     MultiSchemaResponse.Schema[] rmdSchemas = generateRmdSchemas(numOfSchemas);
     MultiSchemaResponse rmdSchemaResponse = new MultiSchemaResponse();
     rmdSchemaResponse.setSchemas(rmdSchemas);
@@ -58,9 +57,9 @@ public void setUp() throws IOException {
     keySchemaResponse.setSchemaStr(KEY_SCHEMA_STR);
     keySchemaResponse.setId(1);
 
-    doReturn(rmdSchemaResponse).when(client).getAllReplicationMetadataSchemas(TEST_STORE);
-    doReturn(valueSchemaResponse).when(client).getAllValueSchema(TEST_STORE);
-    doReturn(keySchemaResponse).when(client).getKeySchema(TEST_STORE);
+    Mockito.doReturn(rmdSchemaResponse).when(client).getAllReplicationMetadataSchemas(TEST_STORE);
+    Mockito.doReturn(valueSchemaResponse).when(client).getAllValueSchema(TEST_STORE);
+    Mockito.doReturn(keySchemaResponse).when(client).getKeySchema(TEST_STORE);
 
     source = new HDFSSchemaSource(valueInputDir, rmdInputDir, keyInputDir, TEST_STORE);
     source.saveSchemasOnDisk(client);
@@ -94,7 +93,7 @@ public void testLoadKeySchemaThenFetch() throws IOException {
 
   @Test(expectedExceptions = IllegalStateException.class)
   public void testSaveKeySchemaThrowsExceptionWithInvalidResponse() throws IOException {
-    SchemaResponse mockResponse = mock(SchemaResponse.class);
+    SchemaResponse mockResponse = Mockito.mock(SchemaResponse.class);
     when(mockResponse.isError()).thenReturn(true);
     source.saveKeySchemaToDisk(mockResponse);
diff --git a/settings.gradle b/settings.gradle
index b5b0820ec39..88dba38627a 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -54,6 +54,7 @@ include 'services:venice-standalone'
 include 'internal:venice-avro-compatibility-test'
 include 'internal:venice-client-common'
 include 'internal:venice-common'
+include 'internal:venice-hadoop-common'
 include 'internal:venice-jdk-compatibility-test'
 include 'internal:venice-test-common'