Commit 90f4631
[test] Clean up integration test schema setup (#633)
This PR does NOT change any logic in the main code. It is intended as a first step toward refactoring and cleaning up the test code, making it easier to add integration tests and unit tests in the future.
The changes in this PR include:
(1) Replace loosely structured schema strings in test classes with .avsc files.
(2) Create a simple input file schema builder to construct Avro input schemas (an illustrative sketch follows the changed-file summary below).
(3) Clean up some data generator methods to make them easier to use (an illustrative sketch follows the Zstd dictionary test diff below).
Note for reviewers:
The new changes are in:
(1) PushInputSchemaBuilder.java (new simple test schema builder)
(2) TestWriteUtils.java (parses schemas from .avsc files and reshapes some data generation methods; an illustrative loading sketch follows the TestVeniceAvroRecordReader diff below)
(3) Some schema strings moved into the venice-test-common/src/integrationTest/resources folder.
sixpluszero authored Sep 18, 2023
1 parent 5684066 commit 90f4631
Showing 70 changed files with 909 additions and 828 deletions.
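As an aside for change (2): the sketch below illustrates the general idea of building the push-job input file schema programmatically instead of hand-writing a schema string per test. ToyPushInputSchemaBuilder is a stand-in written for this illustration; the method names and shape of the real PushInputSchemaBuilder added by this PR may differ. The "key"/"value" field names mirror the KEY_FIELD_PROP/VALUE_FIELD_PROP values visible in the diffs below.

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

// Stand-in for illustration only; the real PushInputSchemaBuilder may differ.
final class ToyPushInputSchemaBuilder {
  private Schema keySchema;
  private Schema valueSchema;

  ToyPushInputSchemaBuilder setKeySchema(Schema keySchema) {
    this.keySchema = keySchema;
    return this;
  }

  ToyPushInputSchemaBuilder setValueSchema(Schema valueSchema) {
    this.valueSchema = valueSchema;
    return this;
  }

  // Wrap the key/value schemas into the record layout push-job input files use.
  Schema build() {
    return SchemaBuilder.record("PushInput")
        .fields()
        .name("key").type(keySchema).noDefault()
        .name("value").type(valueSchema).noDefault()
        .endRecord();
  }
}

public class PushInputSchemaBuilderSketch {
  public static void main(String[] args) {
    Schema fileSchema = new ToyPushInputSchemaBuilder()
        .setKeySchema(Schema.create(Schema.Type.STRING))
        .setValueSchema(
            SchemaBuilder.record("NameRecord")
                .fields()
                .requiredString("firstName")
                .requiredString("lastName")
                .endRecord())
        .build();
    System.out.println(fileSchema.toString(true));
  }
}

Deriving the file schema from real Schema objects this way keeps key and value schemas reusable across tests and avoids drift between duplicated copies of the same schema string.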
========================================================================
@@ -70,7 +70,7 @@ protected Configuration getDefaultJobConfiguration() {
     // Add extra configuration for this mapper
     File inputDir = Utils.getTempDataDirectory();
     try {
-      TestWriteUtils.writeSimpleAvroFileWithUserSchema(inputDir, true, 100);
+      TestWriteUtils.writeSimpleAvroFileWithStringToStringSchema(inputDir);
     } catch (IOException e) {
       throw new RuntimeException(e);
     }
========================================================================
@@ -61,7 +61,7 @@ public void testGetWithNoFile() throws Exception {
   public void testGetWithEmptyFile() throws Exception {
     File inputDir = Utils.getTempDataDirectory();
     String avroOutputFile = "empty_file.avro";
-    TestWriteUtils.writeEmptyAvroFileWithUserSchema(inputDir, avroOutputFile, fileSchema.toString());
+    TestWriteUtils.writeEmptyAvroFile(inputDir, avroOutputFile, fileSchema);
     ValidateSchemaAndBuildDictMapperOutputReader reader =
         new ValidateSchemaAndBuildDictMapperOutputReader(inputDir.getAbsolutePath(), avroOutputFile);
     reader.close();
@@ -94,7 +94,7 @@ public void testGetWithInvalidInputFileDataSize() throws Exception {
         avroOutputFile,
         0,
         ByteBuffer.wrap("TestDictionary".getBytes()),
-        fileSchema.toString());
+        fileSchema);
     ValidateSchemaAndBuildDictMapperOutputReader reader =
         new ValidateSchemaAndBuildDictMapperOutputReader(inputDir.getAbsolutePath(), avroOutputFile);
     reader.close();
@@ -109,7 +109,7 @@ public void testGetWithValidInputFileDataSize() throws Exception {
         avroOutputFile,
         1,
         ByteBuffer.wrap("TestDictionary".getBytes()),
-        fileSchema.toString());
+        fileSchema);
     ValidateSchemaAndBuildDictMapperOutputReader reader =
         new ValidateSchemaAndBuildDictMapperOutputReader(inputDir.getAbsolutePath(), avroOutputFile);
     ValidateSchemaAndBuildDictMapperOutput output = reader.getOutput();
@@ -128,12 +128,8 @@ public void testGetWithValidInputFileDataSize() throws Exception {
   public void testGetWithNoZstdDictionary() throws Exception {
     File inputDir = Utils.getTempDataDirectory();
     String avroOutputFile = "valid_file.avro";
-    TestWriteUtils.writeSimpleAvroFileForValidateSchemaAndBuildDictMapperOutput(
-        inputDir,
-        avroOutputFile,
-        1,
-        null,
-        fileSchema.toString());
+    TestWriteUtils
+        .writeSimpleAvroFileForValidateSchemaAndBuildDictMapperOutput(inputDir, avroOutputFile, 1, null, fileSchema);
     ValidateSchemaAndBuildDictMapperOutputReader reader =
         new ValidateSchemaAndBuildDictMapperOutputReader(inputDir.getAbsolutePath(), avroOutputFile);
     ValidateSchemaAndBuildDictMapperOutput output = reader.getOutput();
========================================================================
@@ -6,12 +6,11 @@
 import static com.linkedin.venice.hadoop.VenicePushJob.TOPIC_PROP;
 import static com.linkedin.venice.hadoop.VenicePushJob.UPDATE_SCHEMA_STRING_PROP;
 import static com.linkedin.venice.hadoop.VenicePushJob.VALUE_FIELD_PROP;
-import static com.linkedin.venice.utils.TestWriteUtils.NESTED_SCHEMA_STRING;
-import static com.linkedin.venice.utils.TestWriteUtils.NESTED_SCHEMA_STRING_V2;
-import static com.linkedin.venice.utils.TestWriteUtils.STRING_RECORD_SCHEMA_STRING;
+import static com.linkedin.venice.utils.TestWriteUtils.NAME_RECORD_V2_SCHEMA;
+import static com.linkedin.venice.utils.TestWriteUtils.STRING_TO_NAME_RECORD_V1_SCHEMA;
 
-import com.linkedin.venice.schema.AvroSchemaParseUtils;
 import com.linkedin.venice.schema.writecompute.WriteComputeSchemaConverter;
+import com.linkedin.venice.utils.TestWriteUtils;
 import com.linkedin.venice.utils.VeniceProperties;
 import java.util.Properties;
 import org.apache.avro.Schema;
@@ -27,22 +26,20 @@
 public class TestVeniceAvroRecordReader {
   @Test
   public void testGeneratePartialUpdate() {
-    Schema updateSchema = WriteComputeSchemaConverter.getInstance().convert(NESTED_SCHEMA_STRING_V2);
+    Schema updateSchema = WriteComputeSchemaConverter.getInstance().convertFromValueRecordSchema(NAME_RECORD_V2_SCHEMA);
     Properties properties = new Properties();
     properties.put(TOPIC_PROP, "test_store_rt");
-    properties.put(SCHEMA_STRING_PROP, STRING_RECORD_SCHEMA_STRING);
+    properties.put(SCHEMA_STRING_PROP, STRING_TO_NAME_RECORD_V1_SCHEMA.toString());
     properties.put(GENERATE_PARTIAL_UPDATE_RECORD_FROM_INPUT, true);
     properties.put(UPDATE_SCHEMA_STRING_PROP, updateSchema);
     properties.put(KEY_FIELD_PROP, "key");
     properties.put(VALUE_FIELD_PROP, "value");
     VeniceProperties veniceProperties = new VeniceProperties(properties);
     VeniceAvroRecordReader recordReader = new VeniceAvroRecordReader(veniceProperties);
 
-    GenericRecord record =
-        new GenericData.Record(AvroSchemaParseUtils.parseSchemaFromJSONLooseValidation(STRING_RECORD_SCHEMA_STRING));
+    GenericRecord record = new GenericData.Record(STRING_TO_NAME_RECORD_V1_SCHEMA);
     record.put("key", "123");
-    GenericRecord valueRecord =
-        new GenericData.Record(AvroSchemaParseUtils.parseSchemaFromJSONLooseValidation(NESTED_SCHEMA_STRING));
+    GenericRecord valueRecord = new GenericData.Record(TestWriteUtils.NAME_RECORD_V1_SCHEMA);
     valueRecord.put("firstName", "FN");
     valueRecord.put("lastName", "LN");
     record.put("value", valueRecord);
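The NAME_RECORD_V1_SCHEMA-style constants above replace inline schema strings with Schema objects parsed from .avsc resource files (change (1)). A minimal sketch of that pattern follows; the class name, helper name, and resource path are illustrative assumptions, not the actual TestWriteUtils code.

import java.io.IOException;
import java.io.InputStream;
import org.apache.avro.Schema;

public final class SchemaResourceLoaderSketch {
  // Parse an .avsc file bundled under src/integrationTest/resources into a Schema.
  public static Schema loadSchemaFromResource(String resourceName) {
    try (InputStream in =
        SchemaResourceLoaderSketch.class.getClassLoader().getResourceAsStream(resourceName)) {
      if (in == null) {
        throw new IllegalArgumentException("Schema resource not found: " + resourceName);
      }
      return new Schema.Parser().parse(in);
    } catch (IOException e) {
      throw new RuntimeException("Failed to parse schema resource: " + resourceName, e);
    }
  }

  // Hypothetical constant in the style used by the diff above; the resource path is invented.
  public static final Schema NAME_RECORD_V1_SCHEMA = loadSchemaFromResource("avro/NameRecordV1.avsc");
}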
========================================================================
@@ -17,8 +17,8 @@
 import static com.linkedin.venice.hadoop.VenicePushJob.VENICE_DISCOVER_URL_PROP;
 import static com.linkedin.venice.hadoop.VenicePushJob.VENICE_STORE_NAME_PROP;
 import static com.linkedin.venice.status.BatchJobHeartbeatConfigs.HEARTBEAT_ENABLED_CONFIG;
-import static com.linkedin.venice.utils.TestWriteUtils.NESTED_SCHEMA_STRING;
-import static com.linkedin.venice.utils.TestWriteUtils.UPDATE_SCHEMA_OF_NESTED_SCHEMA_STRING;
+import static com.linkedin.venice.utils.TestWriteUtils.NAME_RECORD_V1_SCHEMA;
+import static com.linkedin.venice.utils.TestWriteUtils.NAME_RECORD_V1_UPDATE_SCHEMA;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyBoolean;
 import static org.mockito.ArgumentMatchers.anyInt;
@@ -101,8 +101,8 @@ public class VenicePushJobTest {
   public void testVPJcheckInputUpdateSchema() {
     VenicePushJob vpj = mock(VenicePushJob.class);
     when(vpj.isUpdateSchema(anyString())).thenCallRealMethod();
-    Assert.assertTrue(vpj.isUpdateSchema(UPDATE_SCHEMA_OF_NESTED_SCHEMA_STRING));
-    Assert.assertFalse(vpj.isUpdateSchema(NESTED_SCHEMA_STRING));
+    Assert.assertTrue(vpj.isUpdateSchema(NAME_RECORD_V1_UPDATE_SCHEMA.toString()));
+    Assert.assertFalse(vpj.isUpdateSchema(NAME_RECORD_V1_SCHEMA.toString()));
   }
 
   @Test(expectedExceptions = VeniceException.class, expectedExceptionsMessageRegExp = ".*Repush with TTL is only supported while using Kafka Input Format.*")
========================================================================
@@ -5,14 +5,14 @@
 import static com.linkedin.venice.hadoop.DefaultInputDataInfoProvider.PATH_FILTER;
 import static com.linkedin.venice.utils.ByteUtils.BYTES_PER_KB;
 import static com.linkedin.venice.utils.ByteUtils.BYTES_PER_MB;
+import static com.linkedin.venice.utils.TestWriteUtils.writeSimpleAvroFileWithStringToStringSchema;
 
 import com.github.luben.zstd.ZstdDictTrainer;
 import com.github.luben.zstd.ZstdException;
 import com.linkedin.venice.etl.ETLValueSchemaTransformation;
 import com.linkedin.venice.hadoop.InputDataInfoProvider;
 import com.linkedin.venice.hadoop.PushJobZstdConfig;
 import com.linkedin.venice.hadoop.VeniceAvroRecordReader;
-import com.linkedin.venice.utils.TestWriteUtils;
 import com.linkedin.venice.utils.Utils;
 import com.linkedin.venice.utils.VeniceProperties;
 import java.io.File;
@@ -36,7 +36,9 @@ private void runTest(int numOfFiles, int numOfRecordsPerFile, int dictSizeLimitI
     FileSystem fs = FileSystem.get(new Configuration());
     File inputDir = Utils.getTempDataDirectory();
     try {
-      TestWriteUtils.writeMultipleAvroFilesWithUserSchema(inputDir, numOfFiles, numOfRecordsPerFile);
+      for (int i = 0; i < numOfFiles; i++) {
+        writeSimpleAvroFileWithStringToStringSchema(inputDir, numOfRecordsPerFile, "testInput" + i + ".avro");
+      }
       Properties props = new Properties();
       props.setProperty(COMPRESSION_DICTIONARY_SIZE_LIMIT, String.valueOf(dictSizeLimitInKB * BYTES_PER_KB));
       props.setProperty(COMPRESSION_DICTIONARY_SAMPLE_SIZE, String.valueOf(dictSampleSizeLimitInMB * BYTES_PER_MB));
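The loop above shows the reshaped generator taking a target directory, a record count, and an output file name (change (3)). Below is a runnable approximation of such a generator using the standard Avro file-writing API; the "key"/"value" record layout matches the call sites in this diff, while the method body and generated values are assumptions.

import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

public final class SimpleAvroFileWriterSketch {
  // String-to-string record layout matching the "key"/"value" fields used by the tests.
  private static final Schema STRING_TO_STRING_SCHEMA = SchemaBuilder.record("StringToString")
      .fields()
      .requiredString("key")
      .requiredString("value")
      .endRecord();

  // Writes recordCount key/value records into parentDir/fileName and returns the file schema.
  public static Schema writeSimpleAvroFileWithStringToStringSchema(File parentDir, int recordCount, String fileName)
      throws IOException {
    try (DataFileWriter<GenericRecord> writer =
        new DataFileWriter<>(new GenericDatumWriter<>(STRING_TO_STRING_SCHEMA))) {
      writer.create(STRING_TO_STRING_SCHEMA, new File(parentDir, fileName));
      for (int i = 0; i < recordCount; i++) {
        GenericRecord record = new GenericData.Record(STRING_TO_STRING_SCHEMA);
        record.put("key", Integer.toString(i));
        record.put("value", "value_" + i); // Generated values are placeholders.
        writer.append(record);
      }
    }
    return STRING_TO_STRING_SCHEMA;
  }
}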
========================================================================
@@ -1,9 +1,9 @@
 package com.linkedin.venice.schema;
 
-import static com.linkedin.venice.utils.TestWriteUtils.NESTED_SCHEMA_STRING;
-import static com.linkedin.venice.utils.TestWriteUtils.NESTED_SCHEMA_STRING_V2;
-import static com.linkedin.venice.utils.TestWriteUtils.NESTED_SCHEMA_STRING_V3;
-import static com.linkedin.venice.utils.TestWriteUtils.NESTED_SCHEMA_STRING_V4;
+import static com.linkedin.venice.utils.TestWriteUtils.NAME_RECORD_V1_SCHEMA;
+import static com.linkedin.venice.utils.TestWriteUtils.NAME_RECORD_V2_SCHEMA;
+import static com.linkedin.venice.utils.TestWriteUtils.NAME_RECORD_V3_SCHEMA;
+import static com.linkedin.venice.utils.TestWriteUtils.NAME_RECORD_V4_SCHEMA;
 
 import com.linkedin.avroutil1.compatibility.AvroCompatibilityHelper;
 import com.linkedin.venice.controllerapi.MultiSchemaResponse;
@@ -459,10 +459,14 @@ public void testGetLatestUpdateSchemaFromSchemaResponse() {
 
   @Test
   public void testValidateSubsetSchema() {
-    Assert.assertTrue(AvroSupersetSchemaUtils.validateSubsetValueSchema(NESTED_SCHEMA_STRING, NESTED_SCHEMA_STRING_V2));
+    Assert.assertTrue(
+        AvroSupersetSchemaUtils
+            .validateSubsetValueSchema(NAME_RECORD_V1_SCHEMA.toString(), NAME_RECORD_V2_SCHEMA.toString()));
     Assert.assertFalse(
-        AvroSupersetSchemaUtils.validateSubsetValueSchema(NESTED_SCHEMA_STRING_V2, NESTED_SCHEMA_STRING_V3));
+        AvroSupersetSchemaUtils
+            .validateSubsetValueSchema(NAME_RECORD_V2_SCHEMA.toString(), NAME_RECORD_V3_SCHEMA.toString()));
     Assert.assertFalse(
-        AvroSupersetSchemaUtils.validateSubsetValueSchema(NESTED_SCHEMA_STRING_V3, NESTED_SCHEMA_STRING_V4));
+        AvroSupersetSchemaUtils
+            .validateSubsetValueSchema(NAME_RECORD_V3_SCHEMA.toString(), NAME_RECORD_V4_SCHEMA.toString()));
   }
 }
========================================================================
@@ -1,8 +1,8 @@
 package com.linkedin.venice.etl;
 
-import static com.linkedin.venice.utils.TestWriteUtils.ETL_UNION_VALUE_SCHEMA_STRING_WITHOUT_NULL;
-import static com.linkedin.venice.utils.TestWriteUtils.ETL_UNION_VALUE_SCHEMA_STRING_WITH_NULL;
-import static com.linkedin.venice.utils.TestWriteUtils.ETL_VALUE_SCHEMA_STRING;
+import static com.linkedin.venice.utils.TestWriteUtils.ETL_UNION_VALUE_WITHOUT_NULL_SCHEMA;
+import static com.linkedin.venice.utils.TestWriteUtils.ETL_UNION_VALUE_WITH_NULL_SCHEMA;
+import static com.linkedin.venice.utils.TestWriteUtils.ETL_VALUE_SCHEMA;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -14,7 +14,7 @@
 public class ETLUtilsTest {
   @Test
   public void testTransformValueSchemaForETLForRecordSchema() {
-    Schema schema = Schema.parse(ETL_VALUE_SCHEMA_STRING);
+    Schema schema = ETL_VALUE_SCHEMA;
     Schema etlValueSchema = ETLUtils.transformValueSchemaForETL(schema);
 
     Assert.assertEquals(Schema.Type.UNION, etlValueSchema.getType());
@@ -26,7 +26,7 @@ public void testTransformValueSchemaForETLForRecordSchema() {
 
   @Test
   public void testTransformValueSchemaForETLForUnionSchemaWithoutNullField() {
-    Schema schema = Schema.parse(ETL_UNION_VALUE_SCHEMA_STRING_WITHOUT_NULL);
+    Schema schema = ETL_UNION_VALUE_WITHOUT_NULL_SCHEMA;
     Schema etlValueSchema = ETLUtils.transformValueSchemaForETL(schema);
 
     Assert.assertEquals(Schema.Type.UNION, etlValueSchema.getType());
@@ -45,7 +45,7 @@ public void testTransformValueSchemaForETLForUnionSchemaWithoutNullField() {
 
   @Test
   public void testTransformValueSchemaForETLForUnionSchemaWithNullField() {
-    Schema schema = Schema.parse(ETL_UNION_VALUE_SCHEMA_STRING_WITH_NULL);
+    Schema schema = ETL_UNION_VALUE_WITH_NULL_SCHEMA;
     Schema etlValueSchema = ETLUtils.transformValueSchemaForETL(schema);
 
     Assert.assertEquals(Schema.Type.UNION, etlValueSchema.getType());
@@ -58,7 +58,7 @@ public void testTransformValueSchemaForETLForUnionSchemaWithNullField() {
 
   @Test
   public void testGetValueSchemaFromETLValueSchemaForRecordTypes() {
-    Schema valueSchema = Schema.parse(ETL_VALUE_SCHEMA_STRING);
+    Schema valueSchema = ETL_VALUE_SCHEMA;
     Schema etlValueSchema = ETLUtils.transformValueSchemaForETL(valueSchema);
 
     Schema inferredValueSchema =
@@ -69,7 +69,7 @@ public void testGetValueSchemaFromETLValueSchemaForRecordTypes() {
 
   @Test
   public void testGetValueSchemaFromETLValueSchemaForUnionTypesWithoutNull() {
-    Schema valueSchema = Schema.parse(ETL_UNION_VALUE_SCHEMA_STRING_WITHOUT_NULL);
+    Schema valueSchema = ETL_UNION_VALUE_WITHOUT_NULL_SCHEMA;
     Schema etlValueSchema = ETLUtils.transformValueSchemaForETL(valueSchema);
 
     Schema inferredValueSchema =
@@ -80,7 +80,7 @@ public void testGetValueSchemaFromETLValueSchemaForUnionTypesWithoutNull() {
 
   @Test
   public void testGetValueSchemaFromETLValueSchemaForUnionTypesWithNull() {
-    Schema valueSchema = Schema.parse(ETL_UNION_VALUE_SCHEMA_STRING_WITH_NULL);
+    Schema valueSchema = ETL_UNION_VALUE_WITH_NULL_SCHEMA;
     Schema etlValueSchema = ETLUtils.transformValueSchemaForETL(valueSchema);
 
     Schema inferredValueSchema =
========================================================================
@@ -1,33 +1,31 @@
 package com.linkedin.venice.etl;
 
-import static com.linkedin.venice.utils.TestWriteUtils.ETL_UNION_VALUE_SCHEMA_STRING_WITHOUT_NULL;
-import static com.linkedin.venice.utils.TestWriteUtils.ETL_UNION_VALUE_SCHEMA_STRING_WITH_NULL;
-import static com.linkedin.venice.utils.TestWriteUtils.ETL_VALUE_SCHEMA_STRING;
+import static com.linkedin.venice.utils.TestWriteUtils.ETL_UNION_VALUE_WITHOUT_NULL_SCHEMA;
+import static com.linkedin.venice.utils.TestWriteUtils.ETL_UNION_VALUE_WITH_NULL_SCHEMA;
+import static com.linkedin.venice.utils.TestWriteUtils.ETL_VALUE_SCHEMA;
 
-import org.apache.avro.Schema;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
 
 public class ETLValueSchemaTransformationTest {
   @Test
   public void testRecordSchemaBecomesUnionWithNull() {
-    Schema valueSchema = Schema.parse(ETL_VALUE_SCHEMA_STRING);
-    ETLValueSchemaTransformation transformation = ETLValueSchemaTransformation.fromSchema(valueSchema);
+    ETLValueSchemaTransformation transformation = ETLValueSchemaTransformation.fromSchema(ETL_VALUE_SCHEMA);
     Assert.assertEquals(transformation, ETLValueSchemaTransformation.UNIONIZE_WITH_NULL);
   }
 
   @Test
   public void testUnionSchemaWithoutNullAddsNull() {
-    Schema valueSchema = Schema.parse(ETL_UNION_VALUE_SCHEMA_STRING_WITHOUT_NULL);
-    ETLValueSchemaTransformation transformation = ETLValueSchemaTransformation.fromSchema(valueSchema);
+    ETLValueSchemaTransformation transformation =
+        ETLValueSchemaTransformation.fromSchema(ETL_UNION_VALUE_WITHOUT_NULL_SCHEMA);
     Assert.assertEquals(transformation, ETLValueSchemaTransformation.ADD_NULL_TO_UNION);
   }
 
   @Test
   public void testUnionSchemaWithNullStaysUnchanged() {
-    Schema valueSchema = Schema.parse(ETL_UNION_VALUE_SCHEMA_STRING_WITH_NULL);
-    ETLValueSchemaTransformation transformation = ETLValueSchemaTransformation.fromSchema(valueSchema);
+    ETLValueSchemaTransformation transformation =
+        ETLValueSchemaTransformation.fromSchema(ETL_UNION_VALUE_WITH_NULL_SCHEMA);
     Assert.assertEquals(transformation, ETLValueSchemaTransformation.NONE);
   }
 }
========================================================================
@@ -219,7 +219,7 @@ private ResultsContainer clientStressTest(
 
     int valueSchemaId = 1;
     int valueSizeInBytes = 800;
-    String valueSchemaStr = TestWriteUtils.USER_SCHEMA_WITH_A_FLOAT_ARRAY_STRING;
+    String valueSchemaStr = TestWriteUtils.USER_WITH_FLOAT_ARRAY_SCHEMA.toString();
     Schema valueSchema = new Schema.Parser().parse(valueSchemaStr);
     Set<String> keys = new HashSet<>();
     setupSchemaAndRequest(valueSchemaId, valueSchemaStr);
========================================================================
@@ -93,14 +93,15 @@ public void testStoresMetadataCopyOver() {
     // Create a test store only in dc0 region
     NewStoreResponse newStoreResponse = dc0Client.retryableRequest(
         3,
-        c -> c.createNewStore(storeName, "", "\"string\"", TestWriteUtils.USER_SCHEMA_STRING_SIMPLE_WITH_DEFAULT));
+        c -> c.createNewStore(storeName, "", "\"string\"", TestWriteUtils.USER_WITH_DEFAULT_SCHEMA.toString()));
     Assert.assertFalse(
         newStoreResponse.isError(),
         "The NewStoreResponse returned an error: " + newStoreResponse.getError());
     // Enable read compute to test superset schema registration.
     Assert.assertFalse(
         dc0Client.updateStore(storeName, new UpdateStoreQueryParams().setReadComputationEnabled(true)).isError());
-    Assert.assertFalse(dc0Client.addValueSchema(storeName, TestWriteUtils.USER_SCHEMA_STRING_WITH_DEFAULT).isError());
+    Assert.assertFalse(
+        dc0Client.addValueSchema(storeName, TestWriteUtils.USER_WITH_DEFAULT_SCHEMA.toString()).isError());
     checkStoreConfig(dc0Client, storeName);
     // Mimic source fabric store-level execution id
     Assert.assertFalse(
@@ -167,12 +168,18 @@ public void testCompareStore() {
         ControllerClient childControllerClient1 =
             new ControllerClient(clusterName, childDatacenters.get(1).getControllerConnectString())) {
       String testStoreName = Utils.getUniqueString("test-store");
-      NewStoreResponse newStoreResponse = childControllerClient0
-          .createNewStore(testStoreName, "test", "\"string\"", TestWriteUtils.NESTED_SCHEMA_STRING);
+      NewStoreResponse newStoreResponse = childControllerClient0.createNewStore(
+          testStoreName,
+          "test",
+          TestWriteUtils.STRING_SCHEMA.toString(),
+          TestWriteUtils.NAME_RECORD_V1_SCHEMA.toString());
       Assert.assertFalse(newStoreResponse.isError());
       checkStoreConfig(childControllerClient0, testStoreName);
-      newStoreResponse = childControllerClient1
-          .createNewStore(testStoreName, "test", "\"string\"", TestWriteUtils.NESTED_SCHEMA_STRING);
+      newStoreResponse = childControllerClient1.createNewStore(
+          testStoreName,
+          "test",
+          TestWriteUtils.STRING_SCHEMA.toString(),
+          TestWriteUtils.NAME_RECORD_V1_SCHEMA.toString());
       Assert.assertFalse(newStoreResponse.isError());
       checkStoreConfig(childControllerClient1, testStoreName);
 
@@ -181,7 +188,7 @@ public void testCompareStore() {
       childControllerClient0.emptyPush(testStoreName, Utils.getUniqueString("empty-push-1"), 1L);
       Assert.assertFalse(versionCreationResponse.isError());
       SchemaResponse schemaResponse =
-          childControllerClient0.addValueSchema(testStoreName, TestWriteUtils.NESTED_SCHEMA_STRING_V2);
+          childControllerClient0.addValueSchema(testStoreName, TestWriteUtils.NAME_RECORD_V2_SCHEMA.toString());
       Assert.assertFalse(schemaResponse.isError());
 
       StoreComparisonResponse response = parentControllerClient.compareStore(testStoreName, dcNames[0], dcNames[1]);
========================================================================
@@ -320,8 +320,8 @@ public void testEnableActiveActiveReplicationSchema() {
     String clusterName = CLUSTER_NAMES[0];
     String storeName = Utils.getUniqueString("store");
     String valueRecordSchemaStr1 = BASIC_USER_SCHEMA_STRING_WITH_DEFAULT;
-    String valueRecordSchemaStr2 = TestWriteUtils.USER_SCHEMA_STRING_SIMPLE_WITH_DEFAULT;
-    String valueRecordSchemaStr3 = TestWriteUtils.USER_SCHEMA_STRING_WITH_DEFAULT;
+    String valueRecordSchemaStr2 = TestWriteUtils.SIMPLE_USER_WITH_DEFAULT_SCHEMA.toString();
+    String valueRecordSchemaStr3 = TestWriteUtils.USER_WITH_DEFAULT_SCHEMA.toString();
 
     Schema rmdSchema1 = RmdSchemaGenerator.generateMetadataSchema(valueRecordSchemaStr1, 1);
     Schema rmdSchema2 = RmdSchemaGenerator.generateMetadataSchema(valueRecordSchemaStr2, 1);
(The remaining changed files in this commit are not shown here.)
