data/src/test/java/org/apache/iceberg/data/BaseFormatModelTests.java: 548 additions, 1 deletion
(large diff not rendered by default)
org/apache/iceberg/flink/data/TestFlinkFormatModel.java
@@ -19,13 +19,17 @@
package org.apache.iceberg.flink.data;

import java.util.List;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.iceberg.PartitionData;
import org.apache.iceberg.Schema;
import org.apache.iceberg.data.BaseFormatModelTests;
import org.apache.iceberg.data.Record;
import org.apache.iceberg.flink.FlinkSchemaUtil;
import org.apache.iceberg.flink.RowDataConverter;
import org.apache.iceberg.flink.TestHelpers;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;

public class TestFlinkFormatModel extends BaseFormatModelTests<RowData> {

@@ -48,4 +52,26 @@ protected RowData convertToEngine(Record record, Schema schema) {
  protected void assertEquals(Schema schema, List<RowData> expected, List<RowData> actual) {
    TestHelpers.assertRows(actual, expected, FlinkSchemaUtil.convert(schema));
  }

  @Override
  protected Object convertConstantToEngine(Type type, Object value) {
    if (value instanceof PartitionData partitionData) {
      Types.StructType structType = type.asStructType();
      List<Types.NestedField> fields = structType.fields();
      GenericRowData rowData = new GenericRowData(fields.size());
      int sourceSize = partitionData.size();
      for (int i = 0; i < fields.size(); i++) {
        if (i < sourceSize) {
          // copy the partition value positionally, converting nested values recursively
          Object fieldValue = partitionData.get(i, Object.class);
          rowData.setField(i, convertConstantToEngine(fields.get(i).type(), fieldValue));
        } else {
          // the partition data has fewer fields than the struct type; pad with nulls
          rowData.setField(i, null);
        }
      }

      return rowData;
    }

    return RowDataUtil.convertConstant(type, value);
  }
}
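
For reference, a minimal standalone sketch of what the PartitionData branch above does: partition values are copied positionally into a GenericRowData, and trailing fields that the partition data lacks (for example, after partition spec evolution) are padded with nulls. The schema, values, and class name below are illustrative, and the sketch assumes PartitionData's constructor is accessible from this code:

import java.util.List;
import org.apache.flink.table.data.GenericRowData;
import org.apache.iceberg.PartitionData;
import org.apache.iceberg.types.Types;

public class PartitionDataToRowDataSketch {
  public static void main(String[] args) {
    // Engine-side struct with three fields; the partition data only has two.
    Types.StructType structType =
        Types.StructType.of(
            Types.NestedField.required(1, "bucket", Types.IntegerType.get()),
            Types.NestedField.optional(2, "day", Types.IntegerType.get()),
            Types.NestedField.optional(3, "added_later", Types.StringType.get()));

    // Partition data built from only the first two fields.
    Types.StructType partitionType =
        Types.StructType.of(structType.fields().subList(0, 2));
    PartitionData partition = new PartitionData(partitionType); // assumed accessible
    partition.set(0, 42);
    partition.set(1, 19345);

    // Positional copy with null padding, mirroring convertConstantToEngine above.
    List<Types.NestedField> fields = structType.fields();
    GenericRowData row = new GenericRowData(fields.size());
    for (int i = 0; i < fields.size(); i++) {
      row.setField(i, i < partition.size() ? partition.get(i, Object.class) : null);
    }

    System.out.println(row); // e.g. +I(42,19345,null)
  }
}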
TestSparkFormatModel.java
@@ -25,6 +25,8 @@
import org.apache.iceberg.data.BaseFormatModelTests;
import org.apache.iceberg.data.Record;
import org.apache.iceberg.spark.SparkSchemaUtil;
import org.apache.iceberg.spark.SparkUtil;
import org.apache.iceberg.types.Type;
import org.apache.spark.sql.catalyst.InternalRow;

public class TestSparkFormatModel extends BaseFormatModelTests<InternalRow> {
@@ -51,4 +53,9 @@ protected void assertEquals(Schema schema, List<InternalRow> expected, List<InternalRow> actual) {
      TestHelpers.assertEquals(schema, expected.get(i), actual.get(i));
    }
  }

  @Override
  protected Object convertConstantToEngine(Type type, Object value) {
    // delegate constant conversion to Spark's internal representation helper
    return SparkUtil.internalToSpark(type, value);
  }
}
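
For context, a rough sketch of the kind of mapping an internal-to-Spark constant conversion performs: Iceberg generics carry Java values such as String and BigDecimal, while Spark's InternalRow expects UTF8String and Decimal. This is not the SparkUtil.internalToSpark implementation; the helper below is hypothetical and only illustrates the idea:

import java.math.BigDecimal;
import org.apache.spark.sql.types.Decimal;
import org.apache.spark.unsafe.types.UTF8String;

public class InternalToSparkSketch {
  // Hypothetical helper: map a couple of common Iceberg generic values to
  // the representations Spark's InternalRow expects.
  static Object toSpark(Object value) {
    if (value instanceof CharSequence) {
      return UTF8String.fromString(value.toString()); // Spark strings are UTF8String
    } else if (value instanceof BigDecimal) {
      return Decimal.apply((BigDecimal) value); // Spark decimals use the Decimal wrapper
    }
    return value; // primitives such as int/long/boolean pass through unchanged
  }

  public static void main(String[] args) {
    System.out.println(toSpark("iceberg"));              // iceberg
    System.out.println(toSpark(new BigDecimal("3.14"))); // 3.14
  }
}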