[POC] druid 23 compatibility test #977

Open
wants to merge 1 commit into base: master
druid-lookups/pom.xml (10 changes: 5 additions & 5 deletions)
@@ -163,19 +163,19 @@
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-processing</artifactId>
-<version>${druid.version}</version>
+<version>0.23.0</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-server</artifactId>
-<version>${druid.version}</version>
+<version>0.23.0</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-sql</artifactId>
-<version>${druid.version}</version>
+<version>0.23.0</version>
<scope>provided</scope>
</dependency>
<dependency>
@@ -290,14 +290,14 @@
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-server</artifactId>
-<version>${druid.version}</version>
+<version>0.23.0</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.druid.extensions</groupId>
<artifactId>druid-hdfs-storage</artifactId>
-<version>${druid.version}</version>
+<version>0.23.0</version>
<scope>test</scope>
</dependency>
<dependency>
MahaLookupExtractor.java
@@ -9,6 +9,7 @@
import org.apache.druid.java.util.common.logger.Logger;

import java.util.Map;
+import java.util.Set;

abstract public class MahaLookupExtractor extends LookupExtractor {
private static final Logger LOG = new Logger(MahaLookupExtractor.class);
@@ -44,4 +45,19 @@ public String apply(@Nullable String key) {
}
return null;
}

+@Override
+public Set<String> keySet() {
+return null;
+}

+@Override
+public boolean canGetKeySet() {
+return false;
+}

+@Override
+public byte[] getCacheKey() {
+return null;
+}
}
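The three overrides above satisfy LookupExtractor methods that the Druid 0.23 upgrade apparently requires this abstract class to provide, and the commit does so with null/false stubs. Below is a minimal sketch, not part of the PR, of slightly more defensive stubs under the same assumption the diff makes (the lookup cannot enumerate its keys); it would need java.util.Collections in addition to the Set import added above.

```java
// Sketch only, not part of the PR: more defensive versions of the stubs added
// above, assuming the Maha lookup cannot enumerate its key set.
@Override
public boolean canGetKeySet() {
    // Signals to callers that keySet() has nothing useful to return.
    return false;
}

@Override
public Set<String> keySet() {
    // An empty set avoids NullPointerExceptions in callers that skip the
    // canGetKeySet() check; the commit itself returns null here.
    return Collections.emptySet();
}

@Override
public byte[] getCacheKey() {
    // An empty byte[] is safer than null for callers that hash or copy the
    // cache key; the commit itself returns null here.
    return new byte[0];
}
```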
Next changed file:
@@ -29,10 +29,7 @@
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.logger.Logger;
-import org.apache.druid.math.expr.Expr;
-import org.apache.druid.math.expr.ExprEval;
-import org.apache.druid.math.expr.ExprMacroTable;
-import org.apache.druid.math.expr.ExprType;
+import org.apache.druid.math.expr.*;
import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider;
import org.apache.druid.query.lookup.LookupReferencesManager;

@@ -139,9 +136,9 @@ public Expr visit(Shuttle shuttle)

@Nullable
@Override
-public ExprType getOutputType(InputBindingInspector inspector)
+public ExpressionType getOutputType(InputBindingInspector inspector)
{
-return ExprType.STRING;
+return ExpressionType.STRING;
}

@Override
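Two related 0.23 changes appear in this file: the explicit org.apache.druid.math.expr imports are collapsed into a wildcard, and getOutputType now returns ExpressionType rather than ExprType. Since ExpressionType is referenced with only the wildcard in scope, it presumably resolves from the same package. For reference, a sketch of the explicit imports under 0.23, assuming only the expression types visible in this diff are used (an alternative to the wildcard, not what the commit does):

```java
// Sketch: explicit Druid 0.23 imports for the expression types visible in
// this diff; ExpressionType replaces the removed ExprType import.
import org.apache.druid.math.expr.Expr;
import org.apache.druid.math.expr.ExprEval;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.math.expr.ExpressionType;
```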
MahaLookupExprMacroTest.java
@@ -29,6 +29,7 @@
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.math.expr.Expr;
import org.apache.druid.math.expr.ExprMacroTable;
+import org.apache.druid.math.expr.InputBindings;
import org.apache.druid.math.expr.Parser;
import org.apache.druid.metadata.MetadataStorageConnectorConfig;
import org.apache.druid.query.lookup.*;
@@ -46,7 +47,7 @@
public class MahaLookupExprMacroTest {


-private static final Expr.ObjectBinding BINDINGS = Parser.withMap(
+private static final Expr.ObjectBinding BINDINGS = InputBindings.withMap(
ImmutableMap.<String, Object>builder()
.put("id1", "dim_key1")
.build()
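InputBindings.withMap replaces the older Parser.withMap helper for building test bindings in Druid 0.23. As a sketch of how bindings like BINDINGS are typically consumed when exercising an expression; the expression text and the macroTable variable below are hypothetical placeholders, not taken from this test:

```java
// Sketch only: parse an expression against a macro table and evaluate it with
// the BINDINGS defined above. The expression string and "macroTable" variable
// are hypothetical placeholders.
Expr expr = Parser.parse("lookup_expression(id1)", macroTable);
String value = expr.eval(BINDINGS).asString();
```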
Next changed file:
@@ -30,6 +30,7 @@
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.lookup.*;
+import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.sql.calcite.expression.DruidExpression;
@@ -58,10 +59,10 @@ public void testLookupReturnsExpectedResults() throws JsonProcessingException {
RexBuilder rexBuilder = new RexBuilder(util.typeFactory);
RowSignature ROW_SIGNATURE = RowSignature
.builder()
-.add("d", ValueType.DOUBLE)
-.add("l", ValueType.LONG)
-.add("s", ValueType.STRING)
-.add("student_id", ValueType.STRING)
+.add("d", ColumnType.DOUBLE)
+.add("l", ColumnType.LONG)
+.add("s", ColumnType.STRING)
+.add("student_id", ColumnType.STRING)
.build();

final LookupExtractorFactoryContainerProvider manager = EasyMock.createStrictMock(LookupReferencesManager.class);
@@ -97,10 +98,10 @@ public void testBasicMappedLookup() throws JsonProcessingException {
RexBuilder rexBuilder = new RexBuilder(util.typeFactory);
RowSignature ROW_SIGNATURE = RowSignature
.builder()
-.add("d", ValueType.DOUBLE)
-.add("l", ValueType.LONG)
-.add("grade", ValueType.STRING)
-.add("student_id", ValueType.STRING)
+.add("d", ColumnType.DOUBLE)
+.add("l", ColumnType.LONG)
+.add("grade", ColumnType.STRING)
+.add("student_id", ColumnType.STRING)
.build();

final LookupExtractorFactoryContainerProvider manager = EasyMock.createStrictMock(LookupReferencesManager.class);
@@ -139,10 +140,10 @@ public void testInvalidLookupCol() {
RexBuilder rexBuilder = new RexBuilder(util.typeFactory);
RowSignature ROW_SIGNATURE = RowSignature
.builder()
-.add("d", ValueType.DOUBLE)
-.add("l", ValueType.LONG)
-.add("grade", ValueType.STRING)
-.add("student_id", ValueType.STRING)
+.add("d", ColumnType.DOUBLE)
+.add("l", ColumnType.LONG)
+.add("grade", ColumnType.STRING)
+.add("student_id", ColumnType.STRING)
.build();

final LookupExtractorFactoryContainerProvider manager = EasyMock.createStrictMock(LookupReferencesManager.class);
@@ -175,10 +176,10 @@ public void testInvalidValueCol() throws JsonProcessingException {
RexBuilder rexBuilder = new RexBuilder(util.typeFactory);
RowSignature ROW_SIGNATURE = RowSignature
.builder()
-.add("d", ValueType.DOUBLE)
-.add("l", ValueType.LONG)
-.add("grade", ValueType.STRING)
-.add("student_id", ValueType.STRING)
+.add("d", ColumnType.DOUBLE)
+.add("l", ColumnType.LONG)
+.add("grade", ColumnType.STRING)
+.add("student_id", ColumnType.STRING)
.build();

final LookupExtractorFactoryContainerProvider manager = EasyMock.createStrictMock(LookupReferencesManager.class);
@@ -219,10 +220,10 @@ public void testMappedLookupWithNullKeys() throws JsonProcessingException {
RexBuilder rexBuilder = new RexBuilder(util.typeFactory);
RowSignature ROW_SIGNATURE = RowSignature
.builder()
-.add("d", ValueType.DOUBLE)
-.add("l", ValueType.LONG)
-.add("grade", ValueType.STRING)
-.add("student_id", ValueType.STRING)
+.add("d", ColumnType.DOUBLE)
+.add("l", ColumnType.LONG)
+.add("grade", ColumnType.STRING)
+.add("student_id", ColumnType.STRING)
.build();

final LookupExtractorFactoryContainerProvider manager = EasyMock.createStrictMock(LookupReferencesManager.class);
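Every RowSignature in these tests now supplies ColumnType constants where the previous code passed ValueType; under Druid 0.23, RowSignature.Builder.add takes a ColumnType. A minimal sketch of the updated construction pattern, using column names taken from the tests above:

```java
// Sketch: Druid 0.23-style RowSignature construction, as used in the updated
// tests; RowSignature.Builder.add takes a ColumnType rather than a ValueType.
RowSignature rowSignature = RowSignature
    .builder()
    .add("grade", ColumnType.STRING)
    .add("student_id", ColumnType.STRING)
    .build();
```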
Next changed file:
@@ -12,6 +12,7 @@
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.groupby.GroupByQuery;
+import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.sql.calcite.aggregation.DimensionExpression;
@@ -45,7 +46,7 @@ String convertToJson(DruidExpression druidExpression, String cubeName, String ou
.setInterval(lastWeek + "T00:00:00.000Z/" + today + "T00:00:00.000Z")
.setGranularity(Granularities.ALL)
.setDimensions(
-DimensionExpression.ofSimpleColumn(outputName, druidExpression, ValueType.STRING).toDimensionSpec()
+DimensionExpression.ofSimpleColumn(outputName, druidExpression, ColumnType.STRING).toDimensionSpec()
)
.build();
