
Change MaterializedRow calls to have explicit long

In preparation for INT/SMALLINT/TINYINT support, the MaterializedRow constructors must no longer assume that every Number passed to them should be treated as a long. This change updates all call sites of the row() method to pass explicit longs.
Raghav Sethi committed Apr 8, 2016
1 parent 6bacf56 commit 8c4e324
Showing 56 changed files with 1,929 additions and 1,937 deletions.
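The pattern repeated across the hunks below is always the same: a numeric value bound to a BIGINT column can no longer rely on MaterializedRow widening it, so it is written as an explicit long. A minimal sketch of the new call-site shape, assuming the Presto testing classes visible in the hunks (MaterializedResult and the BIGINT/VARCHAR type constants) plus a session supplied by the surrounding test; the exact session type accepted here, and the ExplicitLongRowsExample wrapper with its column choice, are illustrative assumptions rather than part of the commit:

import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;

import com.facebook.presto.Session;
import com.facebook.presto.testing.MaterializedResult;

// Hypothetical helper; shows only the call-site shape required after this commit.
public final class ExplicitLongRowsExample
{
    static MaterializedResult expectedRows(Session session)
    {
        // Before: .row(1, "hello") autoboxed 1 to Integer and relied on MaterializedRow
        // coercing any Number to long. After: that coercion is gone, so BIGINT values
        // carry an explicit L suffix (or an explicit (long) cast when held in an int variable).
        return MaterializedResult.resultBuilder(session, BIGINT, VARCHAR)
                .row(1L, "hello")
                .row(2L, "bye")
                .build();
    }

    private ExplicitLongRowsExample() {}
}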
@@ -195,9 +195,9 @@ public abstract class AbstractTestHiveClient
.build();

private static final MaterializedResult CREATE_TABLE_DATA = MaterializedResult.resultBuilder(SESSION, BIGINT, VARCHAR, BIGINT, DOUBLE, BOOLEAN, ARRAY_TYPE, MAP_TYPE, ROW_TYPE)
- .row(1, "hello", 123, 43.5, true, ImmutableList.of("apple", "banana"), ImmutableMap.of("one", 1L, "two", 2L), ImmutableList.of("true", 1, true))
- .row(2, null, null, null, null, null, null, null)
- .row(3, "bye", 456, 98.1, false, ImmutableList.of("ape", "bear"), ImmutableMap.of("three", 3L, "four", 4L), ImmutableList.of("false", 0, false))
+ .row(1L, "hello", 123L, 43.5, true, ImmutableList.of("apple", "banana"), ImmutableMap.of("one", 1L, "two", 2L), ImmutableList.of("true", 1L, true))
+ .row(2L, null, null, null, null, null, null, null)
+ .row(3L, "bye", 456L, 98.1, false, ImmutableList.of("ape", "bear"), ImmutableMap.of("three", 3L, "four", 4L), ImmutableList.of("false", 0L, false))
.build();

private static final List<ColumnMetadata> CREATE_TABLE_COLUMNS_PARTITIONED = ImmutableList.<ColumnMetadata>builder()
@@ -215,9 +215,9 @@ public abstract class AbstractTestHiveClient
.build());

private static final MaterializedResult CREATE_TABLE_PARTITIONED_DATA_2ND = MaterializedResult.resultBuilder(SESSION, BIGINT, VARCHAR, BIGINT, DOUBLE, BOOLEAN, ARRAY_TYPE, MAP_TYPE, ROW_TYPE, VARCHAR)
- .row(4, "hello", 123, 43.5, true, ImmutableList.of("apple", "banana"), ImmutableMap.of("one", 1L, "two", 2L), ImmutableList.of("true", 1, true), "2015-07-04")
- .row(5, null, null, null, null, null, null, null, "2015-07-04")
- .row(6, "bye", 456, 98.1, false, ImmutableList.of("ape", "bear"), ImmutableMap.of("three", 3L, "four", 4L), ImmutableList.of("false", 0, false), "2015-07-04")
+ .row(4L, "hello", 123L, 43.5, true, ImmutableList.of("apple", "banana"), ImmutableMap.of("one", 1L, "two", 2L), ImmutableList.of("true", 1L, true), "2015-07-04")
+ .row(5L, null, null, null, null, null, null, null, "2015-07-04")
+ .row(6L, "bye", 456L, 98.1, false, ImmutableList.of("ape", "bear"), ImmutableMap.of("three", 3L, "four", 4L), ImmutableList.of("false", 0L, false), "2015-07-04")
.build();

protected Set<HiveStorageFormat> createTableFormats = ImmutableSet.copyOf(HiveStorageFormat.values());
@@ -319,9 +319,9 @@ private void doCreateTable(SchemaTableName tableName, HiveStorageFormat storageF
HiveOutputTableHandle outputHandle = metadata.beginCreateTable(SESSION, tableMetadata, Optional.empty());

MaterializedResult data = MaterializedResult.resultBuilder(SESSION, BIGINT)
- .row(1)
- .row(3)
- .row(2)
+ .row(1L)
+ .row(3L)
+ .row(2L)
.build();

// write the records
@@ -109,7 +109,7 @@ public void testManySegments()
MaterializedResult result = queryRunner.execute("SELECT count(_message) from " + topicName);

MaterializedResult expected = MaterializedResult.resultBuilder(SESSION, BigintType.BIGINT)
- .row(100000)
+ .row(100000L)
.build();

assertEquals(result, expected);
@@ -123,7 +123,7 @@ public void testTopicHasData()
MaterializedResult result = queryRunner.execute("SELECT count(1) from " + topicName);

MaterializedResult expected = MaterializedResult.resultBuilder(SESSION, BigintType.BIGINT)
- .row(0)
+ .row(0L)
.build();

assertEquals(result, expected);
@@ -134,7 +134,7 @@ public void testTopicHasData()
result = queryRunner.execute("SELECT count(1) from " + topicName);

expected = MaterializedResult.resultBuilder(SESSION, BigintType.BIGINT)
- .row(count)
+ .row((long) count)
.build();

assertEquals(result, expected);
@@ -52,15 +52,10 @@ public MaterializedRow(int precision, List<Object> values)

private static Object processValue(int precision, Object value)
{
+ checkArgument(!(value instanceof Integer || value instanceof Short || value instanceof Byte), "INTEGER, SMALLINT and TINYINT are not yet supported");
if (value instanceof Double || value instanceof Float) {
return new ApproximateDouble(((Number) value).doubleValue(), precision);
}
- if (value instanceof BigDecimal) {
- return value;
- }
- if (value instanceof Number) {
- return ((Number) value).longValue();
- }
if (value instanceof List) {
return ((List<?>) value).stream()
.map(element -> processValue(precision, element))
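The hunk header above (-52,15 +52,10), together with the single added guard, implies that both the BigDecimal pass-through and the Number-to-long coercion were removed. On that reading, processValue ends up roughly as sketched below. This is a reconstruction from the visible lines, not the verbatim file: checkArgument is Guava's Preconditions, ApproximateDouble is the existing helper from the Double/Float branch, and the collector and the trailing return are assumptions because the hunk is cut off inside the List branch.

private static Object processValue(int precision, Object value)
{
    // New guard: callers must pass explicit longs until INTEGER/SMALLINT/TINYINT are supported.
    checkArgument(!(value instanceof Integer || value instanceof Short || value instanceof Byte),
            "INTEGER, SMALLINT and TINYINT are not yet supported");
    if (value instanceof Double || value instanceof Float) {
        // Doubles keep their precision-limited comparison wrapper, as before.
        return new ApproximateDouble(((Number) value).doubleValue(), precision);
    }
    if (value instanceof List) {
        // Recursively normalize nested values (ARRAY columns and the like).
        return ((List<?>) value).stream()
                .map(element -> processValue(precision, element))
                .collect(toList());   // collector assumed; not visible in the hunk
    }
    // With the "instanceof Number -> longValue()" branch removed, Longs, BigDecimals,
    // Strings, Booleans, etc. are assumed to flow through unchanged from here on.
    return value;
}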
@@ -46,19 +46,19 @@ public void testPageSorter()
List<SortOrder> sortOrders = ImmutableList.of(ASC_NULLS_FIRST);

List<Page> inputPages = RowPagesBuilder.rowPagesBuilder(types)
- .row(2, 1.1, "d")
- .row(1, 2.2, "c")
+ .row(2L, 1.1, "d")
+ .row(1L, 2.2, "c")
.pageBreak()
- .row(-2, 2.2, "b")
- .row(-12, 2.2, "a")
+ .row(-2L, 2.2, "b")
+ .row(-12L, 2.2, "a")
.build();

List<Page> expectedPages = RowPagesBuilder.rowPagesBuilder(types)
- .row(-12, 2.2, "a")
- .row(-2, 2.2, "b")
+ .row(-12L, 2.2, "a")
+ .row(-2L, 2.2, "b")
.pageBreak()
- .row(1, 2.2, "c")
- .row(2, 1.1, "d")
+ .row(1L, 2.2, "c")
+ .row(2L, 1.1, "d")
.build();

assertSorted(inputPages, expectedPages, types, sortChannels, sortOrders, 100);
@@ -73,25 +73,25 @@ public void testPageSorterMultipleChannels()
List<SortOrder> sortOrders = Collections.nCopies(sortChannels.size(), ASC_NULLS_FIRST);

List<Page> inputPages = RowPagesBuilder.rowPagesBuilder(types)
- .row(2, 1.1, "d")
- .row(1, 2.2, "c")
+ .row(2L, 1.1, "d")
+ .row(1L, 2.2, "c")
.pageBreak()
- .row(1, 2.2, "b")
- .row(1, 2.2, "a")
+ .row(1L, 2.2, "b")
+ .row(1L, 2.2, "a")
.pageBreak()
- .row(1, 2.2, null)
- .row(1, null, "z")
- .row(1, null, null)
+ .row(1L, 2.2, null)
+ .row(1L, null, "z")
+ .row(1L, null, null)
.build();

List<Page> expectedPages = RowPagesBuilder.rowPagesBuilder(types)
- .row(1, null, null)
- .row(1, null, "z")
- .row(1, 2.2, null)
- .row(1, 2.2, "a")
- .row(1, 2.2, "b")
- .row(1, 2.2, "c")
- .row(2, 1.1, "d")
+ .row(1L, null, null)
+ .row(1L, null, "z")
+ .row(1L, 2.2, null)
+ .row(1L, 2.2, "a")
+ .row(1L, 2.2, "b")
+ .row(1L, 2.2, "c")
+ .row(2L, 1.1, "d")
.build();
assertSorted(inputPages, expectedPages, types, sortChannels, sortOrders, 100);
}
@@ -105,18 +105,18 @@ public void testPageSorterSorted()
List<SortOrder> sortOrders = ImmutableList.of(ASC_NULLS_FIRST);

List<Page> inputPages = RowPagesBuilder.rowPagesBuilder(types)
- .row(-12, 2.2, "a")
- .row(-2, 2.2, "b")
+ .row(-12L, 2.2, "a")
+ .row(-2L, 2.2, "b")
.pageBreak()
- .row(1, 2.2, "d")
- .row(2, 1.1, "c")
+ .row(1L, 2.2, "d")
+ .row(2L, 1.1, "c")
.build();

List<Page> expectedPages = RowPagesBuilder.rowPagesBuilder(types)
- .row(-12, 2.2, "a")
- .row(-2, 2.2, "b")
- .row(1, 2.2, "d")
- .row(2, 1.1, "c")
+ .row(-12L, 2.2, "a")
+ .row(-2L, 2.2, "b")
+ .row(1L, 2.2, "d")
+ .row(2L, 1.1, "c")
.build();

assertSorted(inputPages, expectedPages, types, sortChannels, sortOrders, 100);
@@ -131,19 +131,19 @@ public void testPageSorterForceExpansion()
List<SortOrder> sortOrders = ImmutableList.of(ASC_NULLS_FIRST);

List<Page> inputPages = RowPagesBuilder.rowPagesBuilder(types)
- .row(2, 1.1, "c")
- .row(1, 2.2, "d")
+ .row(2L, 1.1, "c")
+ .row(1L, 2.2, "d")
.pageBreak()
- .row(-2, 2.2, "b")
- .row(-12, 2.2, "a")
+ .row(-2L, 2.2, "b")
+ .row(-12L, 2.2, "a")
.build();

List<Page> expectedPages = RowPagesBuilder.rowPagesBuilder(types)
- .row(-12, 2.2, "a")
- .row(-2, 2.2, "b")
+ .row(-12L, 2.2, "a")
+ .row(-2L, 2.2, "b")
.pageBreak()
- .row(1, 2.2, "d")
- .row(2, 1.1, "c")
+ .row(1L, 2.2, "d")
+ .row(2L, 1.1, "c")
.build();

assertSorted(inputPages, expectedPages, types, sortChannels, sortOrders, 2);
@@ -95,7 +95,7 @@ public void testAggregation()
Operator operator = operatorFactory.createOperator(driverContext);

MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, BIGINT, DOUBLE, VARCHAR, BIGINT, BIGINT, DOUBLE, VARCHAR)
- .row(100, 4950, 49.5, "399", 100, 54950, 54950.0, "599")
+ .row(100L, 4950L, 49.5, "399", 100L, 54950L, 54950.0, "599")
.build();

assertOperatorEquals(operator, input, expected);
@@ -76,11 +76,11 @@ public void testDistinctLimit(boolean hashEnabled)
Operator operator = operatorFactory.createOperator(driverContext);

MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT)
- .row(1)
- .row(2)
- .row(3)
- .row(4)
- .row(5)
+ .row(1L)
+ .row(2L)
+ .row(3L)
+ .row(4L)
+ .row(5L)
.build();

OperatorAssertion.assertOperatorEquals(operator, input, expected);
@@ -100,9 +100,9 @@ public void testDistinctLimitWithPageAlignment(boolean hashEnabled)
Operator operator = operatorFactory.createOperator(driverContext);

MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT)
- .row(1)
- .row(2)
- .row(3)
+ .row(1L)
+ .row(2L)
+ .row(3L)
.build();

OperatorAssertion.assertOperatorEquals(operator, input, expected);
@@ -122,10 +122,10 @@ public void testDistinctLimitValuesLessThanLimit(boolean hashEnabled)
Operator operator = operatorFactory.createOperator(driverContext);

MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT)
- .row(1)
- .row(2)
- .row(3)
- .row(4)
+ .row(1L)
+ .row(2L)
+ .row(3L)
+ .row(4L)
.build();

OperatorAssertion.assertOperatorEquals(operator, input, expected);
@@ -102,16 +102,16 @@ public Set<Integer> getInputChannels()
Operator operator = operatorFactory.createOperator(driverContext);

MaterializedResult expected = MaterializedResult.resultBuilder(driverContext.getSession(), VARCHAR, BIGINT)
.row("10", 15)
.row("11", 16)
.row("12", 17)
.row("13", 18)
.row("14", 19)
.row("15", 20)
.row("16", 21)
.row("17", 22)
.row("18", 23)
.row("19", 24)
.row("10", 15L)
.row("11", 16L)
.row("12", 17L)
.row("13", 18L)
.row("14", 19L)
.row("15", 20L)
.row("16", 21L)
.row("17", 22L)
.row("18", 23L)
.row("19", 24L)
.build();

assertOperatorEquals(operator, input, expected);
@@ -76,18 +76,18 @@ public void testGroupId()
Operator operator = operatorFactory.createOperator(driverContext);

MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, VARCHAR, BOOLEAN, BIGINT, BIGINT)
- .row(100, "400", true, null, 0)
- .row(101, "401", false, null, 0)
- .row(102, "402", true, null, 0)
- .row(200, "500", true, null, 0)
- .row(201, "501", false, null, 0)
- .row(202, "502", true, null, 0)
- .row(100, null, null, 1000, 1)
- .row(101, null, null, 1001, 1)
- .row(102, null, null, 1002, 1)
- .row(200, null, null, 1100, 1)
- .row(201, null, null, 1101, 1)
- .row(202, null, null, 1102, 1)
+ .row(100L, "400", true, null, 0L)
+ .row(101L, "401", false, null, 0L)
+ .row(102L, "402", true, null, 0L)
+ .row(200L, "500", true, null, 0L)
+ .row(201L, "501", false, null, 0L)
+ .row(202L, "502", true, null, 0L)
+ .row(100L, null, null, 1000L, 1L)
+ .row(101L, null, null, 1001L, 1L)
+ .row(102L, null, null, 1002L, 1L)
+ .row(200L, null, null, 1100L, 1L)
+ .row(201L, null, null, 1101L, 1L)
+ .row(202L, null, null, 1102L, 1L)
.build();

List<Page> pages = toPages(operator, input.iterator());
@@ -135,16 +135,16 @@ public void testHashAggregation(boolean hashEnabled)
Operator operator = operatorFactory.createOperator(driverContext);

MaterializedResult expected = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT, BIGINT, DOUBLE, VARCHAR, BIGINT, BIGINT)
.row("0", 3, 0, 0.0, "300", 3, 3)
.row("1", 3, 3, 1.0, "301", 3, 3)
.row("2", 3, 6, 2.0, "302", 3, 3)
.row("3", 3, 9, 3.0, "303", 3, 3)
.row("4", 3, 12, 4.0, "304", 3, 3)
.row("5", 3, 15, 5.0, "305", 3, 3)
.row("6", 3, 18, 6.0, "306", 3, 3)
.row("7", 3, 21, 7.0, "307", 3, 3)
.row("8", 3, 24, 8.0, "308", 3, 3)
.row("9", 3, 27, 9.0, "309", 3, 3)
.row("0", 3L, 0L, 0.0, "300", 3L, 3L)
.row("1", 3L, 3L, 1.0, "301", 3L, 3L)
.row("2", 3L, 6L, 2.0, "302", 3L, 3L)
.row("3", 3L, 9L, 3.0, "303", 3L, 3L)
.row("4", 3L, 12L, 4.0, "304", 3L, 3L)
.row("5", 3L, 15L, 5.0, "305", 3L, 3L)
.row("6", 3L, 18L, 6.0, "306", 3L, 3L)
.row("7", 3L, 21L, 7.0, "307", 3L, 3L)
.row("8", 3L, 24L, 8.0, "308", 3L, 3L)
.row("9", 3L, 27L, 9.0, "309", 3L, 3L)
.build();

assertOperatorEqualsIgnoreOrder(operator, input, expected, hashEnabled, Optional.of(hashChannels.size()));