Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add checkstyle and spotbugs #77

Merged
merged 2 commits into from
Dec 9, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Apply automated linters & regenerate models
  • Loading branch information
jogrogan committed Dec 9, 2024
commit 0f5ac441b2af975dd69aef5f73055a0dba8f1d0b
Original file line number Diff line number Diff line change
@@ -1,12 +1,15 @@
package com.linkedin.hoptimator;

import java.sql.Wrapper;
import java.sql.SQLException;
import java.sql.Wrapper;


/** Registers a set of tables, possibly within schemas and sub-schemas. */
public interface Catalog {

/** Name of this catalog. */
String name();

/** Human-readable description of this catalog. */
String description();

/**
 * Registers this catalog's tables under the given parent schema.
 *
 * @param parentSchema the parent to register under; a {@link java.sql.Wrapper},
 *     presumably unwrapped to a schema implementation by callers — TODO confirm
 * @throws SQLException if registration fails
 */
void register(Wrapper parentSchema) throws SQLException;
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import java.util.Collection;


public interface CatalogProvider {

Collection<Catalog> catalogs();
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
package com.linkedin.hoptimator;

import java.util.Map;
import java.sql.SQLException;
import java.util.Map;


/**
 * Produces configuration for an object of type {@code T}.
 *
 * <p>NOTE(review): the exact semantics of the returned map (e.g. connector
 * properties for a table or sink) are not visible from this file — confirm
 * against implementations.
 */
public interface Connector<T> {

/**
 * Computes a string-to-string configuration map for the given object.
 *
 * @param t the object to configure
 * @return configuration key/value pairs
 * @throws SQLException if configuration cannot be derived
 */
Map<String, String> configure(T t) throws SQLException;
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import java.util.Collection;


public interface ConnectorProvider {

<T> Collection<Connector<T>> connectors(Class<T> clazz);
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package com.linkedin.hoptimator;

/** A collection of tables, as populated by a Catalog. */
/** A collection of tables, as populated by a Catalog. */
public interface Database {

/** Name of the database. */
Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,15 @@
package com.linkedin.hoptimator;

import java.util.List;
import java.sql.SQLException;
import java.util.List;


public interface Deployable {

void create() throws SQLException;

void delete() throws SQLException;

void update() throws SQLException;

/** Render a list of specs, usually YAML. */
Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,16 @@
package com.linkedin.hoptimator;

import java.util.List;
import java.sql.SQLException;
import java.util.List;


/**
 * Manages the lifecycle (create/update/delete) of objects of type {@code T},
 * and can render them as specs.
 */
public interface Deployer<T> {

/** Creates (deploys) the given object. @throws SQLException on failure */
void create(T t) throws SQLException;

/** Updates an existing deployment of the given object. @throws SQLException on failure */
void update(T t) throws SQLException;

/** Deletes (undeploys) the given object. @throws SQLException on failure */
void delete(T t) throws SQLException;

/**
 * Renders the given object as a list of specs — presumably YAML, by analogy
 * with {@code Deployable.specify} ("Render a list of specs, usually YAML.");
 * confirm against implementations.
 *
 * @throws SQLException if specs cannot be rendered
 */
List<String> specify(T t) throws SQLException;
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import java.util.Collection;


public interface DeployerProvider {

<T> Collection<Deployer<T>> deployers(Class<T> clazz);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,16 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;


public interface Validator<T> {

void validate(T t, Issues issues);

static void validateSubdomainName(String s, Issues issues) {
Expand Down Expand Up @@ -142,7 +142,7 @@ private String fullPath() {
Collections.reverse(parts);
return String.join("/", parts);
}

private String format(int indentLevel) {
if (empty()) {
return "";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import java.util.Collection;


public interface ValidatorProvider {

<T> Collection<Validator<T>> validators(Class<T> clazz);
Expand Down
Original file line number Diff line number Diff line change
@@ -1,19 +1,19 @@
package com.linkedin.hoptimator.avro;

import org.apache.avro.Schema;
import java.util.AbstractMap;
import java.util.List;
import java.util.stream.Collectors;

import org.apache.avro.Schema;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rel.type.RelDataTypeImpl;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelProtoDataType;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

import java.util.AbstractMap;
import java.util.List;
import java.util.stream.Collectors;

/** Converts between Avro and Calcite's RelDataType */
public final class AvroConverter {
Expand All @@ -23,41 +23,43 @@ private AvroConverter() {

public static Schema avro(String namespace, String name, RelDataType dataType) {
if (dataType.isStruct()) {
List<Schema.Field> fields = dataType.getFieldList().stream()
.map(x -> new Schema.Field(sanitize(x.getName()), avro(namespace, x.getName(), x.getType()), describe(x), null))
.collect(Collectors.toList());
return createAvroSchemaWithNullability(Schema.createRecord(sanitize(name), dataType.toString(), namespace, false, fields),
dataType.isNullable());
List<Schema.Field> fields = dataType.getFieldList()
.stream()
.map(x -> new Schema.Field(sanitize(x.getName()), avro(namespace, x.getName(), x.getType()), describe(x),
null))
.collect(Collectors.toList());
return createAvroSchemaWithNullability(
Schema.createRecord(sanitize(name), dataType.toString(), namespace, false, fields), dataType.isNullable());
} else {
switch (dataType.getSqlTypeName()) {
case INTEGER:
return createAvroTypeWithNullability(Schema.Type.INT, dataType.isNullable());
case SMALLINT:
return createAvroTypeWithNullability(Schema.Type.INT, dataType.isNullable());
case BIGINT:
return createAvroTypeWithNullability(Schema.Type.LONG, dataType.isNullable());
case VARCHAR:
return createAvroTypeWithNullability(Schema.Type.STRING, dataType.isNullable());
case FLOAT:
return createAvroTypeWithNullability(Schema.Type.FLOAT, dataType.isNullable());
case DOUBLE:
return createAvroTypeWithNullability(Schema.Type.DOUBLE, dataType.isNullable());
case CHAR:
return createAvroTypeWithNullability(Schema.Type.STRING, dataType.isNullable());
case BOOLEAN:
return createAvroTypeWithNullability(Schema.Type.BOOLEAN, dataType.isNullable());
case ARRAY:
return createAvroSchemaWithNullability(Schema.createArray(avro(null, null, dataType.getComponentType())),
dataType.isNullable());
// TODO support map types
// Appears to require a Calcite version bump
// case MAP:
// return createAvroSchemaWithNullability(Schema.createMap(avroPrimitive(dataType.getValueType())), dataType.isNullable());
case UNKNOWN:
case NULL:
return Schema.createUnion(Schema.create(Schema.Type.NULL));
default:
throw new UnsupportedOperationException("No support yet for " + dataType.getSqlTypeName().toString());
case INTEGER:
return createAvroTypeWithNullability(Schema.Type.INT, dataType.isNullable());
case SMALLINT:
return createAvroTypeWithNullability(Schema.Type.INT, dataType.isNullable());
case BIGINT:
return createAvroTypeWithNullability(Schema.Type.LONG, dataType.isNullable());
case VARCHAR:
return createAvroTypeWithNullability(Schema.Type.STRING, dataType.isNullable());
case FLOAT:
return createAvroTypeWithNullability(Schema.Type.FLOAT, dataType.isNullable());
case DOUBLE:
return createAvroTypeWithNullability(Schema.Type.DOUBLE, dataType.isNullable());
case CHAR:
return createAvroTypeWithNullability(Schema.Type.STRING, dataType.isNullable());
case BOOLEAN:
return createAvroTypeWithNullability(Schema.Type.BOOLEAN, dataType.isNullable());
case ARRAY:
return createAvroSchemaWithNullability(Schema.createArray(avro(null, null, dataType.getComponentType())),
dataType.isNullable());
// TODO support map types
// Appears to require a Calcite version bump
// case MAP:
// return createAvroSchemaWithNullability(Schema.createMap(avroPrimitive(dataType.getValueType())), dataType.isNullable());
case UNKNOWN:
case NULL:
return Schema.createUnion(Schema.create(Schema.Type.NULL));
default:
throw new UnsupportedOperationException("No support yet for " + dataType.getSqlTypeName().toString());
}
}
}
Expand All @@ -82,42 +84,43 @@ private static Schema createAvroTypeWithNullability(Schema.Type rawType, boolean
public static RelDataType rel(Schema schema, RelDataTypeFactory typeFactory) {
RelDataType unknown = typeFactory.createUnknownType();
switch (schema.getType()) {
case RECORD:
return typeFactory.createStructType(schema.getFields().stream()
.map(x -> new AbstractMap.SimpleEntry<>(x.name(), rel(x.schema(), typeFactory)))
.filter(x -> x.getValue().getSqlTypeName() != SqlTypeName.NULL)
.filter(x -> x.getValue().getSqlTypeName() != unknown.getSqlTypeName())
.collect(Collectors.toList()));
case INT:
return createRelType(typeFactory, SqlTypeName.INTEGER);
case LONG:
return createRelType(typeFactory, SqlTypeName.BIGINT);
case ENUM:
case FIXED:
case STRING:
return createRelType(typeFactory, SqlTypeName.VARCHAR);
case FLOAT:
return createRelType(typeFactory, SqlTypeName.FLOAT);
case DOUBLE:
return createRelType(typeFactory, SqlTypeName.DOUBLE);
case BOOLEAN:
return createRelType(typeFactory, SqlTypeName.BOOLEAN);
case ARRAY:
return typeFactory.createArrayType(rel(schema.getElementType(), typeFactory), -1);
case RECORD:
return typeFactory.createStructType(schema.getFields()
.stream()
.map(x -> new AbstractMap.SimpleEntry<>(x.name(), rel(x.schema(), typeFactory)))
.filter(x -> x.getValue().getSqlTypeName() != SqlTypeName.NULL)
.filter(x -> x.getValue().getSqlTypeName() != unknown.getSqlTypeName())
.collect(Collectors.toList()));
case INT:
return createRelType(typeFactory, SqlTypeName.INTEGER);
case LONG:
return createRelType(typeFactory, SqlTypeName.BIGINT);
case ENUM:
case FIXED:
case STRING:
return createRelType(typeFactory, SqlTypeName.VARCHAR);
case FLOAT:
return createRelType(typeFactory, SqlTypeName.FLOAT);
case DOUBLE:
return createRelType(typeFactory, SqlTypeName.DOUBLE);
case BOOLEAN:
return createRelType(typeFactory, SqlTypeName.BOOLEAN);
case ARRAY:
return typeFactory.createArrayType(rel(schema.getElementType(), typeFactory), -1);
// TODO support map types
// Appears to require a Calcite version bump
// case MAP:
// return typeFactory.createMapType(typeFactory.createSqlType(SqlTypeName.VARCHAR), rel(schema.getValueType(), typeFactory));
case UNION:
if (schema.isNullable() && schema.getTypes().size() == 2) {
Schema innerType = schema.getTypes().stream().filter(x -> x.getType() != Schema.Type.NULL).findFirst().get();
return typeFactory.createTypeWithNullability(rel(innerType, typeFactory), true);
} else {
// TODO support more elaborate union types
return typeFactory.createTypeWithNullability(typeFactory.createUnknownType(), true);
}
default:
return typeFactory.createUnknownType();
case UNION:
if (schema.isNullable() && schema.getTypes().size() == 2) {
Schema innerType = schema.getTypes().stream().filter(x -> x.getType() != Schema.Type.NULL).findFirst().get();
return typeFactory.createTypeWithNullability(rel(innerType, typeFactory), true);
} else {
// TODO support more elaborate union types
return typeFactory.createTypeWithNullability(typeFactory.createUnknownType(), true);
}
default:
return typeFactory.createUnknownType();
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
package com.linkedin.hoptimator.avro;

import com.linkedin.hoptimator.Validator;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileWriter;
Expand All @@ -11,16 +13,15 @@
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.schema.Table;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.Table;

import com.linkedin.hoptimator.Validator;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

/** Validates that tables follow Avro schema evolution rules. */
class AvroTableValidator implements Validator<SchemaPlus> {

@Override
public void validate(SchemaPlus schema, Issues issues) {
try {
Expand Down Expand Up @@ -55,7 +56,7 @@ private void validate(SchemaPlus schema, Table table, Table originalTable, Issue
DataFileWriter<Object> dataFileWriter = new DataFileWriter<Object>(datumWriter)) {
dataFileWriter.create(originalAvroSchema, out);
for (Object obj : new RandomData(avroSchema, 1)) {
dataFileWriter.append(obj);
dataFileWriter.append(obj);
}
} catch (IOException | RuntimeException e) {
issues.error("Avro schema evolution error: cannot serialize new records using the existing schema");
Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
package com.linkedin.hoptimator.avro;

import com.linkedin.hoptimator.Validator;
import com.linkedin.hoptimator.ValidatorProvider;
import java.util.Collection;
import java.util.Collections;

import org.apache.calcite.schema.SchemaPlus;

import java.util.Collection;
import java.util.Collections;
import com.linkedin.hoptimator.Validator;
import com.linkedin.hoptimator.ValidatorProvider;


/** Provides AvroValidator. */
public class AvroValidatorProvider implements ValidatorProvider {
Expand Down
Loading