[hotfix][test] Fix some Architecture Violation errors
pnowojski committed Jan 12, 2024
1 parent b660b06 commit 1d45dd0
Showing 6 changed files with 10 additions and 2 deletions.
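Background: Flink's architecture tests (built on ArchUnit) enforce, among other
rules, that production classes carry one of the API-stability annotations
(@Public, @PublicEvolving, @Experimental, @Internal); @Internal marks a class as
internal implementation with no stability guarantees. Below is a minimal sketch
of that kind of rule, assuming a JUnit 5 setup and an example package name. It
is illustrative only, not Flink's actual rule definition; the real rules live in
the flink-architecture-tests modules and track known violations in a store.

import com.tngtech.archunit.core.domain.JavaClasses;
import com.tngtech.archunit.core.importer.ClassFileImporter;
import com.tngtech.archunit.lang.ArchRule;

import org.apache.flink.annotation.Experimental;
import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.Public;
import org.apache.flink.annotation.PublicEvolving;

import org.junit.jupiter.api.Test;

import static com.tngtech.archunit.lang.syntax.ArchRuleDefinition.classes;

class ApiAnnotationRuleSketchTest {

    @Test
    void publicClassesDeclareApiStability() {
        // "org.apache.flink.orc" is just an example package for this sketch.
        JavaClasses imported =
                new ClassFileImporter().importPackages("org.apache.flink.orc");

        // A public class without any of these annotations is reported as an
        // "Architecture Violation", the kind of error this commit fixes by
        // adding @Internal.
        ArchRule rule =
                classes()
                        .that().arePublic()
                        .should().beAnnotatedWith(Public.class)
                        .orShould().beAnnotatedWith(PublicEvolving.class)
                        .orShould().beAnnotatedWith(Experimental.class)
                        .orShould().beAnnotatedWith(Internal.class);

        rule.check(imported);
    }
}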
HadoopInputSplit.java
@@ -18,6 +18,7 @@
 
 package org.apache.flink.api.java.hadoop.mapreduce.wrapper;
 
+import org.apache.flink.annotation.Internal;
 import org.apache.flink.core.io.InputSplit;
 import org.apache.flink.core.io.LocatableInputSplit;
 
@@ -33,6 +34,7 @@
  * A wrapper that represents an input split from the Hadoop mapreduce API as a Flink {@link
  * InputSplit}.
  */
+@Internal
 public class HadoopInputSplit extends LocatableInputSplit {
 
     private static final long serialVersionUID = 6119153593707857235L;
WritableComparator.java
@@ -18,6 +18,7 @@
 
 package org.apache.flink.api.java.typeutils.runtime;
 
+import org.apache.flink.annotation.Internal;
 import org.apache.flink.api.common.typeutils.TypeComparator;
 import org.apache.flink.core.memory.DataInputView;
 import org.apache.flink.core.memory.DataOutputView;
@@ -36,6 +37,7 @@
  *
  * @param <T>
  */
+@Internal
 public class WritableComparator<T extends Writable & Comparable<T>> extends TypeComparator<T> {
 
     private static final long serialVersionUID = 1L;
WritableSerializer.java
@@ -163,6 +163,7 @@ public TypeSerializerSnapshot<T> snapshotConfiguration() {
     }
 
     /** {@link WritableSerializer} snapshot class. */
+    @Internal
     public static final class WritableSerializerSnapshot<T extends Writable>
             extends GenericTypeSerializerSnapshot<T, WritableSerializer> {
 
HadoopPathBasedBulkFormatBuilder.java
@@ -18,6 +18,7 @@
 
 package org.apache.flink.streaming.api.functions.sink.filesystem;
 
+import org.apache.flink.annotation.Internal;
 import org.apache.flink.core.fs.Path;
 import org.apache.flink.formats.hadoop.bulk.DefaultHadoopFileCommitterFactory;
 import org.apache.flink.formats.hadoop.bulk.HadoopFileCommitterFactory;
@@ -32,6 +33,7 @@
 import java.io.IOException;
 
 /** Buckets builder to create buckets that use {@link HadoopPathBasedPartFileWriter}. */
+@Internal
 public class HadoopPathBasedBulkFormatBuilder<
                 IN, BucketID, T extends HadoopPathBasedBulkFormatBuilder<IN, BucketID, T>>
         extends StreamingFileSink.BucketsBuilder<IN, BucketID, T> {
SerializableConfiguration.java
@@ -17,6 +17,8 @@
 
 package org.apache.flink.streaming.api.functions.sink.filesystem;
 
+import org.apache.flink.annotation.Internal;
+
 import org.apache.hadoop.conf.Configuration;
 
 import java.io.IOException;
@@ -25,6 +27,7 @@
 import java.io.Serializable;
 
 /** Wrapper of hadoop Configuration to make it serializable. */
+@Internal
 public class SerializableConfiguration implements Serializable {
 
     private static final long serialVersionUID = 1L;
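Aside: Hadoop's Configuration does not implement java.io.Serializable, which is
why a wrapper like SerializableConfiguration exists. The usual pattern keeps the
wrapped Configuration in a transient field and serializes it through Hadoop's
own Writable methods. The sketch below illustrates that pattern under those
assumptions; it is not necessarily this class's exact implementation.

import org.apache.hadoop.conf.Configuration;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

/** Sketch: hand-rolled Java serialization for a Hadoop Configuration. */
public class SerializableConfigurationSketch implements Serializable {

    private static final long serialVersionUID = 1L;

    // Configuration itself is not Serializable, so the field must be transient.
    private transient Configuration configuration;

    public SerializableConfigurationSketch(Configuration configuration) {
        this.configuration = configuration;
    }

    public Configuration get() {
        return configuration;
    }

    private void writeObject(ObjectOutputStream out) throws IOException {
        out.defaultWriteObject();
        // Configuration implements Writable; ObjectOutputStream is a DataOutput.
        configuration.write(out);
    }

    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        configuration = new Configuration(false);
        configuration.readFields(in);
    }
}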
OrcShimV200.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.orc.shim;
 
-import org.apache.flink.annotation.VisibleForTesting;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.orc.OrcFilters.Predicate;
 import org.apache.flink.orc.vector.HiveOrcBatchWrapper;
@@ -166,7 +165,6 @@ public boolean nextBatch(RecordReader reader, VectorizedRowBatch rowBatch) throw
         }
     }
 
-    @VisibleForTesting
     public static Tuple2<Long, Long> getOffsetAndLengthForSplit(
             long splitStart, long splitLength, List<StripeInformation> stripes) {
         long splitEnd = splitStart + splitLength;
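The last hunk goes the other way: rather than adding an annotation, it removes
@VisibleForTesting from getOffsetAndLengthForSplit, presumably because the
method is also called from production code, which Flink's architecture tests
flag. A sketch of that kind of rule follows; again it is illustrative, and the
real Flink rule differs in details (for example, access from within the
declaring class is allowed).

import com.tngtech.archunit.lang.ArchRule;

import org.apache.flink.annotation.VisibleForTesting;

import static com.tngtech.archunit.lang.syntax.ArchRuleDefinition.methods;

class VisibleForTestingRuleSketch {

    // Methods marked @VisibleForTesting may only be called from test classes.
    // When a production caller exists, the annotation itself becomes the
    // violation, so dropping it (as in the hunk above) resolves the error.
    static final ArchRule VISIBLE_FOR_TESTING_ONLY_USED_FROM_TESTS =
            methods()
                    .that().areAnnotatedWith(VisibleForTesting.class)
                    .should().onlyBeCalled()
                    .byClassesThat().haveSimpleNameEndingWith("Test");
}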
