From 27429a74f209d7c2bc0ef48f4c28847ce9d9ddf2 Mon Sep 17 00:00:00 2001
From: Greg Hogan
Date: Mon, 31 Jul 2017 16:29:06 -0400
Subject: [PATCH] [hotfix] [gelly] Explicit type can be replaced with <>

In Java 8 the diamond operator can be used in cases that would result in a
compile error in Java 7. In Gelly we have often wanted to use the diamond
operator and only discovered the resulting Java 7 errors when running tests
on TravisCI.

This closes #4457.
---
 .../graph/drivers/ConnectedComponents.java | 4 +- .../apache/flink/graph/drivers/EdgeList.java | 2 +- .../graph/drivers/parameter/Simplify.java | 6 +- .../examples/EuclideanGraphWeighing.java | 8 +- .../flink/graph/examples/GSAPageRank.java | 2 +- .../GSASingleSourceShortestPaths.java | 2 +- .../flink/graph/examples/IncrementalSSSP.java | 2 +- .../flink/graph/examples/MusicProfiles.java | 14 +- .../apache/flink/graph/examples/PageRank.java | 4 +- .../flink/graph/examples/PregelSSSP.java | 2 +- .../examples/SingleSourceShortestPaths.java | 2 +- .../examples/data/CommunityDetectionData.java | 66 ++++----- .../examples/data/EuclideanGraphData.java | 40 ++--- .../examples/data/IncrementalSSSPData.java | 38 ++--- .../examples/data/LabelPropagationData.java | 68 ++++----- .../examples/data/MusicProfilesData.java | 84 +++++------ .../graph/examples/data/PageRankData.java | 20 +-- .../examples/data/TriangleCountData.java | 20 +-- .../library/CommunityDetectionITCase.java | 4 +- .../graph/library/LabelPropagationITCase.java | 4 +- .../graph/library/SummarizationITCase.java | 12 +- .../library/TriangleEnumeratorITCase.java | 2 +- .../graph/test/GatherSumApplyITCase.java | 10 +- .../graph/test/examples/PageRankITCase.java | 8 +- .../java/org/apache/flink/graph/Graph.java | 140 +++++++++--------- .../apache/flink/graph/GraphCsvReader.java | 6 +- .../annotate/directed/EdgeDegreesPair.java | 2 +- .../annotate/directed/EdgeSourceDegrees.java | 2 +- .../annotate/directed/EdgeTargetDegrees.java | 2 +- .../annotate/directed/VertexDegrees.java | 8 +- .../annotate/directed/VertexInDegree.java | 6 +- .../annotate/directed/VertexOutDegree.java | 6 +- .../annotate/undirected/EdgeDegreePair.java | 2 +- .../annotate/undirected/EdgeSourceDegree.java | 2 +- .../annotate/undirected/EdgeTargetDegree.java | 2 +- .../annotate/undirected/VertexDegree.java | 6 +- .../filter/undirected/MaximumDegree.java | 8 +- .../graph/asm/simple/directed/Simplify.java | 2 +- .../graph/asm/simple/undirected/Simplify.java | 2 +- .../flink/graph/asm/translate/Translate.java | 8 +- .../flink/graph/bipartite/BipartiteGraph.java | 8 +- .../graph/generator/GraphGeneratorUtils.java | 2 +- .../flink/graph/generator/RMatGraph.java | 2 +- .../flink/graph/gsa/GSAConfiguration.java | 6 +- .../graph/gsa/GatherSumApplyIteration.java | 10 +- .../graph/library/CommunityDetection.java | 8 +- .../graph/library/ConnectedComponents.java | 7 +- .../graph/library/GSAConnectedComponents.java | 5 +- .../library/GSASingleSourceShortestPaths.java | 4 +- .../flink/graph/library/LabelPropagation.java | 5 +- .../library/SingleSourceShortestPaths.java | 4 +- .../flink/graph/library/Summarization.java | 16 +- .../graph/library/TriangleEnumerator.java | 14 +- .../directed/LocalClusteringCoefficient.java | 6 +- .../clustering/directed/TriangleListing.java | 16 +- .../LocalClusteringCoefficient.java | 6 +- .../undirected/TriangleListing.java | 12 +- .../graph/library/linkanalysis/HITS.java | 24 +-- .../graph/library/linkanalysis/PageRank.java | 18 +-- .../library/metric/directed/EdgeMetrics.java | 6 +- .../metric/undirected/EdgeMetrics.java | 4
+- .../graph/library/similarity/AdamicAdar.java | 14 +- .../library/similarity/JaccardIndex.java | 10 +- .../pregel/VertexCentricConfiguration.java | 2 +- .../graph/pregel/VertexCentricIteration.java | 10 +- .../spargel/ScatterGatherConfiguration.java | 4 +- .../apache/flink/graph/utils/GraphUtils.java | 2 +- .../validation/InvalidVertexIdsValidator.java | 6 +- .../apache/flink/graph/asm/AsmTestBase.java | 4 +- .../directed/EdgeDegreesPairTest.java | 4 +- .../directed/EdgeSourceDegreesTest.java | 4 +- .../directed/EdgeTargetDegreesTest.java | 4 +- .../annotate/directed/VertexDegreesTest.java | 6 +- .../undirected/EdgeDegreePairTest.java | 4 +- .../undirected/EdgeSourceDegreeTest.java | 4 +- .../undirected/EdgeTargetDegreeTest.java | 4 +- .../annotate/undirected/VertexDegreeTest.java | 6 +- .../filter/undirected/MaximumDegreeTest.java | 7 +- .../asm/simple/directed/SimplifyTest.java | 2 +- .../asm/simple/undirected/SimplifyTest.java | 4 +- .../graph/generator/CirculantGraphTest.java | 6 +- .../graph/generator/CompleteGraphTest.java | 6 +- .../flink/graph/generator/CycleGraphTest.java | 6 +- .../flink/graph/generator/EchoGraphTest.java | 6 +- .../flink/graph/generator/EmptyGraphTest.java | 6 +- .../flink/graph/generator/GridGraphTest.java | 6 +- .../graph/generator/HypercubeGraphTest.java | 6 +- .../flink/graph/generator/PathGraphTest.java | 6 +- .../flink/graph/generator/RMatGraphTest.java | 6 +- .../generator/SingletonEdgeGraphTest.java | 6 +- .../flink/graph/generator/StarGraphTest.java | 6 +- .../flink/graph/gsa/GSACompilerTest.java | 4 +- .../flink/graph/gsa/GSATranslationTest.java | 4 +- ...edComponentsWithRandomisedEdgesITCase.java | 2 +- .../LocalClusteringCoefficientTest.java | 7 +- .../directed/TriangleListingTest.java | 2 +- .../LocalClusteringCoefficientTest.java | 7 +- .../undirected/TriangleListingTest.java | 2 +- .../graph/library/linkanalysis/HITSTest.java | 2 +- .../library/metric/ChecksumHashCodeTest.java | 2 +- .../library/similarity/AdamicAdarTest.java | 6 +- .../library/similarity/JaccardIndexTest.java | 2 +- .../graph/pregel/PregelCompilerTest.java | 12 +- .../graph/pregel/PregelTranslationTest.java | 2 +- .../graph/spargel/SpargelCompilerTest.java | 16 +- .../graph/spargel/SpargelTranslationTest.java | 4 +- .../test/CollectionModeSuperstepITCase.java | 6 +- .../ScatterGatherConfigurationITCase.java | 2 +- .../DegreesWithExceptionITCase.java | 12 +- .../test/operations/GraphCreationITCase.java | 4 +- .../test/operations/JoinWithEdgesITCase.java | 4 +- .../operations/JoinWithVerticesITCase.java | 4 +- .../ReduceOnEdgesWithExceptionITCase.java | 4 +- .../ReduceOnNeighborsWithExceptionITCase.java | 8 +- .../test/operations/TypeExtractorTest.java | 10 +- 115 files changed, 538 insertions(+), 568 deletions(-) diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ConnectedComponents.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ConnectedComponents.java index 32f94c125984b..c40103b162e51 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ConnectedComponents.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ConnectedComponents.java @@ -46,8 +46,8 @@ public String getLongDescription() { @Override public DataSet plan(Graph graph) throws Exception { return graph - .mapVertices(new MapVertices()) - .run(new GSAConnectedComponents(Integer.MAX_VALUE)); + .mapVertices(new MapVertices<>()) + .run(new 
GSAConnectedComponents<>(Integer.MAX_VALUE)); } /** diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/EdgeList.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/EdgeList.java index 563908c7829a4..f0de90a49ee70 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/EdgeList.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/EdgeList.java @@ -54,7 +54,7 @@ public DataSet plan(Graph graph) throws Exception { if (hasNullValueEdges(edges)) { return edges - .map(new EdgeToTuple2Map()) + .map(new EdgeToTuple2Map<>()) .name("Edge to Tuple2") .setParallelism(parallelism.getValue().intValue()); } else { diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Simplify.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Simplify.java index 9fc937c3d7cc4..14ff9d06e2f3f 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Simplify.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Simplify.java @@ -115,15 +115,15 @@ public > Graph simplify(Graph()); + .run(new org.apache.flink.graph.asm.simple.directed.Simplify<>()); break; case UNDIRECTED: graph = graph - .run(new org.apache.flink.graph.asm.simple.undirected.Simplify(false)); + .run(new org.apache.flink.graph.asm.simple.undirected.Simplify<>(false)); break; case UNDIRECTED_CLIP_AND_FLIP: graph = graph - .run(new org.apache.flink.graph.asm.simple.undirected.Simplify(true)); + .run(new org.apache.flink.graph.asm.simple.undirected.Simplify<>(true)); break; } diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/EuclideanGraphWeighing.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/EuclideanGraphWeighing.java index 6380628a69066..f426a9c0ca2ef 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/EuclideanGraphWeighing.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/EuclideanGraphWeighing.java @@ -83,8 +83,8 @@ public Tuple3 map(Triplet triplet) Vertex srcVertex = triplet.getSrcVertex(); Vertex trgVertex = triplet.getTrgVertex(); - return new Tuple3(srcVertex.getId(), trgVertex.getId(), - srcVertex.getValue().euclideanDistance(trgVertex.getValue())); + return new Tuple3<>(srcVertex.getId(), trgVertex.getId(), + srcVertex.getValue().euclideanDistance(trgVertex.getValue())); } }); @@ -186,7 +186,7 @@ private static DataSet> getVerticesDataSet(ExecutionEnvironm @Override public Vertex map(Tuple3 value) throws Exception { - return new Vertex(value.f0, new Point(value.f1, value.f2)); + return new Vertex<>(value.f0, new Point(value.f1, value.f2)); } }); } else { @@ -203,7 +203,7 @@ private static DataSet> getEdgesDataSet(ExecutionEnvironment @Override public Edge map(Tuple2 tuple2) throws Exception { - return new Edge(tuple2.f0, tuple2.f1, 0.0); + return new Edge<>(tuple2.f0, tuple2.f1, 0.0); } }); } else { diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSAPageRank.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSAPageRank.java index 450841931139f..2cd9f4e4aa95f 100644 --- 
a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSAPageRank.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSAPageRank.java @@ -66,7 +66,7 @@ public DataSet> run(Graph network) throws E parameters.setOptNumVertices(true); return networkWithWeights.runGatherSumApplyIteration(new GatherRanks(), new SumRanks(), - new UpdateRanks(beta), maxIterations, parameters) + new UpdateRanks<>(beta), maxIterations, parameters) .getVertices(); } diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSASingleSourceShortestPaths.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSASingleSourceShortestPaths.java index aa2b7e986f9fd..006ebd906ba2c 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSASingleSourceShortestPaths.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSASingleSourceShortestPaths.java @@ -179,7 +179,7 @@ private static DataSet> getEdgeDataSet(ExecutionEnvironment e .fieldDelimiter("\t") .lineDelimiter("\n") .types(Long.class, Long.class, Double.class) - .map(new Tuple3ToEdgeMap()); + .map(new Tuple3ToEdgeMap<>()); } else { return SingleSourceShortestPathsData.getDefaultEdgeDataSet(env); } diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/IncrementalSSSP.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/IncrementalSSSP.java index 197ad684b254e..d5d770daf1da6 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/IncrementalSSSP.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/IncrementalSSSP.java @@ -265,7 +265,7 @@ private static Graph getSSSPGraph(ExecutionEnvironment env private static Edge getEdgeToBeRemoved() { if (fileOutput) { - return new Edge(srcEdgeToBeRemoved, trgEdgeToBeRemoved, valEdgeToBeRemoved); + return new Edge<>(srcEdgeToBeRemoved, trgEdgeToBeRemoved, valEdgeToBeRemoved); } else { return IncrementalSSSPData.getDefaultEdgeToBeRemoved(); } diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/MusicProfiles.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/MusicProfiles.java index 43a0a9b2994a4..6afec71f2d83b 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/MusicProfiles.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/MusicProfiles.java @@ -144,7 +144,7 @@ public Long map(String value) { .map(new MapFunction, Tuple2>() { @Override public Tuple2 map(Tuple2 tuple2) throws Exception { - return new Tuple2(tuple2.f1, tuple2.f0); + return new Tuple2<>(tuple2.f1, tuple2.f0); } }); @@ -154,7 +154,7 @@ public Tuple2 map(Tuple2 tuple2) throws Exception { public Long vertexJoin(Long vertexValue, Long inputValue) { return inputValue; } - }).run(new LabelPropagation(maxIterations)); + }).run(new LabelPropagation<>(maxIterations)); if (fileOutput) { verticesWithCommunity.writeAsCsv(communitiesOutputPath, "\n", "\t"); @@ -172,7 +172,7 @@ private static final class ExtractMismatchSongIds implements MapFunction map(String value) { String[] tokens = value.split("\\s+"); String songId = tokens[1].substring(1); - return new Tuple1(songId); + return new Tuple1<>(songId); } } @@ -211,7 +211,7 @@ public 
void iterateEdges(Vertex vertex, topSong = edge.getTarget(); } } - out.collect(new Tuple2(vertex.getId(), topSong)); + out.collect(new Tuple2<>(vertex.getId(), topSong)); } } @@ -219,14 +219,14 @@ private static final class CreateSimilarUserEdges implements GroupReduceFunction Edge> { public void reduce(Iterable> edges, Collector> out) { - List listeners = new ArrayList(); + List listeners = new ArrayList<>(); for (Edge edge : edges) { listeners.add(edge.getSource()); } for (int i = 0; i < listeners.size() - 1; i++) { for (int j = i + 1; j < listeners.size(); j++) { - out.collect(new Edge(listeners.get(i), - listeners.get(j), NullValue.getInstance())); + out.collect(new Edge<>(listeners.get(i), + listeners.get(j), NullValue.getInstance())); } } } diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PageRank.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PageRank.java index a88f80e419b54..19d80be7822f4 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PageRank.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PageRank.java @@ -65,8 +65,8 @@ public DataSet> run(Graph network) throws E ScatterGatherConfiguration parameters = new ScatterGatherConfiguration(); parameters.setOptNumVertices(true); - return networkWithWeights.runScatterGatherIteration(new RankMessenger(), - new VertexRankUpdater(beta), maxIterations, parameters) + return networkWithWeights.runScatterGatherIteration(new RankMessenger<>(), + new VertexRankUpdater<>(beta), maxIterations, parameters) .getVertices(); } diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PregelSSSP.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PregelSSSP.java index 97a7c5fd986c1..da227e011fc9e 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PregelSSSP.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PregelSSSP.java @@ -183,7 +183,7 @@ private static DataSet> getEdgesDataSet(ExecutionEnvironment .fieldDelimiter("\t") .ignoreComments("%") .types(Long.class, Long.class, Double.class) - .map(new Tuple3ToEdgeMap()); + .map(new Tuple3ToEdgeMap<>()); } else { return SingleSourceShortestPathsData.getDefaultEdgeDataSet(env); } diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/SingleSourceShortestPaths.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/SingleSourceShortestPaths.java index 07c6b56fa17a0..211ce39720755 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/SingleSourceShortestPaths.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/SingleSourceShortestPaths.java @@ -188,7 +188,7 @@ private static DataSet> getEdgesDataSet(ExecutionEnvironment .lineDelimiter("\n") .fieldDelimiter("\t") .types(Long.class, Long.class, Double.class) - .map(new Tuple3ToEdgeMap()); + .map(new Tuple3ToEdgeMap<>()); } else { return SingleSourceShortestPathsData.getDefaultEdgeDataSet(env); } diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/CommunityDetectionData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/CommunityDetectionData.java index 
1d9b257f61206..713cbbbbf2ae4 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/CommunityDetectionData.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/CommunityDetectionData.java @@ -43,40 +43,40 @@ public class CommunityDetectionData { public static DataSet> getDefaultEdgeDataSet(ExecutionEnvironment env) { - List> edges = new ArrayList>(); - edges.add(new Edge(1L, 2L, 1.0)); - edges.add(new Edge(1L, 3L, 2.0)); - edges.add(new Edge(1L, 4L, 3.0)); - edges.add(new Edge(2L, 3L, 4.0)); - edges.add(new Edge(2L, 4L, 5.0)); - edges.add(new Edge(3L, 5L, 6.0)); - edges.add(new Edge(5L, 6L, 7.0)); - edges.add(new Edge(5L, 7L, 8.0)); - edges.add(new Edge(6L, 7L, 9.0)); - edges.add(new Edge(7L, 12L, 10.0)); - edges.add(new Edge(8L, 9L, 11.0)); - edges.add(new Edge(8L, 10L, 12.0)); - edges.add(new Edge(8L, 11L, 13.0)); - edges.add(new Edge(9L, 10L, 14.0)); - edges.add(new Edge(9L, 11L, 15.0)); - edges.add(new Edge(10L, 11L, 16.0)); - edges.add(new Edge(10L, 12L, 17.0)); - edges.add(new Edge(11L, 12L, 18.0)); + List> edges = new ArrayList<>(); + edges.add(new Edge<>(1L, 2L, 1.0)); + edges.add(new Edge<>(1L, 3L, 2.0)); + edges.add(new Edge<>(1L, 4L, 3.0)); + edges.add(new Edge<>(2L, 3L, 4.0)); + edges.add(new Edge<>(2L, 4L, 5.0)); + edges.add(new Edge<>(3L, 5L, 6.0)); + edges.add(new Edge<>(5L, 6L, 7.0)); + edges.add(new Edge<>(5L, 7L, 8.0)); + edges.add(new Edge<>(6L, 7L, 9.0)); + edges.add(new Edge<>(7L, 12L, 10.0)); + edges.add(new Edge<>(8L, 9L, 11.0)); + edges.add(new Edge<>(8L, 10L, 12.0)); + edges.add(new Edge<>(8L, 11L, 13.0)); + edges.add(new Edge<>(9L, 10L, 14.0)); + edges.add(new Edge<>(9L, 11L, 15.0)); + edges.add(new Edge<>(10L, 11L, 16.0)); + edges.add(new Edge<>(10L, 12L, 17.0)); + edges.add(new Edge<>(11L, 12L, 18.0)); return env.fromCollection(edges); } public static DataSet> getSimpleEdgeDataSet(ExecutionEnvironment env) { - List> edges = new ArrayList>(); - edges.add(new Edge(1L, 2L, 1.0)); - edges.add(new Edge(1L, 3L, 2.0)); - edges.add(new Edge(1L, 4L, 3.0)); - edges.add(new Edge(1L, 5L, 4.0)); - edges.add(new Edge(2L, 6L, 5.0)); - edges.add(new Edge(6L, 7L, 6.0)); - edges.add(new Edge(6L, 8L, 7.0)); - edges.add(new Edge(7L, 8L, 8.0)); + List> edges = new ArrayList<>(); + edges.add(new Edge<>(1L, 2L, 1.0)); + edges.add(new Edge<>(1L, 3L, 2.0)); + edges.add(new Edge<>(1L, 4L, 3.0)); + edges.add(new Edge<>(1L, 5L, 4.0)); + edges.add(new Edge<>(2L, 6L, 5.0)); + edges.add(new Edge<>(6L, 7L, 6.0)); + edges.add(new Edge<>(6L, 8L, 7.0)); + edges.add(new Edge<>(7L, 8L, 8.0)); return env.fromCollection(edges); } @@ -84,11 +84,11 @@ public static DataSet> getSimpleEdgeDataSet(ExecutionEnvironm private CommunityDetectionData() {} public static DataSet> getTieEdgeDataSet(ExecutionEnvironment env) { - List> edges = new ArrayList>(); - edges.add(new Edge(1L, 2L, 1.0)); - edges.add(new Edge(1L, 3L, 1.0)); - edges.add(new Edge(1L, 4L, 1.0)); - edges.add(new Edge(1L, 5L, 1.0)); + List> edges = new ArrayList<>(); + edges.add(new Edge<>(1L, 2L, 1.0)); + edges.add(new Edge<>(1L, 3L, 1.0)); + edges.add(new Edge<>(1L, 4L, 1.0)); + edges.add(new Edge<>(1L, 5L, 1.0)); return env.fromCollection(edges); } diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/EuclideanGraphData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/EuclideanGraphData.java index 1ac9272bd76b7..569824157fc42 100644 --- 
a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/EuclideanGraphData.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/EuclideanGraphData.java @@ -40,10 +40,10 @@ public class EuclideanGraphData { public static DataSet> getDefaultVertexDataSet(ExecutionEnvironment env) { - List> vertices = new ArrayList>(); + List> vertices = new ArrayList<>(); for (int i = 1; i <= NUM_VERTICES; i++) { - vertices.add(new Vertex(new Long(i), - new EuclideanGraphWeighing.Point(new Double(i), new Double(i)))); + vertices.add(new Vertex<>(new Long(i), + new EuclideanGraphWeighing.Point(new Double(i), new Double(i)))); } return env.fromCollection(vertices); @@ -55,23 +55,23 @@ public static DataSet> getDefaultVert public static DataSet> getDefaultEdgeDataSet(ExecutionEnvironment env) { - List> edges = new ArrayList>(); - edges.add(new Edge(1L, 2L, 0.0)); - edges.add(new Edge(1L, 4L, 0.0)); - edges.add(new Edge(2L, 3L, 0.0)); - edges.add(new Edge(2L, 4L, 0.0)); - edges.add(new Edge(2L, 5L, 0.0)); - edges.add(new Edge(3L, 5L, 0.0)); - edges.add(new Edge(4L, 5L, 0.0)); - edges.add(new Edge(4L, 6L, 0.0)); - edges.add(new Edge(5L, 7L, 0.0)); - edges.add(new Edge(5L, 9L, 0.0)); - edges.add(new Edge(6L, 7L, 0.0)); - edges.add(new Edge(6L, 8L, 0.0)); - edges.add(new Edge(6L, 8L, 0.0)); - edges.add(new Edge(7L, 8L, 0.0)); - edges.add(new Edge(7L, 9L, 0.0)); - edges.add(new Edge(8L, 9L, 0.0)); + List> edges = new ArrayList<>(); + edges.add(new Edge<>(1L, 2L, 0.0)); + edges.add(new Edge<>(1L, 4L, 0.0)); + edges.add(new Edge<>(2L, 3L, 0.0)); + edges.add(new Edge<>(2L, 4L, 0.0)); + edges.add(new Edge<>(2L, 5L, 0.0)); + edges.add(new Edge<>(3L, 5L, 0.0)); + edges.add(new Edge<>(4L, 5L, 0.0)); + edges.add(new Edge<>(4L, 6L, 0.0)); + edges.add(new Edge<>(5L, 7L, 0.0)); + edges.add(new Edge<>(5L, 9L, 0.0)); + edges.add(new Edge<>(6L, 7L, 0.0)); + edges.add(new Edge<>(6L, 8L, 0.0)); + edges.add(new Edge<>(6L, 8L, 0.0)); + edges.add(new Edge<>(7L, 8L, 0.0)); + edges.add(new Edge<>(7L, 9L, 0.0)); + edges.add(new Edge<>(8L, 9L, 0.0)); return env.fromCollection(edges); } diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/IncrementalSSSPData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/IncrementalSSSPData.java index eaa2cde0a2191..d86ba96be93ba 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/IncrementalSSSPData.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/IncrementalSSSPData.java @@ -38,12 +38,12 @@ public class IncrementalSSSPData { public static DataSet> getDefaultVertexDataSet(ExecutionEnvironment env) { - List> vertices = new ArrayList>(); - vertices.add(new Vertex(1L, 6.0)); - vertices.add(new Vertex(2L, 2.0)); - vertices.add(new Vertex(3L, 3.0)); - vertices.add(new Vertex(4L, 1.0)); - vertices.add(new Vertex(5L, 0.0)); + List> vertices = new ArrayList<>(); + vertices.add(new Vertex<>(1L, 6.0)); + vertices.add(new Vertex<>(2L, 2.0)); + vertices.add(new Vertex<>(3L, 3.0)); + vertices.add(new Vertex<>(4L, 1.0)); + vertices.add(new Vertex<>(5L, 0.0)); return env.fromCollection(vertices); } @@ -53,13 +53,13 @@ public static DataSet> getDefaultVertexDataSet(ExecutionEnv public static final DataSet> getDefaultEdgeDataSet(ExecutionEnvironment env) { - List> edges = new ArrayList>(); - edges.add(new Edge(1L, 3L, 3.0)); - edges.add(new Edge(2L, 
4L, 3.0)); - edges.add(new Edge(2L, 5L, 2.0)); - edges.add(new Edge(3L, 2L, 1.0)); - edges.add(new Edge(3L, 5L, 5.0)); - edges.add(new Edge(4L, 5L, 1.0)); + List> edges = new ArrayList<>(); + edges.add(new Edge<>(1L, 3L, 3.0)); + edges.add(new Edge<>(2L, 4L, 3.0)); + edges.add(new Edge<>(2L, 5L, 2.0)); + edges.add(new Edge<>(3L, 2L, 1.0)); + edges.add(new Edge<>(3L, 5L, 5.0)); + edges.add(new Edge<>(4L, 5L, 1.0)); return env.fromCollection(edges); } @@ -68,11 +68,11 @@ public static final DataSet> getDefaultEdgeDataSet(ExecutionE public static final DataSet> getDefaultEdgesInSSSP(ExecutionEnvironment env) { - List> edges = new ArrayList>(); - edges.add(new Edge(1L, 3L, 3.0)); - edges.add(new Edge(2L, 5L, 2.0)); - edges.add(new Edge(3L, 2L, 1.0)); - edges.add(new Edge(4L, 5L, 1.0)); + List> edges = new ArrayList<>(); + edges.add(new Edge<>(1L, 3L, 3.0)); + edges.add(new Edge<>(2L, 5L, 2.0)); + edges.add(new Edge<>(3L, 2L, 1.0)); + edges.add(new Edge<>(4L, 5L, 1.0)); return env.fromCollection(edges); } @@ -85,7 +85,7 @@ public static final DataSet> getDefaultEdgesInSSSP(ExecutionE public static final Edge getDefaultEdgeToBeRemoved() { - return new Edge(2L, 5L, 2.0); + return new Edge<>(2L, 5L, 2.0); } public static final String RESULTED_VERTICES = "1," + Double.MAX_VALUE + "\n" + "2," + Double.MAX_VALUE + "\n" diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/LabelPropagationData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/LabelPropagationData.java index 343ff70ad73ba..21b33294b2c25 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/LabelPropagationData.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/LabelPropagationData.java @@ -55,58 +55,58 @@ private LabelPropagationData() {} public static final DataSet> getDefaultVertexSet(ExecutionEnvironment env) { - List> vertices = new ArrayList>(); - vertices.add(new Vertex(1L, 10L)); - vertices.add(new Vertex(2L, 10L)); - vertices.add(new Vertex(3L, 30L)); - vertices.add(new Vertex(4L, 40L)); - vertices.add(new Vertex(5L, 40L)); - vertices.add(new Vertex(6L, 40L)); - vertices.add(new Vertex(7L, 40L)); + List> vertices = new ArrayList<>(); + vertices.add(new Vertex<>(1L, 10L)); + vertices.add(new Vertex<>(2L, 10L)); + vertices.add(new Vertex<>(3L, 30L)); + vertices.add(new Vertex<>(4L, 40L)); + vertices.add(new Vertex<>(5L, 40L)); + vertices.add(new Vertex<>(6L, 40L)); + vertices.add(new Vertex<>(7L, 40L)); return env.fromCollection(vertices); } public static final DataSet> getDefaultEdgeDataSet(ExecutionEnvironment env) { - List> edges = new ArrayList>(); - edges.add(new Edge(1L, 3L, NullValue.getInstance())); - edges.add(new Edge(2L, 3L, NullValue.getInstance())); - edges.add(new Edge(4L, 7L, NullValue.getInstance())); - edges.add(new Edge(5L, 7L, NullValue.getInstance())); - edges.add(new Edge(6L, 7L, NullValue.getInstance())); - edges.add(new Edge(7L, 3L, NullValue.getInstance())); + List> edges = new ArrayList<>(); + edges.add(new Edge<>(1L, 3L, NullValue.getInstance())); + edges.add(new Edge<>(2L, 3L, NullValue.getInstance())); + edges.add(new Edge<>(4L, 7L, NullValue.getInstance())); + edges.add(new Edge<>(5L, 7L, NullValue.getInstance())); + edges.add(new Edge<>(6L, 7L, NullValue.getInstance())); + edges.add(new Edge<>(7L, 3L, NullValue.getInstance())); return env.fromCollection(edges); } public static final DataSet> 
getTieVertexSet(ExecutionEnvironment env) { - List> vertices = new ArrayList>(); - vertices.add(new Vertex(1L, 10L)); - vertices.add(new Vertex(2L, 10L)); - vertices.add(new Vertex(3L, 10L)); - vertices.add(new Vertex(4L, 10L)); - vertices.add(new Vertex(5L, 0L)); - vertices.add(new Vertex(6L, 20L)); - vertices.add(new Vertex(7L, 20L)); - vertices.add(new Vertex(8L, 20L)); - vertices.add(new Vertex(9L, 20L)); + List> vertices = new ArrayList<>(); + vertices.add(new Vertex<>(1L, 10L)); + vertices.add(new Vertex<>(2L, 10L)); + vertices.add(new Vertex<>(3L, 10L)); + vertices.add(new Vertex<>(4L, 10L)); + vertices.add(new Vertex<>(5L, 0L)); + vertices.add(new Vertex<>(6L, 20L)); + vertices.add(new Vertex<>(7L, 20L)); + vertices.add(new Vertex<>(8L, 20L)); + vertices.add(new Vertex<>(9L, 20L)); return env.fromCollection(vertices); } public static final DataSet> getTieEdgeDataSet(ExecutionEnvironment env) { - List> edges = new ArrayList>(); - edges.add(new Edge(1L, 5L, NullValue.getInstance())); - edges.add(new Edge(2L, 5L, NullValue.getInstance())); - edges.add(new Edge(4L, 5L, NullValue.getInstance())); - edges.add(new Edge(5L, 5L, NullValue.getInstance())); - edges.add(new Edge(6L, 5L, NullValue.getInstance())); - edges.add(new Edge(7L, 5L, NullValue.getInstance())); - edges.add(new Edge(8L, 5L, NullValue.getInstance())); - edges.add(new Edge(9L, 5L, NullValue.getInstance())); + List> edges = new ArrayList<>(); + edges.add(new Edge<>(1L, 5L, NullValue.getInstance())); + edges.add(new Edge<>(2L, 5L, NullValue.getInstance())); + edges.add(new Edge<>(4L, 5L, NullValue.getInstance())); + edges.add(new Edge<>(5L, 5L, NullValue.getInstance())); + edges.add(new Edge<>(6L, 5L, NullValue.getInstance())); + edges.add(new Edge<>(7L, 5L, NullValue.getInstance())); + edges.add(new Edge<>(8L, 5L, NullValue.getInstance())); + edges.add(new Edge<>(9L, 5L, NullValue.getInstance())); return env.fromCollection(edges); } diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/MusicProfilesData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/MusicProfilesData.java index df139f0c6efa5..21e8730775e1a 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/MusicProfilesData.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/MusicProfilesData.java @@ -32,53 +32,53 @@ public class MusicProfilesData { public static DataSet> getUserSongTriplets(ExecutionEnvironment env) { - List> triplets = new ArrayList>(); - - triplets.add(new Tuple3("user_1", "song_1", 100)); - triplets.add(new Tuple3("user_1", "song_2", 10)); - triplets.add(new Tuple3("user_1", "song_3", 20)); - triplets.add(new Tuple3("user_1", "song_4", 30)); - triplets.add(new Tuple3("user_1", "song_5", 1)); - - triplets.add(new Tuple3("user_2", "song_6", 40)); - triplets.add(new Tuple3("user_2", "song_7", 10)); - triplets.add(new Tuple3("user_2", "song_8", 3)); - - triplets.add(new Tuple3("user_3", "song_1", 100)); - triplets.add(new Tuple3("user_3", "song_2", 10)); - triplets.add(new Tuple3("user_3", "song_3", 20)); - triplets.add(new Tuple3("user_3", "song_8", 30)); - triplets.add(new Tuple3("user_3", "song_9", 1)); - triplets.add(new Tuple3("user_3", "song_10", 8)); - triplets.add(new Tuple3("user_3", "song_11", 90)); - triplets.add(new Tuple3("user_3", "song_12", 30)); - triplets.add(new Tuple3("user_3", "song_13", 34)); - triplets.add(new Tuple3("user_3", "song_14", 17)); - - 
triplets.add(new Tuple3("user_4", "song_1", 100)); - triplets.add(new Tuple3("user_4", "song_6", 10)); - triplets.add(new Tuple3("user_4", "song_8", 20)); - triplets.add(new Tuple3("user_4", "song_12", 30)); - triplets.add(new Tuple3("user_4", "song_13", 1)); - triplets.add(new Tuple3("user_4", "song_15", 1)); - - triplets.add(new Tuple3("user_5", "song_3", 300)); - triplets.add(new Tuple3("user_5", "song_4", 4)); - triplets.add(new Tuple3("user_5", "song_5", 5)); - triplets.add(new Tuple3("user_5", "song_8", 8)); - triplets.add(new Tuple3("user_5", "song_9", 9)); - triplets.add(new Tuple3("user_5", "song_10", 10)); - triplets.add(new Tuple3("user_5", "song_12", 12)); - triplets.add(new Tuple3("user_5", "song_13", 13)); - triplets.add(new Tuple3("user_5", "song_15", 15)); - - triplets.add(new Tuple3("user_6", "song_6", 30)); + List> triplets = new ArrayList<>(); + + triplets.add(new Tuple3<>("user_1", "song_1", 100)); + triplets.add(new Tuple3<>("user_1", "song_2", 10)); + triplets.add(new Tuple3<>("user_1", "song_3", 20)); + triplets.add(new Tuple3<>("user_1", "song_4", 30)); + triplets.add(new Tuple3<>("user_1", "song_5", 1)); + + triplets.add(new Tuple3<>("user_2", "song_6", 40)); + triplets.add(new Tuple3<>("user_2", "song_7", 10)); + triplets.add(new Tuple3<>("user_2", "song_8", 3)); + + triplets.add(new Tuple3<>("user_3", "song_1", 100)); + triplets.add(new Tuple3<>("user_3", "song_2", 10)); + triplets.add(new Tuple3<>("user_3", "song_3", 20)); + triplets.add(new Tuple3<>("user_3", "song_8", 30)); + triplets.add(new Tuple3<>("user_3", "song_9", 1)); + triplets.add(new Tuple3<>("user_3", "song_10", 8)); + triplets.add(new Tuple3<>("user_3", "song_11", 90)); + triplets.add(new Tuple3<>("user_3", "song_12", 30)); + triplets.add(new Tuple3<>("user_3", "song_13", 34)); + triplets.add(new Tuple3<>("user_3", "song_14", 17)); + + triplets.add(new Tuple3<>("user_4", "song_1", 100)); + triplets.add(new Tuple3<>("user_4", "song_6", 10)); + triplets.add(new Tuple3<>("user_4", "song_8", 20)); + triplets.add(new Tuple3<>("user_4", "song_12", 30)); + triplets.add(new Tuple3<>("user_4", "song_13", 1)); + triplets.add(new Tuple3<>("user_4", "song_15", 1)); + + triplets.add(new Tuple3<>("user_5", "song_3", 300)); + triplets.add(new Tuple3<>("user_5", "song_4", 4)); + triplets.add(new Tuple3<>("user_5", "song_5", 5)); + triplets.add(new Tuple3<>("user_5", "song_8", 8)); + triplets.add(new Tuple3<>("user_5", "song_9", 9)); + triplets.add(new Tuple3<>("user_5", "song_10", 10)); + triplets.add(new Tuple3<>("user_5", "song_12", 12)); + triplets.add(new Tuple3<>("user_5", "song_13", 13)); + triplets.add(new Tuple3<>("user_5", "song_15", 15)); + + triplets.add(new Tuple3<>("user_6", "song_6", 30)); return env.fromCollection(triplets); } public static DataSet getMismatches(ExecutionEnvironment env) { - List errors = new ArrayList(); + List errors = new ArrayList<>(); errors.add("ERROR: Sever"); errors.add("ERROR: Black Trees"); return env.fromCollection(errors); diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/PageRankData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/PageRankData.java index 1c3ebb05d610a..0eb6c1dc67f4e 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/PageRankData.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/PageRankData.java @@ -52,16 +52,16 @@ private PageRankData() {} public static 
final DataSet> getDefaultEdgeDataSet(ExecutionEnvironment env) { - List> edges = new ArrayList>(); - edges.add(new Edge(2L, 1L, 1.0)); - edges.add(new Edge(5L, 2L, 1.0)); - edges.add(new Edge(5L, 4L, 1.0)); - edges.add(new Edge(4L, 3L, 1.0)); - edges.add(new Edge(4L, 2L, 1.0)); - edges.add(new Edge(1L, 4L, 1.0)); - edges.add(new Edge(1L, 2L, 1.0)); - edges.add(new Edge(1L, 3L, 1.0)); - edges.add(new Edge(3L, 5L, 1.0)); + List> edges = new ArrayList<>(); + edges.add(new Edge<>(2L, 1L, 1.0)); + edges.add(new Edge<>(5L, 2L, 1.0)); + edges.add(new Edge<>(5L, 4L, 1.0)); + edges.add(new Edge<>(4L, 3L, 1.0)); + edges.add(new Edge<>(4L, 2L, 1.0)); + edges.add(new Edge<>(1L, 4L, 1.0)); + edges.add(new Edge<>(1L, 2L, 1.0)); + edges.add(new Edge<>(1L, 3L, 1.0)); + edges.add(new Edge<>(3L, 5L, 1.0)); return env.fromCollection(edges); } diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/TriangleCountData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/TriangleCountData.java index 7f4f9264aab9c..4346bd2f1a0a5 100644 --- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/TriangleCountData.java +++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/TriangleCountData.java @@ -45,16 +45,16 @@ public class TriangleCountData { public static DataSet> getDefaultEdgeDataSet(ExecutionEnvironment env) { - List> edges = new ArrayList>(); - edges.add(new Edge(1L, 2L, NullValue.getInstance())); - edges.add(new Edge(1L, 3L, NullValue.getInstance())); - edges.add(new Edge(2L, 3L, NullValue.getInstance())); - edges.add(new Edge(2L, 6L, NullValue.getInstance())); - edges.add(new Edge(3L, 4L, NullValue.getInstance())); - edges.add(new Edge(3L, 5L, NullValue.getInstance())); - edges.add(new Edge(3L, 6L, NullValue.getInstance())); - edges.add(new Edge(4L, 5L, NullValue.getInstance())); - edges.add(new Edge(6L, 7L, NullValue.getInstance())); + List> edges = new ArrayList<>(); + edges.add(new Edge<>(1L, 2L, NullValue.getInstance())); + edges.add(new Edge<>(1L, 3L, NullValue.getInstance())); + edges.add(new Edge<>(2L, 3L, NullValue.getInstance())); + edges.add(new Edge<>(2L, 6L, NullValue.getInstance())); + edges.add(new Edge<>(3L, 4L, NullValue.getInstance())); + edges.add(new Edge<>(3L, 5L, NullValue.getInstance())); + edges.add(new Edge<>(3L, 6L, NullValue.getInstance())); + edges.add(new Edge<>(4L, 5L, NullValue.getInstance())); + edges.add(new Edge<>(6L, 7L, NullValue.getInstance())); return env.fromCollection(edges); } diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/CommunityDetectionITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/CommunityDetectionITCase.java index 8b8c44e914089..2a80aea35b782 100644 --- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/CommunityDetectionITCase.java +++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/CommunityDetectionITCase.java @@ -53,7 +53,7 @@ public void testSingleIteration() throws Exception { Graph inputGraph = Graph.fromDataSet( CommunityDetectionData.getSimpleEdgeDataSet(env), new InitLabels(), env); - List> result = inputGraph.run(new CommunityDetection(1, CommunityDetectionData.DELTA)) + List> result = inputGraph.run(new CommunityDetection<>(1, CommunityDetectionData.DELTA)) .getVertices().collect(); expected = 
CommunityDetectionData.COMMUNITIES_SINGLE_ITERATION; @@ -69,7 +69,7 @@ public void testTieBreaker() throws Exception { Graph inputGraph = Graph.fromDataSet( CommunityDetectionData.getTieEdgeDataSet(env), new InitLabels(), env); - List> result = inputGraph.run(new CommunityDetection(1, CommunityDetectionData.DELTA)) + List> result = inputGraph.run(new CommunityDetection<>(1, CommunityDetectionData.DELTA)) .getVertices().collect(); expected = CommunityDetectionData.COMMUNITIES_WITH_TIE; compareResultAsTuples(result, expected); diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/LabelPropagationITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/LabelPropagationITCase.java index d61c71d420ea4..d6dd3a03b47b6 100644 --- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/LabelPropagationITCase.java +++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/LabelPropagationITCase.java @@ -55,7 +55,7 @@ public void testSingleIteration() throws Exception { LabelPropagationData.getDefaultEdgeDataSet(env), env); List> result = inputGraph - .run(new LabelPropagation(1)) + .run(new LabelPropagation<>(1)) .collect(); expectedResult = LabelPropagationData.LABELS_AFTER_1_ITERATION; @@ -74,7 +74,7 @@ public void testTieBreaker() throws Exception { LabelPropagationData.getTieEdgeDataSet(env), env); List> result = inputGraph - .run(new LabelPropagation(1)) + .run(new LabelPropagation<>(1)) .collect(); expectedResult = LabelPropagationData.LABELS_WITH_TIE; diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/SummarizationITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/SummarizationITCase.java index baad6d0e42257..f66caf4589571 100644 --- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/SummarizationITCase.java +++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/SummarizationITCase.java @@ -72,7 +72,7 @@ public void testWithVertexAndEdgeStringValues() throws Exception { List>> summarizedEdges = new ArrayList<>(); Graph, EdgeValue> output = - input.run(new Summarization()); + input.run(new Summarization<>()); output.getVertices().output(new LocalCollectionOutputFormat<>(summarizedVertices)); output.getEdges().output(new LocalCollectionOutputFormat<>(summarizedEdges)); @@ -90,13 +90,13 @@ public void testWithVertexAndAbsentEdgeStringValues() throws Exception { SummarizationData.getVertices(env), SummarizationData.getEdges(env), env) - .run(new TranslateEdgeValues(new ToNullValue())); + .run(new TranslateEdgeValues<>(new ToNullValue<>())); List>> summarizedVertices = new ArrayList<>(); List>> summarizedEdges = new ArrayList<>(); Graph, EdgeValue> output = - input.run(new Summarization()); + input.run(new Summarization<>()); output.getVertices().output(new LocalCollectionOutputFormat<>(summarizedVertices)); output.getEdges().output(new LocalCollectionOutputFormat<>(summarizedEdges)); @@ -115,14 +115,14 @@ public void testWithVertexAndEdgeLongValues() throws Exception { SummarizationData.getVertices(env), SummarizationData.getEdges(env), env) - .run(new TranslateVertexValues(new StringToLong())) - .run(new TranslateEdgeValues(new StringToLong())); + .run(new TranslateVertexValues<>(new StringToLong())) + .run(new TranslateEdgeValues<>(new StringToLong())); List>> summarizedVertices = new ArrayList<>(); List>> 
summarizedEdges = new ArrayList<>(); Graph, EdgeValue> output = - input.run(new Summarization()); + input.run(new Summarization<>()); output.getVertices().output(new LocalCollectionOutputFormat<>(summarizedVertices)); output.getEdges().output(new LocalCollectionOutputFormat<>(summarizedEdges)); diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/TriangleEnumeratorITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/TriangleEnumeratorITCase.java index 2e1cc7c3bcf19..d999bae7c6205 100644 --- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/TriangleEnumeratorITCase.java +++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/TriangleEnumeratorITCase.java @@ -50,7 +50,7 @@ public void testTriangleEnumerator() throws Exception { Graph graph = Graph.fromDataSet(TriangleCountData.getDefaultEdgeDataSet(env), env); - List> actualOutput = graph.run(new TriangleEnumerator()).collect(); + List> actualOutput = graph.run(new TriangleEnumerator<>()).collect(); List> expectedResult = TriangleCountData.getListOfTriangles(); Assert.assertEquals(expectedResult.size(), actualOutput.size()); diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/GatherSumApplyITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/GatherSumApplyITCase.java index 066f2c47d2b6f..e44976e0eb073 100644 --- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/GatherSumApplyITCase.java +++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/GatherSumApplyITCase.java @@ -67,10 +67,10 @@ public void testConnectedComponentsWithObjectReuseDisabled() throws Exception { Graph inputGraph = Graph.fromDataSet( ConnectedComponentsDefaultData.getDefaultEdgeDataSet(env), - new IdentityMapper(), env); + new IdentityMapper<>(), env); List> result = inputGraph.run( - new GSAConnectedComponents(16)).collect(); + new GSAConnectedComponents<>(16)).collect(); compareResultAsTuples(result, expectedResultCC); } @@ -85,10 +85,10 @@ public void testConnectedComponentsWithObjectReuseEnabled() throws Exception { new LongToLongValue()); Graph inputGraph = Graph.fromDataSet( - edges, new IdentityMapper(), env); + edges, new IdentityMapper<>(), env); List> result = inputGraph.run( - new GSAConnectedComponents(16)).collect(); + new GSAConnectedComponents<>(16)).collect(); compareResultAsTuples(result, expectedResultCC); } @@ -106,7 +106,7 @@ public void testSingleSourceShortestPaths() throws Exception { new InitMapperSSSP(), env); List> result = inputGraph.run( - new GSASingleSourceShortestPaths(1L, 16)).collect(); + new GSASingleSourceShortestPaths<>(1L, 16)).collect(); String expectedResult = "1,0.0\n" + "2,12.0\n" + diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/PageRankITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/PageRankITCase.java index 62ed39fd42102..bd2c50d446f5d 100644 --- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/PageRankITCase.java +++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/PageRankITCase.java @@ -52,7 +52,7 @@ public void testPageRankWithThreeIterations() throws Exception { Graph inputGraph = Graph.fromDataSet( PageRankData.getDefaultEdgeDataSet(env), new 
InitMapper(), env); - List> result = inputGraph.run(new PageRank(0.85, 3)) + List> result = inputGraph.run(new PageRank<>(0.85, 3)) .collect(); compareWithDelta(result, 0.01); @@ -65,7 +65,7 @@ public void testGSAPageRankWithThreeIterations() throws Exception { Graph inputGraph = Graph.fromDataSet( PageRankData.getDefaultEdgeDataSet(env), new InitMapper(), env); - List> result = inputGraph.run(new GSAPageRank(0.85, 3)) + List> result = inputGraph.run(new GSAPageRank<>(0.85, 3)) .collect(); compareWithDelta(result, 0.01); @@ -78,7 +78,7 @@ public void testPageRankWithThreeIterationsAndNumOfVertices() throws Exception { Graph inputGraph = Graph.fromDataSet( PageRankData.getDefaultEdgeDataSet(env), new InitMapper(), env); - List> result = inputGraph.run(new PageRank(0.85, 3)) + List> result = inputGraph.run(new PageRank<>(0.85, 3)) .collect(); compareWithDelta(result, 0.01); @@ -91,7 +91,7 @@ public void testGSAPageRankWithThreeIterationsAndNumOfVertices() throws Exceptio Graph inputGraph = Graph.fromDataSet( PageRankData.getDefaultEdgeDataSet(env), new InitMapper(), env); - List> result = inputGraph.run(new GSAPageRank(0.85, 3)) + List> result = inputGraph.run(new GSAPageRank<>(0.85, 3)) .collect(); compareWithDelta(result, 0.01); diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java index 670cefbd13738..446a2ba676420 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java @@ -177,7 +177,7 @@ public static Graph fromDataSet( DataSet> edges, ExecutionEnvironment context) { DataSet> vertices = edges - .flatMap(new EmitSrcAndTarget()) + .flatMap(new EmitSrcAndTarget<>()) .name("Source and target IDs") .distinct() .name("IDs"); @@ -221,7 +221,7 @@ public static Graph fromDataSet(DataSet> edge Vertex.class, keyType, valueType); DataSet> vertices = edges - .flatMap(new EmitSrcAndTargetAsTuple1()) + .flatMap(new EmitSrcAndTargetAsTuple1<>()) .name("Source and target IDs") .distinct() .name("IDs") @@ -269,11 +269,11 @@ public static Graph fromTupleDataSet(DataSet> edges, ExecutionEnvironment context) { DataSet> vertexDataSet = vertices - .map(new Tuple2ToVertexMap()) + .map(new Tuple2ToVertexMap<>()) .name("Type conversion"); DataSet> edgeDataSet = edges - .map(new Tuple3ToEdgeMap()) + .map(new Tuple3ToEdgeMap<>()) .name("Type conversion"); return fromDataSet(vertexDataSet, edgeDataSet, context); @@ -296,7 +296,7 @@ public static Graph fromTupleDataSet(DataSet> edgeDataSet = edges - .map(new Tuple3ToEdgeMap()) + .map(new Tuple3ToEdgeMap<>()) .name("Type conversion"); return fromDataSet(edgeDataSet, context); @@ -322,7 +322,7 @@ public static Graph fromTupleDataSet(DataSet vertexValueInitializer, ExecutionEnvironment context) { DataSet> edgeDataSet = edges - .map(new Tuple3ToEdgeMap()) + .map(new Tuple3ToEdgeMap<>()) .name("Type conversion"); return fromDataSet(edgeDataSet, vertexValueInitializer, context); @@ -343,7 +343,7 @@ public static Graph fromTuple2DataSet(DataSet> edgeDataSet = edges - .map(new Tuple2ToEdgeMap()) + .map(new Tuple2ToEdgeMap<>()) .name("To Edge"); return fromDataSet(edgeDataSet, context); @@ -368,7 +368,7 @@ public static Graph fromTuple2DataSet(DataSet vertexValueInitializer, ExecutionEnvironment context) { DataSet> edgeDataSet = edges - .map(new Tuple2ToEdgeMap()) + .map(new Tuple2ToEdgeMap<>()) .name("To Edge"); return fromDataSet(edgeDataSet, 
vertexValueInitializer, context); @@ -465,14 +465,14 @@ public DataSet> getEdges() { * @return the vertex DataSet as Tuple2. */ public DataSet> getVerticesAsTuple2() { - return vertices.map(new VertexToTuple2Map()); + return vertices.map(new VertexToTuple2Map<>()); } /** * @return the edge DataSet as Tuple3. */ public DataSet> getEdgesAsTuple3() { - return edges.map(new EdgeToTuple3Map()); + return edges.map(new EdgeToTuple3Map<>()); } /** @@ -483,10 +483,10 @@ public DataSet> getEdgesAsTuple3() { public DataSet> getTriplets() { return this.getVertices() .join(this.getEdges()).where(0).equalTo(0) - .with(new ProjectEdgeWithSrcValue()) + .with(new ProjectEdgeWithSrcValue<>()) .name("Project edge with source value") .join(this.getVertices()).where(1).equalTo(0) - .with(new ProjectEdgeWithVertexValues()) + .with(new ProjectEdgeWithVertexValues<>()) .name("Project edge with vertex values"); } @@ -628,7 +628,7 @@ public Edge map(Edge value) throws Exception { * @throws Exception */ public Graph translateGraphIds(TranslateFunction translator) throws Exception { - return run(new TranslateGraphIds(translator)); + return run(new TranslateGraphIds<>(translator)); } /** @@ -640,7 +640,7 @@ public Graph translateGraphIds(TranslateFunction tran * @throws Exception */ public Graph translateVertexValues(TranslateFunction translator) throws Exception { - return run(new TranslateVertexValues(translator)); + return run(new TranslateVertexValues<>(translator)); } /** @@ -652,7 +652,7 @@ public Graph translateVertexValues(TranslateFunction * @throws Exception */ public Graph translateEdgeValues(TranslateFunction translator) throws Exception { - return run(new TranslateEdgeValues(translator)); + return run(new TranslateEdgeValues<>(translator)); } /** @@ -676,7 +676,7 @@ public Graph joinWithVertices(DataSet> inputDataSet, DataSet> resultedVertices = this.getVertices() .coGroup(inputDataSet).where(0).equalTo(0) - .with(new ApplyCoGroupToVertexValues(vertexJoinFunction)) + .with(new ApplyCoGroupToVertexValues<>(vertexJoinFunction)) .name("Join with vertices"); return new Graph<>(resultedVertices, this.edges, this.context); } @@ -730,7 +730,7 @@ public Graph joinWithEdges(DataSet> inputDataSet, DataSet> resultedEdges = this.getEdges() .coGroup(inputDataSet).where(0, 1).equalTo(0, 1) - .with(new ApplyCoGroupToEdgeValues(edgeJoinFunction)) + .with(new ApplyCoGroupToEdgeValues<>(edgeJoinFunction)) .name("Join with edges"); return new Graph<>(this.vertices, resultedEdges, this.context); } @@ -787,7 +787,7 @@ public Graph joinWithEdgesOnSource(DataSet> inputDat DataSet> resultedEdges = this.getEdges() .coGroup(inputDataSet).where(0).equalTo(0) - .with(new ApplyCoGroupToEdgeValuesOnEitherSourceOrTarget(edgeJoinFunction)) + .with(new ApplyCoGroupToEdgeValuesOnEitherSourceOrTarget<>(edgeJoinFunction)) .name("Join with edges on source"); return new Graph<>(this.vertices, resultedEdges, this.context); @@ -850,7 +850,7 @@ public Graph joinWithEdgesOnTarget(DataSet> inputDat DataSet> resultedEdges = this.getEdges() .coGroup(inputDataSet).where(1).equalTo(0) - .with(new ApplyCoGroupToEdgeValuesOnEitherSourceOrTarget(edgeJoinFunction)) + .with(new ApplyCoGroupToEdgeValuesOnEitherSourceOrTarget<>(edgeJoinFunction)) .name("Join with edges on target"); return new Graph<>(this.vertices, resultedEdges, this.context); @@ -869,9 +869,9 @@ public Graph subgraph(FilterFunction> vertexFilter, Fil DataSet> filteredVertices = this.vertices.filter(vertexFilter); DataSet> remainingEdges = this.edges.join(filteredVertices) - 
.where(0).equalTo(0).with(new ProjectEdge()) + .where(0).equalTo(0).with(new ProjectEdge<>()) .join(filteredVertices).where(1).equalTo(0) - .with(new ProjectEdge()).name("Subgraph"); + .with(new ProjectEdge<>()).name("Subgraph"); DataSet> filteredEdges = remainingEdges.filter(edgeFilter); @@ -890,9 +890,9 @@ public Graph filterOnVertices(FilterFunction> vertexFil DataSet> filteredVertices = this.vertices.filter(vertexFilter); DataSet> remainingEdges = this.edges.join(filteredVertices) - .where(0).equalTo(0).with(new ProjectEdge()) + .where(0).equalTo(0).with(new ProjectEdge<>()) .join(filteredVertices).where(1).equalTo(0) - .with(new ProjectEdge()).name("Filter on vertices"); + .with(new ProjectEdge<>()).name("Filter on vertices"); return new Graph<>(filteredVertices, remainingEdges, this.context); } @@ -925,7 +925,7 @@ public void join(Edge first, Vertex second, Collector> */ public DataSet> outDegrees() { - return vertices.coGroup(edges).where(0).equalTo(0).with(new CountNeighborsCoGroup()) + return vertices.coGroup(edges).where(0).equalTo(0).with(new CountNeighborsCoGroup<>()) .name("Out-degree"); } @@ -962,7 +962,7 @@ public void coGroup(Iterable> vertex, Iterable> outEdg */ public DataSet> inDegrees() { - return vertices.coGroup(edges).where(0).equalTo(1).with(new CountNeighborsCoGroup()) + return vertices.coGroup(edges).where(0).equalTo(1).with(new CountNeighborsCoGroup<>()) .name("In-degree"); } @@ -985,7 +985,7 @@ public DataSet> getDegrees() { public Graph getUndirected() { DataSet> undirectedEdges = edges. - flatMap(new RegularAndReversedEdgesMap()).name("To undirected graph"); + flatMap(new RegularAndReversedEdgesMap<>()).name("To undirected graph"); return new Graph<>(vertices, undirectedEdges, this.context); } @@ -1014,7 +1014,7 @@ public DataSet groupReduceOnEdges(EdgesFunctionWithVertexValue(edgesFunction)).name("GroupReduce on out-edges"); case ALL: - return vertices.coGroup(edges.flatMap(new EmitOneEdgePerNode()) + return vertices.coGroup(edges.flatMap(new EmitOneEdgePerNode<>()) .name("Emit edge")) .where(0).equalTo(0).with(new ApplyCoGroupFunctionOnAllEdges<>(edgesFunction)) .name("GroupReduce on in- and out-edges"); @@ -1051,7 +1051,7 @@ public DataSet groupReduceOnEdges(EdgesFunctionWithVertexValue(edgesFunction)) .name("GroupReduce on out-edges").returns(typeInfo); case ALL: - return vertices.coGroup(edges.flatMap(new EmitOneEdgePerNode()) + return vertices.coGroup(edges.flatMap(new EmitOneEdgePerNode<>()) .name("Emit edge")) .where(0).equalTo(0).with(new ApplyCoGroupFunctionOnAllEdges<>(edgesFunction)) .name("GroupReduce on in- and out-edges").returns(typeInfo); @@ -1105,17 +1105,17 @@ public DataSet groupReduceOnEdges(EdgesFunction edgesFunction, switch (direction) { case IN: - return edges.map(new ProjectVertexIdMap(1)).name("Vertex ID") + return edges.map(new ProjectVertexIdMap<>(1)).name("Vertex ID") .withForwardedFields("f1->f0") .groupBy(0).reduceGroup(new ApplyGroupReduceFunction<>(edgesFunction)) .name("GroupReduce on in-edges").returns(typeInfo); case OUT: - return edges.map(new ProjectVertexIdMap(0)).name("Vertex ID") + return edges.map(new ProjectVertexIdMap<>(0)).name("Vertex ID") .withForwardedFields("f0") .groupBy(0).reduceGroup(new ApplyGroupReduceFunction<>(edgesFunction)) .name("GroupReduce on out-edges").returns(typeInfo); case ALL: - return edges.flatMap(new EmitOneEdgePerNode()).name("Emit edge") + return edges.flatMap(new EmitOneEdgePerNode<>()).name("Emit edge") .groupBy(0).reduceGroup(new ApplyGroupReduceFunction<>(edgesFunction)) 
.name("GroupReduce on in- and out-edges").returns(typeInfo); default: @@ -1306,7 +1306,7 @@ public void flatMap(Edge edge, Collector> out) throws Excepti * @throws UnsupportedOperationException */ public Graph reverse() throws UnsupportedOperationException { - DataSet> reversedEdges = edges.map(new ReverseEdgesMap()).name("Reverse edges"); + DataSet> reversedEdges = edges.map(new ReverseEdgesMap<>()).name("Reverse edges"); return new Graph<>(vertices, reversedEdges, this.context); } @@ -1328,7 +1328,7 @@ public long numberOfEdges() throws Exception { * @return The IDs of the vertices as DataSet */ public DataSet getVertexIds() { - return vertices.map(new ExtractVertexIDMapper()).name("Vertex IDs"); + return vertices.map(new ExtractVertexIDMapper<>()).name("Vertex IDs"); } private static final class ExtractVertexIDMapper @@ -1343,7 +1343,7 @@ public K map(Vertex vertex) { * @return The IDs of the edges as DataSet */ public DataSet> getEdgeIds() { - return edges.map(new ExtractEdgeIDsMapper()).name("Edge IDs"); + return edges.map(new ExtractEdgeIDsMapper<>()).name("Edge IDs"); } @ForwardedFields("f0; f1") @@ -1379,7 +1379,7 @@ public Graph addVertex(final Vertex vertex) { public Graph addVertices(List> verticesToAdd) { // Add the vertices DataSet> newVertices = this.vertices.coGroup(this.context.fromCollection(verticesToAdd)) - .where(0).equalTo(0).with(new VerticesUnionCoGroup()).name("Add vertices"); + .where(0).equalTo(0).with(new VerticesUnionCoGroup<>()).name("Add vertices"); return new Graph<>(newVertices, this.edges, this.context); } @@ -1433,9 +1433,9 @@ public Graph addEdges(List> newEdges) { DataSet> validNewEdges = this.getVertices().join(newEdgesDataSet) .where(0).equalTo(0) - .with(new JoinVerticesWithEdgesOnSrc()).name("Join with source") + .with(new JoinVerticesWithEdgesOnSrc<>()).name("Join with source") .join(this.getVertices()).where(1).equalTo(0) - .with(new JoinWithVerticesOnTrg()).name("Join with target"); + .with(new JoinWithVerticesOnTrg<>()).name("Join with target"); return Graph.fromDataSet(this.vertices, this.edges.union(validNewEdges), this.context); } @@ -1496,14 +1496,14 @@ public Graph removeVertices(List> verticesToBeRemoved) private Graph removeVertices(DataSet> verticesToBeRemoved) { DataSet> newVertices = getVertices().coGroup(verticesToBeRemoved).where(0).equalTo(0) - .with(new VerticesRemovalCoGroup()).name("Remove vertices"); + .with(new VerticesRemovalCoGroup<>()).name("Remove vertices"); DataSet > newEdges = newVertices.join(getEdges()).where(0).equalTo(0) // if the edge source was removed, the edge will also be removed - .with(new ProjectEdgeToBeRemoved()).name("Edges to be removed") + .with(new ProjectEdgeToBeRemoved<>()).name("Edges to be removed") // if the edge target was removed, the edge will also be removed .join(newVertices).where(1).equalTo(0) - .with(new ProjectEdge()).name("Remove edges"); + .with(new ProjectEdge<>()).name("Remove edges"); return new Graph<>(newVertices, newEdges, context); } @@ -1571,7 +1571,7 @@ public boolean filter(Edge edge) { public Graph removeEdges(List> edgesToBeRemoved) { DataSet> newEdges = getEdges().coGroup(this.context.fromCollection(edgesToBeRemoved)) - .where(0, 1).equalTo(0, 1).with(new EdgeRemovalCoGroup()).name("Remove edges"); + .where(0, 1).equalTo(0, 1).with(new EdgeRemovalCoGroup<>()).name("Remove edges"); return new Graph<>(this.vertices, newEdges, context); } @@ -1689,7 +1689,7 @@ private DataSet> getPairwiseEdgeIntersection(DataSet> ed .coGroup(edges) .where(0, 1, 2) .equalTo(0, 1, 2) - .with(new 
MatchingEdgeReducer()) + .with(new MatchingEdgeReducer<>()) .name("Intersect edges"); } @@ -1912,9 +1912,9 @@ public DataSet groupReduceOnNeighbors(NeighborsFunctionWithVertexValue pairs DataSet, Vertex>> edgesWithNeighbors = edges - .flatMap(new EmitOneEdgeWithNeighborPerNode()).name("Forward and reverse edges") + .flatMap(new EmitOneEdgeWithNeighborPerNode<>()).name("Forward and reverse edges") .join(this.vertices).where(1).equalTo(0) - .with(new ProjectEdgeWithNeighbor()).name("Edge with vertex"); + .with(new ProjectEdgeWithNeighbor<>()).name("Edge with vertex"); return vertices.coGroup(edgesWithNeighbors) .where(0).equalTo(0) @@ -1962,9 +1962,9 @@ public DataSet groupReduceOnNeighbors(NeighborsFunctionWithVertexValue pairs DataSet, Vertex>> edgesWithNeighbors = edges - .flatMap(new EmitOneEdgeWithNeighborPerNode()).name("Forward and reverse edges") + .flatMap(new EmitOneEdgeWithNeighborPerNode<>()).name("Forward and reverse edges") .join(this.vertices).where(1).equalTo(0) - .with(new ProjectEdgeWithNeighbor()).name("Edge with vertex"); + .with(new ProjectEdgeWithNeighbor<>()).name("Edge with vertex"); return vertices.coGroup(edgesWithNeighbors) .where(0).equalTo(0) @@ -1997,7 +1997,7 @@ public DataSet groupReduceOnNeighbors(NeighborsFunction nei // create pairs DataSet, Vertex>> edgesWithSources = edges .join(this.vertices).where(0).equalTo(0) - .with(new ProjectVertexIdJoin(1)) + .with(new ProjectVertexIdJoin<>(1)) .withForwardedFieldsFirst("f1->f0").name("Edge with source vertex ID"); return edgesWithSources.groupBy(0).reduceGroup( new ApplyNeighborGroupReduceFunction<>(neighborsFunction)).name("Neighbors function"); @@ -2005,16 +2005,16 @@ public DataSet groupReduceOnNeighbors(NeighborsFunction nei // create pairs DataSet, Vertex>> edgesWithTargets = edges .join(this.vertices).where(1).equalTo(0) - .with(new ProjectVertexIdJoin(0)) + .with(new ProjectVertexIdJoin<>(0)) .withForwardedFieldsFirst("f0").name("Edge with target vertex ID"); return edgesWithTargets.groupBy(0).reduceGroup( new ApplyNeighborGroupReduceFunction<>(neighborsFunction)).name("Neighbors function"); case ALL: // create pairs DataSet, Vertex>> edgesWithNeighbors = edges - .flatMap(new EmitOneEdgeWithNeighborPerNode()).name("Forward and reverse edges") + .flatMap(new EmitOneEdgeWithNeighborPerNode<>()).name("Forward and reverse edges") .join(this.vertices).where(1).equalTo(0) - .with(new ProjectEdgeWithNeighbor()).name("Edge with vertex ID"); + .with(new ProjectEdgeWithNeighbor<>()).name("Edge with vertex ID"); return edgesWithNeighbors.groupBy(0).reduceGroup( new ApplyNeighborGroupReduceFunction<>(neighborsFunction)).name("Neighbors function"); @@ -2046,7 +2046,7 @@ public DataSet groupReduceOnNeighbors(NeighborsFunction nei // create pairs DataSet, Vertex>> edgesWithSources = edges .join(this.vertices).where(0).equalTo(0) - .with(new ProjectVertexIdJoin(1)) + .with(new ProjectVertexIdJoin<>(1)) .withForwardedFieldsFirst("f1->f0").name("Edge with source vertex ID"); return edgesWithSources.groupBy(0).reduceGroup( new ApplyNeighborGroupReduceFunction<>(neighborsFunction)) @@ -2055,7 +2055,7 @@ public DataSet groupReduceOnNeighbors(NeighborsFunction nei // create pairs DataSet, Vertex>> edgesWithTargets = edges .join(this.vertices).where(1).equalTo(0) - .with(new ProjectVertexIdJoin(0)) + .with(new ProjectVertexIdJoin<>(0)) .withForwardedFieldsFirst("f0").name("Edge with target vertex ID"); return edgesWithTargets.groupBy(0).reduceGroup( new ApplyNeighborGroupReduceFunction<>(neighborsFunction)) @@ -2063,9 +2063,9 @@ 
public DataSet groupReduceOnNeighbors(NeighborsFunction nei case ALL: // create pairs DataSet, Vertex>> edgesWithNeighbors = edges - .flatMap(new EmitOneEdgeWithNeighborPerNode()) + .flatMap(new EmitOneEdgeWithNeighborPerNode<>()) .join(this.vertices).where(1).equalTo(0) - .with(new ProjectEdgeWithNeighbor()).name("Edge with vertex ID"); + .with(new ProjectEdgeWithNeighbor<>()).name("Edge with vertex ID"); return edgesWithNeighbors.groupBy(0).reduceGroup( new ApplyNeighborGroupReduceFunction<>(neighborsFunction)) @@ -2246,27 +2246,27 @@ public DataSet> reduceOnNeighbors(ReduceNeighborsFunction redu // create pairs final DataSet> verticesWithSourceNeighborValues = edges .join(this.vertices).where(0).equalTo(0) - .with(new ProjectVertexWithNeighborValueJoin(1)) + .with(new ProjectVertexWithNeighborValueJoin<>(1)) .withForwardedFieldsFirst("f1->f0").name("Vertex with in-neighbor value"); - return verticesWithSourceNeighborValues.groupBy(0).reduce(new ApplyNeighborReduceFunction( - reduceNeighborsFunction)).name("Neighbors function"); + return verticesWithSourceNeighborValues.groupBy(0).reduce(new ApplyNeighborReduceFunction<>( + reduceNeighborsFunction)).name("Neighbors function"); case OUT: // create pairs DataSet> verticesWithTargetNeighborValues = edges .join(this.vertices).where(1).equalTo(0) - .with(new ProjectVertexWithNeighborValueJoin(0)) + .with(new ProjectVertexWithNeighborValueJoin<>(0)) .withForwardedFieldsFirst("f0").name("Vertex with out-neighbor value"); - return verticesWithTargetNeighborValues.groupBy(0).reduce(new ApplyNeighborReduceFunction( - reduceNeighborsFunction)).name("Neighbors function"); + return verticesWithTargetNeighborValues.groupBy(0).reduce(new ApplyNeighborReduceFunction<>( + reduceNeighborsFunction)).name("Neighbors function"); case ALL: // create pairs DataSet> verticesWithNeighborValues = edges - .flatMap(new EmitOneEdgeWithNeighborPerNode()) + .flatMap(new EmitOneEdgeWithNeighborPerNode<>()) .join(this.vertices).where(1).equalTo(0) - .with(new ProjectNeighborValue()).name("Vertex with neighbor value"); + .with(new ProjectNeighborValue<>()).name("Vertex with neighbor value"); - return verticesWithNeighborValues.groupBy(0).reduce(new ApplyNeighborReduceFunction( - reduceNeighborsFunction)).name("Neighbors function"); + return verticesWithNeighborValues.groupBy(0).reduce(new ApplyNeighborReduceFunction<>( + reduceNeighborsFunction)).name("Neighbors function"); default: throw new IllegalArgumentException("Illegal edge direction"); } @@ -2306,22 +2306,22 @@ public DataSet> reduceOnEdges(ReduceEdgesFunction reduceEdgesF switch (direction) { case IN: - return edges.map(new ProjectVertexWithEdgeValueMap(1)) + return edges.map(new ProjectVertexWithEdgeValueMap<>(1)) .withForwardedFields("f1->f0") .name("Vertex with in-edges") - .groupBy(0).reduce(new ApplyReduceFunction(reduceEdgesFunction)) + .groupBy(0).reduce(new ApplyReduceFunction<>(reduceEdgesFunction)) .name("Reduce on edges"); case OUT: - return edges.map(new ProjectVertexWithEdgeValueMap(0)) + return edges.map(new ProjectVertexWithEdgeValueMap<>(0)) .withForwardedFields("f0->f0") .name("Vertex with out-edges") - .groupBy(0).reduce(new ApplyReduceFunction(reduceEdgesFunction)) + .groupBy(0).reduce(new ApplyReduceFunction<>(reduceEdgesFunction)) .name("Reduce on edges"); case ALL: - return edges.flatMap(new EmitOneVertexWithEdgeValuePerNode()) + return edges.flatMap(new EmitOneVertexWithEdgeValuePerNode<>()) .withForwardedFields("f2->f1") .name("Vertex with all edges") - .groupBy(0).reduce(new 
ApplyReduceFunction(reduceEdgesFunction)) + .groupBy(0).reduce(new ApplyReduceFunction<>(reduceEdgesFunction)) .name("Reduce on edges"); default: throw new IllegalArgumentException("Illegal edge direction"); diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphCsvReader.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphCsvReader.java index 6f5570f122cd7..36d2001109f03 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphCsvReader.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphCsvReader.java @@ -158,7 +158,7 @@ public Graph keyType(Class vertexKey) { DataSet> edges = edgeReader .types(vertexKey, vertexKey) .name(GraphCsvReader.class.getName()) - .map(new Tuple2ToEdgeMap()) + .map(new Tuple2ToEdgeMap<>()) .name("Type conversion"); return Graph.fromDataSet(edges, executionContext); @@ -183,7 +183,7 @@ public Graph vertexTypes(Class vertexKey, Class DataSet> edges = edgeReader .types(vertexKey, vertexKey) .name(GraphCsvReader.class.getName()) - .map(new Tuple2ToEdgeMap()) + .map(new Tuple2ToEdgeMap<>()) .name("To Edge"); // the vertex value can be provided by an input file or a user-defined mapper @@ -191,7 +191,7 @@ public Graph vertexTypes(Class vertexKey, Class DataSet> vertices = vertexReader .types(vertexKey, vertexValue) .name(GraphCsvReader.class.getName()) - .map(new Tuple2ToVertexMap()) + .map(new Tuple2ToVertexMap<>()) .name("Type conversion"); return Graph.fromDataSet(vertices, edges, executionContext); diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPair.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPair.java index 7191bc941a9f7..71264d3af35eb 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPair.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPair.java @@ -58,7 +58,7 @@ public DataSet>> runInternal(Graph()) + .with(new JoinEdgeDegreeWithVertexDegree<>()) .setParallelism(parallelism) .name("Edge target degree"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegrees.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegrees.java index 30d30fa71d8b6..efae743a09f9e 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegrees.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegrees.java @@ -52,7 +52,7 @@ public DataSet>> runInternal(Graph input) .join(vertexDegrees, JoinHint.REPARTITION_HASH_SECOND) .where(0) .equalTo(0) - .with(new JoinEdgeWithVertexDegree()) + .with(new JoinEdgeWithVertexDegree<>()) .setParallelism(parallelism) .name("Edge source degrees"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegrees.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegrees.java index 57045a107b062..db87eb9527169 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegrees.java +++ 
b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegrees.java @@ -52,7 +52,7 @@ public DataSet>> runInternal(Graph input) .join(vertexDegrees, JoinHint.REPARTITION_HASH_SECOND) .where(1) .equalTo(0) - .with(new JoinEdgeWithVertexDegree()) + .with(new JoinEdgeWithVertexDegree<>()) .setParallelism(parallelism) .name("Edge target degrees"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegrees.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegrees.java index 06a7fd2e25c6f..a7037894c9c13 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegrees.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegrees.java @@ -93,18 +93,18 @@ public DataSet> runInternal(Graph input) throws Exception { // s, t, bitmask DataSet> vertexWithEdgeOrder = input.getEdges() - .flatMap(new EmitAndFlipEdge()) + .flatMap(new EmitAndFlipEdge<>()) .setParallelism(parallelism) .name("Emit and flip edge") .groupBy(0, 1) - .reduceGroup(new ReduceBitmask()) + .reduceGroup(new ReduceBitmask<>()) .setParallelism(parallelism) .name("Reduce bitmask"); // s, d(s) DataSet> vertexDegrees = vertexWithEdgeOrder .groupBy(0) - .reduceGroup(new DegreeCount()) + .reduceGroup(new DegreeCount<>()) .setParallelism(parallelism) .name("Degree count"); @@ -113,7 +113,7 @@ public DataSet> runInternal(Graph input) .leftOuterJoin(vertexDegrees) .where(0) .equalTo(0) - .with(new JoinVertexWithVertexDegrees()) + .with(new JoinVertexWithVertexDegrees<>()) .setParallelism(parallelism) .name("Zero degree vertices"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegree.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegree.java index dc071cf14c673..38c799524d362 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegree.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegree.java @@ -84,14 +84,14 @@ public DataSet> runInternal(Graph input) // t DataSet> targetIds = input .getEdges() - .map(new MapEdgeToTargetId()) + .map(new MapEdgeToTargetId<>()) .setParallelism(parallelism) .name("Edge to target ID"); // t, d(t) DataSet> targetDegree = targetIds .groupBy(0) - .reduce(new DegreeCount()) + .reduce(new DegreeCount<>()) .setCombineHint(CombineHint.HASH) .setParallelism(parallelism) .name("Degree count"); @@ -101,7 +101,7 @@ public DataSet> runInternal(Graph input) .leftOuterJoin(targetDegree) .where(0) .equalTo(0) - .with(new JoinVertexWithVertexDegree()) + .with(new JoinVertexWithVertexDegree<>()) .setParallelism(parallelism) .name("Zero degree vertices"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegree.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegree.java index 4a4689bf4f69d..ef9c781c876a3 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegree.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegree.java @@ -84,14 +84,14 @@ 
public DataSet> runInternal(Graph input) // s DataSet> sourceIds = input .getEdges() - .map(new MapEdgeToSourceId()) + .map(new MapEdgeToSourceId<>()) .setParallelism(parallelism) .name("Edge to source ID"); // s, d(s) DataSet> sourceDegree = sourceIds .groupBy(0) - .reduce(new DegreeCount()) + .reduce(new DegreeCount<>()) .setCombineHint(CombineHint.HASH) .setParallelism(parallelism) .name("Degree count"); @@ -101,7 +101,7 @@ public DataSet> runInternal(Graph input) .leftOuterJoin(sourceDegree) .where(0) .equalTo(0) - .with(new JoinVertexWithVertexDegree()) + .with(new JoinVertexWithVertexDegree<>()) .setParallelism(parallelism) .name("Zero degree vertices"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePair.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePair.java index ff4285fbb308c..682529533a234 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePair.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePair.java @@ -89,7 +89,7 @@ public DataSet>> runInternal(Graph()) + .with(new JoinEdgeDegreeWithVertexDegree<>()) .setParallelism(parallelism) .name("Edge target degree"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegree.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegree.java index bd8ce3df4f245..3fe05d9b82114 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegree.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegree.java @@ -81,7 +81,7 @@ public DataSet>> runInternal(Graph inpu .join(vertexDegrees, JoinHint.REPARTITION_HASH_SECOND) .where(0) .equalTo(0) - .with(new JoinEdgeWithVertexDegree()) + .with(new JoinEdgeWithVertexDegree<>()) .setParallelism(parallelism) .name("Edge source degree"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegree.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegree.java index cb18d2cc3ab07..6020ba39474a5 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegree.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegree.java @@ -81,7 +81,7 @@ public DataSet>> runInternal(Graph inpu .join(vertexDegrees, JoinHint.REPARTITION_HASH_SECOND) .where(1) .equalTo(0) - .with(new JoinEdgeWithVertexDegree()) + .with(new JoinEdgeWithVertexDegree<>()) .setParallelism(parallelism) .name("Edge target degree"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegree.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegree.java index d2fad18299733..fee58a354bf53 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegree.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegree.java @@ -103,7 +103,7 @@ 
protected void mergeConfiguration(GraphAlgorithmWrappingBase other) { public DataSet> runInternal(Graph input) throws Exception { MapFunction, Vertex> mapEdgeToId = reduceOnTargetId.get() ? - new MapEdgeToTargetId() : new MapEdgeToSourceId(); + new MapEdgeToTargetId<>() : new MapEdgeToSourceId<>(); // v DataSet> vertexIds = input @@ -115,7 +115,7 @@ public DataSet> runInternal(Graph input) // v, deg(v) DataSet> degree = vertexIds .groupBy(0) - .reduce(new DegreeCount()) + .reduce(new DegreeCount<>()) .setCombineHint(CombineHint.HASH) .setParallelism(parallelism) .name("Degree count"); @@ -126,7 +126,7 @@ public DataSet> runInternal(Graph input) .leftOuterJoin(degree) .where(0) .equalTo(0) - .with(new JoinVertexWithVertexDegree()) + .with(new JoinVertexWithVertexDegree<>()) .setParallelism(parallelism) .name("Zero degree vertices"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegree.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegree.java index 522d39c981a5a..41dc64b33cafd 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegree.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegree.java @@ -138,7 +138,7 @@ public Graph runInternal(Graph input) // u, d(u) if d(u) > maximumDegree DataSet> highDegreeVertices = vertexDegree - .flatMap(new DegreeFilter(maximumDegree)) + .flatMap(new DegreeFilter<>(maximumDegree)) .setParallelism(parallelism) .name("Filter high-degree vertices"); @@ -150,7 +150,7 @@ public Graph runInternal(Graph input) .leftOuterJoin(highDegreeVertices, joinHint) .where(0) .equalTo(0) - .with(new ProjectVertex()) + .with(new ProjectVertex<>()) .setParallelism(parallelism) .name("Project low-degree vertices"); @@ -160,13 +160,13 @@ public Graph runInternal(Graph input) .leftOuterJoin(highDegreeVertices, joinHint) .where(reduceOnTargetId.get() ? 1 : 0) .equalTo(0) - .with(new ProjectEdge()) + .with(new ProjectEdge<>()) .setParallelism(parallelism) .name("Project low-degree edges by " + (reduceOnTargetId.get() ? "target" : "source")) .leftOuterJoin(highDegreeVertices, joinHint) .where(reduceOnTargetId.get() ? 0 : 1) .equalTo(0) - .with(new ProjectEdge()) + .with(new ProjectEdge<>()) .setParallelism(parallelism) .name("Project low-degree edges by " + (reduceOnTargetId.get() ? 
"source" : "target")); diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/directed/Simplify.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/directed/Simplify.java index 511840aa97fd3..0d4fa1ece28dc 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/directed/Simplify.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/directed/Simplify.java @@ -41,7 +41,7 @@ public Graph runInternal(Graph input) // Edges DataSet> edges = input .getEdges() - .filter(new RemoveSelfLoops()) + .filter(new RemoveSelfLoops<>()) .setParallelism(parallelism) .name("Remove self-loops") .distinct(0, 1) diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/undirected/Simplify.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/undirected/Simplify.java index 21db233270eb6..f00a162b36833 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/undirected/Simplify.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/undirected/Simplify.java @@ -71,7 +71,7 @@ public Graph runInternal(Graph input) // Edges DataSet> edges = input .getEdges() - .flatMap(new SymmetrizeAndRemoveSelfLoops(clipAndFlip)) + .flatMap(new SymmetrizeAndRemoveSelfLoops<>(clipAndFlip)) .setParallelism(parallelism) .name("Remove self-loops") .distinct(0, 1) diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/Translate.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/Translate.java index 4cb4e010e09e6..6dcf766cce397 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/Translate.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/Translate.java @@ -88,7 +88,7 @@ public static DataSet> translateVertexIds(DataSet TupleTypeInfo> returnType = new TupleTypeInfo<>(vertexClass, newType, vertexValueType); return vertices - .map(new TranslateVertexId(translator)) + .map(new TranslateVertexId<>(translator)) .returns(returnType) .setParallelism(parallelism) .name("Translate vertex IDs"); @@ -172,7 +172,7 @@ public static DataSet> translateEdgeIds(DataSet> returnType = new TupleTypeInfo<>(edgeClass, newType, newType, edgeValueType); return edges - .map(new TranslateEdgeId(translator)) + .map(new TranslateEdgeId<>(translator)) .returns(returnType) .setParallelism(parallelism) .name("Translate edge IDs"); @@ -257,7 +257,7 @@ public static DataSet> translateVertexValues(DataSe TupleTypeInfo> returnType = new TupleTypeInfo<>(vertexClass, idType, newType); return vertices - .map(new TranslateVertexValue(translator)) + .map(new TranslateVertexValue<>(translator)) .returns(returnType) .setParallelism(parallelism) .name("Translate vertex values"); @@ -341,7 +341,7 @@ public static DataSet> translateEdgeValues(DataSet> returnType = new TupleTypeInfo<>(edgeClass, idType, idType, newType); return edges - .map(new TranslateEdgeValue(translator)) + .map(new TranslateEdgeValue<>(translator)) .returns(returnType) .setParallelism(parallelism) .name("Translate edge values"); diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/BipartiteGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/BipartiteGraph.java index 029e2c4039e8e..97c93e287c4a7 100644 --- 
a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/BipartiteGraph.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/BipartiteGraph.java @@ -128,7 +128,7 @@ public Graph> projectionTopSimple() { DataSet>> newEdges = edges.join(edges) .where(1) .equalTo(1) - .with(new ProjectionTopSimple()) + .with(new ProjectionTopSimple<>()) .name("Simple top projection"); return Graph.fromDataSet(topVertices, newEdges, context); @@ -172,7 +172,7 @@ public Graph> projectionBottomSimple() { DataSet>> newEdges = edges.join(edges) .where(0) .equalTo(0) - .with(new ProjectionBottomSimple()) + .with(new ProjectionBottomSimple<>()) .name("Simple bottom projection"); return Graph.fromDataSet(bottomVertices, newEdges, context); @@ -218,7 +218,7 @@ public Graph> projectionTopFull() { DataSet>> newEdges = edgesWithVertices.join(edgesWithVertices) .where(1) .equalTo(1) - .with(new ProjectionTopFull()) + .with(new ProjectionTopFull<>()) .name("Full top projection"); return Graph.fromDataSet(topVertices, newEdges, context); @@ -284,7 +284,7 @@ public Graph> projectionBottomFull() { DataSet>> newEdges = edgesWithVertices.join(edgesWithVertices) .where(0) .equalTo(0) - .with(new ProjectionBottomFull()) + .with(new ProjectionBottomFull<>()) .name("Full bottom projection"); return Graph.fromDataSet(bottomVertices, newEdges, context); diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGeneratorUtils.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGeneratorUtils.java index fca9d8b8ec898..d5a70f325c2c8 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGeneratorUtils.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGeneratorUtils.java @@ -91,7 +91,7 @@ public Vertex map(LongValue value) */ public static DataSet> vertexSet(DataSet> edges, int parallelism) { DataSet> vertexSet = edges - .flatMap(new EmitSrcAndTarget()) + .flatMap(new EmitSrcAndTarget<>()) .setParallelism(parallelism) .name("Emit source and target labels"); diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/RMatGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/RMatGraph.java index d14d32cdcff53..1960aa3344e2c 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/RMatGraph.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/RMatGraph.java @@ -156,7 +156,7 @@ public Graph generate() { .rebalance() .setParallelism(parallelism) .name("Rebalance") - .flatMap(new GenerateEdges(vertexCount, scale, a, b, c, noiseEnabled, noise)) + .flatMap(new GenerateEdges<>(vertexCount, scale, a, b, c, noiseEnabled, noise)) .setParallelism(parallelism) .name("RMat graph edges"); diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GSAConfiguration.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GSAConfiguration.java index 72e18aec68992..f09a89086274f 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GSAConfiguration.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GSAConfiguration.java @@ -58,7 +58,7 @@ public GSAConfiguration() {} * @param data The data set to be broadcasted. 
*/ public void addBroadcastSetForGatherFunction(String name, DataSet data) { - this.bcVarsGather.add(new Tuple2>(name, data)); + this.bcVarsGather.add(new Tuple2<>(name, data)); } /** @@ -68,7 +68,7 @@ public void addBroadcastSetForGatherFunction(String name, DataSet data) { * @param data The data set to be broadcasted. */ public void addBroadcastSetForSumFunction(String name, DataSet data) { - this.bcVarsSum.add(new Tuple2>(name, data)); + this.bcVarsSum.add(new Tuple2<>(name, data)); } /** @@ -78,7 +78,7 @@ public void addBroadcastSetForSumFunction(String name, DataSet data) { * @param data The data set to be broadcasted. */ public void addBroadcastSetForApplyFunction(String name, DataSet data) { - this.bcVarsApply.add(new Tuple2>(name, data)); + this.bcVarsApply.add(new Tuple2<>(name, data)); } /** diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherSumApplyIteration.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherSumApplyIteration.java index 12d497718d3b3..5f04b705d2a68 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherSumApplyIteration.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherSumApplyIteration.java @@ -169,24 +169,24 @@ public DataSet> createResult() { case OUT: neighbors = iteration .getWorkset().join(edgeDataSet) - .where(0).equalTo(0).with(new ProjectKeyWithNeighborOUT()); + .where(0).equalTo(0).with(new ProjectKeyWithNeighborOUT<>()); break; case IN: neighbors = iteration .getWorkset().join(edgeDataSet) - .where(0).equalTo(1).with(new ProjectKeyWithNeighborIN()); + .where(0).equalTo(1).with(new ProjectKeyWithNeighborIN<>()); break; case ALL: neighbors = iteration .getWorkset().join(edgeDataSet) - .where(0).equalTo(0).with(new ProjectKeyWithNeighborOUT()).union(iteration + .where(0).equalTo(0).with(new ProjectKeyWithNeighborOUT<>()).union(iteration .getWorkset().join(edgeDataSet) - .where(0).equalTo(1).with(new ProjectKeyWithNeighborIN())); + .where(0).equalTo(1).with(new ProjectKeyWithNeighborIN<>())); break; default: neighbors = iteration .getWorkset().join(edgeDataSet) - .where(0).equalTo(0).with(new ProjectKeyWithNeighborOUT()); + .where(0).equalTo(0).with(new ProjectKeyWithNeighborOUT<>()); break; } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/CommunityDetection.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/CommunityDetection.java index 984628653cd98..ccf2bb18c7078 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/CommunityDetection.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/CommunityDetection.java @@ -73,14 +73,14 @@ public CommunityDetection(int maxIterations, double delta) { public Graph run(Graph graph) { DataSet>> initializedVertices = graph.getVertices() - .map(new AddScoreToVertexValuesMapper()); + .map(new AddScoreToVertexValuesMapper<>()); Graph, Double> graphWithScoredVertices = Graph.fromDataSet(initializedVertices, graph.getEdges(), graph.getContext()).getUndirected(); - return graphWithScoredVertices.runScatterGatherIteration(new LabelMessenger(), - new VertexLabelUpdater(delta), maxIterations) - .mapVertices(new RemoveScoreFromVertexValuesMapper()); + return graphWithScoredVertices.runScatterGatherIteration(new LabelMessenger<>(), + new VertexLabelUpdater<>(delta), maxIterations) + .mapVertices(new RemoveScoreFromVertexValuesMapper<>()); } 
@SuppressWarnings("serial") diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/ConnectedComponents.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/ConnectedComponents.java index a3110ab239afa..5cb8abed97d45 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/ConnectedComponents.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/ConnectedComponents.java @@ -22,7 +22,6 @@ import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.typeutils.ResultTypeQueryable; import org.apache.flink.api.java.typeutils.TupleTypeInfo; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; import org.apache.flink.graph.GraphAlgorithm; import org.apache.flink.graph.Vertex; @@ -73,12 +72,12 @@ public DataSet> run(Graph graph) throws Exception { TypeInformation valueTypeInfo = ((TupleTypeInfo) graph.getVertices().getType()).getTypeAt(1); Graph undirectedGraph = graph - .mapEdges(new MapTo, NullValue>(NullValue.getInstance())) + .mapEdges(new MapTo<>(NullValue.getInstance())) .getUndirected(); return undirectedGraph.runScatterGatherIteration( - new CCMessenger(valueTypeInfo), - new CCUpdater(), + new CCMessenger<>(valueTypeInfo), + new CCUpdater<>(), maxIterations).getVertices(); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSAConnectedComponents.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSAConnectedComponents.java index 37e5cabcb7686..230f88eac9301 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSAConnectedComponents.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSAConnectedComponents.java @@ -22,7 +22,6 @@ import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.typeutils.ResultTypeQueryable; import org.apache.flink.api.java.typeutils.TupleTypeInfo; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; import org.apache.flink.graph.GraphAlgorithm; import org.apache.flink.graph.Vertex; @@ -74,13 +73,13 @@ public DataSet> run(Graph graph) throws Exception { TypeInformation valueTypeInfo = ((TupleTypeInfo) graph.getVertices().getType()).getTypeAt(1); Graph undirectedGraph = graph - .mapEdges(new MapTo, NullValue>(NullValue.getInstance())) + .mapEdges(new MapTo<>(NullValue.getInstance())) .getUndirected(); return undirectedGraph.runGatherSumApplyIteration( new GatherNeighborIds<>(valueTypeInfo), new SelectMinId<>(valueTypeInfo), - new UpdateComponentId(valueTypeInfo), + new UpdateComponentId<>(valueTypeInfo), maxIterations).getVertices(); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSASingleSourceShortestPaths.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSASingleSourceShortestPaths.java index 2d0b8da8d9460..28e9168f816d2 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSASingleSourceShortestPaths.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSASingleSourceShortestPaths.java @@ -51,9 +51,9 @@ public GSASingleSourceShortestPaths(K srcVertexId, Integer maxIterations) { @Override public DataSet> run(Graph input) { - return input.mapVertices(new InitVerticesMapper(srcVertexId)) + return input.mapVertices(new InitVerticesMapper<>(srcVertexId)) .runGatherSumApplyIteration(new CalculateDistances(), new 
ChooseMinDistance(), - new UpdateDistance(), maxIterations) + new UpdateDistance<>(), maxIterations) .getVertices(); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/LabelPropagation.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/LabelPropagation.java index 1e700f4c057f8..880a67bb66fd9 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/LabelPropagation.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/LabelPropagation.java @@ -22,7 +22,6 @@ import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.typeutils.ResultTypeQueryable; import org.apache.flink.api.java.typeutils.TupleTypeInfo; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; import org.apache.flink.graph.GraphAlgorithm; import org.apache.flink.graph.Vertex; @@ -77,9 +76,9 @@ public DataSet> run(Graph input) { TypeInformation valueType = ((TupleTypeInfo) input.getVertices().getType()).getTypeAt(1); // iteratively adopt the most frequent label among the neighbors of each vertex return input - .mapEdges(new MapTo, NullValue>(NullValue.getInstance())) + .mapEdges(new MapTo<>(NullValue.getInstance())) .runScatterGatherIteration( - new SendNewLabelToNeighbors(valueType), new UpdateVertexLabel(), maxIterations) + new SendNewLabelToNeighbors<>(valueType), new UpdateVertexLabel<>(), maxIterations) .getVertices(); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/SingleSourceShortestPaths.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/SingleSourceShortestPaths.java index 15f0a84f69e79..8f41fa03d2c43 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/SingleSourceShortestPaths.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/SingleSourceShortestPaths.java @@ -51,8 +51,8 @@ public SingleSourceShortestPaths(K srcVertexId, Integer maxIterations) { @Override public DataSet> run(Graph input) { - return input.mapVertices(new InitVerticesMapper(srcVertexId)) - .runScatterGatherIteration(new MinDistanceMessenger(), new VertexDistanceUpdater(), + return input.mapVertices(new InitVerticesMapper<>(srcVertexId)) + .runScatterGatherIteration(new MinDistanceMessenger<>(), new VertexDistanceUpdater<>(), maxIterations).getVertices(); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/Summarization.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/Summarization.java index 44ea988532e36..a1498df5d0896 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/Summarization.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/Summarization.java @@ -102,11 +102,11 @@ public Graph, EdgeValue> run(Graph input) thro // group vertices by value and create vertex group items DataSet> vertexGroupItems = input.getVertices() .groupBy(1) - .reduceGroup(new VertexGroupReducer()); + .reduceGroup(new VertexGroupReducer<>()); // create super vertices DataSet>> summarizedVertices = vertexGroupItems - .filter(new VertexGroupItemToSummarizedVertexFilter()) - .map(new VertexGroupItemToSummarizedVertexMapper()); + .filter(new VertexGroupItemToSummarizedVertexFilter<>()) + .map(new VertexGroupItemToSummarizedVertexMapper<>()); // ------------------------- // build super edges @@ -114,22 +114,22 @@ public Graph, EdgeValue> run(Graph input) 
thro // create mapping between vertices and their representative DataSet> vertexToRepresentativeMap = vertexGroupItems - .filter(new VertexGroupItemToRepresentativeFilter()) - .map(new VertexGroupItemToVertexWithRepresentativeMapper()); + .filter(new VertexGroupItemToRepresentativeFilter<>()) + .map(new VertexGroupItemToVertexWithRepresentativeMapper<>()); // join edges with vertex representatives and update source and target identifiers DataSet> edgesForGrouping = input.getEdges() .join(vertexToRepresentativeMap) .where(0) // source vertex id .equalTo(0) // vertex id - .with(new SourceVertexJoinFunction()) + .with(new SourceVertexJoinFunction<>()) .join(vertexToRepresentativeMap) .where(1) // target vertex id .equalTo(0) // vertex id - .with(new TargetVertexJoinFunction()); + .with(new TargetVertexJoinFunction<>()); // create super edges DataSet>> summarizedEdges = edgesForGrouping .groupBy(0, 1, 2) // group by source id (0), target id (1) and edge value (2) - .reduceGroup(new EdgeGroupReducer()); + .reduceGroup(new EdgeGroupReducer<>()); return Graph.fromDataSet(summarizedVertices, summarizedEdges, input.getContext()); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/TriangleEnumerator.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/TriangleEnumerator.java index 2ae6120cce18b..23f942c68cbe3 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/TriangleEnumerator.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/TriangleEnumerator.java @@ -65,21 +65,21 @@ public DataSet> run(Graph input) throws Exception { DataSet> edges = input.getEdges(); // annotate edges with degrees - DataSet> edgesWithDegrees = edges.flatMap(new EdgeDuplicator()) - .groupBy(0).sortGroup(1, Order.ASCENDING).reduceGroup(new DegreeCounter()) - .groupBy(EdgeWithDegrees.V1, EdgeWithDegrees.V2).reduce(new DegreeJoiner()); + DataSet> edgesWithDegrees = edges.flatMap(new EdgeDuplicator<>()) + .groupBy(0).sortGroup(1, Order.ASCENDING).reduceGroup(new DegreeCounter<>()) + .groupBy(EdgeWithDegrees.V1, EdgeWithDegrees.V2).reduce(new DegreeJoiner<>()); // project edges by degrees - DataSet> edgesByDegree = edgesWithDegrees.map(new EdgeByDegreeProjector()); + DataSet> edgesByDegree = edgesWithDegrees.map(new EdgeByDegreeProjector<>()); // project edges by vertex id - DataSet> edgesById = edgesByDegree.map(new EdgeByIdProjector()); + DataSet> edgesById = edgesByDegree.map(new EdgeByIdProjector<>()); DataSet> triangles = edgesByDegree // build triads .groupBy(EdgeWithDegrees.V1).sortGroup(EdgeWithDegrees.V2, Order.ASCENDING) - .reduceGroup(new TriadBuilder()) + .reduceGroup(new TriadBuilder<>()) // filter triads - .join(edgesById, JoinHint.REPARTITION_HASH_SECOND).where(Triad.V2, Triad.V3).equalTo(0, 1).with(new TriadFilter()); + .join(edgesById, JoinHint.REPARTITION_HASH_SECOND).where(Triad.V2, Triad.V3).equalTo(0, 1).with(new TriadFilter<>()); return triangles; } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficient.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficient.java index 981110fabd2a1..55d305686029e 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficient.java +++ 
b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficient.java @@ -119,13 +119,13 @@ public DataSet> runInternal(Graph input) // u, edge count DataSet> triangleVertices = triangles - .flatMap(new SplitTriangles()) + .flatMap(new SplitTriangles<>()) .name("Split triangle vertices"); // u, triangle count DataSet> vertexTriangleCount = triangleVertices .groupBy(0) - .reduce(new CountTriangles()) + .reduce(new CountTriangles<>()) .setCombineHint(CombineHint.HASH) .name("Count triangles"); @@ -140,7 +140,7 @@ public DataSet> runInternal(Graph input) .leftOuterJoin(vertexTriangleCount) .where(0) .equalTo(0) - .with(new JoinVertexDegreeWithTriangleCount()) + .with(new JoinVertexDegreeWithTriangleCount<>()) .setParallelism(parallelism) .name("Clustering coefficient"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriangleListing.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriangleListing.java index 00b22100680cf..52d3c10e8473b 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriangleListing.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriangleListing.java @@ -84,11 +84,11 @@ public DataSet> runInternal(Graph input) // u, v, bitmask where u < v DataSet> filteredByID = input .getEdges() - .map(new OrderByID()) + .map(new OrderByID<>()) .setParallelism(parallelism) .name("Order by ID") .groupBy(0, 1) - .reduceGroup(new ReduceBitmask()) + .reduceGroup(new ReduceBitmask<>()) .setParallelism(parallelism) .name("Flatten by ID"); @@ -99,11 +99,11 @@ public DataSet> runInternal(Graph input) // u, v, bitmask where deg(u) < deg(v) or (deg(u) == deg(v) and u < v) DataSet> filteredByDegree = pairDegrees - .map(new OrderByDegree()) + .map(new OrderByDegree<>()) .setParallelism(parallelism) .name("Order by degree") .groupBy(0, 1) - .reduceGroup(new ReduceBitmask()) + .reduceGroup(new ReduceBitmask<>()) .setParallelism(parallelism) .name("Flatten by degree"); @@ -111,7 +111,7 @@ public DataSet> runInternal(Graph input) DataSet> triplets = filteredByDegree .groupBy(0) .sortGroup(1, Order.ASCENDING) - .reduceGroup(new GenerateTriplets()) + .reduceGroup(new GenerateTriplets<>()) .name("Generate triplets"); // u, v, w, bitmask where (u, v), (u, w), and (v, w) are edges in graph @@ -119,16 +119,16 @@ public DataSet> runInternal(Graph input) .join(filteredByID, JoinOperatorBase.JoinHint.REPARTITION_HASH_SECOND) .where(1, 2) .equalTo(0, 1) - .with(new ProjectTriangles()) + .with(new ProjectTriangles<>()) .name("Triangle listing"); if (permuteResults) { triangles = triangles - .flatMap(new PermuteResult()) + .flatMap(new PermuteResult<>()) .name("Permute triangle vertices"); } else if (sortTriangleVertices.get()) { triangles = triangles - .map(new SortTriangleVertices()) + .map(new SortTriangleVertices<>()) .name("Sort triangle vertices"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficient.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficient.java index 0beb989b60d4c..e1c7655347170 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficient.java +++ 
b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficient.java @@ -118,13 +118,13 @@ public DataSet> runInternal(Graph input) // u, 1 DataSet> triangleVertices = triangles - .flatMap(new SplitTriangles()) + .flatMap(new SplitTriangles<>()) .name("Split triangle vertices"); // u, triangle count DataSet> vertexTriangleCount = triangleVertices .groupBy(0) - .reduce(new CountTriangles()) + .reduce(new CountTriangles<>()) .setCombineHint(CombineHint.HASH) .name("Count triangles"); @@ -139,7 +139,7 @@ public DataSet> runInternal(Graph input) .leftOuterJoin(vertexTriangleCount) .where(0) .equalTo(0) - .with(new JoinVertexDegreeWithTriangleCount()) + .with(new JoinVertexDegreeWithTriangleCount<>()) .setParallelism(parallelism) .name("Clustering coefficient"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriangleListing.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriangleListing.java index 2472744dc80dd..3f1b00a700d5c 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriangleListing.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriangleListing.java @@ -83,7 +83,7 @@ public DataSet> runInternal(Graph input) // u, v where u < v DataSet> filteredByID = input .getEdges() - .flatMap(new FilterByID()) + .flatMap(new FilterByID<>()) .setParallelism(parallelism) .name("Filter by ID"); @@ -94,7 +94,7 @@ public DataSet> runInternal(Graph input) // u, v where deg(u) < deg(v) or (deg(u) == deg(v) and u < v) DataSet> filteredByDegree = pairDegree - .flatMap(new FilterByDegree()) + .flatMap(new FilterByDegree<>()) .setParallelism(parallelism) .name("Filter by degree"); @@ -102,7 +102,7 @@ public DataSet> runInternal(Graph input) DataSet> triplets = filteredByDegree .groupBy(0) .sortGroup(1, Order.ASCENDING) - .reduceGroup(new GenerateTriplets()) + .reduceGroup(new GenerateTriplets<>()) .name("Generate triplets"); // u, v, w where (u, v), (u, w), and (v, w) are edges in graph, v < w @@ -110,16 +110,16 @@ public DataSet> runInternal(Graph input) .join(filteredByID, JoinOperatorBase.JoinHint.REPARTITION_HASH_SECOND) .where(1, 2) .equalTo(0, 1) - .with(new ProjectTriangles()) + .with(new ProjectTriangles<>()) .name("Triangle listing"); if (permuteResults) { triangles = triangles - .flatMap(new PermuteResult()) + .flatMap(new PermuteResult<>()) .name("Permute triangle vertices"); } else if (sortTriangleVertices.get()) { triangles = triangles - .map(new SortTriangleVertices()) + .map(new SortTriangleVertices<>()) .name("Sort triangle vertices"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/HITS.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/HITS.java index 6b41ee4b3686b..e59240b05294d 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/HITS.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/HITS.java @@ -129,17 +129,17 @@ public DataSet> runInternal(Graph input) throws Exception { DataSet> edges = input .getEdges() - .map(new ExtractEdgeIDs()) + .map(new ExtractEdgeIDs<>()) .setParallelism(parallelism) .name("Extract edge IDs"); // ID, hub, authority DataSet> initialScores = edges - .map(new InitializeScores()) + .map(new 
InitializeScores<>()) .setParallelism(parallelism) .name("Initial scores") .groupBy(0) - .reduce(new SumScores()) + .reduce(new SumScores<>()) .setCombineHint(CombineHint.HASH) .setParallelism(parallelism) .name("Sum"); @@ -153,18 +153,18 @@ public DataSet> runInternal(Graph input) .coGroup(edges) .where(0) .equalTo(1) - .with(new Hubbiness()) + .with(new Hubbiness<>()) .setParallelism(parallelism) .name("Hub") .groupBy(0) - .reduce(new SumScore()) + .reduce(new SumScore<>()) .setCombineHint(CombineHint.HASH) .setParallelism(parallelism) .name("Sum"); // sum-of-hubbiness-squared DataSet hubbinessSumSquared = hubbiness - .map(new Square()) + .map(new Square<>()) .setParallelism(parallelism) .name("Square") .reduce(new Sum()) @@ -177,18 +177,18 @@ public DataSet> runInternal(Graph input) .coGroup(edges) .where(0) .equalTo(0) - .with(new Authority()) + .with(new Authority<>()) .setParallelism(parallelism) .name("Authority") .groupBy(0) - .reduce(new SumScore()) + .reduce(new SumScore<>()) .setCombineHint(CombineHint.HASH) .setParallelism(parallelism) .name("Sum"); // sum-of-authority-squared DataSet authoritySumSquared = authority - .map(new Square()) + .map(new Square<>()) .setParallelism(parallelism) .name("Square") .reduce(new Sum()) @@ -201,7 +201,7 @@ public DataSet> runInternal(Graph input) .fullOuterJoin(authority, JoinHint.REPARTITION_SORT_MERGE) .where(0) .equalTo(0) - .with(new JoinAndNormalizeHubAndAuthority()) + .with(new JoinAndNormalizeHubAndAuthority<>()) .withBroadcastSet(hubbinessSumSquared, HUBBINESS_SUM_SQUARED) .withBroadcastSet(authoritySumSquared, AUTHORITY_SUM_SQUARED) .setParallelism(parallelism) @@ -214,7 +214,7 @@ public DataSet> runInternal(Graph input) .fullOuterJoin(scores, JoinHint.REPARTITION_SORT_MERGE) .where(0) .equalTo(0) - .with(new ChangeInScores()) + .with(new ChangeInScores<>()) .setParallelism(parallelism) .name("Change in scores"); @@ -225,7 +225,7 @@ public DataSet> runInternal(Graph input) return iterative .closeWith(passThrough) - .map(new TranslateResult()) + .map(new TranslateResult<>()) .setParallelism(parallelism) .name("Map result"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/PageRank.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/PageRank.java index af56e506e8351..71c37aa555219 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/PageRank.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/PageRank.java @@ -151,20 +151,20 @@ public DataSet> runInternal(Graph input) DataSet> edgeSourceDegree = input .run(new EdgeSourceDegrees() .setParallelism(parallelism)) - .map(new ExtractSourceDegree()) + .map(new ExtractSourceDegree<>()) .setParallelism(parallelism) .name("Extract source degree"); // vertices with zero in-edges DataSet> sourceVertices = vertexDegree - .flatMap(new InitializeSourceVertices()) + .flatMap(new InitializeSourceVertices<>()) .withBroadcastSet(vertexCount, VERTEX_COUNT) .setParallelism(parallelism) .name("Initialize source vertex scores"); // s, initial pagerank(s) DataSet> initialScores = vertexDegree - .map(new InitializeVertexScores()) + .map(new InitializeVertexScores<>()) .withBroadcastSet(vertexCount, VERTEX_COUNT) .setParallelism(parallelism) .name("Initialize scores"); @@ -178,18 +178,18 @@ public DataSet> runInternal(Graph input) .coGroup(edgeSourceDegree) .where(0) .equalTo(0) - .with(new SendScore()) + .with(new SendScore<>()) 
.setParallelism(parallelism) .name("Send score") .groupBy(0) - .reduce(new SumScore()) + .reduce(new SumScore<>()) .setCombineHint(CombineHint.HASH) .setParallelism(parallelism) .name("Sum"); // ignored ID, total pagerank DataSet> sumOfScores = vertexScores - .reduce(new SumVertexScores()) + .reduce(new SumVertexScores<>()) .setParallelism(parallelism) .name("Sum"); @@ -198,7 +198,7 @@ public DataSet> runInternal(Graph input) .union(sourceVertices) .setParallelism(parallelism) .name("Union with source vertices") - .map(new AdjustScores(dampingFactor)) + .map(new AdjustScores<>(dampingFactor)) .withBroadcastSet(sumOfScores, SUM_OF_SCORES) .withBroadcastSet(vertexCount, VERTEX_COUNT) .setParallelism(parallelism) @@ -211,7 +211,7 @@ public DataSet> runInternal(Graph input) .join(adjustedScores) .where(0) .equalTo(0) - .with(new ChangeInScores()) + .with(new ChangeInScores<>()) .setParallelism(parallelism) .name("Change in scores"); @@ -222,7 +222,7 @@ public DataSet> runInternal(Graph input) return iterative .closeWith(passThrough) - .map(new TranslateResult()) + .map(new TranslateResult<>()) .setParallelism(parallelism) .name("Map result"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/EdgeMetrics.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/EdgeMetrics.java index c88e4016fb817..7294fd152e331 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/EdgeMetrics.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/EdgeMetrics.java @@ -92,15 +92,15 @@ public EdgeMetrics run(Graph input) // s, d(s), count of (u, v) where deg(u) < deg(v) or (deg(u) == deg(v) and u < v) DataSet> edgeStats = edgeDegreesPair - .flatMap(new EdgeStats()) + .flatMap(new EdgeStats<>()) .setParallelism(parallelism) .name("Edge stats") .groupBy(0, 1) - .reduceGroup(new ReduceEdgeStats()) + .reduceGroup(new ReduceEdgeStats<>()) .setParallelism(parallelism) .name("Reduce edge stats") .groupBy(0) - .reduce(new SumEdgeStats()) + .reduce(new SumEdgeStats<>()) .setCombineHint(CombineHint.HASH) .setParallelism(parallelism) .name("Sum edge stats"); diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/EdgeMetrics.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/EdgeMetrics.java index 4c0d65453a277..8c520e67b6b9c 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/EdgeMetrics.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/EdgeMetrics.java @@ -103,11 +103,11 @@ public EdgeMetrics run(Graph input) // s, d(s), count of (u, v) where deg(u) < deg(v) or (deg(u) == deg(v) and u < v) DataSet> edgeStats = edgeDegreePair - .map(new EdgeStats()) + .map(new EdgeStats<>()) .setParallelism(parallelism) .name("Edge stats") .groupBy(0) - .reduce(new SumEdgeStats()) + .reduce(new SumEdgeStats<>()) .setCombineHint(CombineHint.HASH) .setParallelism(parallelism) .name("Sum edge stats"); diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/AdamicAdar.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/AdamicAdar.java index d761f609f1582..752e206483877 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/AdamicAdar.java +++ 
b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/AdamicAdar.java @@ -153,7 +153,7 @@ public DataSet> runInternal(Graph input) DataSet> inverseLogDegree = input .run(new VertexDegree() .setParallelism(parallelism)) - .map(new VertexInverseLogDegree()) + .map(new VertexInverseLogDegree<>()) .setParallelism(parallelism) .name("Vertex score"); @@ -172,7 +172,7 @@ public DataSet> runInternal(Graph input) DataSet> groupSpans = sourceInverseLogDegree .groupBy(0) .sortGroup(1, Order.ASCENDING) - .reduceGroup(new GenerateGroupSpans()) + .reduceGroup(new GenerateGroupSpans<>()) .setParallelism(parallelism) .name("Generate group spans"); @@ -181,7 +181,7 @@ public DataSet> runInternal(Graph input) .rebalance() .setParallelism(parallelism) .name("Rebalance") - .flatMap(new GenerateGroups()) + .flatMap(new GenerateGroups<>()) .setParallelism(parallelism) .name("Generate groups"); @@ -189,19 +189,19 @@ public DataSet> runInternal(Graph input) DataSet> twoPaths = groups .groupBy(0, 1) .sortGroup(2, Order.ASCENDING) - .reduceGroup(new GenerateGroupPairs()) + .reduceGroup(new GenerateGroupPairs<>()) .name("Generate group pairs"); // t, u, adamic-adar score GroupReduceOperator, Result> scores = twoPaths .groupBy(0, 1) - .reduceGroup(new ComputeScores(minimumScore, minimumRatio)) + .reduceGroup(new ComputeScores<>(minimumScore, minimumRatio)) .name("Compute scores"); if (minimumRatio > 0.0f) { // total score, number of pairs of neighbors DataSet> sumOfScoresAndNumberOfNeighborPairs = inverseLogDegree - .map(new ComputeScoreFromVertex()) + .map(new ComputeScoreFromVertex<>()) .setParallelism(parallelism) .name("Average score") .sum(0) @@ -213,7 +213,7 @@ public DataSet> runInternal(Graph input) if (mirrorResults) { return scores - .flatMap(new MirrorResult>()) + .flatMap(new MirrorResult<>()) .name("Mirror results"); } else { return scores; diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/JaccardIndex.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/JaccardIndex.java index 8e820acb7fd26..92bf9e3e4bdd5 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/JaccardIndex.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/JaccardIndex.java @@ -199,7 +199,7 @@ public DataSet> runInternal(Graph input) DataSet> groupSpans = neighborDegree .groupBy(0) .sortGroup(1, Order.ASCENDING) - .reduceGroup(new GenerateGroupSpans(groupSize)) + .reduceGroup(new GenerateGroupSpans<>(groupSize)) .setParallelism(parallelism) .name("Generate group spans"); @@ -208,7 +208,7 @@ public DataSet> runInternal(Graph input) .rebalance() .setParallelism(parallelism) .name("Rebalance") - .flatMap(new GenerateGroups()) + .flatMap(new GenerateGroups<>()) .setParallelism(parallelism) .name("Generate groups"); @@ -216,20 +216,20 @@ public DataSet> runInternal(Graph input) DataSet> twoPaths = groups .groupBy(0, 1) .sortGroup(2, Order.ASCENDING) - .reduceGroup(new GenerateGroupPairs(groupSize)) + .reduceGroup(new GenerateGroupPairs<>(groupSize)) .name("Generate group pairs"); // t, u, intersection, union DataSet> scores = twoPaths .groupBy(0, 1) - .reduceGroup(new ComputeScores(unboundedScores, + .reduceGroup(new ComputeScores<>(unboundedScores, minimumScoreNumerator, minimumScoreDenominator, maximumScoreNumerator, maximumScoreDenominator)) .name("Compute scores"); if (mirrorResults) { scores = scores - .flatMap(new MirrorResult>()) + 
.flatMap(new MirrorResult<>()) .name("Mirror results"); } diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricConfiguration.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricConfiguration.java index 69fcc527222f5..39b9bcfb93ab6 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricConfiguration.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricConfiguration.java @@ -48,7 +48,7 @@ public VertexCentricConfiguration() {} * @param data The data set to be broadcasted. */ public void addBroadcastSet(String name, DataSet data) { - this.bcVars.add(new Tuple2>(name, data)); + this.bcVars.add(new Tuple2<>(name, data)); } /** diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricIteration.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricIteration.java index c30b1a7907578..6c06a3a992e4c 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricIteration.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricIteration.java @@ -176,9 +176,9 @@ public DataSet> createResult() { DataSet, Either>> verticesWithMsgs = iteration.getSolutionSet().join(iteration.getWorkset()) .where(0).equalTo(0) - .with(new AppendVertexState()) - .returns(new TupleTypeInfo, Either>>( - vertexType, nullableMsgTypeInfo)); + .with(new AppendVertexState<>()) + .returns(new TupleTypeInfo<>( + vertexType, nullableMsgTypeInfo)); VertexComputeUdf vertexUdf = new VertexComputeUdf<>(computeFunction, intermediateTypeInfo); @@ -190,11 +190,11 @@ public DataSet> createResult() { // compute the solution set delta DataSet> solutionSetDelta = superstepComputation.flatMap( - new ProjectNewVertexValue()).returns(vertexType); + new ProjectNewVertexValue<>()).returns(vertexType); // compute the inbox of each vertex for the next superstep (new workset) DataSet>> allMessages = superstepComputation.flatMap( - new ProjectMessages()).returns(workSetTypeInfo); + new ProjectMessages<>()).returns(workSetTypeInfo); DataSet>> newWorkSet = allMessages; diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherConfiguration.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherConfiguration.java index 6a628473c7a60..0422f138330c4 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherConfiguration.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherConfiguration.java @@ -59,7 +59,7 @@ public ScatterGatherConfiguration() {} * @param data The data set to be broadcasted. */ public void addBroadcastSetForScatterFunction(String name, DataSet data) { - this.bcVarsScatter.add(new Tuple2>(name, data)); + this.bcVarsScatter.add(new Tuple2<>(name, data)); } /** @@ -69,7 +69,7 @@ public void addBroadcastSetForScatterFunction(String name, DataSet data) { * @param data The data set to be broadcasted. 
*/ public void addBroadcastSetForGatherFunction(String name, DataSet data) { - this.bcVarsGather.add(new Tuple2>(name, data)); + this.bcVarsGather.add(new Tuple2<>(name, data)); } /** diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/GraphUtils.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/GraphUtils.java index 3e2ac239c8ca9..8082cd9223421 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/GraphUtils.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/GraphUtils.java @@ -46,7 +46,7 @@ private GraphUtils() {} */ public static DataSet count(DataSet input) { return input - .map(new MapTo(new LongValue(1))) + .map(new MapTo<>(new LongValue(1))) .returns(LONG_VALUE_TYPE_INFO) .name("Emit 1") .reduce(new AddLongValue()) diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/InvalidVertexIdsValidator.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/InvalidVertexIdsValidator.java index 57aa9874dd8f4..b85b6e61c6821 100644 --- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/InvalidVertexIdsValidator.java +++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/InvalidVertexIdsValidator.java @@ -48,11 +48,11 @@ public class InvalidVertexIdsValidator extends GraphValidator graph) throws Exception { DataSet> edgeIds = graph.getEdges() - .flatMap(new MapEdgeIds()).distinct(); + .flatMap(new MapEdgeIds<>()).distinct(); DataSet invalidIds = graph.getVertices().coGroup(edgeIds).where(0) - .equalTo(0).with(new GroupInvalidIds()).first(1); + .equalTo(0).with(new GroupInvalidIds<>()).first(1); - return invalidIds.map(new KToTupleMap()).count() == 0; + return invalidIds.map(new KToTupleMap<>()).count() == 0; } private static final class MapEdgeIds implements FlatMapFunction, Tuple1> { diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/AsmTestBase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/AsmTestBase.java index f89d4f505676a..1afb5dadda881 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/AsmTestBase.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/AsmTestBase.java @@ -120,7 +120,7 @@ protected Graph directedRMatGraph(int scale, in return new RMatGraph<>(env, new JDKRandomGeneratorFactory(), vertexCount, edgeCount) .generate() - .run(new org.apache.flink.graph.asm.simple.directed.Simplify()); + .run(new org.apache.flink.graph.asm.simple.directed.Simplify<>()); } /** @@ -149,6 +149,6 @@ protected Graph undirectedRMatGraph(int scale, return new RMatGraph<>(env, new JDKRandomGeneratorFactory(), vertexCount, edgeCount) .generate() - .run(new org.apache.flink.graph.asm.simple.undirected.Simplify(false)); + .run(new org.apache.flink.graph.asm.simple.undirected.Simplify<>(false)); } } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPairTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPairTest.java index 22b47fe4e57a5..63bf133aef16b 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPairTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPairTest.java @@ -53,7 +53,7 @@ public 
void testWithSimpleGraph() "(5,3,((null),(1,1,0),(4,2,2)))"; DataSet>> degreesPair = directedSimpleGraph - .run(new EdgeDegreesPair()); + .run(new EdgeDegreesPair<>()); TestBaseUtils.compareResultAsText(degreesPair.collect(), expectedResult); } @@ -62,7 +62,7 @@ public void testWithSimpleGraph() public void testWithRMatGraph() throws Exception { DataSet>> degreesPair = directedRMatGraph(10, 16) - .run(new EdgeDegreesPair()); + .run(new EdgeDegreesPair<>()); Checksum checksum = new ChecksumHashCode>>() .run(degreesPair) diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegreesTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegreesTest.java index f0d51d2e1e2f0..967cfb2bbb73b 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegreesTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegreesTest.java @@ -53,7 +53,7 @@ public void testWithSimpleGraph() "(5,3,((null),(1,1,0)))"; DataSet>> sourceDegrees = directedSimpleGraph - .run(new EdgeSourceDegrees()); + .run(new EdgeSourceDegrees<>()); TestBaseUtils.compareResultAsText(sourceDegrees.collect(), expectedResult); } @@ -62,7 +62,7 @@ public void testWithSimpleGraph() public void testWithRMatGraph() throws Exception { DataSet>> sourceDegrees = directedRMatGraph(10, 16) - .run(new EdgeSourceDegrees()); + .run(new EdgeSourceDegrees<>()); Checksum checksum = new ChecksumHashCode>>() .run(sourceDegrees) diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegreesTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegreesTest.java index 6d58bb09e18c6..abb76c4f1b542 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegreesTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegreesTest.java @@ -53,7 +53,7 @@ public void testWithSimpleGraph() "(5,3,((null),(4,2,2)))"; DataSet>> targetDegrees = directedSimpleGraph - .run(new EdgeTargetDegrees()); + .run(new EdgeTargetDegrees<>()); TestBaseUtils.compareResultAsText(targetDegrees.collect(), expectedResult); } @@ -62,7 +62,7 @@ public void testWithSimpleGraph() public void testWithRMatGraph() throws Exception { DataSet>> targetDegrees = directedRMatGraph(10, 16) - .run(new EdgeTargetDegrees()); + .run(new EdgeTargetDegrees<>()); Checksum checksum = new ChecksumHashCode>>() .run(targetDegrees) diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegreesTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegreesTest.java index 521428268fd4b..91f354f2eef75 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegreesTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegreesTest.java @@ -43,7 +43,7 @@ public class VertexDegreesTest public void testWithSimpleDirectedGraph() throws Exception { DataSet> degrees = directedSimpleGraph - .run(new VertexDegrees()); + .run(new VertexDegrees<>()); String expectedResult = "(0,(2,2,0))\n" + @@ -60,7 +60,7 @@ 
public void testWithSimpleDirectedGraph() public void testWithSimpleUndirectedGraph() throws Exception { DataSet> degrees = undirectedSimpleGraph - .run(new VertexDegrees()); + .run(new VertexDegrees<>()); String expectedResult = "(0,(2,2,2))\n" + @@ -100,7 +100,7 @@ public void testWithEmptyGraph() public void testWithRMatGraph() throws Exception { DataSet> degrees = directedRMatGraph(10, 16) - .run(new VertexDegrees()); + .run(new VertexDegrees<>()); Checksum checksum = new ChecksumHashCode>() .run(degrees) diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePairTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePairTest.java index 5f492e4eecfcb..1cae2e77f17f7 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePairTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePairTest.java @@ -59,7 +59,7 @@ public void testWithSimpleGraph() "(5,3,((null),1,4))"; DataSet>> degreePairOnSourceId = undirectedSimpleGraph - .run(new EdgeDegreePair()); + .run(new EdgeDegreePair<>()); TestBaseUtils.compareResultAsText(degreePairOnSourceId.collect(), expectedResult); @@ -74,7 +74,7 @@ public void testWithSimpleGraph() public void testWithRMatGraph() throws Exception { DataSet>> degreePairOnSourceId = undirectedRMatGraph(10, 16) - .run(new EdgeDegreePair()); + .run(new EdgeDegreePair<>()); Checksum checksumOnSourceId = new ChecksumHashCode>>() .run(degreePairOnSourceId) diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegreeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegreeTest.java index 393220df956af..2d8b2e3071dea 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegreeTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegreeTest.java @@ -59,7 +59,7 @@ public void testWithSimpleGraph() "(5,3,((null),1))"; DataSet>> sourceDegreeOnSourceId = undirectedSimpleGraph - .run(new EdgeSourceDegree()); + .run(new EdgeSourceDegree<>()); TestBaseUtils.compareResultAsText(sourceDegreeOnSourceId.collect(), expectedResult); @@ -74,7 +74,7 @@ public void testWithSimpleGraph() public void testWithRMatGraph() throws Exception { DataSet>> sourceDegreeOnSourceId = undirectedRMatGraph(10, 16) - .run(new EdgeSourceDegree()); + .run(new EdgeSourceDegree<>()); Checksum checksumOnSourceId = new ChecksumHashCode>>() .run(sourceDegreeOnSourceId) diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegreeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegreeTest.java index 782296a5ff6cb..a7c88a1f0f1a4 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegreeTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegreeTest.java @@ -59,7 +59,7 @@ public void testWithSimpleGraph() "(5,3,((null),4))"; DataSet>> targetDegreeOnTargetId = undirectedSimpleGraph - .run(new EdgeTargetDegree()); + .run(new EdgeTargetDegree<>()); 
TestBaseUtils.compareResultAsText(targetDegreeOnTargetId.collect(), expectedResult); @@ -74,7 +74,7 @@ public void testWithSimpleGraph() public void testWithRMatGraph() throws Exception { DataSet>> targetDegreeOnTargetId = undirectedRMatGraph(10, 16) - .run(new EdgeSourceDegree()); + .run(new EdgeSourceDegree<>()); Checksum checksumOnTargetId = new ChecksumHashCode>>() .run(targetDegreeOnTargetId) diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegreeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegreeTest.java index 192782d154171..bc76bffba377f 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegreeTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegreeTest.java @@ -50,7 +50,7 @@ public void testWithSimpleGraph() "(5,1)"; DataSet> degreeOnSourceId = undirectedSimpleGraph - .run(new VertexDegree()); + .run(new VertexDegree<>()); TestBaseUtils.compareResultAsText(degreeOnSourceId.collect(), expectedResult); @@ -67,7 +67,7 @@ public void testWithCompleteGraph() long expectedDegree = completeGraphVertexCount - 1; DataSet> degreeOnSourceId = completeGraph - .run(new VertexDegree()); + .run(new VertexDegree<>()); for (Vertex vertex : degreeOnSourceId.collect()) { assertEquals(expectedDegree, vertex.getValue().getValue()); @@ -109,7 +109,7 @@ public void testWithEmptyGraph() public void testWithRMatGraph() throws Exception { DataSet> degreeOnSourceId = undirectedRMatGraph(10, 16) - .run(new VertexDegree()); + .run(new VertexDegree<>()); Checksum checksumOnSourceId = new ChecksumHashCode>() .run(degreeOnSourceId) diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegreeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegreeTest.java index f03d82c0aca39..51e7712f5893a 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegreeTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegreeTest.java @@ -24,7 +24,6 @@ import org.apache.flink.graph.library.metric.ChecksumHashCode; import org.apache.flink.test.util.TestBaseUtils; import org.apache.flink.types.IntValue; -import org.apache.flink.types.LongValue; import org.apache.flink.types.NullValue; import org.junit.Test; @@ -41,7 +40,7 @@ public class MaximumDegreeTest public void testWithSimpleGraph() throws Exception { Graph graph = undirectedSimpleGraph - .run(new MaximumDegree(3)); + .run(new MaximumDegree<>(3)); String expectedVerticesResult = "(0,(null))\n" + @@ -67,8 +66,8 @@ public void testWithSimpleGraph() public void testWithRMatGraph() throws Exception { Checksum checksum = undirectedRMatGraph(10, 16) - .run(new MaximumDegree(16)) - .run(new ChecksumHashCode()) + .run(new MaximumDegree<>(16)) + .run(new ChecksumHashCode<>()) .execute(); assertEquals(805, checksum.getCount()); diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/directed/SimplifyTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/directed/SimplifyTest.java index a3aad4b2d3115..751d030027fe8 100644 --- 
a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/directed/SimplifyTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/directed/SimplifyTest.java @@ -70,7 +70,7 @@ public void test() "(1,0,(null))"; Graph simpleGraph = graph - .run(new Simplify()); + .run(new Simplify<>()); TestBaseUtils.compareResultAsText(simpleGraph.getEdges().collect(), expectedResult); } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/undirected/SimplifyTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/undirected/SimplifyTest.java index 6ff4292aa4f6d..68b4e0c3cbedc 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/undirected/SimplifyTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/undirected/SimplifyTest.java @@ -71,7 +71,7 @@ public void testWithFullFlip() "(2,0,(null))"; Graph simpleGraph = graph - .run(new Simplify(false)); + .run(new Simplify<>(false)); TestBaseUtils.compareResultAsText(simpleGraph.getEdges().collect(), expectedResult); } @@ -84,7 +84,7 @@ public void testWithClipAndFlip() "(1,0,(null))"; Graph simpleGraph = graph - .run(new Simplify(true)); + .run(new Simplify<>(true)); TestBaseUtils.compareResultAsText(simpleGraph.getEdges().collect(), expectedResult); } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CirculantGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CirculantGraphTest.java index b7197a42443e8..79c3468f2b5e7 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CirculantGraphTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CirculantGraphTest.java @@ -19,9 +19,7 @@ package org.apache.flink.graph.generator; import org.apache.flink.api.java.io.DiscardingOutputFormat; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; -import org.apache.flink.graph.Vertex; import org.apache.flink.types.LongValue; import org.apache.flink.types.NullValue; @@ -82,8 +80,8 @@ public void testParallelism() .setParallelism(parallelism) .generate(); - graph.getVertices().output(new DiscardingOutputFormat>()); - graph.getEdges().output(new DiscardingOutputFormat>()); + graph.getVertices().output(new DiscardingOutputFormat<>()); + graph.getEdges().output(new DiscardingOutputFormat<>()); TestUtils.verifyParallelism(env, parallelism); } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CompleteGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CompleteGraphTest.java index 1791f2ec5211d..45446b39f631c 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CompleteGraphTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CompleteGraphTest.java @@ -19,9 +19,7 @@ package org.apache.flink.graph.generator; import org.apache.flink.api.java.io.DiscardingOutputFormat; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; -import org.apache.flink.graph.Vertex; import org.apache.flink.types.LongValue; import org.apache.flink.types.NullValue; @@ -80,8 +78,8 @@ public void testParallelism() .setParallelism(parallelism) .generate(); - graph.getVertices().output(new DiscardingOutputFormat>()); - graph.getEdges().output(new DiscardingOutputFormat>()); + 
graph.getVertices().output(new DiscardingOutputFormat<>()); + graph.getEdges().output(new DiscardingOutputFormat<>()); TestUtils.verifyParallelism(env, parallelism); } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CycleGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CycleGraphTest.java index e4e296063b526..dd0912aa6d7c6 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CycleGraphTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CycleGraphTest.java @@ -19,9 +19,7 @@ package org.apache.flink.graph.generator; import org.apache.flink.api.java.io.DiscardingOutputFormat; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; -import org.apache.flink.graph.Vertex; import org.apache.flink.types.LongValue; import org.apache.flink.types.NullValue; @@ -79,8 +77,8 @@ public void testParallelism() .setParallelism(parallelism) .generate(); - graph.getVertices().output(new DiscardingOutputFormat>()); - graph.getEdges().output(new DiscardingOutputFormat>()); + graph.getVertices().output(new DiscardingOutputFormat<>()); + graph.getEdges().output(new DiscardingOutputFormat<>()); TestUtils.verifyParallelism(env, parallelism); } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EchoGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EchoGraphTest.java index fc64d62848d9e..5d8bfc4a61007 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EchoGraphTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EchoGraphTest.java @@ -19,9 +19,7 @@ package org.apache.flink.graph.generator; import org.apache.flink.api.java.io.DiscardingOutputFormat; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; -import org.apache.flink.graph.Vertex; import org.apache.flink.types.LongValue; import org.apache.flink.types.NullValue; @@ -124,8 +122,8 @@ public void testParallelism() .setParallelism(parallelism) .generate(); - graph.getVertices().output(new DiscardingOutputFormat>()); - graph.getEdges().output(new DiscardingOutputFormat>()); + graph.getVertices().output(new DiscardingOutputFormat<>()); + graph.getEdges().output(new DiscardingOutputFormat<>()); TestUtils.verifyParallelism(env, parallelism); } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EmptyGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EmptyGraphTest.java index 939d871aef2c2..3590922091979 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EmptyGraphTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EmptyGraphTest.java @@ -19,9 +19,7 @@ package org.apache.flink.graph.generator; import org.apache.flink.api.java.io.DiscardingOutputFormat; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; -import org.apache.flink.graph.Vertex; import org.apache.flink.types.LongValue; import org.apache.flink.types.NullValue; @@ -74,8 +72,8 @@ public void testParallelism() .setParallelism(parallelism) .generate(); - graph.getVertices().output(new DiscardingOutputFormat>()); - graph.getEdges().output(new DiscardingOutputFormat>()); + graph.getVertices().output(new DiscardingOutputFormat<>()); + graph.getEdges().output(new DiscardingOutputFormat<>()); 
TestUtils.verifyParallelism(env, parallelism); } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/GridGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/GridGraphTest.java index c40d456f84f46..7281770e6d34d 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/GridGraphTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/GridGraphTest.java @@ -19,9 +19,7 @@ package org.apache.flink.graph.generator; import org.apache.flink.api.java.io.DiscardingOutputFormat; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; -import org.apache.flink.graph.Vertex; import org.apache.flink.types.LongValue; import org.apache.flink.types.NullValue; @@ -89,8 +87,8 @@ public void testParallelism() .setParallelism(parallelism) .generate(); - graph.getVertices().output(new DiscardingOutputFormat>()); - graph.getEdges().output(new DiscardingOutputFormat>()); + graph.getVertices().output(new DiscardingOutputFormat<>()); + graph.getEdges().output(new DiscardingOutputFormat<>()); TestUtils.verifyParallelism(env, parallelism); } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/HypercubeGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/HypercubeGraphTest.java index 05f84cd2d7bdb..3ecd4a446c62c 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/HypercubeGraphTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/HypercubeGraphTest.java @@ -19,9 +19,7 @@ package org.apache.flink.graph.generator; import org.apache.flink.api.java.io.DiscardingOutputFormat; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; -import org.apache.flink.graph.Vertex; import org.apache.flink.types.LongValue; import org.apache.flink.types.NullValue; @@ -81,8 +79,8 @@ public void testParallelism() .setParallelism(parallelism) .generate(); - graph.getVertices().output(new DiscardingOutputFormat>()); - graph.getEdges().output(new DiscardingOutputFormat>()); + graph.getVertices().output(new DiscardingOutputFormat<>()); + graph.getEdges().output(new DiscardingOutputFormat<>()); TestUtils.verifyParallelism(env, parallelism); } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/PathGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/PathGraphTest.java index 19b0f257e316b..6817e92a7639f 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/PathGraphTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/PathGraphTest.java @@ -19,9 +19,7 @@ package org.apache.flink.graph.generator; import org.apache.flink.api.java.io.DiscardingOutputFormat; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; -import org.apache.flink.graph.Vertex; import org.apache.flink.types.LongValue; import org.apache.flink.types.NullValue; @@ -79,8 +77,8 @@ public void testParallelism() .setParallelism(parallelism) .generate(); - graph.getVertices().output(new DiscardingOutputFormat>()); - graph.getEdges().output(new DiscardingOutputFormat>()); + graph.getVertices().output(new DiscardingOutputFormat<>()); + graph.getEdges().output(new DiscardingOutputFormat<>()); TestUtils.verifyParallelism(env, parallelism); } diff --git 
a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/RMatGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/RMatGraphTest.java index 920fc4eabfdfe..5f18a73f89a0b 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/RMatGraphTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/RMatGraphTest.java @@ -19,9 +19,7 @@ package org.apache.flink.graph.generator; import org.apache.flink.api.java.io.DiscardingOutputFormat; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; -import org.apache.flink.graph.Vertex; import org.apache.flink.graph.generator.random.JDKRandomGeneratorFactory; import org.apache.flink.graph.generator.random.RandomGenerableFactory; import org.apache.flink.types.LongValue; @@ -66,8 +64,8 @@ public void testParallelism() .setParallelism(parallelism) .generate(); - graph.getVertices().output(new DiscardingOutputFormat>()); - graph.getEdges().output(new DiscardingOutputFormat>()); + graph.getVertices().output(new DiscardingOutputFormat<>()); + graph.getEdges().output(new DiscardingOutputFormat<>()); TestUtils.verifyParallelism(env, parallelism); } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/SingletonEdgeGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/SingletonEdgeGraphTest.java index 045354de16020..f78b7ff972a56 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/SingletonEdgeGraphTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/SingletonEdgeGraphTest.java @@ -19,9 +19,7 @@ package org.apache.flink.graph.generator; import org.apache.flink.api.java.io.DiscardingOutputFormat; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; -import org.apache.flink.graph.Vertex; import org.apache.flink.types.LongValue; import org.apache.flink.types.NullValue; @@ -80,8 +78,8 @@ public void testParallelism() .setParallelism(parallelism) .generate(); - graph.getVertices().output(new DiscardingOutputFormat>()); - graph.getEdges().output(new DiscardingOutputFormat>()); + graph.getVertices().output(new DiscardingOutputFormat<>()); + graph.getEdges().output(new DiscardingOutputFormat<>()); TestUtils.verifyParallelism(env, parallelism); } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/StarGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/StarGraphTest.java index ee9df7493dbaa..9e9da5f1cc374 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/StarGraphTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/StarGraphTest.java @@ -19,9 +19,7 @@ package org.apache.flink.graph.generator; import org.apache.flink.api.java.io.DiscardingOutputFormat; -import org.apache.flink.graph.Edge; import org.apache.flink.graph.Graph; -import org.apache.flink.graph.Vertex; import org.apache.flink.types.LongValue; import org.apache.flink.types.NullValue; @@ -81,8 +79,8 @@ public void testParallelism() .setParallelism(parallelism) .generate(); - graph.getVertices().output(new DiscardingOutputFormat>()); - graph.getEdges().output(new DiscardingOutputFormat>()); + graph.getVertices().output(new DiscardingOutputFormat<>()); + graph.getEdges().output(new DiscardingOutputFormat<>()); TestUtils.verifyParallelism(env, parallelism); } diff 
--git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSACompilerTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSACompilerTest.java index 70a2d158a13b9..c94e1692c99a9 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSACompilerTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSACompilerTest.java @@ -60,7 +60,7 @@ public void testGSACompiler() { // compose test program DataSet> edges = env.fromElements(new Tuple3<>( - 1L, 2L, NullValue.getInstance())).map(new Tuple3ToEdgeMap()); + 1L, 2L, NullValue.getInstance())).map(new Tuple3ToEdgeMap<>()); Graph graph = Graph.fromDataSet(edges, new InitVertices(), env); @@ -68,7 +68,7 @@ public void testGSACompiler() { new GatherNeighborIds(), new SelectMinId(), new UpdateComponentId(), 100).getVertices(); - result.output(new DiscardingOutputFormat>()); + result.output(new DiscardingOutputFormat<>()); Plan p = env.createProgramPlan("GSA Connected Components"); OptimizedPlan op = compileNoStats(p); diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSATranslationTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSATranslationTest.java index 0dfbcb7b8e34f..0501820dbab0c 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSATranslationTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSATranslationTest.java @@ -72,7 +72,7 @@ public void testTranslation() { // ------------ construct the test program ------------------ DataSet> edges = env.fromElements(new Tuple3<>( - 1L, 2L, NullValue.getInstance())).map(new Tuple3ToEdgeMap()); + 1L, 2L, NullValue.getInstance())).map(new Tuple3ToEdgeMap<>()); Graph graph = Graph.fromDataSet(edges, new InitVertices(), env); @@ -89,7 +89,7 @@ public void testTranslation() { new GatherNeighborIds(), new SelectMinId(), new UpdateComponentId(), NUM_ITERATIONS, parameters).getVertices(); - result.output(new DiscardingOutputFormat>()); + result.output(new DiscardingOutputFormat<>()); // ------------- validate the java program ---------------- diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/ConnectedComponentsWithRandomisedEdgesITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/ConnectedComponentsWithRandomisedEdgesITCase.java index 3e76005574c26..8b8927ee751de 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/ConnectedComponentsWithRandomisedEdgesITCase.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/ConnectedComponentsWithRandomisedEdgesITCase.java @@ -62,7 +62,7 @@ protected void testProgram() throws Exception { Graph graph = Graph.fromDataSet(initialVertices, edges, env); - DataSet> result = graph.run(new ConnectedComponents(100)); + DataSet> result = graph.run(new ConnectedComponents<>(100)); result.writeAsCsv(resultPath, "\n", " "); env.execute(); diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficientTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficientTest.java index a0aba65b1d18e..626b754b37400 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficientTest.java +++ 
b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficientTest.java @@ -25,7 +25,6 @@ import org.apache.flink.test.util.TestBaseUtils; import org.apache.flink.types.IntValue; import org.apache.flink.types.LongValue; -import org.apache.flink.types.NullValue; import org.apache.commons.math3.util.CombinatoricsUtils; import org.junit.Test; @@ -52,7 +51,7 @@ public void testSimpleGraph() "(5,1,0)"; DataSet> cc = directedSimpleGraph - .run(new LocalClusteringCoefficient()); + .run(new LocalClusteringCoefficient<>()); TestBaseUtils.compareResultAsText(cc.collect(), expectedResult); } @@ -64,7 +63,7 @@ public void testCompleteGraph() long expectedTriangleCount = 2 * CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2); DataSet> cc = completeGraph - .run(new LocalClusteringCoefficient()); + .run(new LocalClusteringCoefficient<>()); List> results = cc.collect(); @@ -80,7 +79,7 @@ public void testCompleteGraph() public void testRMatGraph() throws Exception { DataSet> cc = directedRMatGraph(10, 16) - .run(new LocalClusteringCoefficient()); + .run(new LocalClusteringCoefficient<>()); Checksum checksum = new org.apache.flink.graph.asm.dataset.ChecksumHashCode>() .run(cc) diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriangleListingTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriangleListingTest.java index e8f8659b31087..2d02907b55c85 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriangleListingTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriangleListingTest.java @@ -95,7 +95,7 @@ public void testCompleteGraph() long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2) / 3; DataSet> tl = completeGraph - .run(new TriangleListing()); + .run(new TriangleListing<>()); List> results = tl.collect(); diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficientTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficientTest.java index e00669b818b6f..d77707e925637 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficientTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficientTest.java @@ -26,7 +26,6 @@ import org.apache.flink.test.util.TestBaseUtils; import org.apache.flink.types.IntValue; import org.apache.flink.types.LongValue; -import org.apache.flink.types.NullValue; import org.apache.commons.math3.util.CombinatoricsUtils; import org.junit.Test; @@ -53,7 +52,7 @@ public void testSimpleGraph() "(5,1,0)"; DataSet> cc = undirectedSimpleGraph - .run(new LocalClusteringCoefficient()); + .run(new LocalClusteringCoefficient<>()); TestBaseUtils.compareResultAsText(cc.collect(), expectedResult); } @@ -65,7 +64,7 @@ public void testCompleteGraph() long expectedTriangleCount = CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2); DataSet> cc = completeGraph - .run(new LocalClusteringCoefficient()); + .run(new LocalClusteringCoefficient<>()); List> results = cc.collect(); @@ -81,7 +80,7 @@ public void testCompleteGraph() public void 
testRMatGraph() throws Exception { DataSet> cc = undirectedRMatGraph(10, 16) - .run(new LocalClusteringCoefficient()); + .run(new LocalClusteringCoefficient<>()); Checksum checksum = new ChecksumHashCode>() .run(cc) diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriangleListingTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriangleListingTest.java index 6af1b013f123f..2e34945b58fe1 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriangleListingTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriangleListingTest.java @@ -95,7 +95,7 @@ public void testCompleteGraph() long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2) / 3; DataSet> tl = completeGraph - .run(new TriangleListing()); + .run(new TriangleListing<>()); Checksum checksum = new ChecksumHashCode>() .run(tl) diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/linkanalysis/HITSTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/linkanalysis/HITSTest.java index 44711051d5bdc..2c942605bdf70 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/linkanalysis/HITSTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/linkanalysis/HITSTest.java @@ -115,7 +115,7 @@ public void testWithCompleteGraph() public void testWithRMatGraph() throws Exception { DataSet> hits = directedRMatGraph(10, 16) - .run(new HITS(0.000001)); + .run(new HITS<>(0.000001)); Map> results = new HashMap<>(); for (Result result : new Collect>().run(hits).execute()) { diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/ChecksumHashCodeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/ChecksumHashCodeTest.java index 9b1d18cb4f90e..4deb491d85b1a 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/ChecksumHashCodeTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/ChecksumHashCodeTest.java @@ -41,7 +41,7 @@ public void testSmallGraph() throws Exception { env); Checksum checksum = graph - .run(new ChecksumHashCode()) + .run(new ChecksumHashCode<>()) .execute(); assertEquals(checksum.getCount(), 12L); diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/AdamicAdarTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/AdamicAdarTest.java index aa259a2a98155..5afd0ee796d11 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/AdamicAdarTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/AdamicAdarTest.java @@ -49,7 +49,7 @@ public class AdamicAdarTest public void testSimpleGraph() throws Exception { DataSet> aa = undirectedSimpleGraph - .run(new AdamicAdar()); + .run(new AdamicAdar<>()); String expectedResult = "(0,1," + ilog[2] + ")\n" + @@ -105,7 +105,7 @@ public void testCompleteGraph() float expectedScore = (completeGraphVertexCount - 2) / (float) Math.log(completeGraphVertexCount - 1); DataSet> aa = completeGraph - .run(new AdamicAdar()); + .run(new AdamicAdar<>()); for (Result result : aa.collect()) { 
assertEquals(expectedScore, result.getAdamicAdarScore().getValue(), 0.00001); @@ -116,7 +116,7 @@ public void testCompleteGraph() public void testRMatGraph() throws Exception { DataSet> aa = undirectedRMatGraph(8, 8) - .run(new AdamicAdar()); + .run(new AdamicAdar<>()); assertEquals(13954, aa.count()); } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/JaccardIndexTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/JaccardIndexTest.java index d8cd29830013c..2e59f934ba4f9 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/JaccardIndexTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/JaccardIndexTest.java @@ -42,7 +42,7 @@ public class JaccardIndexTest public void testSimpleGraph() throws Exception { DataSet> ji = undirectedSimpleGraph - .run(new JaccardIndex()); + .run(new JaccardIndex<>()); String expectedResult = "(0,1,1,4)\n" + diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelCompilerTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelCompilerTest.java index 71937db0c9e83..95dd96e0d24c6 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelCompilerTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelCompilerTest.java @@ -61,7 +61,7 @@ public void testPregelCompiler() { DataSet> initialVertices = env.fromElements( new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L)) - .map(new Tuple2ToVertexMap()); + .map(new Tuple2ToVertexMap<>()); DataSet> edges = env.fromElements(new Tuple2<>(1L, 2L)) .map(new MapFunction, Edge>() { @@ -76,7 +76,7 @@ public Edge map(Tuple2 edge) { DataSet> result = graph.runVertexCentricIteration( new CCCompute(), null, 100).getVertices(); - result.output(new DiscardingOutputFormat>()); + result.output(new DiscardingOutputFormat<>()); } Plan p = env.createProgramPlan("Pregel Connected Components"); @@ -126,7 +126,7 @@ public void testPregelCompilerWithBroadcastVariable() { DataSet> initialVertices = env.fromElements( new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L)) - .map(new Tuple2ToVertexMap()); + .map(new Tuple2ToVertexMap<>()); DataSet> edges = env.fromElements(new Tuple2<>(1L, 2L)) .map(new MapFunction, Edge>() { @@ -145,7 +145,7 @@ public Edge map(Tuple2 edge) { new CCCompute(), null, 100, parameters) .getVertices(); - result.output(new DiscardingOutputFormat>()); + result.output(new DiscardingOutputFormat<>()); } Plan p = env.createProgramPlan("Pregel Connected Components"); @@ -192,7 +192,7 @@ public void testPregelWithCombiner() { DataSet> initialVertices = env.fromElements( new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L)) - .map(new Tuple2ToVertexMap()); + .map(new Tuple2ToVertexMap<>()); DataSet> edges = env.fromElements(new Tuple2<>(1L, 2L)) .map(new MapFunction, Edge>() { @@ -207,7 +207,7 @@ public Edge map(Tuple2 edge) { DataSet> result = graph.runVertexCentricIteration( new CCCompute(), new CCCombiner(), 100).getVertices(); - result.output(new DiscardingOutputFormat>()); + result.output(new DiscardingOutputFormat<>()); } Plan p = env.createProgramPlan("Pregel Connected Components"); diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelTranslationTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelTranslationTest.java index 8084e71193d31..0e79f65e2b0db 100644 
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelTranslationTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelTranslationTest.java @@ -88,7 +88,7 @@ public Tuple3 map( result = graph.runVertexCentricIteration(new MyCompute(), null, NUM_ITERATIONS, parameters).getVertices(); - result.output(new DiscardingOutputFormat>()); + result.output(new DiscardingOutputFormat<>()); // ------------- validate the java program ---------------- diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelCompilerTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelCompilerTest.java index 1c6d08ebcecd0..901276c3338b8 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelCompilerTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelCompilerTest.java @@ -62,7 +62,7 @@ public void testSpargelCompiler() { // compose test program DataSet> initialVertices = env.fromElements( new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L)) - .map(new Tuple2ToVertexMap()); + .map(new Tuple2ToVertexMap<>()); DataSet> edges = env.fromElements(new Tuple2<>(1L, 2L)) .map(new MapFunction, Edge>() { @@ -75,11 +75,11 @@ public Edge map(Tuple2 edge) { Graph graph = Graph.fromDataSet(initialVertices, edges, env); DataSet> result = graph.runScatterGatherIteration( - new ConnectedComponents.CCMessenger(BasicTypeInfo.LONG_TYPE_INFO), - new ConnectedComponents.CCUpdater(), 100) + new ConnectedComponents.CCMessenger<>(BasicTypeInfo.LONG_TYPE_INFO), + new ConnectedComponents.CCUpdater<>(), 100) .getVertices(); - result.output(new DiscardingOutputFormat>()); + result.output(new DiscardingOutputFormat<>()); Plan p = env.createProgramPlan("Spargel Connected Components"); OptimizedPlan op = compileNoStats(p); @@ -136,7 +136,7 @@ public void testSpargelCompilerWithBroadcastVariable() { DataSet> initialVertices = env.fromElements( new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L)) - .map(new Tuple2ToVertexMap()); + .map(new Tuple2ToVertexMap<>()); DataSet> edges = env.fromElements(new Tuple2<>(1L, 2L)) .map(new MapFunction, Edge>() { @@ -153,11 +153,11 @@ public Edge map(Tuple2 edge) { parameters.addBroadcastSetForGatherFunction(broadcastVariableName, bcVar); DataSet> result = graph.runScatterGatherIteration( - new ConnectedComponents.CCMessenger(BasicTypeInfo.LONG_TYPE_INFO), - new ConnectedComponents.CCUpdater(), 100) + new ConnectedComponents.CCMessenger<>(BasicTypeInfo.LONG_TYPE_INFO), + new ConnectedComponents.CCUpdater<>(), 100) .getVertices(); - result.output(new DiscardingOutputFormat>()); + result.output(new DiscardingOutputFormat<>()); Plan p = env.createProgramPlan("Spargel Connected Components"); OptimizedPlan op = compileNoStats(p); diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelTranslationTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelTranslationTest.java index d209a2dccfea0..cbbf0c4e76800 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelTranslationTest.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelTranslationTest.java @@ -91,7 +91,7 @@ public Tuple3 map( result = graph.runScatterGatherIteration(new MessageFunctionNoEdgeValue(), new UpdateFunction(), NUM_ITERATIONS, parameters).getVertices(); - result.output(new DiscardingOutputFormat>()); + 
result.output(new DiscardingOutputFormat<>()); // ------------- validate the java program ---------------- @@ -154,7 +154,7 @@ public Tuple3 map( result = graph.runScatterGatherIteration(new MessageFunctionNoEdgeValue(), new UpdateFunction(), NUM_ITERATIONS, parameters).getVertices(); - result.output(new DiscardingOutputFormat>()); + result.output(new DiscardingOutputFormat<>()); // ------------- validate the java program ---------------- diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/CollectionModeSuperstepITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/CollectionModeSuperstepITCase.java index 2454b38b45625..8b1ab91b0477e 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/CollectionModeSuperstepITCase.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/CollectionModeSuperstepITCase.java @@ -21,7 +21,6 @@ import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.java.io.DiscardingOutputFormat; -import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.graph.Graph; import org.apache.flink.graph.Vertex; import org.apache.flink.graph.spargel.GatherFunction; @@ -53,8 +52,9 @@ public void testProgram() throws Exception { new MessageFunction(), new UpdateFunction(), 10); result.getVertices().map( - new VertexToTuple2Map()).output( - new DiscardingOutputFormat>()); + new VertexToTuple2Map<>()).output( + new DiscardingOutputFormat<>()); + env.execute(); } diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/ScatterGatherConfigurationITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/ScatterGatherConfigurationITCase.java index 139ff1ef424df..94d981c692be0 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/ScatterGatherConfigurationITCase.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/ScatterGatherConfigurationITCase.java @@ -137,7 +137,7 @@ public void testDefaultConfiguration() throws Exception { Graph res = graph.runScatterGatherIteration( new MessageFunctionDefault(), new UpdateFunctionDefault(), 5); - DataSet> data = res.getVertices().map(new VertexToTuple2Map()); + DataSet> data = res.getVertices().map(new VertexToTuple2Map<>()); List> result = data.collect(); expectedResult = "1,6\n" + diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesWithExceptionITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesWithExceptionITCase.java index 8b726f42b1359..f01daec247b06 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesWithExceptionITCase.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesWithExceptionITCase.java @@ -20,14 +20,12 @@ import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.java.io.DiscardingOutputFormat; -import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.configuration.ConfigConstants; import org.apache.flink.configuration.Configuration; import org.apache.flink.graph.Graph; import org.apache.flink.graph.test.TestGraphUtils; import org.apache.flink.runtime.minicluster.LocalFlinkMiniCluster; import org.apache.flink.test.util.TestEnvironment; -import org.apache.flink.types.LongValue; import 
org.apache.flink.util.TestLogger; import org.junit.AfterClass; @@ -77,7 +75,7 @@ public void testOutDegreesInvalidEdgeSrcId() throws Exception { TestGraphUtils.getLongLongEdgeInvalidSrcData(env), env); try { - graph.outDegrees().output(new DiscardingOutputFormat>()); + graph.outDegrees().output(new DiscardingOutputFormat<>()); env.execute(); fail("graph.outDegrees() did not fail."); @@ -100,7 +98,7 @@ public void testInDegreesInvalidEdgeTrgId() throws Exception { TestGraphUtils.getLongLongEdgeInvalidTrgData(env), env); try { - graph.inDegrees().output(new DiscardingOutputFormat>()); + graph.inDegrees().output(new DiscardingOutputFormat<>()); env.execute(); fail("graph.inDegrees() did not fail."); @@ -123,7 +121,7 @@ public void testGetDegreesInvalidEdgeTrgId() throws Exception { TestGraphUtils.getLongLongEdgeInvalidTrgData(env), env); try { - graph.getDegrees().output(new DiscardingOutputFormat>()); + graph.getDegrees().output(new DiscardingOutputFormat<>()); env.execute(); fail("graph.getDegrees() did not fail."); @@ -146,7 +144,7 @@ public void testGetDegreesInvalidEdgeSrcId() throws Exception { TestGraphUtils.getLongLongEdgeInvalidSrcData(env), env); try { - graph.getDegrees().output(new DiscardingOutputFormat>()); + graph.getDegrees().output(new DiscardingOutputFormat<>()); env.execute(); fail("graph.getDegrees() did not fail."); @@ -169,7 +167,7 @@ public void testGetDegreesInvalidEdgeSrcTrgId() throws Exception { TestGraphUtils.getLongLongEdgeInvalidSrcTrgData(env), env); try { - graph.getDegrees().output(new DiscardingOutputFormat>()); + graph.getDegrees().output(new DiscardingOutputFormat<>()); env.execute(); fail("graph.getDegrees() did not fail."); diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationITCase.java index 991a4209e5e78..eb2fe5bcae611 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationITCase.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationITCase.java @@ -122,7 +122,7 @@ public void testValidate() throws Exception { DataSet> edges = TestGraphUtils.getLongLongEdgeData(env); Graph graph = Graph.fromDataSet(vertices, edges, env); - Boolean valid = graph.validate(new InvalidVertexIdsValidator()); + Boolean valid = graph.validate(new InvalidVertexIdsValidator<>()); //env.fromElements(result).writeAsText(resultPath); @@ -144,7 +144,7 @@ public void testValidateWithInvalidIds() throws Exception { DataSet> edges = TestGraphUtils.getLongLongEdgeData(env); Graph graph = Graph.fromDataSet(vertices, edges, env); - Boolean valid = graph.validate(new InvalidVertexIdsValidator()); + Boolean valid = graph.validate(new InvalidVertexIdsValidator<>()); String res = valid.toString(); //env.fromElements(valid); List result = new LinkedList<>(); diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithEdgesITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithEdgesITCase.java index 43ff12467d25c..e3909ca182028 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithEdgesITCase.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithEdgesITCase.java @@ -62,7 +62,7 @@ public void testWithEdgesInputDataset() throws Exception { 
TestGraphUtils.getLongLongEdgeData(env), env); Graph res = graph.joinWithEdges(graph.getEdges() - .map(new EdgeToTuple3Map()), new AddValuesMapper()); + .map(new EdgeToTuple3Map<>()), new AddValuesMapper()); DataSet> data = res.getEdges(); List> result = data.collect(); @@ -90,7 +90,7 @@ public void testWithLessElements() throws Exception { TestGraphUtils.getLongLongEdgeData(env), env); Graph res = graph.joinWithEdges(graph.getEdges().first(3) - .map(new EdgeToTuple3Map()), new AddValuesMapper()); + .map(new EdgeToTuple3Map<>()), new AddValuesMapper()); DataSet> data = res.getEdges(); List> result = data.collect(); diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithVerticesITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithVerticesITCase.java index 181c1a7289e01..fe5c52b05e6c6 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithVerticesITCase.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithVerticesITCase.java @@ -60,7 +60,7 @@ public void testJoinWithVertexSet() throws Exception { TestGraphUtils.getLongLongEdgeData(env), env); Graph res = graph.joinWithVertices(graph.getVertices() - .map(new VertexToTuple2Map()), new AddValuesMapper()); + .map(new VertexToTuple2Map<>()), new AddValuesMapper()); DataSet> data = res.getVertices(); List> result = data.collect(); @@ -86,7 +86,7 @@ public void testWithLessElements() throws Exception { TestGraphUtils.getLongLongEdgeData(env), env); Graph res = graph.joinWithVertices(graph.getVertices().first(3) - .map(new VertexToTuple2Map()), new AddValuesMapper()); + .map(new VertexToTuple2Map<>()), new AddValuesMapper()); DataSet> data = res.getVertices(); List> result = data.collect(); diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesWithExceptionITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesWithExceptionITCase.java index 19e701dddee1a..6733ba10002f4 100644 --- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesWithExceptionITCase.java +++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesWithExceptionITCase.java @@ -84,7 +84,7 @@ public void testGroupReduceOnEdgesInvalidEdgeSrcId() throws Exception { DataSet> verticesWithAllNeighbors = graph.groupReduceOnEdges(new SelectNeighborsValueGreaterThanFour(), EdgeDirection.ALL); - verticesWithAllNeighbors.output(new DiscardingOutputFormat>()); + verticesWithAllNeighbors.output(new DiscardingOutputFormat<>()); env.execute(); fail("Expected an exception."); @@ -110,7 +110,7 @@ public void testGroupReduceOnEdgesInvalidEdgeTrgId() throws Exception { DataSet> verticesWithAllNeighbors = graph.groupReduceOnEdges(new SelectNeighborsValueGreaterThanFour(), EdgeDirection.ALL); - verticesWithAllNeighbors.output(new DiscardingOutputFormat>()); + verticesWithAllNeighbors.output(new DiscardingOutputFormat<>()); env.execute(); fail("Expected an exception."); diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnNeighborsWithExceptionITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnNeighborsWithExceptionITCase.java index d3b97a1e3d10d..1a6087449fdfc 100644 --- 
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnNeighborsWithExceptionITCase.java
@@ -86,7 +86,7 @@ public void testGroupReduceOnNeighborsWithVVInvalidEdgeSrcId() throws Exception
 			DataSet<Tuple2<Long, Long>> verticesWithSumOfOutNeighborValues =
 					graph.groupReduceOnNeighbors(new SumAllNeighbors(), EdgeDirection.ALL);
 
-			verticesWithSumOfOutNeighborValues.output(new DiscardingOutputFormat<Tuple2<Long, Long>>());
+			verticesWithSumOfOutNeighborValues.output(new DiscardingOutputFormat<>());
 			env.execute();
 
 			fail("Expected an exception.");
@@ -113,7 +113,7 @@ public void testGroupReduceOnNeighborsWithVVInvalidEdgeTrgId() throws Exception
 			DataSet<Tuple2<Long, Long>> verticesWithSumOfOutNeighborValues =
 					graph.groupReduceOnNeighbors(new SumAllNeighbors(), EdgeDirection.ALL);
 
-			verticesWithSumOfOutNeighborValues.output(new DiscardingOutputFormat<Tuple2<Long, Long>>());
+			verticesWithSumOfOutNeighborValues.output(new DiscardingOutputFormat<>());
 			env.execute();
 
 			fail("Expected an exception.");
@@ -140,7 +140,7 @@ public void testGroupReduceOnNeighborsInvalidEdgeSrcId() throws Exception {
 			DataSet<Tuple2<Long, Long>> verticesWithSumOfAllNeighborValues =
 					graph.reduceOnNeighbors(new SumNeighbors(), EdgeDirection.ALL);
 
-			verticesWithSumOfAllNeighborValues.output(new DiscardingOutputFormat<Tuple2<Long, Long>>());
+			verticesWithSumOfAllNeighborValues.output(new DiscardingOutputFormat<>());
 			env.execute();
 		} catch (Exception e) {
 			// We expect the job to fail with an exception
@@ -165,7 +165,7 @@ public void testGroupReduceOnNeighborsInvalidEdgeTrgId() throws Exception {
 			DataSet<Tuple2<Long, Long>> verticesWithSumOfAllNeighborValues =
 					graph.reduceOnNeighbors(new SumNeighbors(), EdgeDirection.ALL);
 
-			verticesWithSumOfAllNeighborValues.output(new DiscardingOutputFormat<Tuple2<Long, Long>>());
+			verticesWithSumOfAllNeighborValues.output(new DiscardingOutputFormat<>());
 			env.execute();
 		} catch (Exception e) {
 			// We expect the job to fail with an exception
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/TypeExtractorTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/TypeExtractorTest.java
index 4d9040c033869..cad07c0d5ba22 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/TypeExtractorTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/TypeExtractorTest.java
@@ -58,7 +58,7 @@ public void setUp() throws Exception {
 	public void testMapVerticesType() throws Exception {
 		// test type extraction in mapVertices
-		DataSet<Vertex<Long, Tuple2<Long, Integer>>> outVertices = inputGraph.mapVertices(new VertexMapper<Long>()).getVertices();
+		DataSet<Vertex<Long, Tuple2<Long, Integer>>> outVertices = inputGraph.mapVertices(new VertexMapper<>()).getVertices();
 
 		Assert.assertTrue(new TupleTypeInfo(Vertex.class, BasicTypeInfo.LONG_TYPE_INFO, new TupleTypeInfo<Tuple2<Long, Integer>>(BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO))
 				.equals(outVertices.getType()));
@@ -68,7 +68,7 @@ public void testMapVerticesType() throws Exception {
 	public void testMapEdgesType() throws Exception {
 		// test type extraction in mapEdges
-		DataSet<Edge<Long, Tuple2<Long, Integer>>> outEdges = inputGraph.mapEdges(new EdgeMapper<Long>()).getEdges();
+		DataSet<Edge<Long, Tuple2<Long, Integer>>> outEdges = inputGraph.mapEdges(new EdgeMapper<>()).getEdges();
 
 		Assert.assertTrue(new TupleTypeInfo(Edge.class, BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO, new TupleTypeInfo<Tuple2<Long, Integer>>(BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO))
 				.equals(outEdges.getType()));
@@ -76,7 +76,7 @@ public void testMapEdgesType() throws Exception {
 
 	@Test
 	public void testFromDataSet() throws Exception {
-		DataSet<Vertex<Long, Tuple2<Long, Integer>>> outVertices = Graph.fromDataSet(edges, new VertexInitializer<Long>(), env)
+		DataSet<Vertex<Long, Tuple2<Long, Integer>>> outVertices = Graph.fromDataSet(edges, new VertexInitializer<>(), env)
 				.getVertices();
 
 		Assert.assertTrue(new TupleTypeInfo(Vertex.class, BasicTypeInfo.LONG_TYPE_INFO, new TupleTypeInfo<Tuple2<Long, Integer>>(BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO))
@@ -85,7 +85,7 @@ public void testFromDataSet() throws Exception {
 
 	@Test
 	public void testGroupReduceOnEdges() throws Exception {
-		DataSet<Tuple2<Long, Long>> output = inputGraph.groupReduceOnEdges(new EdgesGroupFunction<Long, Long>(), EdgeDirection.OUT);
+		DataSet<Tuple2<Long, Long>> output = inputGraph.groupReduceOnEdges(new EdgesGroupFunction<>(), EdgeDirection.OUT);
 
 		Assert.assertTrue((new TupleTypeInfo<Tuple2<Long, Long>>(BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO)).equals(output.getType()));
 	}
@@ -113,7 +113,7 @@ private static final class EdgesGroupFunction<K, EV> implements EdgesFunction<K, EV, Tuple2<K, EV>> {
 		public void iterateEdges(Iterable<Tuple2<K, Edge<K, EV>>> edges, Collector<Tuple2<K, EV>> out) throws Exception {
-			out.collect(new Tuple2<K, EV>());
+			out.collect(new Tuple2<>());
 		}
 	}