Skip to content

Commit

Permalink
Reduce the read time in BoundedReadFromUnboundedSourceTest
Browse files Browse the repository at this point in the history
We only wish to demonstrate that we read "some elements" in the test. Do
so in 20ms rather than 200ms.
  • Loading branch information
tgroh committed Oct 7, 2016
1 parent 7e31709 commit f5e0bad
Showing 1 changed file with 1 addition and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@
package com.google.cloud.dataflow.sdk.io;

import static com.google.cloud.dataflow.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
Expand All @@ -33,13 +32,11 @@
import com.google.cloud.dataflow.sdk.transforms.display.DisplayData;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.cloud.dataflow.sdk.values.PCollection;

import org.joda.time.Duration;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
Expand Down Expand Up @@ -132,7 +129,7 @@ private void test(boolean dedup, boolean timeBound) throws Exception {
}
PCollection<KV<Integer, Integer>> output =
timeBound
? p.apply(Read.from(source).withMaxReadTime(Duration.millis(200)))
? p.apply(Read.from(source).withMaxReadTime(Duration.millis(20)))
: p.apply(Read.from(source).withMaxNumRecords(NUM_RECORDS));

// Because some of the NUM_RECORDS elements read are dupes, the final output
Expand Down

0 comments on commit f5e0bad

Please sign in to comment.