Mercurial > hg > openjdk > lambda > jdk
changeset 9521:939c3be6cc86
8015318: Extend Collector with 'finish' operation
Reviewed-by: mduigou
Contributed-by: brian.goetz@oracle.com
line wrap: on
line diff
--- a/src/share/classes/java/util/DoubleSummaryStatistics.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/DoubleSummaryStatistics.java Fri Jun 28 16:26:54 2013 -0400 @@ -25,6 +25,7 @@ package java.util; import java.util.function.DoubleConsumer; +import java.util.stream.Collector; /** * A state object for collecting statistics such as count, min, max, sum, and @@ -35,24 +36,24 @@ * summary statistics on a stream of doubles with: * <pre> {@code * DoubleSummaryStatistics stats = doubleStream.collect(DoubleSummaryStatistics::new, - * DoubleSummaryStatistics::accept, - * DoubleSummaryStatistics::combine); + * DoubleSummaryStatistics::accept, + * DoubleSummaryStatistics::combine); * }</pre> * * <p>{@code DoubleSummaryStatistics} can be used as a - * {@linkplain java.util.stream.Stream#reduce(java.util.function.BinaryOperator) reduction} + * {@linkplain java.util.stream.Stream#collect(Collector) reduction} * target for a {@linkplain java.util.stream.Stream stream}. For example: * * <pre> {@code * DoubleSummaryStatistics stats = people.stream() - * .collect(Collectors.toDoubleSummaryStatistics(Person::getWeight)); + * .collect(Collectors.summarizingDouble(Person::getWeight)); *}</pre> * * This computes, in a single pass, the count of people, as well as the minimum, * maximum, sum, and average of their weights. * * @implNote This implementation is not thread safe. 
However, it is safe to use - * {@link java.util.stream.Collectors#toDoubleSummaryStatistics(java.util.function.ToDoubleFunction) + * {@link java.util.stream.Collectors#summarizingDouble(java.util.function.ToDoubleFunction) * Collectors.toDoubleStatistics()} on a parallel stream, because the parallel * implementation of {@link java.util.stream.Stream#collect Stream.collect()} * provides the necessary partitioning, isolation, and merging of results for @@ -152,7 +153,7 @@ } /** - * Returns the average of values recorded, or zero if no values have been + * Returns the arithmetic mean of values recorded, or zero if no values have been * recorded. The average returned can vary depending upon the order in * which values are recorded. This is due to accumulated rounding error in * addition of values of differing magnitudes. Values sorted by increasing @@ -160,7 +161,7 @@ * value is a {@code NaN} or the sum is at any point a {@code NaN} then the * average will be {@code NaN}. * - * @return the average of values, or zero if none + * @return the arithmetic mean of values, or zero if none */ public final double getAverage() { return getCount() > 0 ? getSum() / getCount() : 0.0d;
--- a/src/share/classes/java/util/IntSummaryStatistics.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/IntSummaryStatistics.java Fri Jun 28 16:26:54 2013 -0400 @@ -25,6 +25,7 @@ package java.util; import java.util.function.IntConsumer; +import java.util.stream.Collector; /** * A state object for collecting statistics such as count, min, max, sum, and @@ -35,24 +36,24 @@ * summary statistics on a stream of ints with: * <pre> {@code * IntSummaryStatistics stats = intStream.collect(IntSummaryStatistics::new, - * IntSummaryStatistics::accept, - * IntSummaryStatistics::combine); + * IntSummaryStatistics::accept, + * IntSummaryStatistics::combine); * }</pre> * * <p>{@code IntSummaryStatistics} can be used as a - * {@linkplain java.util.stream.Stream#reduce(java.util.function.BinaryOperator) reduction} + * {@linkplain java.util.stream.Stream#collect(Collector) reduction} * target for a {@linkplain java.util.stream.Stream stream}. For example: * * <pre> {@code * IntSummaryStatistics stats = people.stream() - * .collect(Collectors.toIntSummaryStatistics(Person::getDependents)); + * .collect(Collectors.summarizingInt(Person::getDependents)); *}</pre> * * This computes, in a single pass, the count of people, as well as the minimum, * maximum, sum, and average of their number of dependents. * * @implNote This implementation is not thread safe. 
However, it is safe to use - * {@link java.util.stream.Collectors#toIntSummaryStatistics(java.util.function.ToIntFunction) + * {@link java.util.stream.Collectors#summarizingInt(java.util.function.ToIntFunction) * Collectors.toIntStatistics()} on a parallel stream, because the parallel * implementation of {@link java.util.stream.Stream#collect Stream.collect()} * provides the necessary partitioning, isolation, and merging of results for @@ -140,10 +141,10 @@ } /** - * Returns the average of values recorded, or zero if no values have been + * Returns the arithmetic mean of values recorded, or zero if no values have been * recorded. * - * @return the average of values, or zero if none + * @return the arithmetic mean of values, or zero if none */ public final double getAverage() { return getCount() > 0 ? (double) getSum() / getCount() : 0.0d;
--- a/src/share/classes/java/util/LongSummaryStatistics.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/LongSummaryStatistics.java Fri Jun 28 16:26:54 2013 -0400 @@ -26,6 +26,7 @@ import java.util.function.IntConsumer; import java.util.function.LongConsumer; +import java.util.stream.Collector; /** * A state object for collecting statistics such as count, min, max, sum, and @@ -36,24 +37,24 @@ * summary statistics on a stream of longs with: * <pre> {@code * LongSummaryStatistics stats = longStream.collect(LongSummaryStatistics::new, - * LongSummaryStatistics::accept, - * LongSummaryStatistics::combine); + * LongSummaryStatistics::accept, + * LongSummaryStatistics::combine); * }</pre> * * <p>{@code LongSummaryStatistics} can be used as a - * {@linkplain java.util.stream.Stream#reduce(java.util.function.BinaryOperator) reduction} + * {@linkplain java.util.stream.Stream#collect(Collector) reduction} * target for a {@linkplain java.util.stream.Stream stream}. For example: * * <pre> {@code * LongSummaryStatistics stats = people.stream() - * .collect(Collectors.toLongSummaryStatistics(Person::getAge)); + * .collect(Collectors.summarizingLong(Person::getAge)); *}</pre> * * This computes, in a single pass, the count of people, as well as the minimum, - * maximum, sum, and average of their ages in milliseconds. + * maximum, sum, and average of their ages. * * @implNote This implementation is not thread safe. 
However, it is safe to use - * {@link java.util.stream.Collectors#toLongSummaryStatistics(java.util.function.ToLongFunction) + * {@link java.util.stream.Collectors#summarizingLong(java.util.function.ToLongFunction) * Collectors.toLongStatistics()} on a parallel stream, because the parallel * implementation of {@link java.util.stream.Stream#collect Stream.collect()} * provides the necessary partitioning, isolation, and merging of results for @@ -152,10 +153,10 @@ } /** - * Returns the average of values recorded, or zero if no values have been + * Returns the arithmetic mean of values recorded, or zero if no values have been * recorded. * - * @return The average of values, or zero if none + * @return The arithmetic mean of values, or zero if none */ public final double getAverage() { return getCount() > 0 ? (double) getSum() / getCount() : 0.0d;
--- a/src/share/classes/java/util/StringJoiner.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/StringJoiner.java Fri Jun 28 16:26:54 2013 -0400 @@ -49,16 +49,17 @@ * <p> * A {@code StringJoiner} may be employed to create formatted output from a * {@link java.util.stream.Stream} using - * {@link java.util.stream.Collectors#toStringJoiner}. For example: + * {@link java.util.stream.Collectors#joining(CharSequence)}. For example: * * <pre> {@code * List<Integer> numbers = Arrays.asList(1, 2, 3, 4); * String commaSeparatedNumbers = numbers.stream() * .map(i -> i.toString()) - * .collect(Collectors.toStringJoiner(", ")).toString(); + * .collect(Collectors.joining(", ")); * }</pre> * - * @see java.util.stream.Collectors#toStringJoiner + * @see java.util.stream.Collectors#joining(CharSequence) + * @see java.util.stream.Collectors#joining(CharSequence, CharSequence, CharSequence) * @since 1.8 */ public final class StringJoiner {
--- a/src/share/classes/java/util/stream/Collector.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/stream/Collector.java Fri Jun 28 16:26:54 2013 -0400 @@ -25,40 +25,45 @@ package java.util.stream; import java.util.Collections; +import java.util.EnumSet; import java.util.Set; -import java.util.function.BiFunction; +import java.util.function.BiConsumer; import java.util.function.BinaryOperator; +import java.util.function.Function; import java.util.function.Supplier; /** * A <a href="package-summary.html#Reduction">reduction operation</a> that - * supports folding input elements into a cumulative result. The result may be - * a value or may be a mutable result container. Examples of operations - * accumulating results into a mutable result container include: accumulating - * input elements into a {@code Collection}; concatenating strings into a - * {@code StringBuilder}; computing summary information about elements such as - * sum, min, max, or average; computing "pivot table" summaries such as "maximum - * valued transaction by seller", etc. Reduction operations can be performed - * either sequentially or in parallel. + * folds input elements into a mutable result container, optionally transforming + * the accumulated result into a final representation after all input elements + * have been processed. + * + * <p>Examples of mutable reduction operations include: + * accumulating elements into a {@code Collection}; concatenating + * strings using a {@code StringBuilder}; computing summary information about + * elements such as sum, min, max, or average; computing "pivot table" summaries + * such as "maximum valued transaction by seller", etc. Reduction operations + * can be performed either sequentially or in parallel. 
* * <p>The following are examples of using the predefined {@code Collector} * implementations in {@link Collectors} with the {@code Stream} API to perform * mutable reduction tasks: * <pre>{@code - * // Accumulate elements into a List - * List<String> list = stream.collect(Collectors.toList()); + * // Accumulate names into a List + * List<String> list = people.stream().map(Person::getName).collect(Collectors.toList()); * - * // Accumulate elements into a TreeSet - * Set<String> list = stream.collect(Collectors.toCollection(TreeSet::new)); + * // Accumulate names into a TreeSet + * Set<String> list = people.stream().map(Person::getName).collect(Collectors.toCollection(TreeSet::new)); * * // Convert elements to strings and concatenate them, separated by commas - * String joined = stream.map(Object::toString) - * .collect(Collectors.toStringJoiner(", ")) - * .toString(); + * String joined = things.stream() + * .map(Object::toString) + * .collect(Collectors.joining(", ")); * * // Find highest-paid employee * Employee highestPaid = employees.stream() - * .collect(Collectors.maxBy(Comparators.comparing(Employee::getSalary))); + * .collect(Collectors.maxBy(Comparators.comparing(Employee::getSalary))) + * .get(); * * // Group employees by department * Map<Department, List<Employee>> byDept @@ -66,7 +71,7 @@ * .collect(Collectors.groupingBy(Employee::getDepartment)); * * // Find highest-paid employee by department - * Map<Department, Employee> highestPaidByDept + * Map<Department, Optional<Employee>> highestPaidByDept * = employees.stream() * .collect(Collectors.groupingBy(Employee::getDepartment, * Collectors.maxBy(Comparators.comparing(Employee::getSalary)))); @@ -74,43 +79,42 @@ * // Partition students into passing and failing * Map<Boolean, List<Student>> passingFailing = * students.stream() - * .collect(Collectors.partitioningBy(s -> s.getGrade() >= PASS_THRESHOLD); + * .collect(Collectors.partitioningBy(s -> s.getGrade() >= PASS_THRESHOLD)); * * }</pre> * - * <p>A 
{@code Collector} is specified by three functions that work together to - * manage a result or result container. They are: creation of an initial - * result, incorporating a new data element into a result, and combining two - * results into one. The last function -- combining two results into one -- is - * used during parallel operations, where subsets of the input are accumulated - * in parallel, and then the subresults merged into a combined result. The - * result may be a mutable container or a value. If the result is mutable, the - * accumulation and combination functions may either mutate their left argument - * and return that (such as adding elements to a collection), or return a new - * result, in which case it should not perform any mutation. + * <p>A {@code Collector} is specified by four functions that work together to + * accumulate entries into a mutable result container, and optionally perform + * a final transform on the result. They are: creation of a new result container, + * incorporating a new data element into a result container, combining two + * result containers into one, and performing a final transform on the container. + * The combiner function is used during parallel operations, where + * subsets of the input are accumulated into separate result + * containers, and then the subresults merged into a combined result. The + * combiner function may merge one set of subresults into the other and return + * that, or it may return a new object to describe the combined results. * - * <p>Collectors also have a set of characteristics, including - * {@link Characteristics#CONCURRENT} and - * {@link Characteristics#STRICTLY_MUTATIVE}. These characteristics provide + * <p>Collectors also have a set of characteristics, such as + * {@link Characteristics#CONCURRENT}. These characteristics provide * hints that can be used by a reduction implementation to provide better * performance. 
* * <p>Libraries that implement reduction based on {@code Collector}, such as * {@link Stream#collect(Collector)}, must adhere to the following constraints: * <ul> - * <li>The first argument passed to the accumulator function, and both - * arguments passed to the combiner function, must be the result of a - * previous invocation of {@link #resultSupplier()}, {@link #accumulator()}, - * or {@link #combiner()}.</li> + * <li>The first argument passed to the accumulator function, both + * arguments passed to the combiner function, and the argument passed to the + * finisher function must be the result of a previous invocation of the + * result supplier, accumulator, or combiner functions.</li> * <li>The implementation should not do anything with the result of any of * the result supplier, accumulator, or combiner functions other than to - * pass them again to the accumulator or combiner functions, or return them - * to the caller of the reduction operation.</li> - * <li>If a result is passed to the accumulator or combiner function, and - * the same object is not returned from that function, it is never used - * again.</li> - * <li>Once a result is passed to the combiner function, it is never passed - * to the accumulator function again.</li> + * pass them again to the accumulator, combiner, or finisher functions, + * or return them to the caller of the reduction operation.</li> + * <li>If a result is passed to the combiner or finisher + * function, and the same object is not returned from that function, it is + * never used again.</li> + * <li>Once a result is passed to the combiner or finisher function, it + * is never passed to the accumulator function again.</li> * <li>For non-concurrent collectors, any result returned from the result * supplier, accumulator, or combiner functions must be serially * thread-confined. 
This enables collection to occur in parallel without @@ -132,11 +136,10 @@ * Performing a reduction operation with a {@code Collector} should produce a * result equivalent to: * <pre>{@code - * BiFunction<R,T,R> accumulator = collector.accumulator(); - * R result = collector.resultSupplier().get(); + * R container = collector.supplier().get(); * for (T t : data) - * result = accumulator.apply(result, t); - * return result; + * collector.accumulator().accept(container, t); + * return collector.finisher().apply(container); * }</pre> * * <p>However, the library is free to partition the input, perform the reduction @@ -149,7 +152,7 @@ * is accumulating elements into a {@code TreeSet}. In this case, the {@code * resultSupplier()} function is {@code () -> new Treeset<T>()}, the * {@code accumulator} function is - * {@code (set, element) -> { set.add(element); return set; }}, and the combiner + * {@code (set, element) -> set.add(element) }, and the combiner * function is {@code (left, right) -> { left.addAll(right); return left; }}. * (This behavior is implemented by * {@code Collectors.toCollection(TreeSet::new)}). @@ -159,51 +162,49 @@ * @see Stream#collect(Collector) * @see Collectors * - * @param <T> the type of input element to the collect operation - * @param <R> the result type of the collect operation + * @param <T> the type of input elements to the reduction operation + * @param <A> the mutable accumulation type of the reduction operation (often + * hidden as an implementation detail) + * @param <R> the result type of the reduction operation * @since 1.8 */ -public interface Collector<T, R> { +public interface Collector<T, A, R> { /** - * A function that creates and returns a new result that represents - * "no values". If the accumulator or combiner functions may mutate their - * arguments, this must be a new, empty result container. + * A function that creates and returns a new mutable result container. 
* - * @return a function which, when invoked, returns a result representing - * "no values" + * @return a function which returns a new, mutable result container */ - Supplier<R> resultSupplier(); + Supplier<A> supplier(); /** - * A function that folds a new value into a cumulative result. The result - * may be a mutable result container or a value. The accumulator function - * may modify a mutable container and return it, or create a new result and - * return that, but if it returns a new result object, it must not modify - * any of its arguments. + * A function that folds a new value into a mutable result container. * - * <p>If the collector has the {@link Characteristics#STRICTLY_MUTATIVE} - * characteristic, then the accumulator function <em>must</em> always return - * its first argument, after possibly mutating its state. - * - * @return a function which folds a new value into a cumulative result + * @return a function which folds a new value into a mutable result container */ - BiFunction<R, T, R> accumulator(); + BiConsumer<A, T> accumulator(); /** * A function that accepts two partial results and merges them. The * combiner function may fold state from one argument into the other and - * return that, or may return a new result object, but if it returns - * a new result object, it must not modify the state of either of its - * arguments. - * - * <p>If the collector has the {@link Characteristics#STRICTLY_MUTATIVE} - * characteristic, then the combiner function <em>must</em> always return - * its first argument, after possibly mutating its state. + * return that, or may return a new result object. * * @return a function which combines two partial results into a cumulative * result */ - BinaryOperator<R> combiner(); + BinaryOperator<A> combiner(); + + /** + * Perform the final transformation from the intermediate accumulation type + * {@code A} to the final result representation {@code R}. 
+ * + * <p>If the characteristic {@code IDENTITY_FINISH} is + * set, this function may be presumed to be an identity transform with an + * unchecked cast from {@code A} to {@code R}. + * + * @return a function which transforms the intermediate result to the final + * result + */ + Function<A, R> finisher(); /** * Returns a {@code Set} of {@code Collector.Characteristics} indicating @@ -214,6 +215,62 @@ Set<Characteristics> characteristics(); /** + * Returns a new {@code Collector} described by the given {@code supplier}, + * {@code accumulator}, and {@code combiner} functions. The resulting + * {@code Collector} has the {@code Collector.Characteristics.IDENTITY_FINISH} + * characteristic. + * + * @param supplier The supplier function for the new collector + * @param accumulator The accumulator function for the new collector + * @param combiner The combiner function for the new collector + * @param characteristics The collector characteristics for the new + * collector + * @param <T> The type of input elements for the new collector + * @param <R> The type of intermediate accumulation result, and final result, + * for the new collector + * @return the new {@code Collector} + */ + public static<T, R> Collector<T, R, R> of(Supplier<R> supplier, + BiConsumer<R, T> accumulator, + BinaryOperator<R> combiner, + Characteristics... characteristics) { + Set<Characteristics> cs = (characteristics.length == 0) + ? Collectors.CH_ID + : Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.IDENTITY_FINISH, + characteristics)); + return new Collectors.CollectorImpl<>(supplier, accumulator, combiner, cs); + } + + /** + * Returns a new {@code Collector} described by the given {@code supplier}, + * {@code accumulator}, {@code combiner}, and {@code finisher} functions. 
+ * + * @param supplier The supplier function for the new collector + * @param accumulator The accumulator function for the new collector + * @param combiner The combiner function for the new collector + * @param finisher The finisher function for the new collector + * @param characteristics The collector characteristics for the new + * collector + * @param <T> The type of input elements for the new collector + * @param <A> The intermediate accumulation type of the new collector + * @param <R> The final result type of the new collector + * @return the new {@code Collector} + */ + public static<T, A, R> Collector<T, A, R> of(Supplier<A> supplier, + BiConsumer<A, T> accumulator, + BinaryOperator<A> combiner, + Function<A, R> finisher, + Characteristics... characteristics) { + Set<Characteristics> cs = Collectors.CH_NOID; + if (characteristics.length > 0) { + cs = EnumSet.noneOf(Characteristics.class); + Collections.addAll(cs, characteristics); + cs = Collections.unmodifiableSet(cs); + } + return new Collectors.CollectorImpl<>(supplier, accumulator, combiner, finisher, cs); + } + + /** * Characteristics indicating properties of a {@code Collector}, which can * be used to optimize reduction implementations. */ @@ -222,8 +279,7 @@ * Indicates that this collector is <em>concurrent</em>, meaning that * the result container can support the accumulator function being * called concurrently with the same result container from multiple - * threads. Concurrent collectors must also always have the - * {@code STRICTLY_MUTATIVE} characteristic. + * threads. * * <p>If a {@code CONCURRENT} collector is not also {@code UNORDERED}, * then it should only be evaluated concurrently if applied to an @@ -238,12 +294,10 @@ UNORDERED, /** - * Indicates that this collector operates by strict mutation of its - * result container. 
This means that the {@link #accumulator()} and - * {@link #combiner()} functions will always modify the state of and - * return their first argument, rather than returning a different result - * container. + * Indicates that the finisher function is the identity function and + * can be elided. If set, it must be the case that an unchecked cast + * from A to R will succeed. */ - STRICTLY_MUTATIVE + IDENTITY_FINISH } }
--- a/src/share/classes/java/util/stream/Collectors.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/stream/Collectors.java Fri Jun 28 16:26:54 2013 -0400 @@ -27,6 +27,7 @@ import java.util.AbstractMap; import java.util.AbstractSet; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; @@ -39,14 +40,16 @@ import java.util.List; import java.util.LongSummaryStatistics; import java.util.Map; -import java.util.NoSuchElementException; import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.StringJoiner; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.BinaryOperator; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; @@ -64,20 +67,21 @@ * mutable reduction tasks: * * <pre>{@code - * // Accumulate elements into a List - * List<Person> list = people.collect(Collectors.toList()); + * // Accumulate names into a List + * List<String> list = people.stream().map(Person::getName).collect(Collectors.toList()); * - * // Accumulate elements into a TreeSet - * List<Person> list = people.collect(Collectors.toCollection(TreeSet::new)); + * // Accumulate names into a TreeSet + * Set<String> list = people.stream().map(Person::getName).collect(Collectors.toCollection(TreeSet::new)); * * // Convert elements to strings and concatenate them, separated by commas - * String joined = stream.map(Object::toString) - * .collect(Collectors.toStringJoiner(", ")) - * .toString(); + * String joined = things.stream() + * .map(Object::toString) + * .collect(Collectors.joining(", ")); * * // Find highest-paid employee * Employee highestPaid = employees.stream() - * 
.collect(Collectors.maxBy(Comparator.comparing(Employee::getSalary))); + * .collect(Collectors.maxBy(Comparator.comparing(Employee::getSalary))) + * .get(); * * // Group employees by department * Map<Department, List<Employee>> byDept @@ -85,7 +89,7 @@ * .collect(Collectors.groupingBy(Employee::getDepartment)); * * // Find highest-paid employee by department - * Map<Department, Employee> highestPaidByDept + * Map<Department, Optional<Employee>> highestPaidByDept * = employees.stream() * .collect(Collectors.groupingBy(Employee::getDepartment, * Collectors.maxBy(Comparator.comparing(Employee::getSalary)))); @@ -93,7 +97,7 @@ * // Partition students into passing and failing * Map<Boolean, List<Student>> passingFailing = * students.stream() - * .collect(Collectors.partitioningBy(s -> s.getGrade() >= PASS_THRESHOLD); + * .collect(Collectors.partitioningBy(s -> s.getGrade() >= PASS_THRESHOLD)); * * }</pre> * @@ -103,15 +107,19 @@ */ public final class Collectors { - private static final Set<Collector.Characteristics> CH_CONCURRENT + static final Set<Collector.Characteristics> CH_CONCURRENT_ID = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.CONCURRENT, - Collector.Characteristics.STRICTLY_MUTATIVE, + Collector.Characteristics.UNORDERED, + Collector.Characteristics.IDENTITY_FINISH)); + static final Set<Collector.Characteristics> CH_CONCURRENT_NOID + = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.CONCURRENT, Collector.Characteristics.UNORDERED)); - private static final Set<Collector.Characteristics> CH_STRICT - = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.STRICTLY_MUTATIVE)); - private static final Set<Collector.Characteristics> CH_STRICT_UNORDERED - = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.STRICTLY_MUTATIVE, - Collector.Characteristics.UNORDERED)); + static final Set<Collector.Characteristics> CH_ID + = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.IDENTITY_FINISH)); + 
static final Set<Collector.Characteristics> CH_UNORDERED_ID + = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.UNORDERED, + Collector.Characteristics.IDENTITY_FINISH)); + static final Set<Collector.Characteristics> CH_NOID = Collections.emptySet(); private Collectors() { } @@ -124,88 +132,64 @@ * * @param <T> the type of input arguments to the merge function * @return a merge function which always throw {@code IllegalStateException} - * - * @see #firstWinsMerger() - * @see #lastWinsMerger() */ - public static <T> BinaryOperator<T> throwingMerger() { + private static <T> BinaryOperator<T> throwingMerger() { return (u,v) -> { throw new IllegalStateException(String.format("Duplicate key %s", u)); }; } /** - * Returns a merge function, suitable for use in - * {@link Map#merge(Object, Object, BiFunction) Map.merge()} or - * {@link #toMap(Function, Function, BinaryOperator) toMap()}, - * which implements a "first wins" policy. - * - * @param <T> the type of input arguments to the merge function - * @return a merge function which always returns its first argument - * @see #lastWinsMerger() - * @see #throwingMerger() - */ - public static <T> BinaryOperator<T> firstWinsMerger() { - return (u,v) -> u; - } - - /** - * Returns a merge function, suitable for use in - * {@link Map#merge(Object, Object, BiFunction) Map.merge()} or - * {@link #toMap(Function, Function, BinaryOperator) toMap()}, - * which implements a "last wins" policy. - * - * @param <T> the type of input arguments to the merge function - * @return a merge function which always returns its second argument - * @see #firstWinsMerger() - * @see #throwingMerger() - */ - public static <T> BinaryOperator<T> lastWinsMerger() { - return (u,v) -> v; - } - - /** * Simple implementation class for {@code Collector}. 
* * @param <T> the type of elements to be collected * @param <R> the type of the result */ - private static final class CollectorImpl<T, R> implements Collector<T,R> { - private final Supplier<R> resultSupplier; - private final BiFunction<R, T, R> accumulator; - private final BinaryOperator<R> combiner; + static class CollectorImpl<T, A, R> implements Collector<T, A, R> { + private final Supplier<A> supplier; + private final BiConsumer<A, T> accumulator; + private final BinaryOperator<A> combiner; + private final Function<A, R> finisher; private final Set<Characteristics> characteristics; - CollectorImpl(Supplier<R> resultSupplier, - BiFunction<R, T, R> accumulator, - BinaryOperator<R> combiner, + CollectorImpl(Supplier<A> supplier, + BiConsumer<A, T> accumulator, + BinaryOperator<A> combiner, + Function<A,R> finisher, Set<Characteristics> characteristics) { - this.resultSupplier = resultSupplier; + this.supplier = supplier; this.accumulator = accumulator; this.combiner = combiner; + this.finisher = finisher; this.characteristics = characteristics; } - CollectorImpl(Supplier<R> resultSupplier, - BiFunction<R, T, R> accumulator, - BinaryOperator<R> combiner) { - this(resultSupplier, accumulator, combiner, Collections.emptySet()); + CollectorImpl(Supplier<A> supplier, + BiConsumer<A, T> accumulator, + BinaryOperator<A> combiner, + Set<Characteristics> characteristics) { + this(supplier, accumulator, combiner, i -> (R) i, characteristics); } @Override - public BiFunction<R, T, R> accumulator() { + public BiConsumer<A, T> accumulator() { return accumulator; } @Override - public Supplier<R> resultSupplier() { - return resultSupplier; + public Supplier<A> supplier() { + return supplier; } @Override - public BinaryOperator<R> combiner() { + public BinaryOperator<A> combiner() { return combiner; } @Override + public Function<A, R> finisher() { + return finisher; + } + + @Override public Set<Characteristics> characteristics() { return characteristics; } @@ -224,11 +208,10 
@@ * {@code Collection}, in encounter order */ public static <T, C extends Collection<T>> - Collector<T, C> toCollection(Supplier<C> collectionFactory) { - return new CollectorImpl<>(collectionFactory, - (r, t) -> { r.add(t); return r; }, + Collector<T, ?, C> toCollection(Supplier<C> collectionFactory) { + return new CollectorImpl<>(collectionFactory, Collection::add, (r1, r2) -> { r1.addAll(r2); return r1; }, - CH_STRICT); + CH_ID); } /** @@ -241,36 +224,10 @@ * {@code List}, in encounter order */ public static <T> - Collector<T, List<T>> toList() { - BiFunction<List<T>, T, List<T>> accumulator = (list, t) -> { - switch (list.size()) { - case 0: - return Collections.singletonList(t); - case 1: - List<T> newList = new ArrayList<>(); - newList.add(list.get(0)); - newList.add(t); - return newList; - default: - list.add(t); - return list; - } - }; - BinaryOperator<List<T>> combiner = (left, right) -> { - switch (left.size()) { - case 0: - return right; - case 1: - List<T> newList = new ArrayList<>(left.size() + right.size()); - newList.addAll(left); - newList.addAll(right); - return newList; - default: - left.addAll(right); - return left; - } - }; - return new CollectorImpl<>(Collections::emptyList, accumulator, combiner); + Collector<T, ?, List<T>> toList() { + return new CollectorImpl<>((Supplier<List<T>>) ArrayList::new, List::add, + (left, right) -> { left.addAll(right); return left; }, + CH_ID); } /** @@ -286,44 +243,58 @@ * {@code Set} */ public static <T> - Collector<T, Set<T>> toSet() { - return new CollectorImpl<>((Supplier<Set<T>>) HashSet::new, - (r, t) -> { r.add(t); return r; }, - (r1, r2) -> { r1.addAll(r2); return r1; }, - CH_STRICT_UNORDERED); + Collector<T, ?, Set<T>> toSet() { + return new CollectorImpl<>((Supplier<Set<T>>) HashSet::new, Set::add, + (left, right) -> { left.addAll(right); return left; }, + CH_UNORDERED_ID); } /** * Returns a {@code Collector} that concatenates the input elements into a - * new {@link StringBuilder}. 
+ * {@code String}, in encounter order. * - * @return a {@code Collector} which collects String elements into a - * {@code StringBuilder}, in encounter order + * @return a {@code Collector} that concatenates the input elements into a + * {@code String}, in encounter order */ - public static Collector<String, StringBuilder> toStringBuilder() { - return new CollectorImpl<>(StringBuilder::new, - (r, t) -> { r.append(t); return r; }, - (r1, r2) -> { r1.append(r2); return r1; }, - CH_STRICT); + public static Collector<CharSequence, ?, String> joining() { + return new CollectorImpl<CharSequence, StringBuilder, String>( + StringBuilder::new, StringBuilder::append, + (r1, r2) -> { r1.append(r2); return r1; }, + StringBuilder::toString, CH_NOID); } /** - * Returns a {@code Collector} that concatenates the input elements into a - * new {@link StringJoiner}, using the specified delimiter. + * Returns a {@code Collector} that concatenates the input elements, + * separated by the specified delimiter, in encounter order. * * @param delimiter the delimiter to be used between each element - * @return A {@code Collector} which collects String elements into a - * {@code StringJoiner}, in encounter order + * @return A {@code Collector} which concatenates CharSequence elements, + * separated by the specified delimiter, in encounter order */ - public static Collector<CharSequence, StringJoiner> toStringJoiner(CharSequence delimiter) { - BinaryOperator<StringJoiner> merger = (sj, other) -> { - if (other.length() > 0) - sj.add(other.toString()); - return sj; - }; - return new CollectorImpl<>(() -> new StringJoiner(delimiter), - (r, t) -> { r.add(t); return r; }, - merger, CH_STRICT); + public static Collector<CharSequence, ?, String> joining(CharSequence delimiter) { + return joining(delimiter, "", ""); + } + + /** + * Returns a {@code Collector} that concatenates the input elements, + * separated by the specified delimiter, with the specified prefix and + * suffix, in encounter order. 
+ * + * @param delimiter the delimiter to be used between each element + * @param prefix the sequence of characters to be used at the beginning + * of the joined result + * @param suffix the sequence of characters to be used at the end + * of the joined result + * @return A {@code Collector} which concatenates CharSequence elements, + * separated by the specified delimiter, in encounter order + */ + public static Collector<CharSequence, ?, String> joining(CharSequence delimiter, + CharSequence prefix, + CharSequence suffix) { + return new CollectorImpl<>( + () -> new StringJoiner(delimiter, prefix, suffix), + StringJoiner::add, StringJoiner::merge, + StringJoiner::toString, CH_NOID); } /** @@ -348,12 +319,13 @@ } /** - * Adapts a {@code Collector<U,R>} to a {@code Collector<T,R>} by applying - * a mapping function to each input element before accumulation. + * Adapts a {@code Collector} accepting elements of type {@code U} to one + * accepting elements of type {@code T} by applying a mapping function to + * each input element before accumulation. * * @apiNote * The {@code mapping()} collectors are most useful when used in a - * multi-level reduction, downstream of {@code groupingBy} or + * multi-level reduction, such as downstream of a {@code groupingBy} or * {@code partitioningBy}. For example, given a stream of * {@code Person}, to accumulate the set of last names in each city: * <pre>{@code @@ -364,23 +336,27 @@ * * @param <T> the type of the input elements * @param <U> type of elements accepted by downstream collector + * @param <A> intermediate accumulation type of the downstream collector * @param <R> result type of collector * @param mapper a function to be applied to the input elements * @param downstream a collector which will accept mapped values * @return a collector which applies the mapping function to the input * elements and provides the mapped results to the downstream collector */ - public static <T, U, R> Collector<T, R> - mapping(Function<? 
super T, ? extends U> mapper, Collector<? super U, R> downstream) { - BiFunction<R, ? super U, R> downstreamAccumulator = downstream.accumulator(); - return new CollectorImpl<>(downstream.resultSupplier(), - (r, t) -> downstreamAccumulator.apply(r, mapper.apply(t)), - downstream.combiner(), downstream.characteristics()); + public static <T, U, A, R> + Collector<T, ?, R> mapping(Function<? super T, ? extends U> mapper, + Collector<? super U, A, R> downstream) { + BiConsumer<A, ? super U> downstreamAccumulator = downstream.accumulator(); + return new CollectorImpl<>(downstream.supplier(), + (r, t) -> downstreamAccumulator.accept(r, mapper.apply(t)), + downstream.combiner(), downstream.finisher(), + downstream.characteristics()); } /** - * Returns a {@code Collector<T, Long>} that counts the number of input - * elements. + * Returns a {@code Collector} accepting elements of type {@code T} that + * counts the number of input elements. If no elements are present, the + * result is 0. * * @implSpec * This produces a result equivalent to: @@ -391,14 +367,14 @@ * @param <T> the type of the input elements * @return a {@code Collector} that counts the input elements */ - public static <T> Collector<T, Long> + public static <T> Collector<T, ?, Long> counting() { return reducing(0L, e -> 1L, Long::sum); } /** - * Returns a {@code Collector<T, T>} that produces the minimal element - * according to a given {@code Comparator}. + * Returns a {@code Collector} that produces the minimal element according + * to a given {@code Comparator}, described as an {@code Optional<T>}. * * @implSpec * This produces a result equivalent to: @@ -410,14 +386,14 @@ * @param comparator a {@code Comparator} for comparing elements * @return a {@code Collector} that produces the minimal value */ - public static <T> Collector<T, T> + public static <T> Collector<T, ?, Optional<T>> minBy(Comparator<? 
super T> comparator) { return reducing(BinaryOperator.minBy(comparator)); } /** - * Returns a {@code Collector<T, T>} that produces the maximal element - * according to a given {@code Comparator}. + * Returns a {@code Collector} that produces the maximal element according + * to a given {@code Comparator}, described as an {@code Optional<T>}. * * @implSpec * This produces a result equivalent to: @@ -429,39 +405,143 @@ * @param comparator a {@code Comparator} for comparing elements * @return a {@code Collector} that produces the maximal value */ - public static <T> Collector<T, T> + public static <T> Collector<T, ?, Optional<T>> maxBy(Comparator<? super T> comparator) { return reducing(BinaryOperator.maxBy(comparator)); } /** - * Returns a {@code Collector<T, Long>} that produces the sum of a - * long-valued function applied to the input element. + * Returns a {@code Collector} that produces the sum of a integer-valued + * function applied to the input elements. If no elements are present, + * the result is 0. + * + * @param <T> the type of the input elements + * @param mapper a function extracting the property to be summed + * @return a {@code Collector} that produces the sum of a derived property + */ + public static <T> Collector<T, ?, Integer> + summingInt(ToIntFunction<? super T> mapper) { + return new CollectorImpl<T, int[], Integer>( + () -> new int[1], + (a, t) -> { a[0] += mapper.applyAsInt(t); }, + (a, b) -> { a[0] += b[0]; return a; }, + a -> a[0], CH_NOID); + } + + /** + * Returns a {@code Collector} that produces the sum of a long-valued + * function applied to the input elements. If no elements are present, + * the result is 0. 
* - * @implSpec - * This produces a result equivalent to: - * <pre>{@code - * reducing(0L, mapper, Long::sum) - * }</pre> + * @param <T> the type of the input elements + * @param mapper a function extracting the property to be summed + * @return a {@code Collector} that produces the sum of a derived property + */ + public static <T> Collector<T, ?, Long> + summingLong(ToLongFunction<? super T> mapper) { + return new CollectorImpl<T, long[], Long>( + () -> new long[1], + (a, t) -> { a[0] += mapper.applyAsLong(t); }, + (a, b) -> { a[0] += b[0]; return a; }, + a -> a[0], CH_NOID); + } + + /** + * Returns a {@code Collector} that produces the sum of a double-valued + * function applied to the input elements. If no elements are present, + * the result is 0. + * + * <p>The sum returned can vary depending upon the order in which + * values are recorded, due to accumulated rounding error in + * addition of values of differing magnitudes. Values sorted by increasing + * absolute magnitude tend to yield more accurate results. If any recorded + * value is a {@code NaN} or the sum is at any point a {@code NaN} then the + * sum will be {@code NaN}. * * @param <T> the type of the input elements * @param mapper a function extracting the property to be summed * @return a {@code Collector} that produces the sum of a derived property */ - public static <T> Collector<T, Long> - sumBy(Function<? super T, Long> mapper) { - return reducing(0L, mapper, Long::sum); + public static <T> Collector<T, ?, Double> + summingDouble(ToDoubleFunction<? super T> mapper) { + return new CollectorImpl<T, double[], Double>( + () -> new double[1], + (a, t) -> { a[0] += mapper.applyAsDouble(t); }, + (a, b) -> { a[0] += b[0]; return a; }, + a -> a[0], CH_NOID); + } + + /** + * Returns a {@code Collector} that produces the arithmetic mean of an integer-valued + * function applied to the input elements. If no elements are present, + * the result is 0. 
+ * + * @param <T> the type of the input elements + * @param mapper a function extracting the property to be summed + * @return a {@code Collector} that produces the sum of a derived property + */ + public static <T> Collector<T, ?, Double> + averagingInt(ToIntFunction<? super T> mapper) { + return new CollectorImpl<T, long[], Double>( + () -> new long[2], + (a, t) -> { a[0] += mapper.applyAsInt(t); a[1]++; }, + (a, b) -> { a[0] += b[0]; a[1] += b[1]; return a; }, + a -> (a[1] == 0) ? 0.0d : (double) a[0] / a[1], CH_NOID); } /** - * Returns a {@code Collector<T,T>} which performs a reduction of its - * input elements under a specified {@code BinaryOperator}. + * Returns a {@code Collector} that produces the arithmetic mean of a long-valued + * function applied to the input elements. If no elements are present, + * the result is 0. + * + * @param <T> the type of the input elements + * @param mapper a function extracting the property to be summed + * @return a {@code Collector} that produces the sum of a derived property + */ + public static <T> Collector<T, ?, Double> + averagingLong(ToLongFunction<? super T> mapper) { + return new CollectorImpl<T, long[], Double>( + () -> new long[2], + (a, t) -> { a[0] += mapper.applyAsLong(t); a[1]++; }, + (a, b) -> { a[0] += b[0]; a[1] += b[1]; return a; }, + a -> (a[1] == 0) ? 0.0d : (double) a[0] / a[1], CH_NOID); + } + + /** + * Returns a {@code Collector} that produces the arithmetic mean of a double-valued + * function applied to the input elements. If no elements are present, + * the result is 0. + * + * <p>The average returned can vary depending upon the order in which + * values are recorded, due to accumulated rounding error in + * addition of values of differing magnitudes. Values sorted by increasing + * absolute magnitude tend to yield more accurate results. If any recorded + * value is a {@code NaN} or the sum is at any point a {@code NaN} then the + * average will be {@code NaN}. 
+ * + * @param <T> the type of the input elements + * @param mapper a function extracting the property to be summed + * @return a {@code Collector} that produces the sum of a derived property + */ + public static <T> Collector<T, ?, Double> + averagingDouble(ToDoubleFunction<? super T> mapper) { + return new CollectorImpl<T, double[], Double>( + () -> new double[2], + (a, t) -> { a[0] += mapper.applyAsDouble(t); a[1]++; }, + (a, b) -> { a[0] += b[0]; a[1] += b[1]; return a; }, + a -> (a[1] == 0) ? 0.0d : a[0] / a[1], CH_NOID); + } + + /** + * Returns a {@code Collector} which performs a reduction of its + * input elements under a specified {@code BinaryOperator} using the + * provided identity. * * @apiNote * The {@code reducing()} collectors are most useful when used in a * multi-level reduction, downstream of {@code groupingBy} or * {@code partitioningBy}. To perform a simple reduction on a stream, - * use {@link Stream#reduce(BinaryOperator)} instead. + * use {@link Stream#reduce(Object, BinaryOperator)}} instead. * * @param <T> element type for the input and output of the reduction * @param identity the identity value for the reduction (also, the value @@ -472,14 +552,25 @@ * @see #reducing(BinaryOperator) * @see #reducing(Object, Function, BinaryOperator) */ - public static <T> Collector<T, T> + public static <T> Collector<T, ?, T> reducing(T identity, BinaryOperator<T> op) { - return new CollectorImpl<>(() -> identity, (r, t) -> (r == null ? t : op.apply(r, t)), op); + return new CollectorImpl<>( + boxSupplier(identity), + (a, t) -> { a[0] = op.apply(a[0], t); }, + (a, b) -> { a[0] = op.apply(a[0], b[0]); return a; }, + a -> a[0], + CH_NOID); + } + + @SuppressWarnings("unchecked") + private static <T> Supplier<T[]> boxSupplier(T identity) { + return () -> (T[]) new Object[] { identity }; } /** - * Returns a {@code Collector<T,T>} which performs a reduction of its - * input elements under a specified {@code BinaryOperator}. 
+ * Returns a {@code Collector} which performs a reduction of its + * input elements under a specified {@code BinaryOperator}. The result + * is described as an {@code Optional<T>}. * * @apiNote * The {@code reducing()} collectors are most useful when used in a @@ -491,15 +582,8 @@ * person in each city: * <pre>{@code * Comparator<Person> byHeight = Comparator.comparing(Person::getHeight); - * BinaryOperator<Person> tallerOf = BinaryOperator.greaterOf(byHeight); * Map<City, Person> tallestByCity - * = people.stream().collect(groupingBy(Person::getCity, reducing(tallerOf))); - * }</pre> - * - * @implSpec - * The default implementation is equivalent to: - * <pre>{@code - * reducing(null, op); + * = people.stream().collect(groupingBy(Person::getCity, reducing(BinaryOperator.maxBy(byHeight)))); * }</pre> * * @param <T> element type for the input and output of the reduction @@ -509,13 +593,32 @@ * @see #reducing(Object, BinaryOperator) * @see #reducing(Object, Function, BinaryOperator) */ - public static <T> Collector<T, T> + public static <T> Collector<T, ?, Optional<T>> reducing(BinaryOperator<T> op) { - return reducing(null, op); + class OptionalBox implements Consumer<T> { + T value = null; + boolean present = false; + + @Override + public void accept(T t) { + if (present) { + value = op.apply(value, t); + } + else { + value = t; + present = true; + } + } + } + + return new CollectorImpl<T, OptionalBox, Optional<T>>( + OptionalBox::new, OptionalBox::accept, + (a, b) -> { if (b.present) a.accept(b.value); return a; }, + a -> Optional.ofNullable(a.value), CH_NOID); } /** - * Returns a {@code Collector<T,U>} which performs a reduction of its + * Returns a {@code Collector} which performs a reduction of its * input elements under a specified mapping function and * {@code BinaryOperator}. 
This is a generalization of * {@link #reducing(Object, BinaryOperator)} which allows a transformation @@ -524,17 +627,17 @@ * @apiNote * The {@code reducing()} collectors are most useful when used in a * multi-level reduction, downstream of {@code groupingBy} or - * {@code partitioningBy}. To perform a simple reduction on a stream, - * use {@link Stream#reduce(BinaryOperator)} instead. + * {@code partitioningBy}. To perform a simple map-reduce on a stream, + * use {@link Stream#map(Function)} and {@link Stream#reduce(Object, BinaryOperator)} + * instead. * * <p>For example, given a stream of {@code Person}, to calculate the longest * last name of residents in each city: * <pre>{@code * Comparator<String> byLength = Comparator.comparing(String::length); - * BinaryOperator<String> longerOf = BinaryOperator.greaterOf(byLength); * Map<City, String> longestLastNameByCity * = people.stream().collect(groupingBy(Person::getCity, - * reducing(Person::getLastName, longerOf))); + * reducing(Person::getLastName, BinaryOperator.maxBy(byLength)))); * }</pre> * * @param <T> the type of the input elements @@ -549,18 +652,20 @@ * @see #reducing(BinaryOperator) */ public static <T, U> - Collector<T, U> reducing(U identity, - Function<? super T, ? extends U> mapper, - BinaryOperator<U> op) { - return new CollectorImpl<>(() -> identity, - (r, t) -> (r == null ? mapper.apply(t) : op.apply(r, mapper.apply(t))), - op); + Collector<T, ?, U> reducing(U identity, + Function<? super T, ? extends U> mapper, + BinaryOperator<U> op) { + return new CollectorImpl<>( + boxSupplier(identity), + (a, t) -> { a[0] = op.apply(a[0], mapper.apply(t)); }, + (a, b) -> { a[0] = op.apply(a[0], b[0]); return a; }, + a -> a[0], CH_NOID); } /** * Returns a {@code Collector} implementing a "group by" operation on * input elements of type {@code T}, grouping elements according to a - * classification function. + * classification function, and returning the results in a {@code Map}. 
* * <p>The classification function maps elements to some key type {@code K}. * The collector produces a {@code Map<K, List<T>>} whose keys are the @@ -586,9 +691,9 @@ * @see #groupingBy(Function, Supplier, Collector) * @see #groupingByConcurrent(Function) */ - public static <T, K> - Collector<T, Map<K, List<T>>> groupingBy(Function<? super T, ? extends K> classifier) { - return groupingBy(classifier, HashMap::new, toList()); + public static <T, K> Collector<T, ?, Map<K, List<T>>> + groupingBy(Function<? super T, ? extends K> classifier) { + return groupingBy(classifier, toList()); } /** @@ -615,6 +720,7 @@ * * @param <T> the type of the input elements * @param <K> the type of the keys + * @param <A> the intermediate accumulation type of the downstream collector * @param <D> the result type of the downstream reduction * @param classifier a classifier function mapping input elements to keys * @param downstream a {@code Collector} implementing the downstream reduction @@ -624,9 +730,9 @@ * @see #groupingBy(Function, Supplier, Collector) * @see #groupingByConcurrent(Function, Collector) */ - public static <T, K, D> - Collector<T, Map<K, D>> groupingBy(Function<? super T, ? extends K> classifier, - Collector<? super T, D> downstream) { + public static <T, K, A, D> + Collector<T, ?, Map<K, D>> groupingBy(Function<? super T, ? extends K> classifier, + Collector<? 
super T, A, D> downstream) { return groupingBy(classifier, HashMap::new, downstream); } @@ -653,6 +759,7 @@ * * @param <T> the type of the input elements * @param <K> the type of the keys + * @param <A> the intermediate accumulation type of the downstream collector * @param <D> the result type of the downstream reduction * @param <M> the type of the resulting {@code Map} * @param classifier a classifier function mapping input elements to keys @@ -665,25 +772,39 @@ * @see #groupingBy(Function) * @see #groupingByConcurrent(Function, Supplier, Collector) */ - public static <T, K, D, M extends Map<K, D>> - Collector<T, M> groupingBy(Function<? super T, ? extends K> classifier, - Supplier<M> mapFactory, - Collector<? super T, D> downstream) { - Supplier<D> downstreamSupplier = downstream.resultSupplier(); - BiFunction<D, ? super T, D> downstreamAccumulator = downstream.accumulator(); - BiFunction<M, T, M> accumulator = (m, t) -> { + public static <T, K, D, A, M extends Map<K, D>> + Collector<T, ?, M> groupingBy(Function<? super T, ? extends K> classifier, + Supplier<M> mapFactory, + Collector<? super T, A, D> downstream) { + Supplier<A> downstreamSupplier = downstream.supplier(); + BiConsumer<A, ? 
super T> downstreamAccumulator = downstream.accumulator(); + BiConsumer<Map<K, A>, T> accumulator = (m, t) -> { K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); - D oldContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get()); - D newContainer = downstreamAccumulator.apply(oldContainer, t); - if (newContainer != oldContainer) - m.put(key, newContainer); - return m; + A container = m.computeIfAbsent(key, k -> downstreamSupplier.get()); + downstreamAccumulator.accept(container, t); }; - return new CollectorImpl<>(mapFactory, accumulator, mapMerger(downstream.combiner()), CH_STRICT); + BinaryOperator<Map<K, A>> merger = Collectors.<K, A, Map<K, A>>mapMerger(downstream.combiner()); + @SuppressWarnings("unchecked") + Supplier<Map<K, A>> mangledFactory = (Supplier<Map<K, A>>) mapFactory; + + if (downstream.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)) { + return new CollectorImpl<>(mangledFactory, accumulator, merger, CH_ID); + } + else { + @SuppressWarnings("unchecked") + Function<A, A> downstreamFinisher = (Function<A, A>) downstream.finisher(); + Function<Map<K, A>, M> finisher = intermediate -> { + intermediate.replaceAll((k, v) -> downstreamFinisher.apply(v)); + @SuppressWarnings("unchecked") + M castResult = (M) intermediate; + return castResult; + }; + return new CollectorImpl<>(mangledFactory, accumulator, merger, finisher, CH_NOID); + } } /** - * Returns a {@code Collector} implementing a concurrent "group by" + * Returns a concurrent {@code Collector} implementing a "group by" * operation on input elements of type {@code T}, grouping elements * according to a classification function. * @@ -716,12 +837,13 @@ * @see #groupingByConcurrent(Function, Supplier, Collector) */ public static <T, K> - Collector<T, ConcurrentMap<K, List<T>>> groupingByConcurrent(Function<? super T, ? extends K> classifier) { + Collector<T, ?, ConcurrentMap<K, List<T>>> + groupingByConcurrent(Function<? 
super T, ? extends K> classifier) { return groupingByConcurrent(classifier, ConcurrentHashMap::new, toList()); } /** - * Returns a {@code Collector} implementing a concurrent cascaded "group by" + * Returns a concurrent {@code Collector} implementing a cascaded "group by" * operation on input elements of type {@code T}, grouping elements * according to a classification function, and then performing a reduction * operation on the values associated with a given key using the specified @@ -739,12 +861,13 @@ * where the city names are sorted: * <pre>{@code * ConcurrentMap<City, Set<String>> namesByCity - * = people.stream().collect(groupingByConcurrent(Person::getCity, TreeMap::new, + * = people.stream().collect(groupingByConcurrent(Person::getCity, ConcurrentSkipListMap::new, * mapping(Person::getLastName, toSet()))); * }</pre> * * @param <T> the type of the input elements * @param <K> the type of the keys + * @param <A> the intermediate accumulation type of the downstream collector * @param <D> the result type of the downstream reduction * @param classifier a classifier function mapping input elements to keys * @param downstream a {@code Collector} implementing the downstream reduction @@ -754,9 +877,9 @@ * @see #groupingByConcurrent(Function) * @see #groupingByConcurrent(Function, Supplier, Collector) */ - public static <T, K, D> - Collector<T, ConcurrentMap<K, D>> groupingByConcurrent(Function<? super T, ? extends K> classifier, - Collector<? super T, D> downstream) { + public static <T, K, A, D> + Collector<T, ?, ConcurrentMap<K, D>> groupingByConcurrent(Function<? super T, ? extends K> classifier, + Collector<? 
super T, A, D> downstream) { return groupingByConcurrent(classifier, ConcurrentHashMap::new, downstream); } @@ -787,6 +910,7 @@ * * @param <T> the type of the input elements * @param <K> the type of the keys + * @param <A> the intermediate accumulation type of the downstream collector * @param <D> the result type of the downstream reduction * @param <M> the type of the resulting {@code ConcurrentMap} * @param classifier a classifier function mapping input elements to keys @@ -799,51 +923,46 @@ * @see #groupingByConcurrent(Function, Collector) * @see #groupingBy(Function, Supplier, Collector) */ - public static <T, K, D, M extends ConcurrentMap<K, D>> - Collector<T, M> groupingByConcurrent(Function<? super T, ? extends K> classifier, - Supplier<M> mapFactory, - Collector<? super T, D> downstream) { - Supplier<D> downstreamSupplier = downstream.resultSupplier(); - BiFunction<D, ? super T, D> downstreamAccumulator = downstream.accumulator(); - BinaryOperator<M> combiner = mapMerger(downstream.combiner()); + public static <T, K, A, D, M extends ConcurrentMap<K, D>> + Collector<T, ?, M> groupingByConcurrent(Function<? super T, ? extends K> classifier, + Supplier<M> mapFactory, + Collector<? super T, A, D> downstream) { + Supplier<A> downstreamSupplier = downstream.supplier(); + BiConsumer<A, ? 
super T> downstreamAccumulator = downstream.accumulator(); + BinaryOperator<ConcurrentMap<K, A>> merger = Collectors.<K, A, ConcurrentMap<K, A>>mapMerger(downstream.combiner()); + @SuppressWarnings("unchecked") + Supplier<ConcurrentMap<K, A>> mangledFactory = (Supplier<ConcurrentMap<K, A>>) mapFactory; + BiConsumer<ConcurrentMap<K, A>, T> accumulator; if (downstream.characteristics().contains(Collector.Characteristics.CONCURRENT)) { - BiFunction<M, T, M> accumulator = (m, t) -> { + accumulator = (m, t) -> { K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); - downstreamAccumulator.apply(m.computeIfAbsent(key, k -> downstreamSupplier.get()), t); - return m; - }; - return new CollectorImpl<>(mapFactory, accumulator, combiner, CH_CONCURRENT); - } else if (downstream.characteristics().contains(Collector.Characteristics.STRICTLY_MUTATIVE)) { - BiFunction<M, T, M> accumulator = (m, t) -> { - K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); - D resultContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get()); - synchronized (resultContainer) { - downstreamAccumulator.apply(resultContainer, t); - } - return m; + A resultContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get()); + downstreamAccumulator.accept(resultContainer, t); }; - return new CollectorImpl<>(mapFactory, accumulator, combiner, CH_CONCURRENT); - } else { - BiFunction<M, T, M> accumulator = (m, t) -> { + } + else { + accumulator = (m, t) -> { K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); - do { - D oldResult = m.computeIfAbsent(key, k -> downstreamSupplier.get()); - if (oldResult == null) { - if (m.putIfAbsent(key, downstreamAccumulator.apply(null, t)) == null) - return m; - } else { - synchronized (oldResult) { - if (m.get(key) != oldResult) - continue; - D newResult = downstreamAccumulator.apply(oldResult, t); - if (oldResult != newResult) - m.put(key, 
newResult); - return m; - } - } - } while (true); + A resultContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get()); + synchronized (resultContainer) { + downstreamAccumulator.accept(resultContainer, t); + } }; - return new CollectorImpl<>(mapFactory, accumulator, combiner, CH_CONCURRENT); + } + + if (downstream.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)) { + return new CollectorImpl<>(mangledFactory, accumulator, merger, CH_CONCURRENT_ID); + } + else { + @SuppressWarnings("unchecked") + Function<A, A> downstreamFinisher = (Function<A, A>) downstream.finisher(); + Function<ConcurrentMap<K, A>, M> finisher = intermediate -> { + intermediate.replaceAll((k, v) -> downstreamFinisher.apply(v)); + @SuppressWarnings("unchecked") + M castResult = (M) intermediate; + return castResult; + }; + return new CollectorImpl<>(mangledFactory, accumulator, merger, finisher, CH_CONCURRENT_NOID); } } @@ -862,7 +981,7 @@ * @see #partitioningBy(Predicate, Collector) */ public static <T> - Collector<T, Map<Boolean, List<T>>> partitioningBy(Predicate<? super T> predicate) { + Collector<T, ?, Map<Boolean, List<T>>> partitioningBy(Predicate<? super T> predicate) { return partitioningBy(predicate, toList()); } @@ -877,6 +996,7 @@ * serializability, or thread-safety of the {@code Map} returned. * * @param <T> the type of the input elements + * @param <A> the intermediate accumulation type of the downstream collector * @param <D> the result type of the downstream reduction * @param predicate a predicate used for classifying input elements * @param downstream a {@code Collector} implementing the downstream @@ -886,52 +1006,43 @@ * * @see #partitioningBy(Predicate) */ - public static <T, D> - Collector<T, Map<Boolean, D>> partitioningBy(Predicate<? super T> predicate, - Collector<? super T, D> downstream) { - BiFunction<D, ? 
super T, D> downstreamAccumulator = downstream.accumulator(); - BiFunction<Map<Boolean, D>, T, Map<Boolean, D>> accumulator = (result, t) -> { + public static <T, D, A> + Collector<T, ?, Map<Boolean, D>> partitioningBy(Predicate<? super T> predicate, + Collector<? super T, A, D> downstream) { + @SuppressWarnings("unchecked") + BiConsumer<D, ? super T> downstreamAccumulator = (BiConsumer<D, ? super T>) downstream.accumulator(); + BiConsumer<Map<Boolean, A>, T> accumulator = (result, t) -> { Partition<D> asPartition = ((Partition<D>) result); - if (predicate.test(t)) { - D newResult = downstreamAccumulator.apply(asPartition.forTrue, t); - if (newResult != asPartition.forTrue) - asPartition.forTrue = newResult; - } else { - D newResult = downstreamAccumulator.apply(asPartition.forFalse, t); - if (newResult != asPartition.forFalse) - asPartition.forFalse = newResult; - } - return result; + downstreamAccumulator.accept(predicate.test(t) ? asPartition.forTrue : asPartition.forFalse, t); + }; + BinaryOperator<A> op = downstream.combiner(); + BinaryOperator<Map<Boolean, A>> merger = (m1, m2) -> { + Partition<A> left = (Partition<A>) m1; + Partition<A> right = (Partition<A>) m2; + return new Partition<>(op.apply(left.forTrue, right.forTrue), + op.apply(left.forFalse, right.forFalse)); }; - return new CollectorImpl<>(() -> new Partition<>(downstream.resultSupplier().get(), - downstream.resultSupplier().get()), - accumulator, partitionMerger(downstream.combiner()), CH_STRICT); + Supplier<Map<Boolean, A>> supplier = () -> new Partition<>(downstream.supplier().get(), + downstream.supplier().get()); + if (downstream.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)) { + return new CollectorImpl<>(supplier, accumulator, merger, CH_ID); + } + else { + Function<Map<Boolean, A>, Map<Boolean, D>> finisher = (Map<Boolean, A> par) -> { + Partition<A> asAPartition = (Partition<A>) par; + return new Partition<>(downstream.finisher().apply(asAPartition.forTrue), + 
downstream.finisher().apply(asAPartition.forFalse)); + }; + return new CollectorImpl<>(supplier, accumulator, merger, finisher, CH_NOID); + } } /** - * Merge function for two partitions, given a merge function for the - * elements. - */ - private static <D> BinaryOperator<Map<Boolean, D>> partitionMerger(BinaryOperator<D> op) { - return (m1, m2) -> { - Partition<D> left = (Partition<D>) m1; - Partition<D> right = (Partition<D>) m2; - if (left.forFalse == null) - left.forFalse = right.forFalse; - else if (right.forFalse != null) - left.forFalse = op.apply(left.forFalse, right.forFalse); - if (left.forTrue == null) - left.forTrue = right.forTrue; - else if (right.forTrue != null) - left.forTrue = op.apply(left.forTrue, right.forTrue); - return left; - }; - } - - /** - * Accumulate elements into a {@code Map} whose keys and values are the - * result of applying mapping functions to the input elements. - * If the mapped keys contains duplicates (according to + * Returns a {@code Collector} that accumulate elements into a + * {@code Map} whose keys and values are the result of applying the provided + * mapping functions to the input elements. + * + * <p>If the mapped keys contains duplicates (according to * {@link Object#equals(Object)}), an {@code IllegalStateException} is * thrown when the collection operation is performed. If the mapped keys * may have duplicates, use {@link #toMap(Function, Function, BinaryOperator)} @@ -970,24 +1081,26 @@ * @see #toConcurrentMap(Function, Function) */ public static <T, K, U> - Collector<T, Map<K,U>> toMap(Function<? super T, ? extends K> keyMapper, - Function<? super T, ? extends U> valueMapper) { + Collector<T, ?, Map<K,U>> toMap(Function<? super T, ? extends K> keyMapper, + Function<? super T, ? extends U> valueMapper) { return toMap(keyMapper, valueMapper, throwingMerger(), HashMap::new); } /** - * Accumulate elements into a {@code Map} whose keys and values are the - * result of applying mapping functions to the input elements. 
If the mapped + * Returns a {@code Collector} that accumulate elements into a + * {@code Map} whose keys and values are the result of applying the provided + * mapping functions to the input elements. + * + * <p>If the mapped * keys contains duplicates (according to {@link Object#equals(Object)}), * the value mapping function is applied to each equal element, and the * results are merged using the provided merging function. * * @apiNote * There are multiple ways to deal with collisions between multiple elements - * mapping to the same key. There are some predefined merging functions, - * such as {@link #throwingMerger()}, {@link #firstWinsMerger()}, and - * {@link #lastWinsMerger()}, that implement common policies, or you can - * implement custom policies easily. For example, if you have a stream + * mapping to the same key. The other forms of {@code toMap} simply use + * a merge function that throws unconditionally, but you can easily write + * more flexible merge policies. For example, if you have a stream * of {@code Person}, and you want to produce a "phone book" mapping name to * address, but it is possible that two persons have the same name, you can * do as follows to gracefully deals with these collisions, and produce a @@ -1018,15 +1131,18 @@ * @see #toConcurrentMap(Function, Function, BinaryOperator) */ public static <T, K, U> - Collector<T, Map<K,U>> toMap(Function<? super T, ? extends K> keyMapper, - Function<? super T, ? extends U> valueMapper, - BinaryOperator<U> mergeFunction) { + Collector<T, ?, Map<K,U>> toMap(Function<? super T, ? extends K> keyMapper, + Function<? super T, ? extends U> valueMapper, + BinaryOperator<U> mergeFunction) { return toMap(keyMapper, valueMapper, mergeFunction, HashMap::new); } /** - * Accumulate elements into a {@code Map} whose keys and values are the - * result of applying mapping functions to the input elements. 
If the mapped + * Returns a {@code Collector} that accumulate elements into a + * {@code Map} whose keys and values are the result of applying the provided + * mapping functions to the input elements. + * + * <p>If the mapped * keys contains duplicates (according to {@link Object#equals(Object)}), * the value mapping function is applied to each equal element, and the * results are merged using the provided merging function. The {@code Map} @@ -1054,22 +1170,22 @@ * @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier) */ public static <T, K, U, M extends Map<K, U>> - Collector<T, M> toMap(Function<? super T, ? extends K> keyMapper, - Function<? super T, ? extends U> valueMapper, - BinaryOperator<U> mergeFunction, - Supplier<M> mapSupplier) { - BiFunction<M, T, M> accumulator - = (map, element) -> { - map.merge(keyMapper.apply(element), valueMapper.apply(element), mergeFunction); - return map; - }; - return new CollectorImpl<>(mapSupplier, accumulator, mapMerger(mergeFunction), CH_STRICT); + Collector<T, ?, M> toMap(Function<? super T, ? extends K> keyMapper, + Function<? super T, ? extends U> valueMapper, + BinaryOperator<U> mergeFunction, + Supplier<M> mapSupplier) { + BiConsumer<M, T> accumulator + = (map, element) -> map.merge(keyMapper.apply(element), + valueMapper.apply(element), mergeFunction); + return new CollectorImpl<>(mapSupplier, accumulator, mapMerger(mergeFunction), CH_ID); } /** - * Accumulate elements into a {@code ConcurrentMap} whose keys and values - * are the result of applying mapping functions to the input elements. - * If the mapped keys contains duplicates (according to + * Returns a {@code Collector} that accumulate elements into a + * {@code ConcurrentMap} whose keys and values are the result of applying + * the provided mapping functions to the input elements. 
+ * + * <p>If the mapped keys contains duplicates (according to * {@link Object#equals(Object)}), an {@code IllegalStateException} is * thrown when the collection operation is performed. If the mapped keys * may have duplicates, use @@ -1112,24 +1228,25 @@ * @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier) */ public static <T, K, U> - Collector<T, ConcurrentMap<K,U>> toConcurrentMap(Function<? super T, ? extends K> keyMapper, - Function<? super T, ? extends U> valueMapper) { + Collector<T, ?, ConcurrentMap<K,U>> toConcurrentMap(Function<? super T, ? extends K> keyMapper, + Function<? super T, ? extends U> valueMapper) { return toConcurrentMap(keyMapper, valueMapper, throwingMerger(), ConcurrentHashMap::new); } /** - * Accumulate elements into a {@code ConcurrentMap} whose keys and values - * are the result of applying mapping functions to the input elements. If - * the mapped keys contains duplicates (according to {@link Object#equals(Object)}), + * Returns a {@code Collector} that accumulate elements into a + * {@code ConcurrentMap} whose keys and values are the result of applying + * the provided mapping functions to the input elements. + * + * <p>If the mapped keys contains duplicates (according to {@link Object#equals(Object)}), * the value mapping function is applied to each equal element, and the * results are merged using the provided merging function. * * @apiNote * There are multiple ways to deal with collisions between multiple elements - * mapping to the same key. There are some predefined merging functions, - * such as {@link #throwingMerger()}, {@link #firstWinsMerger()}, and - * {@link #lastWinsMerger()}, that implement common policies, or you can - * implement custom policies easily. For example, if you have a stream + * mapping to the same key. The other forms of {@code toConcurrentMap} simply use + * a merge function that throws unconditionally, but you can easily write + * more flexible merge policies. 
For example, if you have a stream * of {@code Person}, and you want to produce a "phone book" mapping name to * address, but it is possible that two persons have the same name, you can * do as follows to gracefully deals with these collisions, and produce a @@ -1163,16 +1280,19 @@ * @see #toMap(Function, Function, BinaryOperator) */ public static <T, K, U> - Collector<T, ConcurrentMap<K,U>> toConcurrentMap(Function<? super T, ? extends K> keyMapper, - Function<? super T, ? extends U> valueMapper, - BinaryOperator<U> mergeFunction) { + Collector<T, ?, ConcurrentMap<K,U>> + toConcurrentMap(Function<? super T, ? extends K> keyMapper, + Function<? super T, ? extends U> valueMapper, + BinaryOperator<U> mergeFunction) { return toConcurrentMap(keyMapper, valueMapper, mergeFunction, ConcurrentHashMap::new); } /** - * Accumulate elements into a {@code ConcurrentMap} whose keys and values - * are the result of applying mapping functions to the input elements. If - * the mapped keys contains duplicates (according to {@link Object#equals(Object)}), + * Returns a {@code Collector} that accumulate elements into a + * {@code ConcurrentMap} whose keys and values are the result of applying + * the provided mapping functions to the input elements. + * + * <p>If the mapped keys contains duplicates (according to {@link Object#equals(Object)}), * the value mapping function is applied to each equal element, and the * results are merged using the provided merging function. The * {@code ConcurrentMap} is created by a provided supplier function. @@ -1202,15 +1322,14 @@ * @see #toMap(Function, Function, BinaryOperator, Supplier) */ public static <T, K, U, M extends ConcurrentMap<K, U>> - Collector<T, M> toConcurrentMap(Function<? super T, ? extends K> keyMapper, - Function<? super T, ? 
extends U> valueMapper, - BinaryOperator<U> mergeFunction, - Supplier<M> mapSupplier) { - BiFunction<M, T, M> accumulator = (map, element) -> { - map.merge(keyMapper.apply(element), valueMapper.apply(element), mergeFunction); - return map; - }; - return new CollectorImpl<>(mapSupplier, accumulator, mapMerger(mergeFunction), CH_CONCURRENT); + Collector<T, ?, M> toConcurrentMap(Function<? super T, ? extends K> keyMapper, + Function<? super T, ? extends U> valueMapper, + BinaryOperator<U> mergeFunction, + Supplier<M> mapSupplier) { + BiConsumer<M, T> accumulator + = (map, element) -> map.merge(keyMapper.apply(element), + valueMapper.apply(element), mergeFunction); + return new CollectorImpl<>(mapSupplier, accumulator, mapMerger(mergeFunction), CH_CONCURRENT_ID); } /** @@ -1222,14 +1341,15 @@ * @param mapper a mapping function to apply to each element * @return a {@code Collector} implementing the summary-statistics reduction * - * @see #toDoubleSummaryStatistics(ToDoubleFunction) - * @see #toLongSummaryStatistics(ToLongFunction) + * @see #summarizingDouble(ToDoubleFunction) + * @see #summarizingLong(ToLongFunction) */ public static <T> - Collector<T, IntSummaryStatistics> toIntSummaryStatistics(ToIntFunction<? super T> mapper) { - return new CollectorImpl<>(IntSummaryStatistics::new, - (r, t) -> { r.accept(mapper.applyAsInt(t)); return r; }, - (l, r) -> { l.combine(r); return l; }, CH_STRICT); + Collector<T, ?, IntSummaryStatistics> summarizingInt(ToIntFunction<? 
super T> mapper) { + return new CollectorImpl<T, IntSummaryStatistics, IntSummaryStatistics>( + IntSummaryStatistics::new, + (r, t) -> r.accept(mapper.applyAsInt(t)), + (l, r) -> { l.combine(r); return l; }, CH_ID); } /** @@ -1241,14 +1361,15 @@ * @param mapper the mapping function to apply to each element * @return a {@code Collector} implementing the summary-statistics reduction * - * @see #toDoubleSummaryStatistics(ToDoubleFunction) - * @see #toIntSummaryStatistics(ToIntFunction) + * @see #summarizingDouble(ToDoubleFunction) + * @see #summarizingInt(ToIntFunction) */ public static <T> - Collector<T, LongSummaryStatistics> toLongSummaryStatistics(ToLongFunction<? super T> mapper) { - return new CollectorImpl<>(LongSummaryStatistics::new, - (r, t) -> { r.accept(mapper.applyAsLong(t)); return r; }, - (l, r) -> { l.combine(r); return l; }, CH_STRICT); + Collector<T, ?, LongSummaryStatistics> summarizingLong(ToLongFunction<? super T> mapper) { + return new CollectorImpl<T, LongSummaryStatistics, LongSummaryStatistics>( + LongSummaryStatistics::new, + (r, t) -> r.accept(mapper.applyAsLong(t)), + (l, r) -> { l.combine(r); return l; }, CH_ID); } /** @@ -1260,14 +1381,15 @@ * @param mapper a mapping function to apply to each element * @return a {@code Collector} implementing the summary-statistics reduction * - * @see #toLongSummaryStatistics(ToLongFunction) - * @see #toIntSummaryStatistics(ToIntFunction) + * @see #summarizingLong(ToLongFunction) + * @see #summarizingInt(ToIntFunction) */ public static <T> - Collector<T, DoubleSummaryStatistics> toDoubleSummaryStatistics(ToDoubleFunction<? super T> mapper) { - return new CollectorImpl<>(DoubleSummaryStatistics::new, - (r, t) -> { r.accept(mapper.applyAsDouble(t)); return r; }, - (l, r) -> { l.combine(r); return l; }, CH_STRICT); + Collector<T, ?, DoubleSummaryStatistics> summarizingDouble(ToDoubleFunction<? 
super T> mapper) { + return new CollectorImpl<T, DoubleSummaryStatistics, DoubleSummaryStatistics>( + DoubleSummaryStatistics::new, + (r, t) -> r.accept(mapper.applyAsDouble(t)), + (l, r) -> { l.combine(r); return l; }, CH_ID); } /** @@ -1276,8 +1398,8 @@ private static final class Partition<T> extends AbstractMap<Boolean, T> implements Map<Boolean, T> { - T forTrue; - T forFalse; + final T forTrue; + final T forFalse; Partition(T forTrue, T forFalse) { this.forTrue = forTrue; @@ -1289,24 +1411,9 @@ return new AbstractSet<Map.Entry<Boolean, T>>() { @Override public Iterator<Map.Entry<Boolean, T>> iterator() { - - return new Iterator<Map.Entry<Boolean, T>>() { - int state = 0; - - @Override - public boolean hasNext() { - return state < 2; - } - - @Override - public Map.Entry<Boolean, T> next() { - if (state >= 2) - throw new NoSuchElementException(); - return (state++ == 0) - ? new SimpleImmutableEntry<>(false, forFalse) - : new SimpleImmutableEntry<>(true, forTrue); - } - }; + Map.Entry<Boolean, T> falseEntry = new SimpleImmutableEntry<>(false, forFalse); + Map.Entry<Boolean, T> trueEntry = new SimpleImmutableEntry<>(true, forTrue); + return Arrays.asList(falseEntry, trueEntry).iterator(); } @Override
--- a/src/share/classes/java/util/stream/DelegatingStream.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/stream/DelegatingStream.java Fri Jun 28 16:26:54 2013 -0400 @@ -209,7 +209,7 @@ } @Override - public <R> R collect(Collector<? super T, R> collector) { + public <R, A> R collect(Collector<? super T, A, ? extends R> collector) { return delegate.collect(collector); }
--- a/src/share/classes/java/util/stream/DoubleStream.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/stream/DoubleStream.java Fri Jun 28 16:26:54 2013 -0400 @@ -527,7 +527,7 @@ long count(); /** - * Returns an {@code OptionalDouble} describing the average of elements of + * Returns an {@code OptionalDouble} describing the arithmetic mean of elements of * this stream, or an empty optional if this stream is empty. The average * returned can vary depending upon the order in which elements are * encountered. This is due to accumulated rounding error in addition of
--- a/src/share/classes/java/util/stream/IntStream.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/stream/IntStream.java Fri Jun 28 16:26:54 2013 -0400 @@ -517,7 +517,7 @@ long count(); /** - * Returns an {@code OptionalDouble} describing the average of elements of + * Returns an {@code OptionalDouble} describing the arithmetic mean of elements of * this stream, or an empty optional if this stream is empty. This is a * special case of a * <a href="package-summary.html#MutableReduction">reduction</a>.
--- a/src/share/classes/java/util/stream/LongStream.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/stream/LongStream.java Fri Jun 28 16:26:54 2013 -0400 @@ -517,7 +517,7 @@ long count(); /** - * Returns an {@code OptionalDouble} describing the average of elements of + * Returns an {@code OptionalDouble} describing the arithmetic mean of elements of * this stream, or an empty optional if this stream is empty. This is a * special case of a * <a href="package-summary.html#MutableReduction">reduction</a>.
--- a/src/share/classes/java/util/stream/ReduceOps.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/stream/ReduceOps.java Fri Jun 28 16:26:54 2013 -0400 @@ -148,17 +148,17 @@ * reference values. * * @param <T> the type of the input elements - * @param <R> the type of the result + * @param <I> the type of the intermediate reduction result * @param collector a {@code Collector} defining the reduction * @return a {@code ReduceOp} implementing the reduction */ - public static <T,R> TerminalOp<T, R> - makeRef(Collector<? super T,R> collector) { - Supplier<R> supplier = Objects.requireNonNull(collector).resultSupplier(); - BiFunction<R, ? super T, R> accumulator = collector.accumulator(); - BinaryOperator<R> combiner = collector.combiner(); - class ReducingSink extends Box<R> - implements AccumulatingSink<T, R, ReducingSink> { + public static <T, I> TerminalOp<T, I> + makeRef(Collector<? super T, I, ?> collector) { + Supplier<I> supplier = Objects.requireNonNull(collector).supplier(); + BiConsumer<I, ? super T> accumulator = collector.accumulator(); + BinaryOperator<I> combiner = collector.combiner(); + class ReducingSink extends Box<I> + implements AccumulatingSink<T, I, ReducingSink> { @Override public void begin(long size) { state = supplier.get(); @@ -166,9 +166,7 @@ @Override public void accept(T t) { - R newResult = accumulator.apply(state, t); - if (state != newResult) - state = newResult; + accumulator.accept(state, t); } @Override @@ -176,7 +174,7 @@ state = combiner.apply(state, other.state); } } - return new ReduceOp<T, R, ReducingSink>(StreamShape.REFERENCE) { + return new ReduceOp<T, I, ReducingSink>(StreamShape.REFERENCE) { @Override public ReducingSink makeSink() { return new ReducingSink();
--- a/src/share/classes/java/util/stream/ReferencePipeline.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/stream/ReferencePipeline.java Fri Jun 28 16:26:54 2013 -0400 @@ -490,16 +490,21 @@ } @Override - public final <R> R collect(Collector<? super P_OUT, R> collector) { + public final <R, A> R collect(Collector<? super P_OUT, A, ? extends R> collector) { + A container; if (isParallel() && (collector.characteristics().contains(Collector.Characteristics.CONCURRENT)) && (!isOrdered() || collector.characteristics().contains(Collector.Characteristics.UNORDERED))) { - R container = collector.resultSupplier().get(); - BiFunction<R, ? super P_OUT, R> accumulator = collector.accumulator(); - forEach(u -> accumulator.apply(container, u)); - return container; + container = collector.supplier().get(); + BiConsumer<A, ? super P_OUT> accumulator = collector.accumulator(); + forEach(u -> accumulator.accept(container, u)); } - return evaluate(ReduceOps.makeRef(collector)); + else { + container = evaluate(ReduceOps.makeRef(collector)); + } + return collector.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH) + ? (R) container + : collector.finisher().apply(container); } @Override
--- a/src/share/classes/java/util/stream/Stream.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/stream/Stream.java Fri Jun 28 16:26:54 2013 -0400 @@ -651,12 +651,13 @@ * }</pre> * * @param <R> the type of the result + * @param <A> the intermediate accumulation type of the {@code Collector} * @param collector the {@code Collector} describing the reduction * @return the result of the reduction * @see #collect(Supplier, BiConsumer, BiConsumer) * @see Collectors */ - <R> R collect(Collector<? super T, R> collector); + <R, A> R collect(Collector<? super T, A, ? extends R> collector); /** * Returns the minimum element of this stream according to the provided
--- a/src/share/classes/java/util/stream/package-info.java Tue Aug 06 16:01:39 2013 -0700 +++ b/src/share/classes/java/util/stream/package-info.java Fri Jun 28 16:26:54 2013 -0400 @@ -547,7 +547,7 @@ * List<String> l = new ArrayList(Arrays.asList("one", "two")); * Stream<String> sl = l.stream(); * l.add("three"); - * String s = sl.collect(toStringJoiner(" ")).toString(); + * String s = sl.collect(joining(" ")); * }</pre> * First a list is created consisting of two strings: "one"; and "two". Then a stream is created from that list. * Next the list is modified by adding a third string: "three". Finally the elements of the stream are collected @@ -557,7 +557,7 @@ * <pre>{@code * List<String> l = new ArrayList(Arrays.asList("one", "two")); * Stream<String> sl = l.stream(); - * String s = sl.peek(s -> l.add("BAD LAMBDA")).collect(toStringJoiner(" ")).toString(); + * String s = sl.peek(s -> l.add("BAD LAMBDA")).collect(joining(" ")); * }</pre> * then a {@code ConcurrentModificationException} will be thrown since the {@code peek} operation will attempt * to add the string "BAD LAMBDA" to the list after the terminal operation has commenced.
--- a/test/java/util/stream/test/org/openjdk/tests/java/util/FillableStringTest.java Tue Aug 06 16:01:39 2013 -0700 +++ b/test/java/util/stream/test/org/openjdk/tests/java/util/FillableStringTest.java Fri Jun 28 16:26:54 2013 -0400 @@ -40,17 +40,17 @@ } public void testStringBuilder() { - String s = generate().collect(Collectors.toStringBuilder()).toString(); + String s = generate().collect(Collectors.joining()); assertEquals(s, "THREEFOURFIVE"); } public void testStringBuffer() { - String s = generate().collect(Collectors.toStringBuilder()).toString(); + String s = generate().collect(Collectors.joining()); assertEquals(s, "THREEFOURFIVE"); } public void testStringJoiner() { - String s = generate().collect(Collectors.toStringJoiner("-")).toString(); + String s = generate().collect(Collectors.joining("-")); assertEquals(s, "THREE-FOUR-FIVE"); } }
--- a/test/java/util/stream/test/org/openjdk/tests/java/util/stream/GroupByOpTest.java Tue Aug 06 16:01:39 2013 -0700 +++ b/test/java/util/stream/test/org/openjdk/tests/java/util/stream/GroupByOpTest.java Fri Jun 28 16:26:54 2013 -0400 @@ -36,7 +36,6 @@ import java.util.stream.LambdaTestHelpers; import java.util.stream.OpTestCase; import java.util.stream.Stream; -import java.util.stream.StreamOpFlagTestHelper; import java.util.stream.StreamTestDataProvider; import java.util.stream.TestData; @@ -59,13 +58,14 @@ public class GroupByOpTest extends OpTestCase { public void testBypassCollect() { - Collector<Integer, Map<Boolean, List<Integer>>> collector - = Collectors.groupingBy(LambdaTestHelpers.forPredicate(pEven, true, false)); + @SuppressWarnings("unchecked") + Collector<Integer, Map<Boolean, List<Integer>>, Map<Boolean, List<Integer>>> collector + = (Collector<Integer, Map<Boolean, List<Integer>>, Map<Boolean, List<Integer>>>) Collectors.groupingBy(LambdaTestHelpers.forPredicate(pEven, true, false)); - Map<Boolean, List<Integer>> m = collector.resultSupplier().get(); + Map<Boolean, List<Integer>> m = collector.supplier().get(); int[] ints = countTo(10).stream().mapToInt(e -> (int) e).toArray(); for (int i : ints) - m = collector.accumulator().apply(m, i); + collector.accumulator().accept(m, i); assertEquals(2, m.keySet().size()); for(Collection<Integer> group : m.values()) { @@ -130,7 +130,7 @@ // - Total number of values equals size of data for (MapperData<Integer, ?> md : getMapperData(data)) { - Collector<Integer, Map<Object, List<Integer>>> tab = Collectors.groupingBy(md.m); + Collector<Integer, ?, Map<Object, List<Integer>>> tab = Collectors.groupingBy(md.m); Map<Object, List<Integer>> result = withData(data) .terminal(s -> s, s -> s.collect(tab))
--- a/test/java/util/stream/test/org/openjdk/tests/java/util/stream/SummaryStatisticsTest.java Tue Aug 06 16:01:39 2013 -0700 +++ b/test/java/util/stream/test/org/openjdk/tests/java/util/stream/SummaryStatisticsTest.java Fri Jun 28 16:26:54 2013 -0400 @@ -43,9 +43,9 @@ public class SummaryStatisticsTest extends OpTestCase { public void testIntStatistics() { List<IntSummaryStatistics> instances = new ArrayList<>(); - instances.add(countTo(1000).stream().collect(Collectors.toIntSummaryStatistics(i -> i))); + instances.add(countTo(1000).stream().collect(Collectors.summarizingInt(i -> i))); instances.add(countTo(1000).stream().mapToInt(i -> i).summaryStatistics()); - instances.add(countTo(1000).parallelStream().collect(Collectors.toIntSummaryStatistics(i -> i))); + instances.add(countTo(1000).parallelStream().collect(Collectors.summarizingInt(i -> i))); instances.add(countTo(1000).parallelStream().mapToInt(i -> i).summaryStatistics()); for (IntSummaryStatistics stats : instances) { @@ -58,9 +58,9 @@ public void testLongStatistics() { List<LongSummaryStatistics> instances = new ArrayList<>(); - instances.add(countTo(1000).stream().collect(Collectors.toLongSummaryStatistics(i -> i))); + instances.add(countTo(1000).stream().collect(Collectors.summarizingLong(i -> i))); instances.add(countTo(1000).stream().mapToLong(i -> i).summaryStatistics()); - instances.add(countTo(1000).parallelStream().collect(Collectors.toLongSummaryStatistics(i -> i))); + instances.add(countTo(1000).parallelStream().collect(Collectors.summarizingLong(i -> i))); instances.add(countTo(1000).parallelStream().mapToLong(i -> i).summaryStatistics()); for (LongSummaryStatistics stats : instances) { @@ -73,9 +73,9 @@ public void testDoubleStatistics() { List<DoubleSummaryStatistics> instances = new ArrayList<>(); - instances.add(countTo(1000).stream().collect(Collectors.toDoubleSummaryStatistics(i -> i))); + instances.add(countTo(1000).stream().collect(Collectors.summarizingDouble(i -> i))); 
instances.add(countTo(1000).stream().mapToDouble(i -> i).summaryStatistics()); - instances.add(countTo(1000).parallelStream().collect(Collectors.toDoubleSummaryStatistics(i -> i))); + instances.add(countTo(1000).parallelStream().collect(Collectors.summarizingDouble(i -> i))); instances.add(countTo(1000).parallelStream().mapToDouble(i -> i).summaryStatistics()); for (DoubleSummaryStatistics stats : instances) {
--- a/test/java/util/stream/test/org/openjdk/tests/java/util/stream/TabulatorsTest.java Tue Aug 06 16:01:39 2013 -0700 +++ b/test/java/util/stream/test/org/openjdk/tests/java/util/stream/TabulatorsTest.java Fri Jun 28 16:26:54 2013 -0400 @@ -23,13 +23,17 @@ package org.openjdk.tests.java.util.stream; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; +import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; +import java.util.StringJoiner; import java.util.TreeMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListMap; @@ -53,7 +57,10 @@ import static java.util.stream.Collectors.partitioningBy; import static java.util.stream.Collectors.reducing; import static java.util.stream.Collectors.toCollection; +import static java.util.stream.Collectors.toConcurrentMap; import static java.util.stream.Collectors.toList; +import static java.util.stream.Collectors.toMap; +import static java.util.stream.Collectors.toSet; import static java.util.stream.LambdaTestHelpers.assertContents; import static java.util.stream.LambdaTestHelpers.assertContentsUnordered; import static java.util.stream.LambdaTestHelpers.mDoubler; @@ -65,16 +72,6 @@ */ @SuppressWarnings({"rawtypes", "unchecked"}) public class TabulatorsTest extends OpTestCase { - // There are 8 versions of groupingBy: - // groupingBy: { map supplier, not } x { downstream collector, not } x { concurrent, not } - // There are 2 versions of partition: { map supplier, not } - // There are 4 versions of toMap - // mappedTo(function, mapSupplier?, mergeFunction?) 
- // Each variety needs at least one test - // Plus a variety of multi-level tests (groupBy(..., partition), partition(..., groupBy)) - // Plus negative tests for mapping to null - // Each test should be matched by a nest of asserters (see TabulationAssertion...) - private static abstract class TabulationAssertion<T, U> { abstract void assertValue(U value, @@ -101,7 +98,7 @@ boolean ordered) throws ReflectiveOperationException { if (!clazz.isAssignableFrom(map.getClass())) fail(String.format("Class mismatch in GroupedMapAssertion: %s, %s", clazz, map.getClass())); - assertContentsUnordered(map.keySet(), source.get().map(classifier).collect(Collectors.toSet())); + assertContentsUnordered(map.keySet(), source.get().map(classifier).collect(toSet())); for (Map.Entry<K, ? extends V> entry : map.entrySet()) { K key = entry.getKey(); downstream.assertValue(entry.getValue(), @@ -111,6 +108,39 @@ } } + static class ToMapAssertion<T, K, V, M extends Map<K,V>> extends TabulationAssertion<T, M> { + private final Class<? extends Map> clazz; + private final Function<T, K> keyFn; + private final Function<T, V> valueFn; + private final BinaryOperator<V> mergeFn; + + ToMapAssertion(Function<T, K> keyFn, + Function<T, V> valueFn, + BinaryOperator<V> mergeFn, + Class<? 
extends Map> clazz) { + this.clazz = clazz; + this.keyFn = keyFn; + this.valueFn = valueFn; + this.mergeFn = mergeFn; + } + + @Override + void assertValue(M map, Supplier<Stream<T>> source, boolean ordered) throws ReflectiveOperationException { + Set<K> uniqueKeys = source.get().map(keyFn).collect(toSet()); + assertTrue(clazz.isAssignableFrom(map.getClass())); + assertEquals(uniqueKeys, map.keySet()); + source.get().forEach(t -> { + K key = keyFn.apply(t); + V v = source.get() + .filter(e -> key.equals(keyFn.apply(e))) + .map(valueFn) + .reduce(mergeFn) + .get(); + assertEquals(map.get(key), v); + }); + } + } + static class PartitionAssertion<T, D> extends TabulationAssertion<T, Map<Boolean,D>> { private final Predicate<T> predicate; private final TabulationAssertion<T,D> downstream; @@ -204,7 +234,7 @@ private <T> ResultAsserter<T> mapTabulationAsserter(boolean ordered) { return (act, exp, ord, par) -> { - if (par & (!ordered || !ord)) { + if (par && (!ordered || !ord)) { TabulatorsTest.nestedMapEqualityAssertion(act, exp); } else { @@ -215,7 +245,7 @@ private<T, M extends Map> void exerciseMapTabulation(TestData<T, Stream<T>> data, - Collector<T, ? extends M> collector, + Collector<T, ?, ? 
extends M> collector, TabulationAssertion<T, M> assertion) throws ReflectiveOperationException { boolean ordered = !collector.characteristics().contains(Collector.Characteristics.UNORDERED); @@ -248,6 +278,172 @@ assertEquals(o1, o2); } + private<T, R> void assertCollect(TestData.OfRef<T> data, + Collector<T, ?, R> collector, + Function<Stream<T>, R> streamReduction) { + R check = streamReduction.apply(data.stream()); + withData(data).terminal(s -> s.collect(collector)).expectedResult(check).exercise(); + } + + @Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class) + public void testReduce(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException { + assertCollect(data, Collectors.reducing(0, Integer::sum), + s -> s.reduce(0, Integer::sum)); + assertCollect(data, Collectors.reducing(Integer.MAX_VALUE, Integer::min), + s -> s.min(Integer::compare).orElse(Integer.MAX_VALUE)); + assertCollect(data, Collectors.reducing(Integer.MIN_VALUE, Integer::max), + s -> s.max(Integer::compare).orElse(Integer.MIN_VALUE)); + + assertCollect(data, Collectors.reducing(Integer::sum), + s -> s.reduce(Integer::sum)); + assertCollect(data, Collectors.minBy(Comparator.naturalOrder()), + s -> s.min(Integer::compare)); + assertCollect(data, Collectors.maxBy(Comparator.naturalOrder()), + s -> s.max(Integer::compare)); + + assertCollect(data, Collectors.reducing(0, x -> x*2, Integer::sum), + s -> s.map(x -> x*2).reduce(0, Integer::sum)); + + assertCollect(data, Collectors.summingLong(x -> x * 2L), + s -> s.map(x -> x*2L).reduce(0L, Long::sum)); + assertCollect(data, Collectors.summingInt(x -> x * 2), + s -> s.map(x -> x*2).reduce(0, Integer::sum)); + assertCollect(data, Collectors.summingDouble(x -> x * 2.0d), + s -> s.map(x -> x * 2.0d).reduce(0.0d, Double::sum)); + + assertCollect(data, Collectors.averagingInt(x -> x * 2), + s -> s.mapToInt(x -> x * 2).average().orElse(0)); + assertCollect(data, Collectors.averagingLong(x -> x 
* 2), + s -> s.mapToLong(x -> x * 2).average().orElse(0)); + assertCollect(data, Collectors.averagingDouble(x -> x * 2), + s -> s.mapToDouble(x -> x * 2).average().orElse(0)); + + // Test explicit Collector.of + Collector<Integer, long[], Double> avg2xint = Collector.of(() -> new long[2], + (a, b) -> { + a[0] += b * 2; + a[1]++; + }, + (a, b) -> { + a[0] += b[0]; + a[1] += b[1]; + return a; + }, + a -> a[1] == 0 ? 0.0d : (double) a[0] / a[1]); + assertCollect(data, avg2xint, + s -> s.mapToInt(x -> x * 2).average().orElse(0)); + } + + @Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class) + public void testJoin(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException { + withData(data) + .terminal(s -> s.map(Object::toString).collect(Collectors.joining())) + .expectedResult(join(data, "")) + .exercise(); + + Collector<String, StringBuilder, String> likeJoining = Collector.of(StringBuilder::new, StringBuilder::append, (sb1, sb2) -> sb1.append(sb2.toString()), StringBuilder::toString); + withData(data) + .terminal(s -> s.map(Object::toString).collect(likeJoining)) + .expectedResult(join(data, "")) + .exercise(); + + withData(data) + .terminal(s -> s.map(Object::toString).collect(Collectors.joining(","))) + .expectedResult(join(data, ",")) + .exercise(); + + withData(data) + .terminal(s -> s.map(Object::toString).collect(Collectors.joining(",", "[", "]"))) + .expectedResult("[" + join(data, ",") + "]") + .exercise(); + + withData(data) + .terminal(s -> s.map(Object::toString) + .collect(StringBuilder::new, StringBuilder::append, StringBuilder::append) + .toString()) + .expectedResult(join(data, "")) + .exercise(); + + withData(data) + .terminal(s -> s.map(Object::toString) + .collect(() -> new StringJoiner(","), + (sj, cs) -> sj.add(cs), + (j1, j2) -> j1.merge(j2)) + .toString()) + .expectedResult(join(data, ",")) + .exercise(); + + withData(data) + .terminal(s -> s.map(Object::toString) + .collect(() -> 
new StringJoiner(",", "[", "]"), + (sj, cs) -> sj.add(cs), + (j1, j2) -> j1.merge(j2)) + .toString()) + .expectedResult("[" + join(data, ",") + "]") + .exercise(); + } + + private<T> String join(TestData.OfRef<T> data, String delim) { + StringBuilder sb = new StringBuilder(); + boolean first = true; + for (T i : data) { + if (!first) + sb.append(delim); + sb.append(i.toString()); + first = false; + } + return sb.toString(); + } + + @Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class) + public void testSimpleToMap(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException { + Function<Integer, Integer> keyFn = i -> i * 2; + Function<Integer, Integer> valueFn = i -> i * 4; + + List<Integer> dataAsList = Arrays.asList(data.stream().toArray(Integer[]::new)); + Set<Integer> dataAsSet = new HashSet<>(dataAsList); + + BinaryOperator<Integer> sum = Integer::sum; + for (BinaryOperator<Integer> op : Arrays.asList((u, v) -> u, + (u, v) -> v, + sum)) { + try { + exerciseMapTabulation(data, toMap(keyFn, valueFn), + new ToMapAssertion<>(keyFn, valueFn, op, HashMap.class)); + if (dataAsList.size() != dataAsSet.size()) + fail("Expected ISE on input with duplicates"); + } + catch (IllegalStateException e) { + if (dataAsList.size() == dataAsSet.size()) + fail("Expected no ISE on input without duplicates"); + } + + exerciseMapTabulation(data, toMap(keyFn, valueFn, op), + new ToMapAssertion<>(keyFn, valueFn, op, HashMap.class)); + + exerciseMapTabulation(data, toMap(keyFn, valueFn, op, TreeMap::new), + new ToMapAssertion<>(keyFn, valueFn, op, TreeMap.class)); + } + + // For concurrent maps, only use commutative merge functions + try { + exerciseMapTabulation(data, toConcurrentMap(keyFn, valueFn), + new ToMapAssertion<>(keyFn, valueFn, sum, ConcurrentHashMap.class)); + if (dataAsList.size() != dataAsSet.size()) + fail("Expected ISE on input with duplicates"); + } + catch (IllegalStateException e) { + if (dataAsList.size() 
== dataAsSet.size()) + fail("Expected no ISE on input without duplicates"); + } + + exerciseMapTabulation(data, toConcurrentMap(keyFn, valueFn, sum), + new ToMapAssertion<>(keyFn, valueFn, sum, ConcurrentHashMap.class)); + + exerciseMapTabulation(data, toConcurrentMap(keyFn, valueFn, sum, ConcurrentSkipListMap::new), + new ToMapAssertion<>(keyFn, valueFn, sum, ConcurrentSkipListMap.class)); + } + @Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class) public void testSimpleGroupBy(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException { Function<Integer, Integer> classifier = i -> i % 3;
--- a/test/jdk/lambda/MethodReferenceTestInstanceMethod.java Tue Aug 06 16:01:39 2013 -0700 +++ b/test/jdk/lambda/MethodReferenceTestInstanceMethod.java Fri Jun 28 16:26:54 2013 -0400 @@ -47,7 +47,7 @@ } public void testStringBuffer() { - String s = generate().collect(Collectors.toStringBuilder()).toString(); + String s = generate().collect(Collectors.joining()); assertEquals(s, "THREEFOURFIVE"); }
--- a/test/jdk/lambda/separate/TestHarness.java Tue Aug 06 16:01:39 2013 -0700 +++ b/test/jdk/lambda/separate/TestHarness.java Fri Jun 28 16:26:54 2013 -0400 @@ -119,7 +119,7 @@ Class stub = new Class(specimen.getName(), cm); String params = - Arrays.asList(args).stream().collect(Collectors.toStringJoiner(", ")).toString(); + Arrays.asList(args).stream().collect(Collectors.joining(", ")).toString(); ConcreteMethod sm = new ConcreteMethod( method.getReturnType(), method.getName(), @@ -150,7 +150,7 @@ null, Arrays.asList((Method)method)); Class cstub = new Class(specimen.getName()); - String params = Arrays.asList(args).stream().collect(Collectors.toStringJoiner(", ")).toString(); + String params = Arrays.asList(args).stream().collect(Collectors.joining(", ")).toString(); ConcreteMethod sm = new ConcreteMethod( "int", SourceModel.stdMethodName,