The code I'm working with
package com.skimmer;
import java.util.ArrayList;
import java.util.List;
import java.util.Spliterator;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.LongStream;
import java.util.stream.Stream;
public class App {

    /**
     * Splits a stream's Spliterator into pieces and processes each piece on a
     * pool thread, summing the element counts.
     *
     * NOTE(review): repeatedly calling trySplit() on the same side of the split
     * tree yields a geometric size distribution (n/2, n/4, n/8, ...), not equal
     * n/splits chunks — that is inherent to this splitting strategy, which is
     * what the question observes. For an even distribution, split recursively on
     * BOTH halves, or read fixed-size batches from a single sequential source
     * (e.g. a BufferedReader over a large CSV) and submit each batch as a task.
     *
     * @throws InterruptedException if the calling thread is interrupted while
     *         waiting for the tasks
     * @throws ExecutionException if any worker task throws
     */
    public static void main(String[] args) throws InterruptedException, ExecutionException {
        // Test data. (The sample output in the question corresponds to
        // 1,000,000 elements, not the 10,000,000 generated here.)
        Stream<String> test = LongStream.range(0, 10000000L).mapToObj(i -> i + "-test");

        Spliterator<String> spliterator = test.parallel().spliterator();
        List<Callable<Long>> callableList = new ArrayList<>();

        // BUG FIX: after every trySplit() the root spliterator retains the
        // trailing half of the data, and the original code never submitted it —
        // silently dropping n/2 elements. Register it first as "future-0"
        // (matching the question's sample output).
        callableList.add(new Worker(spliterator, "future-0"));

        // Each trySplit() returns a prefix piece; that piece is itself split on
        // the next iteration, so worker k ends up with roughly n/2^(k+1) elements.
        int totalSplits = 1;
        while ((spliterator = spliterator.trySplit()) != null) {
            callableList.add(new Worker(spliterator, "future-" + totalSplits));
            totalSplits++;
        }

        // Bound the pool by available cores rather than one thread per split:
        // a thread per split over-subscribes the CPU as n grows. (This also
        // avoids newFixedThreadPool(0), which throws, when nothing splits.)
        ExecutorService executor = Executors.newFixedThreadPool(
                Math.min(totalSplits, Runtime.getRuntime().availableProcessors()));
        try {
            List<Future<Long>> futures = executor.invokeAll(callableList);
            // Single-threaded summation — no AtomicLong needed here.
            long counter = 0;
            for (Future<Long> future : futures) {
                counter += future.get();
            }
            System.out.println("Total processed " + counter);
            System.out.println("Total splits " + totalSplits);
        } finally {
            executor.shutdown();
        }
    }

    /**
     * Counts the elements remaining in its spliterator when call() runs.
     * NOTE(review): the spliterator may shrink between construction and call()
     * if it is split again in the meantime — that is exactly what main() does.
     */
    public static class Worker implements Callable<Long> {

        private final Spliterator<String> spliterator;
        private final String name;

        public Worker(Spliterator<String> spliterator, String name) {
            this.spliterator = spliterator;
            this.name = name;
        }

        @Override
        public Long call() {
            // AtomicLong is used only because the lambda needs a mutable
            // counter it can capture; forEachRemaining runs on this one thread.
            AtomicLong counter = new AtomicLong(0);
            spliterator.forEachRemaining(s -> {
                // We'll assume busy processing code here
                counter.getAndIncrement();
            });
            long total = counter.get();
            System.out.println(name + " Total processed : " + total);
            return total;
        }
    }
}
The output
future-11 Total processed : 244
future-10 Total processed : 488
future-9 Total processed : 977
future-12 Total processed : 122
future-7 Total processed : 3906
future-13 Total processed : 61
future-8 Total processed : 1953
future-6 Total processed : 7813
future-14 Total processed : 31
future-5 Total processed : 15625
future-15 Total processed : 15
future-4 Total processed : 31250
future-17 Total processed : 4
future-18 Total processed : 2
future-19 Total processed : 1
future-16 Total processed : 8
future-3 Total processed : 62500
future-2 Total processed : 125000
future-1 Total processed : 250000
future-0 Total processed : 500000
Total processed 1000000
Total splits 20
My problem/question: The first trySplit (and the first future task, 'future-0') gets exactly n/2 of the total elements to begin processing. The first few splits take a long time to complete, and this gets worse as n grows. Is there a way to process a stream where each future/callable gets an equal share of the elements, i.e. N/splits — for example 1000000/20 = 50000?
Desired results
future-11 Total processed : 50000
future-10 Total processed : 50000
future-9 Total processed : 50000
future-12 Total processed : 50000
future-7 Total processed : 50000
future-13 Total processed : 50000
future-8 Total processed : 50000
future-6 Total processed : 50000
future-14 Total processed : 50000
future-5 Total processed : 50000
future-15 Total processed : 50000
future-4 Total processed : 50000
future-17 Total processed : 50000
future-18 Total processed : 50000
future-19 Total processed : 50000
future-16 Total processed : 50000
future-3 Total processed : 50000
future-2 Total processed : 50000
future-1 Total processed : 50000
future-0 Total processed : 50000
Total processed 1000000
Total splits 20
Follow-up question: If Spliterator cannot do this, what other approach/solution would be best for processing large streams concurrently?
Practical scenario: processing a large (6 GB) CSV file that is too big to hold in memory.