1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
|
/*
* Copyright 2023 The original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.morling.onebrc;
import static java.util.stream.Collectors.*;
import java.io.File;
import java.io.RandomAccessFile;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class CalculateAverage_phd3 {
private static final int NUM_THREADS = Runtime.getRuntime().availableProcessors() * 2;
private static final String FILE = "./measurements.txt";
private static final long FILE_SIZE = new File(FILE).length();
// A chunk is a unit for processing, the file will be divided in chunks of the following size
private static final int CHUNK_SIZE = 65536 * 1024;
// Read a little more data into the buffer to finish processing current line
private static final int PADDING = 512;
// Minor : Precompute powers to avoid recalculating while parsing doubles (temperatures)
private static final double[] POWERS_OF_10 = IntStream.range(0, 6).mapToDouble(x -> Math.pow(10.0, x)).toArray();
/**
* A Utility to print aggregated information in the desired format
*/
private record ResultRow(double min, double mean, double max) {
public String toString() {
return round(min) + "/" + round(mean) + "/" + round(max);
}
private double round(double value) {
return Math.round(value * 10.0) / 10.0;
}
};
public static ResultRow resultRow(AggregationInfo aggregationInfo) {
return new ResultRow(aggregationInfo.min, (Math.round(aggregationInfo.sum * 10.0) / 10.0) / (aggregationInfo.count), aggregationInfo.max);
}
public static void main(String[] args) throws Exception {
long fileLength = new File(FILE).length();
int numChunks = (int) Math.ceil(fileLength * 1.0 / CHUNK_SIZE);
ExecutorService executorService = Executors.newFixedThreadPool(NUM_THREADS);
BufferDataProvider provider = new RandomAccessBasedProvider(FILE, FILE_SIZE);
List<Future<LinearProbingHashMap>> futures = new ArrayList<>();
// Process chunks in parallel
for (int chunkIndex = 0; chunkIndex < numChunks; chunkIndex++) {
futures.add(executorService.submit(new Aggregator(chunkIndex, provider)));
}
executorService.shutdown();
executorService.awaitTermination(10, TimeUnit.MINUTES);
Map<String, AggregationInfo> info = futures.stream().map(f -> {
try {
return f.get();
}
catch (ExecutionException | InterruptedException e) {
throw new RuntimeException(e);
}
})
.map(LinearProbingHashMap::toMap)
.flatMap(map -> map.entrySet().stream())
.sequential()
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, AggregationInfo::update));
Map<String, ResultRow> measurements = new TreeMap<>(info.entrySet().stream()
.collect(toMap(Map.Entry::getKey, e -> resultRow(e.getValue()))));
System.out.println(measurements);
}
/**
* Stores required running aggregation information to be able to compute min/max/average at the end
*/
private static class AggregationInfo {
double min = Double.POSITIVE_INFINITY;
double max = Double.NEGATIVE_INFINITY;
double sum;
long count;
public AggregationInfo update(AggregationInfo update) {
this.count += update.count;
this.sum += update.sum;
if (this.max < update.max) {
this.max = update.max;
}
if (this.min > update.min) {
this.min = update.min;
}
return this;
}
public AggregationInfo update(double value) {
this.count++;
this.sum += value;
if (this.max < value) {
this.max = value;
}
if (this.min > value) {
this.min = value;
}
return this;
}
}
private interface BufferDataProvider {
int read(byte[] buffer, long offset) throws Exception;
}
/**
* uses RandomAccessFile seek and read APIs to load data into a buffer.
*/
private static class RandomAccessBasedProvider implements BufferDataProvider {
private final String filePath;
RandomAccessBasedProvider(String filePath, long fileSize) {
this.filePath = filePath;
}
@Override
public int read(byte[] buffer, long offset) throws Exception {
RandomAccessFile file = null;
try {
file = new RandomAccessFile(filePath, "r");
file.seek(offset);
return file.read(buffer);
}
finally {
if (file != null) {
file.close();
}
}
}
}
/**
* Task to processes a chunk of file and return a custom linear probing hashmap for performance
*/
private static class Aggregator implements Callable<LinearProbingHashMap> {
private final long startByte;
private final BufferDataProvider dataProvider;
public Aggregator(long chunkIndex, BufferDataProvider dataProvider) {
this.startByte = chunkIndex * CHUNK_SIZE;
this.dataProvider = dataProvider;
}
@Override
public LinearProbingHashMap call() {
try {
// offset for the last byte to be processed (excluded)
long endByte = Math.min(startByte + CHUNK_SIZE, FILE_SIZE);
// read a little more than needed to cover next entry if needed
long bufferSize = endByte - startByte + ((endByte == FILE_SIZE) ? 0 : PADDING);
byte[] buffer = new byte[(int) bufferSize];
int bytes = dataProvider.read(buffer, startByte);
// Partial aggregation in a hashmap
return processBuffer(buffer, startByte == 0, endByte - startByte);
}
catch (Throwable e) {
throw new RuntimeException(e);
}
}
private static LinearProbingHashMap processBuffer(byte[] buffer, boolean isFileStart, long nextChunkStart) {
int start = 0;
// Move to the next entry after '\n'. Don't do this if we're at the start of
// the file to avoid missing first entry.
if (!isFileStart) {
while (buffer[start] != '\n') {
start++;
}
start += 1;
}
LinearProbingHashMap chunkLocalMap = new LinearProbingHashMap();
while (true) {
LineInfo lineInfo = getNextLine(buffer, start);
byte[] keyBytes = new byte[lineInfo.semicolonIndex - start];
System.arraycopy(buffer, start, keyBytes, 0, keyBytes.length);
double value = parseDouble(buffer, lineInfo.semicolonIndex + 1, lineInfo.nextStart - 1);
// Update aggregated value for the given key with the new line
AggregationInfo info = chunkLocalMap.get(keyBytes, lineInfo.keyHash);
info.update(value);
if ((lineInfo.nextStart > nextChunkStart) || (lineInfo.nextStart >= buffer.length)) {
// we are already at a point where the next line will be processed in the next chunk,
// so the job is done here
break;
}
start = lineInfo.nextStart();
}
return chunkLocalMap;
}
/**
* Converts bytes to double value without intermediate string conversion, faster than Double.parseDouble.
*/
private static double parseDouble(byte[] bytes, int offset, int end) {
boolean negative = (bytes[offset] == '-');
int current = negative ? offset + 1 : offset;
int preFloat = 0;
while (current < end && bytes[current] != '.') {
preFloat = (preFloat * 10) + (bytes[current++] - '0');
}
current++;
int postFloatStart = current;
int postFloat = 0;
while (current < end) {
postFloat = (postFloat * 10) + (bytes[current++] - '0');
}
return (preFloat + ((postFloat) / POWERS_OF_10[end - postFloatStart])) * (negative ? -1 : 1);
}
/**
* Identifies indexes of the next ';' and '\n', which will be used to get entry key and value from line. Also
* computes the hash value for the key while iterating.
*/
private static LineInfo getNextLine(byte[] buffer, int start) {
// caller guarantees that the access is in bounds, so no index check
int hash = 0;
while (buffer[start] != ';') {
start++;
hash = hash * 31 + buffer[start];
}
// The following is just to further reduce the probability of collisions
hash = hash ^ (hash << 16);
int semicolonIndex = start;
// caller guarantees that the access is in bounds, so no index check
while (buffer[start] != '\n') {
start++;
}
return new LineInfo(semicolonIndex, start + 1, hash);
}
}
private record LineInfo(int semicolonIndex, int nextStart, int keyHash) {
}
/**
* A simple map with pre-configured fixed bucket count. With 2^13 buckets and current hash function, seeing 4
* collisions which is not too bad. Every bucket is implemented with a linked list. The map is NOT thread safe.
*/
private static class LinearProbingHashMap {
private final static int BUCKET_COUNT = 8191;
private final Node[] buckets;
LinearProbingHashMap() {
this.buckets = new Node[BUCKET_COUNT];
}
/**
* Given a key, returns the current value of AggregationInfo. If not present, creates a new empty node at the
* front of the bucket
*/
public AggregationInfo get(byte[] key, int keyHash) {
// find bucket index through bitwise AND, works for bucketCount = (2^p - 1)
int bucketIndex = BUCKET_COUNT & keyHash;
Node current = buckets[bucketIndex];
while (current != null) {
if (Arrays.equals(current.entry.key(), key)) {
return current.entry.aggregationInfo();
}
current = current.next;
}
// Entry does not exist, so add a new node in the linked list
AggregationInfo newInfo = new AggregationInfo();
KeyValuePair pair = new KeyValuePair(key, keyHash, newInfo);
Node newNode = new Node(pair, buckets[bucketIndex]);
buckets[bucketIndex] = newNode;
return newNode.entry.aggregationInfo();
}
/**
* A helper to convert to Java's hash map to build the final aggregation after partial aggregations
*/
private Map<String, AggregationInfo> toMap() {
Map<String, AggregationInfo> map = new HashMap<>();
for (Node bucket : buckets) {
while (bucket != null) {
map.put(new String(bucket.entry.key, StandardCharsets.UTF_8), bucket.entry.aggregationInfo());
bucket = bucket.next;
}
}
return map;
}
}
/**
* Linked List node to implement a bucket of custom hash map
*/
private static class Node {
KeyValuePair entry;
Node next;
public Node(KeyValuePair entry, Node next) {
this.entry = entry;
this.next = next;
}
}
/**
* a wrapper class to store information needed for storing a measurement information in the hashmap
*/
private record KeyValuePair(byte[] key, int keyHash, AggregationInfo aggregationInfo) {
}
}
|