Commit 6ae098c
refactor v2 code
1 parent a8250a8, commit 6ae098c

2 files changed: 10 additions, 6 deletions
src/main/java/org/hdf5javalib/examples/hdf5examples/HDF5Debug.java
Lines changed: 6 additions & 2 deletions

@@ -16,6 +16,8 @@
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;
+import java.time.Duration;
+import java.time.Instant;
 import java.util.Arrays;
 import java.util.OptionalDouble;

@@ -48,10 +50,10 @@ private void run() {
         // ATL03_20250302235544_11742607_006_01
         // Path dirPath = Paths.get(Objects.requireNonNull(HDF5Debug.class.getClassLoader().getResource("HDF5Examples/h5ex_g_compact2.h5")).toURI());
         // Path dirPath = Paths.get("c:/users/karln/Downloads/ATL03_20250302235544_11742607_007_01.h5");
-        // Path dirPath = Paths.get("c:/users/karln/Downloads/ATL03_20250302235544_11742607_006_01.h5");
+        Path dirPath = Paths.get("c:/users/karnicho/Downloads/ATL03_20250302235544_11742607_006_01.h5");
         // Path dirPath = Paths.get("c:/users/karln/Downloads/SMAP_L1B_TB_57204_D_20251016T224815_R19240_001.h5");
         // Path dirPath = Paths.get("c:/users/karln/Downloads/ATL08_20250610011615_13002704_007_01.h5");
-        Path dirPath = Paths.get("c:/users/karln/Downloads/SMAP_L2_SM_P_55348_A_20250612T001323_R19240_001.h5");
+        // Path dirPath = Paths.get("c:/users/karln/Downloads/SMAP_L2_SM_P_55348_A_20250612T001323_R19240_001.h5");

         processFile(dirPath);

@@ -62,6 +64,7 @@ private void run() {
     // Generalized method to process the file and apply a custom action per dataset
     private static void processFile(Path filePath) {
         try (SeekableByteChannel channel = Files.newByteChannel(filePath, StandardOpenOption.READ)) {
+            Instant start = Instant.now();
            HdfFileReader reader = new HdfFileReader(channel).readFile();

            for (HdfDataset dataSet : reader.getDatasets()) {

@@ -78,6 +81,7 @@ private static void processFile(Path filePath) {
            // HdfDisplayUtils.displayData(channel, dataSet, reader, HdfDisplayUtils.DisplayMode.SUMMARY_STATS);
            //// displayScalarData(channel, dataSet, HdfFloatPoint.class, reader);

+            System.out.println("Total duration: " + Duration.between(start, Instant.now()));
        } catch (Exception e) {
            log.error("Exception in processFile: {}", filePath, e);
        }
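The HDF5Debug change times the whole read-and-process pass: Instant.now() is captured once the channel is open, and the elapsed Duration is printed after the dataset loop finishes. A minimal, self-contained sketch of the same java.time idiom (the class name and the Thread.sleep stand-in for the real HDF5 work are illustrative, not from the repository):

import java.time.Duration;
import java.time.Instant;

public class TimingSketch {
    public static void main(String[] args) throws InterruptedException {
        // Capture the wall-clock start before the work begins.
        Instant start = Instant.now();

        // Stand-in for the real work (reading the HDF5 file and displaying its datasets).
        Thread.sleep(250);

        // Duration.between measures elapsed wall-clock time; its toString() is
        // ISO-8601, e.g. "PT0.25S", which is the format the commit's println emits.
        System.out.println("Total duration: " + Duration.between(start, Instant.now()));
    }
}

Note that in the commit the timer starts after the channel is opened and the println sits at the end of the try block, so a failure anywhere in the dataset loop skips the timing line.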

src/main/java/org/hdf5javalib/utils/HdfDisplayUtils.java
Lines changed: 4 additions & 4 deletions

@@ -224,20 +224,20 @@ private static <T extends Comparable<T>> void displayAggregationForDataset(Seeka
         switch (ds.getDimensionality()) {
             case 0:
                 streamType = "streamScalar";
-                aggregationResult = aggregateStream(dataSource.streamScalar(), mode);
+                aggregationResult = aggregateStream(dataSource.parallelStreamScalar(), mode);
                 break;
             case 1:
                 streamType = "streamVector";
-                aggregationResult = aggregateStream(dataSource.streamVector(), mode);
+                aggregationResult = aggregateStream(dataSource.parallelStreamVector(), mode);
                 break;
             case 2:
                 streamType = "streamMatrix";
-                Stream<T> matrixStream = dataSource.streamMatrix().flatMap(Arrays::stream);
+                Stream<T> matrixStream = dataSource.parallelStreamMatrix().flatMap(Arrays::stream);
                 aggregationResult = aggregateStream(matrixStream, mode);
                 break;
             default:
                 streamType = "streamFlattened";
-                aggregationResult = aggregateStream(dataSource.streamFlattened(), mode);
+                aggregationResult = aggregateStream(dataSource.parallelStreamFlattened(), mode);
                 break;
         }
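The HdfDisplayUtils change keeps each dimensionality branch structurally the same but swaps the sequential stream accessors (streamScalar, streamVector, streamMatrix, streamFlattened) for their parallel counterparts before handing the stream to aggregateStream. A minimal sketch of the sequential-versus-parallel aggregation pattern using only JDK streams (the matrix literal and summaryStatistics aggregation are illustrative stand-ins; the real code aggregates the library's dataSource streams through aggregateStream):

import java.util.Arrays;
import java.util.DoubleSummaryStatistics;
import java.util.stream.Stream;

public class ParallelAggregationSketch {
    public static void main(String[] args) {
        // Illustrative 2-D data standing in for a matrix-shaped dataset.
        double[][] matrix = {
                {1.0, 2.0, 3.0},
                {4.0, 5.0, 6.0}
        };

        // Sequential pipeline: flatten the rows, then aggregate.
        DoubleSummaryStatistics sequential = Stream.of(matrix)
                .flatMapToDouble(Arrays::stream)
                .summaryStatistics();

        // Parallel pipeline: identical stages, but parallel() lets the terminal
        // aggregation run across the common fork-join pool; the result is the same.
        DoubleSummaryStatistics parallel = Stream.of(matrix)
                .parallel()
                .flatMapToDouble(Arrays::stream)
                .summaryStatistics();

        System.out.println("sequential: " + sequential);
        System.out.println("parallel:   " + parallel);
    }
}

Parallel streams only pay off when the element count or per-element work is large enough to cover the fork-join overhead, and the downstream aggregation must be an associative, interference-free reduction; presumably both hold for the summary statistics that displayAggregationForDataset computes over large HDF5 datasets.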
