Commit 5dbf758

SonarQube cleanup

1 parent 3469ecf commit 5dbf758

File tree: 2 files changed, +15 -15 lines

src/main/java/org/hdf5javalib/examples/hdf5examples/HDF5Debug.java

Lines changed: 3 additions & 3 deletions
@@ -47,9 +47,9 @@ private void run() {
             // List all .h5 files in HDF5Examples resources directory
             // ATL03_20250302235544_11742607_006_01
             // Path dirPath = Paths.get(Objects.requireNonNull(HDF5Debug.class.getClassLoader().getResource("HDF5Examples/h5ex_g_compact2.h5")).toURI());
-            // Path dirPath = Paths.get("c:/users/karnicho/Downloads/ATL03_20250302235544_11742607_007_01.h5");
-            // Path dirPath = Paths.get("c:/users/karnicho/Downloads/ATL03_20250302235544_11742607_006_01.h5");
-            Path dirPath = Paths.get("c:/users/karnicho/Downloads/SMAP_L1B_TB_57204_D_20251016T224815_R19240_001.h5");
+            // Path dirPath = Paths.get("c:/users/karln/Downloads/ATL03_20250302235544_11742607_007_01.h5");
+            // Path dirPath = Paths.get("c:/users/karln/Downloads/ATL03_20250302235544_11742607_006_01.h5");
+            Path dirPath = Paths.get("c:/users/karln/Downloads/SMAP_L1B_TB_57204_D_20251016T224815_R19240_001.h5");
             processFile(dirPath);
         } catch (Exception e) {
             throw new IllegalStateException(e);

src/main/java/org/hdf5javalib/hdffile/infrastructure/fractalheap/FractalHeap.java

Lines changed: 12 additions & 12 deletions
@@ -313,23 +313,17 @@ private static Block readDirectBlock(SeekableByteChannel channel, FractalHeapHea
         if (dataSize < 0) {
             throw new IOException("Invalid data size in direct block");
         }
-        // Determine the maximum number of bytes that can be read
-        long fSize = channel.size();
-        long fPosition = channel.position();
-        long bytesRemainingInFile = fSize - fPosition;
-
-        // The actual size to read is the smaller of the two values
-        long actualReadSize = Math.min(dataSize, bytesRemainingInFile);
-        headerBuffer = ByteBuffer.allocate((int) actualReadSize).order(ByteOrder.LITTLE_ENDIAN);
+        blockSize = dataSize - headerSize;
+        headerBuffer = ByteBuffer.allocate((int) blockSize).order(ByteOrder.LITTLE_ENDIAN);
         bytesRead = channel.read(headerBuffer);
-        if (bytesRead != actualReadSize)
+        if (bytesRead != blockSize)
             throw new IllegalStateException();
         headerBuffer.flip();

         byte[] data = headerBuffer.array();
         DirectBlock db = new DirectBlock();
         db.blockOffset = blockOffset.getInstance(Long.class);
-        db.blockSize = actualReadSize;
+        db.blockSize = dataSize;
         db.data = data;
         db.filterMask = filterMask;
         db.checksum = checksum;
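
Note on the readDirectBlock change: the removed logic capped the read at the bytes remaining in the file (Math.min(dataSize, bytesRemainingInFile)), which could silently truncate a direct block near EOF; the new code sizes the buffer as dataSize - headerSize, the block's declared size minus the header bytes already consumed, and fails fast on a short read. A minimal sketch of that exact-size read pattern, with placeholder names (readExactly is illustrative, not a FractalHeap method):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.SeekableByteChannel;

final class ExactReadSketch {
    // Read exactly `size` bytes from the channel's current position, or throw.
    static ByteBuffer readExactly(SeekableByteChannel channel, long size) throws IOException {
        ByteBuffer buf = ByteBuffer.allocate((int) size).order(ByteOrder.LITTLE_ENDIAN);
        // A single read() may legally return fewer bytes than requested,
        // so loop until the buffer is full or the channel reports EOF.
        while (buf.hasRemaining()) {
            if (channel.read(buf) < 0) {
                throw new IllegalStateException("EOF before " + size + " bytes were read");
            }
        }
        buf.flip();
        return buf;
    }
}

One caveat: the committed code issues a single channel.read() and treats a short read as fatal, which is safe for local files but stricter than the SeekableByteChannel contract requires; the loop above is the defensive variant.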
@@ -399,7 +393,12 @@ private static List<ChildInfo> parseChildInfos(ByteBuffer blockBuffer, FractalHe
         long startingBlockSize = header.startingBlockSize.getInstance(Long.class);

         for (short r = 0; r < nrows; r++) {
-            long rowBlockSize = startingBlockSize * (1L << r);
+            // --- Corrected Logic ---
+            // Calculate the exponent: 0 for rows 0 and 1, then 1, 2, 3...
+            long exponent = Math.max(0L, r - 1);
+            long rowBlockSize = startingBlockSize * (1L << exponent);
+            // --- End Corrected Logic ---
+
             for (int c = 0; c < header.tableWidth; c++) {
                 HdfFixedPoint childAddress = HdfReadUtils.readHdfFixedPointFromBuffer(sizeOfOffset, blockBuffer);
                 HdfFixedPoint childFilteredSize = sizeOfOffset.undefined();
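
The corrected loop matches the HDF5 fractal heap doubling table: rows 0 and 1 both use the starting block size, and every row after that doubles. A standalone sketch of the row-size rule, under hypothetical values (rowBlockSize and the starting size of 512 are illustrative only):

final class DoublingTableSketch {
    // Row sizes: rows 0 and 1 share startingBlockSize; row r >= 2 is
    // startingBlockSize * 2^(r - 1).
    static long rowBlockSize(int row, long startingBlockSize) {
        long exponent = Math.max(0L, row - 1L);
        return startingBlockSize * (1L << exponent);
    }

    public static void main(String[] args) {
        for (int r = 0; r < 6; r++) {
            System.out.println("row " + r + " -> " + rowBlockSize(r, 512));
        }
        // Prints 512, 512, 1024, 2048, 4096, 8192 for rows 0..5,
        // whereas the removed (1L << r) formula would have doubled from row 1 on.
    }
}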
@@ -482,7 +481,8 @@ private static long getBlockSize(FractalHeapHeader header, long blockOffset) thr
         }
         double arg = ((double) blockOffset / (header.tableWidth * header.startingBlockSize.getInstance(Long.class))) + 1;
         int row = (int) Math.floor(Math.log(arg) / Math.log(2));
-        return startingBlockSize * (1L << row);
+        long exponent = Math.max(0L, row - 1);
+        return startingBlockSize * (1L << exponent);
     }

     public byte[] getObject(ParsedHeapId heapId) {
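
getBlockSize applies the same rule in reverse: it recovers the row index from a block offset via a log2, then maps row to size with the shared-starting-size exponent. A worked example with hypothetical parameters (tableWidth = 4, startingBlockSize = 512; blockSizeAt is illustrative, not part of FractalHeap):

final class GetBlockSizeSketch {
    static long blockSizeAt(long blockOffset, int tableWidth, long startingBlockSize) {
        // Same arithmetic as the committed getBlockSize.
        double arg = ((double) blockOffset / (tableWidth * startingBlockSize)) + 1;
        int row = (int) Math.floor(Math.log(arg) / Math.log(2));
        long exponent = Math.max(0L, row - 1L);
        return startingBlockSize * (1L << exponent);
    }

    public static void main(String[] args) {
        System.out.println(blockSizeAt(0, 4, 512));    // offset 0 sits in row 0 -> 512
        System.out.println(blockSizeAt(2048, 4, 512)); // start of row 1 -> still 512
    }
}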
