FIX: Accept any valid delimiters/EOF markers in TCK files #720
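For context, a small illustration of the issue the title describes (my sketch, not part of the PR): MRtrix TCK data separate streamlines with a `(NaN, NaN, NaN)` triple and end with `(inf, inf, inf)`, and IEEE 754 admits many NaN bit patterns, so matching raw bytes against one canonical delimiter can reject files whose delimiters are perfectly valid NaNs, while a value test with `np.isnan` accepts them all:

```python
import numpy as np

# A hypothetical delimiter written with a non-canonical quiet-NaN payload:
# it is still a valid NaN triple, but its bytes differ from np.nan's.
canonical = np.full(3, np.nan, dtype='<f4').tobytes()
other_nan = np.array([0x7fc00001] * 3, dtype='<u4').view('<f4')

print(other_nan.tobytes() == canonical)   # False: exact byte matching misses it
print(np.isnan(other_nan).all())          # True:  value-based test accepts it
```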
Changes from 12 commits
@@ -390,18 +390,14 @@ def _read(cls, fileobj, header, buffer_size=4):
         buffer_size = int(buffer_size * MEGABYTE)
         buffer_size += coordinate_size - (buffer_size % coordinate_size)

-        # Markers for streamline end and file end
-        fiber_marker = cls.FIBER_DELIMITER.astype(dtype).tostring()
-        eof_marker = cls.EOF_DELIMITER.astype(dtype).tostring()
-
         with Opener(fileobj) as f:
             start_position = f.tell()

             # Set the file position at the beginning of the data.
             f.seek(header["_offset_data"], os.SEEK_SET)

             eof = False
-            buffs = []
+            leftover = np.empty((0, 3), dtype='<f4')
             n_streams = 0

             while not eof:
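The `leftover` array introduced in this hunk carries points that trail the last delimiter of one buffered read into the next read, since a streamline can straddle a buffer boundary. A stand-alone sketch of the same carry-over pattern (hypothetical `split_streamlines` helper, not code from the patch):

```python
import numpy as np

def split_streamlines(chunks):
    """Yield streamlines from (N, 3) float32 chunks, split on all-NaN rows."""
    leftover = np.empty((0, 3), dtype='<f4')
    for coords in chunks:
        # Delimiter rows are the ones that are NaN in every coordinate.
        delims = np.where(np.isnan(coords).all(axis=1))[0]
        if leftover.size:
            delims += leftover.shape[0]
            coords = np.vstack((leftover, coords))
        begin = 0
        for delim in delims:
            pts = coords[begin:delim]
            if pts.size:
                yield pts
            begin = delim + 1
        # Points after the last delimiter wait for the next chunk.
        leftover = coords[begin:]

# One 3-point streamline split across two reads is still recovered whole.
track = np.arange(9, dtype='<f4').reshape(3, 3)
nan_row = np.full((1, 3), np.nan, dtype='<f4')
chunks = [track[:2], np.vstack((track[2:], nan_row))]
print([s.shape for s in split_streamlines(chunks)])   # [(3, 3)]
```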
@@ -411,37 +407,36 @@ def _read(cls, fileobj, header, buffer_size=4):
                 if eof:
                     buff = buff[:n_read]

-                buffs.append(buff)
-
-                # Make sure we've read enough to find a streamline delimiter.
-                if fiber_marker not in buff:
-                    # If we've read the whole file, then fail.
-                    if eof:
-                        # Could have minimal buffering, and have read only the
-                        # EOF delimiter
-                        buffs = [bytearray().join(buffs)]
-                        if not buffs[0] == eof_marker:
-                            raise DataError(
-                                "Cannot find a streamline delimiter. This file"
-                                " might be corrupted.")
-                    else:
-                        # Otherwise read a bit more.
-                        continue
-
-                all_parts = bytearray().join(buffs).split(fiber_marker)
-                point_parts, buffs = all_parts[:-1], all_parts[-1:]
-                point_parts = [p for p in point_parts if p != b'']
-
-                for point_part in point_parts:
-                    # Read floats.
-                    pts = np.frombuffer(point_part, dtype=dtype)
-                    # Convert data to little-endian if needed.
-                    yield pts.astype('<f4', copy=False).reshape([-1, 3])
-
-                n_streams += len(point_parts)
-
-            if not buffs[-1] == eof_marker:
-                raise DataError("Expecting end-of-file marker 'inf inf inf'")
+                raw_values = np.frombuffer(buff, dtype=dtype)
+
+                # Convert raw_values into a list of little-endian triples (for x,y,z coord)
+                coords = raw_values.astype('<f4', copy=False).reshape((-1, 3))
+
+                # Find stream delimiter locations (all NaNs)
+                delims = np.where(np.isnan(coords).all(axis=1))[0]
+
+                # Recover leftovers, which can't have delimiters in them
+                if leftover.size:
+                    delims += leftover.shape[0]
+                    coords = np.vstack((leftover, coords))
+
+                begin = 0
+                for delim in delims:
+                    pts = coords[begin:delim]
+                    if pts.size:
+                        yield coords[begin:delim]
+                        n_streams += 1
+                    begin = delim + 1
+
+                # The rest gets appended to the leftover
Suggested change:
-                # The rest gets appended to the leftover
+                # The rest becomes the new leftover.
Could you apply this suggestion, as well?
done!
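To see the value-based parsing from the second hunk end to end, here is a self-contained sketch on synthetic bytes (made-up streamlines and delimiters only; no real TCK header, `Opener`, or buffering involved):

```python
import numpy as np

# Two synthetic streamlines, each followed by an all-NaN delimiter row,
# then the 'inf inf inf' end-of-file marker.
s1 = np.arange(6, dtype='<f4').reshape(2, 3)
s2 = np.arange(9, dtype='<f4').reshape(3, 3)
nan_row = np.full((1, 3), np.nan, dtype='<f4')
inf_row = np.full((1, 3), np.inf, dtype='<f4')
raw = np.vstack((s1, nan_row, s2, nan_row, inf_row)).tobytes()

coords = np.frombuffer(raw, dtype='<f4').reshape(-1, 3)
delims = np.where(np.isnan(coords).all(axis=1))[0]

streams, begin = [], 0
for delim in delims:
    pts = coords[begin:delim]
    if pts.size:
        streams.append(pts)
    begin = delim + 1

print(len(streams))                    # 2
print(np.isinf(coords[begin:]).all())  # True: only the EOF marker remains
```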