@@ -413,7 +413,8 @@ def save(self, fileobj):
             pointing to TRK file (and ready to write from the beginning
             of the TRK header data).
         """
-        header = create_empty_header()
+        # Enforce little-endian byte order for header
+        header = create_empty_header().newbyteorder('<')
 
         # Override hdr's fields by those contained in `header`.
         for k, v in self.header.items():
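A minimal sketch of what the `newbyteorder('<')` call buys, using a stand-in `hdr_dtype` (the real TRK header dtype lives elsewhere in this module): TRK is a little-endian format, so the freshly created native-order header is viewed as '<' before any field is filled in, and stores are byte-swapped on big-endian hosts.

    import numpy as np

    # Stand-in for the real TRK header dtype (assumption for this sketch).
    hdr_dtype = np.dtype([('id_string', 'S6'),
                          ('voxel_size', 'f4', 3),
                          ('n_count', 'i4')])

    header = np.zeros((), dtype=hdr_dtype)  # native byte order
    header = header.newbyteorder('<')       # little-endian view of the same memory
    # NumPy >= 2.0 removed ndarray.newbyteorder; the equivalent there is:
    # header = header.view(header.dtype.newbyteorder('<'))

    header['n_count'] = 1
    # The serialized bytes are little-endian regardless of host order.
    assert header.tobytes()[-4:] == (1).to_bytes(4, 'little')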
@@ -499,20 +500,20 @@ def save(self, fileobj):
                         for d in t.data_for_points.values())):
                 raise DataError("Missing scalars for some points!")
 
-            points = np.asarray(t.streamline, dtype=f4_dtype)
-            scalars = [np.asarray(t.data_for_points[k], dtype=f4_dtype)
+            points = np.asarray(t.streamline)
+            scalars = [np.asarray(t.data_for_points[k])
                        for k in data_for_points_keys]
-            scalars = np.concatenate([np.ndarray((len(points), 0),
-                                                 dtype=f4_dtype)
+            scalars = np.concatenate([np.ndarray((len(points), 0),)
                                      ] + scalars, axis=1)
-            properties = [np.asarray(t.data_for_streamline[k],
-                                     dtype=f4_dtype)
+            properties = [np.asarray(t.data_for_streamline[k])
                           for k in data_for_streamline_keys]
-            properties = np.concatenate([np.array([], dtype=f4_dtype)
-                                         ] + properties)
+            properties = np.concatenate(
+                [np.array([])] + properties).astype(f4_dtype)
 
             data = struct.pack(i4_dtype.str[:-1], len(points))
-            data += np.concatenate([points, scalars], axis=1).tostring()
+            pts_scalars = np.concatenate(
+                [points, scalars], axis=1).astype(f4_dtype)
+            data += pts_scalars.tostring()
             data += properties.tostring()
             f.write(data)
 
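A self-contained sketch of this write path under the new casting order, with made-up `points`/`scalars`/`properties` standing in for the tractogram data: inputs keep their incoming dtype through concatenation and are cast to the little-endian f4 on-disk dtype once, right before serialization.

    import struct
    import numpy as np

    f4_dtype = np.dtype('<f4')  # TRK stores floats as little-endian float32
    i4_dtype = np.dtype('<i4')

    points = np.random.rand(5, 3)     # hypothetical: 5 points, xyz, float64
    scalars = [np.random.rand(5, 2)]  # hypothetical: 2 scalars per point
    properties = [np.random.rand(4)]  # hypothetical: 4 per-streamline values

    # Concatenate in the incoming dtype; the empty arrays keep the shapes
    # well-defined when there are no scalars/properties at all.
    scalars = np.concatenate([np.ndarray((len(points), 0))] + scalars, axis=1)
    properties = np.concatenate([np.array([])] + properties).astype(f4_dtype)

    # Cast to the on-disk dtype only at serialization time.
    data = struct.pack(i4_dtype.str[:-1], len(points))
    pts_scalars = np.concatenate([points, scalars], axis=1).astype(f4_dtype)
    data += pts_scalars.tobytes()  # .tostring(), as in the diff, is the
    data += properties.tobytes()   # older spelling, deprecated in NumPy 1.19
    # `data` is now one streamline record: n_points as i4, then
    # (x, y, z, s1, s2) per point as f4, then the f4 properties.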