From d428ef58fdcb236d81cd2e6beb687c811be1d331 Mon Sep 17 00:00:00 2001
From: wtclarke
Date: Tue, 10 Oct 2023 20:13:59 +0100
Subject: [PATCH 1/2] Ensure int64 being used for memory read.

---
 mapvbvd/twix_map_obj.py | 30 ++++++++++++++++--------------
 1 file changed, 16 insertions(+), 14 deletions(-)

diff --git a/mapvbvd/twix_map_obj.py b/mapvbvd/twix_map_obj.py
index b646978..17e2dc1 100644
--- a/mapvbvd/twix_map_obj.py
+++ b/mapvbvd/twix_map_obj.py
@@ -682,16 +682,16 @@ def _fileopen(self):
 
     def readData(self, mem, cIxToTarg=None, cIxToRaw=None, selRange=None, selRangeSz=None, outSize=None):
-        mem = mem.astype(int)
+        mem = mem.astype(np.int64)
         if outSize is None:
             if selRange is None:
-                selRange = [np.arange(0, self.dataSize[0]).astype(int),
-                            np.arange(0, self.dataSize[1]).astype(int)]
+                selRange = [np.arange(0, self.dataSize[0]).astype(np.int64),
+                            np.arange(0, self.dataSize[1]).astype(np.int64)]
             else:
-                selRange[0] = np.arange(0, self.dataSize[0]).astype(int)
-                selRange[1] = np.arange(0, self.dataSize[0]).astype(int)
+                selRange[0] = np.arange(0, self.dataSize[0]).astype(np.int64)
+                selRange[1] = np.arange(0, self.dataSize[0]).astype(np.int64)
 
-            outSize = np.concatenate((self.dataSize[0:2], mem.shape)).astype(int)
+            outSize = np.concatenate((self.dataSize[0:2], mem.shape)).astype(np.int64)
             selRangeSz = outSize
             cIxToTarg = np.arange(0, selRangeSz[2])
             cIxToRaw = cIxToTarg
@@ -708,15 +708,17 @@ def readData(self, mem, cIxToTarg=None, cIxToRaw=None, selRange=None, selRangeSz
         # These parameters were copied for speed in matlab, but just duplicate to keep code similar in python
         szScanHeader = self.freadInfo.szScanHeader
-        readSize = self.freadInfo.sz.astype(int)
-        readShape = self.freadInfo.shape.astype(int)
-        readCut = self.freadInfo.cut.astype(int)
-        keepOS = np.concatenate([list(range(int(self.NCol / 4))), list(range(int(self.NCol * 3 / 4), int(self.NCol)))])
+        readSize = self.freadInfo.sz.astype(np.int64)
+        readShape = self.freadInfo.shape.astype(np.int64)
+        readCut = self.freadInfo.cut.astype(np.int64)
+        keepOS = np.concatenate([
+            list(range(np.int64(self.NCol / 4))),
+            list(range(np.int64(self.NCol * 3 / 4), np.int64(self.NCol)))])
         bIsReflected = self.IsReflected[cIxToRaw]
         bRegrid = self.regrid and self.rstrj.size > 1
         # slicedata = self.slicePos[cIxToRaw, :]
-        ro_shift = self.ROoffcenter[cIxToRaw] * int(not self.ignoreROoffcenter)
+        ro_shift = self.ROoffcenter[cIxToRaw] * np.int64(not self.ignoreROoffcenter)
         # %SRY store information about raw data correction
         # bDoRawDataCorrect = this.arg.doRawDataCorrect;
         # bIsRawDataCorrect = this.IsRawDataCorrect( cIxToRaw );
@@ -747,7 +749,7 @@ def readData(self, mem, cIxToTarg=None, cIxToRaw=None, selRange=None, selRangeSz
         if bRegrid:
             v1 = np.array(range(1, selRangeSz[1] * blockSz + 1))
             rsTrj = [self.rstrj, v1]
-            trgTrj = np.linspace(np.min(self.rstrj), np.max(self.rstrj), int(self.NCol))
+            trgTrj = np.linspace(np.min(self.rstrj), np.max(self.rstrj), np.int64(self.NCol))
             trgTrj = [trgTrj, v1]
 
         # counter for proper scaling of averages/segments
@@ -797,7 +799,7 @@ def readData(self, mem, cIxToTarg=None, cIxToRaw=None, selRange=None, selRangeSz
                 # remove MDH data from block:
                 block = block[readCut, :, :]
 
-                ix = np.arange(1 + k - blockCtr, k + 1, dtype=int)  # +1 so that it goes to k
+                ix = np.arange(1 + k - blockCtr, k + 1, dtype=np.int64)  # +1 so that it goes to k
 
                 if blockCtr != blockSz:
                     block = block[:, :, 0:blockCtr]
@@ -906,7 +908,7 @@ def readData(self, mem, cIxToTarg=None, cIxToRaw=None, selRange=None, selRangeSz
                     blockInit = np.concatenate((blockInit, blockInit), axis=2)
                 else:  # regression; reset size and lock it
-                    blockSz = np.maximum(blockSz / 2, 1).astype(int)
+                    blockSz = np.maximum(blockSz / 2, 1).astype(np.int64)
                     blockInit = blockInit[:, :, :blockSz]
                     doLockblockSz = True
 

From 5a04539fedfe7e85cdc35925898e10f67bd208bf Mon Sep 17 00:00:00 2001
From: wtclarke
Date: Wed, 11 Oct 2023 11:14:16 +0100
Subject: [PATCH 2/2] Fix subtle bug with file sizes.

---
 CHANGELOG.md       | 5 +++++
 mapvbvd/mapVBVD.py | 4 ++--
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3ce3e70..ce75a8a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,10 @@
 This document contains the pymapvbvd release history in reverse chronological order.
 
+0.5.6 (Wednesday 11th October 2023)
+-----------------------------------
+- Fixed issue with large files on Windows. Thanks to FrankZijlstra for reporting.
+- Fixed subtle bug with precisely sized files interacting with memory chunking size. Thanks to FrankZijlstra for reporting.
+
 0.5.5 (Tuesday 10th October 2023)
 ---------------------------------
 - Suppress warning `RuntimeWarning: invalid value encountered in cast`.

diff --git a/mapvbvd/mapVBVD.py b/mapvbvd/mapVBVD.py
index 185f03e..a82dd3b 100644
--- a/mapvbvd/mapVBVD.py
+++ b/mapvbvd/mapVBVD.py
@@ -140,7 +140,7 @@ def loop_mdh_read(fid, version, Nscans, scan, measOffset, measLength, print_prog
             n_acq = n_acq + 1
 
             # grow arrays in batches
-            if n_acq > szBlob:
+            if n_acq >= szBlob:
                 grownArray = np.zeros((mdh_blob.shape[0], allocSize), dtype=np.uint8)  # pylint: disable=E1136  # pylint/issues/3139
                 mdh_blob = np.concatenate((mdh_blob, grownArray), axis=1)
 
@@ -160,7 +160,7 @@ def loop_mdh_read(fid, version, Nscans, scan, measOffset, measLength, print_prog
 
         cPos = cPos + int(ulDMALength)
 
-    if isEOF or n_acq == len(filePos):
+    if isEOF:
        n_acq = n_acq - 1  # ignore the last attempt
        # import pdb; pdb.set_trace()
     filePos[n_acq] = cPos