
Commit 531ae7e

add permissive to all mrcfile.open
1 parent a6cce4c commit 531ae7e

File tree: 12 files changed (+25, -25 lines)

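For context on what the flag does: in the mrcfile library, permissive=True relaxes header validation, so files whose headers fail the strict MRC checks are still opened and any problems are reported as warnings instead of raised as exceptions. A minimal sketch of the difference (the file path below is hypothetical and not part of this commit):

import warnings
import mrcfile

path = "tomogram_with_odd_header.mrc"  # hypothetical example file

# Strict open: a malformed header (bad MAP id, machine stamp, mode, ...)
# raises an exception and the volume cannot be read at all.
try:
    with mrcfile.open(path) as mrc:
        data = mrc.data
except ValueError as exc:
    print("strict open failed:", exc)

# Permissive open: the same problems become warnings and the data is
# still returned where possible, which is what this commit enables for
# every read in the code base.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    with mrcfile.open(path, permissive=True) as mrc:
        data = mrc.data
for w in caught:
    print("warning:", w.message)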

bin/isonet.py

Lines changed: 1 addition & 1 deletion
@@ -445,7 +445,7 @@ def resize(self, star_file:str, apix: float=15, out_folder="tomograms_resized"):
 tomo_name = item.rlnMicrographName
 zoom_factor = float(ori_apix)/apix
 new_tomo_name = "{}/{}".format(out_folder,os.path.basename(tomo_name))
-with mrcfile.open(tomo_name) as mrc:
+with mrcfile.open(tomo_name, permissive=True) as mrc:
 data = mrc.data
 print("scaling: {}".format(tomo_name))
 new_data = zoom(data, zoom_factor,order=3, prefilter=False)

models/unet/data_sequence.py

Lines changed: 5 additions & 5 deletions
@@ -22,8 +22,8 @@ def __getitem__(self, i):
 idx = slice(i*self.batch_size,(i+1)*self.batch_size)
 idx = self.perm[idx]
 # print('*******',self.x[-1],mrcfile.open(self.x[0]).data[:,:,:,np.newaxis].shape)
-rx = np.array([mrcfile.open(self.x[j]).data[:,:,:,np.newaxis] for j in idx])
-ry = np.array([mrcfile.open(self.y[j]).data[:,:,:,np.newaxis] for j in idx])
+rx = np.array([mrcfile.open(self.x[j], permissive=True).data[:,:,:,np.newaxis] for j in idx])
+ry = np.array([mrcfile.open(self.y[j], permissive=True).data[:,:,:,np.newaxis] for j in idx])
 # for j in idx:
 # print(mrcfile.open(self.x[j]).data.shape,mrcfile.open(self.y[j]).data.shape)
 return rx,ry
@@ -52,8 +52,8 @@ def gen():
 for i in range(len(x_set)//batch_size):
 idx = slice(i * batch_size,(i+1) * batch_size)
 idx = all_idx[idx]
-rx = np.array([mrcfile.open(x_set[j]).data[:,:,:,np.newaxis] for j in idx])
-ry = np.array([mrcfile.open(y_set[j]).data[:,:,:,np.newaxis] for j in idx])
+rx = np.array([mrcfile.open(x_set[j], permissive=True).data[:,:,:,np.newaxis] for j in idx])
+ry = np.array([mrcfile.open(y_set[j], permissive=True).data[:,:,:,np.newaxis] for j in idx])

 yield rx,ry
 return gen
@@ -67,6 +67,6 @@ def gen():
 for i in range(len(x_set)//batch_size):
 idx = slice(i * batch_size,(i+1) * batch_size)
 idx = all_idx[idx]
-rx = np.array([mrcfile.open(x_set[j]).data[:,:,:,np.newaxis] for j in idx])
+rx = np.array([mrcfile.open(x_set[j], permissive=True).data[:,:,:,np.newaxis] for j in idx])
 yield rx
 return gen

models/unet/losses/wedge_power.py

Lines changed: 1 addition & 1 deletion
@@ -47,7 +47,7 @@ def power(xz):
 mw = np.repeat(TwoDPsf(sidelen,sidelen).getMW()[:,np.newaxis,:],sidelen,axis=1)
 circle = np.repeat(TwoDPsf(sidelen,sidelen).circleMask()[:,np.newaxis,:],sidelen,axis=1)
 gain = (circle -mw)/2
-with mrcfile.open('/storage/heng/mwrtest3D/t371_2/cuberesults/pp676-bin4-wbp_000073_iter15.mrc') as op:
+with mrcfile.open('/storage/heng/mwrtest3D/t371_2/cuberesults/pp676-bin4-wbp_000073_iter15.mrc', permissive=True) as op:
 data = op.data
 a = tf.convert_to_tensor(mw[np.newaxis,:,:,:,np.newaxis])
 b = tf.convert_to_tensor(data[np.newaxis,:,:,:,np.newaxis])

models/unet/predict.py

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@ def predict(settings):
 data = []
 for i,mrc in enumerate(list(settings.mrc_list) + list(settings.mrc_list[:append_number])):
 root_name = mrc.split('/')[-1].split('.')[0]
-with mrcfile.open(mrc) as mrcData:
+with mrcfile.open(mrc, permissive=True) as mrcData:
 real_data = mrcData.data.astype(np.float32)*-1
 real_data=normalize(real_data, percentile = settings.normalize_percentile)

preprocessing/cubes.py

Lines changed: 1 addition & 1 deletion
@@ -156,7 +156,7 @@ def cubesX(self):
 path_noise = sorted([self.noise_folder+'/'+f for f in os.listdir(self.noise_folder)])
 path_index = np.random.permutation(len(path_noise))[0:self.__cubesX.shape[0]]
 def read_vol(f):
-with mrcfile.open(f) as mf:
+with mrcfile.open(f, permissive=True) as mf:
 res = mf.data
 return res
 noise_volume = np.array([read_vol(path_noise[j]) for j in path_index])

preprocessing/prepare.py

Lines changed: 6 additions & 6 deletions
@@ -35,7 +35,7 @@ def extract_subtomos(settings):
 pixel_size = it.rlnPixelSize
 if settings.use_deconv_tomo and "rlnDeconvTomoName" in md.getLabels() and os.path.isfile(it.rlnDeconvTomoName):
 logging.info("Extract from deconvolved tomogram {}".format(it.rlnDeconvTomoName))
-with mrcfile.open(it.rlnDeconvTomoName) as mrcData:
+with mrcfile.open(it.rlnDeconvTomoName, permissive=True) as mrcData:
 orig_data = mrcData.data.astype(np.float32)
 else:
 print("Extract from origional tomogram {}".format(it.rlnMicrographName))
@@ -44,7 +44,7 @@ def extract_subtomos(settings):


 if "rlnMaskName" in md.getLabels() and it.rlnMaskName not in [None, "None"]:
-with mrcfile.open(it.rlnMaskName) as m:
+with mrcfile.open(it.rlnMaskName, permissive=True) as m:
 mask_data = m.data
 else:
 mask_data = None
@@ -89,7 +89,7 @@ def get_cubes_one(data_X, data_Y, settings, start = 0, mask = None, add_noise =
 path_noise = sorted([settings.noise_dir+'/'+f for f in os.listdir(settings.noise_dir)])
 path_index = np.random.randint(len(path_noise))
 def read_vol(f):
-with mrcfile.open(f) as mf:
+with mrcfile.open(f, permissive=True) as mf:
 res = mf.data
 return res
 noise_volume = read_vol(path_noise[path_index])
@@ -119,11 +119,11 @@ def get_cubes(inp,settings):
 mrc, start = inp
 root_name = mrc.split('/')[-1].split('.')[0]
 current_mrc = '{}/{}_iter{:0>2d}.mrc'.format(settings.result_dir,root_name,settings.iter_count-1)
-with mrcfile.open(mrc) as mrcData:
+with mrcfile.open(mrc, permissive=True) as mrcData:
 iw_data = mrcData.data.astype(np.float32)*-1
 iw_data = normalize(iw_data, percentile = settings.normalize_percentile)

-with mrcfile.open(current_mrc) as mrcData:
+with mrcfile.open(current_mrc, permissive=True) as mrcData:
 ow_data = mrcData.data.astype(np.float32)*-1
 ow_data = normalize(ow_data, percentile = settings.normalize_percentile)

@@ -212,7 +212,7 @@ def generate_first_iter_mrc(mrc,settings):
 '''
 root_name = mrc.split('/')[-1].split('.')[0]
 extension = mrc.split('/')[-1].split('.')[1]
-with mrcfile.open(mrc) as mrcData:
+with mrcfile.open(mrc, permissive=True) as mrcData:
 orig_data = normalize(mrcData.data.astype(np.float32)*-1, percentile = settings.normalize_percentile)
 orig_data = apply_wedge(orig_data, ld1=1, ld2=0)
 orig_data = normalize(orig_data, percentile = settings.normalize_percentile)

preprocessing/simulate.py

Lines changed: 2 additions & 2 deletions
@@ -51,7 +51,7 @@ def apply_wedge_dcube(ori_data, mw2d, mw3d=None):

 else:
 import mrcfile
-with mrcfile.open(mw3d, 'r') as mrc:
+with mrcfile.open(mw3d, permissive=True) as mrc:
 mw = mrc.data
 mwshift = np.fft.fftshift(mw)
 data = np.zeros_like(ori_data)
@@ -105,7 +105,7 @@ def apply_wedge1(ori_data, ld1 = 1, ld2 =0, mw3d = None):
 return outData
 else:
 import mrcfile
-with mrcfile.open(mw3d, 'r') as mrc:
+with mrcfile.open(mw3d, permissive=True) as mrc:
 mw = mrc.data
 mw = np.fft.fftshift(mw)
 mw = mw * ld1 + (1-mw) * ld2
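A small detail in this file: the old calls passed 'r' positionally as the mode and the new calls drop it. Since 'r' (read-only) is the default mode for mrcfile.open, the read behaviour is unchanged; only the permissive flag is new. A quick sketch of the equivalence (the path is hypothetical):

import mrcfile

path = "missing_wedge_3d.mrc"  # hypothetical example file

# mode defaults to 'r', so these two opens are equivalent apart from
# header validation being relaxed in the second call.
with mrcfile.open(path, 'r') as strict_mrc:
    mw_old = strict_mrc.data

with mrcfile.open(path, permissive=True) as lenient_mrc:
    mw_new = lenient_mrc.data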

util/deconv_gpu.py

Lines changed: 1 addition & 1 deletion
@@ -177,7 +177,7 @@ def restore(self,new_list):
 num_cpu = int(args[6])
 deconvstrength = 1

-with mrcfile.open(mrcFile) as mrc:
+with mrcfile.open(mrcFile, permissive=True) as mrc:
 vol = mrc.data
 c = Chunks(num=(1,4,4),overlap=0.25)
 chunks_list = c.get_chunks(vol)

util/deconvolution.py

Lines changed: 4 additions & 4 deletions
@@ -45,7 +45,7 @@ def wiener1d(angpix, voltage, cs, defocus, snrfalloff, deconvstrength, highpassn
 return ctf, wiener

 def tom_deconv_tomo(vol_file, out_file,angpix, voltage, cs, defocus, snrfalloff, deconvstrength, highpassnyquist, phaseflipped, phaseshift, ncpu=8):
-with mrcfile.open(vol_file) as f:
+with mrcfile.open(vol_file, permissive=True) as f:
 header_in = f.header
 vol = f.data
 voxelsize = f.voxel_size
@@ -135,7 +135,7 @@ def __init__(self,chunk_size=200,overlap=0.25):
 def get_chunks(self,tomo_name):
 #side*(1-overlap)*(num-1)+side = sp + side*overlap -> side *(1-overlap) * num = side
 root_name = os.path.splitext(os.path.basename(tomo_name))[0]
-with mrcfile.open(tomo_name) as f:
+with mrcfile.open(tomo_name, permissive=True) as f:
 vol = f.data#.astype(np.float32)
 cropsize = int(self.chunk_size*(1+self.overlap))
 cubesize = self.chunk_size
@@ -169,7 +169,7 @@ def restore(self,new_file_list):
 for j in range(self._N[1]):
 for k in range(self._N[2]):
 one_chunk_file = new_file_list[i*self._N[1]*self._N[2]+j*self._N[2]+k]
-with mrcfile.open(one_chunk_file) as f:
+with mrcfile.open(one_chunk_file, permissive=True) as f:
 one_chunk_data = f.data
 new[i*cubesize:(i+1)*cubesize,j*cubesize:(j+1)*cubesize,k*cubesize:(k+1)*cubesize] \
 = one_chunk_data[start:end,start:end,start:end]
@@ -205,7 +205,7 @@ def deconv_one(tomo, out_tomo, voltage=300.0, cs=2.7, defocus=1.0, pixel_size=1.
 chunks_deconv_list = list(p.map(partial_func,chunks_list))
 vol_restored = c.restore(chunks_deconv_list)

-with mrcfile.open(tomo) as n:
+with mrcfile.open(tomo, permissive=True) as n:
 header_input = n.header
 pixel_size = n.voxel_size
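Worth noting for this file, which also copies header fields into its output: permissive=True only changes how the header is validated on read. Attributes such as f.header and f.voxel_size are still populated from whatever is stored on disk, so downstream code that forwards them is unaffected; the one caveat is that f.data can be None when the header is too damaged to interpret the data block. A short illustration (hypothetical path):

import mrcfile

path = "chunk_000.mrc"  # hypothetical example file

with mrcfile.open(path, permissive=True) as f:
    header_in = f.header      # header fields as stored on disk
    voxelsize = f.voxel_size  # still available in permissive mode
    vol = f.data              # may be None if the data block is unreadable

if vol is None:
    raise RuntimeError("header could not be interpreted; check the warnings")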

util/filter.py

Lines changed: 1 addition & 1 deletion
@@ -111,7 +111,7 @@ def get_polygon(points):
 import sys
 import mrcfile
 args = sys.argv
-with mrcfile.open(args[1]) as n:
+with mrcfile.open(args[1], permissive=True) as n:
 tomo = n.data
 mask = stdmask_mpi(tomo,cubelen=20,cubesize=80,ncpu=20,if_rescale=True)
 with mrcfile.new(args[2],overwrite=True) as n:
