Skip to content

Commit

Permalink
updates prepping for 3.1.12
Browse files Browse the repository at this point in the history
  • Loading branch information
BM32ESRF committed Nov 24, 2022
1 parent 7d0e991 commit 9110051
Show file tree
Hide file tree
Showing 3 changed files with 209 additions and 80 deletions.
54 changes: 54 additions & 0 deletions lauetoolsnn/end_to_end_scripts/LaueNN_pyScript_1_or_2phase.py
Original file line number Diff line number Diff line change
Expand Up @@ -889,6 +889,60 @@
# results = new_MP_function(valu12[0])
# best = results[-2][0][0][0]

def chunker_list(seq, size):
    """Split *seq* into *size* strided sub-sequences for parallel workers.

    Yields ``size`` slices where slice *k* contains elements
    ``seq[k], seq[k+size], seq[k+2*size], ...`` — an interleaved
    (round-robin) partition, which balances load across workers when
    item cost varies along the sequence.
    """
    for offset in range(size):
        yield seq[offset::size]
# # =============================================================================
# # Below code when RAM is of concern
# # =============================================================================
# from multiprocessing import Process, Queue
# from lauetoolsnn.utils_lauenn import new_MP_function_v1
# _inputs_queue = Queue()
# _outputs_queue = Queue()
# _worker_processes = {}
# for i in range(ncpu):
# _worker_processes[i]= Process(target=new_MP_function_v1, args=(_inputs_queue, _outputs_queue, i+1))
# for i in range(ncpu):
# _worker_processes[i].start()
# chunks = chunker_list(valu12, ncpu)
# chunks_mp = list(chunks)
# meta = {'t1':time.time()}
# for ijk in range(int(ncpu)):
# _inputs_queue.put((chunks_mp[ijk], ncpu, meta))
# ### Update data from multiprocessing
# pbar = tqdm(total=count_global)
# unique_count = []
# while True:
# if len(np.unique(unique_count)) == count_global:
# print("All files have been treated")
# break
# if not _outputs_queue.empty():
# n_range = _outputs_queue.qsize()
# for _ in range(n_range):
# r_message_mpdata = _outputs_queue.get()
# strain_matrix_mpdata, strain_matrixs_mpdata, rotation_matrix_mpdata, col_mpdata,\
# colx_mpdata, coly_mpdata, match_rate_mpdata, mat_global_mpdata,\
# cnt_mpdata, meta_mpdata, files_treated_mpdata, spots_len_mpdata, \
# iR_pixel_mpdata, fR_pixel_mpdata, best_match_mpdata, check_mpdata = r_message_mpdata
# for i_mpdata in files_treated_mpdata:
# files_treated.append(i_mpdata)
# for intmat_mpdata in range(int(ubmat)):
# check[cnt_mpdata,intmat_mpdata] = check_mpdata[cnt_mpdata,intmat_mpdata]
# mat_global[intmat_mpdata][0][cnt_mpdata] = mat_global_mpdata[intmat_mpdata][0][cnt_mpdata]
# strain_matrix[intmat_mpdata][0][cnt_mpdata,:,:] = strain_matrix_mpdata[intmat_mpdata][0][cnt_mpdata,:,:]
# strain_matrixs[intmat_mpdata][0][cnt_mpdata,:,:] = strain_matrixs_mpdata[intmat_mpdata][0][cnt_mpdata,:,:]
# rotation_matrix[intmat_mpdata][0][cnt_mpdata,:,:] = rotation_matrix_mpdata[intmat_mpdata][0][cnt_mpdata,:,:]
# col[intmat_mpdata][0][cnt_mpdata,:] = col_mpdata[intmat_mpdata][0][cnt_mpdata,:]
# colx[intmat_mpdata][0][cnt_mpdata,:] = colx_mpdata[intmat_mpdata][0][cnt_mpdata,:]
# coly[intmat_mpdata][0][cnt_mpdata,:] = coly_mpdata[intmat_mpdata][0][cnt_mpdata,:]
# match_rate[intmat_mpdata][0][cnt_mpdata] = match_rate_mpdata[intmat_mpdata][0][cnt_mpdata]
# spots_len[intmat_mpdata][0][cnt_mpdata] = spots_len_mpdata[intmat_mpdata][0][cnt_mpdata]
# iR_pix[intmat_mpdata][0][cnt_mpdata] = iR_pixel_mpdata[intmat_mpdata][0][cnt_mpdata]
# fR_pix[intmat_mpdata][0][cnt_mpdata] = fR_pixel_mpdata[intmat_mpdata][0][cnt_mpdata]
# best_match[intmat_mpdata][0][cnt_mpdata] = best_match_mpdata[intmat_mpdata][0][cnt_mpdata]
# if cnt_mpdata not in unique_count:
# pbar.update(1)
# unique_count.append(cnt_mpdata)

# Dispatch every work item in `valu12` to a process pool.
# Each element is wrapped in a 1-tuple by zip() so that starmap() unpacks
# it into a single positional argument for new_MP_function.
args = zip(valu12)
with multiprocessing.Pool(ncpu) as pool:
    # chunksize=1 hands tasks out one at a time (better load balancing for
    # uneven per-file cost, at the price of more IPC overhead).
    # NOTE(review): tqdm wraps the *input* iterator here, so the bar tracks
    # task submission, not completion — starmap consumes the whole iterable
    # up front. Presumably acceptable as a rough progress hint; verify.
    results = pool.starmap(new_MP_function, tqdm(args, total=len(valu12)), chunksize=1)
Expand Down
57 changes: 37 additions & 20 deletions lauetoolsnn/lauetoolsneuralnetwork.py
Original file line number Diff line number Diff line change
Expand Up @@ -6293,26 +6293,43 @@ def predict_preprocess(self,cnt,rotation_matrix,strain_matrix,strain_matrixs,
print("Error writing the cor file", path)

elif files.split(".")[-1] == "cor":
seednumber = "Experimental COR file"
allres = IOLT.readfile_cor(files, True)
data_theta, data_chi, peakx, peaky, intensity = allres[1:6]
CCDcalib = allres[-1]
detectorparameters = allres[-2]
# print('detectorparameters from file are: '+ str(detectorparameters))
pixelsize = CCDcalib['pixelsize']
CCDLabel = CCDcalib['CCDLabel']
framedim = dictLT.dict_CCD[CCDLabel][0]
dict_dp={}
dict_dp['kf_direction']=default_detector_geom
dict_dp['detectorparameters']=detectorparameters
dict_dp['detectordistance']=detectorparameters[0]
dict_dp['detectordiameter']=pixelsize*framedim[0]#TODO*2
dict_dp['pixelsize']=pixelsize
dict_dp['dim']=framedim
dict_dp['peakX']=peakx
dict_dp['peakY']=peaky
dict_dp['intensity']=intensity

try:
seednumber = "Experimental COR file"
allres = IOLT.readfile_cor(files, True)
data_theta, data_chi, peakx, peaky, intensity = allres[1:6]
CCDcalib = allres[-1]
detectorparameters = allres[-2]
# print('detectorparameters from file are: '+ str(detectorparameters))
pixelsize = CCDcalib['pixelsize']
CCDLabel = CCDcalib['CCDLabel']
framedim = dictLT.dict_CCD[CCDLabel][0]
dict_dp={}
dict_dp['kf_direction']=default_detector_geom
dict_dp['detectorparameters']=detectorparameters
dict_dp['detectordistance']=detectorparameters[0]
dict_dp['detectordiameter']=pixelsize*framedim[0]#TODO*2
dict_dp['pixelsize']=pixelsize
dict_dp['dim']=framedim
dict_dp['peakX']=peakx
dict_dp['peakY']=peaky
dict_dp['intensity']=intensity
except:
print("No cor file found for "+ files)
for intmat in range(matricies):
rotation_matrix[intmat][0][self.cnt,:,:] = np.zeros((3,3))
strain_matrix[intmat][0][self.cnt,:,:] = np.zeros((3,3))
strain_matrixs[intmat][0][self.cnt,:,:] = np.zeros((3,3))
col[intmat][0][self.cnt,:] = 0,0,0
colx[intmat][0][self.cnt,:] = 0,0,0
coly[intmat][0][self.cnt,:] = 0,0,0
match_rate[intmat][0][self.cnt] = 0
mat_global[intmat][0][self.cnt] = 0

cnt += 1
self.cnt += 1
peak_detection_error = True
continue

if peak_detection_error:
continue

Expand Down
178 changes: 118 additions & 60 deletions lauetoolsnn/utils_lauenn.py
Original file line number Diff line number Diff line change
Expand Up @@ -2234,24 +2234,43 @@ def predict_preprocessMP(files, cnt,
print("Error writing the cor file", path)

elif files.split(".")[-1] == "cor":
seednumber = "Experimental COR file"
allres = IOLT.readfile_cor(files, True)
data_theta, data_chi, peakx, peaky, intensity = allres[1:6]
CCDcalib = allres[-1]
detectorparameters = allres[-2]
pixelsize = CCDcalib['pixelsize']
CCDLabel = CCDcalib['CCDLabel']
framedim = dictLT.dict_CCD[CCDLabel][0]
dict_dp={}
dict_dp['kf_direction']=default_detector_geom
dict_dp['detectorparameters']=detectorparameters
dict_dp['detectordistance']=detectorparameters[0]
dict_dp['detectordiameter']=pixelsize*framedim[0]#TODO*2
dict_dp['pixelsize']=pixelsize
dict_dp['dim']=framedim
dict_dp['peakX']=peakx
dict_dp['peakY']=peaky
dict_dp['intensity']=intensity
try:
seednumber = "Experimental COR file"
allres = IOLT.readfile_cor(files, True)
data_theta, data_chi, peakx, peaky, intensity = allres[1:6]
CCDcalib = allres[-1]
detectorparameters = allres[-2]
pixelsize = CCDcalib['pixelsize']
CCDLabel = CCDcalib['CCDLabel']
framedim = dictLT.dict_CCD[CCDLabel][0]
dict_dp={}
dict_dp['kf_direction']=default_detector_geom
dict_dp['detectorparameters']=detectorparameters
dict_dp['detectordistance']=detectorparameters[0]
dict_dp['detectordiameter']=pixelsize*framedim[0]#TODO*2
dict_dp['pixelsize']=pixelsize
dict_dp['dim']=framedim
dict_dp['peakX']=peakx
dict_dp['peakY']=peaky
dict_dp['intensity']=intensity
except:
print("Error in Cor file reading for "+ files)
for intmat in range(matricies):
rotation_matrix[intmat][0][cnt,:,:] = np.zeros((3,3))
strain_matrix[intmat][0][cnt,:,:] = np.zeros((3,3))
strain_matrixs[intmat][0][cnt,:,:] = np.zeros((3,3))
col[intmat][0][cnt,:] = 0,0,0
colx[intmat][0][cnt,:] = 0,0,0
coly[intmat][0][cnt,:] = 0,0,0
match_rate[intmat][0][cnt] = 0
mat_global[intmat][0][cnt] = 0
spots_len[intmat][0][cnt] = 0
iR_pix[intmat][0][cnt] = 0
fR_pix[intmat][0][cnt] = 0
check[cnt,intmat] = 0
files_treated.append(files)
return strain_matrix, strain_matrixs, rotation_matrix, col, colx, coly, \
match_rate, mat_global, cnt, files_treated,spots_len,iR_pix,fR_pix, check, best_match, None

sorted_data = np.transpose(np.array([data_theta, data_chi]))
tabledistancerandom = np.transpose(GT.calculdist_from_thetachi(sorted_data, sorted_data))
Expand Down Expand Up @@ -8378,27 +8397,47 @@ def predict_preprocessMultiProcess(files, cnt,
print("Error writing the cor file", path)

elif files.split(".")[-1] == "cor":
# print("Entering Cor file read section")
seednumber = "Experimental COR file"
allres = IOLT.readfile_cor(files, True)
data_theta, data_chi, peakx, peaky, intensity = allres[1:6]
CCDcalib = allres[-1]
detectorparameters = allres[-2]
# print('detectorparameters from file are: '+ str(detectorparameters))
pixelsize = CCDcalib['pixelsize']
CCDLabel = CCDcalib['CCDLabel']
framedim = dictLT.dict_CCD[CCDLabel][0]
dict_dp={}
dict_dp['kf_direction']=default_detector_geom
dict_dp['detectorparameters']=detectorparameters
dict_dp['detectordistance']=detectorparameters[0]
dict_dp['detectordiameter']=pixelsize*framedim[0]
dict_dp['pixelsize']=pixelsize
dict_dp['dim']=framedim
dict_dp['peakX']=peakx
dict_dp['peakY']=peaky
dict_dp['intensity']=intensity

try:
# print("Entering Cor file read section")
seednumber = "Experimental COR file"
allres = IOLT.readfile_cor(files, True)
data_theta, data_chi, peakx, peaky, intensity = allres[1:6]
CCDcalib = allres[-1]
detectorparameters = allres[-2]
# print('detectorparameters from file are: '+ str(detectorparameters))
pixelsize = CCDcalib['pixelsize']
CCDLabel = CCDcalib['CCDLabel']
framedim = dictLT.dict_CCD[CCDLabel][0]
dict_dp={}
dict_dp['kf_direction']=default_detector_geom
dict_dp['detectorparameters']=detectorparameters
dict_dp['detectordistance']=detectorparameters[0]
dict_dp['detectordiameter']=pixelsize*framedim[0]
dict_dp['pixelsize']=pixelsize
dict_dp['dim']=framedim
dict_dp['peakX']=peakx
dict_dp['peakY']=peaky
dict_dp['intensity']=intensity
except:
print("Error in Cor file reading for "+ files)
for intmat in range(matricies):
rotation_matrix[intmat][0][cnt,:,:] = np.zeros((3,3))
strain_matrix[intmat][0][cnt,:,:] = np.zeros((3,3))
strain_matrixs[intmat][0][cnt,:,:] = np.zeros((3,3))
col[intmat][0][cnt,:] = 0,0,0
colx[intmat][0][cnt,:] = 0,0,0
coly[intmat][0][cnt,:] = 0,0,0
match_rate[intmat][0][cnt] = 0
mat_global[intmat][0][cnt] = 0
spots_len[intmat][0][cnt] = 0
iR_pix[intmat][0][cnt] = 0
fR_pix[intmat][0][cnt] = 0
check[cnt,intmat] = 0
# files_treated.append(files)
return strain_matrix, strain_matrixs, rotation_matrix, col, colx, coly, \
match_rate, mat_global, cnt, files_treated,spots_len,iR_pix,fR_pix, check, best_match


sorted_data = np.transpose(np.array([data_theta, data_chi]))
tabledistancerandom = np.transpose(GT.calculdist_from_thetachi(sorted_data, sorted_data))

Expand Down Expand Up @@ -9686,27 +9725,46 @@ def predict_preprocessMultiMatProcess(files, cnt,
print("Error writing the cor file", path)

elif files.split(".")[-1] == "cor":
# print("Entering Cor file read section")
seednumber = "Experimental COR file"
allres = IOLT.readfile_cor(files, True)
data_theta, data_chi, peakx, peaky, intensity = allres[1:6]
CCDcalib = allres[-1]
detectorparameters = allres[-2]
# print('detectorparameters from file are: '+ str(detectorparameters))
pixelsize = CCDcalib['pixelsize']
CCDLabel = CCDcalib['CCDLabel']
framedim = dictLT.dict_CCD[CCDLabel][0]
dict_dp={}
dict_dp['kf_direction']=default_detector_geom
dict_dp['detectorparameters']=detectorparameters
dict_dp['detectordistance']=detectorparameters[0]
dict_dp['detectordiameter']=pixelsize*framedim[0]
dict_dp['pixelsize']=pixelsize
dict_dp['dim']=framedim
dict_dp['peakX']=peakx
dict_dp['peakY']=peaky
dict_dp['intensity']=intensity

try:
# print("Entering Cor file read section")
seednumber = "Experimental COR file"
allres = IOLT.readfile_cor(files, True)
data_theta, data_chi, peakx, peaky, intensity = allres[1:6]
CCDcalib = allres[-1]
detectorparameters = allres[-2]
# print('detectorparameters from file are: '+ str(detectorparameters))
pixelsize = CCDcalib['pixelsize']
CCDLabel = CCDcalib['CCDLabel']
framedim = dictLT.dict_CCD[CCDLabel][0]
dict_dp={}
dict_dp['kf_direction']=default_detector_geom
dict_dp['detectorparameters']=detectorparameters
dict_dp['detectordistance']=detectorparameters[0]
dict_dp['detectordiameter']=pixelsize*framedim[0]
dict_dp['pixelsize']=pixelsize
dict_dp['dim']=framedim
dict_dp['peakX']=peakx
dict_dp['peakY']=peaky
dict_dp['intensity']=intensity
except:
print("Error in Cor file reading for "+ files)
for intmat in range(matricies):
rotation_matrix[intmat][0][cnt,:,:] = np.zeros((3,3))
strain_matrix[intmat][0][cnt,:,:] = np.zeros((3,3))
strain_matrixs[intmat][0][cnt,:,:] = np.zeros((3,3))
col[intmat][0][cnt,:] = 0,0,0
colx[intmat][0][cnt,:] = 0,0,0
coly[intmat][0][cnt,:] = 0,0,0
match_rate[intmat][0][cnt] = 0
mat_global[intmat][0][cnt] = 0
spots_len[intmat][0][cnt] = 0
iR_pix[intmat][0][cnt] = 0
fR_pix[intmat][0][cnt] = 0
check[cnt,intmat] = 0
# files_treated.append(files)
return strain_matrix, strain_matrixs, rotation_matrix, col, colx, coly, \
match_rate, mat_global, cnt, files_treated,spots_len,iR_pix,fR_pix, check, best_match, None

sorted_data = np.transpose(np.array([data_theta, data_chi]))
tabledistancerandom = np.transpose(GT.calculdist_from_thetachi(sorted_data, sorted_data))

Expand Down

0 comments on commit 9110051

Please sign in to comment.