Dx map

<source lang="python">
from tempfile import mkdtemp
from math import sqrt
import sys
# relax module imports.
# NOTE: the module paths below are assumptions for the relax version this script was
# written for and may need adjusting; the user functions (pipe, relax_disp, dx,
# grid_search, minimise) are provided by the relax prompt/script environment itself.
from data_store import Relax_data_store; ds = Relax_data_store()
from pipe_control.mol_res_spin import return_spin
from specific_analyses.relax_disp.data import loop_exp_frq
from specific_analyses.relax_disp.variables import EXP_TYPE_CPMG_SQ
from specific_analyses.relax_disp import optimisation
# Analysis variables.
##################################################################################
# The dispersion model to test.
if not hasattr(ds, 'data'):
    # Add more spins here.
    spins = [
            ['Ala', 1, 'N', {'r2': {r20_key_1: 25.0, r20_key_2: 24.0}, 'r2a': {r20_key_1: 25.0, r20_key_2: 24.0}, 'r2b': {r20_key_1: 25.0, r20_key_2: 24.0}, 'kex': 2200.0, 'pA': 0.993, 'dw': 13.50} ],
            #['Ala', 2, 'N', {'r2': {r20_key_1: 15.0, r20_key_2: 14.0}, 'r2a': {r20_key_1: 15.0, r20_key_2: 14.0}, 'r2b': {r20_key_1: 15.0, r20_key_2: 14.0}, 'kex': 2200.0, 'pA': 0.993, 'dw': 1.5} ],
            ]

    # Collect the experiments to simulate.
    ds.data = [model_create, model_analyse, spins, exps]
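
# NOTE: model_create, model_analyse, exps and the r20_key_1/r20_key_2 dictionary keys
# used above are assumed to be defined earlier in the full script (the R20 keys are
# typically generated per spectrometer frequency, e.g. with generate_r20_key()), so
# this block only supplies defaults when the data has not been passed in externally.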
# Set up the variables to be used.
##################################################################################
# The tmp directory.
if not hasattr(ds, 'tmpdir'):
    ds.tmpdir = None

# The results directory.
if not hasattr(ds, 'resdir'):
    ds.resdir = None
# Whether to set the grid search R20 values from the minimum R2eff values.
if not hasattr(ds, 'set_grid_r20_from_min_r2eff'):
    ds.set_grid_r20_from_min_r2eff = True
# The plot_curves flag.
if not hasattr(ds, 'plot_curves'):
    ds.plot_curves = False
# The conversion for ShereKhan at http://sherekhan.bionmr.org/.
if not hasattr(ds, 'sherekhan_input'):
    ds.sherekhan_input = False
 
# The set R2eff error, used both for the error bars on the graphs and for the weighting in the fitting.
# We set the error to be the same for all ncyc experiments.
if not hasattr(ds, 'r2eff_err'):
    ds.r2eff_err = 0.1
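
# For reference: the error set above enters the fits through the usual least-squares
# weighting, i.e. roughly chi2 = sum_i ((R2eff_calc_i - R2eff_i) / r2eff_err)**2, so a
# single uniform value of 0.1 simply weights all dispersion points equally.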
 
# The print result info.
if not hasattr(ds, 'print_res'):
    ds.print_res = True
# Make a dx map to be opened in OpenDX.
# To map the hypersurface of chi2 when altering kex, dw and pA.
if not hasattr(ds, 'opendx'):
    ds.opendx = True

# Which parameters to map on the chi2 surface.
if not hasattr(ds, 'dx_params'):
    ds.dx_params = ['dw', 'pA', 'kex']
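
# A rough note on cost: with the three parameters above and ds.dx_inc increments per
# dimension (set below), the chi2 function is evaluated on a regular 3D grid of roughly
# (dx_inc + 1)**3 points, which is why a small dx_inc is used for quick tests and
# 70-100 increments only for production-quality maps.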
# How many increments to map for each of the parameters.
# Above 20 is good.  70-100 gives quite good resolution.  4 is for speed tests.
if not hasattr(ds, 'dx_inc'):
    #ds.dx_inc = 70
    ds.dx_inc = 4

# If the bounds are set to None, the normal grid search bounds are used.
if not hasattr(ds, 'dx_lower_bounds'):
    ds.dx_lower_bounds = None
    #ds.dx_lower_bounds = [0.0, 0.5, 1.0]

# If the bounds are set to None, the normal grid search bounds are used.
if not hasattr(ds, 'dx_upper_bounds'):
    #ds.dx_upper_bounds = None
    ds.dx_upper_bounds = [10.0, 1.0, 3000.0]

# If dx_chi_surface is None, the Innermost, Inner, Middle and Outer isosurfaces
# are placed at the 10, 20, 50 and 90 percentiles of all chi2 values.
if not hasattr(ds, 'dx_chi_surface'):
    ds.dx_chi_surface = None
    #ds.dx_chi_surface = [1000.0, 20000.0, 30000.0, 30000.0]


##################################################################################
# Define repeating functions.
##################################################################################

# Define function to store the grid results.
def save_res(res_spins):
    res_list = []
    for res_name, res_num, spin_name, params in res_spins:
        cur_spin_id = ":%i@%s"%(res_num, spin_name)
        cur_spin = return_spin(cur_spin_id)

        par_dic = {}
        # Now read the parameters.
        for mo_param in cur_spin.params:
            par_dic.update({mo_param : getattr(cur_spin, mo_param)})

        # Append the result.
        res_list.append([res_name, res_num, spin_name, par_dic])

    return res_list


# Define function to print the relative change.
def print_res(cur_spins=None, grid_params=None, min_params=None, clust_params=None, dx_params=[]):
    # Define lists to hold the data.
    dx_set_val = list(range(len(dx_params)))
    dx_clust_val = list(range(len(dx_params)))

    # Compare results.
    if ds.print_res:
        print("\n########################")
        print("Generated data with MODEL:%s"%(model_create))
        print("Analysing with MODEL:%s."%(model_analyse))
        print("########################\n")

    for i in range(len(cur_spins)):
        res_name, res_num, spin_name, params = cur_spins[i]
        cur_spin_id = ":%i@%s"%(res_num, spin_name)
        cur_spin = return_spin(cur_spin_id)

        # Now read the parameters.
        if ds.print_res:
            print("For spin: '%s'"%cur_spin_id)
        for mo_param in cur_spin.params:
            # The R2 is a dictionary, depending on the spectrometer frequency.
            if isinstance(getattr(cur_spin, mo_param), dict):
                grid_r2 = grid_params[mo_param]
                min_r2 = min_params[mo_param]
                clust_r2 = clust_params[mo_param]
                set_r2 = params[mo_param]
                for key, val in getattr(cur_spin, mo_param).items():
                    grid_r2_frq = grid_r2[key]
                    min_r2_frq = min_r2[key]
                    clust_r2_frq = clust_r2[key]
                    set_r2_frq = set_r2[key]
                    frq = float(key.split(EXP_TYPE_CPMG_SQ+' - ')[-1].split('MHz')[0])
                    rel_change = sqrt( (clust_r2_frq - set_r2_frq)**2 / (clust_r2_frq)**2 )
                    if ds.print_res:
                        print("%s %s %s %s %.1f GRID=%.3f MIN=%.3f CLUST=%.3f SET=%.3f RELC=%.3f"%(cur_spin.model, res_name, cur_spin_id, mo_param, frq, grid_r2_frq, min_r2_frq, clust_r2_frq, set_r2_frq, rel_change))
                    if rel_change > ds.rel_change:
                        if ds.print_res:
                            print("###################################")
                            print("WARNING: %s has a relative change above %.2f, at %.4f."%(key, ds.rel_change, rel_change))
                            print("###################################\n")
            else:
                grid_val = grid_params[mo_param]
                min_val = min_params[mo_param]
                clust_val = clust_params[mo_param]
                set_val = params[mo_param]
                rel_change = sqrt( (clust_val - set_val)**2 / (clust_val)**2 )
                if ds.print_res:
                    print("%s %s %s %s GRID=%.3f MIN=%.3f CLUST=%.3f SET=%.3f RELC=%.3f"%(cur_spin.model, res_name, cur_spin_id, mo_param, grid_val, min_val, clust_val, set_val, rel_change))
                if rel_change > ds.rel_change:
                    if ds.print_res:
                        print("###################################")
                        print("WARNING: %s has a relative change above %.2f, at %.4f."%(mo_param, ds.rel_change, rel_change))
                        print("###################################\n")

                # Store the values for the dx map.
                if mo_param in dx_params:
                    dx_set_val[dx_params.index(mo_param)] = set_val
                    dx_clust_val[dx_params.index(mo_param)] = clust_val

    return dx_set_val, dx_clust_val
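
# The rel_change metric used in print_res() is the relative deviation of the clustered
# fit from the value used to generate the data: sqrt((clust - set)**2 / clust**2) is
# simply abs(clust - set)/abs(clust).  The threshold ds.rel_change is assumed to be set
# elsewhere in the full script, e.g. to something like 0.05 to flag deviations above 5%.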
# Now starting
##################################################################################
# Set up the data pipe.
##################################################################################
# Extract the models, spins and experiments.
model_create = ds.data[0]
model_analyse = ds.data[1]
cur_spins = ds.data[2]
exps = ds.data[3]
# Now loop over the experiments, to set the variables in relax.
exp_ids = []
for exp in exps:
pipe.switch(pipe_name=ds.pipe_name_r2eff)
# Then select the model to create the data.
relax_disp.select_model(model=model_create)
if isinstance(getattr(cur_spin, mo_param), dict):
    set_r2 = params[mo_param]
    for key, val in set_r2.items():
        # Update the value to a float.
        set_r2.update({ key : float(val) })
## Now doing the back calculation of R2eff values.

# First loop over the frequencies.
i = 0
for exp_type, frq, ei, mi in loop_exp_frq(return_indices=True):
    exp_id = exp_ids[mi]
    file.close()

    # Read in the R2eff file to create the structure.
    # This is a trick, or else relax complains.
    relax_disp.r2eff_read_spin(id=exp_id, spin_id=cur_spin_id, file=file_name, dir=ds.tmpdir, disp_point_col=1, data_col=2, error_col=3)

    ### Now back-calculate the R2eff values from the parameters, and stuff them back in.
    print("Generating data with MODEL:%s, for spin id:%s"%(model_create, cur_spin_id))
    r2effs = optimisation.back_calc_r2eff(spin=cur_spin, spin_id=cur_spin_id)

    # Read in the R2eff file to put into the spin structure.
    relax_disp.r2eff_read_spin(id=exp_id, spin_id=cur_spin_id, file=file_name, dir=ds.resdir, disp_point_col=1, data_col=2, error_col=3)

    # Add to the counter.
    i += 1

print("Did the following number of iterations: %i"%i)
### Now do fitting.
#grid_search(lower=None, upper=None, inc=10, constraints=True, verbosity=ds.verbosity)
ds.grid_results = save_res(cur_spins)
## Now do minimisation.
 
minimise(min_algor='simplex', func_tol=ds.set_func_tol, max_iter=ds.set_max_iter, constraints=True, scaling=True, verbosity=ds.verbosity)
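
# Note: 'simplex' selects the Nelder-Mead simplex optimiser, which requires no gradients;
# ds.set_func_tol and ds.set_max_iter are assumed to be defined earlier in the full
# script and control the convergence tolerance and the iteration cap.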
print(cdp.clustering)
relax_disp.sherekhan_input(force=True, spin_id=None, dir=ds.resdir)
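
# As with ds.grid_results above, the minimised and clustered parameter values are
# expected to be stored via save_res() as ds.min_results and ds.clust_results, since
# both are read back when collecting the results below.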
 
# Looping over data, to collect the results.
# Define which dx_params to collect for.
for i in range(len(cur_spins)):
    ds.dx_set_val, ds.dx_clust_val = print_res(cur_spins=cur_spins, grid_params=ds.grid_results[i][3], min_params=ds.min_results[i][3], clust_params=ds.clust_results[i][3], dx_params=ds.dx_params)
## Do a dx map.
relax_disp.select_model(model=model_analyse)
# First loop over the defined spins and set the model parameters which are not in ds.dx_params.
for i in range(len(cur_spins)):
    res_name, res_num, spin_name, params = cur_spins[i]
    cur_spin_id = ":%i@%s"%(res_num, spin_name)
    cur_spin = return_spin(cur_spin_id)

    for mo_param in cur_spin.params:
        # The R2 is a dictionary, depending on the spectrometer frequency.
        if isinstance(getattr(cur_spin, mo_param), dict):
            set_r2 = params[mo_param]
            if mo_param not in ds.dx_params:
                for key, val in set_r2.items():
                    # Update the value to a float.
                    set_r2.update({ key : float(val) })
                    print("Setting param:%s to :%f"%(key, float(val)))
                # Set it back.
                setattr(cur_spin, mo_param, set_r2)

        # For non-dict values.
        else:
            if mo_param not in ds.dx_params:
                before = getattr(cur_spin, mo_param)
                setattr(cur_spin, mo_param, float(params[mo_param]))
                after = getattr(cur_spin, mo_param)
                print("Setting param:%s to :%f"%(mo_param, after))
    cur_model = model_analyse.replace(' ', '_')
    file_name_map = "%s_map%s" % (cur_model, cur_spin_id.replace('#', '_').replace(':', '_').replace('@', '_'))
    file_name_point = "%s_point%s" % (cur_model, cur_spin_id.replace('#', '_').replace(':', '_').replace('@', '_'))
    dx.map(params=ds.dx_params, map_type='Iso3D', spin_id=cur_spin_id, inc=ds.dx_inc, lower=ds.dx_lower_bounds, upper=ds.dx_upper_bounds, axis_incs=10, file_prefix=file_name_map, dir=ds.resdir, point=[ds.dx_set_val, ds.dx_clust_val], point_file=file_name_point, chi_surface=ds.dx_chi_surface)

    # vp_exec: A flag specifying whether to execute the visual program automatically at start-up.
    #dx.execute(file_prefix=file_name, dir=ds.resdir, dx_exe='dx', vp_exec=True)
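
    # The dx.map call above writes the chi2 values on the dw/pA/kex grid, the OpenDX
    # program and the point file (holding the two marked points: the parameter values
    # used to generate the data and the clustered fit) into ds.resdir.  The commented-out
    # dx.execute call would launch OpenDX ('dx') on these files directly; otherwise the
    # map can be opened manually in OpenDX.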
    if ds.print_res:
        print("For spin: '%s'"%cur_spin_id)
        print("Params for dx map is")
        print(ds.dx_params)
        print("Point param for creating dx map is")
        print(ds.dx_set_val)
        print("Minimised point in dx map is")
        print(ds.dx_clust_val)

    # Print the result again after the dx map.
    if ds.opendx:
        print_res(cur_spins=cur_spins, grid_params=ds.grid_results[i][3], min_params=ds.min_results[i][3], clust_params=ds.clust_results[i][3])
</source>