if method == "Rbf":

    #Scipy Rbf method
    self._interpolator = list()
    for n in range(self._num_bins):
        self._interpolator.append(_interpolate_wrapper(interpolate.Rbf,args=(tuple(used_parameters.T) + (flattened_feature_set[:,n],)),kwargs=kwargs))

else:

    #Compute the pairwise squared distances between points
    distances = ((used_parameters[None] - used_parameters[:,None])**2).sum(-1)
    epsilon = distances[np.triu_indices(len(distances),k=1)].mean()
    kernel = method(distances,epsilon)
    weights = np.linalg.solve(kernel,self.feature_set)

    #Wrap the interpolator
    self._interpolator = _function_wrapper(_interpolate_fast,args=[],kwargs={"parameter_grid":used_parameters,"method":method,"weights":weights,"epsilon":epsilon})
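#A minimal sketch of how these precomputed quantities can be evaluated at new
#points (the actual _interpolate_fast is defined elsewhere in lenstools; the
#reconstruction below is an assumption based on the kwargs passed above):
import numpy as np

def _interpolate_fast_sketch(points,parameter_grid,method,weights,epsilon):
    #Squared distances between the query points and the training grid
    distances = ((points[:,None] - parameter_grid[None])**2).sum(-1)
    #Kernel values at those distances, contracted with the precomputed weights
    return method(distances,epsilon).dot(weights)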
>>> from lenstools.statistics import default_callback_loader
>>> map_list = ["conv1.fit","conv2.fit","conv3.fit"]
>>> l_edges = np.arange(200.0,50000.0,200.0)
>>> conv_ensemble = Ensemble.compute(map_list,callback_loader=default_callback_loader,pool=pool,l_edges=l_edges)
"""
#Safety checks
assert callback_loader is not None, "You must specify a callback loader function that returns a numpy array!"
if index is not None:
    assert len(index) == len(file_list),"The number of elements in the index should be the same as the number of files!"
#Build a function wrapper of the callback loader, so it becomes pickleable
_callback_wrapper = _function_wrapper(callback_loader,args=tuple(),kwargs=kwargs)
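#A minimal sketch of the _function_wrapper pattern (borrowed from emcee, as the
#chi2 comments further down note): it stores the callable together with its
#extra arguments so the whole bundle can be pickled and shipped to pool workers
class _function_wrapper(object):

    def __init__(self,f,args,kwargs):
        self.f = f
        self.args = args
        self.kwargs = kwargs

    def __call__(self,x):
        return self.f(x,*self.args,**self.kwargs)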
#Execute the callback on each file in the list (spread calculations with MPI pool if it is not none)
if pool is not None:
    M = pool.map
else:
    M = map
full_data = assemble([r for r in M(_callback_wrapper,file_list) if r is not None])
#Check if user provided column labels
if "columns" in kwargs.keys():
columns = kwargs["columns"]
else:
columns = None
#Return the created ensemble from the full_data array
#Split the parameter array in chunks
if split_chunks > 0:
    assert num_points%split_chunks == 0,"split_chunks must divide exactly the number of points!!"
    chunk_length = num_points//split_chunks
    parameter_chunks = [ parameters[n*chunk_length:(n+1)*chunk_length] for n in range(split_chunks) ]
else:
    raise ValueError("split_chunks must be >0!!")
#Compute the inverse of the covariance matrix once and for all
covinv = np.linalg.inv(features_covariance)
if correct is not None:
    covinv *= precision_bias_correction(correct,len(covinv))
#Build the keyword argument dictionary to be passed to the chi2 calculator
kwargs = {"interpolator":self._interpolator,"inverse_covariance":covinv,"observed_feature":observed_feature}
#Hack to make the chi2 pickleable (from emcee)
chi2_wrapper = _function_wrapper(chi2,tuple(),kwargs)
#Finally map chi2 calculator on the list of chunks
if pool is not None:
    M = pool.map
else:
    M = map
chi2_list = list(M(chi2_wrapper,parameter_chunks))
return np.array(chi2_list).reshape(num_points)
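#precision_bias_correction above rescales the inverse covariance to undo the
#bias that comes from inverting a covariance matrix estimated from a finite
#number of realizations; assuming it implements the standard Anderson-Hartlap
#factor (Hartlap et al. 2007), it reduces to:
def precision_bias_correction_sketch(num_realizations,num_bins):
    return (num_realizations - num_bins - 2.0)/(num_realizations - 1.0)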
#Split the parameter array in chunks
if split_chunks > 0:
    assert num_points%split_chunks == 0,"split_chunks must divide exactly the number of points!!"
    chunk_length = num_points//split_chunks
    parameter_chunks = [ parameters[n*chunk_length:(n+1)*chunk_length] for n in range(split_chunks) ]
else:
    raise ValueError("split_chunks must be >0!!")
#Compute the inverse of the covariance matrix once and for all
covinv = np.linalg.inv(features_covariance)
#Build the keyword argument dictionary to be passed to the chi2 calculator
kwargs = {"num_bins":self._num_bins,"interpolator":self._interpolator,"inverse_covariance":covinv,"observed_feature":observed_feature}
#Hack to make the chi2 pickleable (from emcee)
chi2_wrapper = _function_wrapper(chi2,tuple(),kwargs)
#Finally map chi2 calculator on the list of chunks
if pool is not None:
    M = pool.map
else:
    M = map

chi2_list = list(M(chi2_wrapper,parameter_chunks))
return np.array(chi2_list).reshape(num_points)
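#A sketch of the quadratic form the chi2 callable plausibly computes for each
#chunk of parameters (assuming a vectorized interpolator; the per-bin
#interpolator list of the second variant would be looped over instead):
import numpy as np

def chi2_sketch(parameters,interpolator,inverse_covariance,observed_feature,**kwargs):
    #Residuals between the interpolated and the observed feature
    residuals = interpolator(parameters) - observed_feature[None]
    #One chi squared value per parameter combination in the chunk
    return (residuals.dot(inverse_covariance)*residuals).sum(-1)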
:type kwargs: dict.
:returns: the bootstrapped statistic
:rtype: assemble return type
"""
#Safety check
assert bootstrap_size<=self.nobs,"The size of the resampling cannot exceed the original number of realizations"
#Set the random seed
if seed is not None:
    np.random.seed(seed)
#Build a function wrapper of the callback loader, so it becomes pickleable
_callback_wrapper = _function_wrapper(callback,args=tuple(),kwargs=kwargs)
#MPI Pool
if pool is None:
    M = map
else:
    M = pool.map
#Construct the randomization matrix
randomizer = np.random.randint(self.nobs,size=(resample,bootstrap_size))
#Compute the statistic with the callback
statistic = assemble(list(M(_callback_wrapper,[ self.reindex(r) for r in randomizer ])))
#Return the bootstrapped statistic
return statistic
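#A usage sketch of the bootstrap above (names are assumptions): resample the
#ensemble 1000 times with 10 realizations per resampling, bootstrapping the mean
#>>> distribution = conv_ensemble.bootstrap(lambda e:e.mean(),bootstrap_size=10,resample=1000,seed=22)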
>>> map_list = ["conv1.fit","conv2.fit","conv3.fit"]
>>> l_edges = np.arange(200.0,50000.0,200.0)
>>> conv_ensemble = Ensemble.fromfilelist(map_list)
>>> conv_ensemble.load(callback_loader=default_callback_loader,pool=pool,l_edges=l_edges)
"""
if callback_loader is None:
    callback_loader = lambda f: np.load(f)
self.pool = pool
#Build a function wrapper of the callback loader, so it becomes pickleable
_callback_wrapper = _function_wrapper(callback_loader,args=tuple(),kwargs=kwargs)
#Execute the callback on each file in the list (spread calculations with MPI pool if it is not none)
if pool is not None:
    M = pool.map
else:
    M = map
full_data = np.array(list(M(_callback_wrapper,self.file_list)))
assert isinstance(full_data,np.ndarray)
assert full_data.shape[0] == self.num_realizations
if from_old:
    full_data = full_data[0]
self.num_realizations = full_data.shape[0]