This page collects and summarizes typical usage examples of Python's scipy.zeros_like function. If you are wondering what scipy.zeros_like does, how to use it, or what real calls look like, the hand-picked code examples below should help.
A total of 20 zeros_like code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
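Before the examples, here is a minimal sketch of what the function does. In the SciPy versions these snippets were written for, scipy.zeros_like is simply a re-export of numpy.zeros_like: it returns a new array of zeros with the same shape and dtype as its argument. Recent SciPy releases no longer expose these NumPy aliases, so in current code you would call numpy.zeros_like directly.

import numpy as np

template = np.array([[1.5, 2.0], [3.0, 4.5]])
z = np.zeros_like(template)    # same shape and dtype as template, filled with zeros
print(z.shape, z.dtype)        # (2, 2) float64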
Example 1: fitKronApprox
def fitKronApprox(a):
    Sbg = SP.zeros_like(S[0])
    Kbg = SP.zeros_like(K[0])
    for i in range(len(S)):
        Sbg += a[i] * S[i]
    for i in range(len(K)):
        Kbg += a[i + len(S)] * K[i]
    Gamma1 = SP.kron(Sbg, Kbg)
    return ((Gamma - Gamma1)**2).sum()
Author: MMesbahU, Project: limix, Lines of code: 7, Source file: FastVDMM.py
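fitKronApprox measures the squared Frobenius error between a target matrix Gamma and a Kronecker product of weighted sums of the matrices in S and K; in limix it serves as an objective for an optimizer. Below is a self-contained sketch of that kind of usage, with made-up S, K and Gamma and with scipy.optimize.minimize standing in for whatever optimizer FastVDMM actually uses; the objective is restated as a closure so the sketch does not depend on module-level globals.

import numpy as np
from scipy.optimize import minimize

# Hypothetical data: two candidate row/column covariances and a target matrix.
S = [np.eye(3), np.ones((3, 3))]
K = [np.eye(2), np.diag([1.0, 2.0])]
Gamma = np.kron(2.0 * S[0] + 0.5 * S[1], K[0] + K[1])

def kron_approx_error(a):
    # Same objective as fitKronApprox above, written as a closure over S, K, Gamma.
    Sbg = sum(a[i] * S[i] for i in range(len(S)))
    Kbg = sum(a[i + len(S)] * K[i] for i in range(len(K)))
    return ((Gamma - np.kron(Sbg, Kbg)) ** 2).sum()

res = minimize(kron_approx_error, x0=np.ones(len(S) + len(K)), method='Nelder-Mead')
print(res.x, kron_approx_error(res.x))   # error should be close to zero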
Example 2: estCumPos
def estCumPos(position, offset=0, chrom_len=None):
    '''
    Compute the cumulative position of each variant given the position and the chromosome.
    Also return the starting cumulative position of each chromosome.
    Args:
        position:   pandas DataFrame of basepair positions (key='pos') and chromosome values (key='chrom').
                    The DataFrame will be updated with the field 'pos_cum'.
        chrom_len:  vector with predefined chromosome lengths
        offset:     offset between chromosomes for cumulative position (default 0 bp)
    Returns:
        chrom_pos, position:
            chrom_pos:  numpy.array of starting cumulative positions for each chromosome
            position:   augmented position object where cumulative positions are defined
    '''
    RV = position.copy()
    chromvals = sp.unique(position['chrom'])     # sp.unique is always sorted
    chrom_pos_cum = sp.zeros_like(chromvals)     # starting position of each chromosome
    pos_cum = sp.zeros_like(position.shape[0])
    if not 'pos_cum' in position:
        RV["pos_cum"] = sp.zeros_like(position['pos'])  # cumulative position of each variant
    pos_cum = RV['pos_cum'].values
    maxpos_cum = 0
    for i, mychrom in enumerate(chromvals):
        chrom_pos_cum[i] = maxpos_cum
        i_chr = position['chrom'] == mychrom
        if chrom_len is None:
            maxpos = position['pos'][i_chr].max() + offset
        else:
            maxpos = chrom_len[i] + offset
        pos_cum[i_chr.values] = maxpos_cum + position.loc[i_chr, 'pos']
        maxpos_cum += maxpos
    return RV, chrom_pos_cum
Author: PMBio, Project: limix, Lines of code: 35, Source file: data_util.py
Example 3: makeinputh5
def makeinputh5(Iono, basedir):
    basedir = Path(basedir).expanduser()
    Param_List = Iono.Param_List
    dataloc = Iono.Cart_Coords
    times = Iono.Time_Vector
    velocity = Iono.Velocity
    zlist, idx = sp.unique(dataloc[:, 2], return_inverse=True)
    siz = list(Param_List.shape[1:])
    vsiz = list(velocity.shape[1:])
    datalocsave = sp.column_stack((sp.zeros_like(zlist), sp.zeros_like(zlist), zlist))
    outdata = sp.zeros([len(zlist)] + siz)
    outvel = sp.zeros([len(zlist)] + vsiz)
    for izn, iz in enumerate(zlist):
        arr = sp.argwhere(idx == izn)
        outdata[izn] = sp.mean(Param_List[arr], axis=0)
        outvel[izn] = sp.mean(velocity[arr], axis=0)
    Ionoout = IonoContainer(datalocsave, outdata, times, Iono.Sensor_loc, ver=0,
                            paramnames=Iono.Param_Names, species=Iono.Species, velocity=outvel)
    ofn = basedir / 'startdata.h5'
    print('writing {}'.format(ofn))
    Ionoout.saveh5(str(ofn))
Author: jswoboda, Project: RadarDataSim, Lines of code: 27, Source file: barkertest.py
Example 4: estCumPos
def estCumPos(pos, chrom, offset=20000000):
    '''
    Compute the cumulative position of each variant given the position and the chromosome.
    Also return the starting cumulative position of each chromosome.
    Args:
        pos:        scipy.array of basepair positions (on the chromosome)
        chrom:      scipy.array of chromosomes
        offset:     offset between chromosomes for cumulative position (default 20000000 bp)
    Returns:
        cum_pos:    scipy.array of cumulative positions
        chrom_pos:  scipy.array of starting cumulative positions for each chromosome
    '''
    chromvals = SP.unique(chrom)          # SP.unique is always sorted
    chrom_pos = SP.zeros_like(chromvals)  # starting position of each chromosome
    cum_pos = SP.zeros_like(pos)          # cumulative position of each variant
    maxpos_cum = 0
    for i, mychrom in enumerate(chromvals):
        chrom_pos[i] = maxpos_cum
        i_chr = chrom == mychrom
        maxpos = pos[i_chr].max() + offset
        maxpos_cum += maxpos
        cum_pos[i_chr] = chrom_pos[i] + pos[i_chr]
    return cum_pos, chrom_pos
Author: PMBio, Project: limix, Lines of code: 25, Source file: data_deprecated.py
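A quick, self-contained check of the array-based estCumPos above, with made-up positions and chromosome labels. It assumes the definition above is in scope and that scipy still re-exports the NumPy array constructors (with current SciPy, substitute numpy).

import scipy as SP

pos = SP.array([100, 500, 200, 300])
chrom = SP.array([1, 1, 2, 2])
cum_pos, chrom_pos = estCumPos(pos, chrom, offset=1000)
# chrom_pos -> [0, 1500]: chromosome 2 starts after max(pos on chromosome 1) + offset
# cum_pos   -> [100, 500, 1700, 1800]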
Example 5: makeinputh5
def makeinputh5(Iono, basedir):
    """This will make an h5 file for the IonoContainer that can be used as starting
    points for the fitter. The ionocontainer will be averaged over the x and y dimensions
    of space to give an average value of the parameters for each altitude.
    Inputs
    Iono - An instance of the Ionocontainer class that will be averaged over so it can
        be used for fitter starting points.
    basedir - A string that holds the directory that the file will be saved to.
    """
    # Get the parameters from the original data
    Param_List = Iono.Param_List
    dataloc = Iono.Cart_Coords
    times = Iono.Time_Vector
    velocity = Iono.Velocity
    zlist, idx = sp.unique(dataloc[:, 2], return_inverse=True)
    siz = list(Param_List.shape[1:])
    vsiz = list(velocity.shape[1:])
    datalocsave = sp.column_stack((sp.zeros_like(zlist), sp.zeros_like(zlist), zlist))
    outdata = sp.zeros([len(zlist)] + siz)
    outvel = sp.zeros([len(zlist)] + vsiz)
    # Do the averaging across space
    for izn, iz in enumerate(zlist):
        arr = sp.argwhere(idx == izn)
        outdata[izn] = sp.mean(Param_List[arr], axis=0)
        outvel[izn] = sp.mean(velocity[arr], axis=0)
    Ionoout = IonoContainer(datalocsave, outdata, times, Iono.Sensor_loc, ver=0,
                            paramnames=Iono.Param_Names, species=Iono.Species, velocity=outvel)
    Ionoout.saveh5(basedir / 'startdata.h5')
Author: jswoboda, Project: RadarDataSim, Lines of code: 30, Source file: testdishmode.py
Example 6: _get_indices
def _get_indices(self, element, labels, return_indices, mode):
    r'''
    This is the actual method for getting indices, but should not be called
    directly.
    '''
    if mode == 'union':
        union = sp.zeros_like(self._get_info(element=element, label='all'), dtype=bool)
        for item in labels:  # iterate over labels list and collect all indices
            union = union + self._get_info(element=element, label=item)
        ind = union
    elif mode == 'intersection':
        intersect = sp.ones_like(self._get_info(element=element, label='all'), dtype=bool)
        for item in labels:  # iterate over labels list and collect all indices
            intersect = intersect * self._get_info(element=element, label=item)
        ind = intersect
    elif mode == 'not_intersection':
        not_intersect = sp.zeros_like(self._get_info(element=element, label='all'), dtype=int)
        for item in labels:  # iterate over labels list and collect all indices
            info = self._get_info(element=element, label=item)
            not_intersect = not_intersect + sp.int8(info)
        ind = (not_intersect == 1)
    elif mode == 'none':
        none = sp.zeros_like(self._get_info(element=element, label='all'), dtype=int)
        for item in labels:  # iterate over labels list and collect all indices
            info = self._get_info(element=element, label=item)
            none = none - sp.int8(info)
        ind = (none == 0)
    if return_indices:
        ind = sp.where(ind == True)[0]
    return ind
Author: AgustinPerez, Project: OpenPNM, Lines of code: 29, Source file: __Tools__.py
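The four modes above reduce to element-wise boolean algebra on the per-label masks returned by _get_info. A standalone sketch with two made-up label masks follows; for more than two labels, 'not_intersection' keeps elements carrying exactly one label, which is what the integer counting in the method implements.

import numpy as np

a = np.array([True, True, False, False])    # mask for label 'a'
b = np.array([True, False, True, False])    # mask for label 'b'

union = a | b                   # in at least one label
intersection = a & b            # in every label
not_intersection = a ^ b        # in exactly one label (count == 1)
none = ~(a | b)                 # in no label at all
indices = np.where(union)[0]    # what the return_indices=True branch yields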
Example 7: rankStandardizeNormal
def rankStandardizeNormal(X):
    """
    Gaussianize X: [samples x phenotypes]
    - each phenotype is converted to ranks and transformed back to normal using the inverse CDF
    """
    Is = X.argsort(axis=0)
    RV = SP.zeros_like(X)
    rank = SP.zeros_like(X)
    for i in xrange(X.shape[1]):
        x = X[:, i]
        i_nan = SP.isnan(x)
        if 0:
            Is = x.argsort()
            rank = SP.zeros_like(x)
            rank[Is] = SP.arange(X.shape[0])
            # add one to ensure nothing = 0
            rank += 1
        else:
            rank = st.rankdata(x[~i_nan])
        # divide by (N+1) which yields uniform [0,1]
        rank /= ((~i_nan).sum() + 1)
        # apply inverse gaussian cdf
        RV[~i_nan, i] = SP.sqrt(2) * special.erfinv(2 * rank - 1)
        RV[i_nan, i] = x[i_nan]
    return RV
Author: MMesbahU, Project: limix, Lines of code: 25, Source file: preprocess.py
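The same rank-based inverse-normal transform can be written compactly against current NumPy/SciPy. This is a sketch of the idea rather than the limix implementation, and it skips the NaN handling shown above.

import numpy as np
from scipy.stats import rankdata
from scipy.special import erfinv

def gaussianize_column(x):
    # Ranks 1..N mapped into (0, 1), then passed through the standard normal
    # inverse CDF: Phi^{-1}(u) = sqrt(2) * erfinv(2u - 1).
    u = rankdata(x) / (len(x) + 1.0)
    return np.sqrt(2.0) * erfinv(2.0 * u - 1.0)

x = np.random.rand(1000) ** 3     # skewed toy data
z = gaussianize_column(x)         # approximately standard normal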
Example 8: plot_drainage_curve
def plot_drainage_curve(self, pore_volume='volume', throat_volume='volume',
                        pore_label='all', throat_label='all'):
    r"""
    Plot drainage capillary pressure curve
    """
    try:
        PcPoints = sp.unique(self['pore.inv_Pc'])
    except:
        raise Exception('Cannot print drainage curve: ordinary percolation simulation has not been run')
    pores = self._net.pores(labels=pore_label)
    throats = self._net.throats(labels=throat_label)
    Snwp_t = sp.zeros_like(PcPoints)
    Snwp_p = sp.zeros_like(PcPoints)
    Pvol = self._net['pore.' + pore_volume]
    Tvol = self._net['throat.' + throat_volume]
    Pvol_tot = sum(Pvol)
    Tvol_tot = sum(Tvol)
    for i in range(0, sp.size(PcPoints)):
        Pc = PcPoints[i]
        Snwp_p[i] = sum(Pvol[self._p_inv[pores] <= Pc]) / Pvol_tot
        Snwp_t[i] = sum(Tvol[self._t_inv[throats] <= Pc]) / Tvol_tot
    if sp.mean(self._phase_inv["pore.contact_angle"]) < 90:
        Snwp_p = 1 - Snwp_p
        Snwp_t = 1 - Snwp_t
        PcPoints *= -1
    plt.plot(PcPoints, Snwp_p, 'r.-')
    plt.plot(PcPoints, Snwp_t, 'b.-')
    r'''
    TODO: Add legend to distinguish the pore and throat curves
    '''
    # plt.xlim(xmin=0)
    plt.show()
Author: Maggie1988, Project: OpenPNM, Lines of code: 33, Source file: __OrdinaryPercolation__.py
Example 9: porosity_profile
def porosity_profile(network, fig=None, axis=2):
    r'''
    Compute and plot the porosity profile in all three dimensions
    Parameters
    ----------
    network : OpenPNM Network object
    axis : integer type 0 for x-axis, 1 for y-axis, 2 for z-axis
    Notes
    -----
    The area of the porous medium at any position is calculated from the
    maximum pore coordinates in each direction.
    '''
    if fig is None:
        fig = _plt.figure()
    L_x = _sp.amax(network['pore.coords'][:, 0]) + _sp.mean(((21/88.0)*network['pore.volume'])**(1/3.0))
    L_y = _sp.amax(network['pore.coords'][:, 1]) + _sp.mean(((21/88.0)*network['pore.volume'])**(1/3.0))
    L_z = _sp.amax(network['pore.coords'][:, 2]) + _sp.mean(((21/88.0)*network['pore.volume'])**(1/3.0))
    if axis == 0:
        xlab = 'x-direction'
        area = L_y*L_z
    elif axis == 1:
        xlab = 'y-direction'
        area = L_x*L_z
    else:
        axis = 2
        xlab = 'z-direction'
        area = L_x*L_y
    n_max = _sp.amax(network['pore.coords'][:, axis]) + _sp.mean(((21/88.0)*network['pore.volume'])**(1/3.0))
    steps = _sp.linspace(0, n_max, 100, endpoint=True)
    vals = _sp.zeros_like(steps)
    p_area = _sp.zeros_like(steps)
    t_area = _sp.zeros_like(steps)
    rp = ((21/88.0)*network['pore.volume'])**(1/3.0)
    p_upper = network['pore.coords'][:, axis] + rp
    p_lower = network['pore.coords'][:, axis] - rp
    TC1 = network['throat.conns'][:, 0]
    TC2 = network['throat.conns'][:, 1]
    t_upper = network['pore.coords'][:, axis][TC1]
    t_lower = network['pore.coords'][:, axis][TC2]
    for i in range(0, len(steps)):
        p_temp = (p_upper > steps[i])*(p_lower < steps[i])
        t_temp = (t_upper > steps[i])*(t_lower < steps[i])
        p_area[i] = sum((22/7.0)*(rp[p_temp]**2 - (network['pore.coords'][:, axis][p_temp]-steps[i])**2))
        t_area[i] = sum(network['throat.area'][t_temp])
        vals[i] = (p_area[i]+t_area[i])/area
    yaxis = vals
    xaxis = steps/n_max
    _plt.plot(xaxis, yaxis, 'bo-')
    _plt.xlabel(xlab)
    _plt.ylabel('Porosity')
    fig.show()
Author: Maggie1988, Project: OpenPNM, Lines of code: 58, Source file: Plots.py
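Incidentally, the fractions in this snippet are rational approximations: 22/7 stands in for pi and 21/88 for 3/(4*pi), so rp is the radius of a sphere with the given pore volume. The equivalent expressions written with numpy.pi are shown below; this is an observation about the arithmetic, not part of the OpenPNM API, and the volumes are made up.

import numpy as np

volume = np.array([1.0e-15, 8.0e-15])                 # made-up pore volumes
rp = (3.0 * volume / (4.0 * np.pi)) ** (1.0 / 3.0)    # sphere radius from volume, cf. ((21/88.0)*V)**(1/3.0)
area = np.pi * rp ** 2                                # circular cross-section, cf. (22/7.0)*rp**2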
Example 10: cut_to_stump
def cut_to_stump(self):
    self.max_depth = 0
    self.node_ind = 0
    self.nodes[self.node_ind] = 0
    self.start_index[self.node_ind] = 0
    self.end_index[self.node_ind] = self.subsample.size
    self.num_nodes = 1
    self.num_leafs = 0
    self.left_child = SP.zeros_like(self.left_child)
    self.right_child = SP.zeros_like(self.right_child)
Author: PMBio, Project: limix, Lines of code: 10, Source file: lmm_forest.py
Example 11: predict
def predict(data, coeffs):
    """
    Calculate an autoregressive linear prediction given the signal
    and the prediction coefficients.
    Parameters
    ----------
    data : numpy array
        The signal.
    coeffs : numpy array
        The prediction coefficients.
    Returns
    -------
    data : numpy array
        The predicted signal.
    Notes
    -----
    * The first coefficient, 1, is assumed to be left out.
    Prediction works as follows:
        P = a1+ a2+ a3+ a4
        #   _   _   _   _
        #   #   _   _   _
        #   #   #   _   _
        # = # + # + # + _
        _   #   #   #   #
        _   _   #   #   #
        _   _   _   #   #
        _   _   _   _   #
    Where # is a number and _ is a "don't care".
    This means:
    1. Create an empty pred vector, padded by the number of coefficients
       at the end
    2. Pad original values by number of coefficients at both ends
    3. Crop data in each step accordingly
    4. Crop prediction
    """
    coeffs *= -1
    pred = scipy.zeros_like(data)
    tmp = numpy.hstack((scipy.zeros_like(coeffs), data))
    for j in range(0, coeffs.size):
        offset = coeffs.size - j - 1
        pred = pred + coeffs[j] * tmp[offset:offset + len(pred)]
    return pred[:len(data)]
Author: antiface, Project: dspy, Lines of code: 55, Source file: Predictor.py
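A usage sketch under the assumption of an AR(2) toy signal; the coefficients and noise level are made up. Note that predict negates coeffs in place, so pass a copy if the original array is still needed, and that the function relies on module-level scipy/numpy imports from a SciPy version that still exposes scipy.zeros_like.

import numpy as np

rng = np.random.RandomState(0)
x = np.zeros(500)
e = 0.1 * rng.randn(500)
for n in range(2, 500):
    # AR(2) process: x[n] = 0.6*x[n-1] - 0.2*x[n-2] + noise
    x[n] = 0.6 * x[n - 1] - 0.2 * x[n - 2] + e[n]

# AR polynomial A(z) = 1 - 0.6*z^-1 + 0.2*z^-2 with the leading 1 left out:
a = np.array([-0.6, 0.2])
x_hat = predict(x, a.copy())                 # one-step-ahead prediction
print(np.mean((x[2:] - x_hat[2:]) ** 2))     # roughly the noise variance, 0.01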
Example 12: par_fixed_effect
def par_fixed_effect(tc, X, oob, depth):
    import scipy as SP
    dview = tc[:]
    dview.block = True
    results = dview.apply(fixed_effect, *[X, oob, depth])
    fixed_sum = SP.zeros_like(results[0][0])
    count = SP.zeros_like(results[0][1])
    for res in results:
        fixed_sum += res[0]
        count += res[1]
    return fixed_sum, count
Author: PMBio, Project: limix, Lines of code: 11, Source file: parMixedForest.py
Example 13: par_get_variable_scores
def par_get_variable_scores(tc):
    import scipy as SP
    dview = tc[:]
    dview.block = True
    results = dview.apply(get_variable_scores)
    var_used = SP.zeros_like((results[0])[0])
    log_importance = SP.zeros_like(var_used)
    for result in results:
        var_used += result[0]
        log_importance += result[1]
    return var_used, log_importance
Author: PMBio, Project: limix, Lines of code: 11, Source file: parMixedForest.py
Example 14: ranktrafo
def ranktrafo(data):
    X = data.values[:, None]
    Is = X.argsort(axis=0)
    RV = sp.zeros_like(X)
    rank = sp.zeros_like(X)
    for i in xrange(X.shape[1]):
        x = X[:, i]
        rank = sp.stats.rankdata(x)
        rank /= (X.shape[0] + 1)
        RV[:, i] = sp.sqrt(2) * sp.special.erfinv(2 * rank - 1)
    return RV.flatten()
Author: bmcorser, Project: Azimuth, Lines of code: 12, Source file: util.py
Example 15: exp_gauss_warp
def exp_gauss_warp(X, n, l0, *msb):
    r"""Length scale function which is an exponential of a sum of Gaussians.
    The centers and widths of the Gaussians are free parameters.
    The length scale function is given by
    .. math::
        l = l_0 \exp\left ( \sum_{i=1}^{N}\beta_i\exp\left ( -\frac{(x-\mu_i)^2}{2\sigma_i^2} \right ) \right )
    The number of parameters is equal to three times the number of Gaussians
    plus 1 (for :math:`l_0`). This function is inspired by what Gibbs used in
    his PhD thesis.
    Parameters
    ----------
    X : 1d or 2d array of float
        The points to evaluate the function at. If 2d, it should only have
        one column (but this is not checked to save time).
    n : int
        The derivative order to compute. Used for all `X`.
    l0 : float
        The covariance length scale at the edges of the domain.
    *msb : floats
        Means, standard deviations and weights for each Gaussian, in that order.
    """
    X = scipy.asarray(X, dtype=float)
    msb = scipy.asarray(msb, dtype=float)
    mm = msb[:len(msb) / 3]
    ss = msb[len(msb) / 3:2 * len(msb) / 3]
    bb = msb[2 * len(msb) / 3:]
    # This is done with for-loops, because trying to get fancy with
    # broadcasting was being too memory-intensive for some reason.
    if n == 0:
        l = scipy.zeros_like(X)
        for m, s, b in zip(mm, ss, bb):
            l += b * scipy.exp(-(X - m)**2.0 / (2.0 * s**2.0))
        l = l0 * scipy.exp(l)
        return l
    elif n == 1:
        l1 = scipy.zeros_like(X)
        l2 = scipy.zeros_like(X)
        for m, s, b in zip(mm, ss, bb):
            term = b * scipy.exp(-(X - m)**2.0 / (2.0 * s**2.0))
            l1 += term
            l2 += term * (X - m) / s**2.0
        l = -l0 * scipy.exp(l1) * l2
        return l
    else:
        raise NotImplementedError("Only n <= 1 is supported!")
Author: markchil, Project: gptools, Lines of code: 52, Source file: gibbs.py
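For intuition, the n=0 branch of the warp is easy to evaluate directly. A standalone sketch with a single made-up Gaussian bump, independent of the gptools code above:

import numpy as np

X = np.linspace(0.0, 1.0, 200)
l0, mu, sigma, beta = 1.0, 0.5, 0.1, -0.8       # made-up parameters
# l(x) = l0 * exp( beta * exp(-(x - mu)**2 / (2*sigma**2)) )
l = l0 * np.exp(beta * np.exp(-(X - mu) ** 2 / (2.0 * sigma ** 2)))
# beta < 0 shrinks the length scale near mu, so a GP using this warp can vary faster there.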
Example 16: bptt
def bptt(self, x, t):
    """Back propagation through time of a sample.
    Reference: [1] Deep Learning, Ian Goodfellow, Yoshua Bengio and Aaron Courville, p. 385.
    """
    dU = sp.zeros_like(self.U)
    dW = sp.zeros_like(self.W)
    db = sp.zeros_like(self.b)
    dV = sp.zeros_like(self.V)
    dc = sp.zeros_like(self.c)
    tau = len(x)
    cells = self.forward_propagation(x)
    dh = sp.zeros(self.n_hiddens)
    for i in range(tau - 1, -1, -1):
        # FIXME:
        # 1. Should not use cell[i] since there may be multiple hidden layers.
        # 2. Using exponential family as output should not be specified.
        time_input = x[i]
        one_hot_t = sp.zeros(self.n_features)
        one_hot_t[t[i]] = 1
        # Cell of time i
        cell = cells[i]
        # Hidden layer of current cell
        hidden = cell[0]
        # Output layer of current cell
        output = cell[1]
        # Hidden layer of time i - 1
        prev_hidden = cells[i - 1][0] if i - 1 >= 0 else None
        # Hidden layer of time i + 1
        next_hidden = cells[i + 1][0] if i + 1 < tau else None
        # Error of current time i
        da = hidden.backward()
        next_da = next_hidden.backward() if next_hidden is not None else sp.zeros(self.n_hiddens)
        prev_h = prev_hidden.h if prev_hidden is not None else sp.zeros(self.n_hiddens)
        # FIXME: The error function should not be specified here
        # do = sp.dot(output.backward().T, -one_hot_t / output.y)
        do = output.y - one_hot_t
        dh = sp.dot(sp.dot(self.W.T, sp.diag(next_da)), dh) + sp.dot(self.V.T, do)
        # Gradient back propagation through time
        dc += do
        db += da * dh
        dV += sp.outer(do, hidden.h)
        dW += sp.outer(da * dh, prev_h)
        dU[:, time_input] += da * dh
    return (dU, dW, db, dV, dc)
Author: Yevgnen, Project: RNN, Lines of code: 52, Source file: rnn.py
Example 17: ray_update_worker
def ray_update_worker(args):
    angle, p, reco, chunk, calc_wij_sum = args
    upd = sp.zeros_like(reco)
    wij_sum = sp.zeros_like(reco)
    for j in chunk:
        ray = sp.zeros_like(reco)
        ray[:, j] = 1
        wij = spn.rotate(ray, angle, reshape=False)
        upd += ((p[j] - sp.sum(wij*reco)) / sp.sum(wij**2.0)) * wij
        if calc_wij_sum:
            wij_sum += wij
    if calc_wij_sum:
        return upd, wij_sum
    else:
        return upd, None
Author: djvine, Project: pySART, Lines of code: 15, Source file: pySART.py
Example 18: __call__
def __call__(self, Xi, Xj, ni, nj, hyper_deriv=None, symmetric=False):
    if (ni > 1).any() or (nj > 1).any():
        raise ValueError("Derivative orders greater than one are not supported!")
    wXi = scipy.zeros_like(Xi)
    wXj = scipy.zeros_like(Xj)
    for d in range(0, self.num_dim):
        wXi[:, d] = self.w(Xi[:, d], d, 0)
        wXj[:, d] = self.w(Xj[:, d], d, 0)
    out = self.k(wXi, wXj, ni, nj, hyper_deriv=hyper_deriv, symmetric=symmetric)
    for d in range(0, self.num_dim):
        first_deriv_mask_i = ni[:, d] == 1
        first_deriv_mask_j = nj[:, d] == 1
        out[first_deriv_mask_i] *= self.w(Xi[first_deriv_mask_i, d], d, 1)
        out[first_deriv_mask_j] *= self.w(Xj[first_deriv_mask_j, d], d, 1)
    return out
Author: markchil, Project: gptools, Lines of code: 15, Source file: warping.py
Example 19: check_consistency
def check_consistency(self):
    r'''
    Checks to see if the current geometry conflicts with any other geometry
    '''
    temp = sp.zeros_like(self._net.get_pore_info(label=self.name), dtype=int)
    for item in self._net._geometry:
        temp = temp + sp.array(self._net.get_pore_info(label=item.name), dtype=int)
    print('Geometry labels overlap in', sp.sum(temp > 1), 'pores')
    print('Geometry not yet applied to', sp.sum(temp == 0), 'pores')
    temp = sp.zeros_like(self._net.get_throat_info(label=self.name), dtype=int)
    for item in self._net._geometry:
        temp = temp + sp.array(self._net.get_throat_info(label=item.name), dtype=int)
    print('Geometry labels overlap in', sp.sum(temp > 1), 'throats')
    print('Geometry not yet applied to', sp.sum(temp == 0), 'throats')
Author: AgustinPerez, Project: OpenPNM, Lines of code: 15, Source file: __GenericGeometry__.py
Example 20: _read_iop_from_file
def _read_iop_from_file(self, file_name):
    """
    Generic IOP reader that interpolates the IOP to the common wavelengths defined in the constructor.
    returns: interpolated iop
    """
    lg.info('Reading :: ' + file_name + ' :: and interpolating to ' + str(self.wavelengths))
    if os.path.isfile(file_name):
        iop_reader = csv.reader(open(file_name), delimiter=',', quotechar='"')
        wave = scipy.float32(iop_reader.next())
        iop = scipy.zeros_like(wave)
        for row in iop_reader:
            iop = scipy.vstack((iop, row))
        iop = scipy.float32(iop[1:, :])  # drop the first row of zeros
    else:
        lg.exception('Problem reading file :: ' + file_name)
        raise IOError
    try:
        int_iop = scipy.zeros((iop.shape[0], self.wavelengths.shape[1]))
        for i_iter in range(0, iop.shape[0]):
            # r = scipy.interp(self.wavelengths[0, :], wave, iop[i_iter, :])
            int_iop[i_iter, :] = scipy.interp(self.wavelengths, wave, iop[i_iter, :])
        return int_iop
    except IOError:
        lg.exception('Error interpolating IOP to common wavelength')
        return -1
Author: marrabld, Project: bootstrappy, Lines of code: 29, Source file: deconv.py
Note: the scipy.zeros_like examples on this page were compiled by 纯净天空 from source code and documentation hosted on platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their authors; copyright remains with the original authors, and distribution and use are subject to each project's license. Do not reproduce without permission.