This article collects typical usage examples of the scipy.savetxt function in Python. If you have been wondering what savetxt does, how to call it, or what real code that uses it looks like, the curated examples here should help.
Twenty code examples of the savetxt function are shown below, ordered by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
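Before the examples, a minimal sketch of the call pattern they all share. In the older SciPy versions these snippets target, the top-level scipy namespace re-exported NumPy functions, so scipy.savetxt was the same function as numpy.savetxt; newer SciPy releases have dropped those aliases, which makes numpy.savetxt the portable spelling. The file name and array below are illustrative only:

import numpy as np

data = np.linspace(0.0, 1.0, 12).reshape(4, 3)

# one row of the array per line, tab-separated, four decimal places
np.savetxt('data.txt', data, fmt='%.4f', delimiter='\t')

# legacy code such as the examples below reached the same function through scipy:
# import scipy as sp; sp.savetxt('data.txt', data, fmt='%.4f', delimiter='\t')

The fmt and delimiter arguments are the ones that recur throughout the examples; everything else is project-specific setup around this one call.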
Example 1: Corr
def Corr(GDP,I,C):
    m = sp.shape(GDP)[1]
    GDPIcorr = []
    GDPCcorr = []
    for i in range(0, m):
        gdp = GDP[:,i]
        inv = I[:,i]
        con = C[:,i]
        #Correlation between output and investment for each series
        gdpi = sp.corrcoef(gdp,inv)
        GDPIcorr.append(gdpi[0,1])
        #Correlation between output and consumption for each series
        gdpc = sp.corrcoef(gdp,con)
        GDPCcorr.append(gdpc[0,1])
    #Mean and standard deviation of correlation between GDP and
    #Investment and Consumption over total number of simulations
    GDPICORR = sp.array(GDPIcorr)
    gdpimean = sp.mean(GDPICORR)
    gdpistdev = sp.std(GDPICORR)
    GDPCCORR = sp.array(GDPCcorr)
    gdpcmean = sp.mean(GDPCCORR)
    gdpcstdev = sp.std(GDPCCORR)
    sp.savetxt('GDPICORR.csv',GDPICORR)
    sp.savetxt('GDPCCORR.csv',GDPCCORR)
    print "The mean and standard deviation between GDP and"
    print "Investment and GDP and Consumption followed by"
    print "The lists of each correlation coefficient for"
    print "each series are saved in csv files"
    return gdpimean, gdpistdev, gdpcmean, gdpcstdev
Author: snowdj | Project: byu_macro_boot_camp | Lines: 29 | Source: DSGE_simulation-Sara_Final.py
Example 2: gen_IC
def gen_IC(sigma,rn,outfile="workfile",icdir="ICs", M=5, N=50, lapfile="Laplacian.txt", tries=10, iclist=[]):
    lap = loadtxt(lapfile)
    spa = sparse.csr_matrix(lap)
    success=0
    attempts=0
    while success==0 and attempts<tries:
        try:
            tag='s%.2fr%.3d'%(sigma,rn)
            tag=tag.replace(".", "")
            parameters = [35.0, 16.0, 9.0, 0.4, 0.12, sigma]
            x0=10*(random.random(2*N)-0.5)
            tic=time.time()
            trajectory = integrate.odeint(mimura, x0, range(0,1000), args=(parameters,spa))
            print "integration took", time.time()-tic, "seconds"
            x1=trajectory[-1]
            sol=fsolve(mimura3, x1, args=(parameters,spa),full_output=True)
            x2=sol[0]
            if x2 not in iclist:
                savetxt(icdir+'/init_cond_'+tag+'.txt',x2)
                write_mimu(lap,par=parameters,ic=x2,outfile=outfile)
                iclist.append(x2)
                success=1
            attempts+=1
        except: pass
    return iclist
Author: sideshownick | Project: Sources | Lines: 29 | Source: make_function_runfile.py
Example 3: processdataset
def processdataset(globpath='iimage-*', outputpath='./', margin=17):
    global sp
    sp = margin
    files = glob.glob(globpath)
    files.sort()
    datasets = []
    imagedir = outputpath+'images/'
    if not os.path.exists(imagedir):
        os.makedirs(imagedir)
    datadir = outputpath+'datasets/'
    if not os.path.exists(datadir):
        os.makedirs(datadir)
    for filename in files:
        print filename
        datasets = processimage(filename, imagedir, datasets)
    for i,dataset in enumerate(datasets):
        scipy.savetxt(datadir+'results-'+str(dataset[2])+'.dat',dataset[-1])
Author: dherde | Project: interface-tracking | Lines: 25 | Source: evaluation.py
Example 4: apm_generate_aperture_map
def apm_generate_aperture_map():
    r"""
    Driver function to generate an aperture map from a TIF image.
    """
    # parsing commandline args
    namespace = parser.parse_args()
    if namespace.verbose:
        set_main_logger_level('debug')
    # checking path to prevent accidental overwriting
    if not namespace.aperture_map_name:
        map_name = os.path.basename(namespace.image_file)
        map_name = map_name.replace(os.path.splitext(map_name)[1], '-aperture-map.txt')
        namespace.aperture_map_name = map_name
    #
    map_path = os.path.join(namespace.output_dir, namespace.aperture_map_name)
    if os.path.exists(map_path) and not namespace.force:
        msg = '{} already exists, use "-f" option to overwrite'
        raise FileExistsError(msg.format(map_path))
    # loading image data
    data_array = load_image_data(namespace.image_file, namespace.invert)
    data_array = data_array.astype(sp.int8)
    # summing data array down into a 2-D map
    logger.info('creating 2-D aperture map...')
    aperture_map = sp.sum(data_array, axis=1, dtype=int)
    # saving map
    logger.info('saving aperture map as {}'.format(map_path))
    sp.savetxt(map_path, aperture_map.T, fmt='%d', delimiter='\t')
Author: stadelmanma | Project: netl-AP_MAP_FLOW | Lines: 32 | Source: apm_generate_aperture_map.py
Example 5: create_histogram
def create_histogram(parameter_name, nbins=100, writeFile=True, skipfirst=0, truncate=False, smooth=False):
    """
    Returns a histogram and some statistics about this parameter.
    @param writeFile: if true, write the histogram to paramname.histogram
    """
    f = "%s-chain-0.prob.dump" % parameter_name
    values = numpy.recfromtxt(f)[skipfirst::nevery]
    statistics = {
        'min': float(values.min()),
        'max': float(values.max()),
        'stdev': float(values.std()),
        'mean': float(values.mean()),
        'median': float(numpy.median(values)),
        'q1': float(scipy.stats.scoreatpercentile(values, 25)),
        'q3': float(scipy.stats.scoreatpercentile(values, 75)),
        'p5': float(scipy.stats.scoreatpercentile(values, 5)),
        'p95': float(scipy.stats.scoreatpercentile(values, 95)),
    }
    hist = scipy.histogram(values, bins = nbins if not smooth else nbins*10, normed=True)
    histwithborders = numpy.dstack([hist[1][0:nbins], hist[1][1:nbins+1], hist[0]])
    if writeFile:
        scipy.savetxt('%s.histogram' % parameter_name, histwithborders[0], delimiter="\t")
    return histwithborders[0], statistics
Author: bsipocz | Project: PyMultiNest | Lines: 26 | Source: analyse.py
Example 6: main
def main():
    # Do not modify
    start = time.time()
    parser = argparse.ArgumentParser(description='Build context vectors.')
    parser.add_argument('textfile', type=str, help='name of text file')
    parser.add_argument('window', type=int, help='context window')
    parser.add_argument('threshold', type=int, help='vocabulary minimum frequency')
    parser.add_argument('--ndims', type=int, default=100, help='number of SVD dimensions')
    parser.add_argument('--debug', type=bool, default=False)
    args = parser.parse_args()
    vocab, points, vocabFreq = count_context_vectors(args.textfile+'.txt', args.window, args.threshold)
    print 'Estimated count context vectors'
    if not args.debug:  # compute PPMI and SVD before writing. if debug is True, just write the count vectors
        points = ppmi(points)
        print 'Converted to positive pointwise mutual information'
        points = dimensionality_reduce(points, args.ndims)
        print 'Reduced dimensionality'
        outfile = args.textfile+'.window'+str(args.window)+'.thresh'+str(args.threshold)
    else:
        outfile = args.textfile+'.window'+str(args.window)+'.thresh'+str(args.threshold)+'.todebug'
    with open(outfile+'.labels', 'w') as o:
        o.write('\n'.join(vocab)+'\n')
    scipy.savetxt(outfile+'.vecs', points, fmt='%.4e')
    with open(outfile+'.csv', 'w') as do:
        do.write('\n'.join(vocabFreq)+'\n')
    print 'Saved to file'
    print time.time()-start, 'seconds'
Author: wellesleynlp | Project: sravanti-finalproject | Lines: 31 | Source: contextvectors.py
Example 7: run
def run(self):
    # Parameters passed are current data array, along with time step
    # between current data points
    self.times = sp.arange(0,self.Tfinal,self.dt)
    self.sim = odeint(self.eqns,self.init,self.times,(self.inj,self.injdt))
    sp.savetxt('simulation.txt',sp.column_stack((self.times,self.sim)))
Author: Daedalos | Project: NaKLCa | Lines: 7 | Source: NaKLCa.py
Example 8: main
def main():
    # Do not modify
    start = time.time()
    parser = argparse.ArgumentParser(description='Build document-term vectors.')
    parser.add_argument('textfile', type=str, help='name of text file with documents on each line')
    parser.add_argument('threshold', type=int, help='term minimum frequency')
    parser.add_argument('--ndims', type=int, default=100, help='number of SVD dimensions')
    parser.add_argument('--debug', type=bool, default=False, help='debug mode?')
    args = parser.parse_args()
    terms, points = tfidf_docterm(args.textfile+'.txt', args.threshold)
    print 'Estimated document-term TF-IDF vectors'
    if not args.debug:  # compute PPMI and SVD before writing. if debug is True, just write the count vectors
        points = dimensionality_reduce(points, args.ndims)
        print 'Reduced dimensionality'
        outfile = args.textfile+'.tfidf'+'.thresh'+str(args.threshold)
    else:
        outfile = args.textfile+'.tfidf'+'.thresh'+str(args.threshold)+'.todebug'
    with open(outfile+'.dims', 'w') as o:
        o.write('\n'.join(terms)+'\n')
    scipy.savetxt(outfile+'.vecs', points, fmt='%.4e')
    print 'Saved to file'
    print time.time()-start, 'seconds'
Author: wellesleynlp | Project: sravanti-finalproject | Lines: 26 | Source: docterm.py
Example 9: make_S
def make_S(A0, Ak, G0, Gk, phi):
    R = P = scipy.matrix(scipy.zeros((6,6)))
    for i in range(0, 3, 2):
        for j in range(3):
            R[i, j] = Ak[i, j]
    R[1, 1] = R[3, 3] = R[4, 4] = 1.0;
    R[5, 5] = Ak[5, 5]
    P[0, 0] = (A0[0, 0] * cos(phi)**2.0 + A0[1, 0] * sin(phi)**2.0)
    P[0, 1] = (A0[0, 1] * cos(phi)**2.0 + A0[1, 1] * sin(phi)**2.0)
    P[0, 2] = (A0[0, 2] * cos(phi)**2.0 + A0[1, 2] * sin(phi)**2.0)
    P[0, 3] = (A0[3, 3] * sin(2.0 * phi))
    P[1, 0] = sin(phi)**2.0
    P[1, 1] = cos(phi)**2.0
    P[1, 3] = -sin(2.0*phi)
    P[2, 0] = A0[2, 0]
    P[2, 1] = A0[2, 1]
    P[2, 2] = A0[2, 2]
    P[3, 0] = -0.5*sin(2.0*phi)
    P[3, 1] = 0.5*sin(2.0*phi)
    P[3, 3] = cos(2.0*phi)
    P[4, 4] = cos(phi)
    P[4, 5] = -sin(phi)
    P[5, 4] = A0[4, 4] * sin(phi)
    P[5, 5] = A0[5, 5] * cos(phi)
    scipy.savetxt("R", R)
    scipy.savetxt("P", P)
    return scipy.matrix(R.I) * scipy.matrix(P)
Author: evil-is-good | Project: primat-projects | Lines: 31 | Source: true_jank_elast.py
Example 10: test_networkx_matrix
def test_networkx_matrix(self):
    print('\n---------- Matrix Test Start -----------\n')
    g = nx.barabasi_albert_graph(30, 2)
    nodes = g.nodes()
    edges = g.edges()
    print(edges)
    mx1 = nx.adjacency_matrix(g)
    fp = tempfile.NamedTemporaryFile()
    file_name = fp.name
    sp.savetxt(file_name, mx1.toarray(), fmt='%d')
    # Load it back to matrix
    mx2 = sp.loadtxt(file_name)
    fp.close()
    g2 = nx.from_numpy_matrix(mx2)
    cyjs_g = util.from_networkx(g2)
    #print(json.dumps(cyjs_g, indent=4))
    self.assertIsNotNone(cyjs_g)
    self.assertIsNotNone(cyjs_g['data'])
    self.assertEqual(len(nodes), len(cyjs_g['elements']['nodes']))
    self.assertEqual(len(edges), len(cyjs_g['elements']['edges']))
    # Make sure all edges are reproduced
    print(set(edges))
    diff = compare_edge_sets(set(edges), cyjs_g['elements']['edges'])
    self.assertEqual(0, len(diff))
Author: denfromufa | Project: py2cytoscape | Lines: 31 | Source: test_util.py
Example 11: TSP
def TSP(stops, Alg, steps, param, seed = None,
        coordfile = 'xycoords.txt'):
    '''A wrapper function that attempts to optimize the traveling
    salesperson problem using a specified algorithm. If coordfile
    exists, a preexisting set of coordinates will be used. Otherwise,
    a new set of "stops" coordinates will be generated for the person to
    traverse, and will be written to the specified file.'''
    ## Create the distance matrix, which will be used to calculate
    ## the fitness of a given path
    if os.path.isfile(coordfile):
        coords = scipy.genfromtxt(coordfile)
        distMat = DistanceMatrix(coords)
    else:
        distMat = GenerateMap(stops, fname = coordfile, seed = seed)
    if Alg == 'HC':
        ## param is the number of solutions to try per step
        bestSol, fitHistory = HillClimber(steps, param, distMat, seed)
    elif Alg == 'SA':
        ## param is a placeholder
        bestSol, fitHistory = SimulatedAnnealing(steps, param, distMat, seed)
    elif Alg == 'MC3':
        ## param is the number of chains
        bestSol, fitHistory = MCMCMC(steps, param, distMat, seed)
    elif Alg == 'GA':
        ## param is the population size
        bestSol, fitHistory = GeneticAlgorithm(steps, param, distMat, seed)
    else:
        raise ValueError('Algorithm must be "HC", "SA", "MC3", or "GA".')
    outfname = coordfile + '-' + Alg + '-' + str(steps) + '-' + str(param) + '.txt'
    scipy.savetxt(outfname, scipy.array(bestSol), fmt = '%i')
    return bestSol, fitHistory
Author: esander91 | Project: RecurseCenter | Lines: 34 | Source: TravelingSalesperson.py
Example 12: make_B
def make_B(A0, Ak, G0, Gk, phi):
    x = 0
    y = 1
    z = 2
    B = scipy.zeros((6,6))
    B[0, 0] = cos(phi)**2.0*1
    B[0, 1] = sin(phi)**2.0
    B[0, 3] = sin(2.0 * phi)*1
    B[1, 0] = (A0[0, 0] * sin(phi)**2.0 + (A0[1, 0] - Ak[1, 0]) * cos(phi)**2.0) / Ak[1, 1]
    print B[1,0], A0[0,0]/Ak[1,1]
    B[1, 1] = (A0[1, 1] * cos(phi)**2.0 + (A0[1, 0] - Ak[1, 0]) * sin(phi)**2.0) / Ak[1, 1]
    B[1, 2] = (A0[0, 2] * sin(phi)**2.0 + A0[1, 2] * cos(phi)**2.0 - Ak[1, 2]) / Ak[1, 1]*1
    B[1, 3] = -sin(2.0 * phi) * (2.0 * G0[0, 1] + Ak[1, 0]) / Ak[1, 1]*1
    B[2, 2] = 1.0*1
    B[3, 0] = sin(2.0 * phi) * (A0[1, 0] - A0[0, 0]) / (4.0 * Gk[x, y])*1
    B[3, 1] = sin(2.0 * phi) * (A0[1, 1] - A0[0, 1]) / (4.0 * Gk[x, y])*1
    B[3, 2] = sin(2.0 * phi) * (A0[1, 2] - A0[0, 2]) / (4.0 * Gk[x, y])*1
    B[3, 3] = cos(2.0 * phi) * G0[0, 1] / Gk[x, y]*1
    B[4, 4] = cos(phi) * G0[1, 2] / Gk[y, z]*1
    B[4, 5] = -sin(phi) * G0[0, 2] / Gk[y, z]*1
    B[5, 4] = sin(phi)*1
    B[5, 5] = cos(phi)*1
    scipy.savetxt("B1", B)
    return scipy.matrix(B)
Author: evil-is-good | Project: primat-projects | Lines: 26 | Source: true_jank_elast.py
Example 13: request_agreement
def request_agreement(self):
    """7. Alice starts key agreement
    generate fingerprint and doing fuzzy commitment
    send hash and delta to Bob
    """
    log.info('7. Alice starts key agreement')
    #===============================================================================
    # Fingerprinting and Fuzzy Cryptography
    #===============================================================================
    # generate fingerprint
    self.fingerprint = fingerprint_energy_diff.get_fingerprint(self.recording_data, self.recording_samplerate)
    # save fingerprint for debugging
    scipy.savetxt("client_fingerprint.txt", self.fingerprint)
    log.debug('Alice fingerprint:\n'+str(self.fingerprint))
    # doing commit, rs codes can correct up to (n-m)/2 errors
    self.hash, self.delta, self.private_key = crypto_fuzzy_jw.JW_commit(self.fingerprint, m=self.rs_code_m, n=self.rs_code_n, symsize=self.rs_code_symsize)
    log.debug('Alice Blob:\nHash:\n'+str(self.hash)+'\nDelta:\n'+str(self.delta))
    # save delta for debugging
    scipy.savetxt("client_delta.txt", self.delta)
    # remote call for key agreement
    # using debug means sending also the fingerprint in clear text!!!
    # meaning no security!
    if self.debug:
        accept_agreement = self.pairing_server.callRemote("agreement_debug", self.fingerprint.tolist(), self.hash, self.delta.tolist())
        accept_agreement.addCallbacks(self.answer_agreement)
    else:
        accept_agreement = self.pairing_server.callRemote("agreement", self.hash, self.delta.tolist())
        accept_agreement.addCallbacks(self.answer_agreement)
Author: dschuermann | Project: fuzzy-pairing | Lines: 35 | Source: implementation_client.py
Example 14: CalculateProjectRg
def CalculateProjectRg(ProjectInfo,Output,returnRgs = False):
    """
    Calculate Radius of gyration for the Project ie. all the Trajectories.
    ProjectInfo: ProjectInfo.h5 file.
    Output: output file (XXX.dat).
    The Output default will be set in the scripts and it is './Rgs.dat'.
    """
    Output = checkoutput(Output)
    if not isinstance(ProjectInfo,str):
        print "Please input the Path to ProjectInfo.h5"
        raise IOError
    print 'Calculating the Rg for each trajectory......'
    ProjectInfoPath = '/'.join(os.path.realpath(ProjectInfo).split('/')[:-1])
    os.chdir(ProjectInfoPath)
    Trajfiles = []
    ProjectInfo = Serializer.LoadFromHDF(ProjectInfo)
    for i in range(ProjectInfo['NumTrajs']):
        Trajfiles.append(ProjectInfo['TrajFilePath']+ProjectInfo['TrajFileBaseName']+'%d'%i+ProjectInfo['TrajFileType'])
    Rgs = computeRg(Trajfiles)
    print "Save data to %s"%Output
    savetxt(Output,Rgs)
    print "Done."
    if returnRgs:
        return Rgs
Author: asgharrazavi | Project: scripts | Lines: 26 | Source: CalculateRg.py
Example 15: simPheno
def simPheno(options):
    print 'importing covariance matrix'
    if options.cfile is None: options.cfile=options.bfile
    XX = readCovarianceMatrixFile(options.cfile,readEig=False)['K']
    print 'simulating phenotypes'
    SP.random.seed(options.seed)
    simulator = sim.CSimulator(bfile=options.bfile,XX=XX,P=options.nTraits)
    Xr,region = simulator.getRegion(chrom_i=options.chrom,size=options.windowSize,min_nSNPs=options.nCausalR,pos_min=options.pos_min,pos_max=options.pos_max)
    Y,info = genPhenoCube(simulator,Xr,vTotR=options.vTotR,nCausalR=options.nCausalR,pCommonR=options.pCommonR,vTotBg=options.vTotBg,pHidd=options.pHidden,pCommon=options.pCommon)
    print 'exporting pheno file'
    if options.pfile is not None:
        outdir = os.path.split(options.pfile)[0]
        if not os.path.exists(outdir):
            os.makedirs(outdir)
    else:
        identifier = '_seed%d_nTraits%d_wndSize%d_vTotR%.2f_nCausalR%d_pCommonR%.2f_vTotBg%.2f_pHidden%.2f_pCommon%.2f'%(options.seed,options.nTraits,options.windowSize,options.vTotR,options.nCausalR,options.pCommonR,options.vTotBg,options.pHidden,options.pCommon)
        options.pfile = os.path.split(options.bfile)[-1] + '%s'%identifier
    pfile = options.pfile + '.phe'
    rfile = options.pfile + '.phe.region'
    SP.savetxt(pfile,Y)
    SP.savetxt(rfile,region)
Author: jeffhsu3 | Project: limix | Lines: 27 | Source: simPhenoCore.py
Example 16: toASCII
def toASCII(self, ASCIIfile, ncol=1, hdr="", onError='w',
            writechannel=False):
    if isinstance(ASCIIfile, str): ASCIIfile = open(ASCIIfile, 'w')
    ASCIIfile.write(hdr)
    if ncol > 1:
        nrow = self.data.size // ncol
        rm = self.data.size % ncol
        if rm > 0:
            nrow += 1
            if onError == 'w':
                print>> sys.stderr, 'Warning: padded with %i zeros' % (
                    ncol - rm)
            elif onError == 'n':
                pass
            else:
                raise ValueError(
                    'Data size does not fit into %i columns' % ncol)
        dat = self.data.copy()
        dat.resize(nrow, ncol)
    else:
        dat = self.data
    if writechannel:
        if ncol == 1:
            channels = arange(1, dat.size + 1)
            channels.resize(dat.size, 1)
            dat.resize(dat.size, 1)
            print>> ASCIIfile, "Channel Counts"
            savetxt(ASCIIfile, concatenate((channels, dat), axis=1),
                    fmt='%i')
        else:
            raise NonImplementedError(
                "channel numbers not yet supported for multicolumns")
    else:
        savetxt(ASCIIfile, dat, fmt='%9i')
    ASCIIfile.close()
Author: cpascual | Project: PAScual | Lines: 35 | Source: CHNfiles.py
Example 17: RecordFidelity
def RecordFidelity(fidelity, outdir, binary):
    """
    Output fidelity to data file.
    """
    path = os.path.dirname(os.path.realpath(__file__)) + "/" + outdir + "/fidelity.dat"
    if binary:
        sp.save(path, fidelity)
    else:
        sp.savetxt(path, fidelity)
Author: Roger-luo | Project: AdiaQC | Lines: 9 | Source: output.py
Example 18: RecordProbs
def RecordProbs(bitstring, density, fname, rpath, outinfo):
    """
    Record the final-state probabilities.
    """
    path = rpath + outinfo["outdir"] + "/" + fname
    if outinfo["binary"]:
        sp.save(path, density)
    else:
        sp.savetxt(path, density)
Author: Roger-luo | Project: AdiaQC | Lines: 9 | Source: output.py
Example 19: save_MBAR
def save_MBAR(self):
    """save results (BICePs score and population) from MBAR analysis"""
    print 'Writing %s...'%self.BSdir
    savetxt(self.BSdir, self.f_df)
    print '...Done.'
    print 'Writing %s...'%self.popdir
    savetxt(self.popdir, self.P_dP)
    print '...Done.'
Author: vvoelz | Project: nmr-biceps | Lines: 9 | Source: Analysis.py
Example 20: Save
def Save(self):
    '''Save the active array to a text file'''
    Dict = {'cSave' : 0, 'sSave' : 1, 'rSave' : 2}
    senderName = self.sender().objectName()
    active = Dict[senderName]
    data = self.getData(active)
    filename = QtGui.QFileDialog.getSaveFileName(self,'Save File', self.Root)
    if filename:
        sp.savetxt(str(filename), data)
Author: xkronosua | Project: QTR | Lines: 9 | Source: QTR.py
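A closing note on reading the data back: several of the examples above reload the files they write (Example 10 uses sp.loadtxt, Example 11 uses scipy.genfromtxt). Below is a minimal round-trip sketch, again using NumPy directly in place of the removed scipy aliases and with an arbitrary file name:

import numpy as np

mat = np.arange(6).reshape(2, 3)
np.savetxt('matrix.txt', mat, fmt='%d', delimiter='\t')  # write integers as text

restored = np.loadtxt('matrix.txt', delimiter='\t')  # loadtxt returns floats by default
assert np.array_equal(mat, restored.astype(int))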
Note: the scipy.savetxt examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other source-code and documentation hosting platforms. The snippets are selected from open-source projects contributed by their respective developers; copyright in the source code remains with the original authors, and any distribution or use should follow the license of the corresponding project. Do not repost without permission.