This article collects typical usage examples of the scipy.io.readsav function in Python. If you are wondering how to call readsav, what its arguments mean, or simply want to see it used in real code, the hand-picked examples below should help.
The sections below present 20 code examples of the readsav function, sorted by popularity by default. You can upvote the examples you find useful; your votes help the system recommend better Python code samples.
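Before the examples, here is a minimal sketch of the basic call pattern. It is not taken from any of the projects below; the file name 'mydata.sav' and the variable name 'velocity' are placeholders.

from scipy.io import readsav

# Load every variable stored in the IDL save file into a plain Python dict.
sav = readsav('mydata.sav', python_dict=True, verbose=False)
print(sav.keys())             # names of the IDL variables in the file
velocity = sav['velocity']    # IDL arrays come back as numpy arrays

With the default python_dict=False, readsav instead returns a dict-like object whose entries can also be read as attributes (as in readsav(...).dp_data in Example 14 below).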
Example 1: main
def main():
postx=readsav('samples_x1.sav', python_dict=True)
postt=readsav('samples_t2.sav', python_dict=True)
#alpha is the intercept and beta is the slope
alpha=postx['postx1']['alpha']
beta=postx['postx1']['beta']
Author: sdhawan21, Project: gp_lc_icproj, Lines: 7, Source: opensamples.py
Example 2: find_photo_z
def find_photo_z(type, file=None, file_z=None, z=None, mu=None,
sigma=None):
"""Type: file or gauss
Note: file_z implementation NOT tested.
"""
if type == 'file':
try:
pz_file = io.readsav(file)
pz = pz_file['p_z']
except:
pz_file = np.genfromtxt(file, dtype=None, names=True)
pz = pz_file['pdf']
if file_z is not None:
try:
my_z = io.readsav(file_z)
my_z = my_z['z']
except:
                my_z = np.genfromtxt(file_z, dtype=None, names=True)  # read from file_z, not file
my_z = my_z['z']
else:
my_z = np.arange(0, 5, .01)
if np.shape(pz) != np.shape(my_z):
raise ValueError("pz array and z array are different sizes!")
func_my_photo_z = interpolate.interp1d(my_z, pz)
my_photo_z = func_my_photo_z(z)
my_photo_z = np.asarray(my_photo_z/my_photo_z.max())
elif type == 'gauss':
my_photo_z = stats.norm.pdf(z, mu, sigma)
my_photo_z = np.asarray(my_photo_z/my_photo_z.max())
return my_photo_z
Author: scplbl, Project: singlEpoClass, Lines: 32, Source: photoz.py
Example 3: get_dict_from_file
def get_dict_from_file(date, prefix="eis3"):
"""
Reads an IDL .sav file containing EIS housekeeping data and returns its
contents as a python dictionary. For speed, if the file has already been
read, it may return the contents from a hidden memo. If the file is not
found in the location specified it will attempt to download it once and
save the file in the location originally specified.
Parameters
----------
date: date or datetime object
Date of the observation required. If the file is present in the sunpy
data directory, it will be read from there, or downloaded to that
location if it isn't.
prefix: str
file prefix (eis3 for thermal correction, fpp1 for doppler shift)
"""
key = '{0}_{1:%Y%m}.sav'.format(prefix, date)
if key in __housekeeping_memo__:
file_dict = __housekeeping_memo__[key]
else:
download_dir = os.path.join(sunpy.util.config._get_home(),
'EISpy', 'eispy', 'data', key)
try:
file_dict = readsav(download_dir, python_dict=True)
except IOError:
url = "http://sdc.uio.no/eis_wave_corr_hk_data/" + key
urllib.urlretrieve(url, filename=download_dir)
file_dict = readsav(download_dir, python_dict=True)
warnings.warn("File was not found, so it was downloaded and " +
"placed at the given location", UserWarning)
__housekeeping_memo__.update({key: file_dict})
return file_dict
Author: Cadair, Project: EISpy, Lines: 33, Source: eis_utils.py
Example 4: run_master
def run_master(file_int, file_vv, db_images, db_vel, zero):
print(" * MASTER : reading {0}...".format(file_int))
im = io.readsav('/net/duna/scratch1/aasensio/deepLearning/opticalFlow/database/{0}.save'.format(file_int))['int']
print(" * MASTER : reading {0}...".format(file_vv))
vel = io.readsav('/net/duna/scratch1/aasensio/deepLearning/opticalFlow/database/{0}.save'.format(file_vv))
n_timesteps, nx_orig, ny_orig = im.shape
tasks = [i for i in range(n_timesteps)]
task_index = 0
num_workers = size - 1
closed_workers = 0
print("*** Master starting with {0} workers".format(num_workers))
while closed_workers < num_workers:
dataReceived = comm.recv(source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG, status=status)
source = status.Get_source()
tag = status.Get_tag()
if tag == tags.READY:
# Worker is ready, so send it a task
if task_index < len(tasks):
                dataToSend = {'index': task_index + zero,
                              'image': im[task_index, 0:1008, 0:1008],
                              'vx1': vel['vx1'][task_index, 0:1008, 0:1008],
                              'vz1': vel['vz1'][task_index, 0:1008, 0:1008],
                              'vx01': vel['vx01'][task_index, 0:1008, 0:1008],
                              'vz01': vel['vz01'][task_index, 0:1008, 0:1008],
                              'vx001': vel['vx001'][task_index, 0:1008, 0:1008],
                              'vz001': vel['vz001'][task_index, 0:1008, 0:1008]}
comm.send(dataToSend, dest=source, tag=tags.START)
print(" * MASTER : sending task {0} to worker {1}".format(task_index, source), flush=True)
task_index += 1
else:
print("Sending termination")
comm.send(None, dest=source, tag=tags.EXIT)
elif tag == tags.DONE:
index = dataReceived['index']
im_r = dataReceived['image']
vx1_r = dataReceived['vx1']
vz1_r = dataReceived['vz1']
vx01_r = dataReceived['vx01']
vz01_r = dataReceived['vz01']
vx001_r = dataReceived['vx001']
vz001_r = dataReceived['vz001']
db_images[index,:,:] = im_r
db_vel[0,index,:,:] = vx1_r
db_vel[1,index,:,:] = vz1_r
db_vel[2,index,:,:] = vx01_r
db_vel[3,index,:,:] = vz01_r
db_vel[4,index,:,:] = vx001_r
db_vel[5,index,:,:] = vz001_r
print(" * MASTER : got block {0} from worker {1}".format(index, source), flush=True)
elif tag == tags.EXIT:
print(" * MASTER : worker {0} exited.".format(source))
closed_workers += 1
print("Master block finished")
return len(tasks)
Author: aasensio, Project: DNHazel, Lines: 56, Source: imax.py
Example 5: loadfhdsav
def loadfhdsav(savfile):
# get the other poles
pol = savfile.split("_")[-1][:2]
if pol == "xx":
uvfile_altpol = savfile.replace("xx", "yy")
pols = [0, 1]
elif pol == "yy":
uvfile_altpol = savfile.replace("yy", "xx")
pols = [1, 0]
else:
print "polarization not found in filename. skipping"
raise (StandardError)
if not os.path.exists(uvfile_altpol):
print "pol file", uvfile_altpol, "not found. please find"
raise (StandardError)
# paramfile = savfile.split('_')[0]+'_params.sav'
paramfile = savfile.replace("vis_%s" % (["xx", "yy"][pols[0]]), "params")
if not os.path.exists(paramfile):
print "error: paramfile=", paramfile, "not found. please find"
raise (StandardError)
# weightfile = savfile.split('_')[0]+'_flags.sav'
weightfile = savfile.replace("vis_%s" % (["xx", "yy"][pols[0]]), "flags")
if not os.path.exists(weightfile):
print "error: weightfile", weightfile, "not found, please find"
raise (StandardError)
print "loading:", savfile
uvfile = readsav(savfile)
ant1 = uvfile["obs"]["baseline_info"][0]["tile_a"][0] - 1
print "min(ant1)=", ant1.min(), "should be 0"
ant2 = uvfile["obs"]["baseline_info"][0]["tile_b"][0] - 1
print "max(ant2)=", ant2.max(), "should be 127"
data = uvfile["vis_ptr"]
# times = uvfile['obs']['baseline_info'][0]['jdate'][0]
baselines = (ant2) * 256 + ant1
freqs = uvfile["obs"]["baseline_info"][0]["freq"][0]
print "loading alternate polarization", uvfile_altpol
uv_altpol = readsav(uvfile_altpol)
data_altpol = uv_altpol["vis_ptr"]
print "loading baselines from params file:", paramfile
params = readsav(paramfile)
U = params["params"]["uu"][0] * 1e9
V = params["params"]["vv"][0] * 1e9
W = params["params"]["ww"][0] * 1e9
    uvw = n.array(list(zip(U, V, W)))  # wrap zip in list() so numpy builds a 2-D array
times = params["params"]["time"][0]
print "loading weights from :", weightfile
flags = readsav(weightfile)
mask = n.dstack([flags["flag_arr"][0], flags["flag_arr"][1]]) == 0 # the zeros are the flags
# create the new fits file
outdata = n.zeros((data.shape[0], data.shape[1], 2)).astype(n.complex64)
outdata[:, :, pols[0]] = data
outdata[:, :, pols[1]] = data_altpol
ant1, ant2 = i2a(ant1), i2a(ant2)
return uvw, ant1, ant2, baselines, times, freqs, outdata, mask
Author: mkolopanis, Project: capo, Lines: 56, Source: wedge.py
Example 6: __init__
def __init__(self, root):
self.root = root
self.noise = noise
self.batch_size = 256
self.dataFile = "/net/duna/scratch1/aasensio/deepLearning/milne/database/database_6301_hinode_1component.h5"
f = h5py.File(self.dataFile, 'r')
self.pars = f.get("parameters")
self.lower = np.min(self.pars, axis=0)
self.upper = np.max(self.pars, axis=0)
f.close()
self.root_hinode = "/net/nas4/fis/aasensio/scratch/HINODE/SUNSPOT/"
self.label_files = ["sunspot_stokesI_512x512.sav", "sunspot_stokesQ_512x512.sav", "sunspot_stokesU_512x512.sav", "sunspot_stokesV_512x512.sav"]
self.std_values = np.load('{0}_normalization.npy'.format(self.root))
labels_data = ['data_ii', 'data_qq', 'data_uu', 'data_vv']
self.stokes = np.zeros((512,512,50,4))
for i in range(4):
print("Reading file {0}".format(self.label_files[i]))
stokes = io.readsav("/net/nas4/fis/aasensio/scratch/HINODE/SUNSPOT/{0}".format(self.label_files[i]))[labels_data[i]]
if (i == 0):
mean_stokesi = np.mean(stokes[400:500,0:100,0])
stokes = stokes[:,:,0:50] / mean_stokesi
self.stokes[:,:,:,i] = stokes / self.std_values[None,None,:,i]
self.stokes = self.stokes.reshape((512*512,50,4))
Author: aasensio, Project: DNHazel, Lines: 35, Source: plotMap_6301_hinode_1component.py
Example 7: z_from_photo_z
def z_from_photo_z(photo_z_file, n, my_z_array=None):
my_p_z = io.readsav(photo_z_file)
pz = my_p_z['p_z']
if my_z_array is None:
z = np.arange(0, 5, .01)
else:
z = my_z_array
if np.shape(pz) != np.shape(z):
raise ValueError("p_z array and z array are different sizes")
dz = z[1] - z[0]
pz /= (dz * pz).sum()
ecdf = np.cumsum(pz * dz)
cdf = interpolate.interp1d(z, ecdf)
def func(x, *args):
my_cdf = args[0]
cdf = args[1]
return abs(my_cdf - cdf(x))
out_z = []
for i in range(n):
my_cdf = np.random.uniform(0, 1)
my_z = optimize.fmin(func, (1.5), args=(my_cdf, cdf), disp=0)
out_z.append(my_z[0])
out_z = np.asarray(out_z)
return out_z
Author: scplbl, Project: singlEpoClass, Lines: 25, Source: makeMc.py
Example 8: makeWeightsArray
def makeWeightsArray(X):
savfile = readsav(X)
wuv = savfile['weights_uv']
wxy = n.fft.fft2(wuv)
wuv = n.abs(wuv)
wxy = n.abs(wxy)
return wuv,wxy
Author: jpober, Project: brownscripts, Lines: 7, Source: plotWeights.py
Example 9: read_padsav
def read_padsav(file_name, disable_UserWarnings=True):
"""
Reads data from an idl_dict object into XPadDataItem objects
Parameters
----------
file_name : str
Path to XPad*.padsav file
Returns
-------
items : list
A list of XPadDataItems
"""
warning_action = "default"
if (disable_UserWarnings):
warning_action = "ignore"
with catch_warnings():
simplefilter(warning_action, UserWarning)
idl_dict = readsav(file_name)
return parse_padsav(idl_dict)
Author: ZedThree, Project: pyxpad, Lines: 25, Source: user_functions.py
Example 10: __init__
def __init__(self,infile):
mypath=os.getcwd()
if mypath.find('/Users/rfinn') > -1:
print "Running on Rose's mac pro or laptop"
homedir='/Users/rfinn/'
elif mypath.find('Users/kellywhalen') > -1:
print "Running on Kelly's Laptop"
homedir='/Users/kellywhalen/Github/Virgo/'
cefile=readsav(infile)
self.nulnu_iras25=cefile['nulnu_iras25']
self.nulnu_iras100=cefile['nulnu_iras100']
self.nulnu_iras12=cefile['nulnu_iras12']
self.nulnu_iras60=cefile['nulnu_iras60']
self.nulnuinlsun=cefile['nulnuinlsun']
self.lir_sanders=cefile['lir_sanders']
self.lir=cefile['lir']
self.nulnu_lw3=cefile['nulnu_lw3']
self.nulnu_lw2=cefile['nulnu_lw2']
self.lamb=cefile['lambda']
#
# convert all to double-precision arrays
#
self.lamb=array(self.lamb,'d')
self.nulnu_iras25=array(self.nulnu_iras25,'d')
self.nulnu_iras100=array(self.nulnu_iras100,'d')
self.nulnu_iras12=array(self.nulnu_iras12,'d')
self.nulnu_iras60=array(self.nulnu_iras60,'d')
self.nulnuinlsun=array(self.nulnuinlsun,'d')
self.lir_sanders=array(self.lir_sanders,'d')
self.lir=array(self.lir,'d')
self.nulnu_lw3=array(self.nulnu_lw3,'d')
    self.nulnu_lw2=array(self.nulnu_lw2,'d')  # was nulnu_lw3 in the original, apparently a copy-paste slip
Author: rfinn, Project: Virgo, Lines: 33, Source: ReadCharyElbazTemplates.py
Example 11: import_cosmology
def import_cosmology(filename, structure_name="fid"):
r""" Loads an icosmo cosmology from a fiducial structure stored in an
idl save file into a cosmicpy cosmology.
Parameters
----------
filename : str
Name of the idl save from which to load the cosmology.
structure_name : str, optional
Name of the icosmo fiducial structure stored in the save file.
Returns
-------
cosmo : cosmology
cosmicpy cosmology corresponding to the icosmo input.
"""
icosmo_file = readsav(filename)
icosmo = icosmo_file.get(structure_name)
h = icosmo['cosmo'][0]['h'][0]
Omega_m = icosmo['cosmo'][0]['omega_m'][0]
Omega_de = icosmo['cosmo'][0]['omega_l'][0]
Omega_b = icosmo['cosmo'][0]['omega_b'][0]
w0 = icosmo['cosmo'][0]['w0'][0]
wa = icosmo['cosmo'][0]['wa'][0]
tau = icosmo['cosmo'][0]['tau'][0]
n = icosmo['cosmo'][0]['n'][0]
sigma8 = icosmo['cosmo'][0]['sigma8'][0]
cosmo = cosmicpy.cosmology(h=h, Omega_m=Omega_m, Omega_de=Omega_de,
Omega_b=Omega_b, w0=w0, wa=wa, tau=tau,
n=n, sigma8=sigma8)
return cosmo
Author: passaglia, Project: cosmicpy, Lines: 33, Source: icosmo.py
Example 12: calc_specifics
def calc_specifics(self, Temp):
"""A separate method to calculate the specific line list properties based on an input T."""
if self.specs_calced == 0:
#make sure we don't inadvertently try and do this twice
if self.ll_name == 'HITRAN04':
self.Temp = Temp
self.specs_calced = 1
#lets make sure the relevant temperature is now carried around with the linelist.
props = HT04_globals(self.spec, self.iso)
if Temp == 296.0 and self.ll_name == 'HITRAN04':
Q=props.Q296
else:
Q=getQ(self.spec, self.iso, self.ll_name, Temp)
E_temp = -1.0 * self.epp * c2 / Temp
#print E_temp
w_temp = -1.0 * self.wave * c2 / Temp
#print w_temp
self.strength = self.strength * (props.abund/ Q) * (np.exp(E_temp) * (1.0-np.exp(w_temp))) * apc.c.cgs.value
#I have no idea why Jan multiplies by C here, but he does, so lets copy it.
strengths_jan = readsav('/home/dstock/sf/idl/code/ff.xdr')
Author: dstock, Project: PySynSpec, Lines: 25, Source: sfClasses.py
Example 13: grabdate
def grabdate(d):
global rundir, srcdir, outdir
# Limit output to one line per date.
status(d)
# Loop over both probes.
for p in ('a', 'b'):
status(p)
# Nuke the run directory. Leave stdout and stderr.
[ os.remove(x) for x in os.listdir(rundir) if x not in ('stdoe.txt',) ]
# Create and execute an IDL script to grab position, electric field, and
# magnetic field data for the day and and dump it into a sav file.
out, err = spedas( idlcode(probe=p, date=d) )
# Make sure there's somewhere to store the pickles.
pkldir = outdir + d.replace('-', '') + '/' + p + '/'
if not os.path.exists(pkldir):
os.makedirs(pkldir)
# Read in the IDL output.
if not os.path.exists('temp.sav'):
status('X')
continue
else:
temp = io.readsav('temp.sav')
# Rewrite the data as pickles. (Pickles are Python data files. They are
# reasonably efficient in terms of both storage size and load time.)
for key, arr in temp.items():
with open(pkldir + key + '.pkl', 'wb') as handle:
pickle.dump(arr, handle, protocol=-1)
# Acknowledge successful date access.
status('OK')
# Move to the next line.
return status()
Author: UMN-Space-Physics, Project: rbsp-ulf, Lines: 31, Source: grabber.py
Example 14: _get_dusty_array
def _get_dusty_array(y_window, x_window):
"""
Returns the sliced array of dusty pixels
"""
url = darts + 'data/cal/dp/dusty_pixels.sav'
http_down = urllib.urlretrieve(url)
dusties = readsav(http_down[0]).dp_data
return dusties[y_window[0]:y_window[1], x_window[0]: x_window[1]]
Author: Cadair, Project: EISpy, Lines: 8, Source: pixel_calibration.py
Example 15: __init__
def __init__(self, root, output, name_of_variable):
# Only allocate needed memory
config = tf.ConfigProto()
config.gpu_options.allow_growth=True
session = tf.Session(config=config)
ktf.set_session(session)
self.root = root
self.nx = 576
self.ny = 576
self.n_times = 2
self.n_filters = 64
self.batch_size = 1
self.n_conv_layers = 20
self.stride = 1
self.skip_frequency = 2
self.n_frames = 1
self.output = output
self.name_of_variable = name_of_variable
telescope_radius = 0.5 * 0.965 * u.meter
pixel_size = 0.02759 * u.arcsec / u.pixel
fov = 1152 * u.pixel
lambda0 = 500 * u.nm
imax = imax_degradation(telescope_radius, pixel_size, fov)
imax.compute_psf(lambda0)
res = io.readsav('/net/viga/scratch1/deepLearning/opticalFlow/mancha/c3d_1152_cont4_4bin_012000_continuum.sav')['continuum']
self.images = np.zeros((2,576,576), dtype='float32')
# 576 pixels are obtained by resampling 1152 pixels of 0.02759 "/px to 0.0545 "/px for IMaX
self.images[0,:,:] = congrid.resample(imax.apply_psf(res[0,:,:]), (576, 576))
self.images[1,:,:] = congrid.resample(imax.apply_psf(res[1,:,:]), (576, 576))
res = io.readsav('/net/viga/scratch1/deepLearning/opticalFlow/mancha/c3d_1152_cont4_4bin_012000.isotau.sav')
self.vx = np.zeros((3,576,576), dtype='float32')
self.vy = np.zeros((3,576,576), dtype='float32')
for i in range(3):
self.vx[i,:,:] = congrid.resample(imax.apply_psf(res['vx'][i,:,:]), (576, 576))
self.vy[i,:,:] = congrid.resample(imax.apply_psf(res['vy'][i,:,:]), (576, 576))
Author: aasensio, Project: DNHazel, Lines: 45, Source: testResnet.py
Example 16: find_sol_mask
def find_sol_mask(shotnr, frame_info=None, rz_array=None,
datadir='/Users/ralph/source/blob_tracking/test_data'):
"""
Returns a mask for the pixels in between the separatrix and the LCFS.
"""
s = readsav('%s/separatrix.sav' % (datadir), verbose=False)
return ((s['rmid'].reshape(64, 64) > s['rmid_sepx']) &
(s['rmid'].reshape(64, 64) < s['rmid_lim']))
Author: rkube, Project: ralphs-code-stash, Lines: 9, Source: helper_functions.py
Example 17: get_kordopatis_comparisons
def get_kordopatis_comparisons():
data = readsav(os.path.join(DATA_PATH, "RAVE_DR5_calibration_data.save"))
return Table(data={
"TEFF": data["calibration_data"]["TEFF"][0],
"LOGG": data["calibration_data"]["LOGG"][0],
"FEH": data["calibration_data"]["FEH"][0],
"REF": data["calibration_data"]["REF"][0],
"Name": [each.strip() for each in data["calibration_data"]["DR5_OBS_ID"][0]]
})
Author: AnnieJumpCannon, Project: RAVE, Lines: 10, Source: rave_io.py
Example 18: wotta16
def wotta16():
""" Generate sys files from IDL save files
Returns
-------
"""
from scipy.io import readsav
# Non-excluded
all = readsav(pyigm_path+'/data/LLS/Literature/wotta16_final.save')
# Build Lehner+13
assert False # Need RA/DEC
Author: pyigm, Project: pyigm, Lines: 11, Source: lls_literature.py
Example 19: fetch_meta
def fetch_meta(fhd_run, obsids=None):
'''
Return meta data needed for the FHD deconvolved source components.
Parameters
----------
fhd_run: string
The name identifier of the FHD run, e.g. \'pac_decon_eor1_June2016\'.
obsids: list-like, optional
Obsids (as strings) to fetch data from. Defaults to all deconvolved.
'''
decon_dir='%sfhd_%s/deconvolution/'%(fhd_base(),fhd_run)
meta_dir='%sfhd_%s/metadata/'%(fhd_base(),fhd_run)
if obsids is None: obsids = fp.get_obslist(decon_dir)
meta = {'clustered':False}
for o in obsids:
params = readsav(decon_dir+o+'_fhd_params.sav')['fhd_params']
metaobs = readsav('%s%s_obs.sav'%(meta_dir,o))['obs']
        meta[o] = {'n_iter': params.n_iter[0],
                   'det_thresh': params.detection_threshold[0],
                   'beam_thresh': params.beam_threshold[0],
                   'max_bl': metaobs.max_baseline[0],
                   'freq': metaobs.freq_center[0],
                   'degpix': metaobs.degpix[0]}
meta[o]['beam_width'] = meta[o]['max_bl']**-1 * 180./np.pi
return meta
Author: EoRImaging, Project: katalogss, Lines: 21, Source: fhd_pype.py
Example 20: makeVisArray
def makeVisArray(X):
#savfile = []
#vis_array = []
savfile = readsav(X)
for m in X.split('_'):
if m == 'model':
vis_array = savfile['vis_model_ptr']
            print('Model found!')
break
if m != 'model':
vis_array = savfile['vis_ptr']
    try:
        flag = readsav(X.split('_')[0] + '_flags.sav')
    except IOError:
        flag = readsav(('_').join(X.split('_')[0:3]) + '_flags.sav')
    print(flag.keys())
flag = flag['flag_arr'][0]
#vis_array[flag==1] = 0
#vis_array[flag==1] = 0
    print(n.max(vis_array))
#vis_array[flag==-1]=0
obs = savfile['obs']
times = obs['baseline_info'][0]['JDATE'][0]
#ants1 = obs['baseline_info'][0]['TILE_A'][0]
#ants2 = obs['baseline_info'][0]['TILE_B'][0]
#print n.where(vis_array[5000,:]==0)[0].shape
ntimes = len(times)
nbls = obs['NBASELINES'][0]
time_order = n.argsort(times)
ant1,ant2 = options.baseline.split('_')
ind1 = (obs[0]['baseline_info']['tile_a'][0]==int(ant1)).astype(int)
ind2 = (obs[0]['baseline_info']['tile_b'][0]==int(ant2)).astype(int)
    intersect = ((ind1 + ind2) // 2).astype(bool)  # integer division: True only where both tiles match
    print(intersect.max())
bsl_array = vis_array[intersect]
    print(bsl_array.shape)
bsl_array = bsl_array[time_order]
return bsl_array
Author: jpober, Project: brownscripts, Lines: 40, Source: PlotSavUV.py
Note: the scipy.io.readsav examples in this article were compiled from source code and documentation hosts such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their authors; copyright remains with the original authors, and any redistribution or use should follow each project's license. Do not republish without permission.