This article collects typical usage examples of the scipy.io.netcdf.netcdf_file function in Python. If you have been wondering how the netcdf_file function works, how to call it, or what real-world uses look like, the hand-picked code examples below may help.
Twenty code examples of the netcdf_file function are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
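Before the examples, here is a minimal, self-contained sketch of the basic write/read workflow of scipy.io.netcdf.netcdf_file, following the pattern in the SciPy documentation; the file name 'simple.nc' and the 'time' variable are illustrative only:

import numpy as np
from scipy.io.netcdf import netcdf_file

# Write a small NetCDF3 (classic format) file.
f = netcdf_file('simple.nc', 'w')
f.history = 'Created for a test'
f.createDimension('time', 10)
time = f.createVariable('time', 'i', ('time',))
time[:] = np.arange(10)
time.units = 'days since 2008-01-01'
f.close()

# Read it back; by default variable data are memory-mapped from the file.
f = netcdf_file('simple.nc', 'r')
print(f.history)            # attributes come back as bytes, e.g. b'Created for a test'
time = f.variables['time']
print(time.units, time.shape, time[-1])
f.close()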
Example 1: test_read_write_sio
def test_read_write_sio():
    eg_sio1 = BytesIO()
    f1 = make_simple(eg_sio1, 'w')
    str_val = eg_sio1.getvalue()
    f1.close()
    eg_sio2 = BytesIO(str_val)
    f2 = netcdf_file(eg_sio2)
    for testargs in gen_for_simple(f2):
        yield testargs
    f2.close()
    # Test that error is raised if attempting mmap for sio
    eg_sio3 = BytesIO(str_val)
    yield assert_raises, ValueError, netcdf_file, eg_sio3, 'r', True
    # Test 64-bit offset write / read
    eg_sio_64 = BytesIO()
    f_64 = make_simple(eg_sio_64, 'w', version=2)
    str_val = eg_sio_64.getvalue()
    f_64.close()
    eg_sio_64 = BytesIO(str_val)
    f_64 = netcdf_file(eg_sio_64)
    for testargs in gen_for_simple(f_64):
        yield testargs
    yield assert_equal, f_64.version_byte, 2
    # also when version 2 explicitly specified
    eg_sio_64 = BytesIO(str_val)
    f_64 = netcdf_file(eg_sio_64, version=2)
    for testargs in gen_for_simple(f_64):
        yield testargs
    yield assert_equal, f_64.version_byte, 2
Developer: ArmstrongJ | Project: scipy | Lines: 29 | Source: test_netcdf.py
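The make_simple, gen_for_simple and check_simple helpers used in Examples 1 and 6 are defined elsewhere in scipy's test_netcdf.py and are not reproduced on this page (gen_for_simple presumably yields (assert_func, args...) tuples for the old nose yield-test style). A rough, hedged sketch of what make_simple and check_simple might look like, inferred only from how they are called in these examples:

N_EG_ELS = 11  # assumed size of the example 'time' variable

def make_simple(*args, **kwargs):
    # Create a small netcdf_file with one 'time' dimension/variable.
    f = netcdf_file(*args, **kwargs)
    f.history = 'Created for a test'
    f.createDimension('time', N_EG_ELS)
    time = f.createVariable('time', 'i', ('time',))
    time[:] = np.arange(N_EG_ELS)
    time.units = 'days since 2008-01-01'
    f.flush()
    return f  # netcdf_file supports the with-statement, as used in Example 6

def check_simple(ncfileobj):
    # Assert that a file written by make_simple reads back correctly.
    assert_equal(ncfileobj.history, b'Created for a test')
    time = ncfileobj.variables['time']
    assert_equal(time.units, b'days since 2008-01-01')
    assert_equal(time.shape, (N_EG_ELS,))
    assert_equal(time[-1], N_EG_ELS - 1)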
Example 2: write_netcdf
def write_netcdf(path, fields, attrs={}, append=False, format='NETCDF3_64BIT'):
    """
    Write the data and grid information for *fields* to *path* as NetCDF.
    If the *append* keyword argument is True, append the data to an existing
    file, if it exists. Otherwise, clobber an existing file.
    """
    assert(format in _VALID_NETCDF_FORMATS)

    if os.path.isfile(path) and append:
        mode = 'a'
    else:
        mode = 'w'

    if format == 'NETCDF3_CLASSIC':
        root = nc.netcdf_file(path, mode, version=1)
    elif format == 'NETCDF3_64BIT':
        root = nc.netcdf_file(path, mode, version=2)
    else:
        root = nc4.Dataset(path, mode, format=format)

    _set_netcdf_attributes(root, attrs)
    _set_netcdf_structured_dimensions(root, fields.shape)
    _set_netcdf_variables(root, fields)

    root.close()
Developer: JianweiHan | Project: landlab | Lines: 25 | Source: write.py
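A hypothetical call of write_netcdf, to illustrate the append behaviour described in the docstring. The exact type expected for fields is defined elsewhere in landlab and is only assumed here to be a dict-like container of 2-D arrays that exposes a common .shape, as the calls to _set_netcdf_structured_dimensions(root, fields.shape) and _set_netcdf_variables(root, fields) suggest; the file name is illustrative:

# First call creates (or clobbers) the file...
write_netcdf('surface.nc', fields, attrs={'title': 'example run'})
# ...subsequent calls with append=True add to the existing file instead of overwriting it.
write_netcdf('surface.nc', fields, attrs={'title': 'example run'}, append=True)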
Example 3: load_steady_state_experiments_data
def load_steady_state_experiments_data():
    samplesToSkip = 3
    print('loading data...')

    rootName = 'lineAngleSensor2'
    f = netcdf.netcdf_file(rootName+'Data.nc', 'r')
    memberName = 'azimuth'
    azimuth = f.variables[rootName+'.data.'+memberName].data[samplesToSkip:]
    memberName = 'elevation'
    elevation = f.variables[rootName+'.data.'+memberName].data[samplesToSkip:]
    ts_trigger_las = f.variables[rootName+'.data.ts_trigger'].data[samplesToSkip:]*1.0e-9

    rootName = 'siemensSensors'
    f = netcdf.netcdf_file(rootName+'Data.nc', 'r')
    setpoint = f.variables[rootName+'.data.'+'carouselSpeedSetpoint'].data[samplesToSkip:]
    speed = f.variables[rootName+'.data.'+'carouselSpeedSmoothed'].data[samplesToSkip:]
    ts_trigger_siemens = f.variables[rootName+'.data.ts_trigger'].data[samplesToSkip:]*1.0e-9

    # Choose the intersection of the two time ranges
    startTime = max(ts_trigger_las[0], ts_trigger_siemens[0])
    endTime = min(ts_trigger_las[-1], ts_trigger_siemens[-1])
    ts_trigger_las -= startTime
    ts_trigger_siemens -= startTime
    startTime_new = 0
    endTime_new = endTime - startTime
    times = len(ts_trigger_las) + len(ts_trigger_siemens)

    # Since we didn't have the resampler turned on...
    t = numpy.linspace(startTime_new, endTime_new, times)
    elevation_resampled = numpy.interp(t, ts_trigger_las, elevation)
    speed_resampled = numpy.interp(t, ts_trigger_siemens, speed)
    return speed_resampled, elevation_resampled
Developer: drewm1980 | Project: highwind_experiments | Lines: 34 | Source: analyze_steady_states.py
Example 4: grab_data
def grab_data(rule):
    files = glob.glob(rule)
    filebases = []
    for i in range(0, len(files)):
        # Grab the part after the / of any path, and before the _k*.nc
        filebase = re.split('_',
                            re.split('/', files[i])[-1]
                            )[0]
        # If we haven't seen this base before, keep it around.
        if not(filebases.count(filebase)):
            filebases.append(filebase)

    numbases = len(filebases)
    data = numpy.ones(numbases, dtype=[('omega1', float), ('B', float),
                                       ('kmin', float), ('kmax', float),
                                       ('kpeak', float),
                                       ('peakgr', float)])
    path = ""
    for dir in (re.split('/', files[0])[0:-1]):
        path = path + dir + '/'

    for i in range(0, numbases):
        data[i]['omega1'] = float(re.split('B',
                                           re.split('w', filebases[i])[1])[0])
        data[i]['B'] = float(re.split('B', filebases[i])[1])
        kminname = path + filebases[i] + "_kmin.nc"
        kmaxname = path + filebases[i] + "_kmax.nc"
        kpeakname = path + filebases[i] + "_kpeak.nc"
        if(os.path.exists(kminname)):
            ncfile = netcdf.netcdf_file(kminname, 'r')
            data[i]['kmin'] = ncfile.kz
            ncfile.close()
        else:
            data[i]['kmin'] = numpy.nan
        if(os.path.exists(kmaxname)):
            ncfile = netcdf.netcdf_file(kmaxname, 'r')
            data[i]['kmax'] = ncfile.kz
            ncfile.close()
        else:
            data[i]['kmax'] = numpy.nan
        if(os.path.exists(kpeakname)):
            ncfile = netcdf.netcdf_file(kpeakname, 'r')
            data[i]['kpeak'] = ncfile.kz
            data[i]['peakgr'] = ncfile.variables['lambda'][0,0]
            ncfile.close()
        else:
            data[i]['kpeak'] = numpy.nan
            data[i]['peakgr'] = numpy.nan

    # Sort the array so things are nicer
    data = numpy.sort(data, order=['omega1', 'B'])
    return data
Developer: ahroach | Project: globalcode | Lines: 60 | Source: kcritscantools_no_omega2.py
Example 5: plot_arm_speed
def plot_arm_speed(axis, startTime=-1):
    rootName = 'siemensSensors'
    f = netcdf.netcdf_file(rootName+'Data.nc', 'r')
    data1 = f.variables[rootName+'.data.'+'carouselSpeedSetpoint'].data[startSample:]
    data2 = f.variables[rootName+'.data.'+'carouselSpeedSmoothed'].data[startSample:]
    ts_trigger = f.variables[rootName+'.data.ts_trigger'].data[startSample:]*1.0e-9

    # Load the actual arm speed from the arm gyro
    rootName = 'armboneLisaSensors'
    fiile = netcdf.netcdf_file(rootName+'Data.nc', 'r')
    rawdata4 = fiile.variables['armboneLisaSensors.GyroState.gr'].data[startSample:]
    ts_trigger4 = fiile.variables['armboneLisaSensors.GyroState.ts_trigger'].data[startSample:]*1.0e-9
    #fullscale = 2000 # deg/sec
    #data4 = -1.0 * rawdata4 / (2**15) * fullscale * pi/180 - 0.0202 # Rad/s
    data4 = rawdata4

    if startTime == -1:
        startTime = ts_trigger[0]
    times = ts_trigger - startTime
    times4 = ts_trigger4 - startTime

    pylab.hold(True)
    plot(times, data2, '.-', label='On Motor Side of Belt')
    plot(times4, data4, '.-', label='From Gyro on Arm')
    plot(times, data1, '.-', label='Setpoint (Echoed)')
    ylabel('Arm rotation speed [Rad/s]')
    xlabel('Time [s]')
    #legend(['Setpoint (Echoed)', 'Setpoint (Sent)', 'On Motor Side of Belt', 'From Gyro on Arm'])
    title('Plot of Signals Related to Arm Speed')

    return startTime
Developer: drewm1980 | Project: highwind_experiments | Lines: 33 | Source: plot_arm_speed.py
Example 6: test_read_write_sio
def test_read_write_sio():
    eg_sio1 = BytesIO()
    with make_simple(eg_sio1, 'w') as f1:
        str_val = eg_sio1.getvalue()

    eg_sio2 = BytesIO(str_val)
    with netcdf_file(eg_sio2) as f2:
        check_simple(f2)

    # Test that error is raised if attempting mmap for sio
    eg_sio3 = BytesIO(str_val)
    assert_raises(ValueError, netcdf_file, eg_sio3, 'r', True)
    # Test 64-bit offset write / read
    eg_sio_64 = BytesIO()
    with make_simple(eg_sio_64, 'w', version=2) as f_64:
        str_val = eg_sio_64.getvalue()

    eg_sio_64 = BytesIO(str_val)
    with netcdf_file(eg_sio_64) as f_64:
        check_simple(f_64)
        assert_equal(f_64.version_byte, 2)
    # also when version 2 explicitly specified
    eg_sio_64 = BytesIO(str_val)
    with netcdf_file(eg_sio_64, version=2) as f_64:
        check_simple(f_64)
        assert_equal(f_64.version_byte, 2)
Developer: ElDeveloper | Project: scipy | Lines: 26 | Source: test_netcdf.py
Example 7: test_read_example_data
def test_read_example_data():
    # read any example data files
    for fname in glob(pjoin(TEST_DATA_PATH, '*.nc')):
        f = netcdf_file(fname, 'r')
        f.close()
        f = netcdf_file(fname, 'r', mmap=False)
        f.close()
Developer: 123jefferson | Project: MiniBloq-Sparki | Lines: 7 | Source: test_netcdf.py
Example 8: get_data
def get_data(dir, run_id='def', plot=False):
    #if 'inputs' not in locals():
    inputs = netcdf.netcdf_file(dir + '{}_inputs.cdf'.format(run_id), 'r', mmap=False)
    spectra = netcdf.netcdf_file(dir + '{}_spectra.cdf'.format(run_id), 'r', mmap=False)
    #neutrals = netcdf.netcdf_file(dir + '{}_neutrals.cdf'.format(run_id), 'r', mmap=False)
    #weights = netcdf.netcdf_file(dir + '{}_fida_weights.cdf'.format(run_id), 'r', mmap=False)
    neutrals = weights = None
    fida = +spectra.variables['fida'].data
    wave = +spectra.variables['lambda'].data
    halo = +spectra.variables['halo'].data
    BE = [+spectra.variables['full'].data, +spectra.variables['half'].data, +spectra.variables['third'].data]
    if plot:
        fig, ax = pt.subplots(nrows=len(BE)+1)
        for i in range(fida.shape[0]):
            for ax_tmp, dat_tmp in zip(ax, BE):
                ax_tmp.plot(wave, dat_tmp[i,:])
            ax[len(BE)].plot(wave, halo[i,:])
        fig.canvas.draw(); fig.show()
        # ax[0].plot(fida[i,:])
    i = np.argmin(np.abs([np.mean(inputs.variables['z_grid'].data[i,:,:]) for i in range(inputs.variables['z_grid'].data.shape[0])]))
    z = inputs.variables['z_grid'].data[i,:,:]
    x_grid = inputs.variables['x_grid'].data[i,:,:]
    y_grid = inputs.variables['y_grid'].data[i,:,:]
    if plot:
        dat_grid = neutrals.variables['halodens'].data[0,i,:,:]
        n_halos = neutrals.variables['halodens'].shape[0]
        fig, ax = pt.subplots(nrows=n_halos)
        for j in range(n_halos):
            im = ax[j].pcolormesh(x_grid, y_grid, neutrals.variables['halodens'].data[j,i,:,:])
            pt.colorbar(im, ax=ax[j])
        fig.canvas.draw(); fig.show()
    return inputs, neutrals, spectra, weights
Developer: shaunhaskey | Project: CER | Lines: 32 | Source: read_fida_out.py
Example 9: load_timeseries
def load_timeseries(filenames, region, months, bias):
    data = np.ma.zeros([len(filenames), len(months)*30])
    tmp = np.zeros([len(months)*30])
    i = 0
    [j_s, j_e, i_s, i_e] = region
    for f in filenames:
        # print(i, os.path.basename(f))
        try:
            f2 = f.replace('field16', 'field16_1')
            for j, monthstr in enumerate(months):
                f_month = f[:-10]+monthstr+'.nc'
                f2_month = f2[:-10]+monthstr+'.nc'
                var1 = netcdf_file(f_month, 'r').variables['field16'][:,0,4:-7,4:-4]
                var1 = np.ma.masked_values(var1, -1.07374e+09)
                var2 = netcdf_file(f2_month, 'r').variables['field16_1'][:,0,4:-7,4:-4]
                var2 = np.ma.masked_values(var2, -1.07374e+09)
                tmp[j*30:(j+1)*30] = ((var1+var2)/2.-bias)[:,j_s:j_e,i_s:i_e].mean(1).mean(1)
            if tmp.max() > 350.0 or tmp.min() < 170 or not np.all(np.isfinite(tmp)):
                print('error: weird vals', f)
                continue
            else:
                # print(tmp.min(), tmp.max())
                data[i,:] = tmp
                i = i+1
        except:
            print('Error, cannot load files', f)
            raise
            #continue
    return data[:i,:]
Developer: pfuhe1 | Project: cpdn_analysis | Lines: 30 | Source: return_time_2015_temp.py
Example 10: test_maskandscale
def test_maskandscale():
    t = np.linspace(20, 30, 15)
    t[3] = 100
    tm = np.ma.masked_greater(t, 99)
    fname = pjoin(TEST_DATA_PATH, 'example_2.nc')
    with netcdf_file(fname, maskandscale=True) as f:
        Temp = f.variables['Temperature']
        assert_equal(Temp.missing_value, 9999)
        assert_equal(Temp.add_offset, 20)
        assert_equal(Temp.scale_factor, np.float32(0.01))
        found = Temp[:].compressed()
        del Temp  # Remove ref to mmap, so file can be closed.
        expected = np.round(tm.compressed(), 2)
        assert_allclose(found, expected)

    with in_tempdir():
        newfname = 'ms.nc'
        f = netcdf_file(newfname, 'w', maskandscale=True)
        f.createDimension('Temperature', len(tm))
        temp = f.createVariable('Temperature', 'i', ('Temperature',))
        temp.missing_value = 9999
        temp.scale_factor = 0.01
        temp.add_offset = 20
        temp[:] = tm
        f.close()

        with netcdf_file(newfname, maskandscale=True) as f:
            Temp = f.variables['Temperature']
            assert_equal(Temp.missing_value, 9999)
            assert_equal(Temp.add_offset, 20)
            assert_equal(Temp.scale_factor, np.float32(0.01))
            expected = np.round(tm.compressed(), 2)
            found = Temp[:].compressed()
            del Temp
            assert_allclose(found, expected)
Developer: ElDeveloper | Project: scipy | Lines: 35 | Source: test_netcdf.py
Example 11: test_read_example_data
def test_read_example_data():
    # read any example data files
    for fname in glob(pjoin(TEST_DATA_PATH, '*.nc')):
        with netcdf_file(fname, 'r') as f:
            pass
        with netcdf_file(fname, 'r', mmap=False) as f:
            pass
Developer: ElDeveloper | Project: scipy | Lines: 7 | Source: test_netcdf.py
Example 12: _load_scipy
def _load_scipy(self, scipy_nc, *args, **kwdargs):
    """
    Interprets a netcdf file-like object using scipy.io.netcdf.
    The contents of the netcdf object are loaded into memory.
    """
    try:
        nc = netcdf.netcdf_file(scipy_nc, mode='r', *args, **kwdargs)
    except:
        scipy_nc = StringIO(scipy_nc)
        scipy_nc.seek(0)
        nc = netcdf.netcdf_file(scipy_nc, mode='r', *args, **kwdargs)

    def from_scipy_variable(sci_var):
        return Variable(dims=sci_var.dimensions,
                        data=sci_var.data,
                        attributes=sci_var._attributes)

    object.__setattr__(self, 'attributes', AttributesDict())
    self.attributes.update(nc._attributes)

    object.__setattr__(self, 'dimensions', OrderedDict())
    dimensions = OrderedDict((k, len(d))
                             for k, d in nc.dimensions.iteritems())
    self.dimensions.update(dimensions)

    object.__setattr__(self, 'variables', OrderedDict())
    variables = OrderedDict((vn, from_scipy_variable(v))
                            for vn, v in nc.variables.iteritems())
    self.variables.update(variables)
Developer: ebrevdo | Project: scidata | Lines: 29 | Source: data.py
Example 13: get_fidasim_output
def get_fidasim_output(self,):
    '''Open the relevant netcdf files
    SRH: 23June2015
    '''
    self.inputs = netcdf.netcdf_file(self.directory + '{}_inputs.cdf'.format(self.run_id), 'r', mmap=False)
    self.spectra = netcdf.netcdf_file(self.directory + '{}_spectra.cdf'.format(self.run_id), 'r', mmap=False)
    self.neutrals = netcdf.netcdf_file(self.directory + '{}_neutrals.cdf'.format(self.run_id), 'r', mmap=False)
Developer: shaunhaskey | Project: CER | Lines: 8 | Source: fida_funcs.py
Example 14: write_2d_file
def write_2d_file(M2d, varname, outfile, mask, fillValue=1.0e20):
    """
    Dumps a 2D array in a NetCDF file.

    Arguments:
    * M2d       * the 2D array to dump
    * varname   * the variable name on the NetCDF file
    * outfile   * file that will be created. If it is an existing file,
                  it will be opened in 'append' mode.
    * mask      * a mask object consistent with the M2d array
    * fillValue * (optional) value to set the missing_value attribute.

    When the file is opened in 'append' mode this method tries to adapt to
    existing dimension names (for example it works both with 'lon' or 'longitude').

    Does not return anything."""
    if os.path.exists(outfile):
        ncOUT = NC.netcdf_file(outfile, "a")
        print("appending", varname, "in", outfile)
    else:
        ncOUT = NC.netcdf_file(outfile, "w")
        jpk, jpj, jpi = mask.shape
        ncOUT.createDimension("longitude", jpi)
        ncOUT.createDimension("latitude", jpj)
        ncOUT.createDimension("depth", jpk)

        ncvar = ncOUT.createVariable("longitude", "f", ("longitude",))
        setattr(ncvar, "units", "degrees_east")
        setattr(ncvar, "long_name", "longitude")
        setattr(ncvar, "standard_name", "longitude")
        setattr(ncvar, "axis", "X")
        setattr(ncvar, "valid_min", -5.5625)
        setattr(ncvar, "valid_max", 36.25)
        setattr(ncvar, "_CoordinateAxisType", "Lon")
        ncvar[:] = mask.xlevels[0, :]

        ncvar = ncOUT.createVariable("latitude", "f", ("latitude",))
        setattr(ncvar, "units", "degrees_north")
        setattr(ncvar, "long_name", "latitude")
        setattr(ncvar, "standard_name", "latitude")
        setattr(ncvar, "axis", "Y")
        setattr(ncvar, "valid_min", 30.1875)
        setattr(ncvar, "valid_max", 45.9375)
        setattr(ncvar, "_CoordinateAxisType", "Lat")
        ncvar[:] = mask.ylevels[:, 0]

    ncvar = ncOUT.createVariable(varname, "f", (lat_dimension_name(ncOUT), lon_dimension_name(ncOUT)))
    setattr(ncvar, "fillValue", fillValue)
    setattr(ncvar, "missing_value", fillValue)
    setattr(ncvar, "coordinates", "latitude longitude")
    ncvar[:] = M2d

    setattr(ncOUT, "latitude_min", 30.0)
    setattr(ncOUT, "latitude_max", 46.0)
    setattr(ncOUT, "longitude_min", -6.0)
    setattr(ncOUT, "longitude_max", 37.0)
    ncOUT.close()
Developer: inogs | Project: bit.sea | Lines: 58 | Source: netcdf3.py
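The lat_dimension_name and lon_dimension_name helpers called above are not shown on this page; the docstring only says that the function "tries to adapt to existing dimension names". A hypothetical sketch of how such helpers could work (the actual implementation in bit.sea may differ):

def lon_dimension_name(ncObj):
    # Return whichever longitude-like dimension name the open file already uses.
    for candidate in ('longitude', 'lon', 'x'):
        if candidate in ncObj.dimensions:
            return candidate
    raise KeyError('no longitude-like dimension found')

def lat_dimension_name(ncObj):
    # Return whichever latitude-like dimension name the open file already uses.
    for candidate in ('latitude', 'lat', 'y'):
        if candidate in ncObj.dimensions:
            return candidate
    raise KeyError('no latitude-like dimension found')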
Example 15: WriteTMPave
def WriteTMPave(biofile, physfile, outfile):
    nc = NC.netcdf_file(biofile, "r")
    DIMS = nc.dimensions
    jpk = DIMS['depth']
    jpj = DIMS['lat']
    jpi = DIMS['lon']

    ncOUT = NC.netcdf_file(outfile, "w")
    setattr(ncOUT, "Convenctions", "COARDS")
    setattr(ncOUT, "DateStart", nc.DateStart)
    setattr(ncOUT, "Date__End", nc.Date__End)
    ncOUT.createDimension('time', 1)
    ncOUT.createDimension('lon', jpi)
    ncOUT.createDimension('lat', jpj)
    ncOUT.createDimension('depth', jpk)

    for var in ['lon', 'lat', 'depth']:
        ncvar = ncOUT.createVariable(var, 'f', (var,))
        ncvar[:] = nc.variables[var].data
    nc.close()

    setattr(ncOUT.variables['lon'], "long_name", "Longitude")
    setattr(ncOUT.variables['lat'], "long_name", "Latitude")

    for var in ['N1p', 'N3n', 'O2o']:
        ncIN = NC.netcdf_file(biofile, "r")
        ncvar = ncOUT.createVariable(var, 'f', ('time', 'depth', 'lat', 'lon'))
        ncvar[:] = ncIN.variables[var].data.copy()
        setattr(ncvar, "long_name", var)
        setattr(ncvar, "missing_value", 1.e+20)
        ncIN.close()

    for var in ['votemper', 'vosaline']:
        ncIN = NC.netcdf_file(physfile, "r")
        ncvar = ncOUT.createVariable(var, 'f', ('time', 'depth', 'lat', 'lon'))
        ncvar[:] = ncIN.variables[var].data.copy()
        setattr(ncvar, "long_name", var)
        setattr(ncvar, "missing_value", 1.e+20)
        ncIN.close()

    AGGREGATE_DICT = {'P_l': ['P1l', 'P2l', 'P3l', 'P4l']}
    for var in AGGREGATE_DICT.keys():
        ncvar = ncOUT.createVariable(var, 'f', ('time', 'depth', 'lat', 'lon'))
        junk = np.zeros((1, jpk, jpj, jpi), np.float32)
        for lvar in AGGREGATE_DICT[var]:
            ncIN = NC.netcdf_file(biofile, "r")
            junk += ncIN.variables[lvar].data.copy()
            ncIN.close()
        tmask = junk > 1.e+19
        junk[tmask] = 1.e+20
        ncvar[:] = junk
        setattr(ncvar, "long_name", var)
        setattr(ncvar, "missing_value", 1.e+20)
    ncOUT.close()
Developer: inogs | Project: bit.sea | Lines: 58 | Source: var_aggregator.py
Example 16: test_timestep_diff
def test_timestep_diff(data, dt, eps=2.e-4):
    """
    check that the results are close to the reference ones
    (stored in the refdata folder)
    """
    filename = "timesteptest_dt=" + str(dt) + ".nc"
    f_test = netcdf.netcdf_file(filename, "r")
    f_ref = netcdf.netcdf_file(os.path.join("long_test/refdata", filename), "r")
    for var in ["t", "z", "th_d", "T", "p", "r_v", "rhod"]:
        assert np.isclose(f_test.variables[var][:], f_ref.variables[var][:], atol=0, rtol=eps).all(), "differs e.g. " + str(var) + "; max(ref diff) = " + str(np.where(f_ref.variables[var][:] != 0., abs((f_test.variables[var][:]-f_ref.variables[var][:])/f_ref.variables[var][:]), 0.).max())
Developer: djarecka | Project: parcel | Lines: 11 | Source: test_timestep.py
Example 17: use_netcdf_files
def use_netcdf_files():
    nc = netcdf.netcdf_file('/home/nicholas/data/netcdf_files/CFLX_2000_2009.nc', 'r')
    all_data = nc.variables['Cflx'][:, :45, :180]
    nc.close()
    all_data = all_data * 1000 * 24 * 60 * 60
    all_data = ma.masked_values(all_data, 1e20)

    nc = netcdf.netcdf_file('/home/nicholas/data/netcdf_files/ORCA2.0_grid.nc', 'r')
    mask = nc.variables['mask'][0, :45, :180]
    nc.close()
    mask = ma.masked_values(mask, -1e34)
    return all_data, mask
Developer: nicholaschris | Project: masters_thesis | Lines: 12 | Source: process_locations.py
Example 18: test_byte_gatts
def test_byte_gatts():
    # Check that global "string" atts work like they did before py3k
    # unicode and general bytes confusion
    with in_tempdir():
        filename = 'g_byte_atts.nc'
        f = netcdf_file(filename, 'w')
        f._attributes['holy'] = b'grail'
        f._attributes['witch'] = 'floats'
        f.close()
        f = netcdf_file(filename, 'r')
        assert_equal(f._attributes['holy'], b'grail')
        assert_equal(f._attributes['witch'], b'floats')
        f.close()
Developer: ElDeveloper | Project: scipy | Lines: 13 | Source: test_netcdf.py
Example 19: get_los_data
def get_los_data(dir='/u/haskeysr/FIDASIM/RESULTS/D3D/155196/00500/MAIN_ION330/', run_id='def', plot=False):
    #if 'inputs' not in locals():
    inputs = netcdf.netcdf_file(dir + '{}_inputs.cdf'.format(run_id), 'r', mmap=False)
    los_wght = inputs.variables['los_wght'].data
    #spectra = netcdf.netcdf_file(dir + '{}_spectra.cdf'.format(run_id), 'r', mmap=False)
    neutrals = netcdf.netcdf_file(dir + '{}_neutrals.cdf'.format(run_id), 'r', mmap=False)
    halo_dens = neutrals.variables['halodens'].data
    #weights = netcdf.netcdf_file(dir + '{}_fida_weights.cdf'.format(run_id), 'r', mmap=False)
    print('hello')
    print('hello2')
    print('hello3')
    1/0
    return halo_dens, los_wght
Developer: shaunhaskey | Project: CER | Lines: 13 | Source: fida_funcs.py
Example 20: doHeatContent
def doHeatContent(thetaFile, thetaVar, rhoFile, rhoVar, nodata, thresholdDepth, outFile, outFormat, options):
    # open file
    fhTheta = netcdf.netcdf_file(thetaFile, 'r')
    if fhTheta is None:
        exitMessage("Could not open file {0}. Exit 2.".format(thetaFile), 2)
    fhRho = netcdf.netcdf_file(rhoFile, 'r')
    if fhRho is None:
        exitMessage("Could not open file {0}; Exit(2).".format(rhoFile), 2)

    thetao = fhTheta.variables[thetaVar][:]  # [time, levels, lat, lon]
    rho = fhRho.variables[rhoVar][:]
    levelsTmp = fhTheta.variables['lev_bnds'][:]
    levels = numpy.ravel(0.5*(levelsTmp[:,0] + levelsTmp[:,1]))

    # mapHeat: time, lat, lon
    mapHeat = numpy.zeros((thetao.shape[0], thetao.shape[2], thetao.shape[3])) - 1

    # Compressing loops...
    timelatlon = []
    for itime in range(thetao.shape[0]):
        for ilat in range(thetao.shape[2]):
            for ilon in range(thetao.shape[3]):
                if thetao[itime, 0, ilat, ilon] < nodata:
                    timelatlon.append((itime, ilat, ilon))

    # loop over time, lat and lon
    counter = 0
    for ill in timelatlon:
        profileTheta = thetao[ill[0], :, ill[1], ill[2]].ravel()
        profileRho = rho[ill[0], :, ill[1], ill[2]].ravel()
        heat = computeHeatPotential(profileTheta, profileRho, levels, thresholdDepth, nodata)
        mapHeat[ill[0], ill[1], ill[2]] = heat
        gdal.TermProgress_nocb(counter/float(len(timelatlon)))
        counter = counter + 1
    gdal.TermProgress_nocb(1)

    # save result
    outDrv = gdal.GetDriverByName(outFormat)
    outDS = outDrv.Create(outFile, mapHeat.shape[2], mapHeat.shape[1], mapHeat.shape[0], GDT_Float32, options)
    outDS.SetProjection(latlon())
    for itime in range(thetao.shape[0]):
        data = numpy.ravel(mapHeat[itime, :, :])
        outDS.GetRasterBand(itime+1).WriteArray(numpy.flipud(data.reshape((mapHeat.shape[1], mapHeat.shape[2]))))
        gdal.TermProgress_nocb(itime/float(thetao.shape[0]))
    gdal.TermProgress_nocb(1)

    outDS = None
Developer: BrunoCombal | Project: geowow_work | Lines: 50 | Source: heatContent.py
Note: the scipy.io.netcdf.netcdf_file examples in this article were compiled by 纯净天空 from source-code and documentation hosting platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by many developers; copyright remains with the original authors, and distribution and use are governed by each project's License. Do not reproduce without permission.