本文整理汇总了Python中xarray.open_dataset函数的典型用法代码示例。如果您正苦于以下问题:Python open_dataset函数的具体用法?Python open_dataset怎么用?Python open_dataset使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了open_dataset函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: test_distribute_climate_cru
def test_distribute_climate_cru(self):
    """Compare climate distributed from the custom HistAlp file vs. CRU.

    Builds two glacier directories for Hintereisferner, feeds the first
    from the default (HistAlp) climate file and the second from CRU, then
    checks that altitude-corrected temperature and precipitation agree
    within loose tolerances.
    """
    hef_file = get_demo_file('Hintereisferner.shp')
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdirs = []

    # Directory 1: driven by the default custom climate file
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gdirs.append(gdir)
    # Directory 2: driven by CRU data
    gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir_cru)
    gis.define_glacier_region(gdir, entity=entity)
    gdirs.append(gdir)

    climate.distribute_climate_data([gdirs[0]])

    # Point the global config at the demo CRU files ('~' disables the
    # custom climate file), then restore it for subsequent tests.
    cru_dir = get_demo_file('cru_ts3.23.1901.2014.tmp.dat.nc')
    cru_dir = os.path.dirname(cru_dir)
    cfg.PATHS['climate_file'] = '~'
    cfg.PATHS['cru_dir'] = cru_dir
    climate.distribute_climate_data([gdirs[1]])
    cfg.PATHS['cru_dir'] = '~'
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')

    gdh = gdirs[0]  # HistAlp-driven directory
    gdc = gdirs[1]  # CRU-driven directory
    with xr.open_dataset(os.path.join(gdh.dir, 'climate_monthly.nc')) as nc_h:
        with xr.open_dataset(os.path.join(gdc.dir, 'climate_monthly.nc')) as nc_c:
            # put on the same altitude
            # (using default gradient because better)
            temp_cor = nc_c.temp -0.0065 * (nc_h.ref_hgt - nc_c.ref_hgt)
            totest = temp_cor - nc_h.temp
            self.assertTrue(totest.mean() < 0.5)
            # precip
            totest = nc_c.prcp - nc_h.prcp
            self.assertTrue(totest.mean() < 100)
开发者ID:alexjarosch,项目名称:oggm,代码行数:35,代码来源:test_prepro.py
示例2: __init__
def __init__(self,config_name='config.ini',init=False,*args,**kwargs):
    """Load FAPAR observations and matching control-run fields.

    Parameters
    ----------
    config_name : str
        Name of the ini file holding shared and FAPAR-specific settings.
    init : bool
        Forwarded to the parent constructor.
    """
    #Shared parameters as attributes
    super(FAPAR,self).__init__(config_name,init,*args,**kwargs)
    #FAPAR parameters,
    super(FAPAR,self).import_config(config_name,'FAPAR')
    #Load observation file and rename/reorder dimensions
    self.obs=xr.open_dataset(self.fnobs,decode_times=False).FAPAR
    self.obs=self.obs.rename({'TAX':'TIME'})
    self.obs=self.obs.transpose('TIME','LATITUDE','LONGITUDE')
    #Overwrite timeaxis with datetime object
    # 106 month-end steps starting Sep 1997 -- presumably the FAPAR
    # observation period; TODO confirm against the obs file.
    self.timeaxis=pd.date_range('1997-09-01',freq='m',periods=106)
    self.obs['TIME']=self.timeaxis
    self.obs_mean=self.obs.mean(dim='TIME')
    #Monthly mean
    self.obs_monthly=self.obs.groupby('TIME.month').mean(dim='TIME')
    #Load par from control run
    self.par=xr.open_dataset(self.fnctrl_m).par.isel(grid_only=0).drop('grid_only')
    # Restrict the control run to the observation window; TIME here is
    # still in decimal years (decode_times was not applied to this file).
    self.tconstraint=(self.par.TIME>1997.65) * (self.par.TIME<2006.5)
    self.par=self.par[self.tconstraint]
    self.par['TIME']=self.timeaxis
    self.name='fapar'
    #Load area from control run, stack it for seasonal mean
    self.area=xr.open_dataset(self.fnctrl).area
    self.area_monthly=xr.concat([self.area]*12,dim='month')
    self.area_monthly.month.values=range(1,13)
开发者ID:mystastian,项目名称:lhstools,代码行数:25,代码来源:fapar.py
示例3: xrload
def xrload(file_name, engine="h5netcdf", load_to_mem=True, create_new=False):
    """
    Loads a xarray dataset.
    Parameters
    ----------
    file_name: name of file
    engine: engine used to load file
    load_to_mem: once opened, load from disk to memory
    create_new: if no file exists make a blank one
    Returns
    -------
    ds: loaded Dataset
    """
    full_name = auto_add_extension(file_name, engine)
    try:
        try:
            dataset = xr.open_dataset(full_name, engine=engine)
        except AttributeError as attr_err:
            # Some engines surface incompatibilities as AttributeError;
            # fall back to the plain netcdf4 backend in that case only.
            if "object has no attribute" not in str(attr_err):
                raise attr_err
            dataset = xr.open_dataset(full_name, engine="netcdf4")
        if load_to_mem:
            dataset.load()
            dataset.close()
    except (RuntimeError, OSError) as open_err:
        # "No such file ..." -> optionally start from an empty dataset
        if create_new and "o such" in str(open_err):
            dataset = xr.Dataset()
        else:
            raise open_err
    return dataset
开发者ID:cotsog,项目名称:xyzpy,代码行数:33,代码来源:manage.py
示例4: from_netcdf
def from_netcdf(netcdf_path):
    """Load serialized TimeSeries from netCDF file.

    The file holds one netCDF group per channel; the first group also
    carries the series metadata (target, meta_features, name, path).
    """
    with netCDF4.Dataset(netcdf_path) as ds:
        channels = list(ds.groups)

    # First channel group stores time series metadata
    with xr.open_dataset(netcdf_path, group=channels[0]) as ds:
        t = [ds.time.values]
        m = [ds.measurement.values]
        e = [ds.error.values]
        target = ds.attrs.get('target')
        meta_features = ds.meta_features.to_series()
        name = ds.attrs.get('name')
        path = ds.attrs.get('path')

    # Remaining groups hold extra measurement channels; per-channel
    # time/error arrays are only present when they differ.
    for channel in channels[1:]:
        with xr.open_dataset(netcdf_path, group=channel) as ds:
            m.append(ds.measurement.values)
            if 'time' in ds:
                t.append(ds.time.values)
            if 'error' in ds:
                e.append(ds.error.values)

    return TimeSeries(_make_array(t), _make_array(m), _make_array(e), target,
                      meta_features, name, path)
开发者ID:BenJamesbabala,项目名称:cesium,代码行数:25,代码来源:time_series.py
示例5: test_calculate_external_single_deployment
def test_calculate_external_single_deployment(self):
    """Check that CTD-derived products required by the nutnr stream are
    produced after interpolating from the CTD stream dataset."""
    ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
    nut_ds = xr.open_dataset(os.path.join(DATA_DIR, self.nutnr_fn), decode_times=False)
    # keep only the variables the algorithms under test need
    ctd_ds = ctd_ds[['obs', 'time', 'deployment', 'temperature', 'pressure',
                     'pressure_temp', 'conductivity', 'ext_volt0']]
    nut_ds = nut_ds[['obs', 'time', 'deployment', 'spectral_channels',
                     'frame_type', 'nutnr_dark_value_used_for_fit']]

    ctd_stream_dataset = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
    ctd_stream_dataset.events = self.ctd_events
    ctd_stream_dataset._insert_dataset(ctd_ds)
    ctd_stream_dataset.calculate_all()

    nut_stream_dataset = StreamDataset(self.nutnr_sk, {}, [self.ctdpf_sk], 'UNIT')
    nut_stream_dataset.events = self.nut_events
    nut_stream_dataset._insert_dataset(nut_ds)
    nut_stream_dataset.calculate_all()
    # Interpolate CTD values onto the nutnr time base, then recompute the
    # externally-dependent products.
    nut_stream_dataset.interpolate_needed({self.ctdpf_sk: ctd_stream_dataset})
    nut_stream_dataset.calculate_all()

    expected_params = ['ctdpf_sbe43_sample-seawater_temperature',
                       'ctdpf_sbe43_sample-practical_salinity',
                       'salinity_corrected_nitrate']
    self.assert_parameters_in_datasets(nut_stream_dataset.datasets, expected_params)
示例6: test_calculate_external_multiple_deployments
def test_calculate_external_multiple_deployments(self):
    """Same as the single-deployment case, but with the time axis remapped
    so the data spans two deployments before products are calculated."""
    ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
    nut_ds = xr.open_dataset(os.path.join(DATA_DIR, self.nutnr_fn), decode_times=False)
    # keep only the variables the algorithms under test need
    ctd_ds = ctd_ds[['obs', 'time', 'deployment', 'temperature', 'pressure',
                     'pressure_temp', 'conductivity', 'ext_volt0']]
    nut_ds = nut_ds[['obs', 'time', 'deployment', 'spectral_channels',
                     'frame_type', 'nutnr_dark_value_used_for_fit']]

    # remap times to make this two separate deployments
    dep1_start = self.ctd_events.deps[1].ntp_start
    dep2_stop = self.ctd_events.deps[2].ntp_start + 864000
    ctd_ds.time.values = np.linspace(dep1_start + 1, dep2_stop - 1, num=ctd_ds.time.shape[0])
    nut_ds.time.values = np.linspace(dep1_start + 1, dep2_stop - 1, num=nut_ds.time.shape[0])

    ctd_stream_dataset = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
    ctd_stream_dataset.events = self.ctd_events
    ctd_stream_dataset._insert_dataset(ctd_ds)
    ctd_stream_dataset.calculate_all()

    nut_stream_dataset = StreamDataset(self.nutnr_sk, {}, [self.ctdpf_sk], 'UNIT')
    nut_stream_dataset.events = self.nut_events
    nut_stream_dataset._insert_dataset(nut_ds)
    nut_stream_dataset.calculate_all()
    # Interpolate CTD values onto the nutnr time base, then recompute.
    nut_stream_dataset.interpolate_needed({self.ctdpf_sk: ctd_stream_dataset})
    nut_stream_dataset.calculate_all()

    expected_params = ['ctdpf_sbe43_sample-seawater_temperature',
                       'ctdpf_sbe43_sample-practical_salinity',
                       'salinity_corrected_nitrate']
    self.assert_parameters_in_datasets(nut_stream_dataset.datasets, expected_params)
开发者ID:petercable,项目名称:stream_engine,代码行数:32,代码来源:test_stream_dataset.py
示例7: test_calculate_external_single_deployment
def test_calculate_external_single_deployment(self):
    """Verify cross-stream (external) products using explicit calibration
    coefficients that span the requested time range."""
    tr = TimeRange(3.65342400e+09, 3.65351040e+09)
    # one coefficient record per cal, valid just beyond the time range
    coefficients = {k: [{'start': tr.start-1, 'stop': tr.stop+1, 'value': v, 'deployment': 1}]
                    for k, v in self.ctd_nutnr_cals.iteritems()}
    coefficients = CalibrationCoefficientStore(coefficients, 'UNIT')

    ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
    nut_ds = xr.open_dataset(os.path.join(DATA_DIR, self.nutnr_fn), decode_times=False)
    # keep only the variables the algorithms under test need
    ctd_ds = ctd_ds[['obs', 'time', 'deployment', 'temperature', 'pressure',
                     'pressure_temp', 'conductivity', 'ext_volt0']]
    nut_ds = nut_ds[['obs', 'time', 'deployment', 'spectral_channels',
                     'frame_type', 'nutnr_dark_value_used_for_fit']]

    ctd_stream_dataset = StreamDataset(self.ctdpf_sk, coefficients, {}, [], 'UNIT')
    ctd_stream_dataset._insert_dataset(ctd_ds)
    ctd_stream_dataset.calculate_internal()

    nut_stream_dataset = StreamDataset(self.nutnr_sk, coefficients, {}, [self.ctdpf_sk], 'UNIT')
    nut_stream_dataset._insert_dataset(nut_ds)
    nut_stream_dataset.calculate_internal()
    # external products need the interpolated CTD data first
    nut_stream_dataset.interpolate_needed({self.ctdpf_sk: ctd_stream_dataset})
    nut_stream_dataset.calculate_external()

    expected_params = ['ctdpf_sbe43_sample-seawater_temperature',
                       'ctdpf_sbe43_sample-practical_salinity',
                       'temp_sal_corrected_nitrate']
    self.assert_parameters_in_datasets(nut_stream_dataset.datasets, expected_params)
开发者ID:JeffRoy,项目名称:stream_engine,代码行数:29,代码来源:test_stream_dataset.py
示例8: test_coordinates_encoding
def test_coordinates_encoding(self):
    """Round-trip the CF 'coordinates' attribute encoding.

    After writing to netCDF the coordinate names must appear in each data
    variable's 'coordinates' attribute (or the global attribute when no
    data variables remain), and never on the coordinate variables.
    """
    def equals_latlon(obj):
        # attribute ordering is not guaranteed
        return obj == 'lat lon' or obj == 'lon lat'

    original = Dataset({'temp': ('x', [0, 1]), 'precip': ('x', [0, -1])},
                       {'lat': ('x', [2, 3]), 'lon': ('x', [4, 5])})
    with self.roundtrip(original) as actual:
        self.assertDatasetIdentical(actual, original)
    with create_tmp_file() as tmp_file:
        original.to_netcdf(tmp_file)
        # decode_coords=False exposes the raw 'coordinates' attributes
        with open_dataset(tmp_file, decode_coords=False) as ds:
            self.assertTrue(equals_latlon(ds['temp'].attrs['coordinates']))
            self.assertTrue(equals_latlon(ds['precip'].attrs['coordinates']))
            self.assertNotIn('coordinates', ds.attrs)
            self.assertNotIn('coordinates', ds['lat'].attrs)
            self.assertNotIn('coordinates', ds['lon'].attrs)

    # with no data variables, coordinates move to the global attributes
    modified = original.drop(['temp', 'precip'])
    with self.roundtrip(modified) as actual:
        self.assertDatasetIdentical(actual, modified)
    with create_tmp_file() as tmp_file:
        modified.to_netcdf(tmp_file)
        with open_dataset(tmp_file, decode_coords=False) as ds:
            self.assertTrue(equals_latlon(ds.attrs['coordinates']))
            self.assertNotIn('coordinates', ds['lat'].attrs)
            self.assertNotIn('coordinates', ds['lon'].attrs)
开发者ID:ashang,项目名称:xarray,代码行数:26,代码来源:test_backends.py
示例9: woa_get_ts
def woa_get_ts(llon, llat, plot=0):
    """Extract annual-mean temperature and salinity profiles from the
    World Ocean Atlas 2013 at the grid point nearest to (llon, llat).

    Parameters
    ----------
    llon : float
        Longitude of the point of interest.
    llat : float
        Latitude of the point of interest.
    plot : int, optional
        If truthy, show the T and S profiles side by side.

    Returns
    -------
    T : ndarray
        Mean temperature profile at the nearest grid point.
    S : ndarray
        Mean salinity profile at the nearest grid point.
    depth : ndarray
        Depth levels [m].
    """
    import xarray as xr
    tempfile = '/Users/gunnar/Data/world_ocean_atlas/woa13_decav_t00_04v2.nc'
    saltfile = '/Users/gunnar/Data/world_ocean_atlas/woa13_decav_s00_04v2.nc'
    # context managers close the files (previously they were left open)
    with xr.open_dataset(tempfile, decode_times=False) as dt:
        a = dt.isel(time=0)
        # bug fix: reset_coords returns a new object -- the result used to
        # be discarded, so the coords were never actually dropped
        a = a.reset_coords(drop=True)
        t = a['t_mn']
        T = t.sel(lon=llon, lat=llat, method='nearest').values
    with xr.open_dataset(saltfile, decode_times=False) as ds:
        a = ds.isel(time=0)
        a = a.reset_coords(drop=True)
        s = a['s_mn']
        S = s.sel(lon=llon, lat=llat, method='nearest').values
        depth = s['depth'].data
    if plot:
        # import gvfigure as gvf
        import matplotlib.pyplot as plt
        # fig,ax = gvf.newfig(3,5)
        # plt.plot(T,depth)
        # ax.invert_yaxis()
        f, (ax1, ax2) = plt.subplots(1, 2, sharey=True)
        ax1.plot(T, depth, 'k')
        ax1.set_xlabel('Temperature')
        ax1.set_ylabel('Depth [m]')
        ax2.plot(S, depth, 'k')
        ax2.set_xlabel('Salinity')
        ax1.invert_yaxis()
        f.set_figwidth(5)
        f.set_figheight(5)
    return T, S, depth
开发者ID:gunnarjakob,项目名称:pythonlib,代码行数:35,代码来源:ocean.py
示例10: test_qc
def test_qc(self):
    """Run a stream request with QC parameters and verify the QC
    executed/results variables are attached to the derived product."""
    nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
    ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
    nutnr_fn = 'nutnr_a_sample.nc'
    ctdpf_fn = 'ctdpf_sbe43_sample.nc'

    cals = json.load(open(os.path.join(DATA_DIR, 'cals.json')))
    qc = json.load(open(os.path.join(DATA_DIR, 'qc.json')))
    tr = TimeRange(3.65342400e+09, 3.65351040e+09)
    # wrap each cal value in a record valid just beyond the time range
    coefficients = {k: [{'start': tr.start-1, 'stop': tr.stop+1, 'value': cals[k], 'deployment': 1}] for k in cals}
    sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, qc_parameters=qc, request_id='UNIT')

    nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
    ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
    # limit each dataset to the parameters the request actually needs
    ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]
    nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]

    sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
    sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
    sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
    sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

    sr.calculate_derived_products()

    expected_parameters = ['temp_sal_corrected_nitrate_qc_executed',
                           'temp_sal_corrected_nitrate_qc_results']
    self.assert_parameters_in_datasets(sr.datasets[nutnr_sk].datasets, expected_parameters)
开发者ID:JeffRoy,项目名称:stream_engine,代码行数:28,代码来源:test_stream_request.py
示例11: test_add_externals
def test_add_externals(self):
    """After deriving products, externally sourced CTD pressure should be
    imported into the nutnr dataset only and exposed as int_ctd_pressure
    in every JSON response record."""
    nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
    ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
    nutnr_fn = 'nutnr_a_sample.nc'
    ctdpf_fn = 'ctdpf_sbe43_sample.nc'

    cals = json.load(open(os.path.join(DATA_DIR, 'cals.json')))
    tr = TimeRange(3.65342400e+09, 3.65351040e+09)
    # wrap each cal value in a record valid just beyond the time range
    coefficients = {k: [{'start': tr.start-1, 'stop': tr.stop+1, 'value': cals[k], 'deployment': 1}] for k in cals}
    sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, request_id='UNIT')

    nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
    ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
    # limit each dataset to the parameters the request actually needs
    ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]
    nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]

    sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
    sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
    sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
    sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

    sr.calculate_derived_products()
    sr.import_extra_externals()

    # pressure is copied into the nutnr dataset, not back into the CTD one
    self.assertIn('ctdpf_sbe43_sample-seawater_pressure', sr.datasets[nutnr_sk].datasets[0])
    self.assertNotIn('ctdpf_sbe43_sample-seawater_pressure', sr.datasets[ctdpf_sk].datasets[0])

    data = json.loads(JsonResponse(sr).json())
    for each in data:
        self.assertIn('int_ctd_pressure', each)
开发者ID:JeffRoy,项目名称:stream_engine,代码行数:31,代码来源:test_stream_request.py
示例12: test_csv
def test_csv(self):
    """A completed stream request should render to non-empty CSV."""
    nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
    ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
    nutnr_fn = 'nutnr_a_sample.nc'
    ctdpf_fn = 'ctdpf_sbe43_sample.nc'

    cals = json.load(open(os.path.join(DATA_DIR, 'cals.json')))
    tr = TimeRange(3.65342400e+09, 3.65351040e+09)
    # wrap each cal value in a record valid just beyond the time range
    coefficients = {k: [{'start': tr.start-1, 'stop': tr.stop+1, 'value': cals[k], 'deployment': 1}] for k in cals}
    sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, request_id='UNIT')

    nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
    ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
    # limit each dataset to the parameters the request actually needs
    nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]
    ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]

    sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
    sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
    sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
    sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

    sr.calculate_derived_products()

    csv = CsvGenerator(sr, ',').to_csv()
    self.assertTrue(csv)
开发者ID:JeffRoy,项目名称:stream_engine,代码行数:25,代码来源:test_stream_request.py
示例13: create_metbk_hourly_sr
def create_metbk_hourly_sr(self):
    """Build a StreamRequest for the hourly METBK stream with the metbk
    and velpt source datasets inserted and events attached.

    Helper used by the hourly-averaging tests; returns the prepared
    StreamRequest.
    """
    metbk_fn = 'metbk_a_dcl_instrument_recovered.nc'
    metbk_ds = xr.open_dataset(os.path.join(DATA_DIR, metbk_fn), decode_times=False)
    vel_fn = 'velpt_ab_dcl_instrument_recovered.nc'
    vel_ds = xr.open_dataset(os.path.join(DATA_DIR, vel_fn), decode_times=False)

    # both of these datasets are labeled deployment 3 but the times are squarely in deployment 1. Fix.
    metbk_ds.deployment.values[:] = 1
    vel_ds.deployment.values[:] = 1

    # request the full time span covered by the metbk data
    tr = TimeRange(metbk_ds.time.values[0], metbk_ds.time.values[-1])
    sr = StreamRequest(self.hourly_sk, [], tr, {}, request_id='UNIT')
    # limit each dataset to the parameters the request actually needs
    metbk_ds = metbk_ds[self.base_params + [p.name for p in sr.stream_parameters[self.met_sk]]]
    vel_ds = vel_ds[self.base_params + [p.name for p in sr.stream_parameters[self.vel_sk]]]

    sr.datasets[self.met_sk] = StreamDataset(self.met_sk, sr.uflags, [self.hourly_sk, self.vel_sk], sr.request_id)
    sr.datasets[self.hourly_sk] = StreamDataset(self.hourly_sk, sr.uflags, [self.met_sk, self.vel_sk], sr.request_id)
    sr.datasets[self.vel_sk] = StreamDataset(self.vel_sk, sr.uflags, [self.hourly_sk, self.met_sk], sr.request_id)
    sr.datasets[self.hourly_sk].events = self.met_events
    sr.datasets[self.met_sk].events = self.met_events
    sr.datasets[self.vel_sk].events = self.vel_events
    sr.datasets[self.met_sk]._insert_dataset(metbk_ds)
    sr.datasets[self.vel_sk]._insert_dataset(vel_ds)
    return sr
开发者ID:petercable,项目名称:stream_engine,代码行数:28,代码来源:test_stream_request.py
示例14: test_input_tmax_and_tmin
def test_input_tmax_and_tmin():
    """Visual sanity check that daily tmax >= tmin in the Daymet inputs.

    Plots the tmax - tmin difference for the first day of 1988; suspect
    cells (tmax < tmin) are forced to -100 so they stand out on the map.
    """
    p_min = "/snow3/huziy/Daymet_daily/daymet_v3_tmin_1988_na.nc4"
    p_max = "/snow3/huziy/Daymet_daily/daymet_v3_tmax_1988_na.nc4"

    # first time slice only
    tmx = xarray.open_dataset(p_max)["tmax"][0, :, :]
    tmn = xarray.open_dataset(p_min)["tmin"][0, :, :]

    tmn_msk = tmn.to_masked_array()
    tmx_msk = tmx.to_masked_array()

    diff = tmx_msk - tmn_msk
    print(diff.shape)

    plt.figure()
    # suspect: tmax below tmin on valid (non-nan, unmasked) cells
    suspect = (diff < 0.) & ~np.isnan(diff) & ~diff.mask
    if np.any(suspect):
        # NOTE(review): len(suspect) is the number of rows of the boolean
        # mask, not the count of suspect cells -- suspect.sum() was
        # probably intended here. Debug output only.
        print(diff[suspect], len(suspect), diff[suspect].min(), "...", diff[suspect].max())
        # print(tmx_msk[suspect])
        # print(tavg_msk[suspect])
        # print(tmn_msk[suspect])
        diff[suspect] = -100

    im = plt.pcolormesh(tmn.x, tmn.y, diff, cmap=cm.get_cmap("coolwarm", 20), vmin=-2, vmax=2)
    plt.colorbar(im)
    plt.show()
开发者ID:guziy,项目名称:RPN,代码行数:33,代码来源:compare_tavg_and_tmax.py
示例15: main
def main(in_file: Path, target_grid_file: Path, out_dir: Path=None):
    """Interpolate daily-averaged (t, x, y) fields from in_file onto the
    grid defined by target_grid_file, using nearest-neighbour lookup.

    Parameters
    ----------
    in_file : Path
        Source netCDF file with lon/lat coordinates and time-dependent fields.
    target_grid_file : Path
        NetCDF file defining the destination lon/lat grid.
    out_dir : Path, optional
        Output directory (created if needed); defaults to in_file's folder.
        The output name is the input name with an "_interpolated" suffix.
    """
    if out_dir is not None:
        out_dir.mkdir(exist_ok=True)
        out_file = out_dir / (in_file.name + "_interpolated")
    else:
        out_file = in_file.parent / (in_file.name + "_interpolated")

    if out_file.exists():
        print(f"Skipping {in_file}, output already exists ({out_file})")
        return

    with xarray.open_dataset(target_grid_file) as ds_grid:
        lons, lats = ds_grid["lon"][:].values, ds_grid["lat"][:].values
        # nearest-neighbour search is done in 3D Cartesian coordinates
        xt, yt, zt = lat_lon.lon_lat_to_cartesian(lons.flatten(), lats.flatten())

        with xarray.open_dataset(in_file) as ds_in:
            lons_s, lats_s = ds_in["lon"][:].values, ds_in["lat"][:].values
            xs, ys, zs = lat_lon.lon_lat_to_cartesian(lons_s.flatten(), lats_s.flatten())
            # KD-tree over the source points; query once for all targets
            ktree = KDTree(list(zip(xs, ys, zs)))
            dists, inds = ktree.query(list(zip(xt, yt, zt)), k=1)

            # resample to daily
            ds_in_r = ds_in.resample(t="1D", keep_attrs=True).mean()

            ds_out = xarray.Dataset()
            # copy the grid-defining variables into the output
            for vname, var in ds_grid.variables.items():
                ds_out[vname] = var[:]
            ds_out["t"] = ds_in_r["t"][:]

            for vname, var in ds_in_r.variables.items():
                assert isinstance(var, xarray.Variable)
                var = var.squeeze()

                # only interested in (t, x, y) fields
                if var.ndim != 3:
                    print(f"skipping {vname}")
                    continue

                if vname.lower() not in ["t", "time", "lon", "lat"]:
                    print(f"Processing {vname}")
                    # remap each time slice via the precomputed indices
                    var_interpolated = [var[ti].values.flatten()[inds].reshape(lons.shape) for ti in range(var.shape[0])]
                    ds_out[vname] = xarray.DataArray(
                        var_interpolated, dims=("t", "x", "y"),
                        attrs=var.attrs,
                    )

            ds_out.to_netcdf(out_file)
开发者ID:guziy,项目名称:RPN,代码行数:59,代码来源:interpolate_fields_to_the_hles_focus_domain.py
示例16: get_xarray
def get_xarray(self):
    """Open the dataset at self.dataset_path and cache it on the instance.

    On a ValueError (typically unparseable time units) the open is retried
    with time decoding disabled.
    """
    path = self.dataset_path
    try:
        dataset = xr.open_dataset(path)
    except ValueError as err:
        print(datetime_error, err)
        dataset = xr.open_dataset(path, decode_times=False)
        print(success_msg)
    self.dataset = dataset
    return dataset
开发者ID:Reading-eScience-Centre,项目名称:pycovjson,代码行数:8,代码来源:read_netcdf.py
示例17: convert
def convert(self):
    """Open the resource with xarray, preferring remote OpenDAP access and
    falling back to the downloaded file on IOError."""
    import xarray as xr
    try:
        # try OpenDAP url
        dataset = xr.open_dataset(self.url)
    except IOError:
        # download the file
        dataset = xr.open_dataset(self.file)
    return dataset
开发者ID:bird-house,项目名称:birdy,代码行数:8,代码来源:converters.py
示例18: get_xarray_datasets_from_filenames
def get_xarray_datasets_from_filenames(filenames):
    """Open one xarray Dataset per filename.

    Accepts either a single filename or an iterable of filenames and
    always returns a list of datasets.
    """
    if isinstance(filenames, string_types):
        filenames = [filenames]
    return [xarray.open_dataset(name) for name in filenames]
开发者ID:mcgibbon,项目名称:dapper,代码行数:8,代码来源:retrieval.py
示例19: test_image_driver_no_output_file_nans
def test_image_driver_no_output_file_nans(fnames, domain_file):
    '''
    Test that all VIC image driver output files have the same nan structure as
    the domain file
    '''
    domain_ds = xr.open_dataset(domain_file)
    for output_path in fnames:
        output_ds = xr.open_dataset(output_path)
        assert_nan_equal(domain_ds, output_ds)
开发者ID:argansos,项目名称:VIC,代码行数:9,代码来源:test_image_driver.py
示例20: read
def read(self, file_path):
    """Open the netCDF file at file_path, cache it, and record its
    variable names.

    A ValueError (typically unparseable time units) triggers a retry with
    time decoding disabled.
    """
    self.file_path = file_path
    try:
        self.dataset = xr.open_dataset(file_path)
    except ValueError as err:
        print(datetime_error, err)
        self.dataset = xr.open_dataset(file_path, decode_times=False)
        print(success_msg)
    self.var_names = self.get_var_names(self.dataset)
开发者ID:Reading-eScience-Centre,项目名称:pycovjson,代码行数:9,代码来源:read_netcdf.py
注:本文中的xarray.open_dataset函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。 |
请发表评论