• 设为首页
  • 点击收藏
  • 手机版
    手机扫一扫访问
    迪恩网络手机版
  • 关注官方公众号
    微信扫一扫关注
    迪恩网络公众号

Python xarray.concat函数代码示例

原作者: [db:作者] 来自: [db:来源] 收藏 邀请

本文整理汇总了Python中xarray.concat函数的典型用法代码示例。如果您正苦于以下问题:Python concat函数的具体用法?Python concat怎么用?Python concat使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。



在下文中一共展示了concat函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。

示例1: test_concat

    def test_concat(self):
        """Exercise Dataset concat: round-tripping isel/groupby splits,
        the `coords` modes, and precedence of the `dim` argument over a
        same-named dataset variable."""
        # TODO: simplify and split this test case

        # drop the third dimension to keep things relatively understandable
        data = create_test_data()
        for k in list(data):
            if 'dim3' in data[k].dims:
                del data[k]

        # concatenating the two halves of an isel split must reproduce
        # the original dataset
        split_data = [data.isel(dim1=slice(3)),
                      data.isel(dim1=slice(3, None))]
        self.assertDatasetIdentical(data, concat(split_data, 'dim1'))

        def rectify_dim_order(dataset):
            # return a new dataset with all variable dimensions transposed into
            # the order in which they are found in `data`
            return Dataset(dict((k, v.transpose(*data[k].dims))
                                for k, v in iteritems(dataset.data_vars)),
                           dataset.coords, attrs=dataset.attrs)

        # squeeze=False keeps the groupby dimension, so re-concatenating
        # the groups restores the dataset exactly
        for dim in ['dim1', 'dim2']:
            datasets = [g for _, g in data.groupby(dim, squeeze=False)]
            self.assertDatasetIdentical(data, concat(datasets, dim))

        # NOTE: deliberately reuses `datasets` from the last loop iteration
        # above (dim == 'dim2'); a DataArray can serve as the `dim` argument
        dim = 'dim2'
        self.assertDatasetIdentical(
            data, concat(datasets, data[dim]))
        self.assertDatasetIdentical(
            data, concat(datasets, data[dim], coords='minimal'))

        # with squeeze=True the groups lose the concat dimension, so the
        # result may come back with transposed dims; rectify before comparing
        datasets = [g for _, g in data.groupby(dim, squeeze=True)]
        concat_over = [k for k, v in iteritems(data.coords)
                       if dim in v.dims and k != dim]
        actual = concat(datasets, data[dim], coords=concat_over)
        self.assertDatasetIdentical(data, rectify_dim_order(actual))

        actual = concat(datasets, data[dim], coords='different')
        self.assertDatasetIdentical(data, rectify_dim_order(actual))

        # make sure the coords argument behaves as expected
        data.coords['extra'] = ('dim4', np.arange(3))
        for dim in ['dim1', 'dim2']:
            datasets = [g for _, g in data.groupby(dim, squeeze=True)]
            # 'all' broadcasts 'extra' along the concat dimension ...
            actual = concat(datasets, data[dim], coords='all')
            expected = np.array([data['extra'].values
                                 for _ in range(data.dims[dim])])
            self.assertArrayEqual(actual['extra'].values, expected)

            # ... while 'different' and 'minimal' leave it unchanged
            actual = concat(datasets, data[dim], coords='different')
            self.assertDataArrayEqual(data['extra'], actual['extra'])
            actual = concat(datasets, data[dim], coords='minimal')
            self.assertDataArrayEqual(data['extra'], actual['extra'])

        # verify that the dim argument takes precedence over
        # concatenating dataset variables of the same name
        dim = (2 * data['dim1']).rename('dim1')
        datasets = [g for _, g in data.groupby('dim1', squeeze=False)]
        expected = data.copy()
        expected['dim1'] = dim
        self.assertDatasetIdentical(expected, concat(datasets, dim))
开发者ID:CCI-Tools,项目名称:xarray,代码行数:60,代码来源:test_combine.py


示例2: test_concat_size0

    def test_concat_size0(self):
        """Concatenating a zero-length slice with a dataset is a no-op,
        regardless of argument order."""
        full = create_test_data()
        empty = full.isel(dim1=slice(0, 0))
        for pieces in ([empty, full], [full, empty]):
            result = concat(pieces, 'dim1')
            self.assertDatasetIdentical(full, result)
开发者ID:CCI-Tools,项目名称:xarray,代码行数:8,代码来源:test_combine.py


示例3: multi_concat

def multi_concat(results, dims):
    """Recursively concatenate a nested list of xarray objects, one
    dimension per nesting level (outermost list maps to dims[0])."""
    if len(dims) == 1:
        # base case: a flat list, joined along the single remaining dim
        return xr.concat(results, dim=dims[0])
    # join each sub-list along the inner dims first, then the outer dim
    inner = [multi_concat(sub_results, dims[1:])
             for sub_results in results]
    return xr.concat(inner, dim=dims[0])
开发者ID:jcmgray,项目名称:xyzpy,代码行数:8,代码来源:combo_runner.py


示例4: test_concat_coords

 def test_concat_coords(self):
     """The `coords` argument controls whether the non-index coordinate
     `c` is concatenated or required to be equal across inputs."""
     data = Dataset({"foo": ("x", np.random.randn(10))})
     expected = data.assign_coords(c=("x", [0] * 5 + [1] * 5))
     first = data.isel(x=slice(5)).assign_coords(c=0)
     second = data.isel(x=slice(5, None)).assign_coords(c=1)
     objs = [first, second]
     # these modes concatenate the differing scalar `c` along x
     for coords in ["different", "all", ["c"]]:
         self.assertDatasetIdentical(expected,
                                     concat(objs, dim="x", coords=coords))
     # these modes require `c` to be equal, so they must raise
     for coords in ["minimal", []]:
         with self.assertRaisesRegexp(ValueError, "not equal across"):
             concat(objs, dim="x", coords=coords)
开发者ID:spencerahill,项目名称:xarray,代码行数:10,代码来源:test_combine.py


示例5: test_concat_constant_index

 def test_concat_constant_index(self):
     """GH425: a scalar coordinate shared by both inputs becomes a
     length-2 index along the concat dimension."""
     ds1 = Dataset({"foo": 1.5}, {"y": 1})
     ds2 = Dataset({"foo": 2.5}, {"y": 1})
     expected = Dataset({"foo": ("y", [1.5, 2.5]), "y": [1, 1]})
     for mode in ["different", "all", ["foo"]]:
         result = concat([ds1, ds2], "y", data_vars=mode)
         self.assertDatasetIdentical(expected, result)
     # 'minimal' refuses to concatenate unequal data variables
     with self.assertRaisesRegexp(ValueError, "not equal across datasets"):
         concat([ds1, ds2], "y", data_vars="minimal")
开发者ID:spencerahill,项目名称:xarray,代码行数:10,代码来源:test_combine.py


示例6: test_concat_encoding

 def test_concat_encoding(self):
     """Regression test for GH1297: concat must carry over the encoding
     of its first DataArray/Dataset argument."""
     rand = np.random.random
     ds = Dataset({'foo': (['x', 'y'], rand((2, 3))),
                   'bar': (['x', 'y'], rand((2, 3)))},
                  {'x': [0, 1]})
     foo = ds['foo']
     foo.encoding = {"complevel": 5}
     ds.encoding = {"unlimited_dims": 'x'}
     # encoding survives for DataArrays and for Datasets alike
     assert concat([foo, foo], dim="x").encoding == foo.encoding
     assert concat([ds, ds], dim="x").encoding == ds.encoding
开发者ID:benbovy,项目名称:xarray,代码行数:10,代码来源:test_combine.py


示例7: test_concat_do_not_promote

    def test_concat_do_not_promote(self):
        """GH438: concat along 't' must not promote the scalar coord 'x'
        to a data variable; conflicting 'x' values are an error."""
        # identical scalar coords: concat succeeds, 'x' stays a scalar coord
        matching = [Dataset({"y": ("t", [1])}, {"x": 1}),
                    Dataset({"y": ("t", [2])}, {"x": 1})]
        expected = Dataset({"y": ("t", [1, 2])}, {"x": 1, "t": [0, 0]})
        self.assertDatasetIdentical(expected, concat(matching, "t"))

        # conflicting scalar coords: 'minimal' mode refuses to concatenate
        clashing = [Dataset({"y": ("t", [1])}, {"x": 1}),
                    Dataset({"y": ("t", [2])}, {"x": 2})]
        with self.assertRaises(ValueError):
            concat(clashing, "t", coords="minimal")
开发者ID:spencerahill,项目名称:xarray,代码行数:10,代码来源:test_combine.py


示例8: test_concat

    def test_concat(self):
        """DataArray concat: stacking along a new dim, round-tripping
        groupby splits, Index/list `dim` arguments, and error modes."""
        ds = Dataset({"foo": (["x", "y"], np.random.random((10, 20))), "bar": (["x", "y"], np.random.random((10, 20)))})
        foo = ds["foo"]
        bar = ds["bar"]

        # stacking two arrays introduces a brand-new leading dimension
        expected = DataArray(np.array([foo.values, bar.values]), dims=["w", "x", "y"])
        self.assertDataArrayEqual(expected, concat([foo, bar], "w"))

        # re-assembling groupby pieces reproduces the original array ...
        grouped = [g for _, g in foo.groupby("x")]
        self.assertDataArrayIdentical(foo, concat(grouped, ds["x"]))
        # ... and a pandas Index also works as the 'dim' argument
        self.assertDataArrayIdentical(foo, concat(grouped, ds.indexes["x"]))

        # an explicit pd.Index and a plain list both yield 'concat_dim'
        expected = foo[:2].rename({"x": "concat_dim"})
        actual = concat([foo[0], foo[1]], pd.Index([0, 1])).reset_coords(drop=True)
        self.assertDataArrayIdentical(expected, actual)
        actual = concat([foo[0], foo[1]], [0, 1]).reset_coords(drop=True)
        self.assertDataArrayIdentical(expected, actual)

        with self.assertRaisesRegexp(ValueError, "not identical"):
            concat([foo, bar], dim="w", compat="identical")

        # data_vars is a Dataset-only option; DataArrays must reject it
        with self.assertRaisesRegexp(ValueError, "not a valid argument"):
            concat([foo, bar], dim="w", data_vars="minimal")
开发者ID:spencerahill,项目名称:xarray,代码行数:30,代码来源:test_combine.py


示例9: test_concat_coords

 def test_concat_coords(self):
     """Check every value of the `coords` keyword against a dataset whose
     halves carry differing values for the scalar coordinate 'c'."""
     data = Dataset({'foo': ('x', np.random.randn(10))})
     expected = data.assign_coords(c=('x', [0] * 5 + [1] * 5))
     objs = [data.isel(x=slice(5)).assign_coords(c=0),
             data.isel(x=slice(5, None)).assign_coords(c=1)]
     succeeding = ['different', 'all', ['c']]
     failing = ['minimal', []]
     for coords in succeeding:
         self.assertDatasetIdentical(expected,
                                     concat(objs, dim='x', coords=coords))
     for coords in failing:
         with self.assertRaisesRegexp(ValueError, 'not equal across'):
             concat(objs, dim='x', coords=coords)
开发者ID:CCI-Tools,项目名称:xarray,代码行数:11,代码来源:test_combine.py


示例10: test_auto_combine_2d

    def test_auto_combine_2d(self):
        """auto_combine over a 2x3 nested list must match a manual concat
        along 'dim1' followed by 'dim2'."""
        ds = create_test_data  # factory alias: ds(i) builds test dataset i

        # build the expectation column-by-column, then join the columns
        columns = [concat([ds(i), ds(i + 3)], dim='dim1') for i in range(3)]
        expected = concat(columns, dim='dim2')

        datasets = [[ds(0), ds(1), ds(2)], [ds(3), ds(4), ds(5)]]
        result = auto_combine(datasets, concat_dim=['dim1', 'dim2'])

        assert_equal(result, expected)
开发者ID:benbovy,项目名称:xarray,代码行数:12,代码来源:test_combine.py


示例11: test_concat_twice

    def test_concat_twice(self, create_combined_ids, concat_dim):
        """_combine_nd over a 2x3 grid equals concat along 'dim1' and then
        along the parametrized second dimension."""
        combined_ids = create_combined_ids((2, 3))
        result = _combine_nd(combined_ids, concat_dims=['dim1', concat_dim])

        ds = create_test_data  # factory alias: ds(i) builds test dataset i
        pieces = [concat([ds(i), ds(i + 3)], dim='dim1') for i in range(3)]
        expected = concat(pieces, dim=concat_dim)

        assert_equal(result, expected)
开发者ID:benbovy,项目名称:xarray,代码行数:12,代码来源:test_combine.py


示例12: add_cyclic

def add_cyclic(varin, dim='nlon'):
    """Append a cyclic (wrap-around) point to CESM data along `dim`.

    The first element along `dim` is re-appended at the end, making the
    axis periodic (e.g. closing a longitude ring). The xarray datatype of
    the input is preserved.

    Parameters
    ----------
    varin : xarray object
        Input data with a dimension named `dim`.
    dim : str, optional
        Dimension to wrap (default 'nlon').

    Returns
    -------
    Same type as `varin`, one element longer along `dim`.

    Raises
    ------
    ValueError
        If `varin` has no dimension named `dim`.
    """
    # Generalized from a hard-coded nlon/nlat/dim_0/dim_1 elif chain:
    # indexing with a keyword dict works for any dimension name, and an
    # unknown dimension now raises instead of silently returning None.
    if dim not in varin.dims:
        raise ValueError("dimension %r not found in input" % dim)
    return xr.concat([varin, varin.isel(**{dim: 0})], dim=dim)
开发者ID:LABclimate,项目名称:MT,代码行数:12,代码来源:CESM_utils_conv.py


示例13: test_concat_do_not_promote

    def test_concat_do_not_promote(self):
        """GH438 (with explicit 't' coords): concat keeps the scalar 'x'
        coordinate out of the data variables; differing 'x' raises."""
        same_x = [Dataset({'y': ('t', [1])}, {'x': 1, 't': [0]}),
                  Dataset({'y': ('t', [2])}, {'x': 1, 't': [0]})]
        expected = Dataset({'y': ('t', [1, 2])}, {'x': 1, 't': [0, 0]})
        self.assertDatasetIdentical(expected, concat(same_x, 't'))

        diff_x = [Dataset({'y': ('t', [1])}, {'x': 1, 't': [0]}),
                  Dataset({'y': ('t', [2])}, {'x': 2, 't': [0]})]
        with self.assertRaises(ValueError):
            concat(diff_x, 't', coords='minimal')
开发者ID:CCI-Tools,项目名称:xarray,代码行数:12,代码来源:test_combine.py


示例14: file_loop

def file_loop(passit):
    """Process one 'cores_' netCDF file: wavelet-filter each daily 'tir'
    image into dominant power blobs and write the blobs plus the original
    temperatures to a sibling 'coresPower_' file.

    Parameters
    ----------
    passit : str
        Path to the input netCDF file. Characters [6:9] of the path are
        used as the dataset identifier (assumes a fixed naming
        convention -- TODO confirm against the caller).
    """

    ds = xr.open_dataset(passit)

    # dataset id taken from a fixed position in the path string
    dataset = passit[6:9]

    ds['tir'].values = ds['tir']

    bloblist = []
    tirlist = []
    lat = ds.lat
    lon = ds.lon

    # iterate over the time dimension of 'tir', one 2D image per step
    for ids, day in enumerate(ds['tir']):

        print('id', ids)

        date = day.time
        # presumably values are stored scaled by 100 (they are re-multiplied
        # by 100 before saving below) -- TODO confirm
        day.values = day / 100
        # skip all-zero (empty) images
        if np.sum(day.values) == 0:
            continue
        img, nogood, t_thresh_size, t_thresh_cut, pix_nb = powerBlob_utils.filter_img(day.values, 5)

        power = util.waveletT(img, dataset='METEOSAT5K_vera')
        power_out = powerBlob_utils.find_scales_dominant(power, nogood, dataset=dataset)

        # no dominant scales found for this image
        if power_out is None:
            continue

        # restore the original integer scaling for storage
        new_savet = (day.values*100).astype(np.int16)
        bloblist.append(xr.DataArray(power_out.astype(np.int16), coords={'time': date, 'lat': lat, 'lon': lon},
                                     dims=['lat', 'lon']))  # [np.newaxis, :])
        tirlist.append(xr.DataArray(new_savet, coords={'time': date, 'lat': lat, 'lon': lon}, dims=['lat', 'lon']))

    # assemble the per-day results back into time-indexed variables
    ds_mfg = xr.Dataset()
    ds_mfg['blobs'] = xr.concat(bloblist, 'time')
    ds_mfg['tir'] = xr.concat(tirlist, 'time')
    # NOTE(review): .sel returns a new object that is discarded here -- if a
    # spatial subset was intended, the result should be assigned back; confirm
    ds_mfg.sel(lat=slice(5, 12), lon=slice(-13, 13))


    # overwrite any existing output file
    savefile = passit.replace('cores_', 'coresPower_')
    try:
        os.remove(savefile)
    except OSError:
        pass

    # compress every data variable with zlib level 5
    comp = dict(zlib=True, complevel=5)
    enc = {var: comp for var in ds_mfg.data_vars}

    ds_mfg.to_netcdf(path=savefile, mode='w', encoding=enc, format='NETCDF4')

    print('Saved ' + savefile)
开发者ID:cornkle,项目名称:proj_CEH,代码行数:52,代码来源:saveCore_powerBlobs_fromNetcdf.py


示例15: add_to_slice

def add_to_slice(da, dim, sl, value):
    """Return a copy of `da` with `value` added at the single position
    `sl` along dimension `dim`.

    Parameters
    ----------
    da : xarray.DataArray
        Input array; not modified in place.
    dim : str
        Name of the dimension to index along.
    sl : int
        Integer position to modify. ``-1`` addresses the last element;
        other negative positions are rejected because they break the
        slice arithmetic below.
    value : scalar or array-like
        Amount added to the selected slice.

    Raises
    ------
    RuntimeError
        If `sl` is smaller than -1.
    """
    # Validate before doing any slicing work (the original computed the
    # before/middle/after pieces first and only then checked the argument).
    if sl < -1:
        raise RuntimeError('slice can not be smaller value than -1')
    # Split into before / target / after; either end may be empty when sl
    # addresses the first or last element.
    before = da[{dim: slice(0, sl)}]
    middle = da[{dim: sl}]
    if sl == -1:
        # slice(sl + 1, None) would wrap to the whole array, so omit 'after'
        return xr.concat([before, middle + value], dim=dim)
    after = da[{dim: slice(sl + 1, None)}]
    return xr.concat([before, middle + value, after], dim=dim)
开发者ID:jamesp,项目名称:xgcm,代码行数:14,代码来源:grid.py


示例16: pp_tile

def pp_tile(config, timestamp, coordinate_templates, drop_list, tile):
    """
    Post-process a rectangular tile of cells.

    **Arguments:**

    * config
        A `~scmtiles.config.SCMTilesConfig` instance describing the run being
        post-processed.

    * timestamp
        A string timestamp used as part of the filename for the cell output
        files.

    * coordinate_templates
        A dictionary mapping coordinate names to xarray coordinate objects, as
        returned from `load_coorindate_templates`. This is used to lookup the
        latitude and longitude of the cells from their indices.

    * drop_list
        A collection of variable names to drop, forwarded to `pp_cell` for
        each cell.

    * tile
        A `~scmtiles.grid_manager.RectangularTile` instance describing the tile
        to process.

    **Returns:**

    * (tile_ds, filepaths)
        An `xarray.Dataset` representing the tile, and a list of paths to the
        files that were loaded to form the tile.

    """
    # Group the per-cell datasets by their global row index (y), keeping
    # the filepath of every file that was read.
    grid_rows = OrderedDict()
    filepaths = []
    for cell in tile.cells():
        cell_ds, cell_filepath = pp_cell(cell, timestamp, coordinate_templates,
                                         drop_list, config)
        try:
            grid_rows[cell.y_global].append(cell_ds)
        except KeyError:
            # first cell seen for this row
            grid_rows[cell.y_global] = [cell_ds]
        filepaths.append(cell_filepath)
    # Concatenate each row of cells along the x dimension ...
    for key, row in grid_rows.items():
        grid_rows[key] = xr.concat(row, dim=config.xname)
    # ... then stack the rows along the y dimension (single-row tiles
    # need no second concat).
    if len(grid_rows) > 1:
        tile_ds = xr.concat(grid_rows.values(), dim=config.yname)
    else:
        tile_ds, = grid_rows.values()
    logger = logging.getLogger('PP')
    logger.info('processing of tile #{} completed'.format(tile.id))
    return tile_ds, filepaths
开发者ID:aopp-pred,项目名称:openifs-scmtiles,代码行数:49,代码来源:openifs_pp_main.py


示例17: month_count

def month_count():
    """Build a monthly count of cold-cloud pixels from yearly GRIDSAT
    West-Africa files (1983-2017) and cache the aggregate as netCDF.

    Skips all work if the output file already exists. Thresholds below
    appear to select brightness temperatures in [-108, -70] -- TODO
    confirm the units/scaling against the upstream GRIDSAT files.
    """

    years = list(range(1983, 2018))

    msg_folder = cnst.GRIDSAT
    fname = 'aggs/gridsat_WA_-70_monthly_count.nc' #65_monthly_count_-40base_15-21UTC_1000km2.nc'

    # only (re)build the aggregate if it has not been written before
    if not os.path.isfile(msg_folder + fname):
        da = None
        for y in years:
            y = str(y)
            da1 = xr.open_dataset(cnst.GRIDSAT + 'gridsat_WA_' + y + '.nc')  # _-40_1000km2_15-21UTC
            print('Doing ' + y)
            # presumably stored scaled by 100 -- TODO confirm
            da1['tir'].values = da1['tir'].values/100
            # keep only values in the [-108, -70] band; everything else -> NaN
            da1['tir'] = da1['tir'].where((da1['tir'] <= -70) & (da1['tir'] >= -108)) #-65
            # mark the surviving (cold) pixels as 1 so the sum is a count
            da1['tir'].values[da1['tir'].values < -70] = 1

            # monthly count of flagged pixels
            da1 = da1.resample(time='m').sum('time')
            try:
                da = xr.concat([da, da1], 'time')
            except TypeError:
                # first iteration: da is still None, so concat fails
                da = da1.copy()

        enc = {'tir': {'complevel': 5, 'zlib': True}}
        da.to_netcdf(msg_folder + fname, encoding=enc)
开发者ID:cornkle,项目名称:proj_CEH,代码行数:25,代码来源:gridsat_postproc.py


示例18: test_update_add_data_to_coords

 def test_update_add_data_to_coords(self):
     """pp.update with an array carrying a new timestamp must behave like
     an xr.concat along the 'time' dimension."""
     addition = self.array.copy()
     addition['time'] = [datetime.datetime.utcnow(), ]
     expected = xr.concat([self.array, addition], dim='time')
     self.array.pp.grid = self.grid
     updated = self.array.pp.update(addition)
     np.testing.assert_equal(updated.values, expected.values)
开发者ID:maestrotf,项目名称:pymepps,代码行数:7,代码来源:test_spatial.py


示例19: test_concat_multiindex

 def test_concat_multiindex(self):
     """Splitting a MultiIndex coordinate and concatenating the pieces
     must restore an identical MultiIndex."""
     midx = pd.MultiIndex.from_product([[1, 2, 3], ['a', 'b']])
     expected = Dataset({'x': midx})
     halves = [expected.isel(x=slice(2)), expected.isel(x=slice(2, None))]
     actual = concat(halves, 'x')
     assert expected.equals(actual)
     assert isinstance(actual.x.to_index(), pd.MultiIndex)
开发者ID:CCI-Tools,项目名称:xarray,代码行数:7,代码来源:test_combine.py


示例20: open_mfxr

def open_mfxr(files, dim='TIME', transform_func=None):
    """
    Load multiple MAR files into a single xarray object, performing some
    aggregation first to make this computationally feasible.

    E.g. select a single DataArray to examine.

    # you might also use indexing operations like .sel to subset datasets
    comb = open_mfxr('MAR*.nc', dim='TIME',
                 transform_func=lambda ds: ds.AL)

    Based on http://xray.readthedocs.io/en/v0.7.1/io.html#combining-multiple-files
    See also http://xray.readthedocs.io/en/v0.7.1/dask.html

    Parameters
    ----------
    files : str
        Glob pattern matching the files to open.
    dim : str, optional
        Dimension to concatenate along (default 'TIME').
    transform_func : callable, optional
        Applied to each dataset before concatenation (selection or
        aggregation) to keep memory use manageable.
    """

    def process_one_path(path):
        # open lazily, chunked along TIME for dask-backed access
        ds = open_xr(path,chunks={'TIME':366})
        # transform_func should do some sort of selection or
        # aggregation
        if transform_func is not None:
            ds = transform_func(ds)
        # NOTE(review): the upstream recipe calls ds.load() here so data
        # survives closing the source file, but no load happens in this
        # version -- confirm whether lazy (dask) access is intended
        return ds

    # sort so concatenation order is deterministic across filesystems
    paths = sorted(glob(files))
    datasets = [process_one_path(p) for p in paths]
    combined = xr.concat(datasets, dim)
    return combined
开发者ID:atedstone,项目名称:mar_raster,代码行数:31,代码来源:mar_raster.py



注:本文中的xarray.concat函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


鲜花

握手

雷人

路过

鸡蛋
该文章已有0人参与评论

请发表评论

全部评论

专题导读
上一篇:
Python xarray.merge函数代码示例发布时间:2022-05-26
下一篇:
Python xapian.sortable_unserialise函数代码示例发布时间:2022-05-26
热门推荐
阅读排行榜

扫描微信二维码

查看手机版网站

随时了解更新最新资讯

139-2527-9053

在线客服(服务时间 9:00~18:00)

在线QQ客服
地址:深圳市南山区西丽大学城创智工业园
电邮:jeky_zhao#qq.com
移动电话:139-2527-9053

Powered by 互联科技 X3.4© 2001-2023 极客世界.|Sitemap