
Python xmlutils.load_hadoop_xml_defaults Function Code Examples


This article collects typical usage examples of the Python function sahara.utils.xmlutils.load_hadoop_xml_defaults from the OpenStack sahara project. If you are wondering what load_hadoop_xml_defaults does, or how to call it in practice, the curated code examples below should help.



The sections below present 18 code examples of load_hadoop_xml_defaults, sorted by popularity by default.
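Before the examples, it helps to know what the function returns. Judging from the unit test in Example 5 below, load_hadoop_xml_defaults takes the path of a Hadoop-style configuration XML file (resolved relative to the sahara package) and returns its properties as a list of dicts with 'name', 'value', and 'description' keys. The following is a minimal illustrative sketch of that behavior, not sahara's actual implementation, and the _first_child_text helper is hypothetical:

import xml.dom.minidom


def load_hadoop_xml_defaults(file_name):
    # Illustrative sketch only: parse a Hadoop <configuration> file and
    # return its <property> elements as a list of dicts. Sahara's real
    # implementation also resolves file_name inside the sahara package.
    doc = xml.dom.minidom.parse(file_name)
    configs = []
    for prop in doc.getElementsByTagName('property'):
        configs.append({
            'name': _first_child_text(prop, 'name'),
            'value': _first_child_text(prop, 'value'),
            'description': _first_child_text(prop, 'description'),
        })
    return configs


def _first_child_text(parent, tag):
    # Hypothetical helper: text of the first <tag> child, or '' if the
    # element is missing or empty.
    nodes = parent.getElementsByTagName(tag)
    if not nodes or nodes[0].firstChild is None:
        return ''
    return nodes[0].firstChild.nodeValue.strip()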

Example 1: get_possible_job_config

def get_possible_job_config(job_type):
    if not edp.compare_job_type(job_type, *edp.JOB_TYPES_ALL):
        return None

    if edp.compare_job_type(job_type, edp.JOB_TYPE_JAVA):
        return {'job_config': {'configs': [], 'args': []}}

    if edp.compare_job_type(job_type,
                            edp.JOB_TYPE_MAPREDUCE, edp.JOB_TYPE_PIG):
        # TODO(nmakhotkin): Here we need to return a config based on the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/mapred-default.xml')
        if edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE):
            cfg += xmlutils.load_hadoop_xml_defaults(
                'service/edp/resources/mapred-job-config.xml')
    elif edp.compare_job_type(job_type, edp.JOB_TYPE_HIVE):
        # TODO(nmakhotkin): Here we need to return a config based on the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/hive-default.xml')

    # TODO(tmckay): args should be a list when bug #269968
    # is fixed on the UI side
    config = {'configs': cfg, "args": {}}
    if not edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE,
                                edp.JOB_TYPE_JAVA):
        config.update({'params': {}})
    return {'job_config': config}
Developer: stannie42 | Project: sahara | Lines: 26 | Source: workflow_factory.py


Example 2: get_possible_job_config

def get_possible_job_config(job_type):
    if not edp.compare_job_type(job_type, *get_possible_job_types()):
        return None

    if edp.compare_job_type(job_type, 'Java'):
        return {'job_config': {'configs': [], 'args': []}}

    if edp.compare_job_type(job_type, 'MapReduce', 'Pig'):
        # TODO(nmakhotkin): Here we should return a config based on the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/mapred-default.xml')
        if edp.compare_job_type(job_type, 'MapReduce'):
            cfg += xmlutils.load_hadoop_xml_defaults(
                'service/edp/resources/mapred-job-config.xml')
    elif edp.compare_job_type(job_type, 'Hive'):
        # TODO(nmakhotkin): Here we should return a config based on the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/hive-default.xml')

    # TODO(tmckay): args should be a list when bug #269968
    # is fixed on the UI side
    config = {'configs': cfg, "args": {}}
    if not edp.compare_job_type(job_type, 'MapReduce', 'Java'):
        config.update({'params': {}})
    return {'job_config': config}
Developer: qinweiwei | Project: sahara | Lines: 25 | Source: workflow_factory.py


Example 3: get_possible_job_config

def get_possible_job_config(job_type):
    if not edp.compare_job_type(job_type, *edp.JOB_TYPES_ALL):
        return None

    if edp.compare_job_type(job_type, edp.JOB_TYPE_JAVA):
        return {'job_config': {'configs': [], 'args': []}}

    if edp.compare_job_type(job_type, edp.JOB_TYPE_SHELL):
        return {'job_config': {'configs': [], 'params': {}, 'args': []}}

    if edp.compare_job_type(job_type,
                            edp.JOB_TYPE_MAPREDUCE, edp.JOB_TYPE_PIG):
        cfg = xmlutils.load_hadoop_xml_defaults(
            'service/edp/resources/mapred-default.xml')
        if edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE):
            cfg += get_possible_mapreduce_configs()
    elif edp.compare_job_type(job_type, edp.JOB_TYPE_HIVE):
        cfg = xmlutils.load_hadoop_xml_defaults(
            'service/edp/resources/hive-default.xml')

    config = {'configs': cfg}
    if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG, edp.JOB_TYPE_HIVE):
        config.update({'params': {}})
    if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG):
        config.update({'args': []})
    return {'job_config': config}
Developer: openstack | Project: sahara | Lines: 26 | Source: workflow_factory.py


Example 4: get_possible_hive_config_from

def get_possible_hive_config_from(file_name):
    '''Return the possible configs, args, params for a Hive job.'''
    config = {
        'configs': xmlutils.load_hadoop_xml_defaults(file_name),
        'params': {}
        }
    return config
Developer: AlexanderYAPPO | Project: sahara | Lines: 7 | Source: confighints_helper.py


Example 5: test_load_xml_defaults

    def test_load_xml_defaults(self):
        self.assertEqual(
            [{'name': u'name1', 'value': u'value1', 'description': 'descr1'},
             {'name': u'name2', 'value': u'value2', 'description': 'descr2'},
             {'name': u'name3', 'value': '', 'description': 'descr3'},
             {'name': u'name4', 'value': '', 'description': 'descr4'},
             {'name': u'name5', 'value': u'value5', 'description': ''}],
            x.load_hadoop_xml_defaults(
                'tests/unit/resources/test-default.xml'))
Developer: B-Rich | Project: sahara | Lines: 9 | Source: test_xml_utils.py
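For reference, a test-default.xml fixture that would satisfy the assertion above would look roughly like this; it is reconstructed from the expected output, and the actual fixture shipped in the sahara tree may differ:

<?xml version="1.0"?>
<configuration>
  <property>
    <name>name1</name>
    <value>value1</value>
    <description>descr1</description>
  </property>
  <!-- name2 follows the same pattern as name1 -->
  <property>
    <name>name3</name>
    <description>descr3</description>
  </property>
  <!-- name4 follows the same pattern as name3; a missing or empty
       <value> comes back as '' -->
  <property>
    <name>name5</name>
    <value>value5</value>
  </property>
</configuration>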


Example 6: get_swift_configs

def get_swift_configs():
    configs = x.load_hadoop_xml_defaults("swift/resources/conf-template.xml")
    for conf in configs:
        if conf["name"] == HADOOP_SWIFT_AUTH_URL:
            conf["value"] = su.retrieve_auth_url() + "tokens/"
        if conf["name"] == HADOOP_SWIFT_TENANT:
            conf["value"] = retrieve_tenant()
        if CONF.os_region_name and conf["name"] == HADOOP_SWIFT_REGION:
            conf["value"] = CONF.os_region_name

    result = [cfg for cfg in configs if cfg["value"]]
    LOG.info(_LI("Swift would be integrated with the following "
                 "params: {result}").format(result=result))
    return result
Developer: snowind | Project: sahara | Lines: 13 | Source: swift_helper.py


Example 7: vm_awareness_core_config

def vm_awareness_core_config():
    c = x.load_hadoop_xml_defaults('topology/resources/core-template.xml')
    result = [cfg for cfg in c if cfg['value']]

    if not CONF.enable_hypervisor_awareness:
        # not leveraging 4-layer approach so override template value
        param = next((prop for prop in result
                      if prop['name'] == 'net.topology.impl'), None)
        if param:
            param['value'] = 'org.apache.hadoop.net.NetworkTopology'

    LOG.info("Vm awareness will add following configs in core-site "
             "params: %s", result)
    return result
Developer: B-Rich | Project: sahara | Lines: 14 | Source: topology_helper.py


Example 8: test_create_hadoop_xml

    def test_create_hadoop_xml(self):
        conf = x.load_hadoop_xml_defaults(
            'tests/unit/resources/test-default.xml')
        self.assertEqual(x.create_hadoop_xml({'name1': 'some_val1',
                                              'name2': 2}, conf),
                         """<?xml version="1.0" ?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>name2</name>
    <value>2</value>
  </property>
  <property>
    <name>name1</name>
    <value>some_val1</value>
  </property>
</configuration>
""")
Developer: B-Rich | Project: sahara | Lines: 18 | Source: test_xml_utils.py


Example 9: get_swift_configs

def get_swift_configs():
    configs = x.load_hadoop_xml_defaults('swift/resources/conf-template.xml')
    for conf in configs:
        if conf['name'] == HADOOP_SWIFT_AUTH_URL:
            conf['value'] = su.retrieve_auth_url() + "auth/tokens/"
        if conf['name'] == HADOOP_SWIFT_TENANT:
            conf['value'] = retrieve_tenant()
        if CONF.os_region_name and conf['name'] == HADOOP_SWIFT_REGION:
            conf['value'] = CONF.os_region_name
        if conf['name'] == HADOOP_SWIFT_DOMAIN_NAME:
            # NOTE(jfreud): Don't be deceived here... Even though there is an
            # attribute provided by context called domain_name, it is used for
            # domain scope, and hadoop-swiftfs always authenticates using
            # project scope. The purpose of the setting below is to override
            # the default value for project domain and user domain, domain id
            # as 'default', which may not always be correct.
            # TODO(jfreud): When hadoop-swiftfs allows it, stop hoping that
            # project_domain_name is always equal to user_domain_name.
            conf['value'] = context.current().project_domain_name

    result = [cfg for cfg in configs if cfg['value']]
    LOG.info("Swift would be integrated with the following "
             "params: {result}".format(result=result))
    return result
Developer: openstack | Project: sahara | Lines: 24 | Source: swift_helper.py
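The Swift helpers above produce the list-of-dicts form; to turn final values back into a core-site.xml-style document they pair naturally with xmlutils.create_hadoop_xml, whose two-argument signature is exercised by the test in Example 8. A hedged sketch of that round trip, assuming both functions behave as the examples show:

from sahara.utils import xmlutils as x

# Sketch, not sahara code: load the Swift template, keep only the
# properties that ended up with a value, and render them as Hadoop XML.
defaults = x.load_hadoop_xml_defaults('swift/resources/conf-template.xml')
values = {cfg['name']: cfg['value'] for cfg in defaults if cfg['value']}
core_site_xml = x.create_hadoop_xml(values, defaults)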


Example 10:

from sahara.plugins import utils
from sahara.plugins.vanilla import utils as vu
from sahara.plugins.vanilla.v1_2_1 import mysql_helper as m_h
from sahara.plugins.vanilla.v1_2_1 import oozie_helper as o_h
from sahara.swift import swift_helper as swift
from sahara.topology import topology_helper as topology
from sahara.utils import crypto
from sahara.utils import types as types
from sahara.utils import xmlutils as x


conductor = c.API
LOG = logging.getLogger(__name__)
CONF = cfg.CONF

CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/mapred-default.xml')

HIVE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/hive-default.xml')

# Append Oozie configs for core-site.xml
CORE_DEFAULT += o_h.OOZIE_CORE_DEFAULT

XML_CONFS = {
    "HDFS": [CORE_DEFAULT, HDFS_DEFAULT],
Developer: a9261 | Project: sahara | Lines: 32 | Source: config_helper.py


Example 11:

# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from sahara.plugins import provisioning as p
from sahara.utils import xmlutils as x


CORE_DEFAULT = x.load_hadoop_xml_defaults("plugins/intel/v3_0_2/resources/hadoop-default.xml")

HDFS_DEFAULT = x.load_hadoop_xml_defaults("plugins/intel/v3_0_2/resources/hdfs-default.xml")

YARN_DEFAULT = x.load_hadoop_xml_defaults("plugins/intel/v3_0_2/resources/yarn-default.xml")

OOZIE_DEFAULT = x.load_hadoop_xml_defaults("plugins/intel/v3_0_2/resources/oozie-default.xml")


XML_CONFS = {"Hadoop": [CORE_DEFAULT], "HDFS": [HDFS_DEFAULT], "YARN": [YARN_DEFAULT], "JobFlow": [OOZIE_DEFAULT]}

IDH_TARBALL_URL = p.Config(
    "IDH tarball URL",
    "general",
    "cluster",
    priority=1,
Developer: qinweiwei | Project: sahara | Lines: 31 | Source: config_helper.py


Example 12: vm_awareness_mapred_config

def vm_awareness_mapred_config():
    c = x.load_hadoop_xml_defaults('topology/resources/mapred-template.xml')
    result = [cfg for cfg in c if cfg['value']]
    LOG.info("Vm awareness will add following configs in map-red "
             "params: %s", result)
    return result
Developer: B-Rich | Project: sahara | Lines: 6 | Source: topology_helper.py


Example 13:

# limitations under the License.

from oslo.config import cfg

from sahara import exceptions as ex
from sahara.openstack.common import log as logging
from sahara.plugins import provisioning as p
from sahara.utils import types as types
from sahara.utils import xmlutils as x

CONF = cfg.CONF
CONF.import_opt("enable_data_locality", "sahara.topology.topology_helper")

LOG = logging.getLogger(__name__)

CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/mapred-default.xml')

YARN_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/yarn-default.xml')

OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/oozie-default.xml')

XML_CONFS = {
    "Hadoop": [CORE_DEFAULT],
Developer: qinweiwei | Project: sahara | Lines: 32 | Source: config_helper.py


Example 14: read_default_swift_configs

def read_default_swift_configs():
    return x.load_hadoop_xml_defaults('swift/resources/conf-template.xml')
Developer: Imperat | Project: sahara | Lines: 2 | Source: swift_helper.py


Example 15:

from oslo.config import cfg

from sahara import conductor as c
from sahara.openstack.common import log as logging
from sahara.plugins.general import utils
from sahara.plugins import provisioning as p
from sahara.topology import topology_helper as topology
from sahara.utils import types as types
from sahara.utils import xmlutils as x


conductor = c.API
LOG = logging.getLogger(__name__)
CONF = cfg.CONF

CORE_DEFAULT = x.load_hadoop_xml_defaults("plugins/spark/resources/core-default.xml")

HDFS_DEFAULT = x.load_hadoop_xml_defaults("plugins/spark/resources/hdfs-default.xml")

XML_CONFS = {"HDFS": [CORE_DEFAULT, HDFS_DEFAULT]}

SPARK_CONFS = {
    "Spark": {
        "OPTIONS": [
            {
                "name": "Master port",
                "description": "Start the master on a different port" " (default: 7077)",
                "default": "7077",
                "priority": 2,
            },
            {
Developer: JohannaMW | Project: sahara | Lines: 31 | Source: config_helper.py


Example 16: get_possible_mapreduce_configs

def get_possible_mapreduce_configs():
    '''Return a list of possible configuration values for MapReduce jobs.'''
    cfg = xmlutils.load_hadoop_xml_defaults(
        'service/edp/resources/mapred-job-config.xml')
    return cfg
Developer: openstack | Project: sahara | Lines: 5 | Source: workflow_factory.py


Example 17:

# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from oslo_config import cfg
from oslo_log import log as logging

from sahara.plugins.vanilla.hadoop2 import config_helper as c_helper
from sahara.utils import xmlutils as x

CONF = cfg.CONF
CONF.import_opt("enable_data_locality", "sahara.topology.topology_helper")

LOG = logging.getLogger(__name__)

CORE_DEFAULT = x.load_hadoop_xml_defaults("plugins/vanilla/v2_6_0/resources/core-default.xml")

HDFS_DEFAULT = x.load_hadoop_xml_defaults("plugins/vanilla/v2_6_0/resources/hdfs-default.xml")

MAPRED_DEFAULT = x.load_hadoop_xml_defaults("plugins/vanilla/v2_6_0/resources/mapred-default.xml")

YARN_DEFAULT = x.load_hadoop_xml_defaults("plugins/vanilla/v2_6_0/resources/yarn-default.xml")

OOZIE_DEFAULT = x.load_hadoop_xml_defaults("plugins/vanilla/v2_6_0/resources/oozie-default.xml")

HIVE_DEFAULT = x.load_hadoop_xml_defaults("plugins/vanilla/v2_6_0/resources/hive-default.xml")

XML_CONFS = {
    "Hadoop": [CORE_DEFAULT],
    "HDFS": [HDFS_DEFAULT],
    "YARN": [YARN_DEFAULT],
Developer: egafford | Project: sahara | Lines: 31 | Source: config_helper.py


Example 18: get_oozie_required_xml_configs

# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from sahara.utils import xmlutils as x


OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/oozie-default.xml')

OOZIE_CORE_DEFAULT = [
    {
        'name': 'hadoop.proxyuser.hadoop.hosts',
        'value': "localhost"
    },
    {
        'name': 'hadoop.proxyuser.hadoop.groups',
        'value': 'hadoop'
    }]

OOZIE_HEAPSIZE_DEFAULT = "CATALINA_OPTS -Xmx1024m"


def get_oozie_required_xml_configs():
Developer: AlexanderYAPPO | Project: sahara | Lines: 32 | Source: oozie_helper.py



Note: the sahara.utils.xmlutils.load_hadoop_xml_defaults examples in this article were compiled by 纯净天空 from source code hosted on GitHub, MSDocs, and similar platforms. The snippets are selected from open-source projects contributed by their original authors; copyright remains with those authors, and any use or redistribution should follow the corresponding project's license. Do not reproduce this article without permission.

