本文整理汇总了Python中resource_management.libraries.functions.version.format_hdp_stack_version函数的典型用法代码示例。如果您正苦于以下问题:Python format_hdp_stack_version函数的具体用法?Python format_hdp_stack_version怎么用?Python format_hdp_stack_version使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了format_hdp_stack_version函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: actionexecute
def actionexecute(self, env):
  """
  Custom-action entry point: points every HDP component symlink at the
  requested stack version via `hdp-select set all`, then (on HDP 2.3+)
  symlinks /etc/[component]/conf into /usr/hdp/current.

  Raises Fail when /commandParams/version is not supplied.
  """
  # NOTE(review): the scraper stripped indentation; nesting below is
  # reconstructed conventionally — verify against the upstream ru_set_all.py.
  config = Script.get_config()

  version = default('/commandParams/version', None)
  stack_name = default('/hostLevelParams/stack_name', "")

  if not version:
    raise Fail("Value is required for '/commandParams/version'")

  # other os?
  if OSCheck.is_redhat_family():
    # flush yum metadata so the newly registered stack repo is visible
    cmd = ('/usr/bin/yum', 'clean', 'all')
    code, out = shell.call(cmd, sudo=True)

  min_ver = format_hdp_stack_version("2.2")
  real_ver = format_hdp_stack_version(version)

  if stack_name == "HDP":
    # hdp-select only exists on HDP 2.2 and later
    if compare_versions(real_ver, min_ver) >= 0:
      cmd = ('hdp-select', 'set', 'all', version)
      code, out = shell.call(cmd, sudo=True)

    if compare_versions(real_ver, format_hdp_stack_version("2.3")) >= 0:
      # backup the old and symlink /etc/[component]/conf to /usr/hdp/current/[component]
      for k, v in conf_select.PACKAGE_DIRS.iteritems():
        for dir_def in v:
          link_config(dir_def['conf_dir'], dir_def['current_dir'])
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:26,代码来源:ru_set_all.py
示例2: set_version
def set_version(self):
  """
  Invokes `hdp-select set <component> <version>` for this script's component,
  but only when the stack is HDP >= 2.2.0.0 (hdp-select does not exist on
  older stacks) and the component mapping, stack name, and version are known.
  """
  from resource_management.libraries.functions.default import default
  stack_name = default("/hostLevelParams/stack_name", None)
  version = default("/commandParams/version", None)
  stack_version_unformatted = str(default("/hostLevelParams/stack_version", ""))
  # Normalize once.  The original passed this already-formatted value through
  # format_hdp_stack_version() a second time in the comparison below, which
  # was redundant (formatting is idempotent).
  hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
  stack_to_component = self.get_stack_to_component()
  if stack_to_component:
    # map the stack name to this script's component, if one is registered
    component_name = stack_to_component[stack_name] if stack_name in stack_to_component else None
    if component_name and stack_name and version and \
       compare_versions(hdp_stack_version, '2.2.0.0') >= 0:
      Execute(('/usr/bin/hdp-select', 'set', component_name, version),
              sudo = True)
开发者ID:fanzhidongyzby,项目名称:ambari,代码行数:13,代码来源:script.py
示例3: _create_config_links_if_necessary
def _create_config_links_if_necessary(self, stack_id, stack_version):
  """
  Sets up the required structure for /etc/<component>/conf symlinks and /usr/hdp/current
  configuration symlinks IFF the current stack is < HDP 2.3+ and the new stack is >= HDP 2.3

  stack_id: stack id, ie HDP-2.3
  stack_version: version to set, ie 2.3.0.0-1234
  """
  if stack_id is None:
    Logger.info("Cannot create config links when stack_id is not defined")
    return

  args = stack_id.upper().split('-')
  if len(args) != 2:
    Logger.info("Unrecognized stack id {0}, cannot create config links".format(stack_id))
    return

  if args[0] != "HDP":
    Logger.info("Unrecognized stack name {0}, cannot create config links".format(args[0]))
    # BUG FIX: the original omitted this return, so a non-HDP stack fell
    # through and had config links created anyway despite the warning.
    return

  if compare_versions(format_hdp_stack_version(args[1]), "2.3.0.0") < 0:
    Logger.info("Configuration symlinks are not needed for {0}, only HDP-2.3+".format(stack_version))
    return

  for package_name, directories in conf_select.PACKAGE_DIRS.iteritems():
    # if already on HDP 2.3, then we should skip making conf.backup folders
    if self.current_hdp_stack_version and compare_versions(self.current_hdp_stack_version, '2.3') >= 0:
      Logger.info("The current cluster stack of {0} does not require backing up configurations; "
        "only conf-select versioned config directories will be created.".format(stack_version))
      # only link configs for all known packages
      conf_select.link_component_conf_to_versioned_config(package_name, stack_version)
    else:
      # link configs and create conf.backup folders for all known packages
      conf_select.convert_conf_directories_to_symlinks(package_name, stack_version, directories,
        skip_existing_links = False, link_to = "backup")
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:35,代码来源:install_packages.py
示例4: pre_rolling_restart
def pre_rolling_restart(self, env):
  """Repoint Spark Thrift Server conf and binary symlinks before a
  rolling-upgrade restart (only applies to HDP >= 2.3.2.0)."""
  import params
  env.set_params(params)

  if not params.version:
    return
  target = format_hdp_stack_version(params.version)
  if compare_versions(target, '2.3.2.0') >= 0:
    conf_select.select(params.stack_name, "spark", params.version)
    hdp_select.select("spark-thriftserver", params.version)
开发者ID:zouzhberk,项目名称:ambaridemo,代码行数:7,代码来源:spark_thrift_server.py
示例5: pre_rolling_restart
def pre_rolling_restart(self, env):
  """Before a rolling-upgrade restart of the Spark history server, switch the
  hdp-select pointer and push the tez tarball to HDFS (HDP >= 2.2.0.0 only)."""
  import params
  env.set_params(params)

  # nothing to do on stacks that predate hdp-select
  if not params.version:
    return
  if compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') < 0:
    return

  Execute(format("hdp-select set spark-historyserver {version}"))
  copy_tarballs_to_hdfs('tez', 'spark-historyserver', params.spark_user, params.hdfs_user, params.user_group)
开发者ID:chinpeng,项目名称:ambari,代码行数:7,代码来源:job_history_server.py
示例6: zookeeper_service
def zookeeper_service(action='start', upgrade_type=None):
  """
  Starts or stops the ZooKeeper server daemon.

  action: 'start' or 'stop'
  upgrade_type: non-None during a stack upgrade; suppresses the symlink repair
  """
  import params

  # This path may be missing after Ambari upgrade. We need to create it.
  # BUG FIX: the guard tests params.current_version, but the original then
  # formatted params.version and passed params.version to hdp_select — so a
  # None/old params.version could defeat the repair the guard allowed.  Use
  # current_version consistently.  NOTE(review): confirm intent upstream.
  if upgrade_type is None and not os.path.exists("/usr/hdp/current/zookeeper-server") and params.current_version \
      and compare_versions(format_hdp_stack_version(params.current_version), '2.2.0.0') >= 0:
    conf_select.select(params.stack_name, "zookeeper", params.current_version)
    hdp_select.select("zookeeper-server", params.current_version)

  cmd = format("env ZOOCFGDIR={config_dir} ZOOCFG=zoo.cfg {zk_bin}/zkServer.sh")

  if action == 'start':
    daemon_cmd = format("source {config_dir}/zookeeper-env.sh ; {cmd} start")
    # skip the start when a live pid already exists
    no_op_test = format("ls {zk_pid_file} >/dev/null 2>&1 && ps -p `cat {zk_pid_file}` >/dev/null 2>&1")
    Execute(daemon_cmd,
            not_if=no_op_test,
            user=params.zk_user
    )

    if params.security_enabled:
      kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
      Execute(kinit_cmd,
              user=params.smokeuser
      )
  elif action == 'stop':
    daemon_cmd = format("source {config_dir}/zookeeper-env.sh ; {cmd} stop")
    rm_pid = format("rm -f {zk_pid_file}")
    Execute(daemon_cmd,
            user=params.zk_user
    )
    # remove the stale pid file after the daemon stops
    Execute(rm_pid)
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:33,代码来源:zookeeper_service.py
示例7: pre_upgrade_restart
def pre_upgrade_restart(self, env, upgrade_type=None):
  """Point the storm client conf/binary symlinks at the new stack version
  before restarting the DRPC server (HDP >= 2.2.0.0 only)."""
  import params
  env.set_params(params)

  if not params.version:
    return
  if compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
    conf_select.select(params.stack_name, "storm", params.version)
    hdp_select.select("storm-client", params.version)
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:7,代码来源:drpc_server.py
示例8: pre_rolling_restart
def pre_rolling_restart(self, env):
  """
  Before a rolling-upgrade restart of the Knox gateway: back up the current
  conf directory, switch the conf/binary symlinks to the new version, then
  restore the archived configs into the new conf location.

  Only runs for HDP >= 2.2.0.0; the backup/restore only runs in the
  UPGRADE direction.
  """
  import params
  env.set_params(params)

  if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
    absolute_backup_dir = None
    if params.upgrade_direction and params.upgrade_direction == Direction.UPGRADE:
      Logger.info("Backing up directories. Initial conf folder: %s" % os.path.realpath(params.knox_conf_dir))

      # This will backup the contents of the conf directory into /tmp/knox-upgrade-backup/knox-conf-backup.tar
      absolute_backup_dir = upgrade.backup_data()

    # conf-select will change the symlink to the conf folder.
    conf_select.select(params.stack_name, "knox", params.version)
    hdp_select.select("knox-server", params.version)

    # Extract the tar of the old conf folder into the new conf directory
    if absolute_backup_dir is not None and params.upgrade_direction and params.upgrade_direction == Direction.UPGRADE:
      conf_tar_source_path = os.path.join(absolute_backup_dir, upgrade.BACKUP_CONF_ARCHIVE)
      if os.path.exists(conf_tar_source_path):
        extract_dir = os.path.realpath(params.knox_conf_dir)
        conf_tar_dest_path = os.path.join(extract_dir, upgrade.BACKUP_CONF_ARCHIVE)
        Logger.info("Copying %s into %s file." % (upgrade.BACKUP_CONF_ARCHIVE, conf_tar_dest_path))
        # copy first so the archive survives in the new conf dir, then untar
        Execute(('cp', conf_tar_source_path, conf_tar_dest_path),
                sudo = True,
        )

        tar_archive.untar_archive(conf_tar_source_path, extract_dir)

        # the copied archive is only a transport vehicle; delete it after extract
        File(conf_tar_dest_path,
             action = "delete",
        )
开发者ID:andreysabitov,项目名称:ambari-mantl,代码行数:32,代码来源:knox_gateway.py
示例9: pre_upgrade_restart
def pre_upgrade_restart(self, env, upgrade_type=None):
  """Switch hadoop conf and hadoop-hdfs-datanode symlinks to the new stack
  version before restart (only for HDP >= 2.2.0.0)."""
  Logger.info("Executing DataNode Stack Upgrade pre-restart")
  import params
  env.set_params(params)

  version_ok = params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0
  if version_ok:
    conf_select.select(params.stack_name, "hadoop", params.version)
    hdp_select.select("hadoop-hdfs-datanode", params.version)
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:7,代码来源:datanode.py
示例10: pre_rolling_restart
def pre_rolling_restart(self, env):
  """Run `hdp-select set hadoop-hdfs-datanode` before a rolling-upgrade
  restart when the target stack is HDP >= 2.2.0.0."""
  Logger.info("Executing DataNode Rolling Upgrade pre-restart")
  import params
  env.set_params(params)

  if not params.version:
    return
  if compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
    Execute(format("hdp-select set hadoop-hdfs-datanode {version}"))
开发者ID:fanzhidongyzby,项目名称:ambari,代码行数:7,代码来源:datanode.py
示例11: _get_current_hiveserver_version
def _get_current_hiveserver_version():
  """
  Runs "hive --version" and parses the result in order
  to obtain the current version of hive.

  :return: the hiveserver2 version, returned by "hive --version"
  :raises Fail: when the source version is unknown or the command fails
  """
  import params

  try:
    # When downgrading the source version should be the version we are downgrading from
    if "downgrade" == params.upgrade_direction:
      if not params.downgrade_from_version:
        raise Fail('The version from which we are downgrading from should be provided in \'downgrade_from_version\'')
      source_version = params.downgrade_from_version
    else:
      source_version = params.current_version
    hive_execute_path = _get_hive_execute_path(source_version)
    version_hive_bin = params.hive_bin
    formatted_source_version = format_hdp_stack_version(source_version)
    # on HDP 2.2+ use the versioned hive binary rather than the default path
    if formatted_source_version and compare_versions(formatted_source_version, "2.2") >= 0:
      version_hive_bin = format('/usr/hdp/{source_version}/hive/bin')
    command = format('{version_hive_bin}/hive --version')
    return_code, hdp_output = shell.call(command, user=params.hive_user, path=hive_execute_path)
  except Exception, e:
    Logger.error(str(e))
    raise Fail('Unable to execute hive --version command to retrieve the hiveserver2 version.')
  # NOTE(review): this listing appears truncated — the success path never
  # returns the parsed version here; verify against the upstream
  # hive_server_upgrade.py, which parses `hdp_output` after the try block.
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:27,代码来源:hive_server_upgrade.py
示例12: _get_directory_mappings_during_upgrade
def _get_directory_mappings_during_upgrade():
  """
  Gets a dictionary of directory to archive name that represents the
  directories that need to be backed up and their output tarball archive targets

  :return: the dictionary of directory to tarball mappings
  """
  import params

  # Must be performing an Upgrade; None != Direction.UPGRADE, and an empty or
  # missing upgrade_from_version is equally unusable.
  if params.upgrade_direction != Direction.UPGRADE or params.upgrade_from_version in (None, ""):
    Logger.error("Function _get_directory_mappings_during_upgrade() can only be called during a Stack Upgrade in direction UPGRADE.")
    return {}

  # By default, use this for all stacks.
  knox_data_dir = '/var/lib/knox/data'

  on_hdp_stack = params.stack_name and params.stack_name.upper() == "HDP"
  if on_hdp_stack and compare_versions(format_hdp_stack_version(params.upgrade_from_version), "2.3.0.0") > 0:
    # Use the version that is being upgraded from.
    knox_data_dir = format('/usr/hdp/{upgrade_from_version}/knox/data')

  # the trailing "/" is important here so as to not include the "conf" folder itself
  directories = {knox_data_dir: BACKUP_DATA_ARCHIVE, params.knox_conf_dir + "/": BACKUP_CONF_ARCHIVE}

  Logger.info(format("Knox directories to backup:\n{directories}"))
  return directories
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:27,代码来源:upgrade.py
示例13: pre_upgrade_restart
def pre_upgrade_restart(self, env, upgrade_type=None):
  """Repoint zookeeper conf and server symlinks at the target version before
  the upgrade restart (HDP >= 2.2.0.0 only)."""
  Logger.info("Executing Stack Upgrade pre-restart")
  import params
  env.set_params(params)

  new_version = params.version
  if not new_version:
    return
  if compare_versions(format_hdp_stack_version(new_version), '2.2.0.0') >= 0:
    conf_select.select(params.stack_name, "zookeeper", new_version)
    hdp_select.select("zookeeper-server", new_version)
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:8,代码来源:zookeeper_server.py
示例14: pre_rolling_restart
def pre_rolling_restart(self, env):
  """Switch hadoop conf and hadoop-hdfs-datanode symlinks before a rolling
  upgrade restart, when the target stack is HDP >= 2.2.0.0."""
  Logger.info("Executing DataNode Rolling Upgrade pre-restart")
  import params
  env.set_params(params)

  if params.version:
    formatted = format_hdp_stack_version(params.version)
    if compare_versions(formatted, "2.2.0.0") >= 0:
      conf_select.select(params.stack_name, "hadoop", params.version)
      hdp_select.select("hadoop-hdfs-datanode", params.version)
开发者ID:andreysabitov,项目名称:ambari-mantl,代码行数:8,代码来源:datanode.py
示例15: pre_upgrade_restart
def pre_upgrade_restart(self, env, upgrade_type=None):
  """Repoint Spark client conf/binary symlinks at the new stack version
  before restart (HDP >= 2.2.0.0 only)."""
  import params
  env.set_params(params)

  if not params.version:
    return
  if compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
    Logger.info("Executing Spark Client Stack Upgrade pre-restart")
    conf_select.select(params.stack_name, "spark", params.version)
    hdp_select.select("spark-client", params.version)
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:8,代码来源:spark_client.py
示例16: pre_rolling_restart
def pre_rolling_restart(self, env):
  """Switch hadoop conf and yarn-resourcemanager symlinks to the target stack
  version during a rolling upgrade (HDP >= 2.2.0.0 only)."""
  Logger.info("Executing Rolling Upgrade post-restart")
  import params
  env.set_params(params)

  if not params.version:
    return
  version_formatted = format_hdp_stack_version(params.version)
  if compare_versions(version_formatted, '2.2.0.0') >= 0:
    conf_select.select(params.stack_name, "hadoop", params.version)
    hdp_select.select("hadoop-yarn-resourcemanager", params.version)
开发者ID:andreysabitov,项目名称:ambari-mantl,代码行数:8,代码来源:resourcemanager.py
示例17: pre_rolling_restart
def pre_rolling_restart(self, env):
  """Run hdp-select for the MapReduce history server and push the mapreduce
  tarball to HDFS before the rolling-upgrade restart (HDP >= 2.2.0.0 only)."""
  Logger.info("Executing Rolling Upgrade pre-restart")
  import params
  env.set_params(params)

  if not params.version:
    return
  if compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') < 0:
    return

  Execute(format("hdp-select set hadoop-mapreduce-historyserver {version}"))
  copy_tarballs_to_hdfs('mapreduce', 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
开发者ID:fanzhidongyzby,项目名称:ambari,代码行数:8,代码来源:historyserver.py
示例18: pre_rolling_restart
def pre_rolling_restart(self, env):
  """Repoint Spark history server symlinks for the new version and, on stacks
  older than HDP 2.3, copy the tez tarball to HDFS (Spark there still
  depends on Tez)."""
  import params
  env.set_params(params)

  if not params.version:
    return
  formatted_version = format_hdp_stack_version(params.version)
  if compare_versions(formatted_version, "2.2.0.0") >= 0:
    conf_select.select(params.stack_name, "spark", params.version)
    hdp_select.select("spark-historyserver", params.version)

    # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
    # need to copy the tarball, otherwise, copy it.
    if compare_versions(formatted_version, "2.3.0.0") < 0:
      resource_created = copy_to_hdfs(
        "tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped
      )
      if resource_created:
        params.HdfsResource(None, action="execute")
开发者ID:zouzhberk,项目名称:ambaridemo,代码行数:17,代码来源:job_history_server.py
示例19: unlink_all_configs
def unlink_all_configs(self, env):
  """
  Reverses the work performed in link_config. This should only be used when downgrading from
  HDP 2.3 to 2.2 in order to under the symlink work required for 2.3.

  Silently (with a warning) no-ops unless all of these hold: the command is a
  DOWNGRADE, the stack is HDP, and both the target and source versions are
  supplied via commandParams.
  """
  stack_name = default('/hostLevelParams/stack_name', "").upper()
  downgrade_to_version = default('/commandParams/version', None)
  downgrade_from_version = default('/commandParams/downgrade_from_version', None)
  upgrade_direction = default("/commandParams/upgrade_direction", Direction.UPGRADE)

  # downgrade only
  if upgrade_direction != Direction.DOWNGRADE:
    Logger.warning("Unlinking configurations should only be performed on a downgrade.")
    return

  # HDP only
  if stack_name != "HDP":
    Logger.warning("Unlinking configurations should only be performed on the HDP stack.")
    return

  if downgrade_to_version is None or downgrade_from_version is None:
    Logger.warning("Both 'commandParams/version' and 'commandParams/downgrade_from_version' must be specified to unlink configs on downgrade.")
    return

  Logger.info("Unlinking all configs when downgrading from HDP 2.3 to 2.2")

  # normalize the versions
  stack_23 = format_hdp_stack_version("2.3")
  downgrade_to_version = format_hdp_stack_version(downgrade_to_version)
  downgrade_from_version = format_hdp_stack_version(downgrade_from_version)

  # downgrade-to-version must be 2.2 (less than 2.3)
  if compare_versions(downgrade_to_version, stack_23) >= 0:
    Logger.warning("Unlinking configurations should only be performed when downgrading to HDP 2.2")
    return

  # downgrade-from-version must be 2.3+
  if compare_versions(downgrade_from_version, stack_23) < 0:
    Logger.warning("Unlinking configurations should only be performed when downgrading from HDP 2.3 or later")
    return

  # iterate through all directory conf mappings and undo the symlinks
  for key, value in conf_select.PACKAGE_DIRS.iteritems():
    for directory_mapping in value:
      original_config_directory = directory_mapping['conf_dir']
      self._unlink_config(original_config_directory)
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:46,代码来源:ru_set_all.py
示例20: upgrade_schema
def upgrade_schema(self, env):
  """
  Executes the schema upgrade binary. This is its own function because it could
  be called as a standalone task from the upgrade pack, but is safe to run it for each
  metastore instance.

  The metastore schema upgrade requires a database driver library for most
  databases. During an upgrade, it's possible that the library is not present,
  so this will also attempt to copy/download the appropriate driver.
  """
  Logger.info("Upgrading Hive Metastore")
  import params
  env.set_params(params)

  # kinit first so the schematool call below runs with valid credentials
  if params.security_enabled:
    kinit_command = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal}; ")
    Execute(kinit_command, user=params.smokeuser)

  # ensure that the JDBC drive is present for the schema tool; if it's not
  # present, then download it first
  if params.hive_jdbc_driver in params.hive_jdbc_drivers_list and params.hive_use_existing_db:
    target_directory = format("/usr/hdp/{version}/hive/lib")
    if not os.path.exists(params.target):
      # download it
      jdbc_connector()

    if params.sqla_db_used:
      # SQL Anywhere ships native libraries alongside the jars; copy both
      target_native_libs_directory = format("{target_directory}/native/lib64")

      Execute(format("yes | {sudo} cp {jars_in_hive_lib} {target_directory}"))

      Directory(target_native_libs_directory, recursive=True)

      Execute(format("yes | {sudo} cp {libs_in_hive_lib} {target_native_libs_directory}"))

      Execute(format("{sudo} chown -R {hive_user}:{user_group} {hive_lib}/*"))
    else:
      Execute(("cp", params.target, target_directory), path=["/bin", "/usr/bin/"], sudo=True)

    File(os.path.join(target_directory, os.path.basename(params.target)), mode=0644)

  # build the schema tool command
  binary = format("/usr/hdp/{version}/hive/bin/schematool")

  # the conf.server directory changed locations between HDP 2.2 and 2.3
  # since the configurations have not been written out yet during an upgrade
  # we need to choose the original legacy location
  schematool_hive_server_conf_dir = params.hive_server_conf_dir
  if params.current_version is not None:
    current_version = format_hdp_stack_version(params.current_version)
    if compare_versions(current_version, "2.3") < 0:
      schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF

  env_dict = {"HIVE_CONF_DIR": schematool_hive_server_conf_dir}

  command = format("{binary} -dbType {hive_metastore_db_type} -upgradeSchema")
  Execute(command, user=params.hive_user, tries=1, environment=env_dict, logoutput=True)
开发者ID:zouzhberk,项目名称:ambaridemo,代码行数:58,代码来源:hive_metastore.py
注:本文中的resource_management.libraries.functions.version.format_hdp_stack_version函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论