本文整理汇总了Python中resource_management.libraries.functions.default.default函数的典型用法代码示例。如果您正苦于以下问题:Python default函数的具体用法?Python default怎么用?Python default使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了default函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: get_role_component_current_stack_version
def get_role_component_current_stack_version():
    """
    Gets the current HDP version of the component that this role command is for.
    :return: the current HDP version of the specified component or None
    """
    command_role = default("/role", "")
    command_name = default("/roleCommand", "")
    selector_tool = stack_tools.get_stack_tool_name(stack_tools.STACK_SELECTOR_NAME)

    # Map the Ambari role to a stack-select component; service checks use
    # their own mapping.
    component = SERVER_ROLE_DIRECTORY_MAP.get(command_role)
    if component is None and command_name == "SERVICE_CHECK":
        component = SERVICE_CHECK_DIRECTORY_MAP.get(command_role)

    if component is None:
        return None

    version = get_stack_version(component)
    if version is None:
        Logger.warning("Unable to determine {0} version for {1}".format(
            selector_tool, component))
    else:
        Logger.info("{0} is currently at version {1}".format(
            component, version))
    return version
开发者ID:maduhu,项目名称:HDP2.5-ambari,代码行数:28,代码来源:stack_select.py
示例2: actionexecute
def actionexecute(self, env):
    """
    Point every HDP component on this host at the requested stack version.

    Reads the target version from /commandParams/version (required; raises
    Fail when missing). For HDP >= 2.2 runs `hdp-select set all <version>`;
    for HDP >= 2.3 additionally re-links each component's /etc conf
    directory to the /usr/hdp/current location.
    """
    config = Script.get_config()
    version = default('/commandParams/version', None)
    stack_name = default('/hostLevelParams/stack_name', "")

    if not version:
        raise Fail("Value is required for '/commandParams/version'")

    # Clear yum's cache so later installs see fresh repo metadata.
    # NOTE(review): only the RedHat family is handled ("other os?" in the
    # original) — other package managers are not cleaned here.
    if OSCheck.is_redhat_family():
        cmd = ('/usr/bin/yum', 'clean', 'all')
        code, out = shell.call(cmd, sudo=True)

    min_ver = format_hdp_stack_version("2.2")
    real_ver = format_hdp_stack_version(version)
    if stack_name == "HDP":
        # hdp-select only exists from HDP 2.2 onwards.
        if compare_versions(real_ver, min_ver) >= 0:
            cmd = ('hdp-select', 'set', 'all', version)
            code, out = shell.call(cmd, sudo=True)

        if compare_versions(real_ver, format_hdp_stack_version("2.3")) >= 0:
            # backup the old and symlink /etc/[component]/conf to /usr/hdp/current/[component]
            for k, v in conf_select.PACKAGE_DIRS.iteritems():
                for dir_def in v:
                    link_config(dir_def['conf_dir'], dir_def['current_dir'])
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:26,代码来源:ru_set_all.py
示例3: get_current_version
def get_current_version(use_upgrading_version_during_upgrade=True):
    """
    Get the effective version to use to copy the tarballs to.
    :param use_upgrading_version_during_upgrade: True, except when the RU/EU hasn't started yet.
    :return: Version, or False if an error occurred.
    """
    direction = default("/commandParams/upgrade_direction", None)
    in_stack_upgrade = direction is not None

    version = default("/hostLevelParams/current_version", None)
    Logger.info("Default version is {0}".format(version))

    if in_stack_upgrade:
        if use_upgrading_version_during_upgrade:
            # The version being moved to; on a downgrade this is the lower version.
            version = default("/commandParams/version", None)
            Logger.info("Because this is a Stack Upgrade, will use version {0}".format(version))
        else:
            Logger.info("This is a Stack Upgrade, but keep the version unchanged.")
    elif version is None:
        # Normal operation: a first install may not know the version yet,
        # so fall back to what <stack-selector> reports.
        selected = _get_single_version_from_stack_select()
        if selected:
            Logger.info("Will use stack version {0}".format(selected))
            version = selected

    if version is None:
        message_suffix = "during stack %s" % str(direction) if in_stack_upgrade else ""
        Logger.warning("Cannot copy tarball because unable to determine current version {0}.".format(message_suffix))
        return False

    return version
开发者ID:maduhu,项目名称:HDP2.5-ambari,代码行数:32,代码来源:copy_tarball.py
示例4: delete_storm_local_data
def delete_storm_local_data(self, env):
    """
    Deletes Storm data from local directories. This will create a marker file
    with JSON data representing the upgrade stack and request/stage ID. This
    will prevent multiple Storm components on the same host from removing
    the local directories more than once.
    :return:
    """
    import params

    Logger.info('Clearing Storm data from local directories...')

    storm_local_directory = params.local_dir
    if storm_local_directory is None:
        raise Fail("The storm local directory specified by storm-site/storm.local.dir must be specified")

    request_id = default("/requestId", None)
    stage_id = default("/stageId", None)
    stack_version = params.version
    stack_name = params.stack_name

    # Marker payload identifying this particular upgrade request/stage; an
    # identical existing marker means another component on this host has
    # already performed the cleanup.
    json_map = {}
    json_map["requestId"] = request_id
    json_map["stageId"] = stage_id
    json_map["stackVersion"] = stack_version
    json_map["stackName"] = stack_name

    temp_directory = params.tmp_dir
    upgrade_file = os.path.join(temp_directory, "storm-upgrade-{0}.json".format(stack_version))

    if os.path.exists(upgrade_file):
        try:
            with open(upgrade_file) as file_pointer:
                existing_json_map = json.load(file_pointer)

            # Python 2 cmp(): 0 means the marker matches this request/stage.
            if cmp(json_map, existing_json_map) == 0:
                Logger.info("The storm upgrade has already removed the local directories for {0}-{1} for request {2} and stage {3}".format(
                    stack_name, stack_version, request_id, stage_id))
                # nothing else to do here for this as it appears to have already been
                # removed by another component being upgraded
                return
        except:
            # Unreadable/corrupt marker — remove it and fall through to cleanup.
            Logger.error("The upgrade file {0} appears to be corrupt; removing...".format(upgrade_file))
            File(upgrade_file, action="delete")
        else:
            # delete the upgrade file since it does not match
            File(upgrade_file, action="delete")

    # delete from local directory
    Directory(storm_local_directory, action="delete", recursive=True)

    # recreate storm local directory
    Directory(storm_local_directory, mode=0755, owner = params.storm_user,
        group = params.user_group, recursive = True)

    # the file doesn't exist, so create it
    with open(upgrade_file, 'w') as file_pointer:
        json.dump(json_map, file_pointer, indent=2)
开发者ID:andreysabitov,项目名称:ambari-mantl,代码行数:60,代码来源:storm_upgrade.py
示例5: get_role_component_current_hdp_version
def get_role_component_current_hdp_version():
    """
    Gets the current HDP version of the component that this role command is for.
    :return: the current HDP version of the specified component or None
    """
    command_role = default("/role", "")
    command_name = default("/roleCommand", "")

    # Resolve the hdp-select component for this role; service checks use a
    # separate mapping.
    component = SERVER_ROLE_DIRECTORY_MAP.get(command_role)
    if component is None and command_name == "SERVICE_CHECK":
        component = SERVICE_CHECK_DIRECTORY_MAP.get(command_role)

    if component is None:
        return None

    version = get_hdp_version(component)
    if version is None:
        Logger.warning("Unable to determine hdp-select version for {0}".format(
            component))
    else:
        Logger.info("{0} is currently at version {1}".format(
            component, version))
    return version
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:27,代码来源:hdp_select.py
示例6: _get_tar_source_and_dest_folder
def _get_tar_source_and_dest_folder(tarball_prefix):
    """
    Resolve the tarball source file and HDFS destination folder from cluster-env.
    :param tarball_prefix: Prefix of the tarball must be one of tez, hive, mr, pig
    :return: Returns a tuple of (source_file, destination_folder), or
             (None, None) when either property is missing or invalid.
    """
    # E.g., /usr/hdp/current/hadoop-client/tez-{{ hdp_stack_version }}.tar.gz
    component_tar_source_file = default("/configurations/cluster-env/%s%s" % (tarball_prefix.lower(), TAR_SOURCE_SUFFIX), None)
    # E.g., hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/
    component_tar_destination_folder = default("/configurations/cluster-env/%s%s" % (tarball_prefix.lower(), TAR_DESTINATION_FOLDER_SUFFIX), None)

    if not component_tar_source_file or not component_tar_destination_folder:
        Logger.warning("Did not find %s tar source file and destination folder properties in cluster-env.xml" %
                       tarball_prefix)
        return None, None

    # A source "path" with no slash at all cannot be a real file path.
    if component_tar_source_file.find("/") == -1:
        Logger.warning("The tar file path %s is not valid" % str(component_tar_source_file))
        return None, None

    # Normalize the destination so later path joins behave consistently.
    if not component_tar_destination_folder.endswith("/"):
        component_tar_destination_folder = component_tar_destination_folder + "/"

    # Fix: this failure path previously returned silently, unlike every
    # other invalid-configuration path in this function, which logs first.
    if not component_tar_destination_folder.startswith("hdfs://"):
        Logger.warning("The tar destination folder %s is not valid; it must start with hdfs://" %
                       str(component_tar_destination_folder))
        return None, None

    return component_tar_source_file, component_tar_destination_folder
开发者ID:duxia,项目名称:ambari,代码行数:27,代码来源:dynamic_variable_interpretation.py
示例7: actionexecute
def actionexecute(self, env):
    """
    Execute the list of upgrade tasks supplied in roleParams/tasks.

    Each entry is parsed into an ExecuteTask. When a task names a (script,
    function) pair and the service package/hooks folders are known, the
    task command is rewritten to invoke that script function through the
    ambari-python-wrap wrapper against the same command JSON file.
    Variables in the command are then expanded and the command is run via
    shell; a non-zero exit code raises Fail.
    """
    resolve_ambari_config()

    # Parse parameters from command json file.
    config = Script.get_config()
    host_name = socket.gethostname()
    version = default('/roleParams/version', None)

    # These 2 variables are optional
    service_package_folder = default('/roleParams/service_package_folder', None)
    hooks_folder = default('/roleParams/hooks_folder', None)

    tasks = json.loads(config['roleParams']['tasks'])
    if tasks:
        for t in tasks:
            task = ExecuteTask(t)
            Logger.info(str(task))

            # If a (script, function) exists, it overwrites the command.
            if task.script and task.function and service_package_folder and hooks_folder:
                file_cache = FileCache(agent_config)

                command_paths = {"commandParams":
                    {"service_package_folder": service_package_folder,
                     "hooks_folder": hooks_folder
                    }
                }

                server_url_prefix = default('/hostLevelParams/jdk_location', "")
                base_dir = file_cache.get_service_base_dir(command_paths, server_url_prefix)
                script_path = os.path.join(base_dir, task.script)
                if not os.path.exists(script_path):
                    message = "Script %s does not exist" % str(script_path)
                    raise Fail(message)

                # Notice that the script_path is now the fully qualified path, and the
                # same command-#.json file is used.
                # Also, the python wrapper is used, since it sets up the correct environment variables
                command_params = ["/usr/bin/ambari-python-wrap",
                                  script_path,
                                  task.function,
                                  self.command_data_file,
                                  self.basedir,
                                  self.stroutfile,
                                  self.logging_level,
                                  Script.get_tmp_dir()]
                task.command = " ".join(command_params)
                # Replace redundant whitespace to make the unit tests easier to validate
                task.command = re.sub("\s+", " ", task.command).strip()

            if task.command:
                task.command = replace_variables(task.command, host_name, version)
                code, out = shell.call(task.command)
                Logger.info("Command: %s\nCode: %s, Out: %s" % (task.command, str(code), str(out)))
                if code != 0:
                    raise Fail(out)
开发者ID:fanzhidongyzby,项目名称:ambari,代码行数:56,代码来源:ru_execute_tasks.py
示例8: set_version
def set_version(self):
    """
    Run `hdp-select set <component> <version>` for this script's component,
    but only when the stack maps to a component, all command parameters are
    present, and the stack version is at least HDP 2.2.0.0.
    """
    from resource_management.libraries.functions.default import default
    stack_name = default("/hostLevelParams/stack_name", None)
    version = default("/commandParams/version", None)
    stack_version_unformatted = str(default("/hostLevelParams/stack_version", ""))
    hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

    stack_to_component = self.get_stack_to_component()
    if not stack_to_component:
        return

    # Look up the hdp-select component for this stack, if one is mapped.
    component_name = stack_to_component.get(stack_name)
    if not (component_name and stack_name and version):
        return

    # hdp-select is only available on HDP 2.2.0.0 and later.
    if compare_versions(format_hdp_stack_version(hdp_stack_version), '2.2.0.0') >= 0:
        Execute(('/usr/bin/hdp-select', 'set', component_name, version),
                sudo = True)
开发者ID:fanzhidongyzby,项目名称:ambari,代码行数:13,代码来源:script.py
示例9: _get_upgrade_stack
def _get_upgrade_stack():
    """
    Gets the stack name and stack version if an upgrade is currently in progress.
    :return: the stack name and stack version as a tuple, or None if an
    upgrade is not in progress.
    """
    from resource_management.libraries.functions.default import default

    direction = default("/commandParams/upgrade_direction", None)
    name = default("/hostLevelParams/stack_name", None)
    version = default("/commandParams/version", None)

    # All three must be present for an upgrade to be considered in progress.
    if not (direction and name and version):
        return None

    return (name, version)
开发者ID:maduhu,项目名称:HDP2.5-ambari,代码行数:15,代码来源:stack_select.py
示例10: get_stack_name
def get_stack_name():
    """
    Gets the name of the stack from hostLevelParams/stack_name.
    :return: a stack name or None
    """
    from resource_management.libraries.functions.default import default
    stack_name = default("/hostLevelParams/stack_name", None)
    return stack_name
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:7,代码来源:script.py
示例11: setup_atlas_hook
def setup_atlas_hook(service_name, service_props, atlas_hook_filepath, owner, group):
"""
Generate the atlas-application.properties.xml file by merging the service_props with the Atlas application-properties.
:param service_name: Service Name to identify if it is a client-only service, which will generate slightly different configs.
:param service_props: Atlas configs specific to this service that must be merged.
:param atlas_hook_filepath: Config file to write, e.g., /etc/falcon/conf/atlas-application.properties.xml
:param owner: File owner
:param group: File group
"""
import params
atlas_props = default('/configurations/application-properties', {})
if has_atlas_in_cluster():
# Take the subset
merged_props = {}
shared_props = SHARED_ATLAS_HOOK_CONFIGS.copy()
if service_name in NON_CLIENT_SERVICES:
shared_props = shared_props.union(SHARED_ATLAS_HOOK_SECURITY_CONFIGS_FOR_NON_CLIENT_SERVICE)
for prop in shared_props:
if prop in atlas_props:
merged_props[prop] = atlas_props[prop]
merged_props.update(service_props)
Logger.info(format("Generating Atlas Hook config file {atlas_hook_filepath}"))
PropertiesFile(atlas_hook_filepath,
properties = merged_props,
owner = owner,
group = group,
mode = 0644)
开发者ID:maduhu,项目名称:HDP2.5-ambari,代码行数:31,代码来源:setup_atlas_hook.py
示例12: has_atlas_in_cluster
def has_atlas_in_cluster():
    """
    Determine if Atlas is installed on the cluster.
    :return: True if Atlas is installed, otherwise false.
    """
    # Atlas is considered present when at least one Atlas server host exists.
    return bool(default('/clusterHostInfo/atlas_server_hosts', []))
开发者ID:maduhu,项目名称:HDP2.5-ambari,代码行数:7,代码来源:setup_atlas_hook.py
示例13: check_stack_feature
def check_stack_feature(stack_feature, stack_version):
    """
    Given a stack_feature and a specific stack_version, it validates that the feature is supported by the stack_version.
    IMPORTANT, notice that the mapping of feature to version comes from cluster-env if it exists there.
    :param stack_feature: Feature name to check if it is supported by the stack. For example: "rolling_upgrade"
    :param stack_version: Version of the stack
    :return: Will return True if successful, otherwise, False.
    """
    from resource_management.libraries.functions.default import default
    from resource_management.libraries.functions.version import compare_versions
    stack_features_config = default("/configurations/cluster-env/stack_features", None)

    if not stack_version:
        Logger.debug("Cannot determine if feature %s is supported since did not provide a stack version." % stack_feature)
        return False

    if stack_features_config:
        data = json.loads(stack_features_config)
        for feature in data["stack_features"]:
            if feature["name"] == stack_feature:
                # A feature may bound its support with an inclusive min and
                # an exclusive max version; either bound is optional.
                if "min_version" in feature:
                    min_version = feature["min_version"]
                    if compare_versions(stack_version, min_version, format = True) < 0:
                        return False
                if "max_version" in feature:
                    max_version = feature["max_version"]
                    if compare_versions(stack_version, max_version, format = True) >= 0:
                        return False
                # Feature found and all declared bounds satisfied.
                return True
    else:
        raise Fail("Stack features not defined by stack")

    # Feature name not present in the stack_features mapping at all.
    return False
开发者ID:maduhu,项目名称:HDP2.5-ambari,代码行数:34,代码来源:stack_features.py
示例14: post_upgrade_check
def post_upgrade_check():
    """
    Ensure all journal nodes are up and quorum is established
    :return:
    """
    import params
    Logger.info("Ensuring Journalnode quorum is established")

    # Kerberized clusters need a ticket before talking to HDFS.
    if params.security_enabled:
        Execute(params.jn_kinit_cmd, user=params.hdfs_user)

    # Give the JournalNodes a moment before and after rolling edits.
    time.sleep(5)
    hdfs_roll_edits()
    time.sleep(5)

    all_journal_node_hosts = default("/clusterHostInfo/journalnode_hosts", [])

    if len(all_journal_node_hosts) < 3:
        raise Fail("Need at least 3 Journalnodes to maintain a quorum")

    try:
        namenode_ha = namenode_ha_state.NamenodeHAState()
    except ValueError, err:
        raise Fail("Could not retrieve Namenode HA addresses. Error: " + str(err))
    # NOTE(review): `namenode_ha` is unused in this excerpt — the original
    # function (25 lines) presumably continues; confirm against
    # journalnode_upgrade.py before relying on this being the whole body.
开发者ID:andreysabitov,项目名称:ambari-mantl,代码行数:25,代码来源:journalnode_upgrade.py
示例15: unlink_all_configs
def unlink_all_configs(self, env):
    """
    Reverses the work performed in link_config. This should only be used when downgrading from
    HDP 2.3 to 2.2 in order to undo the symlink work required for 2.3.
    """
    stack_name = default('/hostLevelParams/stack_name', "").upper()
    downgrade_to_version = default('/commandParams/version', None)
    downgrade_from_version = default('/commandParams/downgrade_from_version', None)
    upgrade_direction = default("/commandParams/upgrade_direction", Direction.UPGRADE)

    # downgrade only
    if upgrade_direction != Direction.DOWNGRADE:
        Logger.warning("Unlinking configurations should only be performed on a downgrade.")
        return

    # HDP only
    if stack_name != "HDP":
        Logger.warning("Unlinking configurations should only be performed on the HDP stack.")
        return

    if downgrade_to_version is None or downgrade_from_version is None:
        Logger.warning("Both 'commandParams/version' and 'commandParams/downgrade_from_version' must be specified to unlink configs on downgrade.")
        return

    Logger.info("Unlinking all configs when downgrading from HDP 2.3 to 2.2")

    # normalize the versions
    stack_23 = format_hdp_stack_version("2.3")
    downgrade_to_version = format_hdp_stack_version(downgrade_to_version)
    downgrade_from_version = format_hdp_stack_version(downgrade_from_version)

    # downgrade-to-version must be 2.2 (less than 2.3)
    if compare_versions(downgrade_to_version, stack_23) >= 0:
        Logger.warning("Unlinking configurations should only be performed when downgrading to HDP 2.2")
        return

    # downgrade-from-version must be 2.3+
    if compare_versions(downgrade_from_version, stack_23) < 0:
        Logger.warning("Unlinking configurations should only be performed when downgrading from HDP 2.3 or later")
        return

    # iterate through all directory conf mappings and undo the symlinks
    for key, value in conf_select.PACKAGE_DIRS.iteritems():
        for directory_mapping in value:
            original_config_directory = directory_mapping['conf_dir']
            self._unlink_config(original_config_directory)
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:46,代码来源:ru_set_all.py
示例16: get_stack_root
def get_stack_root():
    """
    Get the stack-specific install root directory
    :return: stack_root from cluster-env, falling back to
             /usr/<lowercased stack name> when not configured
    """
    from resource_management.libraries.functions.default import default
    stack_name = Script.get_stack_name()
    # Fix: the fallback string was built eagerly, so a missing stack name
    # raised AttributeError (None.lower()) even when cluster-env supplied
    # an explicit stack_root. Only build the fallback when a name exists.
    fallback = "/usr/{0}".format(stack_name.lower()) if stack_name is not None else None
    return default("/configurations/cluster-env/stack_root", fallback)
开发者ID:maduhu,项目名称:HDP2.5-ambari,代码行数:8,代码来源:script.py
示例17: should_expose_component_version
def should_expose_component_version(self, command_name):
    """
    Analyzes config and given command to determine if stack version should be written
    to structured out. Currently only HDP stack versions >= 2.2 are supported.
    :param command_name: command name
    :return: True or False
    """
    from resource_management.libraries.functions.default import default

    unformatted_version = str(default("/hostLevelParams/stack_version", ""))
    hdp_stack_version = format_hdp_stack_version(unformatted_version)

    # Versions are only exposed for HDP 2.2 and later.
    if hdp_stack_version == "" or compare_versions(hdp_stack_version, '2.2') < 0:
        return False

    command = command_name.lower()
    if command == "status":
        # STATUS exposes the version only when one was explicitly requested.
        return default("/commandParams/request_version", None) is not None

    # Populate version only on base commands
    return command in ("start", "install", "restart")
开发者ID:fanzhidongyzby,项目名称:ambari,代码行数:19,代码来源:script.py
示例18: get_component_from_role
def get_component_from_role(role_directory_map, default_role):
    """
    Gets the /usr/hdp/current/<component> component given an Ambari role,
    such as DATANODE or HBASE_MASTER.
    :return: the component name, such as hbase-master
    """
    from resource_management.libraries.functions.default import default
    command_role = default("/role", default_role)

    # Unknown roles fall back to the supplied default role's mapping.
    if command_role not in role_directory_map:
        command_role = default_role
    return role_directory_map[command_role]
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:13,代码来源:script.py
示例19: delete_storm_local_data
def delete_storm_local_data(self, env):
    """
    Deletes Storm data from local directories. This will create a marker file
    with JSON data representing the upgrade stack and request/stage ID. This
    will prevent multiple Storm components on the same host from removing
    the local directories more than once.
    :return:
    """
    import params

    Logger.info('Clearing Storm data from local directories...')

    storm_local_directory = params.local_dir
    if storm_local_directory is None:
        raise Fail("The storm local directory specified by storm-site/storm.local.dir must be specified")

    request_id = default("/requestId", None)

    stack_name = params.stack_name
    stack_version = params.version
    upgrade_direction = params.upgrade_direction

    # Marker payload identifying this request and upgrade direction; used
    # to detect whether another Storm component already did the cleanup.
    json_map = {}
    json_map["requestId"] = request_id
    json_map["stackName"] = stack_name
    json_map["stackVersion"] = stack_version
    json_map["direction"] = upgrade_direction

    temp_directory = params.tmp_dir
    marker_file = os.path.join(temp_directory, "storm-upgrade-{0}.json".format(stack_version))
    Logger.info("Marker file for upgrade/downgrade of Storm, {0}".format(marker_file))

    if os.path.exists(marker_file):
        Logger.info("The marker file exists.")
        try:
            with open(marker_file) as file_pointer:
                existing_json_map = json.load(file_pointer)

            # Python 2 cmp(): 0 means the existing marker matches exactly.
            if cmp(json_map, existing_json_map) == 0:
                Logger.info("The storm upgrade has already removed the local directories for {0}-{1} for "
                    "request {2} and direction {3}. Nothing else to do.".format(stack_name, stack_version, request_id, upgrade_direction))
                # Nothing else to do here for this as it appears to have already been
                # removed by another component being upgraded
                return
            else:
                Logger.info("The marker file differs from the new value. Will proceed to delete Storm local dir, "
                    "and generate new file. Current marker file: {0}".format(str(existing_json_map)))
        except Exception, e:
            Logger.error("The marker file {0} appears to be corrupt; removing it. Error: {1}".format(marker_file, str(e)))
            File(marker_file, action="delete")
    # NOTE(review): this excerpt appears truncated — the original (51 lines)
    # presumably continues by deleting/recreating the Storm local directory
    # and rewriting the marker file; confirm against storm_upgrade.py.
开发者ID:OpenPOWER-BigData,项目名称:HDP-ambari,代码行数:51,代码来源:storm_upgrade.py
示例20: format_package_name
def format_package_name(self, name):
    """
    This function replaces ${stack_version} placeholder into actual version. If the package
    version is passed from the server, use that as an absolute truth.
    :param name: package name possibly containing the stack-version placeholder
    :return: the package name with the placeholder replaced by a concrete,
             delimiter-formatted version string
    """
    from resource_management.libraries.functions.default import default

    # two different command types put things in different objects. WHY.
    # package_version is the form W_X_Y_Z_nnnn
    package_version = default("roleParams/package_version", None)
    if not package_version:
        package_version = default("hostLevelParams/package_version", None)

    # Ubuntu package versions use '-' between parts; other families use '_'.
    package_delimiter = '-' if OSCheck.is_ubuntu_family() else '_'

    # The cluster effective version comes down when the version is known after the initial
    # install. In that case we should not be guessing which version when invoking INSTALL, but
    # use the supplied version to build the package_version
    effective_version = default("commandParams/version", None)
    role_command = default("roleCommand", None)
    if (package_version is None or '*' in package_version) \
        and effective_version is not None and 'INSTALL' == role_command:
        package_version = effective_version.replace('.', package_delimiter).replace('-', package_delimiter)
        Logger.info("Version {0} was provided as effective cluster version. Using package version {1}".format(effective_version, package_version))

    if package_version:
        stack_version_package_formatted = package_version
        if OSCheck.is_ubuntu_family():
            stack_version_package_formatted = package_version.replace('_', package_delimiter)

    # Wildcards cause a lot of troubles with installing packages, if the version contains wildcards we try to specify it.
    if not package_version or '*' in package_version:
        stack_version_package_formatted = self.get_stack_version_before_packages_installed().replace('.', package_delimiter).replace('-', package_delimiter) if STACK_VERSION_PLACEHOLDER in name else name

    package_name = name.replace(STACK_VERSION_PLACEHOLDER, stack_version_package_formatted)
    return package_name
开发者ID:maduhu,项目名称:HDP2.5-ambari,代码行数:38,代码来源:script.py
注:本文中的resource_management.libraries.functions.default.default函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论