本文整理汇总了Python中spinnaker.run.check_run_quick函数的典型用法代码示例。如果您正苦于以下问题:Python check_run_quick函数的具体用法?Python check_run_quick怎么用?Python check_run_quick使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了check_run_quick函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: __record_halyard_nightly_version
def __record_halyard_nightly_version(version_bump, options):
  """Record the version and commit hash at which Halyard was built in a bucket.

  Assumes that gsutil is installed on the machine this script is run from.
  This function uses `gsutil rsync` to read the GCS file, changes it in-place,
  and then uses `gsutil rsync` to write the file again. `rsync` is eventually
  consistent, so running this script (or manually manipulating the GCS file)
  concurrently could likely result in file corruption. Don't parallelize this.
  """
  bucket_uri = options.hal_nightly_bucket_uri
  local_dir = os.path.basename(bucket_uri)
  if not os.path.exists(local_dir):
    os.mkdir(local_dir)
  # Mirror the bucket locally: -r recursive, -d delete extraneous files.
  check_run_quick('gsutil rsync -r -d {remote_uri} {local_bucket}'
                  .format(remote_uri=bucket_uri, local_bucket=local_dir))

  hal_version = version_bump.version_str.replace('version-', '')
  full_hal_version = '{version}-{build}'.format(
      version=hal_version, build=options.build_number)
  nightly_entry = '{full_hal_version}: {commit}'.format(
      full_hal_version=full_hal_version, commit=version_bump.commit_hash)

  entry_file = '{0}/nightly-version-commits.yml'.format(local_dir)
  with open(entry_file, 'a') as nef:
    nef.write('{0}\n'.format(nightly_entry))

  # Push the updated local directory back to the bucket.
  check_run_quick('gsutil rsync -r -d {local_bucket} {remote_uri}'
                  .format(remote_uri=bucket_uri, local_bucket=local_dir))

  # Opening with 'w' truncates any previous version file.
  with open(options.output_built_halyard_version, 'w') as hal_version_file:
    hal_version_file.write('{}'.format(full_hal_version))
开发者ID:PioTi,项目名称:spinnaker,代码行数:32,代码来源:build_prevalidation.py
示例2: install_nvm
def install_nvm(options):
print '---------- Installing NVM ---------'
check_run_quick('sudo chmod 775 /usr/local')
check_run_quick('sudo mkdir -m 777 -p /usr/local/node /usr/local/nvm')
result = check_fetch(
'https://raw.githubusercontent.com/creationix/nvm/{nvm_version}/install.sh'
.format(nvm_version=NVM_VERSION))
fd, temp = tempfile.mkstemp()
os.write(fd, result.content)
os.close(fd)
try:
run_and_monitor(
'bash -c "NVM_DIR=/usr/local/nvm source {temp}"'.format(temp=temp))
finally:
os.remove(temp)
# curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.26.0/install.sh | NVM_DIR=/usr/local/nvm bash
check_run_and_monitor('sudo bash -c "cat > /etc/profile.d/nvm.sh"',
input=__NVM_SCRIPT)
print '---------- Installing Node {version} ---------'.format(
version=NODE_VERSION)
run_and_monitor('bash -c "source /etc/profile.d/nvm.sh'
'; nvm install {version}'
'; nvm alias default {version}"'
.format(version=NODE_VERSION))
开发者ID:rubythonode,项目名称:spinnaker,代码行数:32,代码来源:install_development.py
示例3: deploy_instance
def deploy_instance(self):
"""Deploy an instance (from an image) so we can get at its disks.
This isnt necessarily efficient, but is simple since we already have
means to create images.
"""
if self.__instance:
print 'Using existing instance {name}'.format(name=self.__instance)
return
if not self.options.image:
raise ValueError('Neither --instance nor --image was specified.')
instance = 'build-spinnaker-tarball-{unique}'.format(
unique=time.strftime('%Y%m%d%H%M%S'))
print 'Deploying temporary instance {name}'.format(name=instance)
check_run_quick('gcloud compute instances create {name}'
' --zone={zone} --project={project}'
' --image={image} --image-project={image_project}'
' --scopes compute-rw,storage-rw'
.format(name=instance,
zone=self.__zone,
project=self.__project,
image=self.options.image,
image_project=self.options.image_project),
echo=False)
self.__instance = instance
开发者ID:Robin--,项目名称:spinnaker,代码行数:28,代码来源:build_google_tarball.py
示例4: __extract_image_tarball_helper
def __extract_image_tarball_helper(self):
"""Helper function for make_image_tarball that does the work.
Note that the work happens on the instance itself. So this function
builds a remote command that it then executes on the prototype instance.
"""
print 'Creating image tarball.'
set_excludes_bash_command = (
'EXCLUDES=`python -c'
' "import glob; print \',\'.join(glob.glob(\'/home/*\'))"`')
tar_path = self.options.tarball_uri
tar_name = os.path.basename(tar_path)
remote_script = [
'sudo mkdir /mnt/tmp',
'sudo /usr/share/google/safe_format_and_mount -m'
' "mkfs.ext4 -F" /dev/sdb /mnt/tmp',
set_excludes_bash_command,
'sudo gcimagebundle -d /dev/sda -o /mnt/tmp'
' --log_file=/tmp/export.log --output_file_name={tar_name}'
' --excludes=/tmp,\\$EXCLUDES'.format(tar_name=tar_name),
'gsutil -q cp /mnt/tmp/{tar_name} {output_path}'.format(
tar_name=tar_name, output_path=tar_path)]
command = '; '.join(remote_script)
check_run_quick('gcloud compute ssh --command="{command}"'
' --project {project} --zone {zone} {instance}'
.format(command=command.replace('"', r'\"'),
project=self.__project,
zone=self.__zone,
instance=self.__instance))
开发者ID:Robin--,项目名称:spinnaker,代码行数:31,代码来源:build_google_tarball.py
示例5: __checkout_githubio_repo
def __checkout_githubio_repo(self):
  """Clones the spinnaker.github.io git repo.
  """
  check_run_quick('git clone {0}'.format(self.__githubio_repo_uri))
  basename = os.path.basename(self.__githubio_repo_uri)
  # Strip a trailing '.git' so the name matches the cloned directory.
  if basename.endswith('.git'):
    basename = basename.replace('.git', '')
  self.__repo_name = basename
开发者ID:PioTi,项目名称:spinnaker,代码行数:7,代码来源:publish_test_results.py
示例6: copy_file
def copy_file(options, source, target):
if os.path.exists(source):
# TODO(ewiseblatt): we can use scp here instead, and pass the
# credentials we want to copy with rather than the additional command
# below. But we need to figure out the IP address to copy to.
# For now, do it the long way.
print 'Copying {source}'.format(source=source)
command = ' '.join([
'gcloud compute copy-files',
'--project', get_project(options),
'--zone', options.zone,
source,
'{instance}:{target}'.format(instance=options.instance,
target=target)])
while True:
result = run_quick(command, echo=False)
if not result.returncode:
break
print 'New instance does not seem ready yet...retry in 5s.'
time.sleep(5)
command = ' '.join([
'gcloud compute ssh',
'--command="chmod 600 /home/{gcp_user}/{target}"'.format(
gcp_user=os.environ['LOGNAME'], target=target),
options.instance,
'--project', get_project(options),
'--zone', options.zone])
check_run_quick(command, echo=False)
开发者ID:hadoop835,项目名称:spinnaker,代码行数:29,代码来源:create_google_dev_vm.py
示例7: __generate_halyard_docs
def __generate_halyard_docs(self):
  """Builds Halyard's CLI, which writes the new documentation locally to halyard/docs/commands.md
  """
  # Detach HEAD at the current commit before building.
  check_run_quick('git -C halyard rev-parse HEAD | xargs git -C halyard checkout ;')
  run_shell_and_log(['make'], 'halyard-generate-docs.log',
                    cwd='halyard/halyard-cli')
开发者ID:PioTi,项目名称:spinnaker,代码行数:8,代码来源:publish_halyard.py
示例8: do_deploy
def do_deploy(self, script, files_to_upload):
  """Implements the BaseBomValidateDeployer interface.

  Creates the VM, uploads the install script and any extra files over
  scp (retrying while the VM boots), then runs the script remotely via
  ssh with sudo.
  """
  options = self.options
  ensure_empty_ssh_key(self.__ssh_key_path, self.__hal_user)

  script_path = write_script_to_path(script, path=None)
  files_to_upload.add(script_path)
  if options.jenkins_master_name:
    # Stash the Jenkins password where the install script expects it.
    write_data_to_secure_path(
        os.environ.get('JENKINS_MASTER_PASSWORD'),
        path=os.path.join(os.sep, 'tmp', 'jenkins_{name}_password'
                          .format(name=options.jenkins_master_name)),
        is_script=True)

  try:
    self.do_create_vm(options)
    copy_files = (
        'scp'
        ' -i {ssh_key_path}'
        ' -o StrictHostKeyChecking=no'
        ' -o UserKnownHostsFile=/dev/null'
        ' {files} {ip}:~'
        .format(ssh_key_path=self.__ssh_key_path,
                files=' '.join(files_to_upload),
                ip=self.instance_ip))
    logging.info('Copying files %s', copy_files)

    # The VM may still be booting; retry the upload for up to ~20s.
    # pylint: disable=unused-variable
    for retry in range(0, 10):
      result = run_quick(copy_files)
      if result.returncode == 0:
        break
      time.sleep(2)
    if result.returncode != 0:
      # One final attempt that raises on failure.
      check_run_quick(copy_files)
  except Exception as ex:
    logging.error('Caught %s', ex)
    raise
  finally:
    os.remove(script_path)

  logging.info('Running install script')
  try:
    check_run_and_monitor(
        'ssh'
        ' -i {ssh_key}'
        ' -o StrictHostKeyChecking=no'
        ' -o UserKnownHostsFile=/dev/null'
        ' {ip}'
        ' "sudo ./{script_name}"'
        .format(ip=self.instance_ip,
                ssh_key=self.__ssh_key_path,
                script_name=os.path.basename(script_path)))
  except RuntimeError:
    raise RuntimeError('Halyard deployment failed.')
开发者ID:edwinavalos,项目名称:spinnaker,代码行数:57,代码来源:validate_bom__deploy.py
示例9: copy_master_yml
def copy_master_yml(options):
"""Copy the specified master spinnaker-local.yml, and credentials.
This will look for paths to credentials within the spinnaker-local.yml, and
copy those as well. The paths to the credentials (and the reference
in the config file) will be changed to reflect the filesystem on the
new instance, which may be different than on this instance.
Args:
options [Namespace]: The parser namespace options contain information
about the instance we're going to copy to, as well as the source
of the master spinnaker-local.yml file.
"""
print 'Creating .spinnaker directory...'
check_run_quick('gcloud compute ssh --command "mkdir -p .spinnaker"'
' --project={project} --zone={zone} {instance}'
.format(project=get_project(options),
zone=options.zone,
instance=options.instance),
echo=False)
bindings = YamlBindings()
bindings.import_path(options.master_yml)
try:
json_credential_path = bindings.get(
'providers.google.primaryCredentials.jsonPath')
except KeyError:
json_credential_path = None
gcp_home = os.path.join('/home', os.environ['LOGNAME'], '.spinnaker')
# If there are credentials, write them to this path
gcp_credential_path = os.path.join(gcp_home, 'google-credentials.json')
with open(options.master_yml, 'r') as f:
content = f.read()
# Replace all the occurances of the original credentials path with the
# path that we are going to place the file in on the new instance.
if json_credential_path:
content = content.replace(json_credential_path, gcp_credential_path)
fd, temp_path = tempfile.mkstemp()
os.write(fd, content)
os.close(fd)
actual_path = temp_path
# Copy the credentials here. The cfg file will be copied after.
copy_file(options, actual_path, '.spinnaker/spinnaker-local.yml')
if json_credential_path:
copy_file(options, json_credential_path,
'.spinnaker/google-credentials.json')
if temp_path:
os.remove(temp_path)
开发者ID:hadoop835,项目名称:spinnaker,代码行数:57,代码来源:create_google_dev_vm.py
示例10: create_instance
def create_instance(options):
"""Creates new GCE VM instance for development."""
project = get_project(options)
print 'Creating instance {project}/{zone}/{instance}'.format(
project=project, zone=get_zone(options), instance=options.instance)
print (' with --machine_type={type} and --disk_size={disk_size}...'
.format(type=options.machine_type, disk_size=options.disk_size))
google_dev_dir = os.path.join(os.path.dirname(__file__), '../google/dev')
dev_dir = os.path.dirname(__file__)
project_dir = os.path.join(dev_dir, '..')
install_dir = '{dir}/../install'.format(dir=dev_dir)
startup_command = ['/opt/spinnaker/install/install_spinnaker.sh'
' --dependencies_only',
'/opt/spinnaker/install/install_development.sh']
fd, temp_startup = tempfile.mkstemp()
os.write(fd, ';'.join(startup_command))
os.close(fd)
metadata_files = [
'startup-script={google_dev_dir}/google_install_loader.py'
',sh_bootstrap_dev={dev_dir}/bootstrap_dev.sh'
',sh_install_spinnaker={project_dir}/InstallSpinnaker.sh'
',sh_install_development={dev_dir}/install_development.sh'
',startup_command={temp_startup}'
.format(google_dev_dir=google_dev_dir,
dev_dir=dev_dir,
project_dir=project_dir,
temp_startup=temp_startup)]
metadata = ','.join([
'startup_loader_files='
'sh_install_spinnaker'
'+sh_install_development'
'+sh_bootstrap_dev'])
command = ['gcloud', 'compute', 'instances', 'create',
options.instance,
'--project', get_project(options),
'--zone', get_zone(options),
'--machine-type', options.machine_type,
'--image-family', 'ubuntu-1404-lts',
'--image-project', 'ubuntu-os-cloud',
'--scopes', options.scopes,
'--boot-disk-size={size}'.format(size=options.disk_size),
'--boot-disk-type={type}'.format(type=options.disk_type),
'--metadata', metadata,
'--metadata-from-file={files}'.format(
files=','.join(metadata_files))]
if options.address:
command.extend(['--address', options.address])
check_run_quick(' '.join(command), echo=False)
开发者ID:PioTi,项目名称:spinnaker,代码行数:55,代码来源:create_google_dev_vm.py
示例11: create_tarball
def create_tarball(self):
"""Create a tar.gz file from the instance specified by the options.
The file will be written to options.tarball_uri.
It can be later turned into a GCE image by passing it as the --source-uri
to gcloud images create.
"""
project = self.__project
basename = os.path.basename(self.options.tarball_uri).replace('_', '-')
first_dot = basename.find('.')
if first_dot:
basename = basename[0:first_dot]
disk_name = '{name}-export'.format(name=basename)
print 'Attaching external disk "{disk}" to extract image tarball.'.format(
disk=disk_name)
# TODO(ewiseblatt): 20151002
# Add an option to reuse an existing disk to reduce the cycle time.
# Then guard the create/format/destroy around this option.
# Still may want/need to attach/detach it here to reduce race conditions
# on its use since it can only be bound to once instance at a time.
check_run_quick('gcloud compute disks create '
' {disk_name} --project {project} --zone {zone} --size=10'
.format(disk_name=disk_name,
project=self.__project,
zone=self.__zone),
echo=False)
check_run_quick('gcloud compute instances attach-disk {instance}'
' --disk={disk_name} --device-name=export-disk'
' --project={project} --zone={zone}'
.format(instance=self.__instance,
disk_name=disk_name,
project=self.__project,
zone=self.__zone),
echo=False)
try:
self.__extract_image_tarball_helper()
finally:
print 'Detaching and deleting external disk.'
run_quick('gcloud compute instances detach-disk -q {instance}'
' --disk={disk_name} --project={project} --zone={zone}'
.format(instance=self.__instance,
disk_name=disk_name,
project=self.__project,
zone=self.__zone),
echo=False)
run_quick('gcloud compute disks delete -q {disk_name}'
' --project={project} --zone={zone}'
.format(disk_name=disk_name,
project=self.__project,
zone=self.__zone),
echo=False)
开发者ID:Robin--,项目名称:spinnaker,代码行数:53,代码来源:build_google_tarball.py
示例12: __determine_gate_version
def __determine_gate_version(self):
bom_file = 'bom.yml'
check_run_quick('gsutil cat gs://halconfig/bom/{spinnaker_version}.yml > {bom_file}'
.format(spinnaker_version=self.__spinnaker_version, bom_file=bom_file))
with open(bom_file, 'r') as stream:
try:
bom = yaml.load(stream)
return bom['services']['gate']['version']
except yaml.YAMLError as err:
print 'Failed to load Gate version from BOM.'
raise err
开发者ID:PioTi,项目名称:spinnaker,代码行数:12,代码来源:publish_api_docs.py
示例13: git_clone
def git_clone(self, repository, owner=None):
"""Clone the specified repository
Args:
repository [string]: The name of the github repository (without owner).
owner [string]: An explicit repository owner.
If not provided use the configured options.
"""
name = repository.name
repository_dir = get_repository_dir(name)
upstream_user = repository.owner
branch = self.pull_branch or 'master'
origin_url = self.get_github_repository_url(repository, owner=owner)
upstream_url = 'https://github.com/{upstream_user}/{name}.git'.format(
upstream_user=upstream_user, name=name)
# Don't echo because we're going to hide some failure.
print 'Cloning {name} from {origin_url} -b {branch}.'.format(
name=name, origin_url=origin_url, branch=branch)
shell_result = run_and_monitor(
'git clone {url} -b {branch}'.format(url=origin_url, branch=branch),
echo=False)
if not shell_result.returncode:
if shell_result.stdout:
print shell_result.stdout
else:
if repository in self.__extra_repositories:
sys.stderr.write('WARNING: Missing optional repository {name}.\n'
.format(name=name))
sys.stderr.write(' Continue on without it.\n')
return
sys.stderr.write(shell_result.stderr or shell_result.stdout)
sys.stderr.write(
'FATAL: Cannot continue without required repository {name}.\n'
' Consider using github to fork one from {upstream}.\n'.
format(name=name, upstream=upstream_url))
raise SystemExit('Repository {url} not found.'.format(url=origin_url))
if self.__options.add_upstream and origin_url != upstream_url:
print ' Adding upstream repository {upstream}.'.format(
upstream=upstream_url)
check_run_quick('git -C "{dir}" remote add upstream {url}'
.format(dir=repository_dir, url=upstream_url),
echo=False)
if self.__options.disable_upstream_push:
which = 'upstream' if origin_url != upstream_url else 'origin'
print ' Disabling git pushes to {which} {upstream}'.format(
which=which, upstream=upstream_url)
check_run_quick(
'git -C "{dir}" remote set-url --push {which} disabled'
.format(dir=repository_dir, which=which),
echo=False)
开发者ID:edwinavalos,项目名称:spinnaker,代码行数:53,代码来源:refresh_source.py
示例14: __publish_halyard_docs
def __publish_halyard_docs(self):
  """ Formats Halyard's documentation, then pushes to Spinnaker's documentation repository.
  """
  docs_source = 'halyard/docs/commands.md'
  docs_target = '{repo_name}/reference/halyard/commands.md'.format(
      repo_name=self.__docs_repo_name)
  repo_uri = '[email protected]:{repo_owner}/{repo_name}'.format(
      repo_owner=self.__docs_repo_owner, repo_name=self.__docs_repo_name)
  check_run_quick('git clone {repo_uri}'.format(repo_uri=repo_uri))

  # Jekyll front matter plus a UTC publication timestamp.
  timestamp = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
  header = '\n'.join([
      '---',
      'layout: single',
      'title: "Commands"',
      'sidebar:',
      ' nav: reference',
      '---',
      '',
      'Published: {}'.format(timestamp),
      '',
  ])
  with open(docs_source, 'r') as source:
    body = source.read()
  with open(docs_target, 'w') as target:
    target.write(header + body)

  commit_message = 'docs(halyard): {version}'.format(version=self.__stable_version)
  check_run_quick('git -C {repo_name} add reference/halyard/commands.md'
                  .format(repo_name=self.__docs_repo_name))
  check_run_quick('git -C {repo_name} commit -m "{message}"'
                  .format(repo_name=self.__docs_repo_name, message=commit_message))
  check_run_quick('git -C {repo_name} push origin master'
                  .format(repo_name=self.__docs_repo_name))
开发者ID:jtk54,项目名称:spinnaker,代码行数:33,代码来源:publish_halyard.py
示例15: do_fetch_service_log_file
def do_fetch_service_log_file(self, service, log_dir):
  """Implements the BaseBomValidateDeployer interface.

  Copies the service's log from the remote instance into log_dir,
  pre-creating an empty file with secure permissions first.
  """
  write_data_to_secure_path('', os.path.join(log_dir, service + '.log'))
  scp_command = (
      'scp'
      ' -i {ssh_key}'
      ' -o StrictHostKeyChecking=no'
      ' -o UserKnownHostsFile=/dev/null'
      ' {ip}:/var/log/spinnaker/{service}/{service}.log'
      ' {log_dir}'
      .format(ip=self.instance_ip,
              ssh_key=self.ssh_key_path,
              service=service,
              log_dir=log_dir))
  check_run_quick(scp_command)
开发者ID:edwinavalos,项目名称:spinnaker,代码行数:14,代码来源:validate_bom__deploy.py
示例16: ensure_empty_ssh_key
def ensure_empty_ssh_key(path, user):
  """Ensure there is an ssh key at the given path.

  It is assumed that this key has no password associated with it so we
  can use it for ssh/scp. An existing key is left untouched.
  """
  if os.path.exists(path):
    return
  logging.debug('Creating %s SSH key for user "%s"', path, user)
  # Generate a passphrase-less key, then prefix the public line with
  # '<user>:' (presumably the form consumed by cloud metadata -- confirm).
  check_run_quick(
      'ssh-keygen -N "" -t rsa -f {path} -C {user}'
      '; sed "s/^ssh-rsa/{user}:ssh-rsa/" -i {path}'
      .format(user=user, path=path))
开发者ID:jtk54,项目名称:spinnaker,代码行数:15,代码来源:validate_bom__deploy.py
示例17: push_branch_and_tags
def push_branch_and_tags(self):
  """Creates a release branch and pushes tags to the microservice repos owned by --github_publisher.

  A private key that has access to --github_publisher's github repos needs added
  to a running ssh-agent on the machine this script will run on:

  > <copy or rsync the key to the vm>
  > eval `ssh-agent`
  > ssh-add ~/.ssh/<key with access to github repos>
  """
  major, minor, _ = self.__release_version.split('.')
  # The stable branch will look like <major>.<minor>.X since nebula
  # enforces restrictions on what branches it does releases from.
  # https://github.com/nebula-plugins/nebula-release-plugin#extension-provided
  stable_branch = '.'.join([major, minor, 'X'])

  for comp in COMPONENTS:
    # Patch releases reuse the stable branch; otherwise create it fresh.
    if self.__patch_release:
      check_run_quick('git -C {0} checkout {1}'.format(comp, stable_branch))
    else:
      check_run_quick('git -C {0} checkout -b {1}'.format(comp, stable_branch))

    # The monitoring repo is recorded under a different BOM service key.
    bom_key = ('monitoring-daemon' if comp == 'spinnaker-monitoring'
               else comp)
    version_tag_build = 'version-{0}'.format(
        self.__bom_dict[SERVICES][bom_key][VERSION])
    # Drop the trailing '-<build>' suffix to get the bare version tag.
    version_tag = version_tag_build[:version_tag_build.rindex('-')]

    repo_to_push = ('[email protected]:{owner}/{comp}.git'
                    .format(owner=self.__github_publisher, comp=comp))
    check_run_quick('git -C {comp} remote add release {url}'
                    .format(comp=comp, url=repo_to_push))
    check_run_quick('git -C {comp} push release {branch}'
                    .format(comp=comp, branch=stable_branch))

    repo = self.__github.get_repo(
        '{owner}/{comp}'.format(owner=self.__github_publisher, comp=comp))
    existing_tag_names = [tag.name for tag in repo.get_tags()]
    if version_tag not in existing_tag_names:
      # The tag doesn't exist and we need to push a tag.
      print ('pushing version tag {tag} to {owner}/{comp}'
             .format(tag=version_tag, owner=self.__github_publisher, comp=comp))
      check_run_quick('git -C {comp} push release {tag}'
                      .format(comp=comp, tag=version_tag))
开发者ID:danielpeach,项目名称:spinnaker,代码行数:48,代码来源:publish_bom.py
示例18: __get_pod_name
def __get_pod_name(self, k8s_namespace, service):
"""Determine the pod name for the deployed service."""
options = self.options
response = check_run_quick(
'kubectl {context} get pods --namespace {namespace}'
' | gawk -F "[[:space:]]+" "/{service}-v/ {{print \\$1}}" | tail -1'
.format(context=('--context {0}'.format(options.k8s_account_context)
if options.k8s_account_context
else ''),
namespace=k8s_namespace,
service=service))
pod = response.stdout.strip()
if not pod:
message = 'There is no pod for "{service}" in {namespace}'.format(
service=service, namespace=k8s_namespace)
logging.error(message)
raise ValueError(message)
if response.returncode != 0:
message = 'Could not find pod for "{service}".: {error}'.format(
service=service,
error=response.stdout.strip())
logging.error(message)
raise ValueError(message)
else:
print '{0} -> "{1}"'.format(service, response.stdout)
return response.stdout.strip()
开发者ID:jtk54,项目名称:spinnaker,代码行数:28,代码来源:validate_bom__deploy.py
示例19: __load_bom
def __load_bom(self):
"""Load the release candidate BOM into memory.
"""
bom_yaml_string = check_run_quick('hal version bom {0} --color false --quiet'
.format(self.__bom_version), echo=False).stdout.strip()
print 'bom yaml string pulled by hal: \n\n{0}\n\n'.format(bom_yaml_string)
self.__bom_dict = yaml.load(bom_yaml_string)
开发者ID:PioTi,项目名称:spinnaker,代码行数:7,代码来源:reconstruct_source.py
示例20: __open_changelog_pull_request
def __open_changelog_pull_request(self):
  """Opens a pull request from --github_publisher's repo to the upstream 'spinnaker' repo.

  Uses 'hub' to open the pull request (https://github.com/github/hub).
  This assumes that 'hub' is installed on the machine running this script.
  """
  title = 'Changelog for version {0}'.format(self.__version)
  branch_head = '{user}:{branch}'.format(
      user=self.__github_publisher, branch=self.__changelog_branch)
  with open('message', 'w') as msg_file:
    # TODO(jacobkiefer): Add notification to spinnaker/google-reviewers in body.
    # The PR body is just the title for now.
    msg_file.write(title)
  base = 'spinnaker:master'
  # Fix: dropped the stray msg='' keyword argument -- the format string
  # has no {msg} placeholder, so it was dead and misleading.
  check_run_quick('hub -C spinnaker.github.io pull-request -b {base} -h {head} -F message'
                  .format(base=base, head=branch_head))
开发者ID:edwinavalos,项目名称:spinnaker,代码行数:16,代码来源:publish_changelog.py
注:本文中的spinnaker.run.check_run_quick函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论