Python shellutils.run_cmd Function Code Examples


This article collects typical usage examples of the Python function sparktestsupport.shellutils.run_cmd. If you have been wondering what run_cmd does, how to call it, and what real-world uses look like, the curated examples below should help.



The 20 run_cmd code examples below are ordered by popularity by default.
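
Before working through the examples, it helps to know roughly what is being called. The sketch below is not Spark's authoritative implementation; it is a minimal reconstruction inferred only from the call sites in this article (commands passed as argument lists, plus an optional return_output=True that yields captured stdout). The real function lives in Spark's dev/sparktestsupport/shellutils.py and may differ in detail.

import subprocess
import sys

def run_cmd(cmd, return_output=False):
    """Minimal sketch of run_cmd, assuming only the behavior implied by the
    examples below; not the authoritative Spark implementation."""
    if not isinstance(cmd, list):
        cmd = cmd.split()  # tolerate a plain string command
    try:
        if return_output:
            # Capture and return stdout as a string (cf. Example 9)
            return subprocess.check_output(cmd, universal_newlines=True)
        return subprocess.check_call(cmd)
    except subprocess.CalledProcessError as e:
        # Fail fast: report the failing command and propagate its exit code
        print("[error] running %s failed with exit code %d"
              % (' '.join(e.cmd), e.returncode))
        sys.exit(e.returncode)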

Example 1: run_python_tests

def run_python_tests(test_modules):
    set_title_and_block("Running PySpark tests", "BLOCK_PYSPARK_UNIT_TESTS")

    command = [os.path.join(SPARK_HOME, "python", "run-tests")]
    if test_modules != [modules.root]:
        command.append("--modules=%s" % ','.join(m.name for m in test_modules))
    run_cmd(command)
Developer: wyg1990, Project: spark, Lines: 7, Source: run-tests.py


Example 2: run_sparkr_tests

def run_sparkr_tests():
    set_title_and_block("Running SparkR tests", "BLOCK_SPARKR_UNIT_TESTS")

    if which("R"):
        run_cmd([os.path.join(SPARK_HOME, "R", "run-tests.sh")])
    else:
        print("Ignoring SparkR tests as R was not found in PATH")
Developer: KurtKramer, Project: spark, Lines: 7, Source: run-tests.py


Example 3: identify_changed_files_from_git_commits

def identify_changed_files_from_git_commits(patch_sha, target_branch=None, target_ref=None):
    """
    Given a git commit and target ref, use the set of files changed in the diff in order to
    determine which modules' tests should be run.

    >>> [x.name for x in determine_modules_for_files( \
            identify_changed_files_from_git_commits("fc0a1475ef", target_ref="5da21f07"))]
    ['graphx']
    >>> 'root' in [x.name for x in determine_modules_for_files( \
         identify_changed_files_from_git_commits("50a0496a43", target_ref="6765ef9"))]
    True
    """
    if target_branch is None and target_ref is None:
        raise AttributeError("must specify either target_branch or target_ref")
    elif target_branch is not None and target_ref is not None:
        raise AttributeError("must specify either target_branch or target_ref, not both")
    if target_branch is not None:
        diff_target = target_branch
        run_cmd(['git', 'fetch', 'origin', str(target_branch+':'+target_branch)])
    else:
        diff_target = target_ref
    raw_output = subprocess.check_output(['git', 'diff', '--name-only', patch_sha, diff_target],
                                         universal_newlines=True)
    # Remove any empty strings
    return [f for f in raw_output.split('\n') if f]
Developer: KurtKramer, Project: spark, Lines: 25, Source: run-tests.py


Example 4: detect_binary_inop_with_mima

def detect_binary_inop_with_mima(hadoop_version):
    build_profiles = get_hadoop_profiles(hadoop_version) + modules.root.build_profile_flags
    set_title_and_block("Detecting binary incompatibilities with MiMa", "BLOCK_MIMA")
    profiles = " ".join(build_profiles)
    print("[info] Detecting binary incompatibilities with MiMa using SBT with these profiles: ",
          profiles)
    run_cmd([os.path.join(SPARK_HOME, "dev", "mima"), profiles])
Developer: heuermh, Project: spark, Lines: 7, Source: run-tests.py


Example 5: run_java_style_checks

def run_java_style_checks(build_profiles):
    set_title_and_block("Running Java style checks", "BLOCK_JAVA_STYLE")
    # The build profiles are passed to the SBT Checkstyle run as well, because
    # the previous build's output appears to be reused by Checkstyle and
    # affects its results. See SPARK-27130.
    profiles = " ".join(build_profiles)
    print("[info] Checking Java style using SBT with these profiles: ", profiles)
    run_cmd([os.path.join(SPARK_HOME, "dev", "sbt-checkstyle"), profiles])
Developer: heuermh, Project: spark, Lines: 7, Source: run-tests.py


Example 6: exec_maven

def exec_maven(mvn_args=()):
    """Will call Maven in the current directory with the list of mvn_args passed
    in and returns the subprocess for any further processing"""

    zinc_port = get_zinc_port()
    os.environ["ZINC_PORT"] = "%s" % zinc_port
    zinc_flag = "-DzincPort=%s" % zinc_port
    flags = [os.path.join(SPARK_HOME, "build", "mvn"), "--force", zinc_flag]
    run_cmd(flags + mvn_args)
Developer: advancedxy, Project: spark, Lines: 9, Source: run-tests.py


Example 7: run_sparkr_style_checks

def run_sparkr_style_checks():
    set_title_and_block("Running R style checks", "BLOCK_R_STYLE")

    if which("R"):
        # The R style check should be executed after `install-dev.sh`, since
        # warnings about `no visible global function definition` appear
        # without the installation. See also: SPARK-9121.
        run_cmd([os.path.join(SPARK_HOME, "dev", "lint-r")])
    else:
        print("Ignoring SparkR style check as R was not found in PATH")
Developer: KurtKramer, Project: spark, Lines: 10, Source: run-tests.py


Example 8: build_spark_documentation

def build_spark_documentation():
    set_title_and_block("Building Spark Documentation", "BLOCK_DOCUMENTATION")
    os.environ["PRODUCTION"] = "1 jekyll build"

    os.chdir(os.path.join(SPARK_HOME, "docs"))

    jekyll_bin = which("jekyll")

    if not jekyll_bin:
        print("[error] Cannot find a version of `jekyll` on the system; please",
              " install one and retry to build documentation.")
        sys.exit(int(os.environ.get("CURRENT_BLOCK", 255)))
    else:
        run_cmd([jekyll_bin, "build"])

    os.chdir(SPARK_HOME)
Developer: KurtKramer, Project: spark, Lines: 16, Source: run-tests.py


Example 9: run_pr_checks

def run_pr_checks(pr_tests, ghprb_actual_commit, sha1):
    """
    Executes a set of pull request checks to ease development and report issues with various
    components such as style, linting, dependencies, compatibilities, etc.
    @return a list of messages to post back to Github
    """
    # Ensure we save off the current HEAD to revert to
    current_pr_head = run_cmd(['git', 'rev-parse', 'HEAD'], return_output=True).strip()
    pr_results = list()

    for pr_test in pr_tests:
        test_name = pr_test + '.sh'
        pr_results.append(run_cmd(['bash', os.path.join(SPARK_HOME, 'dev', 'tests', test_name),
                                   ghprb_actual_commit, sha1],
                                  return_output=True).rstrip())
        # Ensure, after each test, that we're back on the current PR
        run_cmd(['git', 'checkout', '-f', current_pr_head])
    return pr_results
Developer: CrazyJacky, Project: spark, Lines: 18, Source: run-tests-jenkins.py
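
Example 9 is the only snippet in this collection that passes return_output=True. Consistent with the sketch near the top of this article, run_cmd is then expected to return the command's captured stdout as a string rather than streaming it to the console, which is why the results are trimmed with .strip()/.rstrip() before use.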


Example 10: run_python_tests

def run_python_tests(test_modules, parallelism, with_coverage=False):
    set_title_and_block("Running PySpark tests", "BLOCK_PYSPARK_UNIT_TESTS")

    if with_coverage:
        # Coverage makes the PySpark tests flaky due to heavy parallelism.
        # When we run PySpark tests with coverage, parallelism is capped at 4
        # for now as a workaround.
        parallelism = 4
        script = "run-tests-with-coverage"
    else:
        script = "run-tests"
    command = [os.path.join(SPARK_HOME, "python", script)]
    if test_modules != [modules.root]:
        command.append("--modules=%s" % ','.join(m.name for m in test_modules))
    command.append("--parallelism=%i" % parallelism)
    run_cmd(command)

    if with_coverage:
        post_python_tests_results()
Developer: heuermh, Project: spark, Lines: 19, Source: run-tests.py


Example 11: run_python_style_checks

def run_python_style_checks():
    set_title_and_block("Running Python style checks", "BLOCK_PYTHON_STYLE")
    run_cmd([os.path.join(SPARK_HOME, "dev", "lint-python")])
Developer: KurtKramer, Project: spark, Lines: 3, Source: run-tests.py


Example 12: run_java_style_checks

def run_java_style_checks():
    set_title_and_block("Running Java style checks", "BLOCK_JAVA_STYLE")
    run_cmd([os.path.join(SPARK_HOME, "dev", "lint-java")])
Developer: KurtKramer, Project: spark, Lines: 3, Source: run-tests.py


Example 13: run_python_packaging_tests

def run_python_packaging_tests():
    set_title_and_block("Running PySpark packaging tests", "BLOCK_PYSPARK_PIP_TESTS")
    command = [os.path.join(SPARK_HOME, "dev", "run-pip-tests")]
    run_cmd(command)
Developer: KurtKramer, Project: spark, Lines: 4, Source: run-tests.py


Example 14: post_python_tests_results

def post_python_tests_results():
    if "SPARK_TEST_KEY" not in os.environ:
        print("[error] 'SPARK_TEST_KEY' environment variable was not set. Unable to post "
              "PySpark coverage results.")
        sys.exit(1)
    spark_test_key = os.environ.get("SPARK_TEST_KEY")
    # The steps below upload HTMLs to 'github.com/spark-test/pyspark-coverage-site'.
    # 1. Clone PySpark coverage site.
    run_cmd([
        "git",
        "clone",
        "https://spark-test:%[email protected]/spark-test/pyspark-coverage-site.git" % spark_test_key])
    # 2. Remove existing HTMLs.
    run_cmd(["rm", "-fr"] + glob.glob("pyspark-coverage-site/*"))
    # 3. Copy generated coverage HTMLs.
    for f in glob.glob("%s/python/test_coverage/htmlcov/*" % SPARK_HOME):
        shutil.copy(f, "pyspark-coverage-site/")
    os.chdir("pyspark-coverage-site")
    try:
        # 4. Check out to a temporary branch.
        run_cmd(["git", "symbolic-ref", "HEAD", "refs/heads/latest_branch"])
        # 5. Add all the files.
        run_cmd(["git", "add", "-A"])
        # 6. Commit current HTMLs.
        run_cmd([
            "git",
            "commit",
            "-am",
            "Coverage report at latest commit in Apache Spark",
            '--author="Apache Spark Test Account <[email protected]>"'])
        # 7. Delete the old branch.
        run_cmd(["git", "branch", "-D", "gh-pages"])
        # 8. Rename the temporary branch to master.
        run_cmd(["git", "branch", "-m", "gh-pages"])
        # 9. Finally, force update to our repository.
        run_cmd(["git", "push", "-f", "origin", "gh-pages"])
    finally:
        os.chdir("..")
Developer: heuermh, Project: spark, Lines: 38, Source: run-tests.py


Example 15: run_scala_style_checks

def run_scala_style_checks(build_profiles):
    set_title_and_block("Running Scala style checks", "BLOCK_SCALA_STYLE")
    profiles = " ".join(build_profiles)
    print("[info] Checking Scala style using SBT with these profiles: ", profiles)
    run_cmd([os.path.join(SPARK_HOME, "dev", "lint-scala"), profiles])
Developer: heuermh, Project: spark, Lines: 5, Source: run-tests.py


Example 16: detect_binary_inop_with_mima

def detect_binary_inop_with_mima(hadoop_version):
    build_profiles = get_hadoop_profiles(hadoop_version) + modules.root.build_profile_flags
    set_title_and_block("Detecting binary incompatibilities with MiMa", "BLOCK_MIMA")
    run_cmd([os.path.join(SPARK_HOME, "dev", "mima")] + build_profiles)
Developer: KurtKramer, Project: spark, Lines: 4, Source: run-tests.py


Example 17: exec_maven

def exec_maven(mvn_args=()):
    """Will call Maven in the current directory with the list of mvn_args passed
    in and returns the subprocess for any further processing"""

    run_cmd([os.path.join(SPARK_HOME, "build", "mvn")] + mvn_args)
Developer: nraychaudhuri, Project: spark, Lines: 5, Source: run-tests.py


Example 18: run_build_tests

def run_build_tests():
    set_title_and_block("Running build tests", "BLOCK_BUILD_TESTS")
    run_cmd([os.path.join(SPARK_HOME, "dev", "test-dependencies.sh")])
Developer: KurtKramer, Project: spark, Lines: 4, Source: run-tests.py


Example 19: detect_binary_inop_with_mima

def detect_binary_inop_with_mima():
    set_title_and_block("Detecting binary incompatibilities with MiMa", "BLOCK_MIMA")
    run_cmd([os.path.join(SPARK_HOME, "dev", "mima")])
Developer: nraychaudhuri, Project: spark, Lines: 3, Source: run-tests.py


Example 20: main

def main():
    opts = parse_opts()
    # Ensure the user home directory (HOME) is valid and is an absolute directory
    if not USER_HOME or not os.path.isabs(USER_HOME):
        print("[error] Cannot determine your home directory as an absolute path;",
              " ensure the $HOME environment variable is set properly.")
        sys.exit(1)

    os.chdir(SPARK_HOME)

    rm_r(os.path.join(SPARK_HOME, "work"))
    rm_r(os.path.join(USER_HOME, ".ivy2", "local", "org.apache.spark"))
    rm_r(os.path.join(USER_HOME, ".ivy2", "cache", "org.apache.spark"))

    os.environ["CURRENT_BLOCK"] = str(ERROR_CODES["BLOCK_GENERAL"])

    java_exe = determine_java_executable()

    if not java_exe:
        print("[error] Cannot find a version of `java` on the system; please",
              " install one and retry.")
        sys.exit(2)

    java_version = determine_java_version(java_exe)

    if java_version.minor < 8:
        print("[warn] Java 8 tests will not run because JDK version is < 1.8.")

    # install SparkR
    if which("R"):
        run_cmd([os.path.join(SPARK_HOME, "R", "install-dev.sh")])
    else:
        print("Cannot install SparkR as R was not found in PATH")

    if os.environ.get("AMPLAB_JENKINS"):
        # if we're on the Amplab Jenkins build servers, set up variables
        # to reflect the environment settings
        build_tool = os.environ.get("AMPLAB_JENKINS_BUILD_TOOL", "sbt")
        hadoop_version = os.environ.get("AMPLAB_JENKINS_BUILD_PROFILE", "hadoop2.6")
        test_env = "amplab_jenkins"
        # add path for Python3 in Jenkins if we're calling from a Jenkins machine
        os.environ["PATH"] = "/home/anaconda/envs/py3k/bin:" + os.environ.get("PATH")
    else:
        # else we're running locally and can use local settings
        build_tool = "sbt"
        hadoop_version = os.environ.get("HADOOP_PROFILE", "hadoop2.6")
        test_env = "local"

    print("[info] Using build tool", build_tool, "with Hadoop profile", hadoop_version,
          "under environment", test_env)

    changed_modules = None
    changed_files = None
    if test_env == "amplab_jenkins" and os.environ.get("AMP_JENKINS_PRB"):
        target_branch = os.environ["ghprbTargetBranch"]
        changed_files = identify_changed_files_from_git_commits("HEAD", target_branch=target_branch)
        changed_modules = determine_modules_for_files(changed_files)
        excluded_tags = determine_tags_to_exclude(changed_modules)
    if not changed_modules:
        changed_modules = [modules.root]
        excluded_tags = []
    print("[info] Found the following changed modules:",
          ", ".join(x.name for x in changed_modules))

    # setup environment variables
    # note - the 'root' module doesn't collect environment variables for all
    # modules, because environment variables should not be set for a module
    # that hasn't changed, even when running the 'root' module. So here we
    # should use changed_modules rather than test_modules.
    test_environ = {}
    for m in changed_modules:
        test_environ.update(m.environ)
    setup_test_environ(test_environ)

    test_modules = determine_modules_to_test(changed_modules)

    # license checks
    run_apache_rat_checks()

    # style checks
    if not changed_files or any(f.endswith(".scala")
                                or f.endswith("scalastyle-config.xml")
                                for f in changed_files):
        run_scala_style_checks()
    if not changed_files or any(f.endswith(".java")
                                or f.endswith("checkstyle.xml")
                                or f.endswith("checkstyle-suppressions.xml")
                                for f in changed_files):
        # run_java_style_checks()
        pass
    if not changed_files or any(f.endswith(".py") for f in changed_files):
        run_python_style_checks()
    if not changed_files or any(f.endswith(".R") for f in changed_files):
        run_sparkr_style_checks()

    # determine if docs were changed and if we're inside the amplab environment
    # note - the below commented out until *all* Jenkins workers can get `jekyll` installed
    # if "DOCS" in changed_modules and test_env == "amplab_jenkins":
    #    build_spark_documentation()

    if any(m.should_run_build_tests for m in test_modules):
#......... remainder of the code omitted .........
Developer: KurtKramer, Project: spark, Lines: 101, Source: run-tests.py



Note: The sparktestsupport.shellutils.run_cmd examples above were compiled by 纯净天空 from GitHub, MSDocs, and other source-code hosting and documentation platforms. The snippets were selected from open-source projects contributed by many developers; copyright remains with the original authors, and distribution or use should follow each project's license. Do not republish without permission.

