• 设为首页
  • 点击收藏
  • 手机版
    手机扫一扫访问
    迪恩网络手机版
  • 关注官方公众号
    微信扫一扫关注
    迪恩网络公众号

Python patch.object函数代码示例

原作者: [db:作者] 来自: [db:来源] 收藏 邀请

本文整理汇总了Python中tests.py2.patch.object函数的典型用法代码示例。如果您正苦于以下问题:Python object函数的具体用法?Python object怎么用?Python object使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。



在下文中一共展示了object函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。

示例1: setUp

    def setUp(self):
        """Build a HadoopJobRunner with every arg-building helper stubbed
        out, plus the expected command lines for new and old Hadoop."""
        super(StreamingArgsTestCase, self).setUp()
        self.runner = HadoopJobRunner(
            hadoop_bin='hadoop', hadoop_streaming_jar='streaming.jar',
            mr_job_script='my_job.py', stdin=BytesIO())
        self.runner._add_job_files_for_upload()

        self.runner._hadoop_version = '0.20.204'

        # stub each helper that contributes a piece of the streaming
        # command line with a recognizable placeholder value
        stub_specs = [
            ('_upload_args', ['new_upload_args']),
            ('_pre_0_20_upload_args', ['old_upload_args']),
            ('_hadoop_args_for_step', ['hadoop_args_for_step']),
            ('_hdfs_step_input_files', ['hdfs_step_input_files']),
            ('_hdfs_step_output_dir', 'hdfs_step_output_dir'),
        ]
        for method_name, fake_result in stub_specs:
            self.start(patch.object(
                self.runner, method_name, return_value=fake_result))

        self.runner._script_path = 'my_job.py'

        # expected args on 0.20+ Hadoop: upload args come before -input
        self._new_basic_args = [
            'hadoop', 'jar', 'streaming.jar',
            'new_upload_args', 'hadoop_args_for_step',
            '-input', 'hdfs_step_input_files',
            '-output', 'hdfs_step_output_dir']

        # expected args on pre-0.20 Hadoop: upload args come last
        self._old_basic_args = [
            'hadoop', 'jar', 'streaming.jar',
            'hadoop_args_for_step',
            '-input', 'hdfs_step_input_files',
            '-output', 'hdfs_step_output_dir',
            'old_upload_args']
开发者ID:nilesh-molankar,项目名称:mrjob,代码行数:32,代码来源:test_hadoop.py


示例2: test_find_hadoop_streaming_jar

    def test_find_hadoop_streaming_jar(self):
        """find_hadoop_streaming_jar() should return the path of a
        streaming jar found under the given dir, or None if there isn't one.

        Note: os.walk() yields (dirpath, dirnames, filenames) with *lists*
        of names; the filenames entries below must be lists, not bare
        strings, or the code under test would iterate single characters.
        """
        # not just any jar will do
        with patch.object(os, 'walk', return_value=[
                ('/some_dir', [], ['mason.jar'])]):
            self.assertEqual(find_hadoop_streaming_jar('/some_dir'), None)

        # should match streaming jar (the original asserted None here,
        # contradicting its own comment -- the jar should be found)
        with patch.object(os, 'walk', return_value=[
                ('/some_dir', [], ['hadoop-0.20.2-streaming.jar'])]):
            self.assertEqual(
                find_hadoop_streaming_jar('/some_dir'),
                '/some_dir/hadoop-0.20.2-streaming.jar')

        # shouldn't find anything in an empty dir
        with patch.object(os, 'walk', return_value=[]):
            self.assertEqual(find_hadoop_streaming_jar('/some_dir'), None)
开发者ID:nilesh-molankar,项目名称:mrjob,代码行数:14,代码来源:test_hadoop.py


示例3: test_verbose

 def test_verbose(self):
     """With verbose=True, DEBUG messages should reach stderr as well."""
     fake_stderr = StringIO()
     with patch.object(sys, 'stderr', fake_stderr):
         MRJob.set_up_logging(verbose=True)
         logger = logging.getLogger('__main__')
         logger.info('INFO')
         logger.debug('DEBUG')
         self.assertEqual(fake_stderr.getvalue(), 'INFO\nDEBUG\n')
开发者ID:Yelp,项目名称:mrjob,代码行数:7,代码来源:test_launch.py


示例4: mrjob_conf_patcher

def mrjob_conf_patcher(substitute_conf=EMPTY_MRJOB_CONF):
    """Return a patcher that makes runners read opts from
    *substitute_conf* instead of any real mrjob.conf file."""
    def fake_load_opts(runner_alias, conf_paths=None):
        # mimic load_opts_from_mrjob_confs()'s list of (path, opts) pairs
        opts = substitute_conf.get('runners', {}).get(runner_alias, {})
        return [(None, opts)]

    return patch.object(
        runner, 'load_opts_from_mrjob_confs', fake_load_opts)
开发者ID:Yelp,项目名称:mrjob,代码行数:7,代码来源:sandbox.py


示例5: test_kill_persistent_cluster

 def test_kill_persistent_cluster(self):
     """_cleanup_cluster() should delete the cluster named by cluster_id."""
     with no_handlers_for_logger("mrjob.dataproc"):
         runner = self._quick_runner()
         delete_patch = patch.object(
             mrjob.dataproc.DataprocJobRunner, "_api_cluster_delete")
         with delete_patch as mock_delete:
             runner._opts["cluster_id"] = "j-MOCKCLUSTER0"
             runner._cleanup_cluster()
             self.assertTrue(mock_delete.called)
开发者ID:davidmarin,项目名称:mrjob,代码行数:7,代码来源:test_dataproc.py


示例6: test_path_join

    def test_path_join(self):
        """path_join() should delegate to join() with the same args."""
        filesystem = Filesystem()

        with patch.object(filesystem, 'join'):
            # suppress any logging path_join() does on the way through
            with no_handlers_for_logger('mrjob.fs.base'):
                filesystem.path_join('foo', 'bar')

            filesystem.join.assert_called_once_with('foo', 'bar')
开发者ID:kartheek6,项目名称:mrjob,代码行数:8,代码来源:test_base.py


示例7: test_path_exists

    def test_path_exists(self):
        """path_exists() should delegate to exists() with the same arg."""
        filesystem = Filesystem()

        with patch.object(filesystem, "exists"):
            # suppress any logging path_exists() does on the way through
            with no_handlers_for_logger("mrjob.fs.base"):
                filesystem.path_exists("foo")

            filesystem.exists.assert_called_once_with("foo")
开发者ID:sebratt,项目名称:mrjob,代码行数:8,代码来源:test_base.py


示例8: test_default_options

 def test_default_options(self):
     """By default, INFO messages reach stderr but DEBUG ones do not."""
     with no_handlers_for_logger('__main__'):
         fake_stderr = StringIO()
         with patch.object(sys, 'stderr', fake_stderr):
             MRJob.set_up_logging()
             logger = logging.getLogger('__main__')
             logger.info('INFO')
             logger.debug('DEBUG')
             self.assertEqual(fake_stderr.getvalue(), 'INFO\n')
开发者ID:etiennebatise,项目名称:mrjob,代码行数:8,代码来源:test_launch.py


示例9: test_put_part_size_mb

    def test_put_part_size_mb(self):
        """put() should pass part_size_mb through as the blob chunk size."""
        src_path = self.makefile('foo', contents=b'bar')
        dest_uri = 'gs://bar-files/foo'
        self.storage_client().bucket('bar-files').create()

        with patch.object(GCSFilesystem, '_blob') as blob_method:
            self.fs.put(src_path, dest_uri, part_size_mb=99999)
            blob_method.assert_called_once_with(dest_uri, chunk_size=99999)
开发者ID:Affirm,项目名称:mrjob,代码行数:8,代码来源:test_gcs.py


示例10: test_libjars_attr_relative_path

    def test_libjars_attr_relative_path(self):
        """Relative LIBJARS entries resolve against the job script's
        directory; absolute entries are passed through unchanged."""
        script_dir = os.path.dirname(MRJob.mr_job_script())

        with patch.object(MRJob, "LIBJARS", ["cookie.jar", "/left/dora.jar"]):
            job = MRJob()

            expected = [os.path.join(script_dir, "cookie.jar"),
                        "/left/dora.jar"]
            self.assertEqual(job.job_runner_kwargs()["libjars"], expected)
开发者ID:davidmarin,项目名称:mrjob,代码行数:9,代码来源:test_job.py


示例11: setUp

    def setUp(self):
        """Build a HadoopJobRunner whose arg-building helpers return
        recognizable '<placeholder>' values, pinned to Hadoop 2.7.1."""
        super(StreamingArgsTestCase, self).setUp()
        self.runner = HadoopJobRunner(
            hadoop_bin='hadoop', hadoop_streaming_jar='<streaming jar>',
            mr_job_script='my_job.py', stdin=BytesIO())
        self.runner._add_job_files_for_upload()

        # stub each helper on the runner with a placeholder return value
        runner_stubs = [
            ('_upload_args', ['<upload args>']),
            ('_hadoop_args_for_step', ['<hadoop args for step>']),
            ('_hdfs_step_input_files', ['<hdfs step input files>']),
            ('_hdfs_step_output_dir', '<hdfs step output dir>'),
        ]
        for method_name, fake_result in runner_stubs:
            self.start(patch.object(
                self.runner, method_name, return_value=fake_result))

        self.start(patch.object(HadoopFilesystem, 'get_hadoop_version',
                                return_value='2.7.1'))
        self.runner._script_path = 'my_job.py'
开发者ID:Milkigit,项目名称:mrjob,代码行数:18,代码来源:test_hadoop.py


示例12: test_libjars_attr_relative_path

    def test_libjars_attr_relative_path(self):
        """Relative LIBJARS entries resolve against the job script's
        directory; absolute entries are passed through unchanged."""
        script_dir = os.path.dirname(MRJob.mr_job_script())

        with patch.object(MRJob, 'LIBJARS', ['cookie.jar', '/left/dora.jar']):
            job = MRJob()

            expected = [os.path.join(script_dir, 'cookie.jar'),
                        '/left/dora.jar']
            self.assertEqual(job._runner_kwargs()['libjars'], expected)
开发者ID:okomestudio,项目名称:mrjob,代码行数:9,代码来源:test_job.py


示例13: test_kill_cluster_if_successful

 def test_kill_cluster_if_successful(self):
     """If cleanup is set to kill the cluster, mrjob should delete it
     regardless of whether the job succeeded."""
     with no_handlers_for_logger('mrjob.dataproc'):
         runner = self._quick_runner()
         delete_patch = patch.object(
             mrjob.dataproc.DataprocJobRunner, '_api_cluster_delete')
         with delete_patch as mock_delete:
             runner._ran_job = True
             runner._cleanup_cluster()
             self.assertTrue(mock_delete.called)
开发者ID:Jeremyfanfan,项目名称:mrjob,代码行数:9,代码来源:test_dataproc.py


示例14: test_configuration_translation

    def test_configuration_translation(self):
        """A Hadoop 1-style jobconf key should also be emitted under its
        Hadoop 2 ('mapreduce.*') spelling."""
        job = MRWordCount(
            ["--jobconf", "mapred.jobtracker.maxtasks.per.job=1"])

        with job.make_runner() as runner:
            with no_handlers_for_logger("mrjob.runner"):
                with patch.object(
                        runner, "get_hadoop_version", return_value="2.7.1"):
                    expected = [
                        "-D", "mapred.jobtracker.maxtasks.per.job=1",
                        "-D", "mapreduce.jobtracker.maxtasks.perjob=1",
                    ]
                    self.assertEqual(
                        runner._hadoop_args_for_step(0), expected)
开发者ID:irskep,项目名称:mrjob,代码行数:10,代码来源:test_runner.py


示例15: test_no_mrjob_confs

    def test_no_mrjob_confs(self):
        """The job should run end-to-end when no mrjob.conf exists."""
        with patch.object(conf, 'real_mrjob_conf_path', return_value=None):
            job = MRIncrementerJob(['-r', 'inline', '--times', '2'])
            job.sandbox(stdin=BytesIO(b'0\n1\n2\n'))

            with job.make_runner() as runner:
                runner.run()
                # each input value gets incremented --times (2) times
                values = sorted(job.parse_output_line(line)[1]
                                for line in runner.stream_output())
                self.assertEqual(values, [2, 3, 4])
开发者ID:ashleymiller,项目名称:mrjob,代码行数:10,代码来源:test_inline.py


示例16: test_too_many_jobs_on_the_dance_floor

    def test_too_many_jobs_on_the_dance_floor(self):
        """With more than one job running, single-job termination should
        raise IOError rather than guess which job to kill."""

        def popen_stub(*args, **kwargs):
            proc = Mock()
            proc.communicate.return_value = (
                b"2 jobs currently running\n", b'')
            return proc

        with patch.object(ssh, 'Popen', side_effect=popen_stub):
            self.assertRaises(IOError, ssh.ssh_terminate_single_job,
                              ['ssh_bin'], 'address', 'key.pem')
开发者ID:DanisHack,项目名称:mrjob,代码行数:10,代码来源:test_ssh.py


示例17: test_no_output

 def test_no_output(self):
     """With --no-output, runner output is not echoed to stdout/stderr."""
     launcher = MRJobLauncher(args=['--no-conf', '--no-output', ''])
     launcher.sandbox()
     with patch.object(launcher, 'make_runner') as make_runner_mock:
         fake_runner = Mock()
         _mock_context_mgr(make_runner_mock, fake_runner)
         fake_runner.stream_output.return_value = ['a line']
         launcher.run_job()
         # the fake runner produced output, but none of it was written
         self.assertEqual(launcher.stdout.getvalue(), b'')
         self.assertEqual(launcher.stderr.getvalue(), b'')
开发者ID:DanisHack,项目名称:mrjob,代码行数:10,代码来源:test_launch.py


示例18: test_junk_list_output

    def test_junk_list_output(self):
        """Unparseable job-list output should raise IOError."""

        def popen_stub(*args, **kwargs):
            proc = Mock()
            proc.communicate.return_value = (
                b"yah output, its gahbage\n", b'')
            return proc

        with patch.object(ssh, 'Popen', side_effect=popen_stub):
            self.assertRaises(IOError, ssh.ssh_terminate_single_job,
                              ['ssh_bin'], 'address', 'key.pem')
开发者ID:DanisHack,项目名称:mrjob,代码行数:10,代码来源:test_ssh.py


示例19: setUp

    def setUp(self):
        """Patch cmd.error() to raise ValueError so tests can catch it."""
        def raise_value_error(msg=None):
            # preserve the optional-message signature of error()
            if msg:
                raise ValueError(msg)
            raise ValueError

        patcher = patch.object(cmd, 'error', side_effect=raise_value_error)
        patcher.start()
        self.addCleanup(patcher.stop)
开发者ID:gitbenedict,项目名称:mrjob,代码行数:10,代码来源:test_cmd.py


示例20: test_dance_floor_is_empty

    def test_dance_floor_is_empty(self):
        """With zero jobs running, termination quietly returns None."""

        def popen_stub(*args, **kwargs):
            proc = Mock()
            proc.communicate.return_value = (
                b"0 jobs currently running\n", b'')
            return proc

        with patch.object(ssh, 'Popen', side_effect=popen_stub):
            result = ssh.ssh_terminate_single_job(
                ['ssh_bin'], 'address', 'key.pem')
            self.assertEqual(None, result)
开发者ID:DanisHack,项目名称:mrjob,代码行数:11,代码来源:test_ssh.py



注:本文中的tests.py2.patch.object函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


鲜花

握手

雷人

路过

鸡蛋
该文章已有0人参与评论

请发表评论

全部评论

专题导读
上一篇:
Python pyunit_utils.find_grid_runtime函数代码示例发布时间:2022-05-27
下一篇:
Python py2.patch函数代码示例发布时间:2022-05-27
热门推荐
阅读排行榜

扫描微信二维码

查看手机版网站

随时了解更新最新资讯

139-2527-9053

在线客服(服务时间 9:00~18:00)

在线QQ客服
地址:深圳市南山区西丽大学城创智工业园
电邮:jeky_zhao#qq.com
移动电话:139-2527-9053

Powered by 互联科技 X3.4© 2001-2023 极客世界.|Sitemap