• 设为首页
  • 点击收藏
  • 手机版
    手机扫一扫访问
    迪恩网络手机版
  • 关注官方公众号
    微信扫一扫关注
    迪恩网络公众号

Python threadpool.makeRequests函数代码示例

原作者: [db:作者] 来自: [db:来源] 收藏 邀请

本文整理汇总了Python中threadpool.makeRequests函数的典型用法代码示例。如果您正苦于以下问题:Python makeRequests函数的具体用法?Python makeRequests怎么用?Python makeRequests使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。



在下文中一共展示了makeRequests函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。

示例1: crawl

    def crawl(self, para_file):
        """Crawl all parameter lines from *para_file* using a thread pool.

        Lines are read from the file, stripped, and dispatched to
        ``self.retrieve`` in batches; the writer is flushed after each
        batch.  Crawling stops early when ``self.stopped`` is set.

        :param para_file: path to a text file with one crawl parameter
            per line.
        """
        if not self.writer and not self.method:
            # Nothing configured to retrieve with or write to.
            return

        pool = threadpool.ThreadPool(self.poolsize)
        parlst = list()
        # `with` guarantees the file is closed even if a worker raises
        # (the original left it open on error).
        with open(para_file, 'r') as fpara:
            for line in fpara:
                if self.stopped:
                    break  # Stop current crawling
                parlst.append(line.strip())
                if len(parlst) > 10:
                    # Dispatch the current batch and wait for completion.
                    # BUGFIX: the original used map(pool.putRequest, requests),
                    # which is lazy under Python 3 and would submit nothing.
                    requests = threadpool.makeRequests(self.retrieve, parlst)
                    for req in requests:
                        pool.putRequest(req)
                    pool.wait()
                    self.writer.flush()
                    del parlst[:]

            # Flush the last (partial) batch of lines in parlst.
            if not self.stopped:
                requests = threadpool.makeRequests(self.retrieve, parlst)
                for req in requests:
                    pool.putRequest(req)
                pool.wait()
                self.writer.flush()

        self.writer.close()
        if not self.stopped:
            logging.info('Retrieving finished.')
        else:
            logging.info('Retrieving interrupted.')
        return
开发者ID:spacelis,项目名称:tcrawl,代码行数:34,代码来源:crawler.py


示例2: start

    def start(self, jenkinsHost,
            jenkinsUser="root",
            jenkinsKey=globals.configJenkinsKey):
        """Record Jenkins connection settings and warm up the thread pool.

        Startup stays fast by connecting only a few threads eagerly; the
        rest connect on demand, so authentication problems can be fixed
        without taking the tool down.
        """
        self.__jenkinsHost = jenkinsHost
        self.__jenkinsUser = jenkinsUser
        self.__jenkinsKey = jenkinsKey

        # Queue one lightweight init request per worker thread.
        for worker in range(globals.threadPoolSize):
            for req in makeRequests(self.connectInit, [((worker,), {})]):
                globals.threadPool.putRequest(req)

        # Make a single connection to validate credentials; wait() makes
        # this step synchronous.
        for req in makeRequests(self.connect, [((0,), {})]):
            globals.threadPool.putRequest(req)
        globals.threadPool.wait()

        # Pre-establish the remaining connections asynchronously through
        # the connectRetry path so they are ready before they are needed
        # and all code paths get exercised during startup.
        for _ in range(globals.threadPoolSize - 1):
            for req in makeRequests(self.connectRetry, [((), {})]):
                globals.threadPool.putRequest(req)
开发者ID:akamat-ibm,项目名称:Autoport,代码行数:28,代码来源:mover.py


示例3: __init__

 def __init__(self):
     """Build the alarm-node tree and queue its background tasks.

     Configuration is read from the [autoLevel] section of
     config\\config.ini: queue sizes, thread-pool size, accumulation
     threshold, maximum alarm level, 'alarm' ("level-duration" comma
     pairs) and 'org' (one child AlarmNode per organisation id).
     """
     self.logger = logger
     cp = ConfigParser.SafeConfigParser()
     cp.read('config\config.ini')
     # Numeric settings from [autoLevel]; values are stored as strings
     # in the ini file, hence the explicit int conversions.
     self.req_que_size = (int)(cp.get('autoLevel', 'req_que_size'))
     self.resp_que_size = (int)(cp.get('autoLevel', 'resp_que_size'))
     self.threadpoolSize = (int)(cp.get('autoLevel', 'threadpoolSize'))
     self.accu_threshold = (int)(cp.get('autoLevel', 'accu_threshold'))
     self.max_alarm_level = (int)(cp.get('autoLevel', 'max_alarm_level'))
     self.threadpo = threadpool.ThreadPool(self.threadpoolSize)
     self.root = Node()
     self.reqQueue = Queue.Queue(maxsize=self.req_que_size)
     self.respQueue = Queue.Queue(maxsize=self.resp_que_size)
     self.alarmDura = {}
     children = {}
     # 'alarm' is a comma-separated list of "level-duration" pairs,
     # e.g. "1-30,2-60" -> {1: 30, 2: 60}.
     for i in cp.get('autoLevel', 'alarm').split(','):
         key = (int)(i.split('-')[0])
         value = (int)(i.split('-')[1])
         self.alarmDura[key] = value
     # One AlarmNode child per organisation id listed in 'org'; all
     # children share this instance's pool, durations and resp queue.
     for key in cp.get('autoLevel', 'org').split(','):
         children[(int)(key)] = AlarmNode(org=(int)(key), req_que_size=self.req_que_size,
                                          resp_que_size=self.resp_que_size, threadpo=self.threadpo,
                                          alarmDura=self.alarmDura, accu_threshold=self.accu_threshold,
                                          max_alarm_level=self.max_alarm_level, parentRespQueue=self.respQueue)
     self.logger.debug('init: %s, %s' % (self.alarmDura, children))
     self.root.setChildren(children)
     # Queue the two long-running background requests (each makeRequests
     # call with a single args item yields exactly one request object).
     self.alaMsgTask = threadpool.makeRequests(self.alaMsg, [(None, None)])
     self.scanTimeTask = threadpool.makeRequests(self.scanTime, [(None, None)])
     self.threadpo.putRequest(self.alaMsgTask[0])
     self.threadpo.putRequest(self.scanTimeTask[0])
开发者ID:lq08025107,项目名称:pyspider,代码行数:30,代码来源:AutoSco.py


示例4: simulateStep

    def simulateStep(self):
        """Advance the simulation one generation using the worker pool.

        The grid is split into `threadCount` horizontal stripes; phase 1
        counts neighbours for every stripe, phase 2 applies the life/death
        rules.  Each phase is fully awaited before the next begins.
        """
        threads = self.threadCount
        stripe = self.size / threads  # rows per worker
        work_args = []
        for idx in range(threads):
            first = stripe * idx
            work_args.append(([], {'firstRow': first, 'lastRow': first + stripe}))

        # Phase 1: neighbour counting.
        for req in threadpool.makeRequests(self.countAllNeighbours, work_args):
            self.pool.putRequest(req)
        self.pool.wait()

        # Phase 2: apply rules using the counts from phase 1.
        for req in threadpool.makeRequests(self.simulateLifeAndDeath, work_args):
            self.pool.putRequest(req)
        self.pool.wait()
开发者ID:wmax,项目名称:NeighborBasedAndSeedBasedGoL,代码行数:10,代码来源:GoLObjectOriented.py


示例5: thread_use

def thread_use():
    """Demonstrate both argument forms accepted by threadpool.makeRequests."""
    pool = threadpool.ThreadPool(10)

    # Single-parameter form: each list element is passed as the sole
    # positional argument of the callable.
    for req in threadpool.makeRequests(thread_sing_parameter, [1, 2]):
        pool.putRequest(req)
    pool.wait()

    # Multi-parameter form: each item is an (args, kwargs) pair.
    multi_options = [([1, 2, 3], None), ([4, 5, 6], None)]
    for req in threadpool.makeRequests(thread_multiple_parameter, multi_options):
        pool.putRequest(req)
    pool.wait()
开发者ID:Warrenlive,项目名称:code-snippet,代码行数:13,代码来源:x_threadpool.py


示例6: check_all_instance

def check_all_instance(instance_ports):
    """Check every instance/port concurrently on the module-level pool.

    Success and failure are reported through the run_success / run_fault
    callbacks; the call blocks until all checks complete.
    """
    work_items = [(port_info, {}) for port_info in instance_ports]
    requests = threadpool.makeRequests(check_one_instance, work_items,
                                       run_success, run_fault)
    for req in requests:
        pool.putRequest(req)
    log.debug("pool.wait for check instance")
    pool.wait()
    log.debug("pool.wait end")
开发者ID:caimaoy,项目名称:uhp,代码行数:7,代码来源:monitor.py


示例7: sumbit

 def sumbit(self, record_time):
     """Queue a collection task for *record_time* on the instance pool.

     NOTE(review): the method name looks like a typo for "submit", but it
     is part of the public interface and is kept unchanged.  Also note
     that `[(record_time)]` is a one-element list, not a tuple, so
     makeRequests treats it as a single positional argument.
     """
     log.info("i did at %s %d" % (util.get_local_time(), record_time))
     collector = Collector()
     for req in threadpool.makeRequests(collector.collect, [(record_time)],
                                        self.print_result):
         log.info("get request")
         self.pool.putRequest(req)
开发者ID:renyinew,项目名称:uhp,代码行数:7,代码来源:collect.py


示例8: pattern4

def pattern4(fundinfolist4, maxreturn, threadnum):
    """Rank the pattern3 result funds by current manager performance.

    Fetches buyability and manager performance for each fund concurrently,
    sorts by performance (descending), then prints up to *maxreturn* fund
    links.  NOTE(review): the top funds are appended back into
    *fundinfolist4* itself, duplicating entries in the caller's list --
    confirm this is intended.
    """
    #Check fund manager perf
    print 'Fund list 4: 基于pattern3的结果,排序当前基金经理业绩'

    #Get manager perf if buyable
    def checkBuyableAndGetPerf(fund):
        isBuyable(fund)
        if fund.buyable:
            getManagerPerf(fund)
        else:
            fund.managerperf = '-1000' #With this, no need do extra filter on buyable

    #Get fund manager perf
    pool2 = threadpool.ThreadPool(threadnum)
    requests2 = threadpool.makeRequests(checkBuyableAndGetPerf, fundinfolist4)
    [pool2.putRequest(req) for req in requests2]
    pool2.wait()

    # Sort descending on manager performance (stored as a string).
    fundInfoListOrdered4 = sorted(fundinfolist4, key=lambda fund:string.atof(fund.managerperf),reverse=True)

    meetnum = 0  # NOTE(review): assigned but never used
    fundlinkTemp = 'http://fund.eastmoney.com/%s.html'
    jjjllinkTemp = 'http://fund.eastmoney.com/f10/jjjl_%s.html'

    for i in range(0, min(maxreturn,len(fundInfoListOrdered4))):
        fundinfolist4.append(fundInfoListOrdered4[i])
        fundlink = fundlinkTemp % fundInfoListOrdered4[i].fundcode
        jjjllink = jjjllinkTemp % fundInfoListOrdered4[i].fundcode
        print '   %s %s %s 净值:%s 业绩:%s Duration:%s' % (fundlink, jjjllink, fundInfoListOrdered4[i].name,
                                                       fundInfoListOrdered4[i].latestvalue,
                                                       fundInfoListOrdered4[i].managerperf.encode('utf-8'),
                                                       fundInfoListOrdered4[i].managerduration.encode('utf-8'))
开发者ID:micerin,项目名称:PythonExercise,代码行数:32,代码来源:AllFundList.py


示例9: start

    def start(self, norm_target_func=None, *args, **kwargs):
        """Run every registered poc callable against all seed targets.

        :param norm_target_func: optional normalizer applied to each raw
            target line (extra *args/**kwargs are forwarded to it).
        :return: (total_num, success_num) counters.
        """

        def args_generator(poc_name):
            # One shared template dict; 'target' is rewritten per target
            # and a deep copy is yielded so each request gets a snapshot.
            func_args = {
                'poc_name': poc_name,
                'options': self.options,
                'success': None,
                'poc_ret': {},
            }
            for target in self.seed_targets:
                # NOTE(review): 'options' still references self.options, so
                # writing 'target' here mutates shared state before the
                # deepcopy -- confirm this side effect is intended.
                if norm_target_func:
                    func_args['options']['target'] = norm_target_func(target.strip(), *args, **kwargs)
                else:
                    func_args['options']['target'] = target.strip()
                yield deepcopy(func_args)

        for name, func2run in self.funcs2run:
            # Same callback handles both success and exception results.
            requests = threadpool.makeRequests(callable_=func2run,
                                               args_list=args_generator(name),
                                               callback=self.handle_result,
                                               exc_callback=self.handle_result)

            [self.tp.putRequest(req) for req in requests]
            self.total_num += requests.__len__()

        self.tp.wait()
        self.tp.dismissWorkers(100, do_join=True)
        return self.total_num, self.success_num
开发者ID:blueroutecn,项目名称:BeeCli,代码行数:28,代码来源:batch.py


示例10: getIDS

def getIDS(names):
       cnt=[]
       poolsize=60
       def f(name):
              for i in [1,2]:
                     try:
                            us_set=['Mozilla','IE','Opera','Chrome','Magic','theSage','Iceweasel','Rockmelt']
                            ua=random.choice(us_set)
                            req=urllib2.Request('http://graph.facebook.com/'+name+'?fields=id,name',headers={'User-Agent':ua})
                            r=urllib2.urlopen(req)
                            r=r.read()
                            r=eval(r)
                            result.append((r['id'],r['name']))
                            cnt.append(True)
                            if len(names)>550:
                                   time.sleep(60)# to prevent blocking of ip address by limiting rate
                     except Exception as e:
                            print e.reason,len(cnt)
                            if 'Nont Found' in e.reason:
                                   if len(names)>550:
                                          time.sleep(60)# to prevent blocking of ip address by limiting rate
                                   break
                            if 'Forbidden' in e.reason:
                                   time.sleep(600)
                            if len(names)>550:
                                   time.sleep(60)# to prevent blocking of ip address by limiting rate
                            pass
       result=[]
       pool=tp.ThreadPool(poolsize)
       requests=tp.makeRequests(f,names)
       r=[pool.putRequest(req) for req in requests]
       pool.wait()
       pool.dismissWorkers(poolsize)
       return result
开发者ID:caitybeth,项目名称:Facebook_Mapper,代码行数:34,代码来源:friender.py


示例11: batchTest

    def batchTest(self, norm_target_func=None, *args, **kwds):
        """Run self.func2run (a poc class's run()) against every seed.

        :param norm_target_func: optional normalizer applied to each seed
            (extra *args/**kwds are forwarded to it).
        :return: (total_num, finished_num, err_num) counters.
        """
        def argsGenerator():
            # Shared template dict; a deep copy is yielded per seed so
            # each request receives its own snapshot.
            func_args = {
                'options': self.options,
                'success': None,
                'poc_ret': {},
            }
            for seed in self.seed_iter:
                # NOTE(review): 'options' references self.options, so the
                # 'target' write below mutates shared state before the
                # deepcopy -- confirm this is intended.
                if norm_target_func:
                    func_args['options']['target'] = norm_target_func(seed.strip(), *args, **kwds)
                else:
                    func_args['options']['target'] = seed.strip()
                yield deepcopy(func_args)

        # Results and errors are routed to separate callbacks.
        requests = threadpool.makeRequests(callable_=self.func2run,
                                           args_list = argsGenerator(),
                                           callback=self.cbSaveResult,
                                           exc_callback=self.cbHandleErr)
        [self.tp.putRequest(req) for req in requests]
        self.tp.wait()
        self.tp.dismissWorkers(100, do_join=True)
        return self.total_num, self.finished_num, self.err_num
开发者ID:0ps,项目名称:pocscan,代码行数:25,代码来源:batchtest.py


示例12: run

 def run(self):
     """Scan every entry in self.dicts on a 10-worker pool; return results.

     NOTE(review): 20 workers are dismissed although only 10 were
     created -- harmless with this library, but inconsistent.
     """
     pool = threadpool.ThreadPool(10)
     for req in threadpool.makeRequests(self._scan_start, self.dicts):
         pool.putRequest(req)
     pool.wait()
     pool.dismissWorkers(20, do_join=True)
     return self.result
开发者ID:AnswerDog,项目名称:test,代码行数:7,代码来源:pma_crack.py


示例13: create_records_cache

    def create_records_cache(self, provider, path):
        """ Creates an array of Records classed (s.a.) after parsing all records under `path'.
        Assumes a valid session

        :param provider: storage backend name; "local" forces a
            single-thread pool to avoid thrashing the disk.
        :param path: directory whose subdirectories each hold a
            record.json file.
        """
        records = []

        # If we are in `/' then get all records
        # NOTE(review): listing uses self.provider while the threading
        # decision below uses the `provider' parameter -- confirm the two
        # are meant to be the same object/name.
        for d in self.provider.metadata(path).lsdirs():
            recpath = d + "/record.json"
            log.debug(recpath)
            records.append(recpath)
        requests = threadpool.makeRequests(self.records_worker, records, self.append_records_cache, self.handle_exception)

        #insert the requests into the threadpool

        # This is ugly but will need serious refactoring for the local provider.
        # basically: if using local storage then just use one thread to avoid choking on the HD.
        # For dropbox and other remote providers use multi-theading
        if ( provider == "local" ):
            pool = threadpool.ThreadPool(1)
        else:
            # `default_number' is presumably a module-level pool-size
            # constant -- defined outside this view; verify.
            pool = threadpool.ThreadPool(min(len(requests), default_number))
        for req in requests:
            pool.putRequest(req)
            log.debug("Work request #%s added." % req.requestID)

        #wait for them to finish (or you could go and do something else)
        pool.wait()
        pool.dismissWorkers(min(len(requests), 20), do_join=True)
        log.debug("workers length: %s" % len(pool.workers))
开发者ID:xmichael,项目名称:pcapi,代码行数:29,代码来源:rest.py


示例14: winconf_multi

def winconf_multi(nb_jobs = 4):
    """Collect configuration from every AD server matching the configured
    pattern, using *nb_jobs* worker threads, then run VCS/notification
    handling on each server's output file.

    :param nb_jobs: number of concurrent worker threads.
    """
    from ranwinconf.list_AD import get_all_servers

    # Server-selection pattern, overridable via the GENERAL/PATTERN key.
    pattern = conf_get_IFP(config, "GENERAL", "PATTERN", "operatingSystem='*Server*'")

    print "Retrieving server list from AD based on pattern: %s" % pattern

    server_list = get_all_servers("objectClass='computer' AND %s" % pattern)

    server_name_list = [ computer.cn for computer in server_list]

    print "%d servers were retrieved" % len(server_name_list)

    import threadpool
    pool = threadpool.ThreadPool(nb_jobs)

    # One work request per server; thread_work presumably writes
    # "<server>.txt" consumed below -- defined outside this view.
    requests = threadpool.makeRequests(thread_work, server_name_list)

    for req in requests:
        pool.putRequest(req)

    pool.wait()

    # Post-process sequentially once all collection threads are done.
    for server_name in server_name_list:
        manage_vcs_and_notification(server_name, "%s.txt" % server_name)
开发者ID:sebbrochet,项目名称:ranwinconf,代码行数:25,代码来源:ranwinconf.py


示例15: main

def main():
    """Replay every failed line from delay_dist_fail.txt through the
    appendStatusAndTime worker, 100 threads at a time, waiting for the
    pool every 200 submitted lines to bound the request backlog.
    """
    print "=="
    # 102400 is the buffering size passed to open().
    f=open('delay_dist_fail.txt','r',102400);
    lines = f.readlines()
    length = len(lines)
    total = length  # NOTE(review): assigned but never used
    print length
    logging.info("length:%d",length)
    index = 0
    process_cnt=0;  # NOTE(review): mirrors index but is never reset
    pool = threadpool.ThreadPool(100)
    for line in lines :
        index +=1;
        logging.info(line)
        process_cnt +=1 ;
        #print line
        # One request per line; progress metadata is attached to the
        # request object for the async callback to read.
        requests = threadpool.makeRequests(appendStatusAndTime,[line], asyn_callback)
        for req in requests:
            req.total = length;
            req.process = process_cnt
            pool.putRequest(req)
        # Drain the pool after every 200 queued lines.
        if index == 200:
            pool.wait()
            index =0
        #print "====="
    pool.wait()
开发者ID:liubinwyzbt,项目名称:StudyNote,代码行数:26,代码来源:bus_fail_replay.py


示例16: downjpgmutithread

def downjpgmutithread(filepathlist, dir=""):
    """Download every file in *filepathlist* into *dir* on 5 worker threads.

    :param filepathlist: list of file URLs/paths understood by fileDownload.
    :param dir: target directory forwarded to fileDownload (name kept for
        interface compatibility although it shadows the builtin).
    """
    print("total downloads: %d"%len(filepathlist))

    # Each work item is an (args, kwargs) pair for threadpool.makeRequests.
    runlist = []
    for filepath in filepathlist:
        runlist.append(([filepath, dir], None))
    print("start downloads")

    pool = threadpool.ThreadPool(5)
    for req in threadpool.makeRequests(fileDownload, runlist):
        pool.putRequest(req)
    pool.wait()
    # Removed: a large block of obsolete single-threaded download code that
    # was left in the body as a dead triple-quoted string literal (it was
    # evaluated and discarded at runtime, not a comment).
    print("已经完成所有任务")
开发者ID:wherby,项目名称:Script,代码行数:33,代码来源:Multipledownload.py


示例17: start_thread_pool

    def start_thread_pool(self, thread_pool, app_type):
        """Fire all prepared API requests through a thread pool, retry
        failures, clean up created data and report elapsed time.

        :param thread_pool: worker callable executed once per session item.
        :param app_type: 0 >> A; 1 >> B; 2 >> C; 3 >> D
        :return: None
        """
        start_time = datetime.datetime.now()
        print("读取接口数据中...")
        reader = sessions.ReadSessions.ReadSessions()
        session_list = reader.get_will_request_sessions()  # all sessions about to be requested
        print("接口请求中,请等待...")

        pool = threadpool.ThreadPool(self.thread_count)
        for req in threadpool.makeRequests(thread_pool, session_list):
            pool.putRequest(req)
        pool.wait()
        print("接口请求完成!")

        # Retry mechanism for failed requests.
        retry.Retry.retry11(app_type)

        # Clean up data created during the run.
        print("正在整理创建的数据...")
        sessions.DelaySessions.clear_up(0)
        print("测试报告准备中...")
        end_time = datetime.datetime.now()
        elapsed = end_time - start_time
        print('接口回归测试完成!')
        print("%s %s%s" % ("耗时:", elapsed.seconds, "s"))
开发者ID:darker50,项目名称:ApiTests,代码行数:30,代码来源:Request.py


示例18: pattern6

def pattern6(fundcodelist):
    """Aggregate buy/sell signals across the self-selected funds.

    Fetches per-fund performance concurrently (one thread per fund),
    sums each fund's `action` score, and prints/returns an overall
    sell / buy / no-action verdict.

    :param fundcodelist: list of fund code strings.  NOTE(review): an
        empty list would create ThreadPool(0) -- confirm callers never
        pass one.
    :return: the verdict string that was printed.
    """
    print 'Check delta for self selected fund, give buy/sell/noaction order'
    print 'Strategy: increased %s%% in passed m days(at most) then sell, or dropped %s%% then buy, otherwise, no action' % (upthreshold, downthreshould)

    # Wrap each code in a Fund object for the worker to fill in.
    fundlist6 = []
    for fundcode in fundcodelist:
        fund = Fund()
        fund.fundcode = fundcode
        fundlist6.append(fund)

    # One worker thread per fund; getPerfForFund sets fund.action.
    pool0 = threadpool.ThreadPool(len(fundcodelist))
    requests0 = threadpool.makeRequests(getPerfForFund, fundlist6)
    [pool0.putRequest(req) for req in requests0]
    pool0.wait()

    actionsum = 0
    for fund in fundlist6:
        actionsum +=fund.action

    outputinfo = ''
    #Strategy to sell or buy in under such unstable situation
    if actionsum > 2:
        outputinfo =  'ActionSum: %d for %d funds, Time To Sell Out!!!' % (actionsum, len(fundcodelist))
    elif actionsum < -2:
        outputinfo = 'ActionSum: %d for %d funds, Good To Buy In!!!' % (actionsum, len(fundcodelist))
    else:
        outputinfo = 'ActionSum: %d for %d funds, No Valuable Action!!!' % (actionsum, len(fundcodelist))
    print outputinfo
    return outputinfo
开发者ID:micerin,项目名称:PythonExercise,代码行数:29,代码来源:AllFundList_1.py


示例19: start_crawling

    def start_crawling(self, multi_way=_multi_process):
        """Crawl every parsed branch, either via the threadpool package or
        via a true multiprocessing pool.

        :param multi_way: _multi_thread for threadpool, anything else
            (default _multi_process) for multiprocessing.
            NOTE(review): the thread path only crawls the first
            `poolsize` branches (self.branches[:poolsize]) while the
            process path crawls them all -- confirm this asymmetry is
            intended.
        """
#        self.branch_commit_fp=open(os.path.join(self.saveDir, self.target_repos.repos_name, 'branch_commit.info'), 'w')
        self.parse_branch_name()
        print 'Number of branches:%s' % len(self.branches)


        ######################################################################################
        ##########
        ##########      implementing crawler with the third part package 'threadpool', which is not truly multi-thread
        ##########
        if multi_way==_multi_thread:
            # Each work item is ((branch, baseURL, save_dir), {}).
            para=[((b, baseURL, os.path.join(self.saveDir, self.target_repos.repos_name),), {}) for b in self.branches[:poolsize]]
            pool=threadpool.ThreadPool(poolsize)
            requests=threadpool.makeRequests(crawling_branch, para)

            for req in requests:
                pool.putRequest(req)
            pool.wait()
        #######################################################################################

        #######################################################################################
        ##########
        ##########      truly multi-process implementation
        else:
            pool=ProcessPool(poolsize)
#            para=[(b, baseURL, os.path.join(self.saveDir, self.target_repos.repos_name),) for b in self.branches[:poolsize]]
            for b in self.branches:
                sys.stderr.write('Branch %s\n' % b.branch_name)
                pool.apply_async(crawling_branch, (b, baseURL, os.path.join(self.saveDir, self.target_repos.repos_name),))
            pool.close()
            pool.join()
            sys.stderr.write('All processes has been terminated\n')
开发者ID:maxwellmao,项目名称:programming_lang,代码行数:32,代码来源:deep_crawling.py


示例20: batch_download_upps_src_data

    def batch_download_upps_src_data(self):
        """Download the day's UPPS source data parts from HDFS in parallel.

        Clears the local source directory, polls HDFS every 60s until the
        day's `done` marker exists, then downloads the ten part-file
        groups concurrently via self.download_upps_src_data.
        """
        # Best-effort cleanup of the local directory; failures are only
        # printed, not fatal.
        try:
            cmdstr="rm "+self.upps_src_data_directory+"*"
            print(cmdstr)
            status,output=cmd_execute(cmdstr)
            print status,output
        except Exception as e:
            print(str(e))
        # Poll until the HDFS `done` marker for event_day exists
        # (hadoop fs -test exits 0 when the path is present).
        while 1:
            cmdstr="hadoop fs -test -e /app/lbs/lbs-stat/upp/data/mr/up/map/category/"+self.event_day+"/done"
            e=cmd_execute(cmdstr)[0]
            if(e == 0):
                print(u"hadoop文件ready,开始批量下载文件...")
                break
            else:
                print(u"hadoop文件还未ready,60s之后继续监测...")
                time.sleep(60)
        now=datetime.datetime.now()
        print(u"地图类目用户偏好-UPPS源数据下载-开始:"+str(now))
        # Ten glob patterns, one per trailing digit of the part files.
        filepath_list=[]
        for i in xrange(10):
            filepath_list.append("/app/lbs/lbs-stat/upp/data/mr/up/map/category/"+self.event_day+"/data/part-*"+str(i))
        print(filepath_list)

        # Download all ten groups concurrently; on_download_finish is the
        # per-request completion callback.
        pool = threadpool.ThreadPool(self.download_srcdata_poolsize)
        reqs = threadpool.makeRequests(self.download_upps_src_data,filepath_list,self.on_download_finish)
        [pool.putRequest(req) for req in reqs]
        pool.wait()

        now=datetime.datetime.now()
        print(u"地图类目用户偏好-UPPS源数据下载-完成:"+str(now))
开发者ID:zeus911,项目名称:threadpool,代码行数:31,代码来源:map_category.py



注:本文中的threadpool.makeRequests函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


鲜花

握手

雷人

路过

鸡蛋
该文章已有0人参与评论

请发表评论

全部评论

专题导读
上一篇:
Python threadpool.ThreadPool类代码示例发布时间:2022-05-27
下一篇:
Python local.__init__函数代码示例发布时间:2022-05-27
热门推荐
阅读排行榜

扫描微信二维码

查看手机版网站

随时了解更新最新资讯

139-2527-9053

在线客服(服务时间 9:00~18:00)

在线QQ客服
地址:深圳市南山区西丽大学城创智工业园
电邮:jeky_zhao#qq.com
移动电话:139-2527-9053

Powered by 互联科技 X3.4© 2001-2023 极客世界.|Sitemap