Python deferral.retry Function Code Examples


This article collects and summarizes typical code examples of the Python function util.deferral.retry. If you have been asking yourself how the retry function is used in practice, what its arguments are, or what real-world calls to it look like, the curated examples below should help.



The sections below present 16 code examples of the retry function, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code samples.
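
All of the examples share one calling pattern: deferral.retry(message, delay[, max_retries]) returns a decorator, the decorator is applied to a Deferred-returning callable (typically a JSON-RPC method), and the resulting wrapper is invoked and its Deferred yielded inside an @defer.inlineCallbacks generator. For orientation only, here is a minimal sketch of what such a retry helper could look like; it is inferred from how the examples call it, not the actual p2pool util/deferral.py source, and the sleep helper is a hypothetical stand-in.

    import itertools
    import sys

    from twisted.internet import defer, reactor


    def sleep(seconds):
        # Hypothetical stand-in for util.deferral.sleep: a Deferred that
        # fires after the given number of seconds.
        d = defer.Deferred()
        reactor.callLater(seconds, d.callback, None)
        return d


    def retry(message='Error:', delay=3, max_retries=None):
        # Sketch of a retry decorator factory matching the call sites below:
        # on failure, log `message`, wait `delay` seconds, and try again,
        # re-raising after `max_retries` consecutive failures if one is given.
        def decorator(func):
            @defer.inlineCallbacks
            def wrapper(*args, **kwargs):
                for attempt in itertools.count(1):
                    try:
                        result = yield func(*args, **kwargs)
                    except Exception as e:
                        if max_retries is not None and attempt >= max_retries:
                            raise
                        sys.stderr.write('%s %s\n' % (message, e))
                        yield sleep(delay)
                    else:
                        defer.returnValue(result)
            return wrapper
        return decorator

A typical call site then reads block_count = yield retry('Error while calling getblockcount:', 1)(bitcoind.rpc_getblockcount)() inside an @defer.inlineCallbacks function, which is exactly the shape used throughout the examples that follow.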

Example 1: set_merged_work

 def set_merged_work(merged_url, merged_userpass):
     merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
     while self.running:
         auxblock = yield deferral.retry('Error while calling merged getauxblock on %s:' % (merged_url,), 30)(merged_proxy.rpc_getauxblock)()
         self.merged_work.set(math.merge_dicts(self.merged_work.value, {auxblock['chainid']: dict(
             hash=int(auxblock['hash'], 16),
             target='p2pool' if auxblock['target'] == 'p2pool' else pack.IntType(256).unpack(auxblock['target'].decode('hex')),
             merged_proxy=merged_proxy,
         )}))
         yield deferral.sleep(1)
Developer ID: cqtenq, Project: p2pool-neoscrypt, Lines of code: 10, Source file: work.py


Example 2: set_merged_work

 def set_merged_work():
     if not args.merged_url:
         return
     merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
     while True:
         auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
         pre_merged_work.set(dict(
             hash=int(auxblock['hash'], 16),
             target=bitcoin_data.HashType().unpack(auxblock['target'].decode('hex')),
             chain_id=auxblock['chainid'],
         ))
         yield deferral.sleep(1)
Developer ID: finway-china, Project: p2pool, Lines of code: 12, Source file: main.py


Example 3: freshen_addresses

 def freshen_addresses(self, c):
     self.cur_address_throttle = time.time()
     if self.cur_address_throttle - self.address_throttle < 30:
         return
     self.address_throttle=time.time()
     print "ATTEMPTING TO FRESHEN ADDRESS."
     self.address = yield deferral.retry('Error getting a dynamic address from dashd:', 5)(lambda: self.dashd.rpc_getnewaddress('p2pool'))()
     new_pubkey = dash_data.address_to_pubkey_hash(self.address, self.net)
     self.pubkeys.popleft()
     self.pubkeys.addkey(new_pubkey)
     print " Updated payout pool:"
     for i in range(len(self.pubkeys.keys)):
         print '    ...payout %d: %s(%f)' % (i, dash_data.pubkey_hash_to_address(self.pubkeys.keys[i], self.net),self.pubkeys.keyweights[i],)
     self.pubkeys.updatestamp(c)
     print " Next address rotation in : %fs" % (time.time()-c+self.args.timeaddresses)
Developer ID: 690254282, Project: p2pool-dash, Lines of code: 15, Source file: work.py


Example 4: set_merged_work

 def set_merged_work(merged_url, merged_userpass):
     merged_proxy = jsonrpc.Proxy(merged_url, dict(Authorization="Basic " + base64.b64encode(merged_userpass)))
     while self.running:
         auxblock = yield deferral.retry("Error while calling merged getauxblock:", 30)(
             merged_proxy.rpc_getauxblock
         )()
         self.merged_work.set(
             dict(
                 self.merged_work.value,
                 **{
                     auxblock["chainid"]: dict(
                         hash=int(auxblock["hash"], 16),
                         target="p2pool"
                         if auxblock["target"] == "p2pool"
                         else pack.IntType(256).unpack(auxblock["target"].decode("hex")),
                         merged_proxy=merged_proxy,
                     )
                 }
             )
         )
         yield deferral.sleep(1)
Developer ID: hua002, Project: p2pool, Lines of code: 21, Source file: work.py


Example 5: got_response

        def got_response(header, user, coinbase_nonce):
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
            new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx
            new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else gentx

            header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
            try:
                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                    helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net)
                    if pow_hash <= header['bits'].target:
                        print
                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                        print
            except:
                log.err(None, 'Error while processing potential block:')
            
            user, _, _, _ = self.get_user_details(user)
            assert header['previous_block'] == ba['previous_block']
            assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link)
            assert header['bits'] == ba['bits']
            
            on_time = self.new_work_event.times == lp_count
            
            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                        df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                            pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                           # neoscrypt uses little endian only
                           # pack.IntType(256, 'little').pack(aux_work['hash']).encode('hex'),
                            bitcoin_data.aux_pow_type.pack(dict(
                                merkle_tx=dict(
                                    tx=new_gentx,
                                    block_hash=header_hash,
                                    merkle_link=merkle_link,
                                ),
                                merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                                parent_block_header=header,
                            )).encode('hex'),
                        )
                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work['target']):
                                print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (result,)
                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')
            
            if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
                last_txout_nonce = pack.IntType(8*self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
                share = get_share(header, last_txout_nonce)
                
                print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    ' DEAD ON ARRIVAL' if not on_time else '',
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)
                
                self.node.tracker.add(share)
                self.node.set_best_share()
                
                try:
                    if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, 'Error forwarding block solution:')
                
                self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time, share.hash)
            if p2pool.DEBUG:
                print 'Hash:   %X' % (pow_hash)
                print 'Target: %X' % (target)
            if pow_hash > target:
                print 'Worker %s submitted share with hash > target:' % (user,)
            elif header_hash in received_header_hashes:
                print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,)
            else:
                received_header_hashes.add(header_hash)
                
                self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user)
                self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user, share_target=share_info['bits'].target))
                self.local_addr_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), pubkey_hash=pubkey_hash))
            
            return on_time
Developer ID: cqtenq, Project: p2pool-neoscrypt, Lines of code: 97, Source file: work.py


Example 6: poll_warnings

 def poll_warnings():
     bitcoind_getinfo_var.set((yield deferral.retry('Error while calling getinfo:')(bitcoind.rpc_getinfo)()))
Developer ID: and-then, Project: p2pool-rav, Lines of code: 2, Source file: main.py


Example 7: main

def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        
        @defer.inlineCallbacks
        def connect_p2p():
            # connect to bitcoind over bitcoin-p2p
            print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
            factory = bitcoin_p2p.ClientFactory(net.PARENT)
            reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
            def long():
                print '''    ...taking a while. Common reasons for this include all of bitcoind's connection slots being used...'''
            long_dc = reactor.callLater(5, long)
            yield factory.getProtocol() # waits until handshake is successful
            if not long_dc.called: long_dc.cancel()
            print '    ...success!'
            print
            defer.returnValue(factory)
        
        if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
            factory = yield connect_p2p()
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.HTTPProxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
        yield helper.check(bitcoind, net)
        temp_work = yield helper.getwork(bitcoind)
        
        bitcoind_getinfo_var = variable.Variable(None)
        @defer.inlineCallbacks
        def poll_warnings():
            bitcoind_getinfo_var.set((yield deferral.retry('Error while calling getinfo:')(bitcoind.rpc_getinfo)()))
        yield poll_warnings()
        deferral.RobustLoopingCall(poll_warnings).start(20*60)
        
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block'],)
        print '    Current block height: %i' % (temp_work['height'] - 1,)
        print
        
        if not args.testnet:
            factory = yield connect_p2p()
        
        print 'Determining payout address...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address,)
            else:
                address = None
            
            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print '    Cached address is either invalid or not controlled by local bitcoind!'
                    address = None
            
            if address is None:
                print '    Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
            
            with open(address_path, 'wb') as f:
                f.write(address)
            
            my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
        else:
            my_pubkey_hash = args.pubkey_hash
        print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
        print
        
        print "Loading shares..."
        shares = {}
        known_verified = set()
        def share_cb(share):
            share.time_seen = 0 # XXX
            shares[share.hash] = share
            if len(shares) % 1000 == 0 and shares:
                print "    %i" % (len(shares),)
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net, share_cb, known_verified.add)
        print "    ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified))
        print
        
        
        print 'Initializing work...'
        
        node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net)
        yield node.start()
        
        for share_hash in shares:
            if share_hash not in node.tracker.items:
                ss.forget_share(share_hash)
        for share_hash in known_verified:
            if share_hash not in node.tracker.verified.items:
                ss.forget_verified_share(share_hash)
        node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
#......... remainder of the code omitted .........
Developer ID: and-then, Project: p2pool-rav, Lines of code: 101, Source file: main.py


Example 8: poll_warnings

 def poll_warnings():
     errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors']
     bitcoind_warning_var.set(errors if errors != '' else None)
Developer ID: baloo-kiev, Project: p2pool, Lines of code: 3, Source file: main.py


Example 9: poll_warnings

 def poll_warnings():
     errors = (yield deferral.retry("Error while calling getmininginfo:")(bitcoind.rpc_getmininginfo)())[
         "errors"
     ]
     bitcoind_warning_var.set(errors if errors != "" else None)
Developer ID: nishizhen, Project: p2pool, Lines of code: 5, Source file: main.py


Example 10: poll_warnings

 def poll_warnings():
     daemon_getinfo_var.set((yield deferral.retry('Error while calling getinfo:')(daemon.rpc_getinfo)()))
Developer ID: ghostlander, Project: p2pool-neoscrypt, Lines of code: 2, Source file: main.py


Example 11: got_response

        def got_response(header, user, coinbase_nonce):
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
            new_packed_gentx = (
                packed_gentx[: -self.COINBASE_NONCE_LENGTH - 4] + coinbase_nonce + packed_gentx[-4:]
                if coinbase_nonce != "\0" * self.COINBASE_NONCE_LENGTH
                else packed_gentx
            )
            new_gentx = (
                bitcoin_data.tx_type.unpack(new_packed_gentx)
                if coinbase_nonce != "\0" * self.COINBASE_NONCE_LENGTH
                else gentx
            )

            header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
            try:
                if pow_hash <= header["bits"].target or p2pool.DEBUG:
                    helper.submit_block(
                        dict(header=header, txs=[new_gentx] + other_transactions),
                        False,
                        self.node.factory,
                        self.node.bitcoind,
                        self.node.bitcoind_work,
                        self.node.net,
                    )
                    if pow_hash <= header["bits"].target:
                        print
                        print "GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x" % (
                            self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                            header_hash,
                        )
                        print
            except:
                log.err(None, "Error while processing potential block:")

            user, _, _, _ = self.get_user_details(user)
            assert header["previous_block"] == ba["previous_block"]
            assert header["merkle_root"] == bitcoin_data.check_merkle_link(
                bitcoin_data.hash256(new_packed_gentx), merkle_link
            )
            assert header["bits"] == ba["bits"]
            # Check momentum using midhash (who knows why it is called that) and birthday values
            midhash = hashlib.sha256(hashlib.sha256(bitcoin_data.block_header_type.pack(header)[:80]).digest()).digest()
            # print 'MIDHASH: {0}'.format(midhash.encode('hex'))
            # print 'A: {0}'.format(header['birthdayA'])
            # print 'B: {0}'.format(header['birthdayB'])
            momentumc = rewardcoin_momentum.checkMomentum(midhash, header["birthdayA"], header["birthdayB"])
            # print momentumc
            if momentumc == False:
                print "Invalid Momentum from Client!"
                return False

            on_time = self.new_work_event.times == lp_count

            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work["target"] or p2pool.DEBUG:
                        df = deferral.retry("Error submitting merged block: (will retry)", 10, 10)(
                            aux_work["merged_proxy"].rpc_getauxblock
                        )(
                            pack.IntType(256, "big").pack(aux_work["hash"]).encode("hex"),
                            bitcoin_data.aux_pow_type.pack(
                                dict(
                                    merkle_tx=dict(tx=new_gentx, block_hash=header_hash, merkle_link=merkle_link),
                                    merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                                    parent_block_header=header,
                                )
                            ).encode("hex"),
                        )

                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work["target"]):
                                print >> sys.stderr, "Merged block submittal result: %s Expected: %s" % (
                                    result,
                                    pow_hash <= aux_work["target"],
                                )
                            else:
                                print "Merged block submittal result: %s" % (result,)

                        @df.addErrback
                        def _(err):
                            log.err(err, "Error submitting merged block:")

                except:
                    log.err(None, "Error while processing merged mining POW:")

            if pow_hash <= share_info["bits"].target and header_hash not in received_header_hashes:
                last_txout_nonce = pack.IntType(8 * self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
                share = get_share(header, last_txout_nonce)

                print "GOT SHARE! %s %s prev %s age %.2fs%s" % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    " DEAD ON ARRIVAL" if not on_time else "",
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
#......... remainder of the code omitted .........
Developer ID: Cloudsy, Project: p2pool-Rewardcoin, Lines of code: 101, Source file: work.py


Example 12: poll_warnings

 def poll_warnings():
     dashd_getnetworkinfo_var.set((yield deferral.retry('Error while calling getnetworkinfo:')(dashd.rpc_getnetworkinfo)()))
Developer ID: thelazier, Project: p2pool-dash, Lines of code: 2, Source file: main.py


Example 13: got_response

        def got_response(header, request):
            header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
            try:
                if pow_hash <= header["bits"].target or p2pool.DEBUG:
                    helper.submit_block(
                        dict(header=header, txs=transactions),
                        False,
                        self.node.factory,
                        self.node.bitcoind,
                        self.node.bitcoind_work,
                        self.node.net,
                    )
                    if pow_hash <= header["bits"].target:
                        print
                        print "GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x" % (
                            self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                            header_hash,
                        )
                        print
            except:
                log.err(None, "Error while processing potential block:")

            user, _, _, _ = self.get_user_details(request)
            assert header["previous_block"] == ba.previous_block
            assert header["merkle_root"] == ba.merkle_root
            assert header["bits"] == ba.bits

            on_time = self.new_work_event.times == lp_count

            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work["target"] or p2pool.DEBUG:
                        df = deferral.retry("Error submitting merged block: (will retry)", 10, 10)(
                            aux_work["merged_proxy"].rpc_getauxblock
                        )(
                            pack.IntType(256, "big").pack(aux_work["hash"]).encode("hex"),
                            bitcoin_data.aux_pow_type.pack(
                                dict(
                                    merkle_tx=dict(tx=transactions[0], block_hash=header_hash, merkle_link=merkle_link),
                                    merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                                    parent_block_header=header,
                                )
                            ).encode("hex"),
                        )

                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work["target"]):
                                print >> sys.stderr, "Merged block submittal result: %s Expected: %s" % (
                                    result,
                                    pow_hash <= aux_work["target"],
                                )
                            else:
                                print "Merged block submittal result: %s" % (result,)

                        @df.addErrback
                        def _(err):
                            log.err(err, "Error submitting merged block:")

                except:
                    log.err(None, "Error while processing merged mining POW:")

            if pow_hash <= share_info["bits"].target and header_hash not in received_header_hashes:
                share = get_share(header, transactions)

                print "GOT SHARE! %s %s prev %s age %.2fs%s" % (
                    request.getUser(),
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    " DEAD ON ARRIVAL" if not on_time else "",
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)

                self.node.tracker.add(share)
                if not p2pool.DEBUG:
                    self.node.tracker.verified.add(share)
                self.node.set_best_share()

                try:
                    if (pow_hash <= header["bits"].target or p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, "Error forwarding block solution:")

                self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time)

            if pow_hash > target:
                print "Worker %s submitted share with hash > target:" % (request.getUser(),)
                print "    Hash:   %56x" % (pow_hash,)
                print "    Target: %56x" % (target,)
            elif header_hash in received_header_hashes:
                print >> sys.stderr, "Worker %s @ %s submitted share more than once!" % (
                    request.getUser(),
                    request.getClientIP(),
                )
            else:
#......... remainder of the code omitted .........
Developer ID: hua002, Project: p2pool, Lines of code: 101, Source file: work.py


Example 14: main

def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        
        traffic_happened = variable.Event()
        
        @defer.inlineCallbacks
        def connect_p2p():
            # connect to bitcoind over bitcoin-p2p
            print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
            factory = bitcoin_p2p.ClientFactory(net.PARENT)
            reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
            yield factory.getProtocol() # waits until handshake is successful
            print '    ...success!'
            print
            defer.returnValue(factory)
        
        if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
            factory = yield connect_p2p()
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
        @deferral.retry('Error while checking Bitcoin connection:', 1)
        @defer.inlineCallbacks
        def check():
            if not (yield net.PARENT.RPC_CHECK(bitcoind)):
                print >>sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
                raise deferral.RetrySilentlyException()
            if not net.VERSION_CHECK((yield bitcoind.rpc_getinfo())['version']):
                print >>sys.stderr, '    Bitcoin version too old! Upgrade to 0.6.4 or newer!'
                raise deferral.RetrySilentlyException()
        yield check()
        temp_work = yield getwork(bitcoind)
        
        if not args.testnet:
            factory = yield connect_p2p()
        
        block_height_var = variable.Variable(None)
        @defer.inlineCallbacks
        def poll_height():
            block_height_var.set((yield deferral.retry('Error while calling getblockcount:')(bitcoind.rpc_getblockcount)()))
        yield poll_height()
        task.LoopingCall(poll_height).start(60*60)
        
        bitcoind_warning_var = variable.Variable(None)
        @defer.inlineCallbacks
        def poll_warnings():
            errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors']
            bitcoind_warning_var.set(errors if errors != '' else None)
        yield poll_warnings()
        task.LoopingCall(poll_warnings).start(20*60)
        
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block'],)
        print '    Current block height: %i' % (block_height_var.value,)
        print
        
        print 'Determining payout address...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address,)
            else:
                address = None
            
            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print '    Cached address is either invalid or not controlled by local bitcoind!'
                    address = None
            
            if address is None:
                print '    Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
            
            with open(address_path, 'wb') as f:
                f.write(address)
            
            my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
        else:
            my_pubkey_hash = args.pubkey_hash
        print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
        print
        
        my_share_hashes = set()
        my_doa_share_hashes = set()
        
        tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
#......... remainder of the code omitted .........
Developer ID: alaudidae, Project: p2pool, Lines of code: 101, Source file: main.py


Example 15: poll_height

 def poll_height():
     block_height_var.set((yield deferral.retry('Error while calling getblockcount:')(bitcoind.rpc_getblockcount)()))
Developer ID: alaudidae, Project: p2pool, Lines of code: 2, Source file: main.py


Example 16: main

def main(args, net, datadir_path):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:', 1)(net.BITCOIN_RPC_CHECK)(bitcoind)
        if not good:
            print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block_hash'],)
        print
        
        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        yield factory.getProtocol() # waits until handshake is successful
        print '    ...success!'
        print
        
        if args.pubkey_hash is None:
            print 'Getting payout address from bitcoind...'
            my_script = yield get_payout_script2(bitcoind, net)
        else:
            print 'Computing payout script from provided address....'
            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', bitcoin_data.script2_to_human(my_script, net)
        print
        
        ht = bitcoin_p2p.HeightTracker(bitcoind, factory)
        
        tracker = p2pool_data.OkayTracker(net)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (len(known_verified),)
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares),)
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        pre_current_work = variable.Variable(None)
        pre_current_work2 = variable.Variable(None)
        pre_merged_work = variable.Variable(None)
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            pre_current_work2.set(dict(
                time=work['time'],
                transactions=work['transactions'],
                subsidy=work['subsidy'],
                clock_offset=time.time() - work['time'],
                last_update=time.time(),
            )) # second set first because everything hooks on the first
            pre_current_work.set(dict(
                version=work['version'],
#......... remainder of the code omitted .........
Developer ID: finway-china, Project: p2pool, Lines of code: 101, Source file: main.py



Note: The util.deferral.retry examples in this article were compiled by 纯净天空 from source code and documentation hosted on platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by their original authors, who retain the copyright; consult each project's license before reusing or redistributing the code, and do not republish without permission.

