本文整理汇总了Python中utils.setup_logging函数的典型用法代码示例。如果您正苦于以下问题:Python setup_logging函数的具体用法?Python setup_logging怎么用?Python setup_logging使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了setup_logging函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: main
def main():
    """Build from source and run the test suite once per database backend."""
    _mkdirs(SRCDIR, INSTALLDIR)
    setup_logging()
    fetch_and_build()
    backends = ('sqlite3', 'mysql')
    for backend in backends:
        # Wipe the install dir so each backend starts from a clean slate.
        shell('rm -rf {}/*'.format(INSTALLDIR))
        setup_and_test(backend)
开发者ID:EagleSmith,项目名称:seafile,代码行数:7,代码来源:run.py
示例2: __init__
def __init__(self, name):
    # Open a TCP server socket intended to publish Arduino readings over I2C.
    # NOTE(review): Python 2 only -- uses the `except socket.error, msg` syntax.
    # NOTE(review): the `name` parameter is unused in this block -- confirm
    # whether setup_logging() was meant to receive it.
    logger = logging.getLogger('log')
    setup_logging()
    # smbus (I2C bus access) must be importable; abort the process otherwise.
    try:
        import smbus
    except ImportError:
        logger.critical('[Arduino Socket]: SMBUS not configured properly!')
        sys.exit(1)
    arduino_device = None # Global arduino_device variable
    states = None
    # Define the socket parameters
    HOST = ''  # empty host string: listen on all interfaces
    PORT = 7893
    connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Allow quick restarts without waiting for TIME_WAIT to expire.
    connection.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # Bind socket to local host and port
    try:
        connection.bind((HOST, PORT))
    except socket.error, msg:
        logger.critical('[Arduino Socket]: Bind failed. Error Code: ' + str(msg[0]) + ' Message ' \
            + msg[1])
        sys.exit()
开发者ID:VT-SailBOT,项目名称:sailbot,代码行数:27,代码来源:arduino_read.py
示例3: start_tracker
def start_tracker():
    """Start the Torrent Tracker.

    Parses command-line options, prepares directories and logging, then runs
    the tracker app until interrupted. Python 2 only (`except Exception, ex`).
    """
    # parse commandline options
    parser = OptionParser()
    parser.add_option('-p', '--port', help='Tracker Port', default=0)
    parser.add_option('-b', '--background', action='store_true', default=False,
                      help='Start in background')
    parser.add_option('-d', '--debug', action='store_true', default=False,
                      help='Debug mode')
    (options, args) = parser.parse_args()
    # setup directories
    utils.create_pytt_dirs()
    # setup logging
    utils.setup_logging(options.debug)
    try:
        # start the torrent tracker; a 0/unset --port falls back to the
        # configured 'tracker'/'port' value via the `or`.
        run_app(int(options.port) or utils.get_config().getint('tracker',
                                                               'port'))
    except KeyboardInterrupt:
        # Clean shutdown on Ctrl+C: close DB, exit success.
        logging.info('Tracker Stopped.')
        utils.close_db()
        sys.exit(0)
    except Exception, ex:
        # Any other failure: log it, close DB, exit non-zero.
        logging.fatal('%s' % str(ex))
        utils.close_db()
        sys.exit(-1)
开发者ID:kholia,项目名称:Pytt,代码行数:29,代码来源:tracker.py
示例4: main
def main():
    """Parse income statements for each ticker listed in --ticker_file.

    Skips tickers in SKIPPED_TICKERS, missing inputs, and existing outputs
    unless --overwrite is given.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--ticker_file', required=True)
    parser.add_argument('--input_dir', required=True)
    parser.add_argument('--output_dir', required=True)
    parser.add_argument('--overwrite', action='store_true')
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    # Tickers are listed one per line.
    with open(args.ticker_file, 'r') as fp:
        tickers = fp.read().splitlines()
    total = len(tickers)
    logging.info('Processing %d tickers' % total)
    for index, ticker in enumerate(tickers, 1):
        if ticker in SKIPPED_TICKERS:
            logging.warning('%d/%d: skipped %s' % (index, total, ticker))
            continue
        logging.info('%d/%d: %s' % (index, total, ticker))
        input_path = '%s/%s.csv' % (args.input_dir, ticker)
        output_path = '%s/%s.csv' % (args.output_dir, ticker)
        if not path.isfile(input_path):
            logging.warning('Input file does not exist: %s' % input_path)
            continue
        if path.isfile(output_path) and not args.overwrite:
            logging.warning('Output file exists and not overwritable: %s'
                            % output_path)
            continue
        parse(input_path, output_path)
开发者ID:galabing,项目名称:petra,代码行数:33,代码来源:parse_income_statements.py
示例5: run
def run():
    """
    Main loop. Run this TA for ever.

    Reads modinput configuration from stdin, configures per-file logging at
    the stanza's "loglevel", then starts the TAManager until it exits.
    Returns 0 immediately when no stanza configuration is present.
    """
    try:
        meta_configs, stanza_configs = conf.parse_modinput_configs(
            sys.stdin.read())
    except Exception as ex:
        # FIX: `ex.message` was deprecated in 2.6 and removed in Python 3;
        # logging the exception object itself is equivalent and portable.
        _LOGGER.error("Failed to setup config for manager TA: %s", ex)
        _LOGGER.error(traceback.format_exc())
        raise
    if not stanza_configs:
        _LOGGER.info("No config, exiting...")
        return 0
    # stanza_configs is guaranteed non-empty past the guard above, so the
    # original redundant `if stanza_configs:` wrapper was dropped.
    loglevel = stanza_configs[0].get("loglevel", "INFO")
    _LOGGER.info("Setup logging level=%s", loglevel)
    for log_file in all_logs:
        utils.setup_logging(log_file, loglevel, True)
    ta_manager = tm.TAManager(meta_configs, stanza_configs[0])
    _setup_signal_handler(ta_manager)
    ta_manager.run()
开发者ID:chenziliang,项目名称:src,代码行数:26,代码来源:ta_central_management.py
示例6: main
def main():
    """Validate sampled data files for tickers at or after --from_ticker."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--ticker_file', required=True)
    parser.add_argument('--input_dir', required=True)
    parser.add_argument('--from_ticker', default='')
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    # Tickers are listed one per line; keep those lexically >= --from_ticker.
    with open(args.ticker_file, 'r') as fp:
        lines = fp.read().splitlines()
    tickers = [line for line in lines if line >= args.from_ticker]
    total = len(tickers)
    logging.info('Processing %d tickers' % total)
    for index, ticker in enumerate(tickers, 1):
        logging.info('%d/%d: %s' % (index, total, ticker))
        # Index tickers use '^' (e.g. ^GSPC); the on-disk files use '_'.
        input_path = '%s/%s.csv' % (args.input_dir, ticker.replace('^', '_'))
        if not path.isfile(input_path):
            logging.warning('Input file does not exist: %s' % input_path)
            continue
        validate(input_path)
开发者ID:galabing,项目名称:iceland,代码行数:27,代码来源:validate_sample_data.py
示例7: __init__
def __init__(self, name, port, pin, scale_factor, zero_point):
    # Drive a servo on a Raspberry Pi GPIO pin and open a TCP server socket
    # for receiving commands.
    # NOTE(review): Python 2 only -- uses the `except socket.error, msg` syntax.
    logger = logging.getLogger('log')
    setup_logging(name)
    # RPi.GPIO is only available on a Raspberry Pi; abort if it is missing.
    try:
        import RPi.GPIO as GPIO
    except ImportError:
        logger.critical('[Servo Socket]: GPIO not configured properly!')
        sys.exit(1)
    self.port = port
    self.pin = pin
    # Calibration parameters -- presumably map logical positions to servo
    # pulse values; TODO confirm against the caller.
    self.scale_factor = scale_factor
    self.zero_point = zero_point
    # Configure the servo
    GPIO.setmode(GPIO.BOARD)
    GPIO.setup(self.pin, GPIO.OUT)
    # Define the socket parameters
    HOST = ''  # empty host string: listen on all interfaces
    PORT = self.port
    connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Allow quick restarts without waiting for TIME_WAIT to expire.
    connection.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # Bind socket to local host and port
    try:
        connection.bind((HOST, PORT))
    except socket.error, msg:
        logger.critical('[Servo Socket]: Bind failed. Error Code: ' + str(msg[0]) + ' Message ' \
            + msg[1])
        sys.exit()
开发者ID:VT-SailBOT,项目名称:sailbot,代码行数:33,代码来源:servo.py
示例8: main
def main():
    """Compute Haugen book-to-price: (assets - intangibles - liabilities)
    per share, divided by price, for every ticker present in all inputs."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--total_assets_path', required=True)
    parser.add_argument('--intangible_assets_path', required=True)
    parser.add_argument('--total_liabilities_path', required=True)
    parser.add_argument('--prices_path', required=True)
    parser.add_argument('--outstanding_shares_path', required=True)
    parser.add_argument('--output_path', required=True)
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    total_assets = utils.read_map(args.total_assets_path)
    total_liabilities = utils.read_map(args.total_liabilities_path)
    prices = utils.read_map(args.prices_path)
    shares = utils.read_map(args.outstanding_shares_path)
    tickers = (total_assets.keys() & total_liabilities.keys()
               & prices.keys() & shares.keys())
    # intangible assets are 0 by default
    intangibles = {t: 0.0 for t in tickers}
    intangibles.update(utils.read_map(args.intangible_assets_path))
    with open(args.output_path, 'w') as fp:
        for ticker in sorted(tickers):
            b2p = ((total_assets[ticker] - intangibles[ticker]
                    - total_liabilities[ticker])
                   / shares[ticker] / prices[ticker])
            print('%s %f' % (ticker, b2p), file=fp)
开发者ID:galabing,项目名称:petra,代码行数:32,代码来源:haugen_b2p.py
示例9: main
def main():
    """Sample each per-ticker CSV from --input_dir into --output_dir."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--ticker_file', required=True)
    parser.add_argument('--input_dir', required=True)
    parser.add_argument('--output_dir', required=True)
    parser.add_argument('--overwrite', action='store_true')
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    # Sanity check.
    assert args.input_dir != args.output_dir
    utils.setup_logging(args.verbose)
    # Tickers are listed one per line.
    with open(args.ticker_file, 'r') as fp:
        tickers = fp.read().splitlines()
    total = len(tickers)
    logging.info('Processing %d tickers' % total)
    for index, ticker in enumerate(tickers, 1):
        logging.info('%d/%d: %s' % (index, total, ticker))
        # Index tickers use '^' (e.g. ^GSPC); the on-disk files use '_'.
        base = ticker.replace('^', '_')
        input_path = '%s/%s.csv' % (args.input_dir, base)
        if not path.isfile(input_path):
            logging.warning('Input file is missing: %s' % input_path)
            continue
        output_path = '%s/%s.csv' % (args.output_dir, base)
        if path.isfile(output_path) and not args.overwrite:
            logging.warning('Output file exists and not overwritable: %s'
                            % output_path)
            continue
        sample(input_path, output_path)
开发者ID:galabing,项目名称:iceland,代码行数:32,代码来源:sample_data.py
示例10: main
def main():
    """Build from source, then run setup/tests for every combination of
    database backend and setup mode ('prompt' or 'auto')."""
    _mkdirs(SRCDIR, INSTALLDIR)
    setup_logging()
    fetch_and_build()
    for backend in ('sqlite3', 'mysql'):
        if backend == 'mysql':
            # MySQL needs a root password set before the suite can connect.
            shell('mysqladmin -u root password %s' % MYSQL_ROOT_PASSWD)
        for mode in ('prompt', 'auto'):
            # Wipe the install dir so each run starts from a clean slate.
            shell('rm -rf {}/*'.format(INSTALLDIR))
            setup_and_test(backend, mode)
开发者ID:shoeper,项目名称:seafile,代码行数:10,代码来源:run.py
示例11: setup
def setup():
    # Initialize module-level state: shelve databases tracking copied and
    # uploaded files, a scan list, file logging, and an in-memory buffer that
    # collects WARNING-and-above log records for later reporting.
    # NOTE(review): uses the Python 2 `StringIO` module.
    global copied, uploaded, last_scanned, warnings
    copied = open_shelf("copied.db")
    uploaded = open_shelf("uploaded.db")
    last_scanned = []
    log_path = os.path.join(PROJECT_PATH, "smugsync.log")
    utils.setup_logging(log_path)
    # Mirror WARNING+ records into a StringIO via an extra root-logger handler.
    warnings = StringIO.StringIO()
    handler = logging.StreamHandler(warnings)
    handler.setLevel(logging.WARNING)
    logging.getLogger("").addHandler(handler)
开发者ID:jongman,项目名称:smugsync,代码行数:12,代码来源:smugsync.py
示例12: main
def main():
    # Compute each ticker's average monthly dollar trading volume over the
    # k months ending at --yyyy_mm and write sorted "ticker value" lines
    # to --output_path.
    parser = argparse.ArgumentParser()
    parser.add_argument('--ticker_file', required=True)
    parser.add_argument('--price_dir', required=True)
    parser.add_argument('--yyyy_mm', required=True)
    parser.add_argument('--k', default='12')
    parser.add_argument('--output_path', required=True)
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    # Tickers are listed one per line.
    with open(args.ticker_file, 'r') as fp:
        tickers = fp.read().splitlines()
    logging.info('Processing %d tickers' % len(tickers))
    k = int(args.k)
    assert k > 0
    volume_map = dict()  # ticker -> average monthly dollar volume
    for i in range(len(tickers)):
        ticker = tickers[i]
        logging.info('%d/%d: %s' % (i+1, len(tickers), ticker))
        # Index tickers use '^' (e.g. ^GSPC); the on-disk files use '_'.
        input_path = '%s/%s.csv' % (args.price_dir, ticker.replace('^', '_'))
        if not path.isfile(input_path):
            logging.warning('Input file is missing: %s' % input_path)
            continue
        with open(input_path, 'r') as fp:
            lines = fp.read().splitlines()
        vmap = dict()  # yyyy-mm -> summed dollar volume for that month
        assert len(lines) > 0
        # Skip the header row (j starts at 1). The early `break` below assumes
        # rows are ordered newest-first -- TODO confirm against the data files.
        for j in range(1, len(lines)):
            d, o, h, l, c, v, a = lines[j].split(',')
            d = d[:7]  # truncate date to yyyy-mm
            if args.yyyy_mm < d: continue  # row is after the target month
            if distance(args.yyyy_mm, d) >= k: break  # row is outside the window
            # Dollar volume -- presumably volume * adjusted close; TODO confirm.
            v = float(v) * float(a)
            if d in vmap: vmap[d] += v
            else: vmap[d] = v
        assert len(vmap) <= k
        # Require a full k months of data; otherwise skip this ticker.
        if len(vmap) < k: #max(1, k/2):
            logging.warning('Could not find enough data for %s' % ticker)
            continue
        volume_map[ticker] = sum(vmap.values()) / len(vmap)
    with open(args.output_path, 'w') as fp:
        for ticker in sorted(volume_map.keys()):
            print('%s %f' % (ticker, volume_map[ticker]), file=fp)
开发者ID:galabing,项目名称:petra,代码行数:50,代码来源:haugen_trading_volume.py
示例13: start
def start():
    """Launch the scheduler (original docstring: 'Запуск планировщика')."""
    level = logging.DEBUG if settings.DEBUG is True else logging.INFO
    setup_logging(level)
    queue = Queue()
    # Start scheduler subprocess, passing our pid so it can track the parent.
    Process(target=scheduler_process, args=(queue, os.getpid())).start()
    # In debug mode the AMQP consumer runs in a subprocess so Ctrl+C works;
    # otherwise it runs in a thread.
    if settings.DEBUG:
        Process(target=amqp_thread, args=(queue, )).start()
    else:
        Thread(target=amqp_thread, args=(queue, )).start()
开发者ID:nikolaykhodov,项目名称:liketools,代码行数:14,代码来源:run_scheduler.py
示例14: main
def main():
    """Compute each ticker's k-month excess return over the market."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--ticker_file', required=True)
    parser.add_argument('--price_sample_dir', required=True)
    parser.add_argument('--market_sample_path', required=True)
    parser.add_argument('--yyyy_mm', required=True)
    parser.add_argument('--k', required=True)
    parser.add_argument('--output_path', required=True)
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    k = int(args.k)
    assert k > 0
    market_samples = read_samples(args.market_sample_path)
    curr_date = args.yyyy_mm
    prev_date = compute_date(curr_date, k)
    logging.info('current date = %s, previous date = %s' % (curr_date, prev_date))
    # Both endpoints must exist in the market series.
    assert curr_date in market_samples
    assert prev_date in market_samples
    # Tickers are listed one per line.
    with open(args.ticker_file, 'r') as fp:
        tickers = fp.read().splitlines()
    total = len(tickers)
    logging.info('Processing %d tickers' % total)
    excess_map = dict()
    for index, ticker in enumerate(tickers, 1):
        assert ticker.find('^') == -1  # ^GSPC should not be in tickers.
        logging.info('%d/%d: %s' % (index, total, ticker))
        stock_sample_path = '%s/%s.csv' % (args.price_sample_dir, ticker)
        if not path.isfile(stock_sample_path):
            logging.warning('Input file does not exist: %s' % stock_sample_path)
            continue
        stock_samples = read_samples(stock_sample_path)
        if curr_date not in stock_samples or prev_date not in stock_samples:
            logging.warning('Insufficient data for %s' % ticker)
            continue
        excess_map[ticker] = compute_excess(
            stock_samples[prev_date], stock_samples[curr_date],
            market_samples[prev_date], market_samples[curr_date])
    with open(args.output_path, 'w') as fp:
        for ticker in sorted(excess_map.keys()):
            print('%s %f' % (ticker, excess_map[ticker]), file=fp)
开发者ID:galabing,项目名称:petra,代码行数:48,代码来源:haugen_excess_return.py
示例15: main
def main():
    """Validate financial data files of one report type across all tickers.

    Logs how many tickers have optional metrics, how many have consecutive
    quarters, and the set of metric keys common to every validated file.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--ticker_file', required=True)
    parser.add_argument('--from_ticker', default='')
    parser.add_argument('--report_type', required=True)
    parser.add_argument('--input_dir', required=True)
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    rt = args.report_type
    assert rt in TYPE_MAP, (
        'report_type must be one of %s' % TYPE_MAP.keys())
    (req_map, opt_map, add_map, skip_map) = TYPE_MAP[rt]
    # Tickers are listed one per line.
    with open(args.ticker_file, 'r') as fp:
        tickers = fp.read().splitlines()
    count = len(tickers)
    logging.info('Processing %d tickers' % count)
    total, opts, quarterly = 0, 0, 0
    common_keys = None
    for index, ticker in enumerate(tickers, 1):
        if ticker < args.from_ticker or ticker in SKIPPED_TICKERS:
            logging.info('%d/%d: skipped %s' % (index, count, ticker))
            continue
        logging.info('%d/%d: %s' % (index, count, ticker))
        input_path = '%s/%s.csv' % (args.input_dir, ticker)
        if not path.isfile(input_path):
            logging.warning('Input file does not exist: %s' % input_path)
            continue
        keys, has_opt, is_quarterly = validate(
            input_path, ticker, req_map, opt_map, add_map, skip_map)
        # Intersect metric keys across every validated file.
        common_keys = keys if common_keys is None else common_keys & keys
        if has_opt:
            opts += 1
        if is_quarterly:
            quarterly += 1
        total += 1
    logging.info('%d out of %d have optional metrics' % (opts, total))
    logging.info('%d out of %d are consecutive quarters' % (quarterly, total))
    logging.info('Common keys: %s' % common_keys)
开发者ID:galabing,项目名称:petra,代码行数:48,代码来源:validate_financial_data.py
示例16: __init__
def __init__(self,
             take_ownership=True, # Tor dies when the Crawler does
             torrc_config={"CookieAuth": "1"},
             tor_log="/var/log/tor/tor.log",
             tor_cell_log="/var/log/tor/tor_cell_seq.log",
             control_port=9051,
             socks_port=9050,
             run_in_xvfb=True,
             tbb_path=join("/opt","tbb","tor-browser_en-US"),
             tb_log_path=join(_log_dir,"firefox.log"),
             tb_tor_cfg=USE_RUNNING_TOR,
             page_load_timeout=20,
             wait_on_page=5,
             wait_after_closing_circuits=0,
             restart_on_sketchy_exception=True,
             additional_control_fields={},
             db_handler=None):
    # Launch a tor process, an (optionally headless) Tor Browser, and open
    # the cell log stream; optionally register this crawl in the database.
    # NOTE(review): torrc_config and additional_control_fields are mutable
    # default arguments, shared across instances -- confirm this is intended.
    self.logger = setup_logging(_log_dir, "crawler")
    self.torrc_config = torrc_config
    # Pick free Socks/Control ports (each avoiding the other's choice) and
    # record them in the torrc config handed to the tor launcher.
    self.socks_port = find_free_port(socks_port, control_port)
    self.torrc_config.update({"SocksPort": str(self.socks_port)})
    self.control_port = find_free_port(control_port, self.socks_port)
    self.torrc_config.update({"ControlPort": str(self.control_port)})
    self.torrc_config.update({"Log": "INFO file {}".format(tor_log)})
    self.logger.info("Starting tor process with config "
                     "{torrc_config}.".format(**locals()))
    self.tor_process = launch_tor_with_config(config=self.torrc_config,
                                              take_ownership=take_ownership)
    self.authenticate_to_tor_controlport()
    self.logger.info("Opening cell log stream...")
    # Binary stream of tor's cell sequence log, consumed by other methods.
    self.cell_log = open(tor_cell_log, "rb")
    if run_in_xvfb:
        # Run the browser inside a virtual framebuffer (headless display).
        self.logger.info("Starting Xvfb...")
        self.run_in_xvfb = True
        self.virtual_framebuffer = start_xvfb()
    self.logger.info("Starting Tor Browser...")
    self.tb_driver = TorBrowserDriver(tbb_path=tbb_path,
                                      tor_cfg=tb_tor_cfg,
                                      tbb_logfile_path=tb_log_path,
                                      socks_port=self.socks_port,
                                      control_port=self.control_port)
    self.wait_after_closing_circuits = wait_after_closing_circuits
    self.page_load_timeout = page_load_timeout
    self.tb_driver.set_page_load_timeout(page_load_timeout)
    self.wait_on_page = wait_on_page
    self.restart_on_sketchy_exception = restart_on_sketchy_exception
    self.control_data = self.get_control_data(page_load_timeout,
                                              wait_on_page,
                                              wait_after_closing_circuits,
                                              additional_control_fields)
    self.db_handler = db_handler
    if db_handler:
        # Register this crawl and remember its id for later inserts.
        self.crawlid = self.db_handler.add_crawl(self.control_data)
开发者ID:redshiftzero,项目名称:FingerprintSecureDrop,代码行数:60,代码来源:crawler.py
示例17: __init__
def __init__(self, meta_configs, stanza_configs):
    """
    @meta_configs: a dict like object, implement dict.get/[] like
    interfaces to get the value for a key. meta_configs shall at least
    contain
    {"server_uri": uri, "checkpoint_dir": dir, "session_key": key}
    key/value pairs
    @stanza_configs: a list like object containing a list of dict
    like object. Each element shall implement dict.get/[] like interfaces
    to get the value for a key. Each element in the list shall at least
    contain
    """
    # NOTE(review): the @stanza_configs description above is truncated in the
    # original source. Also note the code below indexes stanza_configs with
    # the string key "repserver", which conflicts with the "list like"
    # description -- confirm the expected type with callers.
    # Deferred imports -- presumably to keep module import light or avoid
    # cycles; TODO confirm.
    import timer_queue as tq
    import ta_configure_manager as conf_mgr
    import servers
    import ta_conf_client as tcc
    self.meta_configs = meta_configs
    appname = utils.get_appname_from_path(op.abspath(__file__))
    meta_configs["appname"] = appname
    # Queue used to wake/notify the manager loop (Python 2 `Queue` module).
    self.wakeup_queue = Queue.Queue()
    self.conf_manager = conf_mgr.TAConfigureManager(meta_configs)
    self.timer_queue = tq.TimerQueue()
    # Pub/Rep servers built from the stanza configuration; the Rep server
    # dispatches incoming requests to self._handle_request.
    self.pub_server = servers.PubServer(stanza_configs)
    self.rep_server = servers.RepServer(stanza_configs,
                                        self._handle_request)
    self.conf_client = tcc.TAConfClient(stanza_configs["repserver"],
                                        meta_configs["server_uri"],
                                        meta_configs["session_key"])
    self._state_logger = utils.setup_logging("ta_state")
    self._started = False
开发者ID:chenziliang,项目名称:src,代码行数:32,代码来源:ta_manager.py
示例18: main
def main():
    """Download one report type for every ticker in --ticker_file.

    --report_type picks income statement / balance sheet / cash flow;
    --period picks quarterly ("3") or annual ("12"). Existing outputs are
    skipped unless --overwrite is given.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--ticker_file', required=True)
    parser.add_argument('--report_type', required=True)
    parser.add_argument('--period', required=True)
    parser.add_argument('--output_dir', required=True)
    parser.add_argument('--overwrite', action='store_true')
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    rt = args.report_type
    assert rt in ('is', 'bs', 'cf'), (
        'report_type must be one of "is", "bs" and "cf"')
    p = args.period
    assert p in ('3', '12'), 'period must be "3" or "12"'
    # Tickers are listed one per line.
    with open(args.ticker_file, 'r') as fp:
        tickers = fp.read().splitlines()
    total = len(tickers)
    logging.info('Processing %d tickers' % total)
    sl, fl = [], []  # Lists of tickers succeeded/failed to download.
    for index, ticker in enumerate(tickers, 1):
        logging.info('%d/%d: %s' % (index, total, ticker))
        output_path = '%s/%s.csv' % (args.output_dir, ticker)
        should_download = True
        if path.isfile(output_path):
            if args.overwrite:
                remove(output_path)
                action = 'overwriting'
            else:
                action = 'skipping'
                should_download = False
            logging.warning('Output file exists: %s, %s' % (output_path, action))
        if should_download:
            if download(ticker, rt, p, output_path):
                sl.append(ticker)
            else:
                fl.append(ticker)
    logging.info('Downloaded %d tickers, failed %d tickers'
                 % (len(sl), len(fl)))
    logging.info('Downloaded tickers: %s' % sl)
    logging.info('Failed tickers: %s' % fl)
开发者ID:galabing,项目名称:petra,代码行数:47,代码来源:download_financial_data.py
示例19: main
def main():
    """Combine nine Haugen factor files into one weighted score per ticker.

    Only tickers present in every factor file are scored; weights are the
    module-level ER1..CF2P constants expressed in percent.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--er1_path', required=True)
    parser.add_argument('--er12_path', required=True)
    parser.add_argument('--tv2mc_path', required=True)
    parser.add_argument('--er2_path', required=True)
    parser.add_argument('--e2p_path', required=True)
    parser.add_argument('--roe_path', required=True)
    parser.add_argument('--b2p_path', required=True)
    parser.add_argument('--er6_path', required=True)
    parser.add_argument('--cf2p_path', required=True)
    parser.add_argument('--output_path', required=True)
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    er1 = utils.read_map(args.er1_path)
    er12 = utils.read_map(args.er12_path)
    tv2mc = utils.read_map(args.tv2mc_path)
    er2 = utils.read_map(args.er2_path)
    e2p = utils.read_map(args.e2p_path)
    roe = utils.read_map(args.roe_path)
    b2p = utils.read_map(args.b2p_path)
    er6 = utils.read_map(args.er6_path)
    cf2p = utils.read_map(args.cf2p_path)
    tickers = set(er1).intersection(
        er12, tv2mc, er2, e2p, roe, b2p, er6, cf2p)
    logging.info('%d tickers' % len(tickers))
    logging.info('total weight: %f' %
                 (ER1 + ER12 + TV2MC + ER2 + E2P + ROE + B2P + ER6 + CF2P))
    with open(args.output_path, 'w') as fp:
        for t in sorted(tickers):
            score = (er1[t] * ER1
                     + er12[t] * ER12
                     + tv2mc[t] * TV2MC
                     + er2[t] * ER2
                     + e2p[t] * E2P
                     + roe[t] * ROE
                     + b2p[t] * B2P
                     + er6[t] * ER6
                     + cf2p[t] * CF2P) / 100 # accounting for %
            print('%s %f' % (t, score), file=fp)
开发者ID:galabing,项目名称:petra,代码行数:45,代码来源:haugen_score.py
示例20: main
def main():
    """Write market cap (price * outstanding shares) per ticker."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--prices_path', required=True)
    parser.add_argument('--outstanding_shares_path', required=True)
    parser.add_argument('--output_path', required=True)
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    prices = utils.read_map(args.prices_path)
    shares = utils.read_map(args.outstanding_shares_path)
    # Only tickers present in both input maps are emitted.
    common = prices.keys() & shares.keys()
    with open(args.output_path, 'w') as fp:
        for ticker in sorted(common):
            print('%s %f' % (ticker, prices[ticker] * shares[ticker]), file=fp)
开发者ID:galabing,项目名称:petra,代码行数:18,代码来源:haugen_mc.py
注:本文中的utils.setup_logging函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论