This article collects typical usage examples of the Python function pyyaks.logger.info. If you have been wondering what info does, how to call it, or how it is used in practice, the curated examples here may help.
Below are 20 code examples of the info function, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
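Before the examples, here is a minimal self-contained sketch of the pattern they all share, distilled from the calls that appear on this page (the message text and file names are illustrative only):

import pyyaks.logger

# Create a logger at INFO level with a bare message format (as in Example 1)
logger = pyyaks.logger.get_logger(level=pyyaks.logger.INFO,
                                  format="%(message)s")

# logger.info accepts lazy %-style arguments or a pre-formatted string;
# both styles appear in the examples below.
logger.info('orig_files_glob=%s', '/data/*.fits')
logger.info('Creating db {}'.format('archfiles.db3'))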
Example 1: main
def main():
    filetypes = Ska.Table.read_ascii_table('filetypes.dat')
    if len(sys.argv) == 2:
        filetypes = filetypes[filetypes['content'] == sys.argv[1].upper()]

    loglevel = pyyaks.logger.INFO
    logger = pyyaks.logger.get_logger(level=loglevel, format="%(message)s")

    for filetype in filetypes:
        ft.content = filetype.content.lower()

        orig_files_glob = os.path.join(orig_arch_files['contentdir'].abs, filetype['fileglob'])
        logger.info('orig_files_glob=%s', orig_files_glob)
        for f in glob.glob(orig_files_glob):
            ft.basename = os.path.basename(f)
            tstart = re.search(r'(\d+)', ft.basename).group(1)
            datestart = DateTime(tstart).date
            ft.year, ft.doy = re.search(r'(\d\d\d\d):(\d\d\d)', datestart).groups()

            archdir = arch_files['archdir'].abs
            archfile = arch_files['archfile'].abs
            if not os.path.exists(archdir):
                print 'Making dir', archdir
                os.makedirs(archdir)
            if not os.path.exists(archfile):
                # logger.info('mv %s %s' % (f, archfile))
                shutil.move(f, archfile)
Developer: gmrehbein, Project: eng_archive, Lines of code: 29, Source file: move_fits_archive.py
Example 2: make_archfiles_db
def make_archfiles_db(filename, content_def):
    # Do nothing if it is already there
    if os.path.exists(filename):
        return

    datestart = DateTime(DateTime(opt.start).secs - 60)
    tstart = datestart.secs
    tstop = tstart
    year, doy = datestart.date.split(':')[:2]
    times, indexes = derived.times_indexes(tstart, tstop, content_def['time_step'])

    logger.info('Creating db {}'.format(filename))
    archfiles_def = open('archfiles_def.sql').read()
    db = Ska.DBI.DBI(dbi='sqlite', server=filename)
    db.execute(archfiles_def)

    archfiles_row = dict(filename='{}:0:1'.format(content_def['content']),
                         filetime=0,
                         year=year,
                         doy=doy,
                         tstart=tstart,
                         tstop=tstop,
                         rowstart=0,
                         rowstop=0,
                         startmjf=indexes[0],   # really index0
                         stopmjf=indexes[-1],   # really index1
                         date=datestart.date)
    db.insert(archfiles_row, 'archfiles')
Developer: gmrehbein, Project: eng_archive, Lines of code: 27, Source file: add_derived.py
Example 3: try4times
def try4times(func, *arg, **kwarg):
    """
    Work around problems with sqlite3 database getting locked out from writing,
    presumably due to read activity.  Not completely understood.

    This function will try to run func(*arg, **kwarg) a total of 4 times with an
    increasing sequence of wait times between tries.  It catches only a database
    locked error.
    """
    from django.db.utils import OperationalError

    for delay in 0, 5, 10, 60:
        if delay > 0:
            time.sleep(delay)

        try:
            func(*arg, **kwarg)
        except OperationalError as err:
            if 'database is locked' in str(err):
                # Locked DB, issue informational warning
                logger.info('Warning: locked database, waiting {} seconds'.format(delay))
            else:
                # Something else so just re-raise
                raise
        else:
            # Success, jump out of loop
            break
    else:
        # After 4 tries bail out with an exception
        raise OperationalError('database is locked')
Developer: sot, Project: kadi, Lines of code: 31, Source file: update_events.py
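A hypothetical usage sketch for try4times: here `event` is an illustrative stand-in for a Django model instance whose save() may raise OperationalError('database is locked') under concurrent read load; it is not a name from the kadi project.

# Retry the write up to 4 times with increasing delays (0, 5, 10, 60 s)
try4times(event.save)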
Example 4: main
def main():
    global opt
    opt = get_opt()

    info = {'date': opt.stop,
            'start': opt.start,
            'stop': opt.stop,
            'box_duration_months': opt.box_duration}

    asol_aimpoint = get_asol(info)
    asol_monthly = AsolBinnedStats(asol_aimpoint, 365.25 / 12)
    for det in ('ACIS-S', 'ACIS-I'):
        asol_monthly.det = det
        det_title = asol_monthly.det_title
        info[det_title] = asol_monthly.get_chip_x_y_info()
        asol_monthly.plot_chip_x_y(info[det_title])

    asol_monthly.plot_intra_obs_dy_dz()
    plot_housing_temperature()

    info_file = os.path.join(opt.data_root, 'info.json')
    with open(info_file, 'w') as fh:
        logger.info('Writing info file {}'.format(info_file))
        json.dump(make_pure_python(info), fh, indent=4, sort_keys=True)
Developer: sot, Project: aimpoint_mon, Lines of code: 25, Source file: plot_aimpoint.py
Example 5: read_derived
def read_derived(i, filename, filetype, row, colnames, archfiles, db):
    """Read derived data using eng_archive and derived computation classes.

    ``filename`` has format <content>_<index0>_<index1> where <content>
    is the content type (e.g. "dp_thermal128"), <index0> is the start index for
    the new data and <index1> is the end index (using Python slicing convention
    index0:index1).  Args ``i``, ``filetype``, and ``row`` are as in
    read_archive().  ``row`` must equal <index0>.  ``colnames`` is the list of
    column names for the content type.
    """
    # Check if filename is already in archfiles.  If so then abort further processing.
    if db.fetchall('SELECT filename FROM archfiles WHERE filename=?', (filename,)):
        logger.verbose('File %s already in archfiles - skipping' % filename)
        return None, None

    # f has format <content>_<index0>_<index1>
    # <content> has format dp_<content><mnf_step> e.g. dp_thermal128
    content, index0, index1 = filename.split(':')
    index0 = int(index0)
    index1 = int(index1)
    mnf_step = int(re.search(r'(\d+)$', content).group(1))
    time_step = mnf_step * derived.MNF_TIME
    times = time_step * np.arange(index0, index1)

    logger.info('Reading (%d / %d) %s' % (i, len(archfiles), filename))
    vals = {}
    bads = np.zeros((len(times), len(colnames)), dtype=np.bool)
    for i, colname in enumerate(colnames):
        if colname == 'TIME':
            vals[colname] = times
            bads[:, i] = False
        else:
            dp_class = getattr(Ska.engarchive.derived, colname.upper())
            dp = dp_class()
            dataset = dp.fetch(times[0] - 1000, times[-1] + 1000)
            ok = (index0 <= dataset.indexes) & (dataset.indexes < index1)
            vals[colname] = dp.calc(dataset)[ok]
            bads[:, i] = dataset.bads[ok]

    vals['QUALITY'] = bads
    dat = Ska.Numpy.structured_array(vals, list(colnames) + ['QUALITY'])

    # Accumulate relevant info about archfile that will be ingested into
    # MSID h5 files.  Commit info before h5 ingest so if there is a failure
    # the needed info will be available to do the repair.
    date = DateTime(times[0]).date
    year, doy = date[0:4], date[5:8]
    archfiles_row = dict(filename=filename,
                         filetime=int(index0 * time_step),
                         year=year,
                         doy=doy,
                         tstart=times[0],
                         tstop=times[-1],
                         rowstart=row,
                         rowstop=row + len(dat),
                         startmjf=index0,
                         stopmjf=index1,
                         date=date)

    return dat, archfiles_row
Developer: sot, Project: eng_archive, Lines of code: 60, Source file: update_archive.py
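A small worked example of the filename convention that read_derived parses. Note that the code splits on ':' even though the docstring writes the format with underscores; the sample name below is illustrative.

import re

filename = 'dp_thermal128:1000:2000'
content, index0, index1 = filename.split(':')           # 'dp_thermal128', '1000', '2000'
mnf_step = int(re.search(r'(\d+)$', content).group(1))  # 128: trailing digits of content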
Example 6: make_msid_file
def make_msid_file(colname, content, content_def):
    ft['content'] = content
    ft['msid'] = colname
    filename = msid_files['data'].abs
    if os.path.exists(filename):
        return

    logger.info('Making MSID data file %s', filename)

    if colname == 'TIME':
        dp_vals, indexes = derived.times_indexes(opt.start, opt.stop,
                                                 content_def['time_step'])
    else:
        dp = content_def['classes'][colname]()
        dataset = dp.fetch(opt.start, opt.stop)
        dp_vals = np.asarray(dp.calc(dataset), dtype=dp.dtype)

    # Finally make the actual MSID data file
    filters = tables.Filters(complevel=5, complib='zlib')
    h5 = tables.openFile(filename, mode='w', filters=filters)

    n_rows = int(20 * 3e7 / content_def['time_step'])
    h5shape = (0,)
    h5type = tables.Atom.from_dtype(dp_vals.dtype)
    h5.createEArray(h5.root, 'data', h5type, h5shape, title=colname,
                    expectedrows=n_rows)
    h5.createEArray(h5.root, 'quality', tables.BoolAtom(), (0,), title='Quality',
                    expectedrows=n_rows)

    logger.info('Made {} shape={} with n_rows(1e6)={}'.format(colname, h5shape, n_rows / 1.0e6))
    h5.close()
Developer: gmrehbein, Project: eng_archive, Lines of code: 31, Source file: add_derived.py
Example 7: move_archive_files
def move_archive_files(filetype, archfiles):
    ft['content'] = filetype.content.lower()

    stagedir = arch_files['stagedir'].abs
    if not os.path.exists(stagedir):
        os.makedirs(stagedir)

    for f in archfiles:
        if not os.path.exists(f):
            continue

        ft['basename'] = os.path.basename(f)
        tstart = re.search(r'(\d+)', str(ft['basename'])).group(1)
        datestart = DateTime(tstart).date
        ft['year'], ft['doy'] = re.search(r'(\d\d\d\d):(\d\d\d)', datestart).groups()

        archdir = arch_files['archdir'].abs
        archfile = arch_files['archfile'].abs

        if not os.path.exists(archdir):
            os.makedirs(archdir)

        if not os.path.exists(archfile):
            logger.info('mv %s %s' % (os.path.abspath(f), archfile))
            if not opt.dry_run:
                if not opt.occ:
                    shutil.copy2(f, stagedir)
                shutil.move(f, archfile)

        if os.path.exists(f):
            logger.verbose('Unlinking %s' % os.path.abspath(f))
            os.unlink(f)
Developer: gmrehbein, Project: eng_archive, Lines of code: 31, Source file: update_archive.py
Example 8: create_content_dir
def create_content_dir():
    """
    Make empty files for colnames.pkl, colnames_all.pkl and archfiles.db3
    for the current content type ft['content'].

    This only works within the development (git) directory in conjunction
    with the --create option.
    """
    dirname = msid_files['contentdir'].abs
    if not os.path.exists(dirname):
        logger.info('Making directory {}'.format(dirname))
        os.makedirs(dirname)

    empty = set()
    if not os.path.exists(msid_files['colnames'].abs):
        with open(msid_files['colnames'].abs, 'wb') as f:
            pickle.dump(empty, f, protocol=0)
    if not os.path.exists(msid_files['colnames_all'].abs):
        with open(msid_files['colnames_all'].abs, 'wb') as f:
            pickle.dump(empty, f, protocol=0)

    if not os.path.exists(msid_files['archfiles'].abs):
        archfiles_def = open('archfiles_def.sql').read()
        filename = msid_files['archfiles'].abs
        logger.info('Creating db {}'.format(filename))
        db = Ska.DBI.DBI(dbi='sqlite', server=filename, autocommit=False)
        db.execute(archfiles_def)
        db.commit()
Developer: sot, Project: eng_archive, Lines of code: 28, Source file: update_archive.py
Example 9: make_h5_col_file
def make_h5_col_file(dats, colname):
    """Make a new h5 table to hold column from ``dat``."""
    filename = msid_files['msid'].abs
    filedir = os.path.dirname(filename)
    if not os.path.exists(filedir):
        os.makedirs(filedir)

    # Estimate the number of rows for 20 years based on available data
    times = np.hstack([x['TIME'] for x in dats])
    dt = np.median(times[1:] - times[:-1])
    n_rows = int(86400 * 365 * 20 / dt)

    filters = tables.Filters(complevel=5, complib='zlib')
    h5 = tables.openFile(filename, mode='w', filters=filters)

    col = dats[0][colname]
    h5shape = (0,) + col.shape[1:]
    h5type = tables.Atom.from_dtype(col.dtype)
    h5.createEArray(h5.root, 'data', h5type, h5shape, title=colname,
                    expectedrows=n_rows)
    h5.createEArray(h5.root, 'quality', tables.BoolAtom(), (0,), title='Quality',
                    expectedrows=n_rows)

    logger.info('Made {} shape={} with n_rows(1e6)={}'
                .format(colname, h5shape, n_rows / 1.0e6))
    h5.close()
Developer: gmrehbein, Project: eng_archive, Lines of code: 25, Source file: update_archive.py
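The expectedrows estimate above is plain arithmetic: the number of seconds in 20 years divided by the median sample spacing. For instance, assuming an illustrative spacing of dt = 32.8 s, n_rows = int(86400 * 365 * 20 / 32.8), about 19.2 million rows.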
Example 10: get_obsid
def get_obsid(obsid, dt=3.0):
    """
    Get an obsid
    """
    obsids = events.obsids.filter(obsid__exact=obsid)
    if len(obsids) == 0:
        raise ValueError('No obsid={} in kadi database'.format(obsid))

    dwells = events.dwells.filter(obsids[0].start, obsids[0].stop)
    obsid_dwells = [dwell for dwell in dwells if dwell.start > obsids[0].start]
    logger.info('Using obsid dwell(s): {}'
                .format(','.join(str(dwell) for dwell in obsid_dwells)))

    scs107s = events.scs107s.filter(obsid=obsid)

    tstart = DateTime(obsid_dwells[0].start).secs
    if len(scs107s) > 0:
        tstop = scs107s[0].tstop - 200
    else:
        tstop = DateTime(obsid_dwells[-1].stop).secs

    if tstop - tstart < 2000:
        raise ValueError('Observation interval too short {}'.format(tstop - tstart))

    telems, slots = get_archive_data(tstart, tstop)
    out = telems_to_struct(telems, slots)
    out['obsid'] = obsid

    return out
Developer: sot, Project: aca_status_flags, Lines of code: 26, Source file: update_flags_archive.py
Example 11: copy_statfiles_to_test
def copy_statfiles_to_test(stat, dt, tstart, tstop):
    ft['interval'] = stat
    colnames = pickle.load(open(msid_files['colnames'].abs))
    for colname in colnames:
        ft['msid'] = colname
        if os.path.exists(test_msid_files['stats'].abs):
            continue
        if os.path.exists(msid_files['stats'].abs):
            logger.info('Copying {0} stats for MSID {1}'.format(stat, colname))
            statdir = os.path.dirname(test_msid_files['stats.tmp'].abs)
            if not os.path.exists(statdir):
                os.makedirs(statdir)
            shutil.copy(msid_files['stats'].abs, test_msid_files['stats.tmp'].abs)

            h5 = tables.openFile(test_msid_files['stats.tmp'].abs, 'a')
            times = (h5.root.data.col('index') + 0.5) * dt
            row0, row1 = np.searchsorted(times, [tstart, tstop])
            # print colname, row0, row1, len(times), DateTime(times[row0]).date, DateTime(times[row1]).date

            # Remove from row1-1 to end.  The row1-1 is because it is possible
            # to get the daily stat without the rest of the 5min data if
            # tstop is past noon of the day.  This messes up update_archive.
            h5.root.data.removeRows(row1 - 1, h5.root.data.nrows)
            h5.root.data.removeRows(0, row0)
            h5.copyFile(test_msid_files['stats'].abs, overwrite=True)
            newtimes = (h5.root.data.col('index') + 0.5) * dt
            # print len(newtimes), DateTime(newtimes[0]).date, DateTime(newtimes[-1]).date
            h5.close()
            os.unlink(test_msid_files['stats.tmp'].abs)
Developer: gmrehbein, Project: eng_archive, Lines of code: 27, Source file: make_regr_data.py
Example 12: del_stats
def del_stats(colname, time0, interval):
    """Delete all rows in ``interval`` stats file for column ``colname`` that
    occur after time ``time0`` - ``interval``.  This is used to fix problems
    that result from a file misorder.  Subsequent runs of update_stats will
    refresh the values correctly.
    """
    dt = {'5min': 328,
          'daily': 86400}[interval]

    ft['msid'] = colname
    ft['interval'] = interval
    stats_file = msid_files['stats'].abs
    if not os.path.exists(stats_file):
        raise IOError('Stats file {} not found'.format(stats_file))

    logger.info('Fixing stats file %s after time %s', stats_file, DateTime(time0).date)

    stats = tables.open_file(stats_file, mode='a',
                             filters=tables.Filters(complevel=5, complib='zlib'))
    index0 = time0 // dt - 1
    indexes = stats.root.data.col('index')[:]
    row0 = np.searchsorted(indexes, [index0])[0] - 1
    if opt.dry_run:
        n_del = len(stats.root.data) - row0
    else:
        n_del = stats.root.data.remove_rows(row0, len(stats.root.data))
    logger.info('Deleted %d rows from row %s (%s) to end', n_del, row0,
                DateTime(indexes[row0] * dt).date)
    stats.close()
Developer: sot, Project: eng_archive, Lines of code: 29, Source file: update_archive.py
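To make the row selection in del_stats concrete: assuming a stats row with index i covers times [i*dt, (i+1)*dt), time0 maps to row index time0 // dt, and the code deliberately backs up one full interval (and then one further row via searchsorted) so the rewrite starts safely before the damage. The values below are illustrative.

dt = 86400                    # interval='daily'
time0 = 730000000.0           # hypothetical CXC time of the file misorder
index0 = time0 // dt - 1      # 8448.0: one full interval before time0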
Example 13: get_stats_over_time
def get_stats_over_time(start, stop=None, sp=False, dp=None, ir=False, ms=None,
                        slots='combined', t_samp=1000):
    """
    Equivalent to get_stats_per_interval, but concatenate the results for all
    obsids within the specified time interval.
    """
    # Get obsids in time range and collect all the per-interval statistics
    obsids = events.obsids.filter(start, stop, dur__gt=2000)
    stats_list = []
    for obsid in obsids:
        set_FILES_context(obsid.obsid, sp, dp, ir, ms, t_samp, slots)

        # First check that there is the raw dat file for this obsid.  Nothing
        # can be done without this.
        dat_file = FILES['dat.pkl'].rel
        if not os.path.exists(dat_file):
            logger.info('Skipping {}: {} not in archive'.format(obsid, dat_file))
            continue

        # Now get the stats for this obsid.  Hopefully it has already been computed and
        # is cached as a file.  If not, try to compute the stats (and cache).  If that
        # fails then press on but touch a file to indicate failure so subsequent attempts
        # don't bother.
        logger.info('Processing obsid {}'.format(obsid))
        try:
            stats = get_cached_stats()  # depends on the context set previously
        except FailedStatsFile:
            # Previously failed
            logger.info(' Skipping {}: failed statistics'.format(obsid.obsid))
            continue
        except NoStatsFile:
            logger.info(' Reading pickled data file {}'.format(dat_file))
            dat = pickle.load(open(dat_file, 'r'))
            try:
                logger.info(' Computing statistics')
                if slots == 'combined':
                    stats = get_stats_per_interval_combined(dat, sp, dp, ir, ms, t_samp)
                else:
                    stats = get_stats_per_interval_per_slot(dat, sp, dp, ir, ms, slots, t_samp)
            except ValueError as err:
                open(FILES['stats.ERR'].rel, 'w')  # touch file to indicate failure to compute stats
                logger.warn(' ERROR: {}'.format(err))
                continue  # press on to next obsid; no stats were computed for this one

        stats['obsid'] = obsid.obsid
        stats_list.append(stats)

    stats = {}
    for case in STAT_CASES:
        stats[case] = {}
        for stat_type in STAT_TYPES:
            stats[case][stat_type] = np.hstack([x[case][stat_type] for x in stats_list])

    # Set corresponding array of obsids for back-tracing outliers etc
    stats['obsid'] = np.hstack([np.ones(len(x['obc']['std']), dtype=int) * x['obsid']
                                for x in stats_list])

    return stats
Developer: sot, Project: aca_status_flags, Lines of code: 57, Source file: analysis_plots.py
Example 14: cut_stars
def cut_stars(ai):
    starfiles = glob(os.path.join(ai['outdir'],
                                  "*stars.txt"))
    shutil.copy(starfiles[0], starfiles[0] + ".orig")
    starlines = open(starfiles[0]).read().split("\n")
    for slot in ai['skip_slot']:
        starlines = [i for i in starlines
                     if not re.match("^\s+{}\s+1.*".format(slot), i)]
    logger.info('Cutting stars by updating {}'.format(starfiles[0]))
    with open(starfiles[0], "w") as newlist:
        newlist.write("\n".join(starlines))
Developer: sot, Project: periscope_tilt, Lines of code: 11, Source file: runasp.py
Example 15: get_obsid_data
def get_obsid_data(obsid):
    filename = os.path.join('data', str(obsid) + '.pkl')
    if os.path.exists(filename):
        dat = pickle.load(open(filename, 'r'))
    else:
        import update_flags_archive
        dat = update_flags_archive.get_obsid(obsid)
        pickle.dump(dat, open(filename, 'w'), protocol=-1)
        logger.info('Wrote data for {}'.format(obsid))
    return dat
Developer: sot, Project: aca_status_flags, Lines of code: 11, Source file: analysis_plots.py
Example 16: add_asol_to_h5
def add_asol_to_h5(filename, asol):
    asol = asol.as_array()

    h5 = tables.openFile(filename, mode='a',
                         filters=tables.Filters(complevel=5, complib='zlib'))
    try:
        logger.info('Appending {} records to {}'.format(len(asol), filename))
        h5.root.data.append(asol)
    except tables.NoSuchNodeError:
        logger.info('Creating {}'.format(filename))
        h5.createTable(h5.root, 'data', asol, "Aimpoint drift", expectedrows=1e6)
    h5.root.data.flush()
    h5.close()
Developer: sot, Project: aimpoint_mon, Lines of code: 12, Source file: update_aimpoint_data.py
Example 17: check_filetype
def check_filetype(filetype):
    ft['content'] = filetype.content.lower()

    if not os.path.exists(msid_files['archfiles'].abs):
        logger.info('No archfiles.db3 for %s - skipping' % ft['content'])
        return

    logger.info('Checking {} content type, archfiles {}'.format(
        ft['content'], msid_files['archfiles'].abs))

    db = Ska.DBI.DBI(dbi='sqlite', server=msid_files['archfiles'].abs)
    archfiles = db.fetchall('select * from archfiles')
    db.conn.close()

    if opt.check_order:
        for archfile0, archfile1 in zip(archfiles[:-1], archfiles[1:]):
            exception = (archfile0['startmjf'] == 77826 and
                         archfile0['year'] == 2004 and archfile0['doy'] == 309)
            if archfile1['tstart'] < archfile0['tstart'] and not exception:
                logger.info('ERROR: archfile order inconsistency\n {}\n{}'
                            .format(archfile0, archfile1))

    if not opt.check_lengths:
        colnames = ['TIME']
    else:
        colnames = [x for x in pickle.load(open(msid_files['colnames'].abs))
                    if x not in fetch.IGNORE_COLNAMES]

    lengths = set()
    for colname in colnames:
        ft['msid'] = colname

        h5 = tables.openFile(msid_files['msid'].abs, mode='r')
        length = len(h5.root.data)
        h5.root.data[length - 1]  # Read the last row as a basic integrity check
        h5.close()

        logger.verbose('MSID {} has length {}'.format(colname, length))
        lengths.add(length)
        if len(lengths) != 1:
            logger.info('ERROR: inconsistent MSID length {} {} {}'.format(
                ft['content'], colname, lengths))
            return  # Other checks don't make sense now

    length = lengths.pop()

    archfile = archfiles[-1]
    if archfile['rowstop'] != length:
        logger.info('ERROR: inconsistent archfile {}: '
                    'last rowstop={} MSID length={}'.format(
                        ft['content'], archfile['rowstop'], length))

    if opt.find_glitch:
        find_glitch()
Developer: gmrehbein, Project: eng_archive, Lines of code: 53, Source file: check_integrity.py
Example 18: copy_dark_image
def copy_dark_image():
    """
    Copy dark cal image from Ska to Mica
    """
    outdir = MICA_FILES['dark_cal_dir'].abs
    if not os.path.exists(outdir):
        logger.info('Making output dark cal directory {}'.format(outdir))
        os.makedirs(outdir)

    infile = SKA_FILES['dark_image.fits'].abs
    outfile = MICA_FILES['dark_image.fits'].abs
    logger.info('Copying {} to {}'.format(infile, outfile))
    shutil.copy(infile, outfile)
Developer: sot, Project: mica, Lines of code: 13, Source file: update_aca_dark.py
Example 19: plot_observed_aimpoints
def plot_observed_aimpoints(obs_aimpoints):
    """
    Make png and html (mpld3) plot of data in the ``obs_aimpoints`` table.
    """
    plt.close(1)
    fig = plt.figure(1, figsize=(8, 4))

    dates = DateTime(obs_aimpoints['mean_date'])
    years = dates.frac_year
    times = dates.secs
    ok = years > np.max(years) - float(opt.lookback) / 365.25
    obs_aimpoints = obs_aimpoints[ok]
    times = times[ok]

    lolims = {}
    uplims = {}
    for axis in ('dx', 'dy'):
        lolims[axis] = obs_aimpoints[axis] > 10
        uplims[axis] = obs_aimpoints[axis] < -10
        obs_aimpoints[axis] = obs_aimpoints[axis].clip(-10, 10)

    ok = ((np.abs(obs_aimpoints['target_offset_y']) < 100) &
          (np.abs(obs_aimpoints['target_offset_z']) < 100))

    plot_cxctime(times[ok], obs_aimpoints['dx'][ok], 'ob', label='CHIPX')
    plot_cxctime(times[ok], obs_aimpoints['dy'][ok], 'or', label='CHIPY')
    plot_cxctime(times[~ok], obs_aimpoints['dx'][~ok], '*b', label='CHIPX (offset > 100")')
    plot_cxctime(times[~ok], obs_aimpoints['dy'][~ok], '*r', label='CHIPY (offset > 100")')

    for axis in ('dx', 'dy'):
        if np.any(lolims[axis]):
            plt.errorbar(DateTime(times[lolims[axis]]).plotdate,
                         obs_aimpoints[axis][lolims[axis]], marker='.', yerr=1.5, lolims=True)
        if np.any(uplims[axis]):
            plt.errorbar(DateTime(times[uplims[axis]]).plotdate,
                         obs_aimpoints[axis][uplims[axis]], marker='.', yerr=1.5, uplims=True)

    plt.grid()
    ymax = max(12, np.max(np.abs(obs_aimpoints['dx'])), np.max(np.abs(obs_aimpoints['dy'])))
    plt.ylim(-ymax, ymax)
    plt.ylabel('Offset (arcsec)')
    plt.title('Observed aimpoint offsets')
    plt.legend(loc='upper left', fontsize='small', title='', framealpha=0.5)

    outroot = os.path.join(opt.data_root, 'observed_aimpoints')
    logger.info('Writing plot files {}.png,html'.format(outroot))
    mpld3.plugins.connect(fig, mpld3.plugins.MousePosition(fmt='.1f'))
    mpld3.save_html(fig, outroot + '.html')
    fig.patch.set_visible(False)
    plt.savefig(outroot + '.png', frameon=False)
Developer: sot, Project: aimpoint_mon, Lines of code: 50, Source file: update_observed_aimpoints.py
Example 20: run_ai
def run_ai(ais):
    """
    Run aspect pipeline 'flt_run_pipe' over the aspect intervals described
    in the list of dictionaries passed as an argument
    """
    ascds_env = getenv('source /home/ascds/.ascrc -r release', shell='tcsh')
    tcsh_shell("punlearn asp_l1_std", env=ascds_env)
    ocat_env = getenv(
        'source /proj/sot/ska/data/aspect_authorization/set_ascds_ocat_vars.csh',
        shell='tcsh')
    for var in ['ASCDS_OCAT_UNAME', 'ASCDS_OCAT_SERVER', 'ASCDS_OCAT_PWORD']:
        ascds_env[var] = ocat_env[var]

    if opt.param is not None and len(opt.param):
        for param in opt.param:
            cmd = 'pset asp_l1_std {}'.format(param)
            tcsh_shell(cmd,
                       env=ascds_env)

    logger_fh = FilelikeLogger(logger)

    for ai in ais:
        pipe_cmd = 'flt_run_pipe -r {root} -i {indir} -o {outdir} \
                    -t {pipe_ped} \
                    -a "INTERVAL_START"={istart} \
                    -a "INTERVAL_STOP"={istop} \
                    -a obiroot={obiroot} \
                    -a revision=1 '.format(**ai)
        if 'pipe_start_at' in ai:
            pipe_cmd = pipe_cmd + " -s {}".format(ai['pipe_start_at'])
        if 'pipe_stop_before' in ai:
            pipe_cmd = pipe_cmd + " -S {}".format(ai['pipe_stop_before'])

        if 'skip_slot' in ai:
            try:
                tcsh_shell(pipe_cmd + " -S check_star_data",
                           env=ascds_env,
                           logfile=logger_fh)
            except ShellError as sherr:
                # If shell error, just check to see if get_star_data completed successfully
                loglines = open(logger_fh.filename).read()
                if not re.search("get_star_data completed successfully", loglines):
                    raise ShellError(sherr)
            cut_stars(ai)
            tcsh_shell(pipe_cmd + " -s check_star_data",
                       env=ascds_env,
                       logfile=logger_fh)
        else:
            logger.info('Running pipe command {}'.format(pipe_cmd))
            tcsh_shell(pipe_cmd,
                       env=ascds_env,
                       logfile=logger_fh)
Developer: sot, Project: periscope_tilt, Lines of code: 49, Source file: runasp.py
Note: The pyyaks.logger.info examples in this article were collected by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by many developers, and copyright remains with the original authors; for distribution and use, refer to each project's License. Do not reproduce without permission.