This article collects typical usage examples of the utc_now function from the Python module socorrolib.lib.datetimeutil. If you have been wondering what utc_now does, how to call it, or how it is used in real code, the hand-picked examples below should help.
The following 20 code examples of the utc_now function are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code samples.
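Before diving into the examples, note that utc_now returns a timezone-aware datetime pinned to UTC (Example 12 below tests exactly that), which is why the snippets can freely subtract datetime.timedelta values and compare the results. A minimal sketch, assuming only the import path used throughout this article; the values in the comments are illustrative:

import datetime
from socorrolib.lib import datetimeutil

now = datetimeutil.utc_now()       # timezone-aware datetime in UTC
print(now.tzinfo)                  # a UTC tzinfo object, never None
print(now.strftime('%Z %z'))       # 'UTC +0000'

yesterday = now - datetime.timedelta(days=1)   # arithmetic preserves the tzinfo
print(yesterday < now)                         # True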
Example 1: test_no_new_crashes
def test_no_new_crashes(self):
    new_crash_source = ESNewCrashSource(self.config)
    self.health_check()

    generator = new_crash_source.new_crashes(
        utc_now() - datetime.timedelta(days=1),
        'Firefox',
        ['43.0.1']
    )
    eq_(list(generator), [])

    self.index_crash(
        a_processed_crash,
        raw_crash=a_raw_crash,
        crash_id=a_processed_crash['uuid']
    )
    self.refresh_index()

    # Same test now that there is a processed crash in there
    # but notably under a different name and version.
    generator = new_crash_source.new_crashes(
        utc_now() - datetime.timedelta(days=1),
        'Firefox',
        ['43.0.1']
    )
    eq_(list(generator), [])
Developer: 4thAce, Project: socorro, Lines: 26, Source: test_new_crash_source.py
Example 2: _normal_jobs_iter
def _normal_jobs_iter(self):
    """
    Yields a list of job tuples pulled from the 'jobs' table for which the
    owner is this process and the started datetime is null. This iterator
    is perpetual - it never raises the StopIteration exception
    """
    get_normal_job_sql = (
        "select"
        " j.id,"
        " j.uuid,"
        " priority "
        "from"
        " jobs j "
        "where"
        " j.owner = %d"
        " and j.starteddatetime is null "
        "order by queueddatetime"
        " limit %d" % (self.processor_id,
                       self.config.batchJobLimit))
    normal_jobs_list = []
    last_query_timestamp = utc_now()
    while True:
        polling_threshold = utc_now() - self.config.pollingInterval
        if not normal_jobs_list and \
           last_query_timestamp < polling_threshold:  # get more
            normal_jobs_list = self.transaction(
                execute_query_fetchall,
                get_normal_job_sql
            )
            last_query_timestamp = utc_now()
        if normal_jobs_list:
            while normal_jobs_list:
                yield normal_jobs_list.pop(0)
        else:
            yield None
Developer: 4thAce, Project: socorro, Lines: 35, Source: legacy_new_crash_source.py
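The loop in Example 2 throttles database queries by comparing utc_now() against the time of the previous query. A stripped-down sketch of that pattern; the 30-second interval and the throttled_fetch helper are made up for illustration (the real interval comes from config.pollingInterval):

import datetime
from socorrolib.lib.datetimeutil import utc_now

POLLING_INTERVAL = datetime.timedelta(seconds=30)   # assumed value for the sketch

def throttled_fetch(fetch, last_query_timestamp):
    """Call `fetch` only if POLLING_INTERVAL has elapsed since the last query."""
    polling_threshold = utc_now() - POLLING_INTERVAL
    if last_query_timestamp < polling_threshold:
        return fetch(), utc_now()          # fresh results and a new timestamp
    return [], last_query_timestamp        # too soon; keep the old timestamp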
Example 3: run
def run(self):
    # if this is non-zero, we use it.
    if self.config.days_into_past:
        last_run = utc_now() - datetime.timedelta(days=self.config.days_into_past)
    else:
        try:
            # KeyError if it's never run successfully
            # TypeError if self.job_information is None
            last_run = self.job_information["last_success"]
        except (KeyError, TypeError):
            # basically, the "virgin run" of this job
            last_run = utc_now()

    # bugzilla runs on PST, so we need to communicate in its time zone
    PST = tz.gettz("PST8PDT")
    last_run_formatted = last_run.astimezone(PST).strftime("%Y-%m-%d")
    query = self.config.query % last_run_formatted
    for (bug_id, status, resolution, short_desc, signature_set) in self._iterator(query):
        try:
            # each run of this loop is a transaction
            self.database_transaction_executor(
                self.inner_transaction, bug_id, status, resolution, short_desc, signature_set
            )
        except NothingUsefulHappened:
            pass
Developer: KaiRo-at, Project: socorro, Lines: 25, Source: bugzilla.py
Example 4: test_delete_old_indices
def test_delete_old_indices(self):
    # Create old indices to be deleted.
    self.index_client.create('socorro200142', {})
    self.indices.append('socorro200142')

    self.index_client.create('socorro200000', {})
    self.indices.append('socorro200000')

    # Create an old aliased index.
    self.index_client.create('socorro200201_20030101', {})
    self.indices.append('socorro200201_20030101')
    self.index_client.put_alias(
        index='socorro200201_20030101',
        name='socorro200201',
    )

    # Create a recent aliased index.
    last_week_index = self.get_index_for_date(
        utc_now() - datetime.timedelta(weeks=1)
    )
    self.index_client.create('socorro_some_aliased_index', {})
    self.indices.append('socorro_some_aliased_index')
    self.index_client.put_alias(
        index='socorro_some_aliased_index',
        name=last_week_index,
    )

    # Create a recent index that should not be deleted.
    now_index = self.get_index_for_date(utc_now())
    self.index_client.create(now_index, {})
    self.indices.append(now_index)

    # These will raise an error if an index was not correctly created.
    assert self.index_client.exists('socorro200142')
    assert self.index_client.exists('socorro200000')
    assert self.index_client.exists('socorro200201')
    assert self.index_client.exists(now_index)
    assert self.index_client.exists(last_week_index)

    api = IndexCleaner(self.config)
    api.delete_old_indices()

    # Verify the recent index is still there.
    ok_(self.index_client.exists(now_index))
    ok_(self.index_client.exists(last_week_index))

    # Verify the old indices are gone.
    ok_(not self.index_client.exists('socorro200142'))
    ok_(not self.index_client.exists('socorro200000'))
    ok_(not self.index_client.exists('socorro200201'))
Developer: 4thAce, Project: socorro, Lines: 50, Source: test_index_cleaner.py
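The get_index_for_date helper used in Example 4 maps a date onto a weekly index name; Example 20 further down shows an index schema of 'socorro_%Y%W'. A minimal sketch assuming that strftime template (in the real code the template is configuration-driven, so the exact name may differ):

import datetime
from socorrolib.lib.datetimeutil import utc_now

INDEX_TEMPLATE = 'socorro_%Y%W'    # assumed template; comes from configuration in socorro

def index_for_date(a_date, template=INDEX_TEMPLATE):
    """Map a date onto its weekly Elasticsearch index name."""
    return a_date.strftime(template)

now_index = index_for_date(utc_now())
last_week_index = index_for_date(utc_now() - datetime.timedelta(weeks=1))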
Example 5: test_create_release
def test_create_release(self):
    self._insert_release_channels()
    self._insert_products()
    config_manager = self._setup_config_manager()

    with config_manager.context() as config:
        app = middleware_app.MiddlewareApp(config)
        app.main()
        server = middleware_app.application

        now = datetimeutil.utc_now()
        response = self.post(
            server,
            '/releases/release/',
            {
                'product': 'Firefox',
                'version': '1.0',
                'update_channel': 'beta',
                'build_id': now.strftime('%Y%m%d%H%M'),
                'platform': 'Windows',
                'beta_number': '1',
                'release_channel': 'Beta',
                'throttle': '1'
            }
        )
        eq_(response.data, True)
Developer: ahlfors, Project: socorro, Lines: 26, Source: test_middleware_app.py
Example 6: test_basic_run
def test_basic_run(self):
    cur = self.conn.cursor()
    # Ensure test rows are present.
    statement = """
        INSERT INTO raw_adi_logs
        (report_date, product_name, count) VALUES
        (%(first)s, 'WinterFox', 11),
        (%(second)s, 'WinterFox', 23)
    """
    second = utc_now().date()
    first = second - datetime.timedelta(days=1)
    cur.execute(statement, {'first': first, 'second': second})
    self.conn.commit()

    # Run the crontabber job to remove the old rows.
    config_manager = self._setup_config_manager(days_to_keep=1)
    with config_manager.context() as config:
        tab = CronTabber(config)
        tab.run_all()

    # Basic assertion test of stored procedure.
    information = self._load_structure()
    assert information['clean-raw-adi-logs']
    assert not information['clean-raw-adi-logs']['last_error']
    assert information['clean-raw-adi-logs']['last_success']

    # Ensure the old test row was removed
    cur.execute("""
        SELECT report_date FROM raw_adi_logs
    """)
    result, = cur.fetchall()
    report_date = result[0]
    eq_(report_date, second)
Developer: 4thAce, Project: socorro, Lines: 33, Source: test_clean_raw_adi_logs.py
Example 7: POST
def POST(self, *args):
    raw_crash, dumps = self._get_raw_crash_from_form()

    current_timestamp = utc_now()
    raw_crash.submitted_timestamp = current_timestamp.isoformat()
    # legacy - ought to be removed someday
    raw_crash.timestamp = time.time()

    if (not self.config.accept_submitted_crash_id
        or 'crash_id' not in raw_crash
    ):
        crash_id = createNewOoid(current_timestamp)
        raw_crash.crash_id = crash_id
        self.logger.info('%s received', crash_id)
    else:
        crash_id = raw_crash.crash_id
        self.logger.info('%s received with existing crash_id:', crash_id)

    raw_crash.type_tag = self.type_tag

    self.crash_storage.save_raw_crash(
        raw_crash,
        dumps,
        crash_id
    )
    self.logger.info('%s accepted', crash_id)
    return "CrashID=%s%s\n" % (self.type_tag, crash_id)
Developer: lonnen, Project: socorro-collector, Lines: 27, Source: wsgi_generic_collector.py
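The collector in Example 7 records the same submission instant twice: as an ISO-8601 string from utc_now().isoformat() and, for legacy consumers, as a Unix epoch float from time.time(). A short sketch of the two representations; the values in the comments are illustrative only:

import time
from socorrolib.lib.datetimeutil import utc_now

current_timestamp = utc_now()
print(current_timestamp.isoformat())   # e.g. '2016-03-01T12:34:56.789012+00:00'
print(time.time())                     # e.g. 1456835696.789 (seconds since the epoch, no timezone)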
Example 8: test_basic_run
def test_basic_run(self):
    cur = self.conn.cursor()
    # Ensure test rows are present.
    statement = """
        INSERT INTO missing_symbols
        (date_processed, debug_file, debug_id, code_file, code_id)
        VALUES
        (%(first)s, 'foo.pdb', '0420', 'foo.py', '123'),
        (%(second)s, 'bar.pdb', '65EA9', 'bar.py', null)
    """
    second = utc_now().date()
    first = second - datetime.timedelta(days=1)
    cur.execute(statement, {'first': first, 'second': second})
    self.conn.commit()

    # Run the crontabber job to remove the old rows.
    config_manager = self._setup_config_manager(days_to_keep=1)
    with config_manager.context() as config:
        tab = CronTabber(config)
        tab.run_all()

    # Basic assertion test of stored procedure.
    information = self._load_structure()
    assert information['clean-missing-symbols']
    assert not information['clean-missing-symbols']['last_error']
    assert information['clean-missing-symbols']['last_success']

    # Ensure expected test row was removed
    cur.execute("""
        SELECT date_processed FROM missing_symbols
    """)
    first, = cur.fetchall()
    date_processed = first[0]
    eq_(date_processed, second)
Developer: 4thAce, Project: socorro, Lines: 34, Source: test_clean_missing_symbols.py
Example 9: test_mapping
def test_mapping(self, mapping):
    """Verify that a mapping is correct.

    This function does so by first creating a new, temporary index in
    elasticsearch using the mapping. It then takes some recent crash
    reports that are in elasticsearch and tries to insert them in the
    temporary index. Any failure in any of those steps will raise an
    exception. If any is raised, that means the mapping is incorrect in
    some way (either it doesn't validate against elasticsearch's rules,
    or is not compatible with the data we currently store).

    If no exception is raised, the mapping is likely correct.

    This function is to be used in any place that can change the
    `storage_mapping` field in any Super Search Field.
    Methods `create_field` and `update_field` use it, see above.
    """
    temp_index = 'socorro_mapping_test'

    es_connection = self.get_connection()

    # Import at runtime to avoid dependency circle.
    from socorro.external.es.index_creator import IndexCreator
    index_creator = IndexCreator(self.config)
    try:
        index_creator.create_index(
            temp_index,
            mapping,
        )

        now = datetimeutil.utc_now()
        last_week = now - datetime.timedelta(days=7)
        current_indices = self.generate_list_of_indexes(last_week, now)

        crashes_sample = es_connection.search(
            index=current_indices,
            doc_type=self.config.elasticsearch.elasticsearch_doctype,
            size=self.config.elasticsearch.mapping_test_crash_number,
        )
        crashes = [x['_source'] for x in crashes_sample['hits']['hits']]

        for crash in crashes:
            es_connection.index(
                index=temp_index,
                doc_type=self.config.elasticsearch.elasticsearch_doctype,
                body=crash,
            )
    except elasticsearch.exceptions.ElasticsearchException as e:
        raise BadArgumentError(
            'storage_mapping',
            msg='Indexing existing data in Elasticsearch failed with the '
                'new mapping. Error is: %s' % str(e),
        )
    finally:
        try:
            index_creator.get_index_client().delete(temp_index)
        except elasticsearch.exceptions.NotFoundError:
            # If the index does not exist (if the index creation failed
            # for example), we don't need to do anything.
            pass
Developer: 4thAce, Project: socorro, Lines: 60, Source: super_search_fields.py
Example 10: get_signatures
def get_signatures(self, **kwargs):
    """Return top crashers by signatures.

    See http://socorro.readthedocs.org/en/latest/middleware.html#tcbs
    """
    filters = [
        ("product", None, "str"),
        ("version", None, "str"),
        ("crash_type", "all", "str"),
        ("to_date", datetimeutil.utc_now(), "datetime"),
        ("duration", datetime.timedelta(7), "timedelta"),
        ("os", None, "str"),
        ("limit", 100, "int"),
        ("date_range_type", None, "str")
    ]
    params = external_common.parse_arguments(filters, kwargs)
    params.logger = logger

    # what the twoPeriodTopCrasherComparison() function does is that it
    # makes a start date from taking the to_date - duration
    if params.duration > datetime.timedelta(30):
        raise BadArgumentError('Duration too long. Max 30 days.')

    with self.get_connection() as connection:
        return tcbs.twoPeriodTopCrasherComparison(connection, params)
Developer: ahlfors, Project: socorro, Lines: 26, Source: crashes.py
Example 11: test_create_release_with_beta_number_null
def test_create_release_with_beta_number_null(self):
    self._insert_release_channels()
    service = Releases(config=self.config)

    now = datetimeutil.utc_now()
    build_id = now.strftime('%Y%m%d%H%M')
    params = dict(
        product='Firefox',
        version='1.0',
        update_channel='beta',
        build_id=build_id,
        platform='Windows',
        beta_number=None,
        release_channel='Beta',
        throttle=1
    )
    res = service.create_release(**params)
    ok_(res)

    # but...
    params['beta_number'] = 0
    assert_raises(
        MissingArgumentError,
        service.create_release,
        **params
    )
Developer: ahlfors, Project: socorro, Lines: 27, Source: test_releases.py
Example 12: test_utc_now
def test_utc_now():
    """
    Test datetimeutil.utc_now()
    """
    res = datetimeutil.utc_now()
    eq_(res.strftime('%Z'), 'UTC')
    eq_(res.strftime('%z'), '+0000')
    ok_(res.tzinfo)
Developer: mozilla, Project: socorrolib, Lines: 8, Source: test_datetimeutil.py
Example 13: test_get_parameters_date_defaults
def test_get_parameters_date_defaults(self):
    with _get_config_manager().context() as config:
        search = SearchBaseWithFields(
            config=config,
        )

    now = datetimeutil.utc_now()

    # Test default values when nothing is passed
    params = search.get_parameters()
    ok_('date' in params)
    eq_(len(params['date']), 2)

    # Pass only the high value
    args = {
        'date': '<%s' % datetimeutil.date_to_string(now)
    }
    params = search.get_parameters(**args)
    ok_('date' in params)
    eq_(len(params['date']), 2)
    eq_(params['date'][0].operator, '<')
    eq_(params['date'][1].operator, '>=')
    eq_(params['date'][0].value.date(), now.date())
    eq_(
        params['date'][1].value.date(),
        now.date() - datetime.timedelta(days=7)
    )

    # Pass only the low value
    pasttime = now - datetime.timedelta(days=10)
    args = {
        'date': '>=%s' % datetimeutil.date_to_string(pasttime)
    }
    params = search.get_parameters(**args)
    ok_('date' in params)
    eq_(len(params['date']), 2)
    eq_(params['date'][0].operator, '<=')
    eq_(params['date'][1].operator, '>=')
    eq_(params['date'][0].value.date(), now.date())
    eq_(params['date'][1].value.date(), pasttime.date())

    # Pass the two values
    pasttime = now - datetime.timedelta(days=10)
    args = {
        'date': [
            '<%s' % datetimeutil.date_to_string(now),
            '>%s' % datetimeutil.date_to_string(pasttime),
        ]
    }
    params = search.get_parameters(**args)
    ok_('date' in params)
    eq_(len(params['date']), 2)
    eq_(params['date'][0].operator, '<')
    eq_(params['date'][1].operator, '>')
    eq_(params['date'][0].value.date(), now.date())
    eq_(params['date'][1].value.date(), pasttime.date())
Developer: 4thAce, Project: socorro, Lines: 56, Source: test_search_common.py
Example 14: test_new_crashes
def test_new_crashes(self):
    new_crash_source = ESNewCrashSource(self.config)

    self.index_crash(
        a_processed_crash,
        raw_crash=a_raw_crash,
        crash_id=a_processed_crash['uuid']
    )
    self.index_crash(
        a_firefox_processed_crash,
        raw_crash=a_raw_crash,
        crash_id=a_firefox_processed_crash['uuid']
    )
    other_firefox_processed_crash = deepcopy(a_firefox_processed_crash)
    other_firefox_processed_crash['uuid'] = (
        other_firefox_processed_crash['uuid'].replace('a', 'e')
    )
    other_firefox_processed_crash['date_processed'] = (
        utc_now() - datetime.timedelta(days=1)
    )
    self.index_crash(
        other_firefox_processed_crash,
        raw_crash=a_raw_crash,
        crash_id=other_firefox_processed_crash['uuid']
    )
    self.refresh_index()

    assert self.connection.get(
        index=self.config.elasticsearch.elasticsearch_index,
        id=a_processed_crash['uuid']
    )
    assert self.connection.get(
        index=self.config.elasticsearch.elasticsearch_index,
        id=a_firefox_processed_crash['uuid']
    )

    # same test now that there is a processed crash in there
    generator = new_crash_source.new_crashes(
        utc_now() - datetime.timedelta(days=1),
        'Firefox',
        ['43.0.1']
    )
    eq_(list(generator), [a_firefox_processed_crash['uuid']])
Developer: 4thAce, Project: socorro, Lines: 42, Source: test_new_crash_source.py
Example 15: _get_base
def _get_base(self, crash_id):
    """This method overrides the base method to define the daily file
    system root directory name. While the base class uses a YYYYMMDD
    form, this class substitutes a simple DD form. This is the
    mechanism of directory recycling: on the first day of a new month
    we return to the same directory structures that were created on
    the first day of the previous month."""
    date = dateFromOoid(crash_id)
    if not date:
        date = utc_now()
    date_formatted = "%02d" % (date.day,)
    return [self.config.fs_root, date_formatted]
Developer: 4thAce, Project: socorro, Lines: 12, Source: crashstorage.py
Example 16: get_missing_fields
def get_missing_fields(self):
    """Return a list of all missing fields in our database.

    Take the list of all fields that were indexed in the last two weeks
    and do a diff with the list of known fields.
    """
    now = datetimeutil.utc_now()
    two_weeks_ago = now - datetime.timedelta(weeks=2)
    indices = self.generate_list_of_indexes(two_weeks_ago, now)

    es_connection = self.get_connection()
    index_client = elasticsearch.client.IndicesClient(es_connection)
    doctype = self.config.elasticsearch.elasticsearch_doctype

    def parse_mapping(mapping, namespace):
        """Return a set of all fields in a mapping. Parse the mapping
        recursively. """
        fields = set()

        for key in mapping:
            field = mapping[key]
            if namespace:
                field_full_name = '.'.join((namespace, key))
            else:
                field_full_name = key

            if 'properties' in field:
                fields.update(
                    parse_mapping(
                        field['properties'],
                        field_full_name
                    )
                )
            else:
                fields.add(field_full_name)

        return fields

    all_existing_fields = set()
    for index in indices:
        try:
            mapping = index_client.get_mapping(
                index=index,
            )
            properties = mapping[index]['mappings'][doctype]['properties']
            all_existing_fields.update(parse_mapping(properties, None))
        except elasticsearch.exceptions.NotFoundError as e:
            # If an index does not exist, this should not fail
            self.config.logger.warning(
                'Missing index in elasticsearch while running '
                'SuperSearchFields.get_missing_fields, error is: %s',
                str(e)
            )
Developer: 4thAce, Project: socorro, Lines: 53, Source: super_search_fields.py
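The nested parse_mapping helper in Example 16 flattens an Elasticsearch mapping into dotted field names. A self-contained sketch with a made-up mapping fragment (the field names below are purely illustrative, not taken from a real index):

def parse_mapping(mapping, namespace):
    """Return the set of all field names in a mapping, recursing into 'properties'."""
    fields = set()
    for key, field in mapping.items():
        field_full_name = '.'.join((namespace, key)) if namespace else key
        if 'properties' in field:
            fields.update(parse_mapping(field['properties'], field_full_name))
        else:
            fields.add(field_full_name)
    return fields

sample = {
    'processed_crash': {
        'properties': {
            'signature': {'type': 'string'},
            'json_dump': {'properties': {'system_info': {'type': 'object'}}},
        }
    }
}
print(parse_mapping(sample, None))
# {'processed_crash.signature', 'processed_crash.json_dump.system_info'} (set order may vary)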
Example 17: get_adu_by_signature
def get_adu_by_signature(self, **kwargs):
    """Return a list of ADUs and crash counts by signature and ADU date
    """
    now = datetimeutil.utc_now().date()
    lastweek = now - datetime.timedelta(weeks=1)

    filters = [
        ("start_date", lastweek, "date"),
        ("end_date", now, "date"),
        ("signature", None, "str"),
        ("channel", None, "str"),
        ("product_name", None, "str"),
    ]
    params = external_common.parse_arguments(filters, kwargs)

    for param in ("start_date", "end_date", "signature", "channel"):
        if not params[param]:
            raise MissingArgumentError(param)

    if (params.end_date - params.start_date) > datetime.timedelta(days=365):
        raise BadArgumentError('Duration too long. Max 365 days.')

    sql_query = """
        SELECT
            product_name,
            signature,
            adu_date::TEXT,
            build_date::TEXT,
            buildid::TEXT,
            crash_count,
            adu_count,
            os_name,
            channel
        FROM crash_adu_by_build_signature
        WHERE adu_date BETWEEN %(start_date)s AND %(end_date)s
        AND product_name = %(product_name)s
        AND channel = %(channel)s
        AND signature = %(signature)s
        ORDER BY buildid
    """

    error_message = (
        "Failed to retrieve crash ADU by build signature from PostgreSQL"
    )
    results = self.query(sql_query, params, error_message=error_message)

    crashes = results.zipped()

    return {
        "hits": crashes,
        "total": len(crashes)
    }
Developer: ahlfors, Project: socorro, Lines: 53, Source: crashes.py
Example 18: createNewOoid
def createNewOoid(timestamp=None, depth=None):
    """Create a new Ooid for a given time, to be stored at a given depth
    timestamp: the year-month-day is encoded in the ooid. If None, use the current day
    depth: the expected storage depth is encoded in the ooid. If None, use the defaultDepth
    returns a new opaque id string holding 24 random hex digits and encoded date and depth info
    """
    if not timestamp:
        timestamp = utc_now().date()
    if not depth:
        depth = defaultDepth
    assert depth <= 4 and depth >= 1
    uuid = str(uu.uuid4())
    return "%s%d%02d%02d%02d" % (uuid[:-7], depth, timestamp.year % 100, timestamp.month, timestamp.day)
Developer: mozilla, Project: socorrolib, Lines: 13, Source: ooid.py
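As the return statement in Example 18 shows, the last seven characters of the random UUID are replaced by one depth digit plus a YYMMDD date, which is how dateFromOoid (used in Example 15) can later recover the date from a crash id. A small decoding sketch; the helper name decode_ooid_suffix is made up for illustration, and it assumes the two-digit year means 20YY:

import datetime

def decode_ooid_suffix(ooid):
    """Recover the storage depth and encoded date from an ooid's last 7 characters."""
    depth = int(ooid[-7])
    year = 2000 + int(ooid[-6:-4])   # assumption: the two-digit year is 20YY
    month = int(ooid[-4:-2])
    day = int(ooid[-2:])
    return depth, datetime.date(year, month, day)

# e.g. an ooid ending in '...2160301' decodes to depth 2 and datetime.date(2016, 3, 1).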
Example 19: uuidToOoid
def uuidToOoid(uuid, timestamp=None, depth=None):
    """Create an ooid from a 32-hex-digit string in regular uuid format.
    uuid: must be a uuid in the expected format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxx7777777
    timestamp: the year-month-day is encoded in the ooid. If None, use the current day
    depth: the expected storage depth is encoded in the ooid. If None, use the defaultDepth
    returns a new opaque id string holding the first 24 digits of the provided uuid and encoded date and depth info
    """
    if not timestamp:
        timestamp = utc_now().date()
    if not depth:
        depth = defaultDepth
    assert depth <= 4 and depth >= 1
    return "%s%d%02d%02d%02d" % (uuid[:-7], depth, timestamp.year % 100, timestamp.month, timestamp.day)
Developer: mozilla, Project: socorrolib, Lines: 13, Source: ooid.py
Example 20: test_get_with_indices
def test_get_with_indices(self, mocked_es):
    mocked_connection = mock.Mock()
    mocked_es.Elasticsearch.return_value = mocked_connection

    # Test default indices.
    self.api.post(
        query='{}'
    )
    mocked_connection.search.assert_called_with(
        body={},
        index=[self.api.config.elasticsearch.elasticsearch_index],
        doc_type=self.api.config.elasticsearch.elasticsearch_doctype
    )

    # Test all indices.
    self.api.post(
        query='{}',
        indices=['ALL']
    )
    mocked_connection.search.assert_called_with(
        body={}
    )

    # Test forcing indices.
    self.api.post(
        query='{}',
        indices=['socorro_201801', 'socorro_200047', 'not_an_index']
    )
    mocked_connection.search.assert_called_with(
        body={},
        index=['socorro_201801', 'socorro_200047', 'not_an_index'],
        doc_type=self.api.config.elasticsearch.elasticsearch_doctype
    )

    # Test default indices with an index schema based on dates.
    index_schema = 'socorro_%Y%W'
    config = self.get_base_config(es_index=index_schema)
    api = Query(config=config)

    now = datetimeutil.utc_now()
    last_week = now - datetime.timedelta(days=7)
    indices = api.generate_list_of_indexes(last_week, now)

    api.post(
        query='{}'
    )
    mocked_connection.search.assert_called_with(
        body={},
        index=indices,
        doc_type=api.config.elasticsearch.elasticsearch_doctype
    )
Developer: ahlfors, Project: socorro, Lines: 51, Source: test_query.py
Note: the socorrolib.lib.datetimeutil.utc_now examples in this article were compiled by 纯净天空 from source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and any reuse or redistribution must follow the license of the corresponding project. Do not republish without permission.