def handle(self, date=None, *args, **options): if date: # ensure that date entered can be parsed entered_date = datetime.datetime.strptime(date, "%Y-%m-%d") else: # set date to yesterday if not passed in yesterday = datetime.datetime.now() - datetime.timedelta(days=1) date = yesterday.strftime("%Y-%m-%d") print "pushing logs for %s" % date # construct database query qs = LogModel.objects.extra(where=["date_trunc('day', {0}) = '{1}'".format(DATE_FIELD, date)]).order_by() results = qs.values(ENDPOINT_FIELD, USER_FIELD).annotate(calls=Count("id")) endpoint = urljoin(settings.LOCKSMITH_HUB_URL, "report_calls/") # report results for item in results: apicall( endpoint, settings.LOCKSMITH_SIGNING_KEY, api=settings.LOCKSMITH_API_NAME, date=date, endpoint=item[ENDPOINT_FIELD], key=item[USER_FIELD], calls=item["calls"], )
def handle(self, date=None, *args, **options): if not date: # set date to yesterday if not passed in yesterday = datetime.datetime.now() - datetime.timedelta(days=1) date = yesterday.strftime('%Y-%m-%d') print 'pushing logs for %s' % date dt_begin = datetime.datetime.strptime(date, '%Y-%m-%d') dt_end = dt_begin + datetime.timedelta(days=1) # construct database query results = db.logs.group(['key', 'method'], {"timestamp": {"$gte": dt_begin, "$lt": dt_end}}, {"count": 0}, "function (obj, prev) {prev.count += 1;}") endpoint = urljoin(settings.LOCKSMITH_HUB_URL, 'report_calls/') # report results for item in results: apicall(endpoint, settings.LOCKSMITH_SIGNING_KEY, api=settings.LOCKSMITH_API_NAME, date=date, endpoint=item['method'], key=item['key'], calls=int(item['count']))
def handle(self, date=None, *args, **options): if not date: # set date to yesterday if not passed in yesterday = datetime.datetime.now() - datetime.timedelta(days=1) date = yesterday.strftime('%Y-%m-%d') print 'pushing logs for %s' % date dt_begin = datetime.datetime.strptime(date, '%Y-%m-%d') dt_end = dt_begin + datetime.timedelta(days=1) # construct database query results = db.logs.group( ['key', 'method'], {"timestamp": { "$gte": dt_begin, "$lt": dt_end }}, {"count": 0}, "function (obj, prev) {prev.count += 1;}") endpoint = urljoin(settings.LOCKSMITH_HUB_URL, 'report_calls/') # report results for item in results: apicall(endpoint, settings.LOCKSMITH_SIGNING_KEY, api=settings.LOCKSMITH_API_NAME, date=date, endpoint=item['method'], key=item['key'], calls=int(item['count']))
def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT):
    """Ask the hub whether *key* is known.

    Returns True when the hub accepts the call, None when the hub answers
    404 (key not found); any other HTTP error propagates to the caller.
    """
    try:
        apicall(endpoint, signing_key, api=api, key=key)
    except urllib2.HTTPError as e:
        # 404 means "no such key"; anything else is a real failure
        if e.code != 404:
            raise
        return None
    return True
def push_key(key, replicate_too=True): if replicate_too: for kps in key.pub_statuses.filter(api__push_enabled=True): if kps.api.name in ReplicatedApiNames: replicate_key.delay(key, kps.api) endpoints = {UNPUBLISHED: 'create_key/', NEEDS_UPDATE: 'update_key/'} dirty = key.pub_statuses.exclude(status=PUBLISHED).filter( api__push_enabled=True).select_related() if not dirty: print u"Skipping push_key for {k} because all KeyPublicationStatus objects are PUBLISHED.".format(k=key.key) # Retrying immediately on failure would allow a broken or unresponsive # api to prevent other, properly functioning apis from receiving the key. # Thus we use retry_flag to delay the task retry until after attempting # to push to all apis. retry_flag = False for kps in dirty: if kps.api.name in ReplicatedApiNames: # Skip this API because we've queued a replicate_key task above print u"push_key for {k} ignoring {a} because it uses replicate_key.".format(k=key.key, a=kps.api.name) continue endpoint = urljoin(kps.api.url, endpoints[kps.status]) try: apicall(endpoint, kps.api.signing_key, api=kps.api.name, key=kps.key.key, email=kps.key.email, status=kps.key.status) print 'sent key {k} to {a}'.format(k=key.key, a=kps.api.name) kps.status = PUBLISHED kps.save() except Exception as e: ctx = { 'a': str(kps.api.name), 'k': str(key.key), 'e': str(e.read()) if isinstance(e, urllib2.HTTPError) else str(e) } print 'Caught exception while pushing key {k} to {a}: {e}'.format(**ctx) print 'Will retry' retry_flag = True if retry_flag: push_key.retry()
def replicate_key(key, api): kps = key.pub_statuses.get(api=api) endpoint = urljoin(kps.api.url, 'replicate_key/{k}/'.format(k=key.key)) try: apicall(endpoint, kps.api.signing_key, api=kps.api.name, key=kps.key.key, email=kps.key.email, status=kps.key.status) print 'replicated key {k} to {a} with status {s}'.format(k=key.key, s=key.status, a=kps.api.name) kps.status = PUBLISHED kps.save() except Exception as e: ctx = { 'a': str(kps.api.name), 'k': str(key.key), 'e': str(e.read()) if isinstance(e, urllib2.HTTPError) else str(e) } print 'Caught exception while pushing key {k} to {a}: {e}'.format(**ctx) print 'Will retry' replicate_key.retry()
def handle_noargs(self, **options): verbosity = int(options.get('verbosity', 1)) endpoints = {UNPUBLISHED: 'create_key/', NEEDS_UPDATE: 'update_key/'} actions = {UNPUBLISHED: 0, NEEDS_UPDATE: 0} failures = [] # get all non-published keys belonging to APIs with push_enabled dirty = KeyPublicationStatus.objects.exclude(status=PUBLISHED).filter( api__push_enabled=True).select_related() for kps in dirty: endpoint = urljoin(kps.api.url, endpoints[kps.status]) try: apicall(endpoint, kps.api.signing_key, api=kps.api.name, key=kps.key.key, email=kps.key.email, status=kps.key.status) actions[kps.status] += 1 kps.status = PUBLISHED kps.save() except urllib2.HTTPError, e: msg = 'endpoint=%s, signing_key=%s, api=%s, key=%s, email=%s, status=%s\n error: %s' msg = msg % (endpoint, kps.api.signing_key, kps.api.name, kps.key.key, kps.key.email, kps.key.status, e.read()) failures.append(msg)
def handle_noargs(self, **options): print "resetting all keys for %s" % settings.LOCKSMITH_API_NAME endpoint = urljoin(settings.LOCKSMITH_HUB_URL, "reset_keys/") apicall(endpoint, settings.LOCKSMITH_SIGNING_KEY, api=settings.LOCKSMITH_API_NAME)
def submit_report(log_path, log_regex, log_date_format, log_date,
                  log_custom_transform, locksmith_api_name,
                  locksmith_signing_key, locksmith_endpoint):
    """Tally successful keyed API calls for *log_date* from rotated log files
    and submit the per-(key, endpoint) counts to the locksmith hub.

    log_path: wildcard path identifying the log file family (e.g. access.log*).
    log_regex: pattern with named groups 'date', 'status', 'apikey', 'endpoint'.
    log_date: datetime.date to report on.
    log_custom_transform: optional callable to normalize endpoint strings.
    Returns the total number of calls submitted.
    """
    log_re = re.compile(log_regex)
    log_directory = os.path.dirname(log_path)
    log_file_re = re.compile(
        re.escape(os.path.basename(log_path)).replace(r'\*', '.*'))

    # only include the files that match our wildcard pattern
    unsorted_log_files = [f for f in os.listdir(log_directory)
                          if log_file_re.match(f)]

    # rotation numbers sort oddly as strings; order numerically instead,
    # putting the un-numbered (current) log first
    number_re = re.compile(r'\d+')
    log_files = sorted(
        unsorted_log_files,
        key=lambda f: int(number_re.findall(f)[0]) if number_re.search(f) else -1)

    totals = {}  # apikey -> {endpoint -> call count}
    last_loop = False
    for log_file in log_files:
        full_path = os.path.join(log_directory, log_file)
        if log_file.endswith('.gz'):
            fh = gzip.open(full_path, 'rb')
        else:
            fh = open(full_path, 'r')
        # close each handle when done -- the original leaked every file
        try:
            for row in fh:
                match = log_re.match(row)
                if not match:
                    continue
                record = match.groupdict()
                day = datetime.datetime.strptime(record['date'], log_date_format).date()
                if (day == log_date and record['status'] == '200'
                        and record['apikey'] and record['apikey'] != '-'):
                    # normalize the endpoint
                    endpoint = (log_custom_transform(record['endpoint'])
                                if log_custom_transform else record['endpoint'])
                    # add it to the tally
                    by_endpoint = totals.setdefault(record['apikey'], {})
                    by_endpoint[endpoint] = by_endpoint.get(endpoint, 0) + 1
                elif day < log_date:
                    # saw a row older than the target day: this file is the
                    # last one we need to parse
                    last_loop = True
        finally:
            fh.close()
        if last_loop:
            break

    # submit totals to the hub
    submit_date = log_date.strftime('%Y-%m-%d')
    total_submitted = 0
    for api_key, endpoints in totals.items():
        for endpoint, calls in endpoints.items():
            apicall(
                locksmith_endpoint,
                locksmith_signing_key,
                api=locksmith_api_name,
                date=submit_date,
                endpoint=endpoint,
                key=api_key,
                calls=calls,
            )
            total_submitted += calls
    return total_submitted