def _get_search_results(config):
    """Return every snap from the wide-scope search endpoint.

    Follows the '_links.next' pagination until the store stops
    providing a next page.
    """
    headers = surl.DEFAULT_HEADERS.copy()
    headers['Authorization'] = surl.get_authorization_header(
        config.root, config.discharge)
    url = (
        '{}/api/v1/snaps/search?size=250&scope=wide&arch=wide&'
        'confinement=strict,classic,devmode&'
        'fields=snap_id,channel,confinement,media,origin,developer_validation,'
        'date_published,last_updated,sections'.format(
            surl.CONSTANTS[config.store_env]['api_base_url']))
    results = []
    while url is not None:
        response = surl.store_request(
            config, method='get', url=url, headers=headers)
        response.raise_for_status()
        body = response.json()
        results.extend(body['_embedded']['clickindex:package'])
        # XXX the store hands back an 'http' (no 's') next link.
        next_link = body['_links'].get('next')
        url = next_link['href'] if next_link is not None else None
    return results
def get_publisher_metric(snap_id, metric_name, config):
    """Fetch yesterday's value of one publisher metric for a snap."""
    headers = surl.DEFAULT_HEADERS.copy()
    headers['Authorization'] = surl.get_authorization_header(
        config.root, config.discharge)
    # The store mines metrics daily with roughly a 4 hour lag, so aim
    # one day plus that lag into the past.
    target_day = (
        datetime.datetime.utcnow() - datetime.timedelta(days=1, hours=4)
    ).date().isoformat()
    payload = {
        "filters": [{
            "metric_name": metric_name,
            "snap_id": snap_id,
            "start": target_day,
            "end": target_day,
        }],
    }
    url = '{}/dev/api/snaps/metrics'.format(
        surl.CONSTANTS[config.store_env]['sca_base_url'])
    response = surl.store_request(
        config, method='post', url=url, json=payload, headers=headers)
    response.raise_for_status()
    return response.json()['metrics']
def get_snap_id(snap_name, config):
    """Resolve *snap_name* to its snap_id via the SCA info endpoint."""
    headers = dict(surl.DEFAULT_HEADERS)
    headers['Authorization'] = surl.get_authorization_header(
        config.root, config.discharge)
    url = '{}/dev/api/snaps/info/{}'.format(
        surl.CONSTANTS[config.store_env]['sca_base_url'], snap_name)
    response = surl.store_request(config, method='get', url=url, headers=headers)
    response.raise_for_status()
    return response.json()['snap_id']
def get_snap_metrics(filters, config):
    """POST metric *filters* in batches of 400 and gather every result.

    Batching keeps each request body below the store's filter limit.
    """
    headers = surl.DEFAULT_HEADERS.copy()
    headers['Authorization'] = surl.get_authorization_header(
        config.root, config.discharge)
    url = '{}/dev/api/snaps/metrics'.format(
        surl.CONSTANTS[config.store_env]['sca_base_url'])
    collected = []
    for chunk in _make_partition(filters, 400):
        response = requests.post(
            url=url, json={"filters": chunk}, headers=headers)
        response.raise_for_status()
        collected.extend(response.json()['metrics'])
    return collected
def get_snap_info(snap_name, config):
    """Return the v2 snap-info payload for *snap_name*.

    Raises:
        SnapNotFound: when the store answers 404 for the snap.
        requests.HTTPError: for any other error status.
    """
    headers = surl.DEFAULT_HEADERS.copy()
    headers["Authorization"] = surl.get_authorization_header(
        config.root, config.discharge
    )
    # The v2 info endpoint requires a device series header.
    headers["Snap-Device-Series"] = "16"
    url = "{}/v2/snaps/info/{}".format(
        surl.CONSTANTS[config.store_env]["api_base_url"], snap_name
    )
    r = requests.get(url=url, headers=headers)
    # Guard clause: map the store's 404 onto the domain exception;
    # the redundant `else` after `raise` is dropped.
    if r.status_code == 404:
        raise SnapNotFound()
    r.raise_for_status()
    return r.json()
def _refresh_discharge(config):
    """Refresh the discharge macaroon when the store flags it as stale.

    Returns the original config unchanged, or a new ClientConfig that
    has been persisted via surl.save_config().
    """
    headers = surl.DEFAULT_HEADERS.copy()
    headers["Authorization"] = surl.get_authorization_header(
        config.root, config.discharge
    )
    url = "{}/dev/api/account".format(surl.CONSTANTS[config.store_env]["sca_base_url"])
    response = requests.get(url=url, headers=headers)
    needs_refresh = (
        response.headers.get("WWW-Authenticate") == "Macaroon needs_refresh=1"
    )
    if needs_refresh:
        fresh_discharge = surl.get_refreshed_discharge(
            config.discharge, config.store_env
        )
        config = surl.ClientConfig(
            root=config.root,
            discharge=fresh_discharge,
            store_env=config.store_env,
            path=config.path,
        )
        surl.save_config(config)
    return config
def get_search_results(config):
    """Return all snaps from the search endpoint, page by page."""
    headers = surl.DEFAULT_HEADERS.copy()
    headers['Authorization'] = surl.get_authorization_header(
        config.root, config.discharge)
    url = ('{}/api/v1/snaps/search?size=500&'
           'fields=snap_id,media,origin,developer_validation'.format(
               surl.CONSTANTS[config.store_env]['api_base_url']))
    collected = []
    while url is not None:
        response = requests.get(url=url, headers=headers)
        response.raise_for_status()
        body = response.json()
        collected.extend(body['_embedded']['clickindex:package'])
        # XXX the store hands back an 'http' (no 's') next link.
        next_page = body['_links'].get('next')
        url = None if next_page is None else next_page['href']
    return collected
def get_publisher_metric(snap_id, metric_name, config):
    """Fetch yesterday's value of one publisher metric for a snap."""
    headers = surl.DEFAULT_HEADERS.copy()
    headers['Authorization'] = surl.get_authorization_header(
        config.root, config.discharge)
    # Metrics are mined daily with an approximately 4 hour delay, so
    # query the day before, shifted by that lag.
    day = (
        datetime.datetime.utcnow() - datetime.timedelta(days=1, hours=4)
    ).date().isoformat()
    payload = {
        "filters": [
            {
                "metric_name": metric_name,
                "snap_id": snap_id,
                "start": day,
                "end": day,
            }
        ],
    }
    url = '{}/dev/api/snaps/metrics'.format(
        surl.CONSTANTS[config.store_env]['sca_base_url'])
    response = surl.store_request(
        config, method='post', url=url, json=payload, headers=headers)
    response.raise_for_status()
    return response.json()['metrics']
def get_publisher_metric(snap_id, metric_name, config):
    """Fetch yesterday's value of one publisher metric for a snap."""
    headers = surl.DEFAULT_HEADERS.copy()
    headers['Authorization'] = surl.get_authorization_header(
        config.root, config.discharge)
    # Query a single-day window covering yesterday (UTC).
    day = (datetime.datetime.utcnow().date() - datetime.timedelta(1)).isoformat()
    payload = {
        "filters": [
            {
                "metric_name": metric_name,
                "snap_id": snap_id,
                "start": day,
                "end": day,
            }
        ],
    }
    url = '{}/dev/api/snaps/metrics'.format(
        surl.CONSTANTS[config.store_env]['sca_base_url'])
    response = requests.post(url=url, json=payload, headers=headers)
    response.raise_for_status()
    return response.json()['metrics']
def get_channel_metrics(snap_id, config):
    """Return weekly-active device counts per channel with a one-month delta.

    Response shape:

        {
            'channelMap': [
                {
                    'channel': {'track': 'latest', 'risk': 'edge'},
                    'weeklyActive1moDelta': 3,
                    'weeklyActive': 100,
                }
            ]
        }
    """
    headers = surl.DEFAULT_HEADERS.copy()
    headers["Authorization"] = surl.get_authorization_header(
        config.root, config.discharge
    )
    now = datetime.datetime.utcnow()
    # Account for time spent mining the metrics daily (~4h)
    yesterday = now - datetime.timedelta(days=1, hours=4)
    start = end = yesterday.date().isoformat()
    payload = {
        "filters": [
            {
                "metric_name": "weekly_installed_base_by_channel",
                "snap_id": snap_id,
                "start": start,
                "end": end,
            }
        ]
    }
    url = "{}/dev/api/snaps/metrics".format(
        surl.CONSTANTS[config.store_env]["sca_base_url"]
    )
    current = requests.post(url=url, json=payload, headers=headers)
    if current.status_code != requests.codes.ok:
        # Dump diagnostics before raising so failures are debuggable.
        print(current.headers)
        print(current.text)
        current.raise_for_status()
    current = current.json()

    # FIXME this should be a month, not 30 days.
    month_prev = yesterday - datetime.timedelta(days=30)
    month_prev = month_prev.date().isoformat()
    payload["filters"][0]["start"] = month_prev
    payload["filters"][0]["end"] = month_prev
    old = requests.post(url=url, json=payload, headers=headers)
    old.raise_for_status()
    old = old.json()

    # Index last month's series by channel name once, instead of rescanning
    # the whole list for every current series (was accidentally O(n*m)).
    # setdefault keeps the FIRST occurrence, matching the original
    # first-match-then-break behaviour on duplicate names.
    old_values = {}
    for series_old in old["metrics"][0]["series"]:
        old_values.setdefault(series_old["name"], series_old["values"][0])

    data = []
    for series_current in current["metrics"][0]["series"]:
        name = series_current["name"]
        weekly_active = series_current["values"][0]
        if name in old_values:
            delta = weekly_active - old_values[name]
        else:
            # No data from the previous month: initialise the delta to the
            # current value (i.e. treat last month's count as zero).
            delta = weekly_active
        track, risk, branch = _get_channel_parts(name)
        channel = {"track": track, "risk": risk}
        if branch:
            channel["branch"] = branch
        data.append(
            {
                "channel": channel,
                "weeklyActive": weekly_active,
                "weeklyActive1moDelta": delta,
            }
        )
    return {"channelMap": data}