import datetime
import json
import sys

import pandas as pd
import requests
from elasticsearch_dsl import Search

# simple_log() and concatdata() are project helpers defined elsewhere (not shown in this section).


def time_range(client, index, path_log):  ### Sv client
    """Return the next one-day window to load, based on the newest 'Date' in the index."""
    try:
        # Newest document from the last 7 days; keep only the top hit.
        s = Search(using=client, index=index) \
            .query('range', **{'Date': {'gte': "now-7d/d"}}) \
            .sort({"Date": {"order": "desc"}}) \
            .query("match_all")
        last_update = None
        for hit in s[0:1]:
            print(hit.Date)
            last_update = hit.Date
        # The day after the last loaded date becomes the new start; the window is one day long.
        start = datetime.datetime.strptime(last_update.strip(), "%Y-%m-%dT%H:%M:%S.%fZ")
        start = start + datetime.timedelta(days=1)
        end = start + datetime.timedelta(days=1)
        time_dict = {"start": start.strftime("%Y-%m-%d"), "end": end.strftime("%Y-%m-%d")}
    except Exception:
        error = sys.exc_info()
        simple_log(path_log, index, error[0], error[1])
        time_dict = None
    return time_dict
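# Illustrative usage sketch (not part of the original pipeline): how the Elasticsearch client
# expected by time_range() might be created and queried. The host, index name and log path
# below are assumptions, not values from this project.
def _example_time_range():
    from elasticsearch import Elasticsearch

    es_client = Elasticsearch("http://localhost:9200")           # assumed local cluster
    window = time_range(es_client, "aws-costs", "/tmp/etl.log")  # assumed index / log path
    if window is not None:
        print(window["start"], window["end"])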
def pandasnorm(response_data, index=None, path_log=None):
    """Flatten the IBM usage records into a single DataFrame."""
    name = __name__ + '.pandasnorm'
    list_df = []
    # Top-level metadata kept alongside each normalized 'usage' record.
    fields = [
        'account_id', 'resource_instance_id', 'resource_group_id', 'month',
        'pricing_country', 'billing_country', 'currency_code', 'plan_id',
        'resource_id', 'billable', 'pricing_plan_id', 'region', 'usage',
        'plan_name', 'resource_name', 'resource_instance_name', 'resource_group_name'
    ]
    try:
        for record in response_data:
            df = pd.json_normalize(record, 'usage', fields)
            # Nested columns that are not needed in the flat output.
            df = df.drop(columns=['price', 'break_down', 'discounts', 'usage'], errors='ignore')
            list_df.append(df)
        result = pd.concat(list_df)
    except Exception:
        error = sys.exc_info()
        simple_log(path_log, index, name, error[0], error[1])
        result = None
    return result
def DownloadReport(account, apikey, date, path, batch, index, path_log):
    """Fetch one IBM Cloud billing month and write it as newline-delimited JSON."""
    name = __name__ + '.DownloadReport'
    token = get_token(apikey, index=index, path_log=path_log)
    billMonth = date
    offset = ''
    data = concatdata(account, billMonth, token, offset)
    df = pandasnorm(data, index=index, path_log=path_log)
    try:
        # df['BatchID'] = batch
        dic = df.to_dict(orient='records')
        now = datetime.datetime.now().strftime('%Yy%mm%dd%Hh%Mm')
        name_file = path + 'DataIBM_' + now + '.json'
        with open(name_file, 'w') as jsonfile:
            for row in dic:
                json.dump(row, jsonfile)
                jsonfile.write('\n')
    except Exception:
        error = sys.exc_info()
        simple_log(path_log, index, batch, name, error[0], error[1])
    return None
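# Illustrative driver sketch (not from the original source): how DownloadReport() might be
# invoked for one billing month. The account ID, API key, month format, paths, batch number
# and index name are all assumptions.
def _example_download_report():
    DownloadReport(
        account="<ibm-account-id>",    # assumed placeholder
        apikey="<ibm-cloud-api-key>",  # assumed placeholder
        date="2023-01",                # assumed billing-month format
        path="/tmp/ibm/",              # assumed output folder (must exist)
        batch=1,                       # assumed batch identifier
        index="ibm-costs",             # assumed Elasticsearch index used for logging
        path_log="/tmp/etl.log",       # assumed log file
    )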
def get_token(apikey, index=None, path_log=None):
    """Exchange an IBM Cloud API key for an IAM access token."""
    name = __name__ + '.get_token'
    try:
        url = ("https://iam.cloud.ibm.com/identity/token"
               "?grant_type=urn:ibm:params:oauth:grant-type:apikey&apikey=")
        headers = {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Accept': 'application/json',
        }
        response = requests.post(url + apikey, headers=headers, data={})
        data = json.loads(response.text)
        token = data['access_token']
    except Exception:
        error = sys.exc_info()
        simple_log(path_log, index, name, error[0], error[1])
        token = None
    return token
def delete_rows(client, index, date, path_log):
    """Delete every document in the index whose 'Date' matches the given date."""
    try:
        s = Search(using=client, index=index) \
            .filter('range', **{'Date': {'gte': date, 'lte': date}})
        s.delete()
    except Exception:
        error = sys.exc_info()
        simple_log(path_log, index, error[0], error[1])
    return None
def get_id_batch(client, index, path_log):
    """Return the highest BatchID seen in the last 7 days, or None if unavailable."""
    name = __name__ + '.get_id_batch'
    last_batch = None
    try:
        s = Search(using=client, index=index) \
            .query('range', **{'Date': {'gte': "now-7d/d"}}) \
            .sort({"BatchID": {"order": "desc"}}) \
            .query("match_all")
        for hit in s[0:1]:
            last_batch = hit.BatchID
    except Exception:
        last_batch = None
        error = sys.exc_info()
        simple_log(path_log, index, last_batch, name, error[0], error[1])
    return last_batch
def processResourceInstanceUsage(account_id, billMonth, iam_token, offset, index=None, path_log=None):
    """Fetch one page of resource-instance usage for an account and billing month."""
    name = __name__ + '.processResourceInstanceUsage'
    try:
        METERING_HOST = "https://billing.cloud.ibm.com"
        USAGE_URL = ("/v4/accounts/" + account_id + "/resource_instances/usage/" + billMonth
                     + "?_limit=200&_names=true&_start=" + offset)
        url = METERING_HOST + USAGE_URL
        headers = {
            "Authorization": "{}".format(iam_token),
            "Accept": "application/json",
            "Content-Type": "application/json"
        }
        response = requests.get(url, headers=headers)
        response = response.json()
    except Exception:
        error = sys.exc_info()
        simple_log(path_log, index, name, error[0], error[1])
        response = None
    return response
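# concatdata() is called by DownloadReport() but defined elsewhere in this project. A minimal
# sketch of what such a pagination wrapper might look like, assuming each page lists the
# per-instance usage objects under a 'resources' field and signals further pages through a
# 'next_url' field carrying the next '_start' offset (both field names are assumptions, not
# confirmed against the project's actual implementation):
def _example_concatdata(account_id, billMonth, iam_token, offset=''):
    from urllib.parse import parse_qs, urlparse

    records = []
    while True:
        page = processResourceInstanceUsage(account_id, billMonth, iam_token, offset)
        if not page:
            break
        records.extend(page.get('resources', []))  # assumed field holding usage objects
        next_url = page.get('next_url')            # assumed pagination field
        if not next_url:
            break
        # Extract the '_start' value from next_url to request the following page.
        offset = parse_qs(urlparse(next_url).query).get('_start', [''])[0]
        if not offset:
            break
    return records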
def donwload(client, start, end, folder_download, path_log, index):
    """Pull daily BlendedCost per service from AWS Cost Explorer and write newline-delimited JSON."""
    metric = 'BlendedCost'
    group_by = [{'Type': 'DIMENSION', 'Key': 'SERVICE'}]
    try:
        response = client.get_cost_and_usage(
            TimePeriod={'Start': start, 'End': end},
            Granularity='DAILY',
            Metrics=[metric],
            GroupBy=group_by
        )
        response = response['ResultsByTime']
    except Exception:
        error = sys.exc_info()
        simple_log(path_log, index, error[0], error[1])
        response = None

    # Flatten each daily result into a DataFrame and stamp it with its period.
    list_df = []
    for period in response or []:
        df = pd.json_normalize(period['Groups'])
        df["Period_start"] = period['TimePeriod']['Start']
        df["Period_end"] = period['TimePeriod']['End']
        df = df.rename(columns={'Keys': 'Service',
                                'Metrics.BlendedCost.Amount': 'Amount',
                                'Metrics.BlendedCost.Unit': 'CurrencyCode'})
        df['Amount'] = df['Amount'].astype(float)
        list_df.append(df)

    try:
        result = pd.concat(list_df)
        dic = result.to_dict(orient='records')
    except Exception:
        error = sys.exc_info()
        simple_log(path_log, index, error[0], error[1])
        dic = None

    if dic is not None:
        now = datetime.datetime.now().strftime('%Ya%mm%dd%Hh%Mm')
        namefile = start + '_' + now + '.json'
        with open(folder_download + namefile, 'w') as jsonfile:
            for row in dic:
                json.dump(row, jsonfile)
                jsonfile.write('\n')
    return None
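# Illustrative usage sketch (not from the original source): the 'client' argument of
# donwload() must expose get_cost_and_usage(), i.e. a boto3 Cost Explorer client. The
# region, dates, folder and index name below are assumptions.
def _example_donwload():
    import boto3

    ce_client = boto3.client('ce', region_name='us-east-1')  # assumed region for the Cost Explorer endpoint
    donwload(ce_client,
             start='2023-01-01',           # assumed ISO dates
             end='2023-01-08',
             folder_download='/tmp/aws/',  # assumed existing folder
             path_log='/tmp/etl.log',      # assumed log file
             index='aws-costs')            # assumed Elasticsearch index used for logging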