def installPipelines():
    conn = get_connection()
    client = IngestClient(conn)
    # One processor per list entry keeps the execution order explicit:
    # extract the attachment first, then drop the raw base64 field.
    client.put_pipeline(id='ingest_attachment', body={
        'description': "Extract attachment information",
        'processors': [
            {
                "attachment": {
                    "field": "data"
                }
            },
            {
                "remove": {
                    "field": "data"
                }
            }
        ]
    })
    client.put_pipeline(id='add_timestamp', body={
        'description': "Adds an index_date timestamp",
        'processors': [
            {
                "set": {
                    "field": "index_date",
                    "value": "{{_ingest.timestamp}}",
                },
            },
        ]
    })
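# The two pipelines above only run when a request names them. A minimal,
# hypothetical indexing call (assumes an 'es' Elasticsearch client, an
# existing 'docs' index, and a local file; the attachment processor expects
# the source field to be base64-encoded):
import base64

with open('report.pdf', 'rb') as fh:  # hypothetical file
    es.index(
        index='docs',
        body={'data': base64.b64encode(fh.read()).decode('ascii')},
        pipeline='ingest_attachment',
    )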
Example no. 2
def load_ingest_pipeline_settings(es, pipeline_settings_path):
    p = IngestClient(es)
    with open(pipeline_settings_path) as json_file:
        content = json.load(json_file)
    p.put_pipeline(id='attachment', body=content)

    return p
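# A loaded pipeline can be exercised without indexing anything via the
# simulate API (a sketch; assumes the JSON file defined an attachment
# processor reading a base64 'data' field, and reuses the 'es' client):
result = IngestClient(es).simulate(
    id='attachment',
    body={'docs': [{'_source': {'data': 'ZGVtbw=='}}]},  # base64 for "demo"
)
print(result['docs'][0])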
def check_pnc_pipeline(esconn, pipeline_name):
    ingest = IngestClient(esconn)
    try:
        pipeline = ingest.get_pipeline(id=pipeline_name)
        print(pipeline)
        return True
    except NotFoundError:
        return False
    except Exception as ex:
        raise ES_PIPELINE_ERROR(ex)
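# ES_PIPELINE_ERROR is referenced but never defined in these snippets; a
# minimal stand-in so check_pnc_pipeline stays runnable (an assumption: the
# original project presumably ships its own exception type):
class ES_PIPELINE_ERROR(Exception):
    """Raised when an ingest pipeline operation fails."""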
Example no. 4
def create_pipeline():
    pipeline_body = {
        "description":
        "Adds timestamp called timestamp to documents",
        "processors": [{
            "set": {
                "field": "_source.timestamp",
                "value": "{{_ingest.timestamp}}"
            }
        }]
    }
    p = IngestClient(es)
    p.put_pipeline(id='timestamp', body=pipeline_body)
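# Rather than passing pipeline='timestamp' on every index request, the
# pipeline can be made an index default so all writes run through it
# (a sketch; the 'events' index name is hypothetical):
es.indices.put_settings(
    index='events',
    body={'index.default_pipeline': 'timestamp'},
)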
def create_ingest_pipeline(esconn, pipeline_name, data_file):
    ingest = IngestClient(esconn)
    try:
        # load the pipeline definition and close the file promptly
        with open(data_file) as pipeline_json:
            body = json.load(pipeline_json)
        pipeline = ingest.put_pipeline(
            id=pipeline_name,
            body=body
        )
        if not pipeline.get('acknowledged'):
            raise ES_PIPELINE_ERROR('Failed to create pipeline. Response: ', pipeline)
        print("SUCCESS: Created Pipeline: " + pipeline_name)
    except Exception as ex:
        raise ES_PIPELINE_ERROR(ex)
Example no. 6
def create_index(client, *, index_name, **kwargs):
    p = IngestClient(client)
    p.put_pipeline(id='document_attachment',
                   body={
                       'description': "Extract attachment information",
                       'processors': [{
                           "attachment": {
                               "field": "source_file"
                           }
                       }]
                   })

    index = Index(index_name, using=client)
    index.doc_type(Document)
    try:
        index.create()
    except RequestError:
        print(f"Index named '{index_name}' already exists", file=sys.stderr)
        sys.exit(1)
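# The attachment processor comes from the ingest-attachment plugin, so the
# put_pipeline call above fails on nodes without it (bundled only from
# Elasticsearch 8.x; earlier versions need
# `bin/elasticsearch-plugin install ingest-attachment`). A hypothetical
# indexing call through the pipeline, reusing the names from create_index:
import base64

with open('contract.pdf', 'rb') as fh:  # hypothetical file
    client.index(
        index=index_name,
        body={'source_file': base64.b64encode(fh.read()).decode('ascii')},
        pipeline='document_attachment',
    )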
Example no. 7
  def process_item(self, item, _spider):
    index_name = 'yna_news_total_' + datetime.datetime.now().strftime('%Y%m')

    doc = dict(item)

    if not self.es.indices.exists(index=index_name):
      self.es.indices.create(index=index_name)

    client = IngestClient(self.es)
    settings = {
        "description": "Adds a field to a document with the time of ingestion",
        "processors": [
            {
                "set": {
                    "field": "@timestamp",
                    "value": "{{_ingest.timestamp}}"
                }
            }
        ]
    }
    client.put_pipeline(id='timestamp', body=settings)

    okt = Okt()

    # keep only nouns longer than one character; fall back to an empty string
    # so analyzed_words is never empty
    nouns = okt.nouns(item['content'])
    words = [noun for noun in nouns if len(noun) > 1]
    if not words:
      words.append("")

    doc['analyzed_words'] = words
    logger.debug("doc:\n%s", doc)

    self.es.index(index=index_name, doc_type='string', body=doc, pipeline="timestamp")
    self.es.indices.refresh(index=index_name)

    return item
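# put_pipeline is idempotent, but re-creating the pipeline (and an Okt
# instance) on every scraped item is wasteful; doing it once at spider
# start-up and bulk-indexing with the same pipeline is cheaper. A sketch
# (the 'es' client, index_name, and docs list are assumptions):
from elasticsearch import helpers

actions = ({'_index': index_name, '_source': d} for d in docs)
helpers.bulk(es, actions, pipeline='timestamp')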
Example no. 9
from elasticsearch.client.ingest import IngestClient
from elasticsearch import Elasticsearch

es = Elasticsearch([{'host': 'localhost', 'port': 9200}])
pipeline = IngestClient(es)
pipeline.put_pipeline(
    id='time-to-minutes',
    body={
        'description': "Extract time from datetime and convert into minutes",
        'processors': [{
            "dissect": {
                "field": "working_hours",
                "pattern": "%{?date} %{tmp_hours}:%{tmp_minutes}:%{?seconds}"
            }
        }, {
            "convert": {
                "field": "tmp_hours",
                "type": "integer"
            }
        }, {
            "convert": {
                "field": "tmp_minutes",
                "type": "integer"
            }
        }, {
            "script": {
                "source":
                "\n ctx.working_minutes = (ctx.tmp_hours * 60) + ctx.tmp_minutes;\n "
            }
        }]
        # the original snippet is truncated here; any remaining processors are
        # not recoverable from the source
    })
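# A quick sanity check of the dissect -> convert -> script chain; for the
# sample value below the pipeline should add working_minutes = 9 * 60 + 30
# (a sketch using the simulate API on the client created above):
result = pipeline.simulate(
    id='time-to-minutes',
    body={'docs': [{'_source': {'working_hours': '2021-01-01 09:30:00'}}]},
)
print(result['docs'][0]['doc']['_source']['working_minutes'])  # 570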
p = IngestClient(es_client).put_pipeline(
    id='ingest_processor',
    body={
        "description": "Extract attachment information",
        # One processor per list entry makes the order explicit: attachment,
        # remove, langdetect, then the per-language set processors.
        "processors": [{
            "attachment": {
                "field": "data",
                "target_field": "attachment",
                "properties": ["content", "content_type"]
            }
        }, {
            "remove": {
                "field": "data"
            }
        }, {
            "langdetect": {
                "if": "ctx.attachment.content != null",
                "field": "attachment.content",
                "target_field": "lang"
            }
        }, {
            "set": {
                "if": "ctx.lang != null && ctx.lang == \"fr\"",
                "field": "content_fr",
                "value": "{{attachment.content}}"
            }
        }, {
            "set": {
                "if": "ctx.lang != null && ctx.lang == \"de\"",
                "field": "content_de",
                "value": "{{attachment.content}}"
            }
        }, {
            "set": {
                "if": "ctx.lang != null && ctx.lang == \"it\"",
                "field": "content_it",
                "value": "{{attachment.content}}"
            }
        }, {
            "set": {
                "if": "ctx.lang != null && ctx.lang == \"en\"",
                "field": "content_en",
                "value": "{{attachment.content}}"
            }
        }, {
            "set": {
                "if": "ctx.attachment?.content != null && ctx.content_fr == null && ctx.content_de == null && ctx.content_it == null && ctx.content_en == null",
                "field": "content",
                "value": "{{attachment.content}}"
            }
        }, {
            "set": {
                "if": "ctx.attachment?.content_type != null",
                "field": "type",
                "value": "{{attachment.content_type}}"
            }
        }, {
            "remove": {
                "field": "attachment"
            }
        }]
    })
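# langdetect is not a stock processor: it comes from the community
# ingest-langdetect plugin, so this pipeline (like the attachment processor)
# only installs on clusters with the right plugins. A smoke test via the
# simulate API (a sketch; 'RnJvbWFnZQ==' is base64 for "Fromage", which
# should route the extracted text into content_fr):
result = IngestClient(es_client).simulate(
    id='ingest_processor',
    body={'docs': [{'_source': {'data': 'RnJvbWFnZQ=='}}]},
)
print(result['docs'][0])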
Example no. 11
def _put_files_pipeline():
    ingestion_pipeline_client = IngestClient(current_search.client)
    ingestion_pipeline_client.put_pipeline(
        id=current_app.config["ES_FULLTEXT_PIPELINE_NAME"], body=FULLTEXT_PIPELINE_SETUP
    )
Example no. 12
import socket
import time

def wait_for_port(port, host='localhost', timeout=5.0):
    # NOTE: the original snippet begins mid-function; this head is a standard
    # reconstruction (signature and default timeout are assumptions).
    start_time = time.perf_counter()
    while True:
        try:
            # retry until the port accepts TCP connections or we time out
            with socket.create_connection((host, port), timeout=timeout):
                break
        except OSError as ex:
            time.sleep(0.01)
            if time.perf_counter() - start_time >= timeout:
                raise TimeoutError(
                    'Waited too long for the port {} on host {} to start accepting connections.'
                    .format(port, host)) from ex


o = urlparse(ELASTICSEARCH)
wait_for_port(o.port, o.hostname)

es = Elasticsearch([ELASTICSEARCH])

# add attachment pipeline
p = IngestClient(es)
p.put_pipeline(id='attachment',
               body={
                   'description': "Extract attachment information",
                   'processors': [{
                       "attachment": {
                           "field": "data"
                       }
                   }]
               })

# find all files in all zip files and index them
# TODO: would the foreach processor help here?
for z in glob('%s/*.zip' % (INPUT)):
    zipFilename = os.path.basename(z)
    print(zipFilename)
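# The attachment pipeline above expects base64 in the 'data' field; a sketch
# of what the elided loop body might look like (the 'files' index name is an
# assumption):
import base64
import zipfile

with zipfile.ZipFile(z) as zf:
    for name in zf.namelist():
        es.index(
            index='files',
            body={'data': base64.b64encode(zf.read(name)).decode('ascii')},
            pipeline='attachment',
        )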
Example no. 13
def test_cli_removes_files_pipeline(inspire_app, cli):
    cli.invoke(["index", "delete-files-pipeline"])
    ingest_client = IngestClient(current_search.client)
    with pytest.raises(NotFoundError):
        ingest_client.get_pipeline(
            inspire_app.config["ES_FULLTEXT_PIPELINE_NAME"])
Example no. 14
def test_cli_put_files_pipeline(inspire_app, cli):
    cli.invoke(["index", "put_files_pipeline"])
    ingest_client = IngestClient(current_search.client)
    assert (ingest_client.get_pipeline("file_content")["file_content"]
            == FULLTEXT_PIPELINE_SETUP)
Example no. 15
def delete_files_pipeline(ctx):
    ingestion_pipeline_client = IngestClient(current_search.client)
    ingestion_pipeline_client.delete_pipeline(
        current_app.config["ES_FULLTEXT_PIPELINE_NAME"]
    )
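# delete_pipeline raises NotFoundError when the pipeline is already gone;
# passing the transport-level ignore parameter makes teardown idempotent
# (a sketch of the same call):
ingestion_pipeline_client.delete_pipeline(
    current_app.config["ES_FULLTEXT_PIPELINE_NAME"], ignore=404
)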
Example no. 16
    def account_bulk_insert_elastic(self, account):

        ElasticConnectionString = os.getenv("ELASTIC_CONNECTIONSTRING")

        targetES = Elasticsearch(ElasticConnectionString)

        p = IngestClient(targetES)

        # get_pipeline raises NotFoundError when the pipeline is missing, so
        # the existence check needs try/except rather than a falsy test
        try:
            p.get_pipeline(id="account-cost-threshold")
        except NotFoundError:
            p.put_pipeline(
                id='account-cost-threshold',
                body={
                    'description':
                    "add threshold",
                    'processors': [{
                        "set": {
                            "field": "_source.ingest_time",
                            "value": "{{_ingest.timestamp}}"
                        }
                    }, {
                        "script": {
                            "lang":
                            "painless",
                            "source":
                            "\r\n          if (ctx.containsKey(\"department\")) { \r\n            String unit = ctx['department'];\r\n            int value;\r\n            if (unit == \"SAST\") { \r\n              value = 25000; \r\n            } \r\n            else if (unit == \"CxGo\") { \r\n              value = 15000; \r\n            } \r\n            else if (unit == \"AST\") { \r\n              value = 7000; \r\n            } \r\n            else if (unit == \"AST Integration\") { \r\n              value = 1000; \r\n            } \r\n            else if (unit == \"CB\") { \r\n              value = 5000; \r\n            } \r\n            else if (unit == \"SCA\") {\r\n              value = 85000; \r\n            } \r\n            else {\r\n              value = 20000; \r\n            }\r\n            ctx['threshold_value'] = value;\r\n          }\r\n        "
                        }
                    }]
                })

        now = datetime.now()
        target_index_name = "aws-eco-account-cost-" + now.strftime("%m-%Y")
        index_template_name = "aws-eco-account-cost-template"

        #targetES.indices.delete(index=target_index_name, ignore=[400, 404])
        request_body = {
            "index_patterns": ["aws-eco-account-cost-*"],
            "settings": {
                "number_of_shards": 1,
                "number_of_replicas": 1,
                "index": {
                    "codec": "best_compression"
                },
                "default_pipeline": "account-cost-threshold",
            },
            'mappings': {
                'properties': {
                    'department': {
                        'type': 'keyword'
                    },
                    'account_name': {
                        'type': 'keyword'
                    },
                    'account_number': {
                        'type': 'keyword'
                    },
                    'keys': {
                        'type': 'keyword'
                    },
                    'value': {
                        'type': 'float'
                    },
                    'dimension_name': {
                        'type': 'keyword'
                    },
                    'dimension_value': {
                        'type': 'keyword'
                    },
                    'start_time': {
                        'format': 'dateOptionalTime',
                        'type': 'date'
                    },
                    'end_time': {
                        'format': 'dateOptionalTime',
                        'type': 'date'
                    },
                    'metrics': {
                        'type': 'keyword'
                    },
                    'forecast_mean_value': {
                        'type': 'float'
                    },
                    'forecast_prediction_interval_lowerbound': {
                        'type': 'float'
                    },
                    'forecast_prediction_interval_upperbound': {
                        'type': 'float'
                    },
                }
            }
        }

        if not targetES.indices.exists_template(index_template_name):
            targetES.indices.put_template(index_template_name,
                                          request_body,
                                          create=True)

        documents = []

        for service in account.services:
            for metric in service.metrics:
                for datapoint in metric.datapoints:

                    if datapoint != []:

                        new_row = {"_id": (account.account_number + "-" + metric.dimension_name + "-"+metric.dimension_value + "-" + datetime.strptime(datapoint.start, "%Y-%m-%d %H:%M:%S").strftime("%Y%m%d%H%M%S")).replace(" ", ""), \
                            "department": account.department, \
                            "account_name":account.account_name, \
                            "account_number":account.account_number,\
                            "keys":service.name,\
                            "value":datapoint.value, \
                            "dimension_name":metric.dimension_name, \
                            "dimension_value":metric.dimension_value, \
                            "start_time": datetime.strptime(datapoint.start, "%Y-%m-%d %H:%M:%S"), \
                            "end_time": datetime.strptime(datapoint.end, "%Y-%m-%d %H:%M:%S"),\
                            "metrics": metric.name, \
                            "forecast_mean_value": service.forecast.forecast_mean_value, \
                            "forecast_prediction_interval_lowerbound": service.forecast.forecast_prediction_interval_lowerbound, \
                            "forecast_prediction_interval_upperbound": service.forecast.forecast_prediction_interval_upperbound  }

                        documents.append(new_row)

        if documents:

            helpers.bulk(targetES,
                         documents,
                         index=target_index_name,
                         doc_type='_doc',
                         raise_on_error=True)
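# The painless script in the pipeline maps a department to a budget
# threshold; a spot check through the simulate API (a sketch using a client
# like 'p' above; per the script, "SCA" should produce threshold_value 85000):
result = p.simulate(
    id='account-cost-threshold',
    body={'docs': [{'_source': {'department': 'SCA'}}]},
)
print(result['docs'][0]['doc']['_source']['threshold_value'])  # 85000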
Example no. 17
def main():

    # Scratchpad: each block below was exercised in isolation by moving the
    # early `return` statements, so everything after the first return is
    # unreachable by design.

    #df = pandas.DataFrame(columns=["_id","department", "account_name"])

    documents = []

    new_row = {"_id": "1" , \
                            "department": "account.department", \
                            "account_name":"account.account_name"
                             }

    #df = df.append(new_row, ignore_index=True)

    documents.append(new_row)

    new_row = {"_id": "2" , \
                            "department": "account.department", \
                            "account_name":"account.account_name"
                             }

    documents.append(new_row)
    #df = df.append(new_row, ignore_index=True)

    #print(df)

    print("***************************************************************")

    #documents = df.to_dict(orient='records')

    print(documents)
    print(type(documents))

    return

    group = {
        'Keys': ['AWS CloudTrail'],
        'Metrics': {
            'AmortizedCost': {
                'Amount': '0.108152',
                'Unit': 'USD'
            },
            'BlendedCost': {
                'Amount': '0.108152',
                'Unit': 'USD'
            }
        }
    }

    print(group['Metrics'])

    for metric in group['Metrics']:
        print(group['Metrics'][metric]['Amount'])

    return

    # Create CloudWatch client
    cloudwatch = boto3.client('cloudwatch')

    # List metrics through the pagination interface
    paginator = cloudwatch.get_paginator('list_metrics')
    for response in paginator.paginate(  #Dimensions=[{'Name': 'LogGroupName'}],
            #MetricName='IncomingLogEvents',
            Namespace='AWS/EC2'):
        for row in response['Metrics']:
            print(row['MetricName'])
            for row2 in row['Dimensions']:
                print(row2['Name'])
                print(row2['Value'])

    return

    cloudwatch = boto3.client('cloudwatch')
    start_time = "2021-04-01"
    end_time = "2021-04-05"
    start_time = start_time + 'T00:00:00Z'
    end_time = end_time + 'T00:00:00Z'

    start_time = datetime.datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%SZ')
    end_time = datetime.datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%SZ')

    response = cloudwatch.get_metric_statistics(Namespace='AWS/EC2',
                                                Dimensions=[{
                                                    'Name':
                                                    'InstanceId',
                                                    'Value':
                                                    ec2.instance_id
                                                }],
                                                MetricName='CPUUtilization',
                                                StartTime=start_time,
                                                EndTime=end_time,
                                                Period=3600,
                                                Statistics=['Average'])

    datapoints = response["Datapoints"]

    print(datapoints)

    return

    ec2_list = []
    session = boto3.Session()
    ec2 = session.resource('ec2')
    instances = ec2.instances.filter()

    for instance in instances:
        availability_zone = instance.placement["AvailabilityZone"]
        state = instance.state['Name']
        account_number = instance.network_interfaces_attribute[0]['OwnerId']
        account_name = Utility.map_account_name_to_account_number(
            account_number)
        department = Utility.map_department_to_account(account_number)

    return

    print(Utility.get_service_namespace('Amazon API Gateway2'))
    return

    client = boto3.client('sts')
    response = client.get_caller_identity()

    print(response)

    return

    aws_service = AwsService()

    ec2_list = aws_service.get_aws_describe_instances()

    print(ec2_list)

    ec2 = ec2_list[0]

    df_cost = aws_service.get_aws_cost_and_usage_with_resources(
        ec2=ec2,
        start_time='2021-02-14T00:00:00Z',
        end_time='2021-02-15T00:00:00Z',
        granularity="HOURLY",
        metrics="AmortizedCost")

    print(df_cost)

    return

    #get_aws_cost_and_usage_with_resources

    targetES = Elasticsearch(
        "https://*****:*****@c11f5bc9787c4c268d3b960ad866adc2.eu-central-1.aws.cloud.es.io:9243"
    )

    p = IngestClient(targetES)

    # get_pipeline raises NotFoundError when the pipeline is missing
    try:
        p.get_pipeline(id="account-cost-threshold")
    except NotFoundError:
        p.put_pipeline(
            id='account-cost-threshold_2',
            body={
                'description':
                "add threshold",
                'processors': [{
                    "set": {
                        "field": "_source.ingest_time",
                        "value": "{{_ingest.timestamp}}"
                    }
                }, {
                    "script": {
                        "lang":
                        "painless",
                        "source":
                        "\r\n          if (ctx.containsKey(\"pu\")) { \r\n            String unit = ctx['pu'];\r\n            int value;\r\n            if (unit == \"SAST\") { \r\n              value = 25000; \r\n            } \r\n            else if (unit == \"CxGo\") { \r\n              value = 15000; \r\n            } \r\n            else if (unit == \"AST\") { \r\n              value = 7000; \r\n            } \r\n            else if (unit == \"AST Integration\") { \r\n              value = 1000; \r\n            } \r\n            else if (unit == \"CB\") { \r\n              value = 5000; \r\n            } \r\n            else if (unit == \"SCA\") {\r\n              value = 85000; \r\n            } \r\n            else {\r\n              value = 20000; \r\n            }\r\n            ctx['threshold_value'] = value;\r\n          }\r\n        "
                    }
                }]
            })

    try:
        p.get_pipeline(id="ec2-cost-idle-cpu")
    except NotFoundError:
        p.put_pipeline(
            id='ec2-cost-idle-cpu',
            body={
                'description':
                "add idle_text, cpu_percent fields",
                'processors': [{
                    "script": {
                        "lang":
                        "painless",
                        "source":
                        "\r\n          if (ctx.containsKey(\"is_idle\")) { \r\n            String text;\r\n            int idle;\r\n            if (ctx.is_idle instanceof byte ||\r\n                ctx.is_idle instanceof short ||\r\n                ctx.is_idle instanceof int ||\r\n                ctx.is_idle instanceof long ||\r\n                ctx.is_idle instanceof float ||\r\n                ctx.is_idle instanceof double)\r\n            {\r\n                idle = (int)ctx.is_idle;\r\n            } else {  \r\n                idle = Integer.parseInt(ctx['is_idle']);\r\n            }\r\n            if (idle == 0) { \r\n              text = \"In Use\";\r\n            } else if (idle == 1) {\r\n              text = \"Potential Waste\";\r\n            } else {\r\n              text = \"\";\r\n            }\r\n            ctx['idle_text'] = text;\r\n          }\r\n          float cpu;\r\n          if (ctx.containsKey(\"cpu_utilization\")) {\r\n            if (ctx.cpu_utilization instanceof byte ||\r\n                ctx.cpu_utilization instanceof short ||\r\n                ctx.cpu_utilization instanceof int ||\r\n                ctx.cpu_utilization instanceof long ||\r\n                ctx.cpu_utilization instanceof float ||\r\n                ctx.cpu_utilization instanceof double)\r\n            {\r\n                cpu = (float)ctx.cpu_utilization/100;\r\n            } else {   \r\n              cpu = Float.parseFloat(ctx['cpu_utilization'])/100;\r\n            }\r\n            ctx['cpu_percent'] = cpu;\r\n          }\r\n        "
                    }
                }]
            })

    return

    request_body = {
        "index_patterns": ["ec2-cost-*"],
        "settings": {
            "number_of_shards": 1,
            "number_of_replicas": 1
        },
        'mappings': {
            'properties': {
                'start_time': {
                    'format': 'dateOptionalTime',
                    'type': 'date'
                },
                'cpu_utilization': {
                    'type': 'float'
                },
                'network_in': {
                    'type': 'float'
                },
                'network_out': {
                    'type': 'float'
                },
                'disk_write_ops': {
                    'type': 'float'
                },
                'disk_read_ops': {
                    'type': 'float'
                },
                'disk_write_bytes': {
                    'type': 'float'
                },
                'disk_read_bytes': {
                    'type': 'float'
                },
                'ebs_write_bytes': {
                    'type': 'float'
                },
                'ebs_read_bytes': {
                    'type': 'float'
                },
                'is_idle': {
                    'type': 'short'
                },
                'availability_zone': {
                    'type': 'keyword'
                },
                'instance_id': {
                    'type': 'keyword'
                },
                'instance_type': {
                    'type': 'keyword'
                },
                'launch_time': {
                    'format': 'dateOptionalTime',
                    'type': 'date'
                },
                'state': {
                    'type': 'keyword'
                },
                'ebs_optimized': {
                    'type': 'keyword'
                },
                'account_number': {
                    'type': 'keyword'
                },
                'pu': {
                    'type': 'keyword'
                },
                'account_name': {
                    'type': 'keyword'
                },
                'cost': {
                    'type': 'float'
                },
            }
        }
    }

    if not targetES.indices.exists_template("ec2-cost-template"):
        targetES.indices.put_template("ec2-cost-template",
                                      request_body,
                                      create=True)

    return

    ec2_list = []
    session = boto3.Session()
    ec2 = session.resource('ec2')
    instances = ec2.instances.filter()

    for instance in instances:
        availability_zone = instance.placement["AvailabilityZone"]
        state = instance.state['Name']

    return

    client = boto3.client('cloudwatch')

    metric_list = [
        'CPUUtilization', 'NetworkOut', 'NetworkIn', 'EBSWriteBytes',
        'EBSReadBytes', 'DiskReadBytes', 'DiskWriteBytes'
    ]  #'NetworkPacketsOut','NetworkPacketsIn','DiskWriteOps','DiskReadOps']

    aws_service = AwsService()

    start_date = '2021-01-17'
    end_date = '2021-01-18'
    instance_id = 'i-0d4dc0ddfe07c9259'

    ec2_list = aws_service.get_aws_describe_instances()

    ec2 = ec2_list[0]

    response = client.list_metrics(Namespace='AWS/EC2',
                                   Dimensions=[
                                       {
                                           'Name': 'InstanceId',
                                           'Value': ec2.instance_id
                                       },
                                   ])

    for metric in response['Metrics']:
        if metric['MetricName'] in metric_list:
            print(metric['MetricName'])

    #pprint.pprint(response['Metrics'])

    return

    frames = []

    for metric_name in metric_list:
        statistics = 'Average'
        namespace = 'AWS/EC2'
        instance_id = ec2.instance_id
        period = 3600
        start_time = start_date
        end_time = end_date

        df = aws_service.get_aws_metric_statistics(ec2, metric_name, period,
                                                   start_time, end_time,
                                                   namespace, statistics)

        if not df.empty:
            frames.append(df)

    print(frames)

    return

    cloudwatch = boto3.client('cloudwatch')

    response = cloudwatch.get_metric_statistics(Namespace='AWS/EC2',
                                                Dimensions=[{
                                                    'Name':
                                                    'InstanceId',
                                                    'Value':
                                                    'i-0c825168d7ad6093a'
                                                }],
                                                MetricName="CPUUtilization",
                                                StartTime='2021-01-16',
                                                EndTime='2021-01-19',
                                                Period=3600,
                                                Statistics=['Average'])

    pprint.pprint(response)
    return

    dt = '2021-01-01T00:00:00Z'
    dt_time = datetime.datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ')
    dt_time = dt_time.astimezone()
    print(dt_time)
    return

    client = boto3.client('ce')

    response = client.get_cost_and_usage_with_resources(
        TimePeriod={
            'Start': '2021-01-01',
            'End': '2021-01-02'
        },
        Metrics=['AmortizedCost'],
        Granularity='HOURLY',
        Filter={
            "And": [{
                "Dimensions": {
                    "Key": "SERVICE",
                    "MatchOptions": ["EQUALS"],
                    "Values": ["Amazon Elastic Compute Cloud - Compute"]
                }
            }, {
                "Dimensions": {
                    "Key": "RESOURCE_ID",
                    "MatchOptions": ["EQUALS"],
                    "Values": ["i-0d4dc0ddfe07c9259"]
                }
            }]
        })

    datapoints = response["ResultsByTime"]

    for datapoint in datapoints:
        dt = datapoint['TimePeriod']['Start']
        dt_time = datetime.datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ')
        dt_time = dt_time.astimezone()
        print(dt_time)
        #print(datapoint['TimePeriod']['Start'])
        #print(datapoint['Total']['AmortizedCost']['Amount'])

    #pprint.pprint(response)

    return

    session = boto3.Session()
    ec2 = session.resource('ec2')
    instances = ec2.instances.filter()

    client = boto3.client('lambda', region_name='eu-west-1')
    response = client.list_functions()

    pprint.pprint(response)
    return

    account_id = boto3.client('sts').get_caller_identity().get('Account')

    name = boto3.client('organizations').describe_account(
        AccountId=account_id).get('Account').get('Name')

    print(name)

    return

    client = boto3.client('ce')
    '''
    response = client.get_cost_and_usage(
        TimePeriod={
            'Start': '2021-01-02',
            'End': '2021-01-02'
        },
        Granularity='DAILY',
        Metrics=['AMORTIZED_COST'],
        GroupBy=[
        {
            'Type': 'DIMENSION',
            'Key': 'SERVICE'
        }]
    )

    pprint.pprint(response)
    return
    '''

    response = client.get_cost_forecast(
        TimePeriod={
            'Start': '2021-01-06',
            'End': '2021-02-01'
        },
        Metric='AMORTIZED_COST',
        Granularity='MONTHLY',
        PredictionIntervalLevel=80,
    )

    print(response['ForecastResultsByTime'][0]['MeanValue'])
    print(response['ForecastResultsByTime'][0]['PredictionIntervalLowerBound'])
    print(response['ForecastResultsByTime'][0]['PredictionIntervalUpperBound'])

    return

    given_date = '2020-05-05'

    date_time_obj = datetime.datetime.strptime(given_date, '%Y-%m-%d')

    first_day_of_month = date_time_obj.replace(day=1)

    next_month = first_day_of_month + relativedelta(months=+1)

    first_day_of_month_str = first_day_of_month.strftime('%Y-%m-%d')

    print(first_day_of_month)
    print(first_day_of_month_str)
    print(next_month)
    return

    client = boto3.client('sts')

    response = client.get_caller_identity()

    print(response['Account'])

    return

    client = boto3.client('lambda')

    response = client.update_function_configuration(
        FunctionName='billingoptimizations-prod-calcBillingOptimizations',
        Environment={'Variables': {
            'TEST': '11111'
        }},
    )

    response = client.get_function_configuration(
        FunctionName='Cx-CircleCi-Pipeliene-Status-Shipper')

    print(response)
    return

    client = boto3.client('ce')
    '''
    response = client.get_cost_and_usage(
        TimePeriod={
            'Start': '2020-09-01',
            'End': '2020-12-01'
        },
        Granularity='MONTHLY',
        Metrics=['AMORTIZED_COST'],
        GroupBy=[
        {
            'Type': 'DIMENSION',
            'Key': 'SERVICE'
        }]
    )

    print(response)
    return
    '''

    response = client.get_cost_forecast(
        TimePeriod={
            'Start': '2021-01-05',
            'End': '2021-01-06'
        },
        Metric='AMORTIZED_COST',
        Granularity='DAILY',
        PredictionIntervalLevel=80,
    )
    '''
        Filter = {      
            "Dimensions": {
            "Key": "SERVICE",
            "Values": ["AWS CloudTrail","EC2 - Other"]
            }
        }
    '''

    pprint.pprint(response)

    return

    targetES = Elasticsearch(
        "https://*****:*****@c11f5bc9787c4c268d3b960ad866adc2.eu-central-1.aws.cloud.es.io:9243"
    )

    now = datetime.datetime.now()
    target_index_name = "account-billing-" + now.strftime("%m-%Y")

    request_body = {
        "settings": {
            "number_of_shards": 5,
            "number_of_replicas": 1
        },
        'mappings': {
            'properties': {
                'account': {
                    'type': 'text'
                },
                'keys': {
                    'type': 'text'
                },
                'amount': {
                    'type': 'float'
                },
                'start_time': {
                    'format': 'dateOptionalTime',
                    'type': 'date'
                },
                'end_time': {
                    'format': 'dateOptionalTime',
                    'type': 'date'
                },
                'metrics': {
                    'type': 'text'
                },
            }
        }
    }

    targetES.indices.create(index=target_index_name, body=request_body)

    return

    response = client.get_cost_and_usage(TimePeriod={
        'Start': '2020-09-01',
        'End': '2020-12-01'
    },
                                         Granularity='MONTHLY',
                                         Metrics=['AMORTIZED_COST'],
                                         GroupBy=[{
                                             'Type': 'DIMENSION',
                                             'Key': 'SERVICE'
                                         }])

    pprint.pprint(response)

    for row in response['ResultsByTime']:
        pprint.pprint(row['TimePeriod']['Start'])
        pprint.pprint(row['TimePeriod']['End'])
        for group in row['Groups']:
            pprint.pprint(group['Keys'][0])
            pprint.pprint(group['Metrics']['AmortizedCost']['Amount'])
            key_list = list(group['Metrics'].keys())
            pprint.pprint(key_list[0])

        print("************************************")

    return

    result = response['ResultsByTime'][0]['Groups']
    pprint.pprint(result)

    return

    for row in result:
        print(row['Keys'][0])
        print(row['Metrics']['AmortizedCost']['Amount'])
        print("********************")
    #print(result)

    #pprint.pprint(result)

    #pprint.pprint(response)

    return

    #print(response['Environment']['Variables'])

    results = response['Environment']['Variables']

    for key in results.keys():
        print(results[key])

    run_parallel()