Example #1
def load_triumph_manifest():
    try:
        triumph_manifest = json.load_s3(MANIFEST_JSON)
    except Exception:
        # Manifest missing or unreadable: create an empty one, then re-read it
        data = {"triumphs": {}, "objectives": {}}
        json.dump_s3(data, MANIFEST_JSON)
        triumph_manifest = json.load_s3(MANIFEST_JSON)
    return triumph_manifest
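Example #1 (and Example #2 below) calls json.load_s3 and json.dump_s3 without defining them; they are helpers monkey-patched onto the json module, as Example #10 shows. A minimal sketch of the setup these snippets assume, with a placeholder bucket name and placeholder key constants:

import json
import boto3

# Placeholder constants; the real values live elsewhere in the source repos
MANIFEST_JSON = "manifest.json"
JSONFILE = "scores.json"

# Monkey-patch S3-backed load/dump helpers onto the json module
s3 = boto3.resource("s3").Bucket("my-bucket")  # placeholder bucket name
json.load_s3 = lambda f: json.load(s3.Object(key=f).get()["Body"])
json.dump_s3 = lambda obj, f: s3.Object(key=f).put(Body=json.dumps(obj))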
Example #2
def load_stored_scores():
    try:
        triumph_scores = json.load_s3(JSONFILE)
    except Exception:
        # Scores file missing or unreadable: create an empty one, then re-read it
        data = {"discord_users": {}, "triumph_scores": {}}
        json.dump_s3(data, JSONFILE)
        triumph_scores = json.load_s3(JSONFILE)
    return triumph_scores
Example #3
    def connect_s3(self):
        session = boto3.Session(
            aws_access_key_id=settings.AWS_SERVER_PUBLIC_KEY,
            aws_secret_access_key=settings.AWS_SERVER_SECRET_KEY,
        )
        s3_dont_url = session.resource('s3').Bucket("dont-track-url-bucket")
        json.load_s3 = lambda f: json.load(
            s3_dont_url.Object(key=f).get()["Body"])
        self.dont_track_urls_list = json.load_s3("url")['url']

        s3 = session.resource('s3').Bucket("history-bucket")
        json.load_s3 = lambda f: json.load(s3.Object(key=f).get()["Body"])
        json.dump_s3 = lambda obj, f: s3.Object(key=f).put(
            Body=json.dumps(obj))

        data = json.load_s3("history1")
        self.df = pd.DataFrame(data)
Example #4
def load_json_s3(s3_file_key):
    """Download and parse object straight from S3"""
    print("parse_aws_json_file() Getting file: " + s3_file_key)
    try:
        return json.load_s3(s3_file_key)
    except Exception as e:
        print("parse_aws_json_file() Exception: " + str(e))
        return None
Example #5
def save(url):
    # Toggle a URL in the do-not-track list; returns True if it was removed
    track_or_not = False
    session = boto3.Session(
        aws_access_key_id=settings.AWS_SERVER_PUBLIC_KEY,
        aws_secret_access_key=settings.AWS_SERVER_SECRET_KEY,
    )
    s3 = session.resource('s3').Bucket("dont-track-url-bucket")

    json.load_s3 = lambda f: json.load(s3.Object(key=f).get()["Body"])
    json.dump_s3 = lambda obj, f: s3.Object(key=f).put(Body=json.dumps(obj))
    try:
        urls = json.load_s3("url")
        if url not in urls['url']:
            urls['url'].append(url)
        else:
            urls['url'].remove(url)
            track_or_not = True
    except Exception:
        # No URL list stored yet: start a new one containing this URL
        urls = {'url': [url]}
    json.dump_s3(urls, "url")

    make_reco_model.reco_model.update_model()
    return track_or_not
Example #6
def retail_prophet():
    # NB: these lambdas close over the local `s3`, which is assigned further
    # down, before either helper is first called
    json.load_s3 = lambda f: json.load(s3.Object(key=f).get()["Body"])
    json.dump_s3 = lambda obj, f: s3.Object(key=f).put(Body=json.dumps(obj))
    try:
        lastrows = int(os.getenv('LASTROWS'))
        print(lastrows)
    except (TypeError, ValueError):
        lastrows = None
    try:
        canales = os.getenv('CANALES').split(',')
        print(canales)
    except AttributeError:
        canales = None
    cache = os.getenv('CACHE')
    if cache:
        if cache == 'False':
            cache = False
        elif cache == 'True':
            cache = True
    else:
        cache = False
    print("CACHE IS " + str(cache))
    bucket = os.getenv('BUCKET')
    key = os.getenv('KEY')
    path = '/'.join([bucket, key])
    df = pd_read_csv_s3(path, compression="gzip")
    print(df)
    if df is None:
        print("Can't read file")
        return {}
    if canales is None or canales == 'All':
        canales = [
            "directo", "google", "google seo", "mailing", "newsroom",
            "facebook", "referrers", "paid_social_samsung", "totales"
        ]
    result = {}
    s3 = boto3.resource("s3").Bucket(bucket)
    if cache:
        try:
            print("Entered first try")
            body = json.load_s3("prophet.json")
            response = {"statusCode": 200, "body": json.dumps(body)}
            return response
        except Exception:
            print("Entered except")
            response = {
                "statusCode": 404,
                "error": ("there is no previous prophet result, please run "
                          "without cache")
            }
            return response
    else:
        result = {}
        print("Entered success:")
        for canal in canales:
            canal_df = df[['fecha', canal]]
            # Optionally restrict the forecast to the most recent rows
            if lastrows:
                canal_df = canal_df.tail(lastrows)
            result.update({canal: forecast_total(canal_df)})
        response = {"statusCode": 200, "body": json.dumps(result)}
        json.dump_s3(result, "prophet.json")
Example #7
    client = init(args.name)
    client.connect()
    client.subscribe("{}/#".format(args.name), 1, callback)
    print("Thing {} and listening for events...".format(args.name))

    while True:
        try:
            time.sleep(5)
            if __RECORDING__:
                timestamp = datetime.datetime.now().isoformat()
                print("Snap @ {} to {} UUID".format(timestamp, __UUID__))

                try:
                    manifest = json.load_s3(
                        "{}/{}/images/manifest.json".format(
                            __BASE__, __UUID__))
                except ClientError:
                    print("Initializing manifest")
                    manifest = {
                        "uuid": __UUID__,
                        "timestamp": timestamp,
                        "num_stacks": 4,
                        "stack_size": 2,
                        "captures": []
                    }

                # Upload a set of images
                for stack in range(manifest["num_stacks"]):
                    for z in range(manifest["stack_size"]):
Example #8
def update_image_index(release_label, apt_repo, common_config, image_name):
    """Updates the index file used to track bare metal images

    Current format:
    {
      "<timestamp>": {
        "raw": {
          "bot": {
            "<distribution>": {
              "file": "<organization>_<flavour>_<distribution>_<release_label>_<date><time>",
              "checksum": <md5sum_of_image>
            }
          }
        }
      },
      ...
    }
    """
    s3 = boto3.client('s3')

    # Helper methods
    json.load_s3 = lambda f: json.load(
        s3.get_object(Bucket=apt_repo, Key=f)['Body'])
    json.dump_s3 = lambda obj, f: s3.put_object(
        Bucket=apt_repo, Key=f, Body=json.dumps(obj, indent=2))

    index_key = release_label + '/images/index'

    _, flavour, distribution, release_label, timestamp = image_name.split('_')

    # Read checksum from generated file
    with open(f'/tmp/{image_name}', 'r') as checksum_file:
        checksum = checksum_file.read().replace('\n', '').split(' ')[0]
    os.remove(f'/tmp/{image_name}')

    image_data = {
        'raw': {
            flavour: {
                distribution: {
                    'file': image_name,
                    'checksum': checksum
                }
            }
        }
    }

    data = {}
    try:
        # Wait for file to be ready to write
        wait_for_index(s3, apt_repo, index_key)
        data = json.load_s3(index_key)
    except botocore.exceptions.ClientError as error:
        # If the file doesn't exist, we'll create a new one
        if error.response['Error']['Code'] == 'NoSuchKey':
            click.echo('Index file doesn\'t exist, creating a new one')
        else:
            # Any other S3 error should not silently wipe the index
            raise

    try:
        data[timestamp] = merge_dicts(data[timestamp], image_data)
    except KeyError:
        data[timestamp] = image_data

    # Write data to index file
    json.dump_s3(data, index_key)
    tag_file(s3, apt_repo, index_key, 'Lock', 'False')

    # Invalidate image index cache
    if 'cloudfront_distribution_id' in common_config:
        invalidate_file_cloudfront(common_config['cloudfront_distribution_id'],
                                   index_key)
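Example #8 calls a merge_dicts helper that isn't shown. A minimal recursive-merge sketch consistent with how it is used above (hypothetical, not the repository's actual implementation):

def merge_dicts(base, new):
    # Hypothetical helper: recursively merge `new` into a copy of `base`
    merged = dict(base)
    for key, value in new.items():
        if isinstance(merged.get(key), dict) and isinstance(value, dict):
            merged[key] = merge_dicts(merged[key], value)
        else:
            merged[key] = value
    return merged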
Example #9
        "https://platform.slack-edge.com/img/default_application_icon.png",
        "ts":
        1532746720.06
    }
    slack = slackweb.Slack(url=slack_webhook['Parameter']['Value'])

# Parameters & functions for state machine s3 bucket
state_machine_s3 = "s3-bucket-name"
s3 = boto3.resource("s3").Bucket(state_machine_s3)
json.load_s3 = lambda filename: json.load(
    s3.Object(key=filename).get()["Body"])
json.dump_s3 = lambda obj, filename: s3.Object(key=filename).put(
    Body=json.dumps(obj))
yaml.load_s3 = lambda filename: yaml.safe_load(
    s3.Object(key=filename).get()["Body"])
state_machine = json.load_s3('state_machine')


def slack_notification(url, status_code, status):
    """ Send notifications to slack """
    if notification == "slack":
        if status == "StatusOK":
            attachments = []
            slack_payload['color'] = "#36a64f"
            slack_payload['text'] = (
                '%s is back healthy and responding with status code %d' %
                (url, status_code))
            slack_payload['fallback'] = "Weburl ping status, Normal!!!"
            slack_payload['title_link'] = url
            slack_payload['fields'][0]['value'] = 'Normal'
            slack_payload['ts'] = time.time()
Example #10

# uploading a file directly to S3    https://stackoverflow.com/questions/40336918/how-to-write-a-file-or-data-to-an-s3-object-using-boto3
s3.Bucket('bucketname').upload_file('/local/file/here.txt','folder/sub/path/to/s3key')


# reading and writing json from s3

import json, boto3
s3 = boto3.resource("s3").Bucket("bucket")
json.load_s3 = lambda f: json.load(s3.Object(key=f).get()["Body"])
json.dump_s3 = lambda obj, f: s3.Object(key=f).put(Body=json.dumps(obj))
# Now you can use json.load_s3 and json.dump_s3 with the same API as load and dump
data = {"test":0}
json.dump_s3(data, "key") # saves json to s3://bucket/key
data = json.load_s3("key") # read json from s3://bucket/key
#-----------------------------------------------------------------------------------------------------
# storing a list in S3 bucket   https://dzone.com/articles/boto3-amazon-s3-as-python-object-store
import boto3
import pickle
s3 = boto3.client('s3')
myList=[1,2,3,4,5]
#Serialize the object 
serializedListObject = pickle.dumps(myList)
#Write to Bucket named 'mytestbucket' and 
#Store the list using key myList001
s3.put_object(Bucket='mytestbucket',Key='myList001',Body=serializedListObject)

# retrieving a list from S3 Bucket

import boto3
import pickle
s3 = boto3.client('s3')
#Read the stored bytes back and deserialize them into a list
serializedListObject = s3.get_object(Bucket='mytestbucket',Key='myList001')['Body'].read()
myList = pickle.loads(serializedListObject)
Example #11
def index():
    if request.method == 'GET':
        return 'OK'
    elif request.method == 'POST':
        # Store the IP address of the requester
        request_ip = ipaddress.ip_address(u'{0}'.format(request.remote_addr))

        # If VALIDATE_SOURCEIP is set to false, do not validate source IP
        if os.environ.get('VALIDATE_SOURCEIP', None) != 'false':

            # If GHE_ADDRESS is specified, use it as the hook_blocks.
            if os.environ.get('GHE_ADDRESS', None):
                hook_blocks = [str(os.environ.get('GHE_ADDRESS'))]
            # Otherwise get the hook address blocks from the API.
            else:
                hook_blocks = requests.get(
                    'https://api.github.com/meta').json()['hooks']

            # Check if the POST request is from github.com or GHE
            for block in hook_blocks:
                if ipaddress.ip_address(request_ip) in ipaddress.ip_network(
                        block):
                    break  # the remote_addr is within the network range of github.
            else:
                if str(request_ip) != '127.0.0.1':
                    abort(403)

        if request.headers.get('X-GitHub-Event') == "ping":
            return json.dumps({'msg': 'Hi!'})
        if request.headers.get('X-GitHub-Event') != "push":
            return json.dumps({'msg': "wrong event type"})

        repos = json.loads(io.open(REPOS_JSON_PATH, 'r').read())

        payload = json.loads(request.data)
        repo_meta = {
            'name': payload['repository']['name'],
            'owner': payload['repository']['owner']['name'],
        }

        # Try to match on branch as configured in repos.json
        repo = None
        match = re.match(r"refs/heads/(?P<branch>.*)", payload['ref'])
        if match:
            repo_meta['branch'] = match.groupdict()['branch']
            repo = repos.get(
                '{owner}/{name}/branch:{branch}'.format(**repo_meta), None)

            # Fallback to plain owner/name lookup
            if not repo:
                repo = repos.get('{owner}/{name}'.format(**repo_meta), None)

        if repo and repo.get('path', None):
            # Check if POST request signature is valid
            key = repo.get('key', None)
            if key:
                signature = request.headers.get('X-Hub-Signature').split(
                    '=')[1]
                if isinstance(key, str):
                    key = key.encode()
                mac = hmac.new(key, msg=request.data, digestmod=sha1)
                if not compare_digest(mac.hexdigest(), signature):
                    abort(403)

        # Unknown repository: nothing to do for this push
        if not repo:
            abort(404)

        if repo.get('action', None):
            for action in repo['action']:
                subp = subprocess.Popen(action, cwd=repo.get('path', '.'))
                subp.wait()

        if repo.get('s3bucket', None):
            s3bucketname = repo.get('s3bucket')
        else:
            print('missing s3 bucketname')
            abort(500)
        if repo.get('s3key', None):
            s3key = repo.get('s3key')
        else:
            print('missing s3 filename')
            abort(500)

        print('s3 connection')

        if os.environ.get('USE_EC2', None) == 'true':
            provider = InstanceMetadataProvider(
                iam_role_fetcher=InstanceMetadataFetcher(timeout=1000,
                                                         num_attempts=2))
            creds = provider.load()
            session = boto3.Session(aws_access_key_id=creds.access_key,
                                    aws_secret_access_key=creds.secret_key,
                                    aws_session_token=creds.token)
            s3 = session.resource('s3')
        else:
            s3 = boto3.resource('s3')
        # Bind the bucket once so both branches share the same object below
        bucket = s3.Bucket(s3bucketname)

        json.load_s3 = lambda f: json.load(bucket.Object(key=f).get()['Body'])
        json.dump_s3 = lambda obj, f: bucket.Object(key=f).put(
            Body=json.dumps(obj))
        #s3 fetch
        s3data = json.load_s3(s3key)
        datad = FilehashMap(s3data)
        commithash = payload['after']
        for commit in payload['commits']:
            for z in commit['added']:
                print(z)
                datad.additem(z, commithash)
            for z in commit['modified']:
                print(z)
                datad.additem(z, commithash)
            for z in commit['removed']:
                datad.delitem(z)
                print(z)

        print('s3 upload')
        json.dump_s3(datad.displayhashmap(), s3key)

        #set perms
        s3objacl = s3.ObjectAcl(s3bucketname, s3key)
        response = s3objacl.put(ACL='public-read')
        print('s3 done')
        return 'OK'
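Example #11 leans on a FilehashMap class that isn't shown. From its usage above (constructed from the stored JSON, additem(path, commithash), delitem(path), and displayhashmap() for re-upload), a minimal hypothetical sketch:

class FilehashMap:
    # Hypothetical sketch: maps repository file paths to commit hashes
    def __init__(self, data):
        # `data` is the JSON structure previously stored in S3
        self.hashmap = dict(data)

    def additem(self, path, commithash):
        self.hashmap[path] = commithash

    def delitem(self, path):
        self.hashmap.pop(path, None)

    def displayhashmap(self):
        return self.hashmap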