def list_objects(bucketName):  # noqa: E501
    """List objects by bucket name

    Returns list of objects in bucket # noqa: E501

    :param bucketName: bucket name
    :type bucketName: str

    :rtype: OUTPUT
    """
    result = []
    if validate_system():
        try:
            session = boto3.Session(aws_access_key_id, aws_secret_access_key)
            s3 = session.resource('s3')

            # Collect every key in the bucket; keys are returned UTF-8
            # encoded (bytes), matching the original API contract.
            mybucket = s3.Bucket(bucketName)
            for obj in mybucket.objects.all():  # 'obj', not builtin 'object'
                result.append(obj.key.encode("utf-8"))
            return result
        except Exception:
            # Boundary handler: any AWS/network failure is reported to the
            # API caller as a plain error string (bare 'except:' narrowed so
            # KeyboardInterrupt/SystemExit are no longer swallowed).
            return 'Can not list files in bucket'
    else:
        return 'Amazon S3 is not configured correctly in /etc/config.yaml file'
Ejemplo n.º 2
0
def transfer_resources():
    """Download object KEY from BUCKET_NAME to the local path ''.

    A missing object (HTTP 404) is reported on stdout; any other
    client error is re-raised to the caller.

    NOTE(review): the destination path is the empty string — presumably a
    placeholder; confirm the intended local filename.
    """
    try:
        s3.Bucket(BUCKET_NAME).download_file(KEY, '')
    except botocore.exceptions.ClientError as e:
        # Guard clause: only a 404 is expected and handled here.
        code = e.response['Error']['Code']
        if code != '404':
            raise
        print('The object does not exist')
Ejemplo n.º 3
0
def handler(event, context) -> dict:
    """Lambda entry point: ping every blog URL stored in the S3 bucket.

    Lists all object keys in BLOG_BUCKET_NAME, skips keys matching
    RE_TO_SKIP, and requests each remaining "/<key>" path over HTTPS via
    ping_url(). After any failure the connection is re-created so later
    pings can still proceed.

    :param event: Lambda event payload (unused).
    :param context: Lambda context object (unused).
    :return: an empty dict (the original '-> None' annotation was wrong —
        the function has always returned {}).
    """
    print(f"BLOG_BUCKET_NAME: {BLOG_BUCKET_NAME}")
    print(f"HOSTNAME: {HOSTNAME}")

    s3 = boto3.resource("s3")
    http_conn = http.client.HTTPSConnection(HOSTNAME, 443)

    bucket = s3.Bucket(BLOG_BUCKET_NAME)
    # Lazily filter out keys that should not be pinged, then map each key
    # to a site-relative URL. (The original 'List[str]' annotation was
    # incorrect — this is a generator, not a list.)
    matching_keys = (
        key for key in bucket.objects.all()
        if RE_TO_SKIP.search(key.key) is None)
    urls = ("/" + key.key for key in matching_keys)
    for url in urls:
        try:
            ping_url(url, http_conn)
        except Exception:
            # Log and rebuild the connection instead of aborting the run
            # (bare 'except:' narrowed to Exception).
            print("exception")
            traceback.print_exc()
            http_conn = http.client.HTTPSConnection(HOSTNAME, 443)

    return {}
Ejemplo n.º 4
0
    def write_to_s3_bucket(data_tweets):
        """Write a batch of tweets to the 'tweetBucket' S3 bucket as CSV.

        Each tweet tuple is joined into a comma-separated line (None
        fields become ''), and all lines are uploaded as one object named
        examples/file_<index>.csv. The bucket is created if it does not
        exist and its ACL is set to public-read.

        Fix: the original used Python 2 ``print`` statements, which are a
        SyntaxError under Python 3 (the rest of this file uses Python 3
        syntax); converted to ``print()`` calls.

        :param data_tweets: list of tweet tuples
        :return: None
        """
        s3 = boto3.resource('s3', aws_access_key_id=AWS_Access_Key_ID, aws_secret_access_key=AWS_Secret_Access_Key)
        global index
        index += 1
        fn = "examples/file_" + str(index) + ".csv"
        print("writing 5 tweets")

        # One CSV-ish line per tweet; None fields are replaced with ''.
        all_twt = "\n".join(
            ",".join(x or '' for x in twt) for twt in data_tweets)

        t_bucket = s3.Bucket('tweetBucket')
        exists = True
        try:
            s3.meta.client.head_bucket(Bucket='tweetBucket')
        except botocore.exceptions.ClientError as e:
            # If a client error is thrown, then check that it was a 404 error.
            # If it was a 404 error, then the bucket does not exist.
            error_code = int(e.response['Error']['Code'])
            if error_code == 404:
                exists = False
        if not exists:
            print("creating bucket")
            t_bucket = s3.create_bucket(Bucket='tweetBucket')

        key = s3.Object("tweetBucket", fn)
        key.put(Body=all_twt)
        # NOTE(review): this makes the whole bucket public-read on every
        # write — confirm that is intended rather than a per-object ACL.
        t_bucket.Acl().put(ACL='public-read')
        print("done writing to bucket files %s" % fn)
        return
def list_buckets():  # noqa: E501
    """Get all buckets

    Returns a list of buckets # noqa: E501


    :rtype: List[OUTPUT]
    """
    result = []
    if validate_system():
        try:
            session = boto3.Session(aws_access_key_id, aws_secret_access_key)
            s3 = session.resource('s3')

            # Bug fix: the original body was copy-pasted from list_objects()
            # and referenced an undefined 'bucketName' (guaranteed
            # NameError). Per the docstring, enumerate bucket names instead.
            # Names are UTF-8 encoded (bytes) to match list_objects().
            for bucket in s3.buckets.all():
                result.append(bucket.name.encode("utf-8"))
            return result
        except Exception:
            # Boundary handler: AWS/network failures become an error string
            # (bare 'except:' narrowed to Exception).
            return 'Can not list files'
    else:
        return 'Amazon S3 is not configured correctly in /etc/config.yaml file'
Ejemplo n.º 6
0
# Scrape SSC 2014 results for every hall-ticket number in the range,
# write them to output.csv, then upload the CSV to S3.
for x in range(1400000001, 1499999999):
    try:
        url = "https://www.vidyavision.com/results/ssc2014.aspx?h=" + str(x)
        page = requests.get(url).text
        soup = BeautifulSoup(page, "html.parser")
        if len(soup) != 0:
            # Response is '~'-delimited; split once instead of four times.
            # Layout (per the original indices):
            # fields[0]=reg no, fields[1]=name, fields[-3]=marks,
            # fields[-1]=result.
            fields = str(soup).split('~')
            registrationNumber.append(fields[0])
            names.append(fields[1])
            result.append(fields[-1])
            marks.append(fields[-3])
            print(str(x) + "OK")
        else:
            missingNumbers.append(x)
            print(str(x) + "Not  OK")
    except Exception:
        # Best-effort scrape: log the failing number and keep going
        # (bare 'except:' narrowed to Exception).
        print("Error" + str(x))

d = [registrationNumber, names, marks, result]
# Columns can have different lengths (missing numbers) — pad with ''.
export_data = zip_longest(*d, fillvalue='')
with open('output.csv', 'w', newline='') as myfile:
    wr = csv.writer(myfile)
    wr.writerow(("registrationNumber", "names", "marks", "result"))
    wr.writerows(export_data)
# The 'with' block already closed the file; the original redundant
# myfile.close() is removed.
print("Done Writing to file")

BUCKET_NAME = 'bjoelr'
FILE_NAME = 'output.csv'
s3 = boto3.resource('s3')
# Context manager so the upload file handle is closed (the original
# open() was never closed).
with open('output.csv', 'rb') as data:
    s3.Bucket(BUCKET_NAME).put_object(Key=FILE_NAME, Body=data)
print("Done")
Ejemplo n.º 7
0
    wf = wave.open(path, 'wb')
    wf.setnchannels(1)
    wf.setsampwidth(sample_width)
    wf.setframerate(RATE)
    wf.writeframes(data)
    wf.close()


if __name__ == '__main__':
    # Record audio clips in a loop and upload each one to S3 as
    # "<session-uuid>.<segment-number>".
    print("initing s3 connection")
    bucketName = "audio-files-input"
    # SECURITY: placeholder credentials hard-coded in source — load real
    # keys from the environment or AWS config, never commit them.
    session = boto3.Session(aws_access_key_id="xxxx",
                            aws_secret_access_key="xxxx")
    s3 = session.resource('s3')
    bucket = s3.Bucket(bucketName)
    UUID = str(uuid.uuid4())
    segment = 0
    print("initing redis connection")
    #red = redis.Redis(host="52.87.226.193", port=6379)
    #red.set(UUID, "{representativeId: 1}")
    while True:  # idiomatic spelling of the original 'while 1'
        print("please speak a word into the microphone")
        record_to_file('toUpload.wav')
        print("done - result written to toUpload.wav")
        print("uploading to s3")
        with open("toUpload.wav", 'rb') as data:
            bucket.put_object(Key=UUID + "." + str(segment), Body=data)
            segment += 1
        print("uploaded to s3")