def send_to_s3(items=None, is_binary=False):
    """
    For (label, data) items in an iterable, send them to your S3 account.
    """
    conn, bucket = s3_init(AWS_ACCESS_KEY_ID, AWS_SECRET_KEY, BUCKET_NAME)
    for label, data in items:
        key = Key(bucket)
        key.key = label
        # local hash
        local_md5 = hashlib.md5(data).hexdigest()
        for item in bucket.list():
            if item.name == label:
                key.open()
                key.close()  # loads key.etag
                # remote hash (the etag is the MD5 wrapped in quote marks)
                remote_md5 = key.etag.replace('"', '')
                # If the new backup differs from the last saved one, update it
                if local_md5 != remote_md5:
                    if is_binary:
                        key.set_contents_from_filename(data)
                    else:
                        key.set_contents_from_string(data)
                break
        else:
            # No existing key with this label; upload it for the first time
            if is_binary:
                key.set_contents_from_filename(data)
            else:
                key.set_contents_from_string(data)
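
# Example usage (hypothetical data; s3_init and the AWS_* constants come from the
# surrounding module):
#   send_to_s3({'dumpdata': dumped_data, 'pg_dump': pg_dump}.items())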
Example #2
File: pypi.py Project: pooldin/pooldcode
def render_resource(key):
    key = Key(bucket=app.bucket, name=key)

    if not key.exists():
        abort(404)

    name = key.name.strip('/').split('/')[-1]
    key.open()
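    # Drop the key's name so send_file treats it as an anonymous file-like object
    # (presumably to keep Flask from interpreting the S3 key path as a local filename)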
    key.name = None
    resp = send_file(key,
                     mimetype=key.content_type,
                     attachment_filename=name,
                     as_attachment=True)

    # Adler-32 checksum of the (UTF-8-encoded) filename, mixed into the ETag below
    adname = name.encode('utf8') if isinstance(name, unicode) else name
    advalue = adler32(adname) & 0xffffffff

    resp.content_length = key.size

    resp.last_modified = time.strptime(key.last_modified,
                                       '%a, %d %b %Y %H:%M:%S %Z')

    resp.set_etag('flask-%s-%s-%s' % (key.last_modified,
                                      key.size,
                                      advalue))
    return resp
Example #3
def login():
    error = ''
    file_contents = ''
    global login_failed
    keys = []
    if login_failed < 2:
        if request.method == 'POST':
            username_form = request.form['username']
            # Fetch the list of authorized usernames from S3
            bucket = conn.get_bucket(config.buck_name, validate=True)
            k = Key(bucket)
            k.key = 'auth_users.txt'
            k.open()
            file_contents = k.read()
            if username_form in file_contents:
                session['logged_in'] = True
                session['username'] = username_form
                #session.permanent = True
                #app.permanent_session_lifetime = timedelta(seconds=300)
                return render_template('upload_db.html', username=session['username'])
            else:
                login_failed = login_failed + 1
                error += "Invalid Username. Login Again"
                return render_template('welcome.html', error=error)
    else:
        login_failed = 0
        error = 'You have exceeded the maximum number of failed login attempts. Locked out. Try again after 30 mins.'
        return render_template('welcome.html', error=error)
Example #4
File: pypi.py Project: boldfield/bottler
def render_resource(key):
    key = Key(bucket=app.bucket, name=key)

    if not key.exists():
        abort(404)

    name = key.name.strip('/').split('/')[-1]
    key.open()
    key.name = None
    return send_file(key,
                     mimetype=key.content_type,
                     attachment_filename=name,
                     as_attachment=True)
Example #5
def register_page():
    error = ''
    file_contents = ''
    try:
        username = request.form['user']
        if re.match(userreg, username):
            # Read the current user list, append the new username, and write it back
            bucket = conn.get_bucket(config.buck_name, validate=True)
            k = Key(bucket)
            k.key = 'auth_users.txt'
            k.open()
            file_contents = k.read()
            file_contents += username
            key = bucket.new_key('auth_users.txt')
            key.set_contents_from_string(file_contents)
            key.set_acl('public-read')
            return 'Successfully Registered. Login.'
        else:
            return 'Username: 3-15 characters consisting of letters or digits, with an optional - or _.'
    except Exception as e:
        return str(e)
Example #6
    def run(self, statement, BUCKET_NAME):
        """Execute the sql in the database and return the results. The results
		are a list of tuples. Each tuple has 4 values
		(title, rows, headers, status).
		"""
        #print self.conn
        if not hasattr(self, 'bucket') or not self.bucket:
            self._connect(BUCKET_NAME)
        # Remove spaces and EOL
        statement = statement.strip()
        if not statement:  # Empty string
            yield (None, None, None, None)

        # Split the sql into separate queries and run each one.
        # Unless it's saving a favorite query, in which case we
        # want to save them all together.
        if statement.startswith("\\fs"):
            components = [statement]
        else:
            components = sqlparse.split(statement)
            #print components
        for sql in components:
            # Remove trailing semi-colons.
            sql = sql.rstrip(";")

            # \G is treated specially since we have to set the expanded output.
            if sql.endswith("\\G"):
                special.set_expanded_output(True)
                sql = sql[:-2].strip()

            if not self.bucket and not (
                    sql.startswith(".open") or sql.lower().startswith("use")
                    or sql.startswith("\\u") or sql.startswith("\\?")
                    or sql.startswith("\\q") or sql.startswith("help")
                    or sql.startswith("exit") or sql.startswith("quit")):
                _logger.debug(
                    "Not connected to database. Will not run statement: %s.",
                    sql)

                #yield self.get_result2()
                raise OperationalError("Not connected to database.")
                # yield ('Not connected to database', None, None, None)
                # return

            cur = self.conn.cursor() if self.conn else None
            if 1:
                print(statement)
                #export LANG=en_US.utf-8
                #export LC_ALL=en_US.utf-8
                k = Key(self.bucket)
                stm = statement.split()
                limit = 25
                if len(stm) == 2:
                    kname, limit = stm
                else:
                    kname = stm[0]
                k.key = kname
                k.open()

                # k is now a readable file-like object; GzipFile decompresses the S3 object as it streams
                gzipped = GzipFile(None, 'rb', fileobj=k)
                reader = csv.reader(io.TextIOWrapper(gzipped,
                                                     newline="",
                                                     encoding="utf-8"),
                                    delimiter='^')
                if 1:
                    data = []
                    for id, line in enumerate(reader):
                        if id >= int(limit): break
                        data.append([id + 1] + line)

            try:  # Special command
                _logger.debug("Trying a dbspecial command. sql: %r", sql)
                for result in special.execute(cur, sql):
                    yield result
            except special.CommandNotFound:  # Regular SQL
                _logger.debug("Regular sql statement. sql: %r", sql)
                #print(sql)
                #cur.execute(sql)

                yield self.get_result2(data)
Example #7
File: gz.py Project: pie-crust/etl

import gzip, zipfile
import csv
import io
import boto
from pprint import pprint
from gzip import GzipFile
from boto.s3.key import Key
s3 = boto.connect_s3()
bname = 'home-pmt-accounting-dev'
bucket = s3.get_bucket(bname, validate=False)
k = Key(bucket)
kname = 'racct/DY_DeskPLRSRange03/file_0.IQ.Jkmk22.20190618_175726.csv.gz'
k.key = kname
k.open()
print(dir(csv))
from include.fmt import get_formatted
if 1:
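    # k is file-like after open(), so GzipFile can decompress the gzipped CSV as it streams from S3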
    gzipped = GzipFile(None, 'rb', fileobj=k)
    reader = io.TextIOWrapper(gzipped, newline="", encoding="utf-8")
    data = []
    for id, line in enumerate(reader):
        data.append(line)
        pprint(line)
        if id > 10: break

    ptitle = kname
    #headers=['Col#%d' % i for i in range(len(data[0]))]
    #print (get_formatted(ptitle,data,headers,join = True))
Example #8
class S3FunctionalityTest():
    '''
    Functionality Test of an S3 Bucket
    Only works with Keystone Auth URL v3
    '''
    options = dict()

    def __init__(self, options):
        # First we try to list the ec2 credentials

        try:
            res = json.loads(
                subprocess.check_output([
                    "openstack", "--os-auth-url", options.auth_url,
                    "--os-username", options.username, "--os-password",
                    options.password, "--os-project-name", options.tenant,
                    "--os-project-domain-name", DEFAULT_DOMAIN_NAME,
                    "--os-user-domain-name", DEFAULT_DOMAIN_NAME,
                    "--os-identity-api-version", "3", "ec2", "credentials",
                    "list", "-f", "json"
                ]))
            res[0]['Access']  # raises if no EC2 credentials exist yet

        # If they don't exist we create some
        except:
            try:
                subprocess.check_output([
                    "openstack", "--os-auth-url", options.auth_url,
                    "--os-username", options.username, "--os-password",
                    options.password, "--os-project-name", options.tenant,
                    "--os-project-domain-name", DEFAULT_DOMAIN_NAME,
                    "--os-user-domain-name", DEFAULT_DOMAIN_NAME,
                    "--os-identity-api-version", "3", "ec2", "credentials",
                    "create"
                ],
                                        stderr=subprocess.STDOUT)
            except:
                print "Could not create EC2 credentials"
                sys.exit(NAGIOS_STATE_UNKNOWN)
            res = json.loads(
                subprocess.check_output([
                    "openstack", "--os-auth-url", options.auth_url,
                    "--os-username", options.username, "--os-password",
                    options.password, "--os-project-name", options.tenant,
                    "--os-project-domain-name", DEFAULT_DOMAIN_NAME,
                    "--os-user-domain-name", DEFAULT_DOMAIN_NAME,
                    "--os-identity-api-version", "3", "ec2", "credentials",
                    "list", "-f", "json"
                ]))

        if LOCAL_DEBUG:
            print res
        _access_key = res[0]['Access']
        _secret_key = res[0]['Secret']
        _s3_host = options.s3_host

        self.conn = S3Connection(aws_access_key_id=_access_key,
                                 aws_secret_access_key=_secret_key,
                                 host=_s3_host)
        try:
            self.b = self.conn.get_bucket(DEFAULT_BUCKET_NAME)
        except:
            self.b = self.conn.create_bucket(DEFAULT_BUCKET_NAME)
        self.k = Key(self.b)
        self.k.key = 'nagiostest3'
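        # The store/read/delete checks below all operate on this single test key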

    def s3_create_bucket(self):
        """ create a bucket, does not fail if it exists
    """
        self.conn.create_bucket(DEFAULT_BUCKET_NAME)

    def s3_store_data(self):
        """ store a 3MB object in the bucket
    """

        USERHOMEDIR = os.path.expanduser('~')
        TESTFILEPATH = "%s/3MBFILE" % USERHOMEDIR
        if not os.path.exists(TESTFILEPATH):
            with open(TESTFILEPATH, "wb") as out:
                out.truncate(1024 * 1024 * 3)
        self.k.set_contents_from_filename(TESTFILEPATH)

    def s3_read_data(self):
        """ read object from bucket
    """

        self.k.open()
        self.k.read()

    def s3_delete_data(self):
        """ delete object from bucket
    """

        self.k.delete()

    def execute(self):
        results = dict()
        try:
            self.s3_create_bucket()
            self.s3_store_data()
            self.s3_read_data()
            self.s3_delete_data()
        except:
            raise
        return results
Example #9
    # To back up more stuff, just add to this dict.
    local_data = {'dumpdata': dumped_data, 'pg_dump': pg_dump}

    # Initialize S3 connection
    print("Connecting to AWS.")
    conn, bucket = s3_init(AWS_ACCESS_KEY_ID, AWS_SECRET_KEY, BUCKET_NAME)

    import hashlib
    # For each thing to back up, back it up
    for label, data in local_data.items():
        # get key
        key = Key(bucket)
        key.key = label
        # local hash
        local_md5 = hashlib.md5(data).hexdigest()
        key.open()
        key.close()  # loads key.etag
        # remote hash
        remote_md5 = key.etag.replace('"', '')  # clears quote marks
        print("*** %s ***" % label)
        print("\t*LOCAL*")
        print("\t\tmd5: %s" % local_md5)
        print("\t\tcontents: %s" % data[:25])
        print("\t*REMOTE*")
        print("\t\tmd5: %s" % remote_md5)
        print("\t\tcontents: %s" % key.get_contents_as_string()[:25])
        # If new backup is different than the last saved one, update it
        if local_md5 != remote_md5:
            print("\tMD5 values different; uploading new version.")
            key.set_contents_from_string(data)
            print("\tKey '%s' updated." % key.name)
            print("\t*NEW REMOTE*")
Example #10
	login = file.split()
	conn=S3Connection(login[1],login[2])
	mybucket = conn.get_bucket('ec2dev')
	print "Connected to S3"

except:
	print "Unable to connect to S3"
	exit()

try:
	for j in mybucket.list():
		if j.name == 'login.txt':
			print j.name
			k = Key(mybucket)
			k.key = j.name
			k.open()
			file_1 = k.read()
			print "Successfully opened login.txt"
except:
	print "Unable to open File on S3"
	exit()

		
login = file_1.split()

try:
	conn = pymysql.connect(host=login[0],user= login[1],password=login[2],db= login[4])
	print "Connected successfully to RDS"
except:
	print "Unable to connect to RDS"
	exit()
Example #11
File: s3.py Project: samuel/gypsy
class S3File(File):
    def __init__(self, bucket, name):
        self._bucket = bucket
        self._name = name
        self._key = Key(bucket=bucket, name=name.encode('utf-8'))
        self._pos = 0
        self._open = False
        self._fake_open = False
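        # _open tracks whether the underlying S3 request has actually been made;
        # _fake_open tracks the logical open/closed state seen by callers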
        self._mode = 'r'

    @property
    def name(self):
        return self._name

    @property
    def mode(self):
        return self._key.mode

    @property
    def closed(self):
        return not self._fake_open

    def size():
        doc = "The size property."
        def fget(self):
            raise NotImplementedError("S3File doesn't implement size and __len__")
        def fset(self, value):
            raise NotImplementedError("S3File doesn't implement size and __len__")
        return locals()
    size = property(**size())

    def open(self, mode="r"):
        # "Fake" open: only record the requested mode; the real S3 request is
        # deferred until read() calls self._key.open().
        self.close()
        self._mode = (mode or 'r')[0]
        self._fake_open = True

    def close(self):
        if self._open:
            self._pos = 0
            self._key.close()
        self._fake_open = False

    def seek(self, position):
        if position != 0:
            raise NotImplementedError("S3File doesn't implement seek at positions other than 0")
        if self._pos != 0:
            # TODO: This is a bit flakey I imagine
            self._key.resp = None
            self._pos = 0

    def tell(self):
        return self._pos

    def read(self, num_bytes=None):
        if not self._open:
            self._key.open(self._mode)
            self._open = True
        data = self._key.read(num_bytes)
        self._pos += len(data)
        return data

    def write(self, content):
        raise NotImplementedError("S3File doesn't implement write")

    def flush(self):
        raise NotImplementedError("S3File doesn't implement flush")
