Example #1
    def test_integration(self):
        # Test creating a full table with all options specified.
        users = Table.create('users',
                             schema=[
                                 HashKey('username'),
                                 RangeKey('friend_count', data_type=NUMBER)
                             ],
                             throughput={
                                 'read': 5,
                                 'write': 5,
                             },
                             indexes=[
                                 KeysOnlyIndex('LastNameIndex',
                                               parts=[
                                                   HashKey('username'),
                                                   RangeKey('last_name')
                                               ]),
                             ])
        self.addCleanup(users.delete)

        self.assertEqual(len(users.schema), 2)
        self.assertEqual(users.throughput['read'], 5)

        # Wait for it.
        time.sleep(60)

        # Make sure things line up if we're introspecting the table.
        users_hit_api = Table('users')
        users_hit_api.describe()
        self.assertEqual(len(users.schema), len(users_hit_api.schema))
        self.assertEqual(users.throughput, users_hit_api.throughput)
        self.assertEqual(len(users.indexes), len(users_hit_api.indexes))

        # Test putting some items individually.
        users.put_item(
            data={
                'username': '******',
                'first_name': 'John',
                'last_name': 'Doe',
                'friend_count': 4
            })

        users.put_item(
            data={
                'username': '******',
                'first_name': 'Alice',
                'last_name': 'Expert',
                'friend_count': 2
            })

        time.sleep(5)

        # Test batch writing.
        with users.batch_write() as batch:
            batch.put_item({
                'username': '******',
                'first_name': 'Jane',
                'last_name': 'Doe',
                'friend_count': 3
            })
            batch.delete_item(username='******', friend_count=2)
            batch.put_item({
                'username': '******',
                'first_name': 'Bob',
                'last_name': 'Smith',
                'friend_count': 1
            })

        time.sleep(5)

        # Does it exist? It should.
        self.assertTrue(users.has_item(username='******', friend_count=3))
        # But this shouldn't be there...
        self.assertFalse(
            users.has_item(username='******', friend_count=72948))

        # Test getting an item & updating it.
        # This is the "safe" variant (only write if there have been no
        # changes).
        jane = users.get_item(username='******', friend_count=3)
        self.assertEqual(jane['first_name'], 'Jane')
        jane['last_name'] = 'Doh'
        self.assertTrue(jane.save())

        # Test strongly consistent getting of an item.
        # Additionally, test the overwrite behavior.
        client_1_jane = users.get_item(username='******',
                                       friend_count=3,
                                       consistent=True)
        self.assertEqual(client_1_jane['first_name'], 'Jane')
        client_2_jane = users.get_item(username='******',
                                       friend_count=3,
                                       consistent=True)
        self.assertEqual(client_2_jane['first_name'], 'Jane')

        # Write & assert the ``first_name`` is gone, then...
        del client_1_jane['first_name']
        self.assertTrue(client_1_jane.save())
        check_name = users.get_item(username='******',
                                    friend_count=3,
                                    consistent=True)
        self.assertEqual(check_name['first_name'], None)

        # ...overwrite the data with what's in memory.
        client_2_jane['first_name'] = 'Joan'
        # Now a write that fails due to default expectations...
        self.assertRaises(exceptions.JSONResponseError, client_2_jane.save)
        # ... so we force an overwrite.
        self.assertTrue(client_2_jane.save(overwrite=True))
        check_name_again = users.get_item(username='******',
                                          friend_count=3,
                                          consistent=True)
        self.assertEqual(check_name_again['first_name'], 'Joan')

        # Reset it.
        jane['username'] = '******'
        jane['first_name'] = 'Jane'
        jane['last_name'] = 'Doe'
        jane['friend_count'] = 3
        self.assertTrue(jane.save(overwrite=True))

        # Test the partial update behavior.
        client_3_jane = users.get_item(username='******',
                                       friend_count=3,
                                       consistent=True)
        client_4_jane = users.get_item(username='******',
                                       friend_count=3,
                                       consistent=True)
        client_3_jane['favorite_band'] = 'Feed Me'
        # No ``overwrite`` needed due to new data.
        self.assertTrue(client_3_jane.save())
        # Expectations are checked only on the attributes being written
        # (``first_name``), so this partial save succeeds where a full
        # ``save`` would have failed.
        client_4_jane['first_name'] = 'Jacqueline'
        self.assertTrue(client_4_jane.partial_save())
        partial_jane = users.get_item(username='******',
                                      friend_count=3,
                                      consistent=True)
        self.assertEqual(partial_jane['favorite_band'], 'Feed Me')
        self.assertEqual(partial_jane['first_name'], 'Jacqueline')

        # Reset it.
        jane['username'] = '******'
        jane['first_name'] = 'Jane'
        jane['last_name'] = 'Doe'
        jane['friend_count'] = 3
        self.assertTrue(jane.save(overwrite=True))

        # Ensure that partial saves of a brand-new object work.
        sadie = Item(users,
                     data={
                         'username': '******',
                         'first_name': 'Sadie',
                         'favorite_band': 'Zedd',
                         'friend_count': 7
                     })
        self.assertTrue(sadie.partial_save())
        serverside_sadie = users.get_item(username='******',
                                          friend_count=7,
                                          consistent=True)
        self.assertEqual(serverside_sadie['first_name'], 'Sadie')

        # Test the eventually consistent query.
        results = users.query_2(username__eq='johndoe',
                                last_name__eq='Doe',
                                index='LastNameIndex',
                                attributes=('username', ),
                                reverse=True)

        for res in results:
            self.assertIn(res['username'], ['johndoe'])
            self.assertEqual(list(res.keys()), ['username'])

        # Ensure that queries with attributes don't return the hash key.
        results = users.query_2(username__eq='johndoe',
                                friend_count__eq=4,
                                attributes=('first_name', ))

        for res in results:
            self.assertEqual(res['first_name'], 'John')
            self.assertEqual(list(res.keys()), ['first_name'])

        # Test the strongly consistent query.
        c_results = users.query_2(username__eq='johndoe',
                                  last_name__eq='Doe',
                                  index='LastNameIndex',
                                  reverse=True,
                                  consistent=True)

        for res in c_results:
            self.assertEqual(res['username'], 'johndoe')

        # Test a query with query filters
        results = users.query_2(username__eq='johndoe',
                                query_filter={'first_name__beginswith': 'J'},
                                attributes=('first_name', ))

        for res in results:
            self.assertIn(res['first_name'], ['John'])

        # Test scans without filters.
        all_users = users.scan(limit=7)
        self.assertEqual(next(all_users)['username'], 'bob')
        self.assertEqual(next(all_users)['username'], 'jane')
        self.assertEqual(next(all_users)['username'], 'johndoe')

        # Test scans with a filter.
        filtered_users = users.scan(limit=2, username__beginswith='j')
        self.assertEqual(next(filtered_users)['username'], 'jane')
        self.assertEqual(next(filtered_users)['username'], 'johndoe')

        # Test deleting a single item.
        johndoe = users.get_item(username='******', friend_count=4)
        johndoe.delete()

        # Set batch get limit to ensure keys with no results are
        # handled correctly.
        users.max_batch_get = 2

        # Test the eventually consistent batch get.
        results = users.batch_get(keys=[{
            'username': '******',
            'friend_count': 4
        }, {
            'username': '******',
            'friend_count': 10
        }, {
            'username': '******',
            'friend_count': 1
        }, {
            'username': '******',
            'friend_count': 3
        }])
        batch_users = []

        for res in results:
            batch_users.append(res)
            self.assertIn(res['first_name'], ['Bob', 'Jane'])

        self.assertEqual(len(batch_users), 2)

        # Test the strongly consistent batch get.
        c_results = users.batch_get(keys=[{
            'username': '******',
            'friend_count': 1
        }, {
            'username': '******',
            'friend_count': 3
        }],
                                    consistent=True)
        c_batch_users = []

        for res in c_results:
            c_batch_users.append(res)
            self.assertIn(res['first_name'], ['Bob', 'Jane'])

        self.assertEqual(len(c_batch_users), 2)

        # Test count, but only weakly, because of write lag.
        self.assertTrue(users.count() > -1)

        # Test query count
        count = users.query_count(username__eq='bob')

        self.assertEqual(count, 1)

        # Test without LSIs (describe calls shouldn't fail).
        admins = Table.create('admins', schema=[HashKey('username')])
        self.addCleanup(admins.delete)
        time.sleep(60)
        admins.describe()
        self.assertEqual(admins.throughput['read'], 5)
        self.assertEqual(admins.indexes, [])

        # A single query term should fail on a table with *ONLY* a HashKey.
        self.assertRaises(exceptions.QueryError,
                          admins.query,
                          username__eq='johndoe')
        # But it shouldn't break on more complex tables.
        res = users.query_2(username__eq='johndoe')

        # Test putting with/without sets.
        mau5_created = users.put_item(
            data={
                'username': '******',
                'first_name': 'dead',
                'last_name': 'mau5',
                'friend_count': 2,
                'friends': set(['skrill', 'penny']),
            })
        self.assertTrue(mau5_created)

        penny_created = users.put_item(
            data={
                'username': '******',
                'first_name': 'Penny',
                'friend_count': 0,
                'friends': set([]),
            })
        self.assertTrue(penny_created)

        # Test attributes.
        mau5 = users.get_item(username='******',
                              friend_count=2,
                              attributes=['username', 'first_name'])
        self.assertEqual(mau5['username'], 'mau5')
        self.assertEqual(mau5['first_name'], 'dead')
        self.assertTrue('last_name' not in mau5)
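The heart of this test is the distinction between the three write modes. A minimal sketch of them side by side, assuming a people table with HashKey('username') already exists (the names here are illustrative):

from boto.dynamodb2.table import Table

people = Table('people')

item = people.get_item(username='jdoe')
item['nickname'] = 'JD'

# Conditional save: expects the server copy to be unchanged since the
# read, and raises JSONResponseError on a conflicting write.
item.save()

# Unconditional save: replaces the server copy with what's in memory.
item.save(overwrite=True)

# Partial save: sends only the changed attributes, so concurrent edits
# to *other* attributes survive.
item['nickname'] = 'Jay'
item.partial_save()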
Example #2
"""
DynamoDB table configuration

Author: Angad Gill
"""

from boto.dynamodb2.fields import HashKey
from boto.dynamodb2.table import Table

DYNAMO_TABLES = [
    Table('pages', schema=[HashKey('pagename')]),
]
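flask-dynamo consumes a DYNAMO_TABLES list like this from the app config. A short sketch of the usual wiring, assuming the flask_dynamo package's create_all()/tables API:

from flask import Flask
from flask_dynamo import Dynamo
from boto.dynamodb2.fields import HashKey
from boto.dynamodb2.table import Table

app = Flask(__name__)
app.config['DYNAMO_TABLES'] = [
    Table('pages', schema=[HashKey('pagename')]),
]

dynamo = Dynamo(app)
with app.app_context():
    dynamo.create_all()  # create any tables that don't exist yet
    dynamo.tables['pages'].put_item(data={'pagename': 'home'})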
Example #3
class JBoxUserV2(JBoxDB):
    """
        - user_id (primary hash key)
        
        - create_month (global secondary hash key)
        - create_time (global secondary range index)
        
        - update_month  (global secondary hash key)
        - update_time (global secondary index)
        
        - activation_code (global secondary hash key)
        - activation_status (global secondary range key)
        
        - image (optional: global secondary hash key)
        - resource_profile (optional: global secondary range key)
        
        - status
        - organization
        - role
        - gtok

        - courses_owned
    """
    NAME = 'jbox_users_v2'

    SCHEMA = [HashKey('user_id', data_type=STRING)]

    INDEXES = [
        GlobalKeysOnlyIndex('create_month-create_time-index',
                            parts=[
                                HashKey('create_month', data_type=NUMBER),
                                RangeKey('create_time', data_type=NUMBER)
                            ]),
        GlobalKeysOnlyIndex('update_month-update_time-index',
                            parts=[
                                HashKey('update_month', data_type=NUMBER),
                                RangeKey('update_time', data_type=NUMBER)
                            ]),
        GlobalKeysOnlyIndex('activation_code-activation_status-index',
                            parts=[
                                HashKey('activation_code', data_type=STRING),
                                RangeKey('activation_status', data_type=NUMBER)
                            ])
    ]

    TABLE = None

    KEYS = ['user_id']
    ATTRIBUTES = [
        'create_month', 'create_time', 'update_month', 'update_time', 'status',
        'activation_code', 'activation_status', 'resource_profile', 'role',
        'gtok', 'courses_offered', 'balance', 'max_cluster_cores'
    ]

    STATUS_ACTIVE = 0
    STATUS_INACTIVE = 1

    ROLE_USER = 0
    ROLE_ACCESS_STATS = 1 << 0
    ROLE_MANAGE_INVITES = 1 << 1
    ROLE_MANAGE_CONTAINERS = 1 << 2
    ROLE_OFFER_COURSES = 1 << 3

    ROLE_SUPER = (1 << 33) - 1

    ACTIVATION_NONE = 0
    ACTIVATION_GRANTED = 1
    ACTIVATION_REQUESTED = 2

    ACTIVATION_CODE_AUTO = 'AUTO'

    RES_PROF_BASIC = 0
    RES_PROF_DISK_EBS_10G = 1 << 0

    RES_PROF_JULIA_PKG_PRECOMP = 1 << 12
    RES_PROF_CLUSTER = 1 << 13
    RES_PROF_API_PUBLISHER = 1 << 14

    STATS = None
    STAT_NAME = "stat_users"

    DEF_MAX_CLUSTER_CORES = 64

    def __init__(self, user_id, create=False):
        try:
            self.item = self.fetch(user_id=user_id)
            self.is_new = False
        except JBoxDBItemNotFound:
            if create:
                data = {
                    'user_id': user_id,
                    'resource_profile': JBoxUserV2.RES_PROF_JULIA_PKG_PRECOMP
                }
                JBoxUserV2._set_time(data, "create")
                JBoxUserV2._set_activation_state(data, '-',
                                                 JBoxUserV2.ACTIVATION_NONE)
                self.create(data)
                self.item = self.fetch(user_id=user_id)
                self.is_new = True
            else:
                raise

    def get_user_id(self):
        return self.get_attrib('user_id')

    def get_status(self):
        return self.get_attrib('status', JBoxUserV2.STATUS_ACTIVE)

    def get_role(self):
        return int(self.get_attrib('role', JBoxUserV2.ROLE_USER))

    def set_role(self, role):
        r = int(self.get_attrib('role', JBoxUserV2.ROLE_USER))
        self.set_attrib('role', r | role)

    def has_role(self, role):
        return self.get_role() & role == role

    def set_status(self, status):
        self.set_attrib('status', status)

    def set_time(self, prefix, dt=None):
        JBoxUserV2._set_time(self.item, prefix, dt)

    @staticmethod
    def _set_time(item, prefix, dt=None):
        if dt is None:
            dt = datetime.datetime.now(pytz.utc)

        if prefix not in ["create", "update"]:
            raise Exception("invalid prefix for setting time")

        item[prefix + "_month"] = JBoxUserV2.datetime_to_yyyymm(dt)
        item[prefix + "_time"] = JBoxUserV2.datetime_to_epoch_secs(dt)

    def get_time(self, prefix):
        if prefix not in ["create", "update"]:
            raise Exception("invalid prefix for getting time")
        return JBoxUserV2.epoch_secs_to_datetime(self.item[prefix + "_time"])

    def save(self, set_time=True):
        if set_time:
            self.set_time("update")
        super(JBoxUserV2, self).save()

    def set_activation_state(self, activation_code, activation_status):
        JBoxUserV2.log_debug("setting activation state of %s to %s, %d",
                             self.get_user_id(), activation_code,
                             activation_status)
        JBoxUserV2._set_activation_state(self.item, activation_code,
                                         activation_status)

    @staticmethod
    def _set_activation_state(item, activation_code, activation_status):
        item['activation_code'] = activation_code
        item['activation_status'] = activation_status

    def get_activation_state(self):
        return self.get_attrib('activation_code', '-'), self.get_attrib(
            'activation_status', JBoxUserV2.ACTIVATION_NONE)

    def set_gtok(self, gtok):
        self.set_attrib('gtok', encrypt(gtok, self.enckey()))

    def get_gtok(self):
        gtok = self.get_attrib('gtok')
        return decrypt(gtok, self.enckey()) if (gtok is not None) else None

    def set_container_type(self, image, resource_profile):
        self.set_attrib('image', image)
        self.set_attrib('resource_profile', resource_profile)

    def get_container_type(self):
        return self.get_attrib('image'), int(
            self.get_attrib('resource_profile', JBoxUserV2.RES_PROF_BASIC))

    def get_resource_profile(self):
        return int(
            self.get_attrib('resource_profile', JBoxUserV2.RES_PROF_BASIC))

    def set_resource_profile(self, mask):
        resource_profile = self.get_resource_profile()
        new_resource_profile = resource_profile | mask
        if new_resource_profile != resource_profile:
            self.set_attrib('resource_profile', new_resource_profile)

    def unset_resource_profile(self, mask):
        resource_profile = self.get_resource_profile()
        new_resource_profile = resource_profile & (~mask)
        if new_resource_profile != resource_profile:
            self.set_attrib('resource_profile', new_resource_profile)

    def has_resource_profile(self, mask):
        resource_profile = self.get_resource_profile()
        if mask == 0:
            return resource_profile == 0
        return (resource_profile & mask) == mask

    def get_courses_offered(self):
        return json.loads(self.get_attrib('courses_offered', '[]'))

    def set_courses_offered(self, courses_offered):
        self.set_attrib('courses_offered', json.dumps(courses_offered))

    def set_balance(self, amt):
        self.set_attrib('balance', amt)

    def credit_balance(self, amt):
        self.set_attrib('balance', self.get_attrib('balance', 0.0) + amt)

    def debit_balance(self, amt):
        self.set_attrib('balance', self.get_attrib('balance', 0.0) - amt)

    def get_balance(self):
        return self.get_attrib('balance', 0.0)

    def set_max_cluster_cores(self, cores):
        self.set_attrib('max_cluster_cores', cores)

    def get_max_cluster_cores(self):
        return int(
            self.get_attrib('max_cluster_cores',
                            JBoxUserV2.DEF_MAX_CLUSTER_CORES))

    @staticmethod
    def get_pending_activations(max_count):
        records = JBoxUserV2.query(
            activation_code__eq=JBoxUserV2.ACTIVATION_CODE_AUTO,
            activation_status__eq=JBoxUserV2.ACTIVATION_REQUESTED,
            index='activation_code-activation_status-index',
            limit=max_count)
        user_ids = []
        for rec in records:
            user_ids.append(rec['user_id'])
        return user_ids

    @staticmethod
    def count_pending_activations():
        count = JBoxUserV2.query_count(
            activation_code__eq='AUTO',
            activation_status__eq=JBoxUserV2.ACTIVATION_REQUESTED,
            index='activation_code-activation_status-index')
        return count

    @staticmethod
    def count_created(hours_before, tilldate=None):
        if tilldate is None:
            tilldate = datetime.datetime.now(pytz.utc)

        fromdate = tilldate - datetime.timedelta(hours=hours_before)

        till_month = JBoxUserV2.datetime_to_yyyymm(tilldate)
        till_time = JBoxUserV2.datetime_to_epoch_secs(tilldate)

        from_month = JBoxUserV2.datetime_to_yyyymm(fromdate)
        from_time = JBoxUserV2.datetime_to_epoch_secs(fromdate)

        count = 0
        mon = from_month
        while mon <= till_month:
            count += JBoxUserV2.query_count(
                create_month__eq=mon,
                create_time__between=(from_time, till_time),
                index='create_month-create_time-index')

            JBoxUserV2.log_debug(
                "adding accounts created in mon %d, from %d till %d. count %d",
                mon, from_time, till_time, count)

            if (mon % 100) == 12:
                mon = (mon // 100 + 1) * 100 + 1
            else:
                mon += 1

        return count

    @staticmethod
    def calc_stat(user, weeks, days):
        stats = JBoxUserV2.STATS
        stats['num_users'] += 1

        gtok_val = user.get('gtok', None)
        if gtok_val is not None:
            stats['sync']['gdrive'] += 1

        role = stats['role']
        role_val = int(user['role']) if user.get(
            'role', None) is not None else JBoxUserV2.ROLE_USER
        if role_val == JBoxUserV2.ROLE_USER:
            role['user'] += 1
        else:
            if (role_val & JBoxUserV2.ROLE_SUPER) == JBoxUserV2.ROLE_SUPER:
                role['superuser'] += 1
            if (role_val & JBoxUserV2.ROLE_ACCESS_STATS
                ) == JBoxUserV2.ROLE_ACCESS_STATS:
                role['access_stats'] += 1

        act_status = stats['activation_status']
        user_act_status = user.get('activation_status', None)
        act_status_val = int(
            user_act_status
        ) if user_act_status is not None else JBoxUserV2.ACTIVATION_NONE
        if act_status_val == JBoxUserV2.ACTIVATION_NONE:
            act_status['none'] += 1
        elif act_status_val == JBoxUserV2.ACTIVATION_GRANTED:
            act_status['granted'] += 1
        elif act_status_val == JBoxUserV2.ACTIVATION_REQUESTED:
            act_status['requested'] += 1

        res_profile = stats['resource_profile']
        user_res_profile = user['resource_profile']
        res_profile_val = int(
            user_res_profile
        ) if user_res_profile is not None else JBoxUserV2.RES_PROF_BASIC
        if res_profile_val == JBoxUserV2.RES_PROF_BASIC:
            res_profile['basic'] += 1
        else:
            if (res_profile_val & JBoxUserV2.RES_PROF_DISK_EBS_10G
                ) == JBoxUserV2.RES_PROF_DISK_EBS_10G:
                res_profile['disk_ebs_10G'] += 1
            elif (res_profile_val & JBoxUserV2.RES_PROF_JULIA_PKG_PRECOMP
                  ) == JBoxUserV2.RES_PROF_JULIA_PKG_PRECOMP:
                res_profile['julia_packages_precompiled'] += 1
            elif (res_profile_val &
                  JBoxUserV2.RES_PROF_CLUSTER) == JBoxUserV2.RES_PROF_CLUSTER:
                res_profile['julia_cluster'] += 1
            elif (res_profile_val & JBoxUserV2.RES_PROF_API_PUBLISHER
                  ) == JBoxUserV2.RES_PROF_API_PUBLISHER:
                res_profile['api_publisher'] += 1

        create_month_val = int(user['create_month'])
        create_month = stats['created_time']['months']
        if create_month_val not in create_month:
            create_month[create_month_val] = 1
        else:
            create_month[create_month_val] += 1

        create_time_val = int(user['create_time'])
        last_n_weeks = JBoxUserV2.STATS['created_time']['last_n_weeks']
        last_n_days = JBoxUserV2.STATS['created_time']['last_n_days']
        for week in range(0, len(weeks)):
            if create_time_val >= weeks[week]:
                last_n_weeks[week + 1] += 1
                break
        for day in range(0, len(days)):
            if create_time_val >= days[day]:
                last_n_days[day + 1] += 1
                break

    @staticmethod
    def calc_stats():
        JBoxUserV2.STATS = {
            'date': '',
            'num_users': 0,
            'sync': {
                'gdrive': 0
            },
            'role': {
                'user': 0,
                'superuser': 0,
                'access_stats': 0
            },
            'activation_status': {
                'none': 0,
                'granted': 0,
                'requested': 0
            },
            'resource_profile': {
                'basic': 0,
                'disk_ebs_10G': 0,
                'julia_packages_precompiled': 0,
                'julia_cluster': 0,
                'api_publisher': 0
            },
            'created_time': {
                'months': {},
                'last_n_weeks': {},
                'last_n_days': {}
            }
        }

        secs_day = 24 * 60 * 60
        secs_week = secs_day * 7
        now = datetime.datetime.now(pytz.utc)
        secs_now = int(JBoxUserV2.datetime_to_epoch_secs(now))

        weeks = [(secs_now - secs_week * week) for week in range(1, 5)]
        days = [(secs_now - secs_day * day) for day in range(1, 8)]

        last_n_weeks = JBoxUserV2.STATS['created_time']['last_n_weeks']
        last_n_days = JBoxUserV2.STATS['created_time']['last_n_days']
        for week in range(0, len(weeks)):
            last_n_weeks[week + 1] = 0
        for day in range(0, len(days)):
            last_n_days[day + 1] = 0

        result_set = JBoxUserV2.scan(attributes=('user_id', 'create_month',
                                                 'create_time', 'gtok', 'role',
                                                 'resource_profile',
                                                 'activation_status'))
        for user in result_set:
            JBoxUserV2.calc_stat(user, weeks, days)

        JBoxUserV2.STATS['date'] = now.isoformat()
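count_created packs dates into yyyymm integers and walks month by month between them; the rollover rule is the only subtle part. A standalone sketch of that traversal:

# yyyymm month traversal as used by count_created: December (mon % 100
# == 12) rolls over to January of the next year, otherwise increment.
def iter_months(from_month, till_month):
    mon = from_month
    while mon <= till_month:
        yield mon
        if (mon % 100) == 12:
            mon = (mon // 100 + 1) * 100 + 1
        else:
            mon += 1

assert list(iter_months(201311, 201402)) == [201311, 201312, 201401, 201402]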
Example #4
hash_key = ""
range_key = ""
for schema in src_table["KeySchema"]:
    attr_name = schema["AttributeName"]
    key_type = schema["KeyType"]
    if key_type == "HASH":
        hash_key = attr_name
    elif key_type == "RANGE":
        range_key = attr_name

# destination table
try:
    dst_logs = Table(dst_name,
                     connection=ddbc,
                     schema=[
                         HashKey(hash_key),
                         RangeKey(range_key),
                     ])
    dst_logs.describe()
except JSONResponseError:
    # create table
    schema = []

    if hash_key != "":
        schema.append(HashKey(hash_key))

    if range_key != "":
        schema.append(RangeKey(range_key))

    print("# Create table [%s]." % dst_name)
    # Create the destination table with whichever keys the source defines.
    dst_logs = Table.create(dst_name, schema=schema, connection=ddbc)
Example #5
import boto.dynamodb2
from boto.dynamodb2.fields import HashKey
from boto.dynamodb2.table import Table
from boto.dynamodb2.types import NUMBER

# make a connection to dynamodb
conn = boto.dynamodb2.connect_to_region(region_name='us-east-1',
                                        aws_access_key_id='1234567890',
                                        aws_secret_access_key='0987654321')

##############################
# create a new tweets tables
##############################

# create a new dynamo table - example 2
tweets = Table.create('tweets',
                      schema=[
                          HashKey('id'),
                      ],
                      throughput={
                          'read': 5,
                          'write': 10,
                      },
                      connection=conn)

###########################
## Creating new item
###########################

# use put_item method
tweets = Table('tweets', connection=conn)
tweets.put_item(
    data={
        # illustrative attributes; 'id' is the table's hash key
        'id': '1',
        'text': 'hello dynamo',
    })
Example #6
class JBoxInstanceProps(JBoxDB):
    NAME = 'jbox_instance'

    SCHEMA = [HashKey('instance_id', data_type=STRING)]

    INDEXES = None
    GLOBAL_INDEXES = None

    TABLE = None

    KEYS = ['instance_id']
    ATTRIBUTES = ['load', 'accept', 'api_status', 'publish_time']
    SQL_INDEXES = None
    KEYS_TYPES = [JBoxDB.VCHAR]
    TYPES = [JBoxDB.VCHAR, JBoxDB.INT, JBoxDB.TEXT, JBoxDB.INT]

    # maintenance runs are once in 5 minutes
    # TODO: make configurable
    SESS_UPDATE_INTERVAL = (5 * 1.5) * 60

    def __init__(self, cluster, instance_id, create=False):
        qinstance_id = JBoxDB.qual(cluster, instance_id)
        try:
            self.item = self.fetch(instance_id=qinstance_id)
            self.is_new = False
        except JBoxDBItemNotFound:
            if create:
                data = {'instance_id': qinstance_id}
                self.create(data)
                self.item = self.fetch(instance_id=qinstance_id)
                self.is_new = True
            else:
                raise

    def get_load(self):
        return self.get_attrib('load', '0.0')

    def set_load(self, load):
        self.set_attrib('load', str(load))

    def get_accept(self):
        return self.get_attrib('accept', 0) == 1

    def set_accept(self, accept):
        self.set_attrib('accept', 1 if accept else 0)

    def get_api_status(self):
        try:
            return json.loads(self.get_attrib('api_status', '{}'))
        except (TypeError, ValueError):
            return dict()

    def set_api_status(self, api_status):
        self.set_attrib('api_status', json.dumps(api_status))

    def set_publish_time(self):
        now = datetime.datetime.now(pytz.utc)
        self.set_attrib('publish_time',
                        JBoxInstanceProps.datetime_to_epoch_secs(now))

    def get_publish_time(self):
        now = datetime.datetime.now(pytz.utc)
        return int(
            self.get_attrib('publish_time',
                            JBoxInstanceProps.datetime_to_epoch_secs(now)))

    @staticmethod
    def set_props(cluster,
                  instance_id,
                  load=None,
                  accept=None,
                  api_status=None):
        instance_props = JBoxInstanceProps(cluster, instance_id, create=True)
        if load is not None:
            instance_props.set_load(load)
        if accept is not None:
            instance_props.set_accept(accept)
        if api_status is not None:
            instance_props.set_api_status(api_status)
        instance_props.set_publish_time()
        instance_props.save()

    @staticmethod
    def purge_stale_instances(cluster):
        for iid in JBoxInstanceProps.get_stale_instances(cluster):
            instance = JBoxInstanceProps(cluster, iid)
            instance.delete()

    @staticmethod
    def get_stale_instances(cluster):
        now = datetime.datetime.now(pytz.utc)
        nowsecs = JBoxInstanceProps.datetime_to_epoch_secs(now)
        valid_time = nowsecs - JBoxInstanceProps.SESS_UPDATE_INTERVAL
        stale = []
        for record in JBoxInstanceProps.scan(
                instance_id__beginswith=JBoxDB.qual(cluster, ''),
                publish_time__lt=valid_time):
            stale.append(record.get('instance_id').split('.', 1)[1])
        return stale

    @staticmethod
    def get_instance_status(cluster):
        now = datetime.datetime.now(pytz.utc)
        nowsecs = JBoxInstanceProps.datetime_to_epoch_secs(now)
        valid_time = nowsecs - JBoxInstanceProps.SESS_UPDATE_INTERVAL
        result = dict()
        for record in JBoxInstanceProps.scan(
                instance_id__beginswith=JBoxDB.qual(cluster, ''),
                publish_time__gte=valid_time):
            iid = record.get('instance_id').split('.', 1)[1]
            props = {
                'load': float(record.get('load', '0.0')),
                'accept': bool(record.get('accept', 0)),
                'api_status': json.loads(record.get('api_status', '{}'))
            }
            result[iid] = props
        return result

    @staticmethod
    def get_available_instances(cluster):
        now = datetime.datetime.now(pytz.utc)
        nowsecs = JBoxInstanceProps.datetime_to_epoch_secs(now)
        valid_time = nowsecs - JBoxInstanceProps.SESS_UPDATE_INTERVAL
        result = list()
        for record in JBoxInstanceProps.scan(
                instance_id__beginswith=JBoxDB.qual(cluster, ''),
                publish_time__gte=valid_time,
                accept__eq=1):
            result.append(record.get('instance_id').split('.', 1)[1])
        return result
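The three scan helpers above all hinge on the same cutoff: an instance counts as live only if it published within 1.5 maintenance intervals. The predicate in isolation, as a small sketch:

import time

SESS_UPDATE_INTERVAL = (5 * 1.5) * 60  # seconds; 5-minute runs plus slack

def is_stale(publish_time_epoch_secs, now_secs=None):
    # Stale means the last publish is older than the cutoff.
    if now_secs is None:
        now_secs = int(time.time())
    return publish_time_epoch_secs < now_secs - SESS_UPDATE_INTERVAL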
Example #7
'''
   Create the table for the BasicDB exercise.

   Execute this only ONCE.
'''

import boto.dynamodb2

from boto.dynamodb2.table import Table
from boto.dynamodb2.fields import HashKey
from boto.dynamodb2.types import NUMBER

# Modify these as necessary
TABLE_NAME = "activities"
READ_CAPACITY = 1
WRITE_CAPACITY = 1

if __name__ == "__main__":
    acts = Table.create(
        TABLE_NAME,
        schema=[
            HashKey('id', data_type=NUMBER)
        ],
        throughput={
            'read': READ_CAPACITY,
            'write': WRITE_CAPACITY
        },
        connection=boto.dynamodb2.connect_to_region('us-west-2')
    )
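Table.create returns before the table is usable, which is why other snippets on this page sleep for 60 seconds. A polling sketch instead, assuming boto's describe() returns the raw DescribeTable dict:

import time

def wait_until_active(table, timeout=300, interval=5):
    # Poll DescribeTable until the table reports ACTIVE.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if table.describe()['Table']['TableStatus'] == 'ACTIVE':
            return
        time.sleep(interval)
    raise RuntimeError('table %s never became ACTIVE' % table.table_name)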
Example #8
#k=Key(bucket)

import time

import boto.dynamodb2
from boto.dynamodb2.fields import HashKey
from boto.dynamodb2.exceptions import JSONResponseError

import mtaUpdates

DYNAMODB_TABLE_NAME = 'mtaData'
# Prepare DynamoDB client
client_dynamo = boto.dynamodb2.connect_to_region(
    'us-east-1',
    aws_access_key_id=assumedRoleObject.credentials.access_key,
    aws_secret_access_key=assumedRoleObject.credentials.secret_key,
    security_token=assumedRoleObject.credentials.session_token)

from boto.dynamodb2.table import Table
table_dynamo = Table(DYNAMODB_TABLE_NAME, connection=client_dynamo)

try:
    mtaData = Table.create('mtaData',
                           schema=[HashKey('tripId')],
                           connection=client_dynamo)
    mtaData = Table('mtaData',
                    schema=[HashKey('tripId')],
                    connection=client_dynamo)
    time.sleep(.12)
except JSONResponseError:
    mtaData = Table('mtaData',
                    schema=[HashKey('tripId')],
                    connection=client_dynamo)

######### Everything above this line is AWS setup.
importmta = mtaUpdates.mtaUpdates('80e73d31e83802678e719e770763da90')

#tu,timest =  importmta.getTripUpdates()
#print t[10].futureStops,v[0].currentStopNumber,timest
Example #9
    def create_table(self):
        super(DynamoFaceSignatureClient,
              self).create_table(schema=[HashKey('box_id', data_type=NUMBER)])
Example #10
class JBoxDynConfig(JBoxDB):
    NAME = 'jbox_dynconfig'

    SCHEMA = [HashKey('name', data_type=STRING)]

    INDEXES = None

    TABLE = None
    DEFAULT_REGISTRATION_RATE = 60

    def __init__(self, prop, create=False, value=None):
        if self.table() is None:
            return

        self.item = None
        try:
            self.item = self.table().get_item(name=prop)
            self.is_new = False
        except boto.dynamodb2.exceptions.ItemNotFound:
            if create:
                data = {'name': prop}
                if value is not None:
                    data['value'] = value
                self.create(data)
                self.item = self.table().get_item(name=prop)
                self.is_new = True
            else:
                raise

    @staticmethod
    def _n(cluster, name):
        return '.'.join([cluster, name])

    def set_value(self, value):
        self.set_attrib('value', value)

    def get_value(self):
        return self.get_attrib('value')

    @staticmethod
    def unset_cluster_leader(cluster):
        try:
            record = JBoxDynConfig(JBoxDynConfig._n(cluster, 'leader'))
            record.delete()
        except boto.dynamodb2.exceptions.ItemNotFound:
            return

    @staticmethod
    def set_cluster_leader(cluster, instance):
        record = JBoxDynConfig(JBoxDynConfig._n(cluster, 'leader'),
                               create=True,
                               value=instance)
        if not record.is_new:
            record.set_value(instance)
            record.save()

    @staticmethod
    def get_cluster_leader(cluster):
        try:
            return JBoxDynConfig(JBoxDynConfig._n(cluster,
                                                  'leader')).get_value()
        except boto.dynamodb2.exceptions.ItemNotFound:
            return None

    @staticmethod
    def set_allow_registration(cluster, allow):
        record = JBoxDynConfig(JBoxDynConfig._n(cluster, 'allow_registration'),
                               create=True,
                               value=str(allow))
        if not record.is_new:
            record.set_value(str(allow))
            record.save()

    @staticmethod
    def get_allow_registration(cluster):
        try:
            record = JBoxDynConfig(
                JBoxDynConfig._n(cluster, 'allow_registration'))
        except boto.dynamodb2.exceptions.ItemNotFound:
            return True

        return record.get_value() == 'True'

    @staticmethod
    def get_registration_hourly_rate(cluster):
        try:
            return int(
                JBoxDynConfig(
                    JBoxDynConfig._n(cluster,
                                     'registrations_hourly_rate')).get_value())
        except boto.dynamodb2.exceptions.ItemNotFound:
            return JBoxDynConfig.DEFAULT_REGISTRATION_RATE

    @staticmethod
    def set_registration_hourly_rate(cluster, rate):
        record = JBoxDynConfig(JBoxDynConfig._n(cluster,
                                                'registrations_hourly_rate'),
                               create=True,
                               value=str(rate))
        if not record.is_new:
            record.set_value(str(rate))
            record.save()

    @staticmethod
    def set_message(cluster, message, valid_delta):
        tnow = datetime.datetime.now(pytz.utc)
        tvalid = tnow + valid_delta

        msg = {
            'msg': message,
            'valid_till': isodate.datetime_isoformat(tvalid)
        }
        msg = json.dumps(msg)
        record = JBoxDynConfig(JBoxDynConfig._n(cluster, 'message'),
                               create=True,
                               value=msg)
        if not record.is_new:
            record.set_value(msg)
            record.save()

    @staticmethod
    def get_message(cluster, del_expired=True):
        try:
            record = JBoxDynConfig(JBoxDynConfig._n(cluster, 'message'))
        except boto.dynamodb2.exceptions.ItemNotFound:
            return None

        msg = record.get_value()
        if msg is None:
            return None

        msg = json.loads(msg)

        tnow = datetime.datetime.now(pytz.utc)
        tvalid = parse_iso_time(msg['valid_till'])
        #JBoxDynConfig.log_debug("tnow: %s, tvalid: %s", str(tnow), str(tvalid))
        if tvalid >= tnow:
            return msg['msg']

        if del_expired:
            JBoxDynConfig.table().delete_item(
                name='.'.join([cluster, 'message']))

        return None

    @staticmethod
    def get_user_home_image(cluster):
        try:
            record = JBoxDynConfig(JBoxDynConfig._n(cluster,
                                                    'user_home_image'))
        except boto.dynamodb2.exceptions.ItemNotFound:
            return None, None
        img = json.loads(record.get_value())
        return img['bucket'], img['filename']

    @staticmethod
    def set_user_home_image(cluster, bucket, filename):
        img = {'bucket': bucket, 'filename': filename}
        img = json.dumps(img)
        record = JBoxDynConfig(JBoxDynConfig._n(cluster, 'user_home_image'),
                               create=True,
                               value=img)
        if not record.is_new:
            record.set_value(img)
            record.save()

    @staticmethod
    def set_stat_collected_date(cluster):
        dt = datetime.datetime.now(pytz.utc).isoformat()
        record = JBoxDynConfig(JBoxDynConfig._n(cluster, 'stat_date'),
                               create=True,
                               value=dt)
        if not record.is_new:
            record.set_value(dt)
            record.save()

    @staticmethod
    def get_stat_collected_date(cluster):
        try:
            record = JBoxDynConfig(JBoxDynConfig._n(cluster, 'stat_date'))
        except boto.dynamodb2.exceptions.ItemNotFound:
            return None
        return parse_iso_time(record.get_value())

    @staticmethod
    def is_stat_collected_within(cluster, days):
        last_date = JBoxDynConfig.get_stat_collected_date(cluster)
        if last_date is None:
            return False
        dt = datetime.datetime.now(pytz.utc) - datetime.timedelta(days=days)
        return last_date > dt

    @staticmethod
    def set_stat(cluster, stat_name, stat):
        val = json.dumps(stat)
        record = JBoxDynConfig(JBoxDynConfig._n(cluster, stat_name),
                               create=True,
                               value=val)
        if not record.is_new:
            record.set_value(val)
            record.save()

    @staticmethod
    def get_stat(cluster, stat_name):
        try:
            record = JBoxDynConfig(JBoxDynConfig._n(cluster, stat_name))
        except boto.dynamodb2.exceptions.ItemNotFound:
            return None
        return json.loads(record.get_value())

    @staticmethod
    def get_course(cluster, course_id):
        try:
            course_key = '|'.join(['course', course_id])
            record = JBoxDynConfig(JBoxDynConfig._n(cluster, course_key))
        except boto.dynamodb2.exceptions.ItemNotFound:
            return None
        return json.loads(record.get_value())

    @staticmethod
    def set_course(cluster, course_id, course_details):
        val = json.dumps(course_details)
        course_key = '|'.join(['course', course_id])
        record = JBoxDynConfig(JBoxDynConfig._n(cluster, course_key),
                               create=True,
                               value=val)
        if not record.is_new:
            record.set_value(val)
            record.save()
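Every setter in JBoxDynConfig repeats the same create-or-update dance; factored out, the pattern is just this sketch:

def upsert_config(cluster, name, value):
    # Try to create the record with the value; if it already existed,
    # overwrite the value and save.
    record = JBoxDynConfig(JBoxDynConfig._n(cluster, name),
                           create=True,
                           value=value)
    if not record.is_new:
        record.set_value(value)
        record.save()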
Example #11
import os

# imports for flask-dynamo
from flask import Flask
from flask_dynamo import Dynamo
from boto.dynamodb2.fields import HashKey, RangeKey
from boto.dynamodb2.table import Table

from dateutil.parser import parse
from dateutil.tz import gettz

print(os.environ["AWS_ACCESS_KEY_ID"])
print(os.environ["AWS_SECRET_ACCESS_KEY"])

app = Flask(__name__)
# Schema definition for dynamoDB
app.config['DYNAMO_TABLES'] = [
    Table('orders', schema=[HashKey('id')]),
    Table('stocks', schema=[HashKey('id')]),
    Table('priceHistory', schema=[HashKey('id'),
                                  RangeKey('time')]),
]

# init object for handling dynamoDB
dynamo = Dynamo(app)

idBoerse = 'datBoerse'


@app.route('/')
def hello():
    return "<h1 style='color:blue'>Hello There!</h1>"
Example #12
    def test_update_table_online_indexing_support(self):
        # Create a table with a GSI to test DynamoDB's online indexing support
        # https://github.com/boto/boto/pull/2925
        users = Table.create('online_indexing_support_users',
                             schema=[HashKey('user_id')],
                             throughput={
                                 'read': 5,
                                 'write': 5
                             },
                             global_indexes=[
                                 GlobalAllIndex('EmailGSIIndex',
                                                parts=[HashKey('email')],
                                                throughput={
                                                    'read': 2,
                                                    'write': 2
                                                })
                             ])

        # Add this function to be called after tearDown()
        self.addCleanup(users.delete)

        # Wait for it.
        time.sleep(60)

        # Fetch fresh table desc from DynamoDB
        users.describe()

        # Assert that everything is fine so far
        self.assertEqual(len(users.global_indexes), 1)
        self.assertEqual(users.global_indexes[0].throughput['read'], 2)
        self.assertEqual(users.global_indexes[0].throughput['write'], 2)

        # Update a GSI's throughput. It should work.
        users.update_global_secondary_index(
            global_indexes={'EmailGSIIndex': {
                'read': 2,
                'write': 1,
            }})

        # Wait for it.
        time.sleep(60)

        # Fetch fresh table desc from DynamoDB
        users.describe()

        # Assert that everything is fine so far
        self.assertEqual(len(users.global_indexes), 1)
        self.assertEqual(users.global_indexes[0].throughput['read'], 2)
        self.assertEqual(users.global_indexes[0].throughput['write'], 1)

        # Update a GSI's throughput using the old-fashioned way, for
        # compatibility purposes. It should work.
        users.update(
            global_indexes={'EmailGSIIndex': {
                'read': 3,
                'write': 2,
            }})

        # Wait for it.
        time.sleep(60)

        # Fetch fresh table desc from DynamoDB
        users.describe()

        # Assert that everything is fine so far
        self.assertEqual(len(users.global_indexes), 1)
        self.assertEqual(users.global_indexes[0].throughput['read'], 3)
        self.assertEqual(users.global_indexes[0].throughput['write'], 2)

        # Delete a GSI. It should work.
        users.delete_global_secondary_index('EmailGSIIndex')

        # Wait for it.
        time.sleep(60)

        # Fetch fresh table desc from DynamoDB
        users.describe()

        # Assert that everything is fine so far
        self.assertEqual(len(users.global_indexes), 0)

        # Create a GSI. It should work.
        users.create_global_secondary_index(global_index=GlobalAllIndex(
            'AddressGSIIndex',
            parts=[HashKey('address', data_type=STRING)],
            throughput={
                'read': 1,
                'write': 1,
            }))
        # Wait for it. This operation usually takes much longer than the others
        time.sleep(60 * 10)

        # Fetch fresh table desc from DynamoDB
        users.describe()

        # Assert that everything is fine so far
        self.assertEqual(len(users.global_indexes), 1)
        self.assertEqual(users.global_indexes[0].throughput['read'], 1)
        self.assertEqual(users.global_indexes[0].throughput['write'], 1)
Example #13
    def test_query_after_describe_with_gsi(self):
        # Create a table using a GSI to reproduce the error mentioned in issue
        # https://github.com/boto/boto/issues/2828
        users = Table.create('more_gsi_query_users',
                             schema=[HashKey('user_id')],
                             throughput={
                                 'read': 5,
                                 'write': 5
                             },
                             global_indexes=[
                                 GlobalAllIndex('EmailGSIIndex',
                                                parts=[HashKey('email')],
                                                throughput={
                                                    'read': 1,
                                                    'write': 1
                                                })
                             ])

        # Add this function to be called after tearDown()
        self.addCleanup(users.delete)

        # Wait for it.
        time.sleep(60)

        # populate a couple of items in it
        users.put_item(
            data={
                'user_id': '7',
                'username': '******',
                'first_name': 'John',
                'last_name': 'Doe',
                'email': '*****@*****.**',
            })
        users.put_item(
            data={
                'user_id': '24',
                'username': '******',
                'first_name': 'Alice',
                'last_name': 'Expert',
                'email': '*****@*****.**',
            })
        users.put_item(
            data={
                'user_id': '35',
                'username': '******',
                'first_name': 'Jane',
                'last_name': 'Doe',
                'email': '*****@*****.**',
            })

        # Try the GSI. it should work.
        rs = users.query_2(email__eq='*****@*****.**',
                           index='EmailGSIIndex')

        for rs_item in rs:
            self.assertEqual(rs_item['username'], 'johndoe')

        # The issue arises when we're introspecting the table and try to
        # query_2 after calling the describe method
        users_hit_api = Table('more_gsi_query_users')
        users_hit_api.describe()

        # Try the GSI. This is what was going wrong in issue #2828. It should
        # work fine now.
        rs = users_hit_api.query_2(email__eq='*****@*****.**',
                                   index='EmailGSIIndex')

        for rs_item in rs:
            self.assertEqual(rs_item['username'], 'johndoe')
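The behavior verified here is that a Table constructed by name alone can pick up its schema and global indexes from describe() before querying. In sketch form (the e-mail address is illustrative):

from boto.dynamodb2.table import Table

users = Table('more_gsi_query_users')
users.describe()  # populates users.schema and users.global_indexes
rs = users.query_2(email__eq='jane@example.com', index='EmailGSIIndex')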
Example #14
    def test_query_with_reverse(self):
        posts = Table.create('more-posts',
                             schema=[HashKey('thread'),
                                     RangeKey('posted_on')],
                             throughput={
                                 'read': 5,
                                 'write': 5,
                             })
        self.addCleanup(posts.delete)

        # Wait for it.
        time.sleep(60)

        # Add some data.
        test_data_path = os.path.join(os.path.dirname(__file__),
                                      'forum_test_data.json')
        with open(test_data_path, 'r') as test_data:
            data = json.load(test_data)

            with posts.batch_write() as batch:
                for post in data:
                    batch.put_item(post)

        time.sleep(5)

        # Test the default order (ascending).
        results = posts.query_2(thread__eq='Favorite chiptune band?',
                                posted_on__gte='2013-12-24T00:00:00')
        self.assertEqual([post['posted_on'] for post in results], [
            '2013-12-24T12:30:54',
            '2013-12-24T12:35:40',
            '2013-12-24T13:45:30',
            '2013-12-24T14:15:14',
            '2013-12-24T14:25:33',
            '2013-12-24T15:22:22',
        ])

        # Test the explicit ascending order.
        results = posts.query_2(thread__eq='Favorite chiptune band?',
                                posted_on__gte='2013-12-24T00:00:00',
                                reverse=False)
        self.assertEqual([post['posted_on'] for post in results], [
            '2013-12-24T12:30:54',
            '2013-12-24T12:35:40',
            '2013-12-24T13:45:30',
            '2013-12-24T14:15:14',
            '2013-12-24T14:25:33',
            '2013-12-24T15:22:22',
        ])

        # Test the explicit descending order.
        results = posts.query_2(thread__eq='Favorite chiptune band?',
                                posted_on__gte='2013-12-24T00:00:00',
                                reverse=True)
        self.assertEqual([post['posted_on'] for post in results], [
            '2013-12-24T15:22:22',
            '2013-12-24T14:25:33',
            '2013-12-24T14:15:14',
            '2013-12-24T13:45:30',
            '2013-12-24T12:35:40',
            '2013-12-24T12:30:54',
        ])

        # Test the old, broken style.
        results = posts.query(thread__eq='Favorite chiptune band?',
                              posted_on__gte='2013-12-24T00:00:00')
        self.assertEqual([post['posted_on'] for post in results], [
            '2013-12-24T15:22:22',
            '2013-12-24T14:25:33',
            '2013-12-24T14:15:14',
            '2013-12-24T13:45:30',
            '2013-12-24T12:35:40',
            '2013-12-24T12:30:54',
        ])
        results = posts.query(thread__eq='Favorite chiptune band?',
                              posted_on__gte='2013-12-24T00:00:00',
                              reverse=True)
        self.assertEqual([post['posted_on'] for post in results], [
            '2013-12-24T12:30:54',
            '2013-12-24T12:35:40',
            '2013-12-24T13:45:30',
            '2013-12-24T14:15:14',
            '2013-12-24T14:25:33',
            '2013-12-24T15:22:22',
        ])
Example #15
def create_table():
    table = Table.create(
        "messages", schema=[HashKey("forum_name")], throughput={"read": 10, "write": 10}
    )
    return table
Example #16
import time

import boto.dynamodb2
from boto.dynamodb2.fields import HashKey
from boto.dynamodb2.exceptions import JSONResponseError

client_dynamo = boto.dynamodb2.connect_to_region(
    'us-east-1',
    aws_access_key_id=assumedRoleObject.credentials.access_key,
    aws_secret_access_key=assumedRoleObject.credentials.secret_key,
    security_token=assumedRoleObject.credentials.session_token)
from boto import kinesis
#kinesis = kinesis.connect_to_region("us-east-1")
#stream = kinesis.create_stream("edisonDemoKinesis", 1)
#kinesis.describe_stream("edisonDemoKinesis")
#kinesis.list_streams()
#global count_dyn
count_dyn = 0

from boto.dynamodb2.table import Table
table_dynamo = Table(DYNAMODB_TABLE_NAME, connection=client_dynamo)
try:
    pingdata = Table.create('pingdata',
                            schema=[HashKey('timestamp')],
                            connection=client_dynamo)
    count_dyn = 0
    pingdata = Table('pingdata',
                     schema=[HashKey('timestamp')],
                     connection=client_dynamo)
except JSONResponseError:
    #count_dyn = tempdata.count() + 1
    pingdata = Table('pingdata',
                     schema=[HashKey('timestamp')],
                     connection=client_dynamo)
    existingitems = pingdata.scan()
    for i in existingitems:
        count_dyn = count_dyn + 1
time.sleep(.15)
#tempdata = Table('tempdata', schema = [HashKey('timestamp')], connection = client_dynamo)
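Examples #8, #16 and #19 all repeat the same create-or-open dance around Table.create. A sketch of it as one helper, catching the JSONResponseError that boto raises when the table already exists:

from boto.dynamodb2.exceptions import JSONResponseError
from boto.dynamodb2.table import Table

def get_or_create_table(name, schema, connection):
    try:
        return Table.create(name, schema=schema, connection=connection)
    except JSONResponseError:
        # Table already exists; open it by name instead.
        return Table(name, schema=schema, connection=connection)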
Example #17
class JBoxDiskState(JBPluginDB):
    provides = [JBPluginDB.JBP_TABLE_DYNAMODB]

    NAME = 'jbox_diskstate'

    SCHEMA = [
        HashKey('disk_key', data_type=STRING)
    ]

    INDEXES = [
        GlobalKeysOnlyIndex('state-index', parts=[
            HashKey('state', data_type=NUMBER)
        ])
    ]

    TABLE = None

    STATE_ATTACHED = 1
    STATE_ATTACHING = 2
    STATE_DETACHING = 3
    STATE_DETACHED = 0

    def __init__(self, disk_key=None, cluster_id=None, region_id=None, user_id=None, volume_id=None,
                 attach_time=None, create=False):
        if create and ((cluster_id is None) or (region_id is None) or (user_id is None)):
            raise AssertionError
        if disk_key is None:
            disk_key = '_'.join([user_id, cluster_id, region_id])
        try:
            self.item = self.fetch(disk_key=disk_key)
            self.is_new = False
        except JBoxDBItemNotFound:
            if create:
                data = {
                    'disk_key': disk_key,
                    'cluster_id': cluster_id,
                    'region_id': region_id,
                    'user_id': user_id
                }

                if volume_id is not None:
                    data['volume_id'] = volume_id
                    if attach_time is None:
                        attach_time = datetime.datetime.now(pytz.utc)
                    data['attach_time'] = JBoxDiskState.datetime_to_epoch_secs(attach_time)

                self.create(data)
                self.item = self.fetch(disk_key=disk_key)
                self.is_new = True
            else:
                raise

    def set_attach_time(self, attach_time=None):
        if attach_time is None:
            attach_time = datetime.datetime.now(pytz.utc)
        self.set_attrib('attach_time', JBoxDiskState.datetime_to_epoch_secs(attach_time))

    def get_attach_time(self):
        return JBoxDiskState.epoch_secs_to_datetime(self.item['attach_time'])

    def set_detach_time(self, detach_time=None):
        if detach_time is None:
            detach_time = datetime.datetime.now(pytz.utc)
        self.set_attrib('detach_time', JBoxDiskState.datetime_to_epoch_secs(detach_time))

    def get_detach_time(self):
        return JBoxDiskState.epoch_secs_to_datetime(int(self.item['detach_time']))

    def get_state(self):
        state = self.get_attrib('state')
        return int(state) if state is not None else None

    def set_state(self, state, time=None):
        self.set_attrib('state', state)
        if state == JBoxDiskState.STATE_ATTACHING or state == JBoxDiskState.STATE_ATTACHED:
            self.set_attach_time(time)
        else:
            self.set_detach_time(time)

    def get_user_id(self):
        return self.get_attrib('user_id')

    def set_user_id(self, user_id):
        self.set_attrib('user_id', user_id)

    def get_region_id(self):
        return self.get_attrib('region_id')

    def set_region_id(self, region_id):
        self.set_attrib('region_id', region_id)

    def get_cluster_id(self):
        return self.get_attrib('cluster_id')

    def set_cluster_id(self, cluster_id):
        self.set_attrib('cluster_id', cluster_id)

    def get_volume_id(self):
        return self.get_attrib('volume_id')

    def set_volume_id(self, volume_id):
        self.set_attrib('volume_id', volume_id)

    def get_snapshot_ids(self):
        snapshots = self.get_attrib('snapshot_id')
        if (snapshots is not None) and (len(snapshots) > 0):
            return json.loads(snapshots)
        return []

    def add_snapshot_id(self, snapshot_id):
        ids = self.get_snapshot_ids()
        ids.append(snapshot_id)
        self.set_snapshot_ids(ids)

    def set_snapshot_ids(self, snapshot_ids):
        self.set_attrib('snapshot_id', json.dumps(snapshot_ids))

    @staticmethod
    def get_detached_disks(max_count=None):
        disk_keys = []
        try:
            records = JBoxDiskState.query(state__eq=JBoxDiskState.STATE_DETACHED,
                                          index='state-index',
                                          limit=max_count)
            for rec in records:
                disk_keys.append(rec['disk_key'])
        except Exception:
            # boto bug: https://github.com/boto/boto/issues/2708
            JBoxDiskState.TABLE = None
            JBoxDiskState.log_warn("Exception in getting detached disks. Probably an empty table.")
        return disk_keys
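get_detached_disks leans on the 'state-index' GSI declared in INDEXES. Stripped of the JBoxDB plumbing, the equivalent raw boto2 query would look like this (a sketch assuming the table exists and that connection/region come from boto's defaults):

from boto.dynamodb2.table import Table

disks = Table('jbox_diskstate')
for rec in disks.query_2(state__eq=JBoxDiskState.STATE_DETACHED,
                         index='state-index',
                         limit=10):
    print(rec['disk_key'])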
Example #18
0
def dynamodb_users_table(dynamodb_table_maker):
    schema = [HashKey('name'), RangeKey('last_name')]
    return dynamodb_table_maker('users', schema)
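The dynamodb_table_maker fixture itself is not shown anywhere in these examples. A plausible sketch of such a factory fixture (the name dynamodb_connection and the teardown strategy are assumptions):

import pytest
from boto.dynamodb2.table import Table


@pytest.fixture
def dynamodb_table_maker(request, dynamodb_connection):
    # Factory: create a table with the given schema and tear it down
    # after the test. dynamodb_connection is an assumed sibling fixture.
    def maker(name, schema):
        table = Table.create(name, schema=schema,
                             connection=dynamodb_connection)
        request.addfinalizer(table.delete)
        return table
    return maker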
Example #19
0
                                                      oidc['Token'])

# table
DYNAMODB_TABLE_NAME = 'USER1'

# DynamoDB
client_dynamo = boto.dynamodb2.connect_to_region(
    'us-east-1',
    aws_access_key_id=assumedRoleObject.credentials.access_key,
    aws_secret_access_key=assumedRoleObject.credentials.secret_key,
    security_token=assumedRoleObject.credentials.session_token)
from boto.dynamodb2.fields import HashKey
from boto.dynamodb2.table import Table
from boto.exception import JSONResponseError

table_dynamo = Table(DYNAMODB_TABLE_NAME, connection=client_dynamo)
try:
    MTA = Table.create('USER1',
                       schema=[HashKey('TIME')],
                       connection=client_dynamo)
    time.sleep(30)  # give the new table time to become active
except JSONResponseError:
    # Table already exists: open it instead.
    MTA = Table('USER1', connection=client_dynamo)


### YOUR CODE HERE ####
def putdata(dentduration, stage):
    MTA.put_item(
        data={
            'TIME': str(time.time()),
            'DENTDURATION': str(dentduration),
            'STAGE': str(stage),
        })
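putdata would then be called from the measurement loop, e.g. (both arguments hypothetical):

putdata(0.42, 'detect')   # duration in seconds and a stage label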
Example #20
0
def dynamodb_records_table(dynamodb_table_maker):
    schema = [HashKey('time_index_key'), RangeKey('range_key')]
    return dynamodb_table_maker('records', schema)
Example #21
0
class JBoxUserProfile(JBoxDB):
    NAME = 'jbox_user_profiles'

    SCHEMA = [HashKey('user_id', data_type=STRING)]

    INDEXES = None
    GLOBAL_INDEXES = [
        GlobalKeysOnlyIndex('create_month-create_time-index',
                            parts=[
                                HashKey('create_month', data_type=NUMBER),
                                RangeKey('create_time', data_type=NUMBER)
                            ]),
        GlobalKeysOnlyIndex('update_month-update_time-index',
                            parts=[
                                HashKey('update_month', data_type=NUMBER),
                                RangeKey('update_time', data_type=NUMBER)
                            ])
    ]

    TABLE = None

    ATTR_FIRST_NAME = 'first_name'
    ATTR_LAST_NAME = 'last_name'
    ATTR_COUNTRY = 'country'
    ATTR_CITY = 'city'
    ATTR_LOCATION = 'location'  # a fuzzy location string, indicative of country and city
    ATTR_IP = 'ip'  # ip from which last accessed
    ATTR_INDUSTRY = 'industry'
    ATTR_ORGANIZATION = 'org'  # workplace
    ATTR_ORG_TITLE = 'org_title'  # job title

    KEYS = ['user_id']
    ATTRIBUTES = [
        'create_month',
        'create_time',
        'update_month',
        'update_time',
        ATTR_FIRST_NAME,
        ATTR_LAST_NAME,
        ATTR_COUNTRY,
        ATTR_CITY,
        ATTR_LOCATION,
        ATTR_IP,
        ATTR_INDUSTRY,
        ATTR_ORGANIZATION,
        ATTR_ORG_TITLE,
        'sources'  # a JSON field that indicates where each profile attribute was filled from
    ]
    SQL_INDEXES = [
        {
            'name': 'create_month-create_time-index',
            'cols': ['create_month', 'create_time']
        },
        {
            'name': 'update_month-update_time-index',
            'cols': ['update_month', 'update_time']
        },
    ]
    KEYS_TYPES = [JBoxDB.VCHAR]
    TYPES = [
        JBoxDB.INT, JBoxDB.INT, JBoxDB.INT, JBoxDB.INT, JBoxDB.VCHAR,
        JBoxDB.VCHAR, JBoxDB.VCHAR, JBoxDB.VCHAR, JBoxDB.VCHAR, JBoxDB.VCHAR,
        JBoxDB.VCHAR, JBoxDB.VCHAR, JBoxDB.VCHAR, JBoxDB.VCHAR
    ]

    SRC_USER = 1  # filled in by the user
    SRC_DERIVED = 2  # derived from other fields

    def __init__(self, user_id, create=False):
        try:
            self.item = self.fetch(user_id=user_id)
            self.is_new = False
        except JBoxDBItemNotFound:
            if create:
                data = {'user_id': user_id}
                JBoxUserProfile._set_time(data, "create")
                self.create(data)
                self.item = self.fetch(user_id=user_id)
                self.is_new = True
            else:
                raise

    def get_user_id(self):
        return self.get_attrib('user_id')

    def get_attrib_source(self, attrib_name):
        sources_str = self.get_attrib('sources', '{}')
        if len(sources_str) == 0:
            return None
        sources = json.loads(sources_str)
        return sources[attrib_name] if attrib_name in sources else None

    def set_attrib_source(self, attrib_name, source):
        sources_str = self.get_attrib('sources', '{}')
        if len(sources_str) == 0:
            sources_str = '{}'
        sources = json.loads(sources_str)
        sources[attrib_name] = source
        self.set_attrib('sources', json.dumps(sources))

    def is_set_by_user(self, attrib_name):
        return self.get_attrib_source(attrib_name) == JBoxUserProfile.SRC_USER

    def set_profile(self, attrib_name, value, source):
        # do not overwrite attributes set by the user
        if source != JBoxUserProfile.SRC_USER and self.is_set_by_user(
                attrib_name):
            return False
        self.set_attrib(attrib_name, value)
        self.set_attrib_source(attrib_name, source)
        return True

    def can_set(self, attrib_name, value):
        if value is None or len(value) == 0:
            return False
        return value != self.get_attrib(attrib_name)

    def get_profile(self, attrib_name, default=''):
        return self.get_attrib(attrib_name, default)

    def set_time(self, prefix, dt=None):
        JBoxUserProfile._set_time(self.item, prefix, dt)

    @staticmethod
    def _set_time(item, prefix, dt=None):
        if dt is None:
            dt = datetime.datetime.now(pytz.utc)

        if prefix not in ["create", "update"]:
            raise Exception("invalid prefix for setting time")

        item[prefix + "_month"] = JBoxUserProfile.datetime_to_yyyymm(dt)
        item[prefix + "_time"] = JBoxUserProfile.datetime_to_epoch_secs(dt)

    def get_time(self, prefix):
        if prefix not in ["create", "update"]:
            raise Exception("invalid prefix for getting time")
        return JBoxUserProfile.epoch_secs_to_datetime(self.item[prefix +
                                                                "_time"])

    def save(self, set_time=True):
        if set_time:
            self.set_time("update")
        super(JBoxUserProfile, self).save()
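The set_profile/sources interplay is the interesting part of this class: values derived from other fields never clobber values the user typed in. A standalone sketch of that precedence rule, using a plain dict in place of a DynamoDB item:

import json

SRC_USER, SRC_DERIVED = 1, 2

def set_profile(item, name, value, source):
    # Derived values never overwrite values the user set explicitly.
    sources = json.loads(item.get('sources', '{}'))
    if source != SRC_USER and sources.get(name) == SRC_USER:
        return False
    item[name] = value
    sources[name] = source
    item['sources'] = json.dumps(sources)
    return True

profile = {}
set_profile(profile, 'city', 'Bengaluru', SRC_USER)
set_profile(profile, 'city', 'Mumbai', SRC_DERIVED)   # refused
assert profile['city'] == 'Bengaluru'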
Example #22
0
class JBoxInvite(JBoxDB):
    NAME = 'jbox_invites'

    SCHEMA = [HashKey('invite_code', data_type=STRING)]

    INDEXES = None

    TABLE = None

    def __init__(self, invite_code, invited=None, create=False):
        if self.table() is None:
            return

        self.item = None
        try:
            self.item = self.table().get_item(invite_code=invite_code)
            self.is_new = False
        except boto.dynamodb2.exceptions.ItemNotFound:
            if create:
                if len(invite_code) < 6:
                    raise Exception(
                        "Invite code is too short. Must be at least 6 chars.")
                now = datetime.datetime.now(pytz.utc)
                data = {
                    'invite_code': invite_code,
                    'time_created': now.isoformat(),
                    # invite codes are valid for 1 day
                    'expires_on': (now + datetime.timedelta(1)).isoformat(),
                    'invited': invited
                }
                self.create(data)
                self.item = self.table().get_item(invite_code=invite_code)
                self.is_new = True
            else:
                raise

    def save(self):
        self.set_attrib('time_updated',
                        datetime.datetime.now(pytz.utc).isoformat())
        super(JBoxInvite, self).save()

    def is_invited(self, user_id):
        if (self.table() is None) or (self.item is None):
            return False  # no table/item to check against

        if self.item.get('invited', None) is None:
            return False

        max_count = self.item.get('max_count', None)
        if max_count is not None and max_count <= self.item.get('count', 0):
            return False

        try:
            expires = isodate.parse_datetime(self.item['expires_on'])
        except Exception:
            self.log_info("Error parsing invite code expiry date: " +
                          str(self.item['invite_code']) + " " +
                          str(self.item['expires_on']))
            return False

        if expires < datetime.datetime.now(pytz.utc):
            # This invite code has expired, and hence invalid
            return False
        if self.item['invited'] == '*':  # Anyone is allowed
            return True

        ids = map(str.strip, self.item['invited'].split(","))
        return user_id in ids

    def increment_count(self):
        if (self.table() is None) or (self.item is None):
            return  # nothing to update

        c = self.item.get('count', 0)
        self.item['count'] = c + 1
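The expiry test inside is_invited has the one subtle dependency here: isodate parsing back what datetime.isoformat() produced. In isolation (a self-contained sketch):

import datetime

import isodate
import pytz

now = datetime.datetime.now(pytz.utc)
expires_on = (now + datetime.timedelta(days=1)).isoformat()  # as stored
expired = isodate.parse_datetime(expires_on) < datetime.datetime.now(pytz.utc)
assert not expired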
Example #23
0
hash_key = ''
range_key = ''
for schema in src['KeySchema']:
    attr_name = schema['AttributeName']
    key_type = schema['KeyType']
    if key_type == 'HASH':
        hash_key = attr_name
        hash_key_type = [
            x["AttributeType"] for x in src['AttributeDefinitions']
            if x["AttributeName"] == hash_key
        ][0]
    elif key_type == 'RANGE':
        range_key = attr_name
        range_key_type = [
            x["AttributeType"] for x in src['AttributeDefinitions']
            if x["AttributeName"] == range_key
        ][0]

# 2. Create the new table
table_struct = None
schema = [HashKey(hash_key, data_type=hash_key_type)]
if range_key != '':
    schema.append(RangeKey(range_key, data_type=range_key_type))

try:
    new_logs = Table(dst_table, connection=ddbc, schema=schema)

    table_struct = new_logs.describe()
    print 'Table %s already exists' % dst_table
    sys.exit(0)
except JSONResponseError:
    new_logs = Table.create(
        dst_table,
        connection=ddbc,
        schema=schema,
    )
Example #24
0
def get_args():
    argp = argparse.ArgumentParser(
        description="Create table for Assignment 3 of CMPT 474, Summer 2016")
    argp.add_argument("name", help="Name of table to create")
    argp.add_argument(
        "--reads",
        type=int,
        default=DEF_READ_CAPACITY,
        help="Read capacity (default {0})".format(DEF_READ_CAPACITY))
    argp.add_argument(
        "--writes",
        type=int,
        default=DEF_WRITE_CAPACITY,
        help="write capacity (default {0})".format(DEF_WRITE_CAPACITY))
    argp.add_argument("--region",
                      default=DEF_REGION,
                      help="Region (default {0})".format(DEF_REGION))
    return argp.parse_args()


if __name__ == "__main__":
    args = get_args()
    acts = Table.create(args.name,
                        schema=[HashKey('id', data_type=NUMBER)],
                        throughput={
                            'read': args.reads,
                            'write': args.writes
                        },
                        connection=boto.dynamodb2.connect_to_region(
                            args.region))
Example #25
0
hash_key = ''
range_key = ''
for schema in src['KeySchema']:
    attr_name = schema['AttributeName']
    key_type = schema['KeyType']
    if key_type == 'HASH':
        hash_key = attr_name
    elif key_type == 'RANGE':
        range_key = attr_name

# 2. Create the new table
table_struct = None
try:
    new_logs = Table(dst_table,
                     connection=ddbc,
                     schema=[HashKey(hash_key),
                             RangeKey(range_key),
                             ]
                     )

    table_struct = new_logs.describe()
    print 'Table %s already exists' % dst_table
except JSONResponseError:
    schema = [HashKey(hash_key)]
    if range_key != '':
        schema.append(RangeKey(range_key))
    new_logs = Table.create(dst_table,
                            connection=ddbc,
                            schema=schema,
                            )
    print '*** Waiting for the new table %s to become active' % dst_table
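The snippet announces the wait but the polling loop is cut off. The usual pattern, sketched here, polls describe() (which returns the raw DescribeTable payload) until the table reports ACTIVE:

import time

while True:
    status = new_logs.describe()['Table']['TableStatus']
    if status == 'ACTIVE':
        break
    time.sleep(5)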
Example #26
0
def init(app):

    ec2 = boto.ec2.connect_to_region(
        app.config["identity"]['region'],
        aws_access_key_id=app.config['keys.key_id'],
        aws_secret_access_key=app.config['keys.key_secret'],
        security_token=app.config['keys.key_token'])
    #print "instance id : ", app.config["instance_id"]
    # Get meta tags
    reservation = ec2.get_all_instances(instance_ids=app.config["instance_id"])
    meta_tags = {}
    if reservation:
        for tag in reservation[0].instances[0].tags:
            meta_tags[str(tag)] = str(reservation[0].instances[0].tags[tag])
            #print str(tag), str(reservation[0].instances[0].tags[tag])

    #meta_tags = {}
    #for tag in ec2.get_all_tags():
    #    meta_tags[str(tag.name)] = str(tag.value)

    # Log the metadata tags
    app.config["instance.tags"] = meta_tags
    for k in meta_tags:
        logging.debug("[TAGS] {0} : {1}".format(k, meta_tags[k]))

    sqs = boto.sqs.connect_to_region(
        app.config["identity"]['region'],
        aws_access_key_id=app.config['keys.key_id'],
        aws_secret_access_key=app.config['keys.key_secret'],
        security_token=app.config['keys.key_token'])

    sns = boto.sns.connect_to_region(
        app.config["identity"]['region'],
        aws_access_key_id=app.config['keys.key_id'],
        aws_secret_access_key=app.config['keys.key_secret'],
        security_token=app.config['keys.key_token'])

    ses = boto.ses.connect_to_region(
        app.config["identity"]['region'],
        aws_access_key_id=app.config['keys.key_id'],
        aws_secret_access_key=app.config['keys.key_secret'],
        security_token=app.config['keys.key_token'])

    scale = boto.ec2.autoscale.AutoScaleConnection(
        aws_access_key_id=app.config['keys.key_id'],
        aws_secret_access_key=app.config['keys.key_secret'],
        security_token=app.config['keys.key_token'])

    s3 = S3Connection(aws_access_key_id=app.config['keys.key_id'],
                      aws_secret_access_key=app.config['keys.key_secret'],
                      security_token=app.config['keys.key_token'])

    # The table name comes from the instance tags; an alternative source
    # would be app.config['dynamodb.table_name'].
    dyno = Table(
        app.config["instance.tags"]["DynamoDBTableName"],
        schema=[HashKey("job_id")],
        connection=ddb.connect_to_region(
            app.config['dynamodb.region'],
            aws_access_key_id=app.config['keys.key_id'],
            aws_secret_access_key=app.config['keys.key_secret'],
            security_token=app.config['keys.key_token']))

    app.config["ec2.conn"] = ec2
    app.config["sns.conn"] = sns
    app.config["sqs.conn"] = sqs
    app.config["ses.conn"] = ses
    app.config["s3.conn"] = s3
    app.config["scale.conn"] = scale
    app.config["dyno.conn"] = dyno
    app.config["doReload"] = True

    return app
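The tag scrape at the top of init() is worth isolating. A minimal stand-alone version (the helper name instance_tags is ours; it takes an existing boto.ec2 connection):

def instance_tags(ec2, instance_id):
    # Return {tag_name: tag_value} for the given instance, or {} if the
    # reservation lookup comes back empty.
    reservations = ec2.get_all_instances(instance_ids=[instance_id])
    if not reservations:
        return {}
    instance = reservations[0].instances[0]
    return dict((str(k), str(v)) for k, v in instance.tags.items())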
Example #27
0
def create_table(table_name,
                 region=None,
                 key=None,
                 keyid=None,
                 profile=None,
                 read_capacity_units=None,
                 write_capacity_units=None,
                 hash_key=None,
                 hash_key_data_type=None,
                 range_key=None,
                 range_key_data_type=None,
                 local_indexes=None,
                 global_indexes=None):
    '''
    Creates a DynamoDB table.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_dynamodb.create_table table_name /
        region=us-east-1 /
        hash_key=id /
        hash_key_data_type=N /
        range_key=created_at /
        range_key_data_type=N /
        read_capacity_units=1 /
        write_capacity_units=1
    '''
    schema = []
    primary_index_fields = []
    primary_index_name = ''
    if hash_key:
        hash_key_obj = HashKey(hash_key, data_type=hash_key_data_type)
        schema.append(hash_key_obj)
        primary_index_fields.append(hash_key_obj)
        primary_index_name += hash_key
    if range_key:
        range_key_obj = RangeKey(range_key, data_type=range_key_data_type)
        schema.append(range_key_obj)
        primary_index_fields.append(range_key_obj)
        primary_index_name += '_'
        primary_index_name += range_key
    primary_index_name += '_index'
    throughput = {'read': read_capacity_units, 'write': write_capacity_units}
    local_table_indexes = []
    if local_indexes:
        # Add the table's key
        local_table_indexes.append(
            AllIndex(primary_index_name, parts=primary_index_fields))
        for index in local_indexes:
            local_table_indexes.append(_extract_index(index))
    global_table_indexes = []
    if global_indexes:
        for index in global_indexes:
            global_table_indexes.append(
                _extract_index(index, global_index=True))

    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)

    Table.create(table_name,
                 schema=schema,
                 throughput=throughput,
                 indexes=local_table_indexes,
                 global_indexes=global_table_indexes,
                 connection=conn)

    # Table creation can take several seconds to propagate.
    # We will check MAX_ATTEMPTS times.
    MAX_ATTEMPTS = 30
    for i in range(MAX_ATTEMPTS):
        if exists(table_name, region, key, keyid, profile):
            return True
        else:
            time.sleep(1)  # sleep for one second and try again
    return False
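Called directly rather than through the salt CLI shown in the docstring, a minimal invocation might look like this (region/credential resolution is left to _get_conn; the table name and keys are placeholders):

# Hypothetical direct invocation mirroring the docstring's CLI example.
created = create_table('logs',
                       region='us-east-1',
                       hash_key='id', hash_key_data_type='N',
                       range_key='created_at', range_key_data_type='N',
                       read_capacity_units=1, write_capacity_units=1)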
Example #28
0
import logging
import time
from boto.dynamodb2.table import Table
from boto.dynamodb2.fields import HashKey, RangeKey

logger = logging.getLogger('flotilla')

SCHEMAS = {
    'assignments': [HashKey('instance_id')],
    'locks': [HashKey('lock_name')],
    'regions': [HashKey('region_name')],
    'revisions': [HashKey('rev_hash')],
    'services': [HashKey('service_name')],
    'stacks': [HashKey('stack_arn')],
    'status': [HashKey('service'), RangeKey('instance_id')],
    'units': [HashKey('unit_hash')],
    'users': [HashKey('username')]
}


class DynamoDbTables(object):
    def __init__(self, dynamo, environment=None, backoff=0.5):
        self._dynamo = dynamo
        if environment:
            self._prefix = 'flotilla-{0}-'.format(environment)
        else:
            self._prefix = 'flotilla-'
        self.assignments = None
        self.locks = None
        self.regions = None
        self.revisions = None
        self.services = None
        self.stacks = None
        self.status = None
        self.units = None
        self.users = None
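The setup logic that fills those handles is cut off here. In the spirit of the class, a hypothetical open-or-create helper built on SCHEMAS and the prefix/backoff fields (not flotilla's actual code):

from boto.exception import JSONResponseError


def _open_or_create(dynamo, prefix, name, backoff=0.5):
    # Open the prefixed table, creating it from SCHEMAS if missing;
    # pause briefly before falling back so a racing create can settle.
    full_name = prefix + name
    try:
        return Table.create(full_name, schema=SCHEMAS[name],
                            connection=dynamo)
    except JSONResponseError:
        time.sleep(backoff)
        return Table(full_name, connection=dynamo)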
Example #29
0
def extract_index(index_data, global_index=False):
    """
    Instantiate and return an index object (an AllIndex for a local
    secondary index, or one of the Global* variants when global_index
    is True) from a valid index configuration.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_dynamodb.extract_index index
    """
    parsed_data = {}
    keys = []

    for key, value in index_data.items():
        for item in value:
            for field, data in item.items():
                if field == "hash_key":
                    parsed_data["hash_key"] = data
                elif field == "hash_key_data_type":
                    parsed_data["hash_key_data_type"] = data
                elif field == "range_key":
                    parsed_data["range_key"] = data
                elif field == "range_key_data_type":
                    parsed_data["range_key_data_type"] = data
                elif field == "name":
                    parsed_data["name"] = data
                elif field == "read_capacity_units":
                    parsed_data["read_capacity_units"] = data
                elif field == "write_capacity_units":
                    parsed_data["write_capacity_units"] = data
                elif field == "includes":
                    parsed_data["includes"] = data
                elif field == "keys_only":
                    parsed_data["keys_only"] = True

    if parsed_data["hash_key"]:
        keys.append(
            HashKey(parsed_data["hash_key"],
                    data_type=parsed_data["hash_key_data_type"]))
    if parsed_data.get("range_key"):
        keys.append(
            RangeKey(parsed_data["range_key"],
                     data_type=parsed_data["range_key_data_type"]))
    if (global_index and parsed_data["read_capacity_units"]
            and parsed_data["write_capacity_units"]):
        parsed_data["throughput"] = {
            "read": parsed_data["read_capacity_units"],
            "write": parsed_data["write_capacity_units"],
        }
    if parsed_data["name"] and keys:
        if global_index:
            if parsed_data.get("keys_only") and parsed_data.get("includes"):
                raise SaltInvocationError(
                    "Only one type of GSI projection can be used.")

            if parsed_data.get("includes"):
                return GlobalIncludeIndex(
                    parsed_data["name"],
                    parts=keys,
                    throughput=parsed_data["throughput"],
                    includes=parsed_data["includes"],
                )
            elif parsed_data.get("keys_only"):
                return GlobalKeysOnlyIndex(
                    parsed_data["name"],
                    parts=keys,
                    throughput=parsed_data["throughput"],
                )
            else:
                return GlobalAllIndex(
                    parsed_data["name"],
                    parts=keys,
                    throughput=parsed_data["throughput"],
                )
        else:
            return AllIndex(parsed_data["name"], parts=keys)
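The triple-nested loop implies that index_data arrives as a one-key mapping to a list of single-key dicts, the shape a salt YAML pillar produces (Example #27 calls the underscored variant of this helper). A hypothetical input and call:

index_data = {
    'users-name-index': [
        {'name': 'users-name-index'},
        {'hash_key': 'name'},
        {'hash_key_data_type': 'S'},
        {'keys_only': True},
        {'read_capacity_units': 1},
        {'write_capacity_units': 1},
    ]
}
gsi = extract_index(index_data, global_index=True)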
Example #30
0
    aws_access_key_id=assumedRoleObject.credentials.access_key,
    aws_secret_access_key=assumedRoleObject.credentials.secret_key,
    security_token=assumedRoleObject.credentials.session_token)
from boto import kinesis
#kinesis = kinesis.connect_to_region("us-east-1")
#stream = kinesis.create_stream("edisonDemoKinesis", 1)
#kinesis.describe_stream("edisonDemoKinesis")
#kinesis.list_streams()
#global count_dyn
count_dyn = 0

from boto.dynamodb2.fields import HashKey
from boto.dynamodb2.table import Table
from boto.dynamodb2.types import NUMBER
from boto.exception import JSONResponseError

table_dynamo = Table(DYNAMODB_TABLE_NAME, connection=client_dynamo)
try:
    pingdata = Table.create('androidUser',
                            schema=[HashKey('userNo', data_type=NUMBER)],
                            connection=client_dynamo)
    count_dyn = 0
except JSONResponseError:
    # Table already exists: open it and count the existing items.
    #count_dyn = tempdata.count() + 1
    pingdata = Table('androidUser',
                     schema=[HashKey('userNo', data_type=NUMBER)],
                     connection=client_dynamo)
    existingitems = pingdata.scan()
    for i in existingitems:
        count_dyn = count_dyn + 1
time.sleep(.15)
#tempdata = Table('tempdata', schema = [HashKey('timestamp')], connection = client_dynamo)
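The commented-out tempdata.count() hints at the alternative to the scan loop: boto2's Table.count() reads DescribeTable's ItemCount, which AWS refreshes only roughly every six hours, hence the exact-but-expensive scan above. For a rough figure:

approx = pingdata.count()   # stale by up to ~6 hours, but a single call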