Example #1
File: setup.py Project: acuros/remembrall
def create_table(region):
    connection = boto.dynamodb2.connect_to_region(region)
    Table.create(
        'Word',
        schema=[
            HashKey('user'),
            RangeKey('word')
        ],
        global_indexes = [
            GlobalAllIndex('WordList-index', parts=[
                HashKey('user'),
                RangeKey('wordList')
            ])
        ],
        throughput={
            'read': 1,
            'write': 1
        },
        connection=connection
    )
    Table.create(
        'WordList',
        schema=[
            HashKey('user'),
            RangeKey('name')
        ],
        throughput={
            'read': 1,
            'write': 1
        },
        connection=connection
    )
Example #2
def get_lock_table(connection=None):
    if not connection:
        connection = boto.dynamodb2.connect_to_region('eu-west-1')

    lock_table_name = 'hotel_elasticsearch_lock'

    lock_table_config = dict(
        connection=connection,
        schema=[
            HashKey('lock_name', data_type=STRING),
        ],
        throughput={
            'read': 5,
            'write': 5,
        }
    )

    try:
        connection.describe_table(lock_table_name)
    except JSONResponseError:
        Table.create(
            lock_table_name,
            **lock_table_config
        )

    return Table(
        lock_table_name,
        **lock_table_config
    )
Example #3
    def createTable(isLocal, localPort):
        """Used to create a table for DynamoDB"""
        SessionTable.LOCAL_PORT = localPort
        secondaryIndex = [
            GlobalAllIndex('expiration-index',
                parts=[
                    HashKey('expiration', data_type=NUMBER)
                ],
                throughput={'read': 5, 'write': 5}
            )
        ]
        if isLocal:
            try:
                Table.create(
                    SessionTable.TABLE_NAME,
                    schema=[HashKey(SessionTable.KEY_NAME)],
                    global_indexes=secondaryIndex,
                    connection=SessionTable.getLocalConnection()
                )
            except exceptions.JSONResponseError as jre:
                if jre.status == 400 and "preexisting" in jre.message.lower():
                    #table already exists
                    pass

        else:
            Table.create(
                SessionTable.TABLE_NAME,
                schema=[HashKey(SessionTable.KEY_NAME)],
                global_indexes=secondaryIndex
            )
Example #4
    def create_all(self):
        """
        Create all user-specified DynamoDB tables.

        This function tries to create each table defined. If a table already
        exists, it tries to update the throughput and global indexes.
        If none of these values has changed, it will just raise an exception.
        """
        tables_failed = []
        try:
            for table_name, table in self.tables.items():
                try:
                    Table.create(
                        table_name = table.table_name,
                        schema = table.schema,
                        throughput = table.throughput,
                        indexes = table.indexes,
                        global_indexes = table.global_indexes,
                        connection = self.connection,
                    )
                except JSONResponseError:
                    try:
                        Table(table.table_name, connection=self.connection).update(
                            throughput = table.throughput,
                            global_indexes = table.global_indexes
                        )
                    except JSONResponseError:
                        tables_failed.append(table_name)
                        continue
        finally:
            if len(tables_failed):
                raise DynamodbTableError('Creation/update failed for tables: {tables_failed}'.format(
                    tables_failed=', '.join(tables_failed)
                ))
Example #5
    def test_create_table(self):
        self.storage_mocker.StubOutWithMock(storage, 'create_table')
        storage.create_table(IgnoreArg(), IgnoreArg())
        self.storage_mocker.ReplayAll()

        Table.create(
            "test",
            schema=[
                fields.HashKey('hash', data_type=schema_types.NUMBER),
                fields.RangeKey('range', data_type=schema_types.STRING)
            ],
            throughput={
                'read': 20,
                'write': 10,
            },
            indexes=[
                fields.KeysOnlyIndex(
                    'index_name',
                    parts=[
                        fields.RangeKey('indexed_field',
                                        data_type=schema_types.STRING)
                    ]
                )
            ],
            connection=self.DYNAMODB_CON
        )

        self.storage_mocker.VerifyAll()
Example #6
    def create_table(self, table_name, schema, throughput=None, indexes=None,
                     global_indexes=None):
        '''
        (str, list, dict, list, list) -> boto.dynamodb2.table.Table

        table_name: str with the name of the table to create.
        schema: list of "BaseSchemaField" objects describing the table schema.
        throughput: dict with 'read' & 'write' keys and integer values.
        indexes: list of "BaseIndexField" objects defining the table's indexes.
        global_indexes: list of "GlobalBaseIndexField" objects defining the
        table's global indexes.

        Creates a table. Returns the created table.
        '''

        tables = self.cnn.list_tables()
        table = Table(table_name, connection=self.cnn)

        # Check whether the table should be deleted before creating it, e.g.
        # when the application is running in a test environment.
        if table_name in tables['TableNames'] and table_name.startswith('_'):
            if table.delete():
                tables = self.cnn.list_tables()

        # Check whether the table has already been created.
        if table_name not in tables['TableNames']:
            Table.create(table_name,
                         schema=schema,
                         throughput=throughput,
                         global_indexes=global_indexes,
                         indexes=indexes,
                         connection=self.cnn)
        return table
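A hedged usage sketch for the wrapper above: the instance name `dynamo`, the table name, keys, and throughput values are made up for illustration and are not part of the original project; only the boto.dynamodb2 field classes already used throughout these examples are assumed.

# Hypothetical call to the create_table wrapper defined above. "dynamo" stands
# in for an instance of the class that defines it; names and values are
# illustrative only.
from boto.dynamodb2.fields import HashKey, RangeKey, GlobalAllIndex
from boto.dynamodb2.types import STRING, NUMBER

table = dynamo.create_table(
    '_test_events',  # a leading '_' lets the wrapper delete and recreate it in test runs
    schema=[
        HashKey('event_id', data_type=STRING),
        RangeKey('timestamp', data_type=NUMBER),
    ],
    throughput={'read': 5, 'write': 5},
    global_indexes=[
        GlobalAllIndex('ByUser', parts=[HashKey('user_id', data_type=STRING)]),
    ],
)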
Example #7
 def tables_create(self):
     """
     Creates a new table; throws an exception if the table already exists.
     """
     for table_name, schema in dynamo_schema.iteritems():
         logging.info("Creating " + self.prefix + table_name)
         Table.create(self.prefix + table_name, **schema)
Example #8
 def create_tables( self ):         
     """ """
     try:
         logger.info( 'Starting' )                           
         # if the table exists then delete it
         self.spot_master_table = Table( self.spot_master_table_name, connection=self.dynamodb_conn ) 
         self.spot_request_table = Table( self.spot_request_table_name, connection=self.dynamodb_conn ) 
         self.spot_rsa_key_table = Table( self.spot_rsa_key_table_name, connection=self.dynamodb_conn ) 
         self.spot_batch_job_parm_table = Table( self.spot_batch_job_parm_table_name, connection=self.dynamodb_conn ) 
         
         delete_and_wait_until_tables_deleted( [self.spot_master_table, self.spot_request_table, 
                                                self.spot_rsa_key_table, self.spot_batch_job_parm_table ] )
 
         self.spot_master_table = Table.create( self.spot_master_table_name, connection=self.dynamodb_conn, 
                                                schema=[HashKey( TableSpotMaster.spot_master_uuid, data_type=STRING) ],
                                                global_indexes=[ GlobalAllIndex('IsOpen', parts=[ HashKey( TableSpotMaster.is_open, data_type=NUMBER) ] ) ] 
                                                )
         self.spot_request_table = Table.create( self.spot_request_table_name, connection=self.dynamodb_conn, 
                                            schema=[HashKey( TableSpotRequest.spot_request_uuid, data_type=STRING) ],
                                            global_indexes=[ GlobalAllIndex('MasterUuid', parts=[ HashKey( TableSpotRequest.spot_master_uuid, data_type=STRING) ] ) ] 
                                            )
         self.spot_rsa_key_table = Table.create( self.spot_rsa_key_table_name, connection=self.dynamodb_conn, 
                                      schema=[HashKey( TableSpotRSAKey.spot_master_uuid, data_type=STRING) ] 
                                      )          
         self.spot_batch_job_parm_table = Table.create( self.spot_batch_job_parm_table_name, connection=self.dynamodb_conn, 
                                      schema=[HashKey( TableSpotBatchJobParm.spot_master_uuid, data_type=STRING) ] 
                                      )          
         
         wait_until_tables_active( [self.spot_master_table, self.spot_request_table, self.spot_rsa_key_table, self.spot_batch_job_parm_table ] )            
         logger.info( 'Completed Successfully' )
 
     except StandardError as e:
         logger.error( e )
         logger.error( traceback.format_exc() )
         sys.exit(8)
Example #9
    def test_create_table_duplicate(self):
        self.storage_mocker.StubOutWithMock(storage, 'create_table')
        storage.create_table(IgnoreArg(), IgnoreArg())
        storage.create_table(
            IgnoreArg(), IgnoreArg()
        ).AndRaise(TableAlreadyExistsException)
        self.storage_mocker.ReplayAll()

        Table.create(
            "test",
            schema=[
                fields.HashKey('hash', data_type=schema_types.NUMBER),
                fields.RangeKey('range', data_type=schema_types.STRING)
            ],
            throughput={
                'read': 20,
                'write': 10,
            },
            indexes=[
                fields.KeysOnlyIndex(
                    'index_name',
                    parts=[
                        fields.RangeKey('indexed_field',
                                        data_type=schema_types.STRING)
                    ]
                )
            ],
            connection=self.DYNAMODB_CON
        )

        try:
            Table.create(
                "test",
                schema=[
                    fields.HashKey('hash', data_type=schema_types.NUMBER),
                    fields.RangeKey('range', data_type=schema_types.STRING)
                ],
                throughput={
                    'read': 20,
                    'write': 10,
                },
                indexes=[
                    fields.KeysOnlyIndex(
                        'index_name',
                        parts=[
                            fields.RangeKey('indexed_field',
                                            data_type=schema_types.STRING)
                        ]
                    )
                ],
                connection=self.DYNAMODB_CON
            )

            self.fail()
        except JSONResponseError as e:
            self.assertEqual('ResourceInUseException', e.error_code)
            self.storage_mocker.VerifyAll()
        except Exception as e:
            self.fail()
Example #10
 def create_table(
     self, table_name, schema, throughput, indexes=None, global_indexes=None
 ):
     Table.create(
         self.get_table_name(table_name), schema=schema,
         throughput=throughput, connection=self.get_connection(),
         indexes=indexes, global_indexes=global_indexes)
     self.wait_table_active(table_name)
     self.get_tables()
Example #11
File: post.py Project: dkothari777/CSC346
def createTagTable():
    Table.create('Tags', schema = [HashKey('Tag'), RangeKey('Time')],connection = conn)
    while True:
        time.sleep(5)
        try:
            conn.describe_table('Tags')
        except Exception, e:
            print e
        else:
            break
Example #12
 def upload(self, filename, file_hash):
     """
     Upload filename and store the archive id for future retrieval
     """
     
     rel_filename = os.path.relpath(filename, bagparent)
     archive_id = self.vault.concurrent_create_archive_from_file(filename, description=rel_filename)
     
     # Storing the archive_id, filename, file_hash, and bag_date relationships in dynamodb
     try:
         archives.put_item(data={
             'archive_id': archive_id,
             'vault_name': vault_name,
             'file_hash': file_hash,
             'filename': rel_filename,
         })
     ## If the database doesn't exist, create it
     except JSONResponseError as e:
         if e.status == 400 and e.message == 'Requested resource not found':
             print('freezerbag_archives table missing, creating now')
             Table.create('freezerbag_archives', schema=[HashKey('archive_id'), RangeKey('vault_name', data_type='S')])
             time.sleep(30)
         ## Bail if we hit a JSON error we don't understand
         else:
             print(e.status)
             print(e.message)
             exit()
     ## Write out the hash too
     hashes.put_item(data={
         'file_hash': file_hash,
         'archive_id': archive_id,
         'vault_name': vault_name,
     })
     try:
         names.put_item(data={
             'filename': rel_filename,
             'bag_date': bag_date,
             'bagname': bagname,
             'archive_id': archive_id,
             'file_hash': file_hash,
             'vault_name': vault_name,
         })
     ## If the database doesn't exist, create it            
     except JSONResponseError as e:
         if e.status == 400 and e.message == 'Requested resource not found':
             print('freezerbag_names table missing, creating now')
             Table.create('freezerbag_names', schema=[HashKey('filename'), RangeKey('bag_date', data_type=NUMBER)])
             time.sleep(30)
         ## Bail if we hit a JSON error we don't understand
         else:
             print(e.status)
             print(e.message)
             exit()
Example #13
    def _create_table(self, name, range_key=False):
        if range_key:
            schema = [
                HashKey(Doc.hashkey, data_type=STRING),
                RangeKey(Doc.key, data_type=STRING)
            ]
        else:
            schema = [
                HashKey(Doc.key, data_type=STRING) # by default, use single index
            ]

        Table.create(name, schema=schema, connection=self.conn)
Example #14
def create_or_update_dynamo_table(connection, module):
    table_name = module.params.get('name')
    hash_key_name = module.params.get('hash_key_name')
    hash_key_type = module.params.get('hash_key_type')
    range_key_name = module.params.get('range_key_name')
    range_key_type = module.params.get('range_key_type')
    read_capacity = module.params.get('read_capacity')
    write_capacity = module.params.get('write_capacity')
    all_indexes = module.params.get('indexes')

    for index in all_indexes:
        validate_index(index, module)

    schema = get_schema_param(hash_key_name, hash_key_type, range_key_name, range_key_type)

    throughput = {
        'read': read_capacity,
        'write': write_capacity
    }

    indexes, global_indexes = get_indexes(all_indexes)

    result = dict(
        region=module.params.get('region'),
        table_name=table_name,
        hash_key_name=hash_key_name,
        hash_key_type=hash_key_type,
        range_key_name=range_key_name,
        range_key_type=range_key_type,
        read_capacity=read_capacity,
        write_capacity=write_capacity,
        indexes=all_indexes,
    )

    try:
        table = Table(table_name, connection=connection)


        if dynamo_table_exists(table):
            result['changed'] = update_dynamo_table(table, throughput=throughput, check_mode=module.check_mode, global_indexes=global_indexes)
        else:
            if not module.check_mode:
                Table.create(table_name, connection=connection, schema=schema, throughput=throughput, indexes=indexes, global_indexes=global_indexes)
            result['changed'] = True

        if not module.check_mode:
            result['table_status'] = table.describe()['Table']['TableStatus']

    except BotoServerError:
        result['msg'] = 'Failed to create/update dynamo table due to error: ' + traceback.format_exc()
        module.fail_json(**result)
    else:
        module.exit_json(**result)
Example #15
def create_or_update_dynamo_table(connection, module):
    table_name = module.params.get('name')
    hash_key_name = module.params.get('hash_key_name')
    hash_key_type = module.params.get('hash_key_type')
    range_key_name = module.params.get('range_key_name')
    range_key_type = module.params.get('range_key_type')
    read_capacity = module.params.get('read_capacity')
    write_capacity = module.params.get('write_capacity')

    if range_key_name:
        schema = [
            HashKey(hash_key_name, DYNAMO_TYPE_MAP.get(hash_key_type)),
            RangeKey(range_key_name, DYNAMO_TYPE_MAP.get(range_key_type))
        ]
    else:
        schema = [
            HashKey(hash_key_name, DYNAMO_TYPE_MAP.get(hash_key_type))
        ]
    throughput = {
        'read': read_capacity,
        'write': write_capacity
    }

    result = dict(
        region=module.params.get('region'),
        table_name=table_name,
        hash_key_name=hash_key_name,
        hash_key_type=hash_key_type,
        range_key_name=range_key_name,
        range_key_type=range_key_type,
        read_capacity=read_capacity,
        write_capacity=write_capacity,
    )

    try:
        table = Table(table_name, connection=connection)

        if dynamo_table_exists(table):
            result['changed'] = update_dynamo_table(table, throughput=throughput, check_mode=module.check_mode)
        else:
            if not module.check_mode:
                Table.create(table_name, connection=connection, schema=schema, throughput=throughput)
            result['changed'] = True

        if not module.check_mode:
            result['table_status'] = table.describe()['Table']['TableStatus']

    except BotoServerError:
        result['msg'] = 'Failed to create/update dynamo table due to error: ' + traceback.format_exc()
        module.fail_json(**result)
    else:
        module.exit_json(**result)
Example #16
    def create_all(self):
        """
        Create all user-specified DynamoDB tables.

        We'll error out if the tables can't be created for some reason.
        """
        for table_name, table in self.tables.iteritems():
            Table.create(
                table_name = table.table_name,
                schema = table.schema,
                throughput = table.throughput,
                indexes = table.indexes,
                global_indexes = table.global_indexes,
                connection = self.connection,
            )
Example #17
def createDynamoObject(name):
    try:
        users = Table.create(name, schema=[HashKey('id')],
                             throughput={'read': db_read_cap,
                             'write': db_write_cap},
                             global_indexes=[GlobalAllIndex('EverythingIndex'
                             , parts=[HashKey('name')])],
                             connection=boto.dynamodb2.connect_to_region(AWS_REGION))
    except:
        users = Table(name,
                      connection=boto.dynamodb2.connect_to_region('us-west-2'
                      ))
        print "1) Table 'data' already created for table: " + name

    # On first run this won't insert data because of the delay creating the table on the AWS server side.

    try:
        users.put_item(data={
            'id': '3',
            'type': 'person',
            'name': 'dummy',
            'activities': ['activity one'],
            })
    except:
        print '2) Dummy Data already added for table: ' + name
    return users
Example #18
def test_failed_overwrite():
    table = Table.create('messages', schema=[
        HashKey('id'),
    ], throughput={
        'read': 7,
        'write': 3,
    })

    data1 = {'id': '123', 'data': '678'}
    table.put_item(data=data1)

    data2 = {'id': '123', 'data': '345'}
    table.put_item(data=data2, overwrite=True)

    data3 = {'id': '123', 'data': '812'}
    table.put_item.when.called_with(data=data3).should.throw(ConditionalCheckFailedException)

    returned_item = table.lookup('123')
    dict(returned_item).should.equal(data2)

    data4 = {'id': '124', 'data': 812}
    table.put_item(data=data4)

    returned_item = table.lookup('124')
    dict(returned_item).should.equal(data4)
Example #19
def createGamesTable(db):

    try:
        hostStatusDate = GlobalAllIndex("HostId-StatusDate-index",
                                        parts=[HashKey("HostId"), RangeKey("StatusDate")],
                                        throughput={
                                            'read': 1,
                                            'write': 1
                                        })
        opponentStatusDate  = GlobalAllIndex("OpponentId-StatusDate-index",
                                        parts=[HashKey("OpponentId"), RangeKey("StatusDate")],
                                        throughput={
                                            'read': 1,
                                            'write': 1
                                        })

        #global secondary indexes
        GSI = [hostStatusDate, opponentStatusDate]

        gamesTable = Table.create("Games",
                    schema=[HashKey("GameId")],
                    throughput={
                        'read': 1,
                        'write': 1
                    },
                    global_indexes=GSI,
                    connection=db)

    except JSONResponseError, jre:
        try:
            gamesTable = Table("Games", connection=db)
        except Exception, e:
            print "Games Table doesn't exist."
Example #20
    def _get_scheduled_jobs(self, dynamodb_connection):  # noqa
        """
        WARNING -- this method requires cleanup; the user must remember to
        delete the table once complete.  For example:

        >>> NEW_JOB = {'log_version': 'ad_click', 'log_schema_version': '1'}
        >>> def cool_test_fn(dynamodb_connection):
        >>>     tsj = TestScheduledJobs()
        >>>     table, scheduled_jobs = tsj._get_scheduled_jobs(dynamodb_connection)
        >>>     assert scheduled_jobs.put(**NEW_JOB)
        >>>     yield scheduled_jobs
        >>>     assert table.delete()  # THIS IS THE KEY CLEANUP!!

        """
        avro_schema = get_avro_schema('mycroft/avro/scheduled_jobs.json')
        index_load_status = GlobalAllIndex(
            ScheduledJobs.INDEX_LOAD_STATUS,
            parts=[HashKey('load_status')])
        index_et_status = GlobalAllIndex(
            ScheduledJobs.INDEX_ET_STATUS,
            parts=[HashKey('et_status')])
        index_load_status = GlobalAllIndex(
            ScheduledJobs.INDEX_LOAD_STATUS,
            parts=[HashKey('load_status')])
        index_log_name_and_log_schema_version = GlobalAllIndex(
            ScheduledJobs.INDEX_LOG_NAME_AND_LOG_SCHEMA_VERSION,
            parts=[HashKey('log_name'), RangeKey('log_schema_version')])
        table = Table.create(
            'ScheduledJobs',
            schema=NAME_TO_SCHEMA['scheduled_jobs'],
            connection=dynamodb_connection,
            global_indexes=[index_et_status, index_load_status,
                            index_log_name_and_log_schema_version])
        return table, ScheduledJobs(persistence_object=table, avro_schema_object=avro_schema)
Example #21
    def test_unprocessed_batch_writes(self):
        # Create a very limited table w/ low throughput.
        users = Table.create('slow_users', schema=[
            HashKey('user_id'),
        ], throughput={
            'read': 1,
            'write': 1,
        })
        self.addCleanup(users.delete)

        # Wait for it.
        time.sleep(60)

        with users.batch_write() as batch:
            for i in range(500):
                batch.put_item(data={
                    'user_id': str(i),
                    'name': 'Droid #{0}'.format(i),
                })

            # Before ``__exit__`` runs, we should have a bunch of unprocessed
            # items.
            self.assertTrue(len(batch._unprocessed) > 0)

        # Post-__exit__, they should all be gone.
        self.assertEqual(len(batch._unprocessed), 0)
Example #22
def main():
    try:
        Table.create(
            'pi_project_data',
            schema=[
                HashKey('device_name', data_type=STRING),
                RangeKey('timestamp', data_type=NUMBER)
            ],
            throughput={
                'read': 1,
                'write': 1
            }
        )
        print('Table created: pi_project_data')
    except JSONResponseError as e:
        print(e.message)
Example #23
  def setUp(self):
    logging.getLogger('boto').setLevel(logging.CRITICAL)

    err = 'Use a real DynamoDB %s. Add datastore/dynamo/test_settings.py.'
    assert aws_access_key != '<aws access key>', err % 'access key.'
    assert aws_secret_key != '<aws secret key>', err % 'secret key.'
    self.conn = boto.dynamodb2.connect_to_region(aws_region, aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
    
    # Create an indexed table 
    table = Table(self.INDEXED_TABLE, connection=self.conn)
    try:
      status = table.describe()
    except:
      table = Table.create(self.INDEXED_TABLE, schema=[
          HashKey('department', data_type=STRING),
          RangeKey('name', data_type=STRING)
      ], indexes=[
        AllIndex('ScoreIndex', parts=[
          HashKey('department'),
          RangeKey('score', data_type=NUMBER)
        ])
      ], global_indexes=[
        GlobalAllIndex('GroupIndex', parts=[
          HashKey('group'),
          RangeKey('age', data_type=NUMBER)
        ])
      ], connection=self.conn)

    # make sure we're clean :)
    self._delete_keys_from_table(self.SIMPLE_TABLE) 
    self._delete_keys_from_table(self.INDEXED_TABLE) 
    self._delete_keys_from_table(self.RANGEKEY_TABLE) 
Example #24
 def _mock_create_table(self, name, hash_key=MOCK_TABLE_HASH_KEY,
                        range_key=MOCK_TABLE_RANGE_KEY,
                        read_throughput=MOCK_TABLE_READ_THROUGHPUT,
                        write_throughput=MOCK_TABLE_WRITE_THROUGHPUT,
                        global_index_name=MOCK_TABLE_GLOBAL_INDEX_NAME,
                        global_index_attr_name=MOCK_TABLE_GLOBAL_INDEX_ATTR_NAME):
     Table.create(
         name,
         schema=[
             HashKey(hash_key),
             RangeKey(range_key)],
         throughput={
             'read': read_throughput,
             'write': write_throughput},
         global_indexes=[GlobalAllIndex(global_index_name,
                                        parts=[HashKey(global_index_attr_name)])])
Example #25
def dynamodb_table_name(dynamodb_connection):  # noqa
    table_name = 'My_Test_Table'
    table = Table.create(table_name,
                         schema=[HashKey('hash', data_type=STRING)],
                         connection=dynamodb_connection)
    yield table_name
    assert table.delete()
Example #26
File: db.py Project: tomzhang/autopush
def create_router_table(tablename="router", read_throughput=5,
                        write_throughput=5):
    """Create a new router table

    The last_connect index is a value used to determine the last month a user
    was seen in. To prevent hot-keys on this table during month switchovers,
    the key is determined based on the following scheme:

        (YEAR)(MONTH)(DAY)(HOUR)(0001-0010)

    Note that the random key is only between 1-10 at the moment; if the key is
    still too hot in production, the random range can be increased at the
    cost of additional queries during GC to locate expired users.

    """
    return Table.create(tablename,
                        schema=[HashKey("uaid")],
                        throughput=dict(read=read_throughput,
                                        write=write_throughput),
                        global_indexes=[
                            GlobalKeysOnlyIndex(
                                'AccessIndex',
                                parts=[HashKey('last_connect',
                                               data_type=NUMBER)],
                                throughput=dict(read=5, write=5))],
                        )
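The docstring above describes the (YEAR)(MONTH)(DAY)(HOUR)(0001-0010) scheme for the last_connect value; the helper below is a minimal sketch of how such a key could be generated, written for illustration here rather than taken from the autopush project.

# Illustrative sketch: build a numeric last_connect key of the form
# YYYYMMDDHH followed by a zero-padded random suffix between 0001 and 0010,
# spreading writes for a given hour across ten hash keys.
import random
from datetime import datetime

def generate_last_connect():
    prefix = datetime.utcnow().strftime("%Y%m%d%H")
    suffix = random.randint(1, 10)
    return int("%s%04d" % (prefix, suffix))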
Example #27
def create_db():
    logging.info('creating dynamodb table')

    try:
        table = Table.create(table_name, schema=[
            HashKey('id')
        ], throughput = { 'read' : 64, 'write' : 64 })

    except JSONResponseError as e:
        logging.error('exception: {}'.format(e.error_message))
        return -1

    # allow time for table creation
    #
    tries = 16
    for x in range(tries):
        time.sleep(1)
        t = table.describe()
        tstat = t['Table']['TableStatus']
        logging.info('{}/{}: table status: {}'.format(1+x, tries, tstat))
        if tstat == 'ACTIVE':
            logging.info('table created')
            return 1

    logging.error('create table failed')
    return -1
Example #28
def sendtodynamo_cnn(cnnjson):
  ''' Send json to DynamoDB
  Assumes that article timestamps have been deduped to avoid collisions
  '''

  conn = connect_to_region('us-west-2', aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
  
  hashkey = "CNN" # primary key to be used for DynamoDB table

  try:
    table = Table('CNN', connection=conn)
    table.describe()
  except boto.exception.JSONResponseError:
    print "Creating table"
    table = Table.create('CNN', schema=[HashKey('source'), RangeKey('tstamp',data_type=NUMBER)], throughput={'read':25, 'write':25}, indexes=[GlobalAllIndex('showidx',parts=[HashKey('show')],throughput={'read':10,'write':5})])

  iteration = 0
  for article in cnnjson:
    # Iterate through list of articles and upload to table
    rangekey = float(article['timestamp'])
    rowdata = {'source':hashkey,'tstamp':rangekey, 'cnnShow':article['show']}
    for key in article.keys():
      rowdata[key]=article[key]
    item = table.put_item(data = rowdata)
    iteration += 1
    if iteration%100==0:
      print "Uploaded "+iteration+" articles"

  return None
Example #29
def _create_table(table_name, schema, throughput):
    if _table_exists(table_name):
        logging.debug("using existing table: %s", table_name)
        return Table(table_name, schema=schema)
    else:
        logging.debug("creating table: %s", table_name)
        return Table.create(table_name, schema=schema, throughput=throughput)
Example #30
    def test_gsi(self):
        users = Table.create('gsi_users', schema=[
            HashKey('user_id'),
        ], throughput={
            'read': 5,
            'write': 3,
        },
        global_indexes=[
            GlobalKeysOnlyIndex('StuffIndex', parts=[
                HashKey('user_id')
            ], throughput={
                'read': 2,
                'write': 1,
            }),
        ])
        self.addCleanup(users.delete)

        # Wait for it.
        time.sleep(60)

        users.update(
            throughput={
                'read': 3,
                'write': 4
            },
            global_indexes={
                'StuffIndex': {
                    'read': 1,
                    'write': 2
                }
            }
        )

        # Wait again for the changes to finish propagating.
        time.sleep(150)
Example #31
    def table_maker(name, schema):
        _delete_table_if_exists(dynamodb_connection, name)
        throughput = {'read': 5, 'write': 5}
        table = Table.create(name,
                             schema=schema,
                             throughput=throughput,
                             connection=dynamodb_connection)

        def tear_down():
            _delete_table_if_exists(dynamodb_connection, name)

        request.addfinalizer(tear_down)

        return table
Example #32
def test_get_special_item():
    table = Table.create(
        "messages",
        schema=[HashKey("date-joined")],
        throughput={
            "read": 10,
            "write": 10
        },
    )

    data = {"date-joined": 127549192, "SentBy": "User A"}
    table.put_item(data=data)
    returned_item = table.get_item(**{"date-joined": 127549192})
    dict(returned_item).should.equal(data)
Example #33
def test_get_item_without_range_key():
    table = Table.create('messages', schema=[
        HashKey('test_hash'),
        RangeKey('test_range'),
    ], throughput={
        'read': 10,
        'write': 10,
    })

    hash_key = 3241526475
    range_key = 1234567890987
    table.put_item(data={'test_hash': hash_key, 'test_range': range_key})
    table.get_item.when.called_with(
        test_hash=hash_key).should.throw(ValidationException)
Example #34
def setupDDBTable(region, table_name):
    print('running setupDDBTable')

    conn = boto.dynamodb.connect_to_region(region)
    tables = conn.list_tables()
    check = [t for t in tables if t == table_name]
    conn = boto.dynamodb2.connect_to_region(region)
    if check:
        _table = Table(table_name,connection=conn)
    else:
        _table = Table.create(table_name,
                              schema=[HashKey('instanceId')
                              ],connection=conn)

    return _table
Example #35
    def test_create_table_no_range(self):
        self.storage_mocker.StubOutWithMock(storage, 'create_table')
        storage.create_table(IgnoreArg(), IgnoreArg(), IgnoreArg()).AndReturn(
            models.TableMeta(
                '00000000-0000-0000-0000-000000000000',
                models.TableSchema(
                    {
                        'hash': models.AttributeType('N'),
                        'indexed_field': models.AttributeType('S')
                    }, ['hash'], {
                        "index_name":
                        models.IndexDefinition('hash', 'indexed_field')
                    }), models.TableMeta.TABLE_STATUS_ACTIVE, None))
        self.storage_mocker.ReplayAll()

        Table.create("test",
                     schema=[
                         fields.HashKey('hash', data_type=schema_types.NUMBER),
                     ],
                     throughput={
                         'read': 20,
                         'write': 10,
                     },
                     indexes=[
                         fields.KeysOnlyIndex(
                             'index_name',
                             parts=[
                                 fields.HashKey('hash',
                                                data_type=schema_types.NUMBER),
                                 fields.RangeKey('indexed_field',
                                                 data_type=schema_types.STRING)
                             ])
                     ],
                     connection=self.DYNAMODB_CON)

        self.storage_mocker.VerifyAll()
Example #36
def create_router_table(tablename="router",
                        read_throughput=5,
                        write_throughput=5):
    """Create a new router table"""
    return Table.create(
        tablename,
        schema=[HashKey("uaid")],
        throughput=dict(read=read_throughput, write=write_throughput),
        global_indexes=[
            GlobalKeysOnlyIndex(
                'AccessIndex',
                parts=[HashKey('last_connect', data_type=NUMBER)],
                throughput=dict(read=5, write=5))
        ],
    )
Example #37
def etl_records(dynamodb_connection):
    avro_schema = get_avro_schema('mycroft/avro/etl_record.json')
    index_job_id = GlobalAllIndex(
        ETLRecords.INDEX_JOB_ID_AND_DATA_DATE,
        parts=[HashKey('job_id'), RangeKey('data_date')])
    table = Table.create('ETLRecords',
                         schema=NAME_TO_SCHEMA['etl_records'],
                         connection=dynamodb_connection,
                         global_indexes=[index_job_id])
    etl_records = ETLRecords(persistence_object=table,
                             avro_schema_object=avro_schema)
    for etl_record in SAMPLE_RECORD_JOBS:
        assert etl_records.put(**etl_record)
    yield etl_records
    assert table.delete()
Example #38
File: dynamo.py Project: kmcintyre/scewpt
 def create(self):
     Table.create(
         self.table_name,
         schema=[
             HashKey(communication_keys.websocket_key),
             RangeKey(communication_keys.websocket_ip)
         ],
         throughput=standard_throughput,
         indexes={
             AllIndex(self.index_start,
                      parts=[
                          HashKey(communication_keys.websocket_key),
                          RangeKey(communication_keys.websocket_ts_start,
                                   data_type=NUMBER)
                      ]),
             AllIndex(self.index_end,
                      parts=[
                          HashKey(communication_keys.websocket_key),
                          RangeKey(communication_keys.websocket_ts_end,
                                   data_type=NUMBER)
                      ])
         },
         global_indexes={})
     print 'creating connection table'
Example #39
    def get_hashes_archive_id(self, file_hash):
        """
        Get the archive_id corresponding to the file_hash
        """

        try:
            hash = hashes.get_item(file_hash=file_hash)
            return hash['archive_id']
        ## If the database doesn't exist, create it
        except JSONResponseError as e:
            if e.status == 400 and e.message == 'Requested resource not found':
                print('freezerbag_hashes table missing, creating now')
                Table.create('freezerbag_hashes',
                             schema=[HashKey('file_hash')])
                ## Leave some time for the table to get created
                time.sleep(30)
                return None
            ## Bail if we hit a JSON error we don't understand
            else:
                print(e.status)
                print(e.message)
                exit()
        except ItemNotFound:
            return None
Example #40
def test_conflicting_writes():
    table = Table.create('messages', schema=[
        HashKey('id'),
    ])

    item_data = {'id': '123', 'data': '678'}
    item1 = Item(table, item_data)
    item2 = Item(table, item_data)
    item1.save()

    item1['data'] = '579'
    item2['data'] = '912'

    item1.save()
    item2.save.when.called_with().should.throw(ConditionalCheckFailedException)
Example #41
def test_get_special_item():
    table = Table.create('messages',
                         schema=[HashKey('date-joined')],
                         throughput={
                             'read': 10,
                             'write': 10,
                         })

    data = {
        'date-joined': 127549192,
        'SentBy': 'User A',
    }
    table.put_item(data=data)
    returned_item = table.get_item(**{'date-joined': 127549192})
    dict(returned_item).should.equal(data)
Example #42
def createDevicesTable(db):
    try:
        devicesTable = Table.create("Devices",
                    schema=[HashKey("ManualName")],
                    throughput={
                        'read': 1,
                        'write': 1
                    },
                    connection=db)

    except JSONResponseError, jre:
        try:
            devicesTable = Table("Devices", connection=db)
        except Exception, e:
            print "Devices Table doesn't exist."
Example #43
def test_update_item_remove():
    conn = boto.dynamodb2.connect_to_region("us-east-1")
    table = Table.create("messages", schema=[HashKey("username")])

    data = {"username": "******", "SentBy": "User A", "SentTo": "User B"}
    table.put_item(data=data)
    key_map = {"username": {"S": "steve"}}

    # Then remove the SentBy field
    conn.update_item("messages",
                     key_map,
                     update_expression="REMOVE SentBy, SentTo")

    returned_item = table.get_item(username="******")
    dict(returned_item).should.equal({"username": "******"})
Example #44
def test_update_item_nested_remove():
    conn = boto.dynamodb2.connect_to_region("us-east-1")
    table = Table.create("messages", schema=[HashKey("username")])

    data = {"username": "******", "Meta": {"FullName": "Steve Urkel"}}
    table.put_item(data=data)
    key_map = {"username": {"S": "steve"}}

    # Then remove the Meta.FullName field
    conn.update_item("messages",
                     key_map,
                     update_expression="REMOVE Meta.FullName")

    returned_item = table.get_item(username="******")
    dict(returned_item).should.equal({"username": "******", "Meta": {}})
Example #45
File: fs.py Project: lunastorm/DynamoFS
    def createTable(self):
        provider = Provider('aws')
        connection = DynamoDBConnection(aws_access_key_id=provider.get_access_key(),
            aws_secret_access_key=provider.get_secret_key(), region=self.regionv2)
        self.blockTablev2 = Table.create(self.tableName + "Blocks",
            schema=[
                HashKey('blockId'),
                RangeKey('blockNum', data_type=NUMBER)
            ],
            throughput={'read': 30, 'write': 10},
            connection=connection
        )
        self.tablev2 = Table.create(self.tableName,
            schema=[
                HashKey('path'),
                RangeKey('name')
            ],
            throughput={'read': 30, 'write': 10},
            indexes=[
                KeysOnlyIndex("Links", parts=[
                    HashKey('path'),
                    RangeKey('link')
                ])
            ],
            connection=connection
        )

        description = connection.describe_table(self.tableName)
        iter = 0
        while description["Table"]["TableStatus"] != "ACTIVE":
            print "Waiting for %s to create %d..." % (self.tableName, iter)
            iter += 1
            sleep(1)
            description = connection.describe_table(self.tableName)
        self.table = self.conn.get_table(self.tableName)
        self.blockTable = self.conn.get_table(self.tableName + "Blocks")
Example #46
 def get_mail_table(self, domain):
     mail_table = 'smtp'
     s3_mail_table = Table(mail_table)
     try:
         print mail_table, 'count:', s3_mail_table.count()
     except:
         print 'creating:', mail_table
         s3_mail_table = Table.create(
             mail_table,
             schema=[HashKey('derived_to'),
                     RangeKey('derived_from')],
             throughput={
                 'read': 3,
                 'write': 3
             })
     return s3_mail_table
Example #47
def create_dynamodb_table(profile, region, table):
    """
    DynamoDB table creation with AWS Boto library in Python
    """

    connection = boto.dynamodb2.connect_to_region(region, profile_name=profile)
    aggregate = Table.create(table,
                             schema=[
                                 HashKey("BucketStart"),
                                 RangeKey("EventType"),
                             ],
                             throughput={
                                 'read': THROUGHPUT_READ,
                                 'write': THROUGHPUT_WRITE
                             },
                             connection=connection)
Example #48
    def setUp(self):
        # Connect to DynamoDB Local
        self.conn = DynamoDBConnection(host='localhost',
                                       port=8000,
                                       aws_secret_access_key='anything',
                                       is_secure=False)

        tables = self.conn.list_tables()
        if 'employees' not in tables['TableNames']:
            # Create table of employees
            self.employees = Table.create(
                'employees',
                schema=[HashKey('etype'), RangeKey('id')],
                indexes=[
                    AllIndex('TitleIndex',
                             parts=[HashKey('etype'),
                                    RangeKey('title')])
                ],
                connection=self.conn)
        else:
            self.employees = Table('employees', connection=self.conn)

        self.employeeData = [{
            'etype': 'E',
            'first_name': 'John',
            'last_name': 'Doe',
            'id': '123456789',
            'title': 'Head Bottle Washer',
            'hiredate': 'June 5 1986'
        }, {
            'etype': 'E',
            'first_name': 'Alice',
            'last_name': 'Kramden',
            'id': '007',
            'title': 'Assistant Bottle Washer',
            'hiredate': 'July 1 1950'
        }, {
            'etype': 'E',
            'first_name': 'Bob',
            'last_name': 'Dylan',
            'id': '42',
            'title': 'Assistant Bottle Washer',
            'hiredate': 'January 1 1970'
        }]

        for data in self.employeeData:
            self.employees.put_item(data=data, overwrite=True)
Example #49
 def _table(self, name, schema, read, write):
     table = Table(name, connection=self._dynamo)
     try:
         table.describe()
         return table
     except Exception as e:
         if e.error_code != 'ResourceNotFoundException':
             raise e
         logger.debug('Creating table %s', name)
         table = Table.create(name,
                              schema=schema,
                              throughput={
                                  'read': read,
                                  'write': write
                              },
                              connection=self._dynamo)
         return table
Example #50
def test_lookup():
    from decimal import Decimal
    table = Table.create('messages', schema=[
        HashKey('test_hash'),
        RangeKey('test_range'),
    ], throughput={
        'read': 10,
        'write': 10,
    })

    hash_key = 3241526475
    range_key = 1234567890987
    data = {'test_hash': hash_key, 'test_range': range_key}
    table.put_item(data=data)
    message = table.lookup(hash_key, range_key)
    message.get('test_hash').should.equal(Decimal(hash_key))
    message.get('test_range').should.equal(Decimal(range_key))
Example #51
def test_update_item_set():
    conn = boto.dynamodb2.connect_to_region("us-east-1")
    table = Table.create("messages", schema=[HashKey("username")])

    data = {"username": "******", "SentBy": "User A"}
    table.put_item(data=data)
    key_map = {"username": {"S": "steve"}}

    conn.update_item(
        "messages",
        key_map,
        update_expression="SET foo=:bar, blah=:baz REMOVE SentBy",
        expression_attribute_values={":bar": {"S": "bar"}, ":baz": {"S": "baz"}},
    )

    returned_item = table.get_item(username="******")
    dict(returned_item).should.equal({"username": "******", "foo": "bar", "blah": "baz"})
Example #52
 def setUpClass(cls):
     conn = boto.dynamodb2.connect_to_region('eu-west-1')
     #Delete 'hh_test_table' if it already exists.
     #While loop ensures control is returned only
     #when table is properly deleted.
     if 'hh_test_table' in conn.list_tables()['TableNames']:
         conn.delete_table('hh_test_table')
         while True:
             if 'hh_test_table' not in conn.list_tables()['TableNames']:
                 break
     #Create table 'hh_test_table'. While loop ensures thread of execution
     #regained only when table is active.
     schema = [HashKey('kind', data_type=STRING), RangeKey('id', data_type=STRING)]
     table = Table.create('hh_test_table', connection=conn, schema=schema, throughput={'read': 50, 'write': 50})
     while True:
         if table.describe()['Table']['TableStatus'] == 'ACTIVE':
             break
Example #53
def setupDDBTable(region, table_name):
    log.debug('running setupDDBTable')

    conn = boto.dynamodb.connect_to_region(region,proxy=boto.config.get('Boto', 'proxy'),
                                          proxy_port=boto.config.get('Boto', 'proxy_port'))
    tables = conn.list_tables()
    check = [t for t in tables if t == table_name]
    conn = boto.dynamodb2.connect_to_region(region,proxy=boto.config.get('Boto', 'proxy'),
                                          proxy_port=boto.config.get('Boto', 'proxy_port'))
    if check:
        _table = Table(table_name,connection=conn)
    else:
        _table = Table.create(table_name,
                              schema=[HashKey('instanceId')
                              ],connection=conn)

    return _table
Example #54
def test_reverse_query():
    conn = boto.dynamodb2.layer1.DynamoDBConnection()

    table = Table.create(
        'messages',
        schema=[HashKey('subject'),
                RangeKey('created_at', data_type='N')])

    for i in range(10):
        table.put_item({'subject': "Hi", 'created_at': i})

    results = table.query_2(subject__eq="Hi",
                            created_at__lt=6,
                            limit=4,
                            reverse=True)

    expected = [Decimal(5), Decimal(4), Decimal(3), Decimal(2)]
    [r['created_at'] for r in results].should.equal(expected)
Example #55
def create_remote_access_table(db):
    remote_access_table = None
    try:

        remote_access_table = Table.create("remote_access",
                                           schema=[HashKey("msp_id")],
                                           throughput={
                                               'read': 1,
                                               'write': 1
                                           },
                                           connection=db)
    except JSONResponseError as jre:
        try:
            remote_access_table = Table("remote_access", connection=db)
        except Exception as e:
            print e.message
    finally:
        return remote_access_table
Example #56
 def __init__(self, dbName):
     self.table_dynamo = None
     ####################################################################
     # YOUR CODE HERE
     #1. create new table
     try:
         self.table_dynamo = Table.create(
             dbName, schema=[HashKey('CUID')], connection=client_dynamo
         )  #HINT: Table.create; #HINT 2: Use CUID as your hashkey
         print("Wait 20 sec until the table is created")
         time.sleep(20)
         print("New Table Created")
     #2.table already exists, so get the table
     except Exception as e:
         self.table_dynamo = Table(
             dbName, connection=client_dynamo
         )  #HINT: Remember to use "connection=client.dynamo"
         print("Table Already Exists")
Example #57
    def test_query_with_limits(self):
        # Per the DDB team, it's recommended to do many smaller gets with a
        # reduced page size.
        # Clamp down the page size while ensuring that the correct number of
        # results are still returned.
        posts = Table.create('posts', schema=[
            HashKey('thread'),
            RangeKey('posted_on')
        ], throughput={
            'read': 5,
            'write': 5,
        })
        self.addCleanup(posts.delete)

        # Wait for it.
        time.sleep(60)

        # Add some data.
        test_data_path = os.path.join(
            os.path.dirname(__file__),
            'forum_test_data.json'
        )
        with open(test_data_path, 'r') as test_data:
            data = json.load(test_data)

            with posts.batch_write() as batch:
                for post in data:
                    batch.put_item(post)

        time.sleep(5)

        # Test the reduced page size.
        results = posts.query_2(
            thread__eq='Favorite chiptune band?',
            posted_on__gte='2013-12-24T00:00:00',
            max_page_size=2
        )

        all_posts = list(results)
        self.assertEqual(
            [post['posted_by'] for post in all_posts],
            ['joe', 'jane', 'joe', 'joe', 'jane', 'joe']
        )
        self.assertTrue(results._fetches >= 3)
Example #58
def test_query_with_global_indexes():
    table = Table.create('messages',
                         schema=[
                             HashKey('subject'),
                             RangeKey('version'),
                         ],
                         global_indexes=[
                             GlobalAllIndex('topic-created_at-index',
                                            parts=[
                                                HashKey('topic'),
                                                RangeKey('created_at',
                                                         data_type='N')
                                            ],
                                            throughput={
                                                'read': 6,
                                                'write': 1
                                            }),
                             GlobalAllIndex('status-created_at-index',
                                            parts=[
                                                HashKey('status'),
                                                RangeKey('created_at',
                                                         data_type='N')
                                            ],
                                            throughput={
                                                'read': 2,
                                                'write': 1
                                            })
                         ])

    item_data = {
        'subject': 'Check this out!',
        'version': '1',
        'created_at': 0,
        'status': 'inactive'
    }
    item = Item(table, item_data)
    item.save(overwrite=True)

    item['version'] = '2'
    item.save(overwrite=True)

    results = table.query(status__eq='active')
    list(results).should.have.length_of(0)
Example #59
    def pf_ddb_create_table(self, name):
        # creating a full table with all options specified.
        users = Table.create(name, schema=[
            HashKey('username'),
            RangeKey('friend_count', data_type=NUMBER)
        ], throughput={
            'read': 5,
            'write': 5,
        }, indexes=[
            KeysOnlyIndex('LastNameIndex', parts=[
                HashKey('username'),
                RangeKey('last_name')
            ]),
        ])

        # Wait for it.
        time.sleep(60)
        
        return json.dumps(name + " table created")
Example #60
    def create(connection):
        """Create dynamodb tables.
        """
        Table.create(config.table_name('metric_names'),
                     schema=[HashKey('domain'), RangeKey('name')],
                     throughput=Schema.metric_names_tp, connection=connection)
        Table.create(config.table_name('tag_names'),
                     schema=[HashKey('domain'), RangeKey('name')],
                     throughput=Schema.tag_names_tp, connection=connection)
        Table.create(config.table_name('tag_values'),
                     schema=[HashKey('domain'), RangeKey('value')],
                     throughput=Schema.tag_values_tp, connection=connection)

        DatapointsSchema.create(connection)