Example #1
    def put_item(self, data, overwrite=False):
        """
        Saves an entire item to DynamoDB.

        By default, if any part of the ``Item``'s original data doesn't match
        what's currently in DynamoDB, this request will fail. This prevents
        other processes from updating the data in between when you read the
        item & when your request to update the item's data is processed, which
        would typically result in some data loss.

        Requires a ``data`` parameter, which should be a dictionary of the data
        you'd like to store in DynamoDB.

        Optionally accepts an ``overwrite`` parameter, which should be a
        boolean. If you provide ``True``, this will tell DynamoDB to blindly
        overwrite whatever data is present, if any.

        Returns ``True`` on success.

        Example::

            >>> users.put_item(data={
            ...     'username': '******',
            ...     'first_name': 'Jane',
            ...     'last_name': 'Doe',
            ...     'date_joined': 126478915,
            ... })
            True

        """
        item = Item(self, data=data)
        return item.save(overwrite=overwrite)
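
A minimal sketch of the conditional behavior described in this docstring, assuming a hypothetical ``users`` table keyed by ``username``: the second ``put_item`` for an existing key fails unless ``overwrite=True`` is passed.

    from boto.dynamodb2.table import Table
    from boto.dynamodb2.exceptions import ConditionalCheckFailedException

    users = Table('users')  # hypothetical table with a 'username' hash key
    users.put_item(data={'username': 'jane', 'first_name': 'Jane'})

    try:
        # Fails: an item with this key already exists and overwrite defaults to False.
        users.put_item(data={'username': 'jane', 'first_name': 'Janet'})
    except ConditionalCheckFailedException:
        # Blindly replace whatever is currently stored under this key.
        users.put_item(data={'username': 'jane', 'first_name': 'Janet'}, overwrite=True)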
Example #2
    def flush(self):
        batch_data = {
            self.table.table_name: [
                # We'll insert data here shortly.
            ],
        }

        for put in self._to_put:
            item = Item(self.table, data=put)
            batch_data[self.table.table_name].append({
                'PutRequest': {
                    'Item': item.prepare_full(),
                }
            })

        for delete in self._to_delete:
            batch_data[self.table.table_name].append({
                'DeleteRequest': {
                    'Key': self.table._encode_keys(delete),
                }
            })

        resp = self.table.connection.batch_write_item(batch_data)
        self.handle_unprocessed(resp)

        self._to_put = []
        self._to_delete = []
        return True
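
In normal use this ``flush`` is not called directly; it runs when the ``Table.batch_write()`` context manager exits or its internal buffer fills. A short sketch of the calling side, assuming a hypothetical ``users`` table:

    from boto.dynamodb2.table import Table

    users = Table('users')  # hypothetical table

    with users.batch_write() as batch:
        batch.put_item(data={'username': 'jane', 'first_name': 'Jane'})
        batch.put_item(data={'username': 'john', 'first_name': 'John'})
        batch.delete_item(username='adam')
    # Leaving the block calls flush(), which issues a single batch_write_item request.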
Example #3
def main(args):
  conn=dynaconnect(args)
  tableName=args.table

  try:
    conn.describe_table(tableName)
  except boto.exception.JSONResponseError as details:
    if (details.error_code != "ResourceNotFoundException"):
      error("Error when connecting to DynamodDB",details.message)
    
    sys.stdout.write("Table does not exist, creating it")
    sys.stdout.flush()
    
    table = Table.create(tableName,
                         schema=[HashKey('name')],
                         global_indexes=[GlobalAllIndex('StacksByType',
                                                        parts=[HashKey('type')])],
                         connection=conn)

    while (table.describe()["Table"]["TableStatus"]!="ACTIVE"):
      time.sleep(1)
      sys.stdout.write('.')
      sys.stdout.flush()
    print("")
  else:
    table = Table(tableName,connection=conn)

  parameters = dict([x.strip() for x in line.strip().split("=")] for line in open(args.prop_file))
  additionals = dict([x.strip() for x in k.strip().split("=")] for k in args.key)

  dynamodata={'type':args.type, 'name':args.name, 'config':parameters}
  dynamodata.update(additionals)
  item=Item(table,data=dynamodata)

  item.save(overwrite=True)
Example #4
    def put_metrics(self, build_time_metrics):
        if not build_time_metrics:
            return None

        try:
            item = self.table.get_item(instance_type=_get_instance_type(self.localrun), config=self.benchmark_config)
            self.logger.debug("Found existing entity in dynamodb")
        except ItemNotFound:
            self.logger.debug("No existing entity found in dynamodb, creating new one")
            item = Item(self.table, data={'instance_type': _get_instance_type(self.localrun),
                                          'config': self.benchmark_config})

        build_time_json = item['build_time']

        if build_time_json:
            self.logger.debug("Extending existing metric list for build_time")
            # extend existing list
            build_time = json.loads(build_time_json)
            build_time.extend(build_time_metrics)
            item['build_time'] = json.dumps(build_time)
        else:
            item['build_time'] = json.dumps(build_time_metrics)

        if item.needs_save():
            item.partial_save()
            self.logger.debug("Saved item to dynamodb")
Example #5
    def _put_with_retries(self, boto_table, table_name, data, overwrite,
                          condition, vars, retry):
        boto_item = BotoItem(boto_table, data)

        # Use internal boto method to access to full AWS Dynamo capabilities
        final_data = boto_item.prepare_full()

        def try_function():
            expected = boto_item.build_expects(
            ) if overwrite is False else None
            return boto_table.connection.put_item(
                table_name,
                final_data,
                expected=expected,  # Don't overwrite
                condition_expression=condition,
                expression_attribute_values=vars)

        try:
            ret = self._attempt_throttled_operation(
                try_function,
                retry,
                boto_table,
                increased_throughput=get_double_writes(boto_table))
        except ConditionalCheckFailedException as e:
            raise self.ClariDynamoConditionCheckFailedException(
                str(e) + ' - ' + 'This could be due to a duplicate insertion.')
        return ret
Example #6
    def flush(self):
        batch_data = {
            self.table.table_name: [
                # We'll insert data here shortly.
            ],
        }

        for put in self._to_put:
            item = Item(self.table, data=put)
            batch_data[self.table.table_name].append(
                {'PutRequest': {
                    'Item': item.prepare_full(),
                }})

        for delete in self._to_delete:
            batch_data[self.table.table_name].append(
                {'DeleteRequest': {
                    'Key': self.table._encode_keys(delete),
                }})

        resp = self.table.connection.batch_write_item(batch_data)
        self.handle_unprocessed(resp)

        self._to_put = []
        self._to_delete = []
        return True
Example #7
 def delete(self):
     """Delete the current record matching the attribute with get_key_name()
     in the table get_table_name()
     """
     table = self.get_class_table()
     item = Item(table, data=self.get_item())
     return item.delete()
Example #8
 def deprecated__handle_truth( self, rs ):
     if self._mask is None:
         self._mask = rs.get_mask()
     rs.set_mask(self._mask)
     accuracy = rs.accuracy()
     with tempfile.SpooledTemporaryFile() as temp:
         np.save(temp, accuracy)
         temp.seek(0)
         conn = boto.connect_s3(  )
         bucket = conn.create_bucket( self.s3_results )
         k = Key(bucket)
         m = hashlib.md5()
         m.update(accuracy)
         md5 =  m.hexdigest()
         k.key = md5
         k.set_contents_from_file( temp )
     run_id = rs.get_run_id()
     try:
         item = Item( self.truth_table, {'run_id':run_id,
                 'strain_id': rs.spec_string} )
         item['accuracy_file'] = md5
         item['result_files'] =  base64.b64encode( json.dumps( 
             rs.get_result_files() ) )
         item['bucket'] = self.s3_results
         item['timestamp'] = datetime.datetime.utcnow().strftime('%Y.%m.%d-%H:%M:%S')
         item.save()
     except ConditionalCheckFailedException as ccfe:
         print "*"*20
         print ccfe
         if rs is not None:
             print  {'run_id':run_id,'strain_id': rs.spec_string}
             print rs.get_result_files()
Example #9
    def create_tables(self):
        '''
        () -> None

        Creates all the tables needed for the test environment.
        The tables created are populated with the test data found in the
        test_data.json file.
        '''

        # Create the tables for the tests
        super(dbTablesTest, self).create_tables()

        import os
        from commons import jsondecoder

        # Load the test data from the test_data.json file
        path_file = os.path.abspath(self.config['DB_TEST_DATA_PATH'])
        json_data = open(path_file).read()
        data = jsondecoder(json_data)

        # Save the data contained in the JSON file to the database.
        for key, value in data.items():
            table = self.tables[key]
            for item in value:
                if key == 'tbl_timeline':
                    if 'skills' in item:
                        item['skills'] = set(item['skills'])
                    if 'win_answers' in item:
                        item['win_answers'] = set(item['win_answers'])
                item = Item(table, data=item)
                item.save()
Example #10
    def put(self, key, value):
        '''Stores the object.'''
        table = self._table(key)

        value = self._wrap(table, key, value)
        item = Item(table, data=value)
        item.save(overwrite=True)
Example #11
def do_create(request, table, id, name, response):
    try:
        item = table.get_item(id=id)
        if item["name"] != name:
            response.status = 400
            return {
                "errors": [{
                    "id_exists": {
                        "status": "400",  # "Bad Request"
                        "title": "id already exists",
                        "detail": {
                            "name": item['name']
                        }
                    }
                }]
            }

    except ItemNotFound as inf:
        p = Item(table, data={'id': id, 'name': name, 'activities': set()})
        p.save()

    response.status = 201  # "Created"

    return {
        "data": {
            "type": "person",
            "id": id,
            "links": {
                "self":
                "{0}://{1}/users/{2}".format(request['urlparts']['scheme'],
                                             request['urlparts']['netloc'], id)
            }
        }
    }
Example #12
    def add_to_db(self):
        items_table = Table('items')

        for product in self.viable_products:
            temp_item = Item(items_table, data={
                'type':'iphone',
                'title':product[0],
                'itemId':product[1],
                'viewItemURL':product[2],
                'sellerUserName':product[3],
                'positiveFeedbackPercent':product[4],
                'feedbackRatingStar':product[5],
                'conditionId':product[6],
                'listingType':product[7],
                'currentPrice':product[8],
                'bidCount':product[9],
                'timeLeft':product[10],
                'endTime':product[11],
                'carrier':product[12],
                'storage':product[13],
                'model':product[14],
                'color':product[15],
                'pmresult':product[16],
            })

            temp_item.save(overwrite=True)

        print 'all set'
Example #13
class Item(ItemEngine):

    def __init__(self, collection, raw_item):
        ItemEngine.__init__(self, collection, raw_item)
        self.__item = raw_item

    @property
    def ddb_item(self):
        return self.__item

    def update(self, patch, context, updates):
        if patch:
            for k, v in iteritems(updates):
                self.__item[k] = v
            self.__item.partial_save()
        else:
            if context is None:
                self.__item = BotoItem(self.__item.table, updates)
                self.__item.save(True)
            else:
                context.put_item(self.__item.table, updates)

    def delete(self, index, context):
        if context is None:
            self.__item.delete()
        else:
            context.delete_item(
                self.__item.table,
                **(index.make_key_dict_from_dict(self.get_dict()))
            )

    def get_dict(self):
        return self.__item._data
Example #14
    def _scan(self, limit=None, exclusive_start_key=None, segment=None, total_segments=None, **filter_kwargs):
        """
        The internal method that performs the actual scan. Used extensively
        by ``ResultSet`` to perform each (paginated) request.
        """
        kwargs = {"limit": limit, "segment": segment, "total_segments": total_segments}

        if exclusive_start_key:
            kwargs["exclusive_start_key"] = {}

            for key, value in exclusive_start_key.items():
                kwargs["exclusive_start_key"][key] = self._dynamizer.encode(value)

        # Convert the filters into something we can actually use.
        kwargs["scan_filter"] = self._build_filters(filter_kwargs, using=FILTER_OPERATORS)

        raw_results = self.connection.scan(self.table_name, **kwargs)
        results = []
        last_key = None

        for raw_item in raw_results.get("Items", []):
            item = Item(self)
            item.load({"Item": raw_item})
            results.append(item)

        if raw_results.get("LastEvaluatedKey", None):
            last_key = {}

            for key, value in raw_results["LastEvaluatedKey"].items():
                last_key[key] = self._dynamizer.decode(value)

        return {"results": results, "last_key": last_key}
Example #15
    def _query(
        self, limit=None, index=None, reverse=False, consistent=False, exclusive_start_key=None, **filter_kwargs
    ):
        """
        The internal method that performs the actual queries. Used extensively
        by ``ResultSet`` to perform each (paginated) request.
        """
        kwargs = {"limit": limit, "index_name": index, "scan_index_forward": reverse, "consistent_read": consistent}

        if exclusive_start_key:
            kwargs["exclusive_start_key"] = {}

            for key, value in exclusive_start_key.items():
                kwargs["exclusive_start_key"][key] = self._dynamizer.encode(value)

        # Convert the filters into something we can actually use.
        kwargs["key_conditions"] = self._build_filters(filter_kwargs, using=QUERY_OPERATORS)

        raw_results = self.connection.query(self.table_name, **kwargs)
        results = []
        last_key = None

        for raw_item in raw_results.get("Items", []):
            item = Item(self)
            item.load({"Item": raw_item})
            results.append(item)

        if raw_results.get("LastEvaluatedKey", None):
            last_key = {}

            for key, value in raw_results["LastEvaluatedKey"].items():
                last_key[key] = self._dynamizer.decode(value)

        return {"results": results, "last_key": last_key}
Example #16
    def add_to_db(self):
        items_table = Table('items')

        for product in self.viable_products:
            temp_item = Item(items_table,
                             data={
                                 'type': 'iphone',
                                 'title': product[0],
                                 'itemId': product[1],
                                 'viewItemURL': product[2],
                                 'sellerUserName': product[3],
                                 'positiveFeedbackPercent': product[4],
                                 'feedbackRatingStar': product[5],
                                 'conditionId': product[6],
                                 'listingType': product[7],
                                 'currentPrice': product[8],
                                 'bidCount': product[9],
                                 'timeLeft': product[10],
                                 'endTime': product[11],
                                 'carrier': product[12],
                                 'storage': product[13],
                                 'model': product[14],
                                 'color': product[15],
                                 'pmresult': product[16],
                             })

            temp_item.save(overwrite=True)

        print 'all set'
Example #17
    def delete_tables(self, new_timestamp=None):
        """Delete the tables for this block.
        """
        if not new_timestamp:
            new_timestamp = self.tbase

        if self.data_points_table:
            # noinspection PyBroadException
            try:
                self.data_points_table.delete()
            except:
                pass
            self.data_points_table = None
            self.dp_writer = None
        if self.index_table:
            try:
                self.index_table.delete()
            except:
                pass
            self.index_table = None

        try:
            self.item.delete()
        except:
            pass

        self.item = Item(self.master, data=dict(self.item.items()))
        self.item['state'] = 'INITIAL'
        self.item['tbase'] = base_time(new_timestamp)
        self.item.save(overwrite=True)

        return self.state
Example #18
    def put_item(self, data, overwrite=False):
        """
        Saves an entire item to DynamoDB.

        By default, if any part of the ``Item``'s original data doesn't match
        what's currently in DynamoDB, this request will fail. This prevents
        other processes from updating the data in between when you read the
        item & when your request to update the item's data is processed, which
        would typically result in some data loss.

        Requires a ``data`` parameter, which should be a dictionary of the data
        you'd like to store in DynamoDB.

        Optionally accepts an ``overwrite`` parameter, which should be a
        boolean. If you provide ``True``, this will tell DynamoDB to blindly
        overwrite whatever data is present, if any.

        Returns ``True`` on success.

        Example::

            >>> users.put_item(data={
            ...     'username': '******',
            ...     'first_name': 'Jane',
            ...     'last_name': 'Doe',
            ...     'date_joined': 126478915,
            ... })
            True

        """
        item = Item(self, data=data)
        return item.save(overwrite=overwrite)
Example #19
 def route_email(self, ee):
     print 'bag it route_email:', ee.broadcast_dict[
         'derived_to'], 'from:', ee.broadcast_dict['derived_from']
     try:
         item = self.get_mail_table(ee.domain).query(
             derived_to__eq=ee.broadcast_dict['derived_to'],
             derived_from__eq=ee.broadcast_dict['derived_from'],
             limit=1).next()
         item['lastConnection'] = time.time()
         item['connectionsMade'] = item['connectionsMade'] + 1
         item['msg'] = item['msg'] + "," + ee.broadcast_dict['file_dest']
         item.save()
     except Exception as e:
         from boto.dynamodb2.items import Item
         print 'create item:', e
         try:
             now = time.time()
             item = Item(self.get_mail_table(ee.domain),
                         data={
                             'derived_to': ee.broadcast_dict['derived_to'],
                             'derived_from':
                             ee.broadcast_dict['derived_from'],
                             'firstConnection': now,
                             'lastConnection': now,
                             'connectionsMade': 1,
                             'msg': ee.broadcast_dict['file_dest']
                         })
             item.save()
         except Exception as e2:
             print e2
Example #20
def save_partition(part):

    for record in part:
        item = Item(
            out_table,
            data={"airport": record[0][0], "carrier": record[0][1], "mean_delay": int(record[1][0] / record[1][1])},
        )
        item.save(overwrite=True)
Example #21
def save_partition(part):
    for record in part:
        item = Item(out_table, data={
            "airport": record[0][0],
            "carrier": record[0][1],
            "average_delay": int(record[1][0] / record[1][1])
        })
        item.save(overwrite=True)
Example #22
def save_partition(part):
    for record in part:
        item = Item(out_table, data={
            "origin": record[0][0],
            "destination": record[0][1],
            "average_delay": int(record[1][0] / record[1][1])
        })
        item.save(overwrite=True)
Example #23
def get_state(table, project):
	try:
		return table.get_item(project=project, consistent=True)
	except ItemNotFound:
		state = Item(table, data={
			'project': project,
			'state': 'idle',
		})
		state.save()
Example #24
def get_state(table, project):
    try:
        return table.get_item(project=project, consistent=True)
    except ItemNotFound:
        state = Item(table, data={
            'project': project,
            'state': 'idle',
        })
        state.save()
Example #25
def save_partition(part):

    for record in part:
        item = Item(out_table, data={
            "origin": record[0][0],
            "destination": record[0][1],
            "mean_delay": int(record[1][0] / record[1][1])
        })
        item.save(overwrite=True)
Example #26
    def get_item(self, consistent=False, attributes=None, **kwargs):
        """
        Fetches an item (record) from a table in DynamoDB.

        To specify the key of the item you'd like to get, you can specify the
        key attributes as kwargs.

        Optionally accepts a ``consistent`` parameter, which should be a
        boolean. If you provide ``True``, it will perform
        a consistent (but more expensive) read from DynamoDB.
        (Default: ``False``)

        Optionally accepts an ``attributes`` parameter, which should be a
        list of fieldname to fetch. (Default: ``None``, which means all fields
        should be fetched)

        Returns an ``Item`` instance containing all the data for that record.

        Example::

            # A simple hash key.
            >>> john = users.get_item(username='******')
            >>> john['first_name']
            'John'

            # A complex hash+range key.
            >>> john = users.get_item(username='******', last_name='Doe')
            >>> john['first_name']
            'John'

            # A consistent read (assuming the data might have just changed).
            >>> john = users.get_item(username='******', consistent=True)
            >>> john['first_name']
            'Johann'

            # With a key that is an invalid variable name in Python.
            # Also, assumes a different schema than previous examples.
            >>> john = users.get_item(**{
            ...     'date-joined': 127549192,
            ... })
            >>> john['first_name']
            'John'

        """
        raw_key = self._encode_keys(kwargs)
        item_data = self.connection.get_item(
            self.table_name,
            raw_key,
            attributes_to_get=attributes,
            consistent_read=consistent
        )
        if 'Item' not in item_data:
            raise exceptions.ItemNotFound("Item %s couldn't be found." % kwargs)
        item = Item(self)
        item.load(item_data)
        return item
Example #27
File: table.py Project: B-Rich/boto
    def get_item(self, consistent=False, attributes=None, **kwargs):
        """
        Fetches an item (record) from a table in DynamoDB.

        To specify the key of the item you'd like to get, you can specify the
        key attributes as kwargs.

        Optionally accepts a ``consistent`` parameter, which should be a
        boolean. If you provide ``True``, it will perform
        a consistent (but more expensive) read from DynamoDB.
        (Default: ``False``)

        Optionally accepts an ``attributes`` parameter, which should be a
        list of fieldname to fetch. (Default: ``None``, which means all fields
        should be fetched)

        Returns an ``Item`` instance containing all the data for that record.

        Example::

            # A simple hash key.
            >>> john = users.get_item(username='******')
            >>> john['first_name']
            'John'

            # A complex hash+range key.
            >>> john = users.get_item(username='******', last_name='Doe')
            >>> john['first_name']
            'John'

            # A consistent read (assuming the data might have just changed).
            >>> john = users.get_item(username='******', consistent=True)
            >>> john['first_name']
            'Johann'

            # With a key that is an invalid variable name in Python.
            # Also, assumes a different schema than previous examples.
            >>> john = users.get_item(**{
            ...     'date-joined': 127549192,
            ... })
            >>> john['first_name']
            'John'

        """
        raw_key = self._encode_keys(kwargs)
        item_data = self.connection.get_item(
            self.table_name,
            raw_key,
            attributes_to_get=attributes,
            consistent_read=consistent
        )
        if 'Item' not in item_data:
            raise exceptions.ItemNotFound("Item %s couldn't be found." % kwargs)
        item = Item(self)
        item.load(item_data)
        return item
Example #28
 def add_new_isoc_cw_item(self, msp_id=None, credentials=None):
     now = str(datetime.now())
     item = Item(self.cm.get_msp_cw_table(),
                 data={
                     "msp_id": msp_id,
                     "credentials": credentials,
                     "created_at": str(datetime.now()),
                     "updated_at": str(datetime.now())
                 })
     return item.save()
Example #29
def saveToDynamo(filename):
    d = {}
    try:
        d = getSingleFileMetadata(filename)
    except:
        pass
    
    if len(d) > 0:
        newItem = Item(table_s3_metadata, data=d)
        newItem.save(overwrite=True)
Example #30
    def _query(self, limit=None, index=None, reverse=False, consistent=False,
               exclusive_start_key=None, select=None, attributes_to_get=None,
               **filter_kwargs):
        """
        The internal method that performs the actual queries. Used extensively
        by ``ResultSet`` to perform each (paginated) request.
        """
        kwargs = {
            'limit': limit,
            'index_name': index,
            'consistent_read': consistent,
            'select': select,
            'attributes_to_get': attributes_to_get,
        }

        if reverse:
            kwargs['scan_index_forward'] = False

        if exclusive_start_key:
            kwargs['exclusive_start_key'] = {}

            for key, value in exclusive_start_key.items():
                kwargs['exclusive_start_key'][key] = \
                    self._dynamizer.encode(value)

        # Convert the filters into something we can actually use.
        kwargs['key_conditions'] = self._build_filters(
            filter_kwargs,
            using=QUERY_OPERATORS
        )

        raw_results = self.connection.query(
            self.table_name,
            **kwargs
        )
        results = []
        last_key = None

        for raw_item in raw_results.get('Items', []):
            item = Item(self)
            item.load({
                'Item': raw_item,
            })
            results.append(item)

        if raw_results.get('LastEvaluatedKey', None):
            last_key = {}

            for key, value in raw_results['LastEvaluatedKey'].items():
                last_key[key] = self._dynamizer.decode(value)

        return {
            'results': results,
            'last_key': last_key,
        }
Example #31
    def save(self, items, overwrite=None):
        """
        Save models to dynamo

        Parameters
        ----------
        items : list or :class:`~flywheel.models.Model`
        overwrite : bool, optional
            If False, raise exception if item already exists (default set by
            :attr:`.default_conflict`)

        Raises
        ------
        exc : :class:`boto.dynamodb2.exceptions.ConditionalCheckFailedException`
            If overwrite is False and an item already exists in the database

        Notes
        -----
        Overwrite will replace the *entire* item with the new one, not just
        different fields. After calling save(overwrite=True) you are guaranteed
        that the item in the database is exactly the item you saved.

        Due to the structure of the AWS API, saving with overwrite=True is much
        faster because the requests can be batched.

        """
        if overwrite is None:
            overwrite = self.default_conflict in ('update', 'overwrite')
        if isinstance(items, Model):
            items = [items]
        if not items:
            return
        tables = defaultdict(list)
        for item in items:
            tables[item.meta_.ddb_tablename].append(item)
        for tablename, items in tables.iteritems():
            table = Table(tablename, connection=self.dynamo)
            if overwrite:
                with table.batch_write() as batch:
                    for item in items:
                        item.pre_save_(self)
                        batch.put_item(data=item.ddb_dump_())
                        item.post_save_()
            else:
                for item in items:
                    expected = {}
                    for name in item.meta_.fields:
                        expected[name] = {
                            'Exists': False,
                        }
                    item.pre_save_(self)
                    boto_item = Item(table, data=item.ddb_dump_())
                    self.dynamo.put_item(tablename, boto_item.prepare_full(),
                                         expected=expected)
                    item.post_save_()
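
A hedged usage sketch of the flywheel ``Engine.save`` call documented above, with a hypothetical ``User`` model: with ``overwrite=True`` the puts are batched, while ``overwrite=False`` adds an ``Exists: False`` expectation to each put and can raise ``ConditionalCheckFailedException``.

    from flywheel import Engine, Model, Field

    class User(Model):
        # Hypothetical model: 'username' is the hash key.
        username = Field(hash_key=True)
        email = Field()

    engine = Engine()
    engine.register(User)
    # (the engine must be pointed at a DynamoDB region/endpoint before saving)

    # Batched fast path: existing items with the same key are replaced wholesale.
    engine.save([User(username='jane', email='jane@example.com')], overwrite=True)

    # Conditional path: raises ConditionalCheckFailedException if 'jane' already exists.
    engine.save(User(username='jane', email='jane@new.example.com'), overwrite=False)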
Example #32
 def update_isoc_remote(self, msp_id, credentials):
     old_msp_item = self.has_item_by_mspid_remote(msp_id=msp_id)
     now = str(datetime.now())
     item = Item(self.cm.get_ra_table(),
                 data={
                     "msp_id": old_msp_item['msp_id'],
                     "credentials": credentials,
                     "created_at": old_msp_item['created_at'],
                     "updated_at": str(now)
                 })
     return item.save(overwrite=True)
Example #33
 def createGame(self, gameId, creator, invitee):
     now = str(datetime.now())
     statusDate = "PENDING_" + now
     item = Item(self.cm.getGameTable(),
                 data={
                     "GameId": gameId,
                     "HostId": creator,
                     "StatusDate": statusDate,
                     "OUser": creator,
                     "Turn": invitee,
                     "Opponent": invitee
                 })
     return item.save()
Example #34
def add_new_keg(tap, cost, volume, abv, beer_name):
    remove_current_keg(tap)
    new_keg = Item(kegs, data={
                'tap': str(tap),
                'start_timestamp': str(long(time.time())),
                'finish_timestamp': str(-1),
                'cost': str(cost),
                'volume': str(volume),
                'abv': str(abv),
                'beer_name': str(beer_name),
                'volume_remaining': str(volume)
                })
    new_keg.save()
Example #35
    def _create_post(self, data):
        '''(item) -> NoneType

        Helper function; creates an item in the timeline table.

        '''
        data['key_post'] = hashCreate()
        data['key_timeline_post'] = timeUTCCreate()
        post = Item(table_timeline, data)
        post.save()
        if not data.get('key_post_original'):
            cskill = Skill()
            cskill.post_skills_post(list(data['skills']), data['key_post'])
Example #36
 def update(cls, form_id, user_id, answer_json):
     answers_table = Table("answers")
     questions_with_answers = answer_json["questions"]
     for question_with_answer in questions_with_answers:
         item = Item(
             answers_table,
             data={
                 "form_question_id": cls.form_question_id(form_id, question_with_answer["question_id"]),
                 "answer": question_with_answer["answer"],
                 "user_id": user_id,
             },
         )
         item.save(overwrite=True)
Example #37
def add_entry(keycode_param):
    params = request.args
    try:
        item = Item(table,
                    data={
                        'keycode': keycode_param,
                        'action': params['action'],
                        'name': params['name']
                    })
    except KeyError as e:
        # better error?
        raise e
    item.save()
Example #38
    def _batch_get(self, keys, consistent=False):
        """
        The internal method that performs the actual batch get. Used extensively
        by ``BatchGetResultSet`` to perform each (paginated) request.
        """
        items = {
            self.table_name: {
                'Keys': [],
            },
        }

        if consistent:
            items[self.table_name]['ConsistentRead'] = True

        for key_data in keys:
            raw_key = {}

            for key, value in key_data.items():
                raw_key[key] = self._dynamizer.encode(value)

            items[self.table_name]['Keys'].append(raw_key)

        raw_results = self.connection.batch_get_item(request_items=items)
        results = []
        unprocessed_keys = []

        for raw_item in raw_results['Responses'].get(self.table_name, []):
            item = Item(self)
            item.load({
                'Item': raw_item,
            })
            results.append(item)

        raw_unproccessed = raw_results.get('UnprocessedKeys', {})

        for raw_key in raw_unproccessed.get('Keys', []):
            py_key = {}

            for key, value in raw_key.items():
                py_key[key] = self._dynamizer.decode(value)

            unprocessed_keys.append(py_key)

        return {
            'results': results,
            # NEVER return a ``last_key``. Just in-case any part of
            # ``ResultSet`` peeks through, since much of the
            # original underlying implementation is based on this key.
            'last_key': None,
            'unprocessed_keys': unprocessed_keys,
        }
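
Public callers go through ``Table.batch_get()``, which wraps this method in a ``BatchGetResultSet`` that transparently re-requests the ``unprocessed_keys`` returned here. A sketch, assuming a hypothetical ``users`` table keyed by ``username``:

    from boto.dynamodb2.table import Table

    users = Table('users')  # hypothetical table

    # Each dict is one full primary key; unprocessed keys are retried automatically.
    people = users.batch_get(keys=[
        {'username': 'jane'},
        {'username': 'john'},
    ], consistent=True)

    for person in people:
        print(person['first_name'])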
Example #39
    def _batch_get(self, keys, consistent=False):
        """
        The internal method that performs the actual batch get. Used extensively
        by ``BatchGetResultSet`` to perform each (paginated) request.
        """
        items = {
            self.table_name: {
                'Keys': [],
            },
        }

        if consistent:
            items[self.table_name]['ConsistentRead'] = True

        for key_data in keys:
            raw_key = {}

            for key, value in key_data.items():
                raw_key[key] = self._dynamizer.encode(value)

            items[self.table_name]['Keys'].append(raw_key)

        raw_results = self.connection.batch_get_item(request_items=items)
        results = []
        unprocessed_keys = []

        for raw_item in raw_results['Responses'].get(self.table_name, []):
            item = Item(self)
            item.load({
                'Item': raw_item,
            })
            results.append(item)

        raw_unproccessed = raw_results.get('UnprocessedKeys', {})

        for raw_key in raw_unproccessed.get('Keys', []):
            py_key = {}

            for key, value in raw_key.items():
                py_key[key] = self._dynamizer.decode(value)

            unprocessed_keys.append(py_key)

        return {
            'results': results,
            # NEVER return a ``last_key``. Just in-case any part of
            # ``ResultSet`` peeks through, since much of the
            # original underlying implementation is based on this key.
            'last_key': None,
            'unprocessed_keys': unprocessed_keys,
        }
Example #40
def createitem():
    users = Table('items')
    # WARNING - This doesn't save it yet!
    brush = Item(users, data={
        'rfid': '165',
        'pname': 'Toothpaste',
        'Price': '3$',
        'tray_status': '1',
    })

    # The data now gets persisted to the server.
    brush.save()
Example #41
File: table.py Project: B-Rich/boto
    def _scan(self, limit=None, exclusive_start_key=None, segment=None,
              total_segments=None, attributes=None, **filter_kwargs):
        """
        The internal method that performs the actual scan. Used extensively
        by ``ResultSet`` to perform each (paginated) request.
        """
        kwargs = {
            'limit': limit,
            'segment': segment,
            'total_segments': total_segments,
            'attributes_to_get': attributes,
        }

        if exclusive_start_key:
            kwargs['exclusive_start_key'] = {}

            for key, value in exclusive_start_key.items():
                kwargs['exclusive_start_key'][key] = \
                    self._dynamizer.encode(value)

        # Convert the filters into something we can actually use.
        kwargs['scan_filter'] = self._build_filters(
            filter_kwargs,
            using=FILTER_OPERATORS
        )

        raw_results = self.connection.scan(
            self.table_name,
            **kwargs
        )
        results = []
        last_key = None

        for raw_item in raw_results.get('Items', []):
            item = Item(self)
            item.load({
                'Item': raw_item,
            })
            results.append(item)

        if raw_results.get('LastEvaluatedKey', None):
            last_key = {}

            for key, value in raw_results['LastEvaluatedKey'].items():
                last_key[key] = self._dynamizer.decode(value)

        return {
            'results': results,
            'last_key': last_key,
        }
Example #42
def putRecord(fid, filename, desc, keysrc, keythb):
    """ Adds a new item to the DynamoDB table."""
    uid = "unique ID"
    timestamp = "timestamp" 
    new_item = Item(get_table(), data={
        'owner': 'Carlos',
        'uid': fid,
        'name': filename,
        'description': desc,
        'timestamp': datetime.today().strftime('%Y%m%d-%H%M%S-%f'),
        'source': keysrc,
        'thumbnail': keythb
    })
    new_item.save()
Example #43
    def _scan(self,
              limit=None,
              exclusive_start_key=None,
              segment=None,
              total_segments=None,
              attributes=None,
              **filter_kwargs):
        """
        The internal method that performs the actual scan. Used extensively
        by ``ResultSet`` to perform each (paginated) request.
        """
        kwargs = {
            'limit': limit,
            'segment': segment,
            'total_segments': total_segments,
            'attributes_to_get': attributes,
        }

        if exclusive_start_key:
            kwargs['exclusive_start_key'] = {}

            for key, value in exclusive_start_key.items():
                kwargs['exclusive_start_key'][key] = \
                    self._dynamizer.encode(value)

        # Convert the filters into something we can actually use.
        kwargs['scan_filter'] = self._build_filters(filter_kwargs,
                                                    using=FILTER_OPERATORS)

        raw_results = self.connection.scan(self.table_name, **kwargs)
        results = []
        last_key = None

        for raw_item in raw_results.get('Items', []):
            item = Item(self)
            item.load({
                'Item': raw_item,
            })
            results.append(item)

        if raw_results.get('LastEvaluatedKey', None):
            last_key = {}

            for key, value in raw_results['LastEvaluatedKey'].items():
                last_key[key] = self._dynamizer.decode(value)

        return {
            'results': results,
            'last_key': last_key,
        }
Example #44
def register_fob(fob_id, drinker_id):
    drinker = get_drinker(drinker_id)
    if not drinker:
        return False
    fob = get_fob(fob_id)
    if not fob:
        fob = Item(fobs, data={
                'fob_id': str(fob_id),
                'drinker_id': str(drinker_id)
                })
    fob['drinker_id'] = str(drinker_id)
    fob['fob_id'] = str(fob_id)
    fob.save()
    return True
Example #45
    def save(self):
        """Save the results of get_item in the table get_table_name() under the
        key identified by the field name get_key_name(). Note that we unconditionally
        overwrite the data and ignore the possibility someone else has written data
        for this subject.
        """
        all_errors = list(self.errors())
        if all_errors:
            raise InvalidDataObject(all_errors)

        table = self.get_class_table()
        data = self.get_item()
        logger.debug("SAVING with key %s data %s" % (self.get_key_name(), repr(data)))
        item = Item(table, data=data)
        item.save(overwrite=True)
Example #46
def main(args):
  conn=dynaconnect(args)
  tableName=args.table

  try:
    table=Table(tableName,connection=conn)
  except boto.exception.JSONResponseError as details:
    error("Error when connecting to DynamodDB",details.message)
    
  users = [[x.strip() for x in line.strip().split(",")] for line in open(args.file)]

  for user in users:
    dynamodata={'firstname':user[0], 'lastname':user[1], 'society':user[2]}
    item=Item(table,data=dynamodata)
    item.save(overwrite=True)
Example #47
 def add_new_isoc_remote_item(self,
                              msp_id=None,
                              remote_access_status=None,
                              action_type=None,
                              user_choice=None):
     now = str(datetime.now())
     item = Item(self.cm.get_ra_table(),
                 data={
                     "msp_id": msp_id,
                     "remote_access_status": remote_access_status,
                     "user_choice": user_choice,
                     "action_type": action_type,
                     "created_at": now,
                     "updated_at": now,
                 })
     return item.save()
Example #48
    def save(self, obj):
        """Required functionality."""
        if not obj.id:
            obj.id = uuid()

        stored_data = {'id': obj.id, 'value': obj.to_data()}

        index_vals = obj.indexes() or {}
        for key in obj.__class__.index_names() or []:
            val = index_vals.get(key, '')
            stored_data[key] = DynamoMappings.map_index_val(val)

        table = self.get_class_table(obj.__class__)
        item = Item(table, data=stored_data)

        item.save(overwrite=True)
Example #49
    def claim_shard_if_expired(self, shard):
        myshard = None
        last_sequence_number = None

        #get table
        table = self.get_table()

        #put, conditional on the time
        now = int(time.time())
        cutoff_time = now - self.heartbeat_timeout

        #query for items to see if they exist already
        shard_id = shard + "-" + self.app_id

        try:
            myitem = table.get_item(id=shard_id)
            savedtime = myitem['time']
            host = myitem['host']
            if savedtime < cutoff_time:
                #I claim this for Spain, since no heartbeat seen recently
                myitem['host'] = self.host_id
                myitem['time'] = now

                if myitem.save():
                    myshard = shard
                    last_sequence_number = myitem[
                        k_dynamodb_last_sequence_number]

        except ItemNotFound, e:
            #gah! This shard tracking item does not exist yet.  Got put in a new one
            newitem = Item(table,
                           data={
                               'id': shard_id,
                               'time': now,
                               'host': self.host_id
                           })
            try:
                newitem.save()
                #if we got here, then victory
                myshard = shard

                logging.warning('Shard %s did not exist yet... creating' %
                                shard_id)

            except ConditionalCheckFailedException, e:
                #argh, someone saved this item before me!  try another shard.
                foo = 3
Example #50
 def createNewGame(self, gameId, creator, invitee):
     now = str(datetime.now())
     statusDate = "PENDING_" + now
     try:
         item = Item(self.cm.getGamesTable(),
                     data={
                         "GameId": gameId,
                         "HostId": creator,
                         "OpponentId": invitee,
                         "StatusDate": statusDate,
                         "OUser": creator,
                         "Turn": invitee
                     })
     except Exception as ex:
         logger.debug(ex.msg)
         return None
     return item.save()
Example #51
def receiver():
    phone = request.form.get("phone")
    try:
        receiver = request_table.get_item(phone=phone)
    except ItemNotFound:
        receiver = Item(request_table, data={"phone": phone})
    values = json.loads(request.form['values'])
    receiver['vaccine_type'] = values[0]['value']
    receiver['number_of_vaccines'] = values[1]['value']
    reciever['Status'] = "Requested"
    receiver.save()
    loc = normalize_location(
        "%s, %s" % (receiver['location']['lat'], receiver['location']['lon']))

    closest = closest_locations(receiver['location'])
    if not len(closest):
        closest = ["+17173327758"]
        # return Response(json.dumps({"status" : "success", "results" : "None"}))
    try:
        closest.remove(phone)
    except:
        pass

    extra = {
        "lat": receiver['location']['lat'],
        "lon": receiver['location']['lon'],
        "location_english": loc['results'][0]['formatted_address'],
        "receiver_phone": phone,
        "number_of_vaccines": receiver['number_of_vaccines'],
        "vaccine_type": receiver['vaccine_type']
    }

    payload = {"flow_uuid": GIVER_FLOW_UUID, "phone": closest, "extra": extra}
    res = requests.post("https://api.rapidpro.io/api/v1/runs.json",
                        headers={
                            "Authorization": "Token %s" % RAPIDPRO_API_KEY,
                            'content-type': 'application/json'
                        },
                        data=json.dumps(payload))
    return Response(json.dumps({
        "status": "success",
        "response": res.json(),
        "closest": closest
    }),
                    mimetype="application/json")
Example #52
    def create_or_update_user(self, datos_twitter, access_token, token_secret):
        '''(dict or Item) -> bool

        Creates a new user, or updates it if it already exists.
        '''

        user = self.get_item(key_twitter=datos_twitter['key_twitter'])

        token = generate_token(hash_key=datos_twitter['key_twitter'],
                               access_token=access_token,
                               token_secret=token_secret)

        # Checks whether the user is already registered in the database;
        # if it does not exist it is created, otherwise it is updated.
        if not user:
            datos_twitter['registered'] = timeUTCCreate()
            datos_twitter['key_user'] = hashCreate()
            datos_twitter['token_user'] = token
            user = Item(table_user, datos_twitter)
        else:
            user._data['nickname'] = datos_twitter['nickname']
            user._data['name'] = datos_twitter['name']
            user._data['link_image'] = datos_twitter['link_image']
            user._data['token_user'] = token
        user.save()
        return user._data
Example #53
 def _get_safe_data(self, dictionary, checker=None):
     checker = checker or Item(self.table)
     data = {}
     for key in dictionary:
         if isinstance(dictionary[key], dict):
             data[key] = self._get_safe_data(dictionary[key], checker)
         elif checker._is_storable(dictionary[key]):
             data[key] = dictionary[key]
     return data
Example #54
    def save(self, obj):
        if not obj.id:
            obj.id = uuid()

        stored_data = {
            'id': obj.id,
            'value': obj.to_data()
        }

        index_vals = obj.indexes() or {}
        for key in obj.__class__.index_names() or []:
            val = index_vals.get(key, '')
            stored_data[key] = DynamoMappings.map_index_val(val)

        table = self.get_class_table(obj.__class__)
        item = Item(table, data=stored_data)

        item.save(overwrite=True)
Example #55
 def test_002_update_item(self):
     emp = self.employees.get_item(etype='E', id='123456789')
     emp['first_name'] = 'Jane'
     emp.save()
     emp = self.employees.get_item(etype='E', id='123456789')
     data = self.getEmployeeData('id', '123456789')[0]
     expected = Item(self.employees, data=data)
     expected['first_name'] = 'Jane'
     self.assertEqual(emp._data, expected._data)
Example #56
def ingest_alerts():
    alerts_table = Table('mbta_alerts')
    saFeed = gtfs_realtime_pb2.FeedMessage()
    saResponse = requests.get('https://cdn.mbta.com/realtime/Alerts.pb')
    saFeed.ParseFromString(saResponse.content)
    now_ts = time.time()
    alerts = []
    for entity in saFeed.entity:
        if entity.HasField('alert'):
            include_alert = False
            for informed in entity.alert.informed_entity:
                if informed.route_type <= 1:  # Subway/Green Line
                    include_alert = True
                    break
            if include_alert:
                include_alert = False
                for period in entity.alert.active_period:
                    # Include all future and current alerts
                    if period.end == 0 or now_ts < period.end:
                        include_alert = True
                        break

            if include_alert:
                alerts.append(entity)

    for entity in alerts:
        id = int(entity.id)
        alert = entity.alert

        sorted_active_periods = sorted(entity.alert.active_period, key=lambda period: period.start)
        current_period = None
        for period in sorted_active_periods:
            if now_ts > period.start and (now_ts < period.end or period.end == 0):
                current_period = period
                break

        if current_period == None:
            continue

        alert_item = None
        try:
            alert_item = alerts_table.get_item(alert_id=id)
        except exceptions.ItemNotFound:
            pass
        if not alert_item or alert_item['start'] != current_period.start:
            alert_item = Item(alerts_table, data={
                'alert_id': id,
                'start': current_period.start,
                'end': current_period.end,
                'future': (current_period.start > now_ts),
            })

            send_and_save_event(alert_item, alert, current_period)
        elif alert_item['future'] == True and alert_item['start'] < now_ts:
            alert_item['future'] = False
            send_and_save_event(alert_item, alert, current_period)
Example #57
def makeItem():
    item = Item(twitter, data={
        'user_type': "standard_user",
        'post_time': time.ctime(),
        'username': sys.argv[1][0:10],
        # posts are limited to 140 characters
        'post': sys.argv[2][0:140],
        'hashtaglist': set(hashtaglist)})
    return item
Example #58
class DBItem(object):
    def __init__(self, table):
        self._table = table
        self._item = Item(table.get_table(), data=self.get_attrs())

    def get_attrs(self):
        item_list = {}
        for key, value in self.__dict__.iteritems():
            if not key.startswith('_'):
                item_list[key] = value
        return item_list

    def save(self, **kwargs):
        self._item.save(**kwargs)

    def insert(self, **kwargs):
        return self._table.put_item(data=self._item._data)

    def delete(self):
        return self._item.delete()
Example #59
def activate(table, project, playbook, progress_str):
    print('.', end='')

    try:
        state = table.get_item(project=project, consistent=True)
    except ItemNotFound:
        state = Item(table, data={
            'project': project,
            'state': 'idle',
        })

    if state['state'] == 'blocked':
        raise ProjectBlockedException()

    if state['state'] == 'active':
        raise ProjectActiveException()

    state['state'] = 'active'
    state.partial_save()
    return state