Example #1
    def add_to_db(self):
        items_table = Table('items')

        for product in self.viable_products:
            temp_item = Item(items_table,
                             data={
                                 'type': 'iphone',
                                 'title': product[0],
                                 'itemId': product[1],
                                 'viewItemURL': product[2],
                                 'sellerUserName': product[3],
                                 'positiveFeedbackPercent': product[4],
                                 'feedbackRatingStar': product[5],
                                 'conditionId': product[6],
                                 'listingType': product[7],
                                 'currentPrice': product[8],
                                 'bidCount': product[9],
                                 'timeLeft': product[10],
                                 'endTime': product[11],
                                 'carrier': product[12],
                                 'storage': product[13],
                                 'model': product[14],
                                 'color': product[15],
                                 'pmresult': product[16],
                             })

            temp_item.save(overwrite=True)

        print 'all set'
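Every snippet in this listing builds ``Item`` objects from ``boto.dynamodb2.items`` against an already-created ``Table``. A minimal setup sketch for that shared pattern, reusing the 'items' table name from Example #1; the key schema and attribute values are illustrative assumptions:

from boto.dynamodb2.table import Table
from boto.dynamodb2.items import Item

items_table = Table('items')  # existing table created elsewhere
item = Item(items_table, data={'itemId': '123', 'title': 'iPhone 5, 16GB'})
item.save(overwrite=True)     # blind put, the same call add_to_db() makes above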
Example #2
 def route_email(self, ee):
     print 'bag it route_email:', ee.broadcast_dict[
         'derived_to'], 'from:', ee.broadcast_dict['derived_from']
     try:
         item = self.get_mail_table(ee.domain).query(
             derived_to__eq=ee.broadcast_dict['derived_to'],
             derived_from__eq=ee.broadcast_dict['derived_from'],
             limit=1).next()
         item['lastConnection'] = time.time()
         item['connectionsMade'] = item['connectionsMade'] + 1
         item['msg'] = item['msg'] + "," + ee.broadcast_dict['file_dest']
         item.save()
     except Exception as e:
         from boto.dynamodb2.items import Item
         print 'create item:', e
         try:
             now = time.time()
             item = Item(self.get_mail_table(ee.domain),
                         data={
                             'derived_to': ee.broadcast_dict['derived_to'],
                             'derived_from':
                             ee.broadcast_dict['derived_from'],
                             'firstConnection': now,
                             'lastConnection': now,
                             'connectionsMade': 1,
                             'msg': ee.broadcast_dict['file_dest']
                         })
             item.save()
         except Exception as e2:
             print e2
Example #3
    def delete_tables(self, new_timestamp=None):
        """Delete the tables for this block.
        """
        if not new_timestamp:
            new_timestamp = self.tbase

        if self.data_points_table:
            # noinspection PyBroadException
            try:
                self.data_points_table.delete()
            except:
                pass
            self.data_points_table = None
            self.dp_writer = None
        if self.index_table:
            try:
                self.index_table.delete()
            except:
                pass
            self.index_table = None

        try:
            self.item.delete()
        except:
            pass

        self.item = Item(self.master, data=dict(self.item.items()))
        self.item['state'] = 'INITIAL'
        self.item['tbase'] = base_time(new_timestamp)
        self.item.save(overwrite=True)

        return self.state
Example #4
    def flush(self):
        batch_data = {
            self.table.table_name: [
                # We'll insert data here shortly.
            ],
        }

        for put in self._to_put:
            item = Item(self.table, data=put)
            batch_data[self.table.table_name].append(
                {'PutRequest': {
                    'Item': item.prepare_full(),
                }})

        for delete in self._to_delete:
            batch_data[self.table.table_name].append(
                {'DeleteRequest': {
                    'Key': self.table._encode_keys(delete),
                }})

        resp = self.table.connection.batch_write_item(batch_data)
        self.handle_unprocessed(resp)

        self._to_put = []
        self._to_delete = []
        return True
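Example #4 is the internal flush step of a batch-write buffer. In boto.dynamodb2 the usual entry point is the ``batch_write()`` context manager, which queues puts and deletes and flushes them for you. A usage sketch, assuming an existing 'users' table with hash key 'username':

from boto.dynamodb2.table import Table

users = Table('users')  # assumed existing table
with users.batch_write() as batch:
    batch.put_item(data={'username': 'jane', 'first_name': 'Jane'})
    batch.delete_item(username='john')
# Leaving the ``with`` block calls flush(), which sends the queued
# PutRequest/DeleteRequest entries in one batch_write_item call and passes
# any unprocessed items to handle_unprocessed(), as shown above.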
Example #5
    def put_item(self, data, overwrite=False):
        """
        Saves an entire item to DynamoDB.

        By default, if any part of the ``Item``'s original data doesn't match
        what's currently in DynamoDB, this request will fail. This prevents
        other processes from updating the data in between when you read the
        item & when your request to update the item's data is processed, which
        would typically result in some data loss.

        Requires a ``data`` parameter, which should be a dictionary of the data
        you'd like to store in DynamoDB.

        Optionally accepts an ``overwrite`` parameter, which should be a
        boolean. If you provide ``True``, this will tell DynamoDB to blindly
        overwrite whatever data is present, if any.

        Returns ``True`` on success.

        Example::

            >>> users.put_item(data={
            ...     'username': '******',
            ...     'first_name': 'Jane',
            ...     'last_name': 'Doe',
            ...     'date_joined': 126478915,
            ... })
            True

        """
        item = Item(self, data=data)
        return item.save(overwrite=overwrite)
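The docstring's example shows the default, conditional write; passing ``overwrite=True`` skips that check, which is exactly what the final ``item.save(overwrite=overwrite)`` call forwards. A short sketch reusing the hypothetical ``users`` table from the docstring; the attribute values are illustrative:

# Blind write: replaces whatever is already stored under this key, if anything.
users.put_item(data={
    'username': 'jane',
    'first_name': 'Jane',
    'last_name': 'Doe',
    'date_joined': 126478915,
}, overwrite=True)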
Example #6
def save_partition(part):

    for record in part:

        fl_xy = record[1][0]
        fl_yz = record[1][1]
        route = fl_xy.Origin + '-' + fl_xy.Dest + '-' + fl_yz.Dest
        depdate = record[0][0]

        item_new = Item(out_table,
                        data={
                            "route": route,
                            "depdate": depdate,
                            "flight_xy": fl_xy.UniqueCarrier + str(fl_xy.FlightNum),
                            "flight_yz": fl_yz.UniqueCarrier + str(fl_yz.FlightNum),
                            "total_delay": int(fl_xy.DepDelay + fl_xy.ArrDelay +
                                               fl_yz.DepDelay + fl_yz.ArrDelay)
                        })

        # check old item delay
        try:
            item_old = out_table.get_item(route=route, depdate=depdate)
            if (item_old['total_delay'] > item_new['total_delay']):
                item_new.save(overwrite=True)
        except:
            item_new.save(overwrite=True)
Example #7
def do_create(request, table, id, name, response):
    try:
        item = table.get_item(id=id)
        if item["name"] != name:
            response.status = 400
            return {
                "errors": [{
                    "id_exists": {
                        "status": "400",  # "Bad Request"
                        "title": "id already exists",
                        "detail": {
                            "name": item['name']
                        }
                    }
                }]
            }

    except ItemNotFound as inf:
        p = Item(table, data={'id': id, 'name': name, 'activities': set()})
        p.save()

    response.status = 201  # "Created"

    return {
        "data": {
            "type": "person",
            "id": id,
            "links": {
                "self":
                "{0}://{1}/users/{2}".format(request['urlparts']['scheme'],
                                             request['urlparts']['netloc'], id)
            }
        }
    }
Example #8
def save_partition(part):
    for record in part:
        item = Item(out_table, data={
            "airport": record[0][0],
            "carrier": record[0][1],
            "average_delay": int(record[1][0] / record[1][1])
        })
        item.save(overwrite=True)
Example #9
def save_partition(part):
    for record in part:
        item = Item(out_table, data={
            "origin": record[0][0],
            "destination": record[0][1],
            "average_delay": int(record[1][0] / record[1][1])
        })
        item.save(overwrite=True)
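Examples #8 and #9 are PySpark partition writers: each executor iterates its slice of the RDD and writes one ``Item`` per record. A hedged sketch of how such a function is typically driven; the SparkContext ``sc``, the sample pairs, and the table wiring are assumptions:

# Pairs of ((origin, dest), (total_delay, count)); the values are made up.
# Note: out_table must be constructible on each executor, not just the driver.
pairs = sc.parallelize([(('BOS', 'JFK'), (1250.0, 40)),
                        (('BOS', 'ORD'), (980.0, 35))])
pairs.foreachPartition(save_partition)  # one DynamoDB writer per partition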
Example #10
 def _get_safe_data(self, dictionary, checker=None):
     checker = checker or Item(self.table)
     data = {}
     for key in dictionary:
         if isinstance(dictionary[key], dict):
             data[key] = self._get_safe_data(dictionary[key], checker)
         elif checker._is_storable(dictionary[key]):
             data[key] = dictionary[key]
     return data
Example #11
 def test_002_update_item(self):
     emp = self.employees.get_item(etype='E', id='123456789')
     emp['first_name'] = 'Jane'
     emp.save()
     emp = self.employees.get_item(etype='E', id='123456789')
     data = self.getEmployeeData('id', '123456789')[0]
     expected = Item(self.employees, data=data)
     expected['first_name'] = 'Jane'
     self.assertEqual(emp._data, expected._data)
Example #12
def get_state(table, project):
    try:
        return table.get_item(project=project, consistent=True)
    except ItemNotFound:
        state = Item(table, data={
            'project': project,
            'state': 'idle',
        })
        state.save()
        return state
Example #13
def ingest_alerts():
    alerts_table = Table('mbta_alerts')
    saFeed = gtfs_realtime_pb2.FeedMessage()
    saResponse = requests.get('https://cdn.mbta.com/realtime/Alerts.pb')
    saFeed.ParseFromString(saResponse.content)
    now_ts = time.time()
    alerts = []
    for entity in saFeed.entity:
        if entity.HasField('alert'):
            include_alert = False
            for informed in entity.alert.informed_entity:
                if informed.route_type <= 1:  # Subway/Green Line
                    include_alert = True
                    break
            if include_alert:
                include_alert = False
                for period in entity.alert.active_period:
                    # Include all future and current alerts
                    if period.end == 0 or now_ts < period.end:
                        include_alert = True
                        break

            if include_alert:
                alerts.append(entity)

    for entity in alerts:
        id = int(entity.id)
        alert = entity.alert

        sorted_active_periods = sorted(entity.alert.active_period, key=lambda period: period.start)
        current_period = None
        for period in sorted_active_periods:
            if now_ts > period.start and (now_ts < period.end or period.end == 0):
                current_period = period
                break

        if current_period is None:
            continue

        alert_item = None
        try:
            alert_item = alerts_table.get_item(alert_id=id)
        except exceptions.ItemNotFound:
            pass
        if not alert_item or alert_item['start'] != current_period.start:
            alert_item = Item(alerts_table, data={
                'alert_id': id,
                'start': current_period.start,
                'end': current_period.end,
                'future': (current_period.start > now_ts),
            })

            send_and_save_event(alert_item, alert, current_period)
        elif alert_item['future'] and alert_item['start'] < now_ts:
            alert_item['future'] = False
            send_and_save_event(alert_item, alert, current_period)
Example #14
    def get_item(self, consistent=False, attributes=None, **kwargs):
        """
        Fetches an item (record) from a table in DynamoDB.

        To specify the key of the item you'd like to get, you can specify the
        key attributes as kwargs.

        Optionally accepts a ``consistent`` parameter, which should be a
        boolean. If you provide ``True``, it will perform
        a consistent (but more expensive) read from DynamoDB.
        (Default: ``False``)

        Optionally accepts an ``attributes`` parameter, which should be a
        list of fieldname to fetch. (Default: ``None``, which means all fields
        should be fetched)

        Returns an ``Item`` instance containing all the data for that record.

        Example::

            # A simple hash key.
            >>> john = users.get_item(username='******')
            >>> john['first_name']
            'John'

            # A complex hash+range key.
            >>> john = users.get_item(username='******', last_name='Doe')
            >>> john['first_name']
            'John'

            # A consistent read (assuming the data might have just changed).
            >>> john = users.get_item(username='******', consistent=True)
            >>> john['first_name']
            'Johann'

            # With a key that is an invalid variable name in Python.
            # Also, assumes a different schema than previous examples.
            >>> john = users.get_item(**{
            ...     'date-joined': 127549192,
            ... })
            >>> john['first_name']
            'John'

        """
        raw_key = self._encode_keys(kwargs)
        item_data = self.connection.get_item(
            self.table_name,
            raw_key,
            attributes_to_get=attributes,
            consistent_read=consistent
        )
        if 'Item' not in item_data:
            raise exceptions.ItemNotFound("Item %s couldn't be found." % kwargs)
        item = Item(self)
        item.load(item_data)
        return item
Example #15
def makeItem():
    # Usernames are truncated to 10 characters, posts to 140.
    item = Item(twitter, data={
        'user_type': "standard_user",
        'post_time': time.ctime(),
        'username': sys.argv[1][0:10],
        'post': sys.argv[2][0:140],
        'hashtaglist': set(hashtaglist)
    })
    return item
Example #16
def saveToDynamo(filename):
    d = {}
    try:
        d = getSingleFileMetadata(filename)
    except:
        pass
    
    if len(d) > 0:
        newItem = Item(table_s3_metadata, data=d)
        newItem.save(overwrite=True)
Example #17
 def add_new_isoc_cw_item(self, msp_id=None, credentials=None):
     now = str(datetime.now())
     item = Item(self.cm.get_msp_cw_table(),
                 data={
                     "msp_id": msp_id,
                     "credentials": credentials,
                     "created_at": str(datetime.now()),
                     "updated_at": str(datetime.now())
                 })
     return item.save()
Example #18
    def _query(self, limit=None, index=None, reverse=False, consistent=False,
               exclusive_start_key=None, select=None, attributes_to_get=None,
               **filter_kwargs):
        """
        The internal method that performs the actual queries. Used extensively
        by ``ResultSet`` to perform each (paginated) request.
        """
        kwargs = {
            'limit': limit,
            'index_name': index,
            'consistent_read': consistent,
            'select': select,
            'attributes_to_get': attributes_to_get,
        }

        if reverse:
            kwargs['scan_index_forward'] = False

        if exclusive_start_key:
            kwargs['exclusive_start_key'] = {}

            for key, value in exclusive_start_key.items():
                kwargs['exclusive_start_key'][key] = \
                    self._dynamizer.encode(value)

        # Convert the filters into something we can actually use.
        kwargs['key_conditions'] = self._build_filters(
            filter_kwargs,
            using=QUERY_OPERATORS
        )

        raw_results = self.connection.query(
            self.table_name,
            **kwargs
        )
        results = []
        last_key = None

        for raw_item in raw_results.get('Items', []):
            item = Item(self)
            item.load({
                'Item': raw_item,
            })
            results.append(item)

        if raw_results.get('LastEvaluatedKey', None):
            last_key = {}

            for key, value in raw_results['LastEvaluatedKey'].items():
                last_key[key] = self._dynamizer.decode(value)

        return {
            'results': results,
            'last_key': last_key,
        }
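Example #18 is the private worker behind querying; callers normally go through ``Table.query_2`` (as Example #27 does), which builds the ``__eq``-style key conditions shown here and paginates via a ``ResultSet``. A usage sketch with assumed table and attribute names:

from boto.dynamodb2.table import Table

# Assumed schema: hash key 'account_type', range key 'last_name'.
users = Table('users')
results = users.query_2(
    account_type__eq='standard_user',
    last_name__beginswith='D',
    reverse=True,
    limit=10,
)
for item in results:
    print(item['last_name'])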
Example #19
 def update_isoc_remote(self, msp_id, credentials):
     old_msp_item = self.has_item_by_mspid_remote(msp_id=msp_id)
     now = str(datetime.now())
     item = Item(self.cm.get_ra_table(),
                 data={
                     "msp_id": old_msp_item['msp_id'],
                     "credentials": credentials,
                     "created_at": old_msp_item['created_at'],
                     "updated_at": str(now)
                 })
     return item.save(overwrite=True)
Example #20
    def test_node_clear_fail(self):
        r = get_router_table()
        router = Router(r, SinkMetrics())

        def raise_condition(*args, **kwargs):
            raise ConditionalCheckFailedException(None, None)

        router.table.connection.put_item = Mock()
        router.table.connection.put_item.side_effect = raise_condition
        data = dict(uaid=dummy_uaid, node_id="asdf", connected_at=1234)
        result = router.clear_node(Item(r, data))
        eq_(result, False)
Example #21
    def new_item(self, *args):
        """
        Returns a new, blank item

        This is mostly for consistency with boto.dynamodb
        """
        if not self.schema:
            self.describe()
        data = {}
        for x, arg in enumerate(args):
            data[self.schema[x].name] = arg
        return Item(self, data=data)
Example #22
def add_entry(keycode_param):
    params = request.args
    try:
        item = Item(table,
                    data={
                        'keycode': keycode_param,
                        'action': params['action'],
                        'name': params['name']
                    })
    except KeyError as e:
        # better error?
        raise e
    item.save()
Example #23
 def createGame(self, gameId, creator, invitee):
     now = str(datetime.now())
     statusDate = "PENDING_" + now
     item = Item(self.cm.getGameTable(),
                 data={
                     "GameId": gameId,
                     "HostId": creator,
                     "StatusDate": statusDate,
                     "OUser": creator,
                     "Turn": invitee,
                     "Opponent": invitee
                 })
     return item.save()
Example #24
    def test_clear_node_provision_failed(self):
        r = get_router_table()
        router = Router(r, SinkMetrics())
        router.table.connection.put_item = Mock()

        def raise_error(*args, **kwargs):
            raise ProvisionedThroughputExceededException(None, None)

        router.table.connection.put_item.side_effect = raise_error
        with self.assertRaises(ProvisionedThroughputExceededException):
            router.clear_node(
                Item(r, dict(uaid="asdf", connected_at="1234",
                             node_id="asdf")))
Example #25
    def _batch_get(self, keys, consistent=False):
        """
        The internal method that performs the actual batch get. Used extensively
        by ``BatchGetResultSet`` to perform each (paginated) request.
        """
        items = {
            self.table_name: {
                'Keys': [],
            },
        }

        if consistent:
            items[self.table_name]['ConsistentRead'] = True

        for key_data in keys:
            raw_key = {}

            for key, value in key_data.items():
                raw_key[key] = self._dynamizer.encode(value)

            items[self.table_name]['Keys'].append(raw_key)

        raw_results = self.connection.batch_get_item(request_items=items)
        results = []
        unprocessed_keys = []

        for raw_item in raw_results['Responses'].get(self.table_name, []):
            item = Item(self)
            item.load({
                'Item': raw_item,
            })
            results.append(item)

        raw_unproccessed = raw_results.get('UnprocessedKeys', {})

        for raw_key in raw_unproccessed.get('Keys', []):
            py_key = {}

            for key, value in raw_key.items():
                py_key[key] = self._dynamizer.decode(value)

            unprocessed_keys.append(py_key)

        return {
            'results': results,
            # NEVER return a ``last_key``. Just in-case any part of
            # ``ResultSet`` peeks through, since much of the
            # original underlying implementation is based on this key.
            'last_key': None,
            'unprocessed_keys': unprocessed_keys,
        }
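The public counterpart of Example #25 is ``Table.batch_get`` (used in Example #27): it takes a list of key dictionaries, yields ``Item`` objects, and feeds any ``unprocessed_keys`` back into the result set. A sketch with assumed key names:

from boto.dynamodb2.table import Table

users = Table('users')  # assumed table with hash key 'username'
many = users.batch_get(keys=[
    {'username': 'jane'},
    {'username': 'john'},
    {'username': 'fred'},
])
for item in many:
    print(item['first_name'])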
Example #26
    def _scan(self,
              limit=None,
              exclusive_start_key=None,
              segment=None,
              total_segments=None,
              attributes=None,
              **filter_kwargs):
        """
        The internal method that performs the actual scan. Used extensively
        by ``ResultSet`` to perform each (paginated) request.
        """
        kwargs = {
            'limit': limit,
            'segment': segment,
            'total_segments': total_segments,
            'attributes_to_get': attributes,
        }

        if exclusive_start_key:
            kwargs['exclusive_start_key'] = {}

            for key, value in exclusive_start_key.items():
                kwargs['exclusive_start_key'][key] = \
                    self._dynamizer.encode(value)

        # Convert the filters into something we can actually use.
        kwargs['scan_filter'] = self._build_filters(filter_kwargs,
                                                    using=FILTER_OPERATORS)

        raw_results = self.connection.scan(self.table_name, **kwargs)
        results = []
        last_key = None

        for raw_item in raw_results.get('Items', []):
            item = Item(self)
            item.load({
                'Item': raw_item,
            })
            results.append(item)

        if raw_results.get('LastEvaluatedKey', None):
            last_key = {}

            for key, value in raw_results['LastEvaluatedKey'].items():
                last_key[key] = self._dynamizer.decode(value)

        return {
            'results': results,
            'last_key': last_key,
        }
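Example #26 backs ``Table.scan``, which accepts the same double-underscore filter operators as queries but walks the whole table rather than a key range. A usage sketch with assumed table and attribute names:

from boto.dynamodb2.table import Table

users = Table('users')  # assumed table
all_does = users.scan(last_name__beginswith='D', limit=25)
for item in all_does:
    print(item['username'])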
Example #27
    def get_instance_assignments(self, service):
        """Get instances and assignments for a service
        :param service:  Service name.
        :return: Map of instances of assignments (None if unassigned).
        """
        live_instances = []
        dead_instances = []
        dead_cutoff = time.time() - INSTANCE_EXPIRY
        for instance_status in self._status.query_2(
                service__eq=service,
                attributes=('instance_id', 'status_time')):
            instance_id = instance_status['instance_id']
            if instance_status['status_time'] < dead_cutoff:
                dead_instances.append(instance_id)
            else:
                live_instances.append(instance_id)

        if dead_instances:
            logger.debug('Removing %d dead instances.', len(dead_instances))
            with self._status.batch_write() as status_batch:
                for dead_instance in dead_instances:
                    status_batch.delete_item(service=service,
                                             instance_id=dead_instance)
            with self._assignments.batch_write() as assignment_batch:
                for dead_instance in dead_instances:
                    assignment_batch.delete_item(instance_id=dead_instance)

        assignments = defaultdict(list)
        if not live_instances:
            return assignments

        unassigned = set(live_instances)
        keys = [{'instance_id': i} for i in live_instances]
        for assignment in self._assignments.batch_get(
                keys=keys, attributes=('instance_id', 'assignment')):
            assigned = assignment['assignment']
            instance_id = assignment['instance_id']
            unassigned.remove(instance_id)
            assignments[assigned].append(assignment)

        assignments[None] = [
            Item(self._assignments,
                 data={
                     'instance_id': instance_id,
                     'service': service
                 }) for instance_id in unassigned
        ]

        return assignments
Example #28
    def save(self, obj):
        """Required functionality."""
        if not obj.id:
            obj.id = uuid()

        stored_data = {'id': obj.id, 'value': obj.to_data()}

        index_vals = obj.indexes() or {}
        for key in obj.__class__.index_names() or []:
            val = index_vals.get(key, '')
            stored_data[key] = DynamoMappings.map_index_val(val)

        table = self.get_class_table(obj.__class__)
        item = Item(table, data=stored_data)

        item.save(overwrite=True)
Example #29
 def _get_item_for_record(self, record):
     if record._item:
         item = record._item
         data = record.get_dict()
         for key in data:
             if item._is_storable(data[key]) or key in item:
                 # only copy storable fields or those we want to reset
                 # for example, if item['name']='Bob' we can set it to ''
                 # but if item didn't have 'name' field then we should
                 # not set it (dynamodb does not allow empty fields)
                 item[key] = data[key]
         return item
     else:
         data = self._get_safe_data(record.get_dict())
         item = Item(self.table, data=data)
         return item
Example #30
 def add_new_isoc_remote_item(self,
                              msp_id=None,
                              remote_access_status=None,
                              action_type=None,
                              user_choice=None):
     now = str(datetime.now())
     item = Item(self.cm.get_ra_table(),
                 data={
                     "msp_id": msp_id,
                     "remote_access_status": remote_access_status,
                     "user_choice": user_choice,
                     "action_type": action_type,
                     "created_at": now,
                     "updated_at": now,
                 })
     return item.save()