def put(self, key, value):
    """Store *value* under *key*, overwriting any existing item."""
    table = self._table(key)
    wrapped = self._wrap(table, key, value)
    Item(table, data=wrapped).save(overwrite=True)
def create_tables(self): ''' () -> None Permite crear todas las tablas necesarias para el entorno de pruebas. Las tablas creadas seran llenadas con datos de prueba que se encuentran en el archivo test_data.json. ''' #Creacion de las tablas para los test super(dbTablesTest, self).create_tables() import os from commons import jsondecoder #cargar los datos de prueba del archivo test_data.json path_file = os.path.abspath(self.config['DB_TEST_DATA_PATH']) json_data = open(path_file).read() data = jsondecoder(json_data) #guardar los datos contenidos en el archivo json en la base de datos. for key, value in data.items(): table = self.tables[key] for item in value: if key == 'tbl_timeline': if 'skills' in item: item['skills'] = set(item['skills']) if 'win_answers' in item: item['win_answers'] = set(item['win_answers']) item = Item(table, data=item) item.save()
def add_to_db(self): items_table = Table('items') for product in self.viable_products: temp_item = Item(items_table, data={ 'type': 'iphone', 'title': product[0], 'itemId': product[1], 'viewItemURL': product[2], 'sellerUserName': product[3], 'positiveFeedbackPercent': product[4], 'feedbackRatingStar': product[5], 'conditionId': product[6], 'listingType': product[7], 'currentPrice': product[8], 'bidCount': product[9], 'timeLeft': product[10], 'endTime': product[11], 'carrier': product[12], 'storage': product[13], 'model': product[14], 'color': product[15], 'pmresult': product[16], }) temp_item.save(overwrite=True) print 'all set'
def create_or_update_user(self, datos_twitter, access_token, token_secret):
    '''(dict, str, str) -> dict
    Creates a new user, or updates it if it already exists.
    Returns the saved item's underlying data dict.
    '''
    user = self.get_item(key_twitter=datos_twitter['key_twitter'])
    token = generate_token(hash_key=datos_twitter['key_twitter'],
                           access_token=access_token,
                           token_secret=token_secret)
    # Check whether the user is already registered in the database:
    # create it if missing, otherwise refresh its mutable fields.
    if not user:
        datos_twitter['registered'] = timeUTCCreate()
        datos_twitter['key_user'] = hashCreate()
        datos_twitter['token_user'] = token
        user = Item(table_user, datos_twitter)
    else:
        # NOTE(review): writes go through the boto Item's private _data dict.
        user._data['nickname'] = datos_twitter['nickname']
        user._data['name'] = datos_twitter['name']
        user._data['link_image'] = datos_twitter['link_image']
        user._data['token_user'] = token
    user.save()
    return user._data
def save_partition(part):
    """Persist the best (lowest total delay) two-leg itinerary per (route, depdate).

    Each record is ((depdate, ...), (flight_xy, flight_yz)). The new item only
    replaces an existing one when it has a strictly smaller total_delay.

    Fix: the bare ``except:`` also trapped SystemExit/KeyboardInterrupt;
    narrowed to ``except Exception`` (item-not-found is the expected case).
    """
    for record in part:
        fl_xy = record[1][0]
        fl_yz = record[1][1]
        route = fl_xy.Origin + '-' + fl_xy.Dest + '-' + fl_yz.Dest
        depdate = record[0][0]
        item_new = Item(out_table, data={
            "route": route,
            "depdate": depdate,
            "flight_xy": fl_xy.UniqueCarrier + str(fl_xy.FlightNum),
            "flight_yz": fl_yz.UniqueCarrier + str(fl_yz.FlightNum),
            "total_delay": int(fl_xy.DepDelay + fl_xy.ArrDelay +
                               fl_yz.DepDelay + fl_yz.ArrDelay)
        })
        # Compare against any previously stored delay for this key.
        try:
            item_old = out_table.get_item(route=route, depdate=depdate)
            if item_old['total_delay'] > item_new['total_delay']:
                item_new.save(overwrite=True)
        except Exception:
            # No existing item (or it is unreadable): write unconditionally.
            item_new.save(overwrite=True)
def do_create(request, table, id, name, response):
    """Create a person item when *id* is unused (201); if *id* already exists
    with a different name, report a 400 error payload instead."""
    try:
        existing = table.get_item(id=id)
    except ItemNotFound:
        # New id: persist a fresh person with an empty activity set.
        person = Item(table, data={'id': id, 'name': name, 'activities': set()})
        person.save()
        response.status = 201  # "Created"
        self_url = "{0}://{1}/users/{2}".format(request['urlparts']['scheme'],
                                                request['urlparts']['netloc'],
                                                id)
        return {
            "data": {
                "type": "person",
                "id": id,
                "links": {"self": self_url}
            }
        }
    if existing["name"] != name:
        response.status = 400
        return {
            "errors": [{
                "id_exists": {
                    "status": "400",  # "Bad Request"
                    "title": "id already exists",
                    "detail": {"name": existing['name']}
                }
            }]
        }
def deprecated__handle_truth( self, rs ):
    """Upload a result-set's accuracy array to S3 (content-addressed by md5)
    and record the run's metadata in the DynamoDB truth table.

    DEPRECATED (per the name). The ConditionalCheckFailedException handler
    only prints diagnostics; the failed write is otherwise swallowed.
    """
    if self._mask is None:
        self._mask = rs.get_mask()
    rs.set_mask(self._mask)
    accuracy = rs.accuracy()
    # Serialize the accuracy array to a spooled temp file, then stream to S3.
    with tempfile.SpooledTemporaryFile() as temp:
        np.save(temp, accuracy)
        temp.seek(0)
        conn = boto.connect_s3( )
        bucket = conn.create_bucket( self.s3_results )
        k = Key(bucket)
        m = hashlib.md5()
        m.update(accuracy)
        md5 = m.hexdigest()
        # content-addressed key: identical accuracy arrays share one S3 object
        k.key = md5
        k.set_contents_from_file( temp )
        run_id = rs.get_run_id()
        try:
            item = Item( self.truth_table, {'run_id':run_id, 'strain_id': rs.spec_string} )
            item['accuracy_file'] = md5
            item['result_files'] = base64.b64encode( json.dumps( rs.get_result_files() ) )
            item['bucket'] = self.s3_results
            item['timestamp'] = datetime.datetime.utcnow().strftime('%Y.%m.%d-%H:%M:%S')
            item.save()
        except ConditionalCheckFailedException as ccfe:
            # Conditional write lost a race; dump context for debugging only.
            print "*"*20
            print ccfe
            if rs is not None:
                print {'run_id':run_id,'strain_id': rs.spec_string}
                print rs.get_result_files()
def route_email(self, ee):
    """Route an email event: update the existing (to, from) mail-table row,
    or create it on first contact.

    Deliberately best-effort: any failure on the update path (including a
    missing row) falls through to creation, and creation failures are only
    printed.
    """
    print 'bag it route_email:', ee.broadcast_dict[
        'derived_to'], 'from:', ee.broadcast_dict['derived_from']
    try:
        # Grab the first matching row for this (to, from) pair.
        item = self.get_mail_table(ee.domain).query(
            derived_to__eq=ee.broadcast_dict['derived_to'],
            derived_from__eq=ee.broadcast_dict['derived_from'],
            limit=1).next()
        item['lastConnection'] = time.time()
        item['connectionsMade'] = item['connectionsMade'] + 1
        # msg accumulates a comma-separated list of file destinations.
        item['msg'] = item['msg'] + "," + ee.broadcast_dict['file_dest']
        item.save()
    except Exception as e:
        from boto.dynamodb2.items import Item
        print 'create item:', e
        try:
            now = time.time()
            item = Item(self.get_mail_table(ee.domain), data={
                'derived_to': ee.broadcast_dict['derived_to'],
                'derived_from': ee.broadcast_dict['derived_from'],
                'firstConnection': now,
                'lastConnection': now,
                'connectionsMade': 1,
                'msg': ee.broadcast_dict['file_dest']
            })
            item.save()
        except Exception as e2:
            print e2
class Item(ItemEngine):
    """Adapter wrapping a boto DynamoDB item behind the ItemEngine interface."""

    def __init__(self, collection, raw_item):
        ItemEngine.__init__(self, collection, raw_item)
        # The underlying boto.dynamodb2.items.Item.
        self.__item = raw_item

    @property
    def ddb_item(self):
        # Expose the raw boto item for callers needing direct access.
        return self.__item

    def update(self, patch, context, updates):
        """Apply *updates*: partial_save when *patch* is truthy, otherwise a
        full overwrite (directly, or via *context* when one is supplied)."""
        if patch:
            for k, v in iteritems(updates):
                self.__item[k] = v
            self.__item.partial_save()
        else:
            if context is None:
                self.__item = BotoItem(self.__item.table, updates)
                self.__item.save(True)
            else:
                # NOTE(review): self.__table is never assigned anywhere in this
                # class (it name-mangles to _Item__table), so this branch very
                # likely raises AttributeError -- confirm whether
                # self.__item.table was intended.
                context.put_item(self.__table, updates)

    def delete(self, index, context):
        """Delete the item, directly or through the supplied *context*."""
        if context is None:
            self.__item.delete()
        else:
            # NOTE(review): same unassigned self.__table concern as update().
            context.delete_item(
                self.__table,
                **(index.make_key_dict_from_dict(self.get_dict()))
            )

    def get_dict(self):
        # Relies on the boto Item's private _data dict.
        return self.__item._data
def add_to_db(self):
    """Upsert every viable product into the 'items' table as an iPhone
    record; product is a positional tuple (indices 0-16)."""
    items_table = Table('items')
    for product in self.viable_products:
        temp_item = Item(items_table, data={
            'type':'iphone',
            'title':product[0],
            'itemId':product[1],
            'viewItemURL':product[2],
            'sellerUserName':product[3],
            'positiveFeedbackPercent':product[4],
            'feedbackRatingStar':product[5],
            'conditionId':product[6],
            'listingType':product[7],
            'currentPrice':product[8],
            'bidCount':product[9],
            'timeLeft':product[10],
            'endTime':product[11],
            'carrier':product[12],
            'storage':product[13],
            'model':product[14],
            'color':product[15],
            'pmresult':product[16],
        })
        # overwrite=True: re-running replaces any previous row for this key.
        temp_item.save(overwrite=True)
    print 'all set'
def main(args):
    """Upsert one stack-config item (type/name/config plus extra keys),
    creating the backing DynamoDB table on first use."""
    conn=dynaconnect(args)
    tableName=args.table
    try:
        conn.describe_table(tableName)
    except boto.exception.JSONResponseError as details:
        # Any error other than "table missing" is fatal.
        if (details.error_code != "ResourceNotFoundException"):
            error("Error when connecting to DynamodDB",details.message)
        sys.stdout.write("Table does not exist, creating it")
        sys.stdout.flush()
        table = Table.create(tableName, schema=[ HashKey('name') ],
                             global_indexes=[GlobalAllIndex('StacksByType', parts=[HashKey('type')])],
                             connection=conn)
        # Poll until the freshly created table becomes ACTIVE.
        while (table.describe()["Table"]["TableStatus"]!="ACTIVE"):
            time.sleep(1)
            sys.stdout.write('.')
            sys.stdout.flush()
        print("")
    else:
        table = Table(tableName,connection=conn)
    # key=value lines from the properties file become the 'config' map.
    parameters = dict([x.strip() for x in line.strip().split("=")] for line in open(args.prop_file))
    # Extra key=value pairs from the command line are merged in at top level.
    additionals = dict([x.strip() for x in k.strip().split("=")] for k in args.key)
    dynamodata={'type':args.type, 'name':args.name, 'config':parameters}
    dynamodata.update(additionals)
    item=Item(table,data=dynamodata)
    item.save(overwrite=True)
def save_partition(part):
    """Write one (airport, carrier) -> mean_delay item per record, overwriting."""
    for record in part:
        key, agg = record[0], record[1]
        payload = {
            "airport": key[0],
            "carrier": key[1],
            # agg is (delay_sum, count); store the integer mean
            "mean_delay": int(agg[0] / agg[1]),
        }
        Item(out_table, data=payload).save(overwrite=True)
def save_partition(part):
    """Write one (origin, destination) -> average_delay item per record."""
    for record in part:
        key = record[0]
        total, count = record[1][0], record[1][1]
        row = Item(out_table, data={
            "origin": key[0],
            "destination": key[1],
            "average_delay": int(total / count),
        })
        row.save(overwrite=True)
def save_partition(part):
    """Write one (airport, carrier) -> average_delay item per record."""
    for record in part:
        delay_sum = record[1][0]
        delay_count = record[1][1]
        Item(out_table, data={
            "airport": record[0][0],
            "carrier": record[0][1],
            "average_delay": int(delay_sum / delay_count),
        }).save(overwrite=True)
def get_state(table, project):
    """Return the state item for *project*, creating an 'idle' one if absent.

    Fix: the ItemNotFound branch previously saved the new item but fell off
    the end, returning None to the caller; it now returns the created item.
    """
    try:
        return table.get_item(project=project, consistent=True)
    except ItemNotFound:
        state = Item(table, data={
            'project': project,
            'state': 'idle',
        })
        state.save()
        return state
def save_partition(part):
    """Write one (origin, destination) -> mean_delay item per record."""
    for record in part:
        pair, stats = record[0], record[1]
        data = {"origin": pair[0],
                "destination": pair[1],
                "mean_delay": int(stats[0] / stats[1])}
        item = Item(out_table, data=data)
        item.save(overwrite=True)
def saveToDynamo(filename):
    """Extract metadata for *filename* and upsert it into the S3-metadata table.

    Fix: the bare ``except: pass`` also swallowed SystemExit and
    KeyboardInterrupt; narrowed to ``except Exception`` while keeping the
    best-effort behavior (files with unreadable metadata are skipped).
    """
    d = {}
    try:
        d = getSingleFileMetadata(filename)
    except Exception:
        pass
    if len(d) > 0:
        newItem = Item(table_s3_metadata, data=d)
        newItem.save(overwrite=True)
def _create_post(self, data):
    '''(dict) -> NoneType
    Helper: creates an item in the timeline table, then (for original posts
    only) registers the post's skills.
    '''
    data['key_post'] = hashCreate()
    data['key_timeline_post'] = timeUTCCreate()
    post = Item(table_timeline, data)
    post.save()
    # Only original posts (no key_post_original) get their skills indexed.
    if not data.get('key_post_original'):
        cskill = Skill()
        cskill.post_skills_post(list(data['skills']), data['key_post'])
def update(cls, form_id, user_id, answer_json):
    """Upsert one row per answered question into the 'answers' table."""
    answers_table = Table("answers")
    for qa in answer_json["questions"]:
        row = {
            "form_question_id": cls.form_question_id(form_id, qa["question_id"]),
            "answer": qa["answer"],
            "user_id": user_id,
        }
        Item(answers_table, data=row).save(overwrite=True)
def add_entry(keycode_param):
    """Build a keycode entry from the request's query parameters and save it."""
    params = request.args
    try:
        entry = Item(table, data={
            'keycode': keycode_param,
            'action': params['action'],
            'name': params['name'],
        })
    except KeyError as e:
        # better error? (a required query parameter is missing)
        raise e
    entry.save()
def add_new_keg(tap, cost, volume, abv, beer_name):
    """Swap the keg on *tap* for a freshly tapped one.

    All fields are stored as strings; finish_timestamp of '-1' marks the keg
    as still active, and volume_remaining starts equal to volume.
    """
    remove_current_keg(tap)
    fields = {
        'tap': str(tap),
        'start_timestamp': str(long(time.time())),
        'finish_timestamp': str(-1),
        'cost': str(cost),
        'volume': str(volume),
        'abv': str(abv),
        'beer_name': str(beer_name),
        'volume_remaining': str(volume),
    }
    Item(kegs, data=fields).save()
def putRecord(fid, filename, desc, keysrc, keythb):
    """Adds a new item to the DynamoDB table.

    Fix: removed the dead locals ``uid`` and ``timestamp`` -- they were
    assigned placeholder strings and never used (the real uid comes from
    *fid* and the timestamp is generated inline).
    """
    new_item = Item(get_table(), data={
        'owner': 'Carlos',
        'uid': fid,
        'name': filename,
        'description': desc,
        'timestamp': datetime.today().strftime('%Y%m%d-%H%M%S-%f'),
        'source': keysrc,
        'thumbnail': keythb
    })
    new_item.save()
def register_fob(fob_id, drinker_id):
    """Associate *fob_id* with *drinker_id*.

    Returns False when the drinker is unknown, True after a successful save.

    Fix: on the update path drinker_id was stored unconverted
    (``fob['drinker_id'] = (drinker_id)``) while the create path stored
    ``str(drinker_id)``; both paths now store the string form consistently.
    """
    drinker = get_drinker(drinker_id)
    if not drinker:
        return False
    fob = get_fob(fob_id)
    if not fob:
        fob = Item(fobs, data={
            'fob_id': str(fob_id),
            'drinker_id': str(drinker_id)
        })
    fob['drinker_id'] = str(drinker_id)
    fob['fob_id'] = str(fob_id)
    fob.save()
    return True
def createitem():
    """Persist one hard-coded toothpaste record to the 'items' table."""
    items_table = Table('items')
    # WARNING - constructing the Item does not save it yet!
    toothpaste = Item(items_table, data={
        'rfid': '165',
        'pname': 'Toothpaste',
        'Price': '3$',
        'tray_status': '1',
    })
    # The data now gets persisted to the server.
    toothpaste.save()
def save(self):
    """Save the results of get_item in the table get_table_name() under the
    key identified by the field name get_key_name().

    Note that we unconditionally overwrite the data and ignore the
    possibility someone else has written data for this subject.

    Raises InvalidDataObject when validation errors are present.

    Fix: the raise previously referenced the undefined name ``errors``
    (NameError); it now passes the collected ``all_errors`` list.
    """
    all_errors = list(self.errors())
    if all_errors:
        raise InvalidDataObject(all_errors)
    table = self.get_class_table()
    data = self.get_item()
    logger.debug("SAVING with key %s data %s" % (self.get_key_name(), repr(data)))
    item = Item(table, data=data)
    item.save(overwrite=True)
def main(args):
    """Bulk-load users from a CSV file (firstname,lastname,society per line)
    into the given DynamoDB table, overwriting existing items."""
    conn=dynaconnect(args)
    tableName=args.table
    try:
        table=Table(tableName,connection=conn)
    except boto.exception.JSONResponseError as details:
        error("Error when connecting to DynamodDB",details.message)
    # NOTE(review): the input file handle is never closed explicitly;
    # consider a with-statement.
    users = [[x.strip() for x in line.strip().split(",")] for line in open(args.file)]
    for user in users:
        dynamodata={'firstname':user[0], 'lastname':user[1], 'society':user[2]}
        item=Item(table,data=dynamodata)
        item.save(overwrite=True)
def save(self, obj):
    """Required functionality."""
    # Assign a fresh id on first save.
    if not obj.id:
        obj.id = uuid()
    record = {'id': obj.id, 'value': obj.to_data()}
    # One stored attribute per declared index ('' when the object omits it).
    declared = obj.__class__.index_names() or []
    available = obj.indexes() or {}
    for index_name in declared:
        record[index_name] = DynamoMappings.map_index_val(
            available.get(index_name, ''))
    Item(self.get_class_table(obj.__class__), data=record).save(overwrite=True)
def put_item(self, data, overwrite=False):
    """
    Saves an entire item to DynamoDB.

    By default, if any part of the ``Item``'s original data doesn't match
    what's currently in DynamoDB, this request will fail. This prevents
    other processes from updating the data in between when you read the
    item & when your request to update the item's data is processed, which
    would typically result in some data loss.

    Requires a ``data`` parameter, which should be a dictionary of the data
    you'd like to store in DynamoDB.

    Optionally accepts an ``overwrite`` parameter, which should be a
    boolean. If you provide ``True``, this will tell DynamoDB to blindly
    overwrite whatever data is present, if any.

    Returns ``True`` on success.

    Example::

        >>> users.put_item(data={
        ...     'username': '******',
        ...     'first_name': 'Jane',
        ...     'last_name': 'Doe',
        ...     'date_joined': 126478915,
        ... })
        True

    """
    # Delegate to the high-level Item: Item.save performs the conditional
    # (expected-values) write unless overwrite=True is passed through.
    item = Item(self, data=data)
    return item.save(overwrite=overwrite)
def claim_shard_if_expired(self, shard):
    """Attempt to claim *shard* for this host when its heartbeat has expired,
    or create the tracking item if it does not exist yet.

    NOTE(review): myshard / last_sequence_number are computed but never
    returned in this block -- confirm whether a trailing return statement
    was intended.
    """
    myshard = None
    last_sequence_number = None
    #get table
    table = self.get_table()
    #put, conditional on the time
    now = int(time.time())
    cutoff_time = now - self.heartbeat_timeout
    #query for items to see if they exist already
    shard_id = shard + "-" + self.app_id
    try:
        myitem = table.get_item(id=shard_id)
        savedtime = myitem['time']
        host = myitem['host']
        if savedtime < cutoff_time:
            #I claim this for Spain, since no heartbeat seen recently
            myitem['host'] = self.host_id
            myitem['time'] = now
            # save() is conditional: it only succeeds if nobody else
            # modified the item since we read it.
            if myitem.save():
                myshard = shard
                last_sequence_number = myitem[
                    k_dynamodb_last_sequence_number]
    except ItemNotFound, e:
        #gah! This shard tracking item does not exist yet. Got put in a new one
        newitem = Item(table, data={
            'id': shard_id,
            'time': now,
            'host': self.host_id
        })
        try:
            newitem.save()
            #if we got here, then victory
            myshard = shard
            logging.warning('Shard %s did not exist yet... creating' % shard_id)
        except ConditionalCheckFailedException, e:
            #argh, someone saved this item before me! try another shard.
            foo = 3
def receiver():
    """Handle a vaccine request: upsert the requester's record, then fan out
    a RapidPro flow to the closest registered givers.

    Fixes:
    - ``reciever['Status']`` was a NameError typo for ``receiver``.
    - the bare except around ``closest.remove`` now catches only ValueError
      (raised when *phone* is not in the list).
    """
    phone = request.form.get("phone")
    try:
        receiver = request_table.get_item(phone=phone)
    except ItemNotFound:
        receiver = Item(request_table, data={"phone": phone})
    values = json.loads(request.form['values'])
    receiver['vaccine_type'] = values[0]['value']
    receiver['number_of_vaccines'] = values[1]['value']
    receiver['Status'] = "Requested"
    receiver.save()
    loc = normalize_location(
        "%s, %s" % (receiver['location']['lat'], receiver['location']['lon']))
    closest = closest_locations(receiver['location'])
    if not len(closest):
        # Fallback contact when nobody is nearby.
        closest = ["+17173327758"]
        # return Response(json.dumps({"status" : "success", "results" : "None"}))
    # Don't message the requester themselves.
    try:
        closest.remove(phone)
    except ValueError:
        pass
    extra = {
        "lat": receiver['location']['lat'],
        "lon": receiver['location']['lon'],
        "location_english": loc['results'][0]['formatted_address'],
        "receiver_phone": phone,
        "number_of_vaccines": receiver['number_of_vaccines'],
        "vaccine_type": receiver['vaccine_type']
    }
    payload = {"flow_uuid": GIVER_FLOW_UUID, "phone": closest, "extra": extra}
    res = requests.post("https://api.rapidpro.io/api/v1/runs.json",
                        headers={
                            "Authorization": "Token %s" % RAPIDPRO_API_KEY,
                            'content-type': 'application/json'
                        },
                        data=json.dumps(payload))
    return Response(json.dumps({
        "status": "success",
        "response": res.json(),
        "closest": closest
    }), mimetype="application/json")
def _post_skill(self, skill, key, prefix):
    '''(str, UUID, str) -> NoneType
    Helper: inserts one skill row into the skill table. A truthy *prefix*
    marks a post skill (stored under key_post with the prefixed name);
    otherwise the row is a user skill (stored under key_user, raw name).
    '''
    data = {'key_skill' : hashCreate(), 'key_time': timeUTCCreate()}
    if prefix:
        data['key_post'] = key
        data['skill'] = prefix + skill
    else:
        data['key_user'] = key
        data['skill'] = skill
    insert_skill = Item(table_skill, data)
    insert_skill.save()
def save(self, obj):
    """Persist *obj* (assigning a fresh uuid id when missing) together with
    one mapped attribute per declared index; overwrites any existing item."""
    if not obj.id:
        obj.id = uuid()
    stored_data = {
        'id': obj.id,
        'value': obj.to_data()
    }
    index_vals = obj.indexes() or {}
    # Materialize each declared index as its own attribute so DynamoDB
    # secondary indexes can key on it ('' when the object omits the index).
    for key in obj.__class__.index_names() or []:
        val = index_vals.get(key, '')
        stored_data[key] = DynamoMappings.map_index_val(val)
    table = self.get_class_table(obj.__class__)
    item = Item(table, data=stored_data)
    item.save(overwrite=True)
def add_new_isoc_cw_item(self, msp_id=None, credentials=None):
    """Insert a credentials row into the MSP CW table; returns save()'s result.

    Fix: ``now`` was computed but unused while created_at/updated_at each
    called ``datetime.now()`` again, so the two timestamps could differ by
    microseconds; both now use the single captured value.
    """
    now = str(datetime.now())
    item = Item(self.cm.get_msp_cw_table(), data={
        "msp_id": msp_id,
        "credentials": credentials,
        "created_at": now,
        "updated_at": now
    })
    return item.save()
def __enter__(self):
    """Acquire the named distributed lock or raise BlockedByAnotherTimerException.

    A lock row holds an 'expires' epoch. An expired row may be re-claimed by
    bumping 'expires'; a missing row is created outright. Losing the
    conditional-write race on the re-claim path leaves the lock blocked.
    """
    lock_table = get_lock_table(connection=self.dynamodb_connection)
    try:
        lock_item = lock_table.get_item(lock_name=self.key, consistent=True)
        if lock_item['expires'] < time.time():
            # Stale lock: try to take it over by extending the expiry.
            lock_item['expires'] = time.time() + self.expires
            try:
                lock_item.save()
                return self
            except ConditionalCheckFailedException:
                # Someone else refreshed the row first; fall through to fail.
                pass
    except ItemNotFound:
        # No lock row yet: create one claiming the lock.
        lock_item = Item(lock_table, data={
            'lock_name': self.key,
            'expires': time.time() + self.expires
        })
        lock_item.save()
        return self
    raise BlockedByAnotherTimerException()
def update_dynamo(values):
    """Record a badge swipe: write one DRINK_RECORD item per tap with a
    poured volume, and update the user's USER_TOTALS aggregates."""
    # Tables:
    # DRINK_RECORD
    # -code : hash key
    # -timestamp : range key
    # -amount
    # -tap
    # USER_TOTALS
    # -code : hash key
    # -number_of_drinks
    # -volume_consumed
    code = values["CODE"]
    tap_one = values["TAP_ONE"]
    tap_two = values["TAP_TWO"]
    total_amount = tap_one + tap_two
    timestamp = long(time.time())
    if (total_amount > 0):
        drinker = get_drinker(code)
        if (tap_one > 0):
            drinker['volume_consumed'] = drinker['volume_consumed'] + int(tap_one)
            drinker['number_of_drinks'] = drinker['number_of_drinks'] + 1
            drink_1 = Item(drinks, data={
                'code': code,
                'timestamp': timestamp,
                'amount': int(tap_one),
                'tap' : 1
            })
            drink_1.save()
            # Not ideal, but this lets us store both drinks separately, as
            # the code/timestamp combination must be unique.
            timestamp = timestamp + 1
        if (tap_two > 0):
            drinker['volume_consumed'] = drinker['volume_consumed'] + int(tap_two)
            drinker['number_of_drinks'] = drinker['number_of_drinks'] + 1
            drink_2 = Item(drinks, data={
                'code': code,
                'timestamp': timestamp,
                'amount': int(tap_two),
                'tap' : 2
            })
            drink_2.save()
        # Persist the aggregate updates once, after both taps are handled.
        drinker.save()
        print("User " + code + " drank " + str(tap_one) +
              " mL from tap one and " + str(tap_two) +
              " mL from tap two at " +
              datetime.fromtimestamp(timestamp).strftime("%I:%M%p on %A, %d %B %Y"))
    else:
        print("User " + code + " tapped their badge without drinking at " +
              datetime.fromtimestamp(timestamp).strftime("%I:%M%p on %A, %d %B %Y"))
def main(args):
    """Bulk-load users from a CSV file (firstname,lastname,society per line)
    into the given DynamoDB table, overwriting existing items.

    Fix: the input file handle was opened inline and never closed; it is now
    managed with a with-statement.
    """
    conn = dynaconnect(args)
    tableName = args.table
    try:
        table = Table(tableName, connection=conn)
    except boto.exception.JSONResponseError as details:
        error("Error when connecting to DynamodDB", details.message)
    with open(args.file) as fh:
        users = [[x.strip() for x in line.strip().split(",")] for line in fh]
    for user in users:
        dynamodata = {
            'firstname': user[0],
            'lastname': user[1],
            'society': user[2]
        }
        item = Item(table, data=dynamodata)
        item.save(overwrite=True)
class DBItem(object):
    """Base class mapping an object's public attributes onto a boto Item."""

    def __init__(self, table):
        self._table = table
        self._item = Item(table.get_table(), data=self.get_attrs())

    def get_attrs(self):
        """Return a dict of every non-underscore instance attribute."""
        return {name: value
                for name, value in self.__dict__.iteritems()
                if not name.startswith('_')}

    def save(self, **kwargs):
        """Persist the wrapped item (kwargs pass through to Item.save)."""
        self._item.save(**kwargs)

    def insert(self, **kwargs):
        """Insert via the table wrapper using the item's raw data dict."""
        return self._table.put_item(data=self._item._data)

    def delete(self):
        """Remove the wrapped item from the table."""
        return self._item.delete()
def update_isoc_remote(self, msp_id, credentials):
    """Overwrite the remote-access row for *msp_id* with new credentials,
    preserving the original created_at timestamp.

    Fix: ``now`` is already a string, so the redundant ``str(now)`` wrapper
    on updated_at was dropped.
    """
    old_msp_item = self.has_item_by_mspid_remote(msp_id=msp_id)
    now = str(datetime.now())
    item = Item(self.cm.get_ra_table(), data={
        "msp_id": old_msp_item['msp_id'],
        "credentials": credentials,
        "created_at": old_msp_item['created_at'],
        "updated_at": now
    })
    return item.save(overwrite=True)
def main(args):
    """Upsert one stack-config item (type/name/config plus extra keys),
    creating the backing DynamoDB table on first use."""
    conn = dynaconnect(args)
    table_name = args.table
    try:
        conn.describe_table(table_name)
    except boto.exception.JSONResponseError as details:
        # Anything other than "table missing" is fatal.
        if details.error_code != "ResourceNotFoundException":
            error("Error when connecting to DynamodDB", details.message)
        sys.stdout.write("Table does not exist, creating it")
        sys.stdout.flush()
        table = Table.create(
            table_name,
            schema=[HashKey('name')],
            global_indexes=[
                GlobalAllIndex('StacksByType', parts=[HashKey('type')])
            ],
            connection=conn)
        # Poll until the freshly created table becomes ACTIVE.
        while table.describe()["Table"]["TableStatus"] != "ACTIVE":
            time.sleep(1)
            sys.stdout.write('.')
            sys.stdout.flush()
        print("")
    else:
        table = Table(table_name, connection=conn)
    # key=value lines from the properties file become the 'config' map.
    parameters = dict([x.strip() for x in line.strip().split("=")]
                      for line in open(args.prop_file))
    # Extra key=value pairs from the command line merge in at top level.
    additionals = dict([x.strip() for x in k.strip().split("=")]
                       for k in args.key)
    record = {'type': args.type, 'name': args.name, 'config': parameters}
    record.update(additionals)
    Item(table, data=record).save(overwrite=True)
def createitem(tagid):
    """Create and save an items-table entry for a known RFID tag.

    Fix: an unrecognised tag previously crashed with NameError because
    ``product``/``price`` were never assigned; it now raises a descriptive
    ValueError instead.
    """
    tagid = str(tagid)
    catalog = {
        "165": ("Toothpaste", "3$"),
        "214": ("Brush", "33$"),
        "100": ("Diet Coke", "2$"),
    }
    if tagid not in catalog:
        raise ValueError("unknown rfid tag: %s" % tagid)
    product, price = catalog[tagid]
    users = Table('items')
    # WARNING - constructing the Item does not save it yet!
    brush = Item(users, data={
        'rfid': tagid,
        'pname': product,
        'Price': price,
        'tray_status': '0',
    })
    # The data now gets persisted to the server.
    brush.save()
def heartbeat(self, terminate=False):
    """Refresh this service's registry entry by rewriting its Expiry
    timestamp; with terminate=True the entry expires immediately (no TTL
    extension)."""
    # if self.service.stopHeartbeat:
    #     return
    # Update service Expiry based on TTL and current time
    self.service.Expiry = int(time.mktime(datetime.now().timetuple()))
    if not terminate:
        self.service.Expiry += TTL
    table = self.registry.Table()
    item_info = {
        'Name': self.service.Name,
        'Endpoint': self.service.Endpoint
    }
    # LBYL lookup: reuse the existing registry row when present, otherwise
    # build a fresh Item from the service's attribute dict.
    if table.has_item(**item_info):
        item = table.get_item(**item_info)
    else:
        item = Item(table, self.service.__dict__)
    item['Expiry'] = self.service.Expiry
    item.save()
def createGame(self, gameId, creator, invitee):
    """Create a PENDING game item; returns the result of save()."""
    status_date = "PENDING_" + str(datetime.now())
    game = Item(self.cm.getGameTable(), data={
        "GameId": gameId,
        "HostId": creator,
        "StatusDate": status_date,
        "OUser": creator,
        "Turn": invitee,
        "Opponent": invitee
    })
    return game.save()
def create_user(table, request):
    """Create a person item for request['id'] unless that id already exists
    with a different name (returns a 400 error payload in that case).

    Returns a JSON-API style dict carrying msg_id and status.
    NOTE(review): when the id exists with the SAME name, the function falls
    through and returns None -- confirm that is intended.
    """
    try:
        id = request['id']
        name = request['name']
        item = table.get_item(id=int(id), consistent=True)
        if item["name"] != name:
            return {
                "errors": [{
                    "id_exists": {
                        "status": "400",  # "Bad Request"
                        "title": "id already exists",
                        "detail": {
                            "name": item['name']
                        }
                    }
                }],
                "msg_id": request['msg_id'],
                "status": 400
            }
    except ItemNotFound as inf:
        # Unused id: create the person with an empty activity set.
        p = Item(table, data={'id': id, 'name': name, 'activities': set()})
        p.save()
        return {
            "data": {
                "type": "person",
                "id": id,
                "links": {
                    "self": "{0}://{1}/users/{2}".format(request['scheme'],
                                                         request['netloc'],
                                                         id)
                }
            },
            "msg_id": request['msg_id'],
            "status": 201
        }
def record_drink(fob_id, tap, volume, timestamp):
    """Record a pour: write one drink item, charge the payer, update the
    drinker's running totals, and decrement the keg's remaining volume.

    Silently ignores non-positive volumes; unknown fobs/drinkers are only
    logged. All numeric fields are stored as strings (Decimal arithmetic).
    """
    if volume <= 0:
        return
    fob = get_fob(fob_id)
    if fob is None or fob['drinker_id'] is None:
        print "An unknown fob ({0}) managed to get {1} ml of beer from tap {2}.".format(fob_id, volume, tap)
        return
    drinker_id = fob['drinker_id']
    drinker = get_drinker(drinker_id)
    if drinker is None:
        print "An unknown drinker ({0}) managed to get {1} ml of beer from tap {2}.".format(drinker_id, volume, tap)
        return
    keg = get_current_keg(tap)
    # Pro-rate the keg's cost by the fraction of its volume poured.
    cost = price(Decimal(volume) * Decimal(keg['cost']) / Decimal(keg['volume']))
    alcohol = Decimal(volume) * Decimal(keg['abv'])
    drink = Item(drinks, data={
        'drinker_id': str(drinker_id),
        'timestamp': str(timestamp),
        'volume': str(volume),
        'tap': str(tap),
        'cost': str(cost),
        'alcohol': str(alcohol),
        'payer_id': str(drinker['payer_id']),
        'beer_name': str(keg['beer_name'])
    })
    drink.save()
    # Debit the payer's account.
    payer = get_payer(drinker['payer_id'])
    payer['credit'] = str(Decimal(payer['credit']) - Decimal(cost))
    payer.save()
    # Update the drinker's lifetime aggregates.
    drinker['number_of_drinks'] = str(Decimal(drinker['number_of_drinks']) + 1)
    drinker['volume_consumed'] = str(Decimal(drinker['volume_consumed']) + Decimal(volume))
    drinker['alcohol_consumed'] = str(Decimal(drinker['alcohol_consumed']) + Decimal(alcohol))
    drinker['total_cost'] = str(Decimal(drinker['total_cost']) + Decimal(cost))
    drinker.save()
    # Draw down the keg.
    keg['volume_remaining'] = str(Decimal(keg['volume_remaining']) - Decimal(volume))
    keg.save()
def save_partition(part):
    """Persist the best (lowest total delay) two-leg itinerary per (route, depdate).

    Fix: the bare ``except:`` also trapped SystemExit/KeyboardInterrupt;
    narrowed to ``except Exception`` (item-not-found is the expected case).
    """
    for record in part:
        fl_xy = record[1][0]
        fl_yz = record[1][1]
        route = fl_xy.Origin + '-' + fl_xy.Dest + '-' + fl_yz.Dest
        depdate = record[0][0]
        item_new = Item(out_table, data={
            "route": route,
            "depdate": depdate,
            "flight_xy": fl_xy.UniqueCarrier + str(fl_xy.FlightNum),
            "flight_yz": fl_yz.UniqueCarrier + str(fl_yz.FlightNum),
            "total_delay": int(fl_xy.DepDelay + fl_xy.ArrDelay +
                               fl_yz.DepDelay + fl_yz.ArrDelay)
        })
        # Only replace an existing item when the new delay is strictly lower.
        try:
            item_old = out_table.get_item(route=route, depdate=depdate)
            if item_old['total_delay'] > item_new['total_delay']:
                item_new.save(overwrite=True)
        except Exception:
            # No existing item (or it is unreadable): write unconditionally.
            item_new.save(overwrite=True)
def add_new_isoc_remote_item(self, msp_id=None, remote_access_status=None, action_type=None, user_choice=None):
    """Insert a remote-access row for *msp_id*; returns the result of save()."""
    timestamp = str(datetime.now())
    record = {
        "msp_id": msp_id,
        "remote_access_status": remote_access_status,
        "user_choice": user_choice,
        "action_type": action_type,
        # created_at/updated_at share one captured timestamp
        "created_at": timestamp,
        "updated_at": timestamp,
    }
    return Item(self.cm.get_ra_table(), data=record).save()
def createNewGame(self, gameId, creator, invitee):
    """Create a PENDING game item; returns the result of save(), or None if
    the item could not be constructed.

    Fix: the handler logged ``ex.msg``, an attribute most exceptions do not
    have (the logging call itself could raise AttributeError); it now logs
    the exception object directly.
    """
    now = str(datetime.now())
    statusDate = "PENDING_" + now
    try:
        item = Item(self.cm.getGamesTable(), data={
            "GameId": gameId,
            "HostId": creator,
            "OpponentId": invitee,
            "StatusDate": statusDate,
            "OUser": creator,
            "Turn": invitee
        })
    except Exception as ex:
        logger.debug(ex)
        return None
    return item.save()
def createNewGame(self, gameId, creator, invitee):
    """
    Create and save a new game Item via the High-Level API, populating the
    primary keys of the schema and its indexes (GameId, HostId, StatusDate,
    OpponentId) plus the extra attributes that track game state. The host
    takes the first turn. Returns True/False depending on the success of
    the save.
    """
    status_date = "PENDING_" + str(datetime.now())
    new_game = {
        "GameId": gameId,
        "HostId": creator,
        "StatusDate": status_date,
        "OUser": creator,
        "Turn": creator,
        "OpponentId": invitee,
    }
    return Item(self.cm.getGamesTable(), data=new_game).save()
def createNewGame(self, gameId, creator, invitee):
    """
    Create and save a new game Item via the High-Level API, populating the
    primary keys of the schema and its indexes (GameId, HostId, StatusDate,
    OpponentId) plus the extra attributes that track game state. The invitee
    takes the first turn. Returns True/False depending on the success of
    the save.
    """
    status_date = "PENDING_" + str(datetime.now())
    new_game = {
        "GameId": gameId,
        "HostId": creator,
        "StatusDate": status_date,
        "OUser": creator,
        "Turn": invitee,
        "OpponentId": invitee,
    }
    return Item(self.cm.getGamesTable(), data=new_game).save()
def store_in_dynamo(signup_data): signup_item = Item(ddb_table, data=signup_data) signup_item.save() print "Data stored in DDB"
indexes=[ AllIndex('PostIndex', parts=[HashKey('post_time'), RangeKey('post')]) ], connection=conn) else: twitter = Table('twitter', connection=conn) ##############Parse input data##################### if (len(sys.argv) is not 3 or sys.argv[1][0] is not '@'): print("Usage:\n%s @<user> <msg>" % (sys.argv[0])) sys.exit(1) ########## Handling our post - IT DOESN'T SAVE ON Dynamo item = Item(twitter, data={ \ 'post_time': time.ctime(), \ 'username': sys.argv[1][0:11], \ #messages must have 140 char 'post': sys.argv[2][0:140]} \ ) ####UPLOAD TO DYNAMODB item.save() #print by scan order for emp in twitter.scan(): PrintItem(emp)
def store_in_dynamo(signup_data):
    """Persist the signup payload to the DynamoDB table."""
    record = Item(ddb_table, data=signup_data)
    record.save()
jsonIn = args.json dbTable = args.table table = Table(dbTable) with open(jsonIn, 'r') as f: lines = f.read().splitlines() f.close() for line in lines: with open(line, 'r') as l: data = json.load(l) dynamodb_json = json.dumps(data) payload = Item(table, data=json.loads(dynamodb_json)) payload.save() #test3.put_item(data=json.loads(dynamodb_json)) print line + " added to" + dbTable + "dynamoDB" l.close() ''' with open(jsonIn, 'r') as f: for line in f: line = line.strip() with open(line, 'r') as l: data = json.load(l) dynamodb_json = json.dumps(data) test3.put_item(data=json.loads(dynamodb_json)) print line + " added to test2 dynamoDB" l.close() '''
# Ad-hoc DynamoDB smoke test: connect to a region, then write one hard-coded
# row to the ParkingPass table.
# SECURITY(review): AWS credentials are hard-coded below -- these keys must
# be rotated and loaded from the environment / an IAM role, never committed
# to source control.
conn = dynamodb2.connect_to_region(
    REGION,
    aws_access_key_id='AKIAINKNLJLNPOG7BJAQ',
    aws_secret_access_key='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')
table = Table(TABLE_NAME, connection=conn)
users = Table('ParkingPass')
janedoe = Item(users, data={
    'Space': 'janedoe',
    'Type': 'Jane',
    'Available': 'Doe',
})
janedoe.save()
#table=dynamodb2.get_table('ParkingPass')
#attrs = {'Space': 'A1', 'Type': 'A', 'Available': 'Yes'}
#my_item = table.new_item(attrs=attrs)
#results = table.query_2(
#    Space__eq=0
#    )
#response = table.scan(
#    FilterExpression=Attr('Available').eq('YES')
#print(items)
#tables= conn.list_tables()
#print(tables)
# Demo script: insert, scan, fetch, and update student records in the
# studentsTable DynamoDB table. (The printed strings are Portuguese section
# banners: record insertion / lookup / update / attribute deletion.)
print "Inclusao de registros"
studentsTable.put_item(data={
    'identidade': '1',
    'nome': 'Alex',
    'endereco': 'Rua epaminondas, 10',
})
# Build the item in memory first, then persist it explicitly.
sueli = Item(studentsTable, data={
    'identidade': '2',
    'nome': 'Sueli',
    'cidade': 'Sao Paulo',
})
sueli.save()
print "Pesquisa de registros"
result_set = studentsTable.scan()
for estudante in result_set:
    print estudante['identidade']
estudantePesquisado = studentsTable.get_item(identidade='1')
print estudantePesquisado['nome']
print "Atualizacao de registros"
estudantePesquisado['nome'] = 'Coqueiro'
print estudantePesquisado['nome']
print "Apagar um atributo"
class Block(object):
    """One time-partitioned storage block: a pair of DynamoDB tables
    (data points + index) plus a metadata row (``self.item``) kept in the
    master table.

    NOTE(review): LRUCache, config, Table, TimedBatchTable, Item,
    HashKey/RangeKey/NUMBER, util, IndexKey, TimedBatchTable, BLOCKS and
    block_pos/base_time come from the surrounding module, which is not
    visible in this chunk.
    """

    # Process-wide cache of index keys already written, so repeated
    # datapoints for the same series can skip re-writing the index row.
    index_key_lru = LRUCache(config.get().CACHE_WRITE_INDEX_KEY)

    def __init__(self, master, connection, n):
        """Load the metadata row for block position ``n`` and try to bind
        to its backing tables (best effort -- they may not exist yet)."""
        self.master = master
        self.connection = connection
        # Python 2 iterator .next(): take the first (presumably only)
        # consistent-read match for this block position.
        self.item = self.master.query(n__eq=n, consistent=True).next()
        self.dp_writer = self.data_points_table = self.index_table = None
        # noinspection PyBroadException
        try:
            self.bind()
        except:
            pass  # TODO log -- binding fails when tables don't exist yet

    def bind(self):
        """Bind to existing tables.

        Resolves the data-points and index Table handles by name, records
        their combined status in ``self.item['state']`` ('UNDEFINED' when
        the two tables disagree), and returns the derived state.
        """
        if self.data_points_name and self.index_name:
            data_points_table = Table(self.data_points_name,
                                      connection=self.connection)
            try:
                # describe() both validates existence and yields status.
                s1 = data_points_table.describe()['Table']['TableStatus']
            except:
                # bare re-raise: placeholder where logging/handling belongs
                raise
            else:
                self.data_points_table = data_points_table
                # Batched writer over the data-points table.
                self.dp_writer = TimedBatchTable(
                    self.data_points_table.batch_write())
            index_table = Table(self.index_name, connection=self.connection)
            try:
                s2 = index_table.describe()['Table']['TableStatus']
            except:
                raise
            else:
                self.index_table = index_table
            if s1 == s2:
                self.item['state'] = s1
            else:
                self.item['state'] = 'UNDEFINED'
        return self.state

    def create_tables(self):
        """Create tables.

        Idempotent: if already bound, just returns the current state.
        Otherwise derives per-tbase table names, creates whichever table is
        missing, re-binds, and persists the metadata row.
        """
        if self.data_points_table and self.index_table:
            return self.state
        self.item['data_points_name'] = '%s_%s' % (config.table_name('dp'),
                                                   self.tbase)
        self.item['index_name'] = '%s_%s' % (config.table_name('dp_index'),
                                             self.tbase)
        try:
            self.bind()
        except:
            # bind() failed => at least one table is missing; create them.
            if not self.data_points_table:
                Table.create(self.data_points_name,
                             schema=[
                                 HashKey('domain_metric_tbase_tags'),
                                 RangeKey('toffset', data_type=NUMBER)
                             ],
                             throughput={
                                 # read capacity is shared across BLOCKS
                                 'read': config.get().TP_READ_DATAPOINTS / BLOCKS,
                                 'write': config.get().TP_WRITE_DATAPOINTS
                             },
                             connection=self.connection)
            if not self.index_table:
                Table.create(
                    self.index_name,
                    schema=[HashKey('domain_metric'),
                            RangeKey('tbase_tags')],
                    throughput={
                        'read': config.get().TP_READ_INDEX_KEY / BLOCKS,
                        'write': config.get().TP_WRITE_INDEX_KEY
                    },
                    connection=self.connection)
            self.item['state'] = self.bind()
        self.item.save(overwrite=True)
        return self.state

    def replace(self, new_timestamp):
        """Replace this block with new block.

        Validates that ``new_timestamp`` maps to this block position, then
        recycles the block's tables unless the base time is unchanged.
        """
        if block_pos(new_timestamp) != self.n:
            raise ValueError(
                'time %s (pos=%s) is not valid for block (pos=%s)' %
                (new_timestamp, block_pos(new_timestamp), self.n))
        if base_time(new_timestamp) == self.tbase:
            # Same time window -- nothing to recycle.
            return self
        self.delete_tables(new_timestamp)
        return self

    def delete_tables(self, new_timestamp=None):
        """Delete the tables for this block.

        Best-effort deletes of both tables and the metadata row, then
        recreates the row in 'INITIAL' state with the new base time.
        """
        if not new_timestamp:
            new_timestamp = self.tbase
        if self.data_points_table:
            # noinspection PyBroadException
            try:
                self.data_points_table.delete()
            except:
                pass  # best effort -- table may already be gone
            self.data_points_table = None
            self.dp_writer = None
        if self.index_table:
            try:
                self.index_table.delete()
            except:
                pass  # best effort
            self.index_table = None
        try:
            self.item.delete()
        except:
            pass  # best effort
        # Rebuild the metadata row from the old one, reset to INITIAL.
        self.item = Item(self.master, data=dict(self.item.items()))
        self.item['state'] = 'INITIAL'
        self.item['tbase'] = base_time(new_timestamp)
        self.item.save(overwrite=True)
        return self.state

    def turndown_tables(self):
        """Reduce write throughput for this block.

        Flushes pending batched writes, then drops write capacity to 1 on
        both tables (the state property reports this as 'TURNED_DOWN').
        """
        try:
            self.dp_writer.flush()
        except:
            pass  # dp_writer may be None or flush may fail; best effort
        self.dp_writer = None
        if self.data_points_table:
            self.data_points_table.update({
                'read': config.get().TP_READ_DATAPOINTS / BLOCKS,
                'write': 1
            })
        if self.index_table:
            self.index_table.update({
                'read': config.get().TP_READ_INDEX_KEY / BLOCKS,
                'write': 1
            })

    @property
    def n(self):
        # Block position within the ring of BLOCKS.
        return self.item['n']

    @property
    def tbase(self):
        # Base time of the window this block covers.
        return self.item['tbase']

    @property
    def data_points_name(self):
        return self.item['data_points_name']

    @property
    def index_name(self):
        return self.item['index_name']

    @property
    def state(self):
        """Live state: stored 'INITIAL', otherwise derived from both
        tables' describe() output ('UNDEFINED' when they disagree)."""
        state = self.item['state']
        if state == 'INITIAL':
            return state
        s1 = self._calc_state(self.data_points_table.describe())
        s2 = self._calc_state(self.index_table.describe())
        if s1 != s2:
            return 'UNDEFINED'
        return s1

    def store_datapoint(self, timestamp, metric, tags, value, domain):
        """Store index key and datapoint value in tables.

        Silently drops the point when no batch writer is bound.
        """
        #TODO: exception instead of silent return when not bound
        if not self.dp_writer:
            return
        key = util.hdata_points_key(domain, metric, timestamp, tags)
        # Make sure the series is discoverable before writing its data.
        self._store_index(key, timestamp, metric, tags, domain)
        return self.dp_writer.put_item(
            data={
                'domain_metric_tbase_tags': key,
                'toffset': util.offset_time(timestamp),
                'value': value
            })

    def query_index(self, domain, metric, start_time, end_time):
        """Query index for keys.

        Returns IndexKey wrappers for every series of (domain, metric)
        whose base time falls in [start_time, end_time].
        """
        if not self.index_table:
            return []
        key = util.index_hash_key(domain, metric)
        # Range keys are stored as strings; +1 makes the upper bound
        # inclusive of end_time's base period.
        time_range = map(
            str, [util.base_time(start_time), util.base_time(end_time) + 1])
        return [
            IndexKey(k) for k in self.index_table.query(consistent=False,
                                                        domain_metric__eq=key,
                                                        tbase_tags__between=time_range)
        ]

    def query_datapoints(self, index_key, start_time, end_time,
                         attributes=tuple(['value'])):
        """Query datapoints.

        Returns rows (newest first) for one series in the given window;
        'toffset' is always fetched in addition to ``attributes``.
        """
        if not self.data_points_table:
            return []
        key = index_key.to_data_points_key()
        time_range = util.offset_range(index_key, start_time, end_time)
        attributes_ = ['toffset']
        attributes_.extend(attributes)
        return [
            value for value in self.data_points_table.query(
                consistent=False,
                reverse=True,
                attributes=attributes_,
                domain_metric_tbase_tags__eq=key,
                toffset__between=time_range)
        ]

    # noinspection PyMethodMayBeStatic
    def _calc_state(self, desc):
        """Map a describe() result to a state string; write capacity of 1
        on an ACTIVE table is reported as 'TURNED_DOWN'."""
        desc = desc['Table']
        state = desc['TableStatus']
        if state == 'ACTIVE' and desc['ProvisionedThroughput'][
                'WriteCapacityUnits'] == 1:
            state = 'TURNED_DOWN'
        return state

    # noinspection PyMethodMayBeStatic
    def _store_cache(self, key, cache, table, data):
        # Write-through guard: only hit DynamoDB when the key is not cached.
        # `data` is a thunk so the payload is built only on a cache miss.
        if not cache.get(key):
            table.put_item(data=data(), overwrite=True)
            cache.put(key, 1)

    def _store_index(self, key, timestamp, metric, tags, domain):
        """Store an index key if not yet stored. """
        self._store_cache(
            key, Block.index_key_lru, self.index_table, lambda: {
                'domain_metric': util.index_hash_key(domain, metric),
                'tbase_tags': util.index_range_key(timestamp, tags)
            })

    def __str__(self):
        return str((self.n, self.state, self.tbase, self.data_points_name,
                    self.index_name))

    def __repr__(self):
        return str(self)
'tweet': 'Hello twitter!'
})

# construct an item and then save it
from boto.dynamodb2.items import Item
from boto.dynamodb2.table import Table

tweets = Table('tweets', connection=conn)
_item = Item(tweets, data={
    'id': '2222',
    'username': '******',
    'screen_name': 'dddd',
    'tweet': 'test tweet',
})
_item.save()

###########################
## Getting an item
###########################
from boto.dynamodb2.table import Table
tweets = Table('tweets', connection=conn)
# Key lookup; returns a dict-like boto Item.
_tweet = tweets.get_item(id='1111')
print type(_tweet)
print _tweet.keys()
# Dump every stored attribute of the fetched tweet.
for key in _tweet.keys():
    print key, _tweet[key]

###########################
## Updating an item