def initiate():
    """Normalize an inbound phone number, persist a Record if it is new,
    and send an opt-in SMS via Twilio.

    Reads 'number' and 'image' from the request values. Returns the
    literal string "None" as the HTTP response body.
    """
    # Debug tracing kept from the original; consider switching to logging.
    print(request.form['number'])
    print("A")
    print(request.values.get('number'))

    # Keep only the digit characters of the submitted number.
    fixedNumber = "".join(
        ch for ch in request.values.get('number') if ch.isnumeric())
    print("B")

    # Normalize toward E.164: 10 digits -> assume US and prefix '+1';
    # 11 digits -> assume country code already present, prefix '+'.
    if len(fixedNumber) == 10:
        fixedNumber = "+1" + fixedNumber
    elif len(fixedNumber) == 11:
        fixedNumber = "+" + fixedNumber
    print("C")

    r = Record(fixedNumber, request.values.get("image"))
    phones = Record.query.filter_by(phone=fixedNumber).all()
    print("D")

    # BUG FIX: Query.all() returns a (possibly empty) list, never None,
    # so the original `if phones == None:` could never be true and the
    # new record was never saved. Persist only when no row matches.
    if not phones:
        db.session.add(r)
        db.session.commit()
    print("E")

    image(fixedNumber, request.values.get("image"))
    client = TwilioRestClient(twilio_account_sid, twilio_auth_token)
    message = client.messages.create(
        to=fixedNumber,
        from_=twilio_number,
        body="Hey! Want to HEAR what your PICTURE looks like? Send \"yes\" to this SMS!")
    return "None"
def delete(record_id):
    """Delete the record identified by *record_id*.

    Responds 404 when the record does not exist; otherwise removes it,
    commits the transaction, and returns an empty body with status 204.
    """
    target = Record.get(id=record_id)
    if not target:
        abort(404)
    target.delete()
    db.commit()
    return '', 204
def show(record_id):
    """Return a single record, serialized to JSON; 404 when absent."""
    # Look the record up first; bail out before building the serializer.
    found = Record.get(id=record_id)
    if not found:
        abort(404)
    # RecordSchema handles serialization of the model instance.
    serializer = RecordSchema()
    return serializer.dumps(found)
def update(record_id):
    """Apply the request's JSON payload to an existing record.

    Responds 404 when the record is missing and 422 (with the error
    details) when validation fails; otherwise returns the updated
    record as JSON.
    """
    serializer = RecordSchema()
    target = Record.get(id=record_id)
    if not target:
        abort(404)
    try:
        # Deserialize and validate the incoming JSON, then persist it.
        payload = serializer.load(request.get_json())
        target.set(**payload)
        db.commit()
    except ValidationError as err:
        return jsonify({'message': 'Validation failed',
                        'errors': err.messages}), 422
    return serializer.dumps(target)
def create():
    """Create a record from the request's JSON body.

    Responds 422 (with validation errors) when the payload is invalid;
    otherwise returns the new record as JSON with status 201.
    """
    serializer = RecordSchema()
    try:
        # Deserialize/validate the JSON, then build the record, stamped
        # with the current user as its creator.
        payload = serializer.load(request.get_json())
        new_record = Record(**payload, createdBy=g.current_user)
        db.commit()
    except ValidationError as err:
        return jsonify({'message': 'Validation failed',
                        'errors': err.messages}), 422
    return serializer.dumps(new_record), 201
def process_item(self, item, spider):
    """Accumulate parsed records from the 'record' spider.

    Deduplicates per (user_id, subject_id), tracks the user-id window of
    the current cycle, and triggers a DB flush / seen-dict optimization
    when the configured thresholds are reached. Items from any other
    spider pass through untouched.
    """
    if spider.name != 'record':
        return item

    self.received_elements_in_this_cycle_count += 1
    self.received_elements_total_count += 1
    parsed = Record.parse_input(item)
    uid = parsed['user_id']

    # Seed the per-cycle user-id window from the cycle's first record.
    if self.received_elements_in_this_cycle_count == 1:
        self.current_min_user_id = uid
        self.current_max_user_id = uid + 1
    # Widen the window as needed; the max is kept one past the largest
    # uid so the upper bound isn't skipped by range queries.
    if uid < self.current_min_user_id:
        self.current_min_user_id = uid
    if uid + 1 > self.current_max_user_id:
        self.current_max_user_id = uid + 1

    # Queue a (user, subject) pair only the first time it is seen in
    # this run. This guards against duplicates such as a user changing a
    # subject's status mid-scrape, which would yield the same subject
    # twice with different statuses.
    seen_subjects = self.entities_seen.setdefault(uid, set())
    if parsed['subject_id'] not in seen_subjects:
        self.items_to_process.append(parsed)
    seen_subjects.add(parsed['subject_id'])

    if self.received_elements_in_this_cycle_count >= self.commit_threshold:
        self.write_to_db()
    if self.received_elements_total_count % self.optimize_items_seen_dict_threshold == 0:
        self.optimize_items_seen_dict(self.current_min_user_id)
    return item
def get(self):
    """Testing endpoint: purge a player and all data owned by that player
    (scores, items, records, buildings, cars, challenges).

    Request parameters:
        passwd -- required; must match config.testing['passwd']
        uuid   -- required; the player to delete
        guid   -- read but not used below  # NOTE(review): confirm intent

    Writes a REST-style result (success message or error) plus the time
    taken to the response.
    """
    Utils.reset(self)  # reset/clean standard variables

    # validate and assign parameters
    passwd = Utils.required(self, 'passwd')
    uuid = Utils.required(self, 'uuid')
    guid = self.request.get('guid')

    # check password
    if self.error == '' and passwd != config.testing['passwd']:
        self.error = 'passwd is incorrect.'

    start_time = time.time()  # start counting elapsed time

    # if an earlier step set an error, skip the deletion entirely
    if self.error == '':
        # query player state for given uuid
        players = Player.all().filter('uuid =', uuid).ancestor(
            db.Key.from_path('Player', config.db['playerdb_name'])).fetch(1)
        didDelete = False  # we have not deleted anything yet
        for player in players:
            # we might have duplicate data, just delete them all
            # query scores for this player and delete them all
            scores = Score.all().filter('uuid =', player.uuid).ancestor(
                db.Key.from_path('Score', config.db['scoredb_name']))
            for score in scores:
                score.delete()
            # query items for this player and delete them all
            items = Item.all().filter('uuid =', player.uuid).ancestor(
                db.Key.from_path('Item', config.db['itemdb_name']))
            for item in items:
                item.delete()
            # query records for this player and delete them all
            records = Record.all().filter('uuid =', player.uuid).ancestor(
                db.Key.from_path('Record', config.db['recorddb_name']))
            for record in records:
                record.delete()
            # query buildings for this player and delete them all
            buildings = Building.all().filter('uuid =', player.uuid).ancestor(
                db.Key.from_path('Building', config.db['buildingdb_name']))
            for building in buildings:
                building.delete()
            # query cars for this player and delete them all
            cars = Car.all().filter('uuid =', player.uuid).ancestor(
                db.Key.from_path('Car', config.db['cardb_name']))
            for car in cars:
                car.delete()
            # delete all this user's challenges
            Challenge.DeleteByUserId(self, player.uuid)
            # and finally, delete this player
            player.delete()
            didDelete = True
        # compose result (idiom fix: test the boolean directly rather
        # than comparing with `== True`)
        if didDelete:
            self.respn = '"' + uuid + ' was deleted successfully."'
        else:
            self.error = uuid + ' does not exist in Database.'

    # calculate time taken and return the result
    time_taken = time.time() - start_time
    self.response.headers['Content-Type'] = 'text/html'
    self.response.write(Utils.RESTreturn(self, time_taken))
def mapResult(self, cursor):
    """Materialize all remaining rows of *cursor* as Record objects,
    keyed by the cursor's column names.
    """
    # TODO: fetch lazily (e.g. fetchmany) instead of loading every row
    fetched = cursor.fetchall()
    names = [col.name for col in cursor.description]
    return [Record(dict(zip(names, values))) for values in fetched]
# Seed/fixture data for the catalog.
# NOTE(review): `vinyl` is created but not referenced by either record
# below, and the User(...) instance is not bound to a name — confirm
# both are intentional (the ORM may register instances on construction).
vinyl = Medium(name='Vinyl')
User(
    username='******',
    email='*****@*****.**',
    password_hash=schema.generate_hash('pass'),
)
# Two sample records; `cd`, `tape`, and `download` are presumably Medium
# instances defined earlier in the file — not visible here, verify.
first = Record(
    artist="First",
    title="Title",
    cover="/images/one.png",
    description="""Series to the cult groove and the short bass style compositions and producers across the styles of the world of sound are subtle and all the album moves and the steppers of the other instruments in the spiritual, and the title Unit and late ‘80s acts and stares at the style scene and collaboration Recorded and studio series and produced by the listeners of the most story heard of their share of ‘The Works’ get the most proof string of ‘Mark One Time’""",
    mediums=[cd, tape]
)
second = Record(
    artist="Second",
    title="Title",
    cover="/images/two.png",
    description="""A sound in the band of electronic music of electronic musics and hardcore and sounds of sounds of the missing for those transformation and the distance of experience of the strongest design of ‘Billish Martin' and the sound of his recorded by Michelle Lear. The series of field recordings animation Del and strangely styles of a super sound of the sense to the world""",
    mediums=[cd, tape, download]
)
def index():
    """Return every record as a JSON array."""
    # many=True because we serialize a collection, not a single record.
    all_records = Record.select()  # fetch all the records
    return RecordSchema(many=True).dumps(all_records)
def write_to_db(self):
    """Flush the accumulated scrape items to the database.

    Diffs each queued item against the rows currently stored for this
    cycle's user-id range, then creates missing entities, updates
    changed ones, skips unchanged ones, and records db rows that were
    not seen on the web page as deletion candidates.
    """
    # reset the counter
    self.received_elements_in_this_cycle_count = 0  # items received in this process cycle
    skipped_entities_count = 0
    # Snapshot the instance-level queues into locals, then clear the
    # instance attributes so the next cycle accumulates fresh state
    # while this batch is processed.
    items_to_process = self.items_to_process
    entities_to_create = self.entities_to_create
    entities_to_update = self.entities_to_update
    self.items_to_process = []
    self.entities_to_create = []
    self.entities_to_update = []
    # All rows in the db whose user_id falls in this cycle's window.
    database_response_entities = self.recordTableDatabaseExecutor.query_range(
        self.current_min_user_id, self.current_max_user_id)
    database_response_dict = {}
    new_entities_might_need_deleting = set()
    for db_entity in database_response_entities:
        # Index db rows by their (user_id, subject_id) composite key.
        database_response_dict[(db_entity.user_id,
                                db_entity.subject_id)] = db_entity
        # A db row never observed during scraping is a deletion candidate.
        if self.entities_seen.get(db_entity.user_id) is None or db_entity.subject_id not in \
                self.entities_seen.get(db_entity.user_id):
            new_entities_might_need_deleting.add(
                (db_entity.user_id, db_entity.subject_id))
    # first we assume all records from db need to be deleted
    self.entities_might_need_deleting.update(new_entities_might_need_deleting)
    for record in items_to_process:
        primary_key_from_scrapy_item = (record['user_id'],
                                        record['subject_id'])
        entity_in_db = database_response_dict.get(
            primary_key_from_scrapy_item)
        if entity_in_db:
            # if the record is in scrapy's response, which indicates it's
            # still on web page, remove it from the set
            try:
                self.entities_might_need_deleting.remove(
                    primary_key_from_scrapy_item)
            except KeyError:
                # entity might not exist in deleting set, in such case
                # just ignore the error
                pass
            difference = entity_in_db.diff_self_with_input(record)
            if len(list(difference)) > 0:
                # db/API contain entity and it has difference,
                # overwriting data in db
                entity_in_db.set_attribute(record)
                entities_to_update.append(entity_in_db)
            else:
                # db/API contain entity but nothing changes, skipping it
                skipped_entities_count += 1
        else:
            # record has been parsed in self.process_item(), doesn't need
            # to be parsed again
            entity = Record(record, False)
            entities_to_create.append(entity)
    start_id = self.current_min_user_id
    end_id = self.current_max_user_id
    logger.info('Creating %s new instances in user_id range (%s, %s)',
                len(entities_to_create), start_id, end_id)
    created_entities = self.recordTableDatabaseExecutor.create(
        entities_to_create)
    # Executor returns counts; accumulate into the run-level stats.
    self.stats['created_entities'] += created_entities
    self.stats['skipped_entities'] += skipped_entities_count
    logger.info(
        'Skipping %s existed instances in range (%s, %s) as there\'s no difference',
        skipped_entities_count, start_id, end_id)
    logger.info('Updating %s existed instances in range (%s, %s)',
                len(entities_to_update), start_id, end_id)
    updated_entities = self.recordTableDatabaseExecutor.update(
        entities_to_update)
    self.stats['updated_entities'] += updated_entities