def choose_question():
    """Pick a question for the requested topic and place an outbound Twilio
    call that walks the callee through it, then render the in-call page.

    Query params: topic_id, number, if_record ("on" enables call recording).

    :return: rendered 'in_call.html' template
    """
    topic_id = request.args.get('topic_id', '')
    phone_number = request.args.get('number', '')
    # Checkbox sends "on" when checked; anything else means no recording.
    record = ("on" == request.args.get('if_record', ''))
    logger.debug(record)
    question = pick_question(topic_id)
    url = "{}/handle_call?question_id={}&action=speak".format(URL, question.id)
    # Python-2 style `print` debug statements removed: they are a syntax
    # error on Python 3 and duplicated the logger.debug output anyway.
    call = client.calls.create(
        to=phone_number,
        from_=TWILIO_NUMBER,
        url=url,
        record=record,
        status_callback=URL + "/handle_recording",
        status_callback_method="POST"
    )
    return render_template(
        'in_call.html',
        topics=db.session.query(Topic).all(),
        is_current=True,
        call_sid=call.sid,
        question_id=question.id,
        languages=db.session.query(Language).all(),
        answer=question.answer,
        answer_language=str(question.language()),
        language="python"
    )
async def face_verification(name: str, file: bytes = File(...)):
    """Verify a single face in the uploaded image against a stored face.

    Supports a single face in a single image.

    :param name: name of the enrolled face to compare with (case-insensitive)
    :param file: uploaded image bytes
    :raises NotFoundError: if no face with that name exists in the database
    :raises ValueError: if the similarity computation fails
    :return: JSONResponse with similarity percentage and match status
    """
    log.debug("Calling face_verification.")
    session = Session()
    try:
        target_face = session.query(Face).filter_by(name=name.lower()).first()
        if target_face is None:
            raise NotFoundError("Face with that name does not exist in database.")
        image = file_to_image(file)
        fa_faces = analyze_image(image, fa)
        inp_face = fa_faces[0]  # assumes exactly one detected face — TODO confirm upstream validation
        target_emb = string_to_nparray(target_face.embedding)
        sim = compute_similarity(inp_face.embedding, target_emb)
        # -99 is the sentinel for a failed similarity computation.  Raise a
        # real exception instead of `assert`, which is stripped under -O.
        if sim == -99:
            raise ValueError("Similarity computation failed.")
        sim *= 100
        status = bool(sim >= 60)  # 60% similarity threshold for a match
        result = {"similarity": int(sim), "status": status}
        return JSONResponse(content={"status_code": 200, "result": result})
    finally:
        # Close the session on every path, not just on success.
        session.close()
def employees():
    """Render the employees page.

    GET: fetch the full employee list from api/employees.
    POST: forward the date filter from the form to api/employees and show
    only employees born on the given date or within the given period.

    :return: rendered 'employees.html' with the employee list
    """
    dates = None
    if request.method == 'POST':
        date_from = request.form.get('date_from', '')
        date_by = request.form.get('date_by', '')
        logger.debug('Dates was received and sent to server')
        payload = {'date_from': date_from, 'date_by': date_by}
        employees_list = requests.post(ip + '/api/employees', data=payload).json()
        dates = [date_from, date_by]
    else:
        employees_list = requests.get(ip + '/api/employees').json()
    logger.debug('Employees page was displayed')
    return render_template('employees.html',
                           employees_list=employees_list,
                           dates=dates)
def loop():
    """Starts a loop for the user processor."""
    logger.debug("@ USER PROCESSOR THREAD: start")
    # Cache of UserProcessor instances keyed by user id, reused across passes.
    user_processors = {}
    while True:
        db_users = find_users_to_process()
        selected = []
        for db_user in db_users:
            processor = user_processors.get(db_user.id)
            if processor is None:
                processor = UserProcessor(db_user.id)
                user_processors[db_user.id] = processor
            selected.append(processor)
            db.session.expunge(db_user)
        # One worker thread per user; start them all, then wait for all.
        workers = [threading.Thread(target=p.process_user) for p in selected]
        for worker in workers:
            worker.start()
        for worker in workers:
            worker.join()
        logger.debug("@ sleep %s" % Config.USER_PROCESS_INTERVAL)
        sleep(Config.USER_PROCESS_INTERVAL)
def process_friend_status(self, friend_queue_element):
    """Process statuses from a friend and commit their term frequencies.

    :param friend_queue_element: Container for friend id and friend statuses
    """
    friend_id = friend_queue_element.friend_id
    friend_statuses = friend_queue_element.friend_statuses
    logger.debug(
        "@ \t%s processing friend %s" % (self.db_user_id, friend_id))
    for status in friend_statuses:
        logger.debug("@ \t\tprocessing friend %s status %s" % (
            friend_id, status.id))
        # get term frequency of the status
        status_text = status.text
        tf = get_term_frequency(status_text)
        self.tfp.commit_term_frequency(
            db_session=self.db_session,
            entity_id=self.db_user_id,
            tf=tf,
            source="USR_FRIEND_STATUS",
        )
        # `with` guarantees the lock is released even if the counter update
        # raises; raw acquire()/release() left it held on error.
        with self.lock:
            self.processed_friends_tweets += 1
    self.mark_friend_as_processed(friend_id)
def get_it_list():
    """Get a paginated list of all items with their given id and name.

    @Params:
        - p: page number (required)
        - ipp: items per page (required)
        - cols: comma-separated extra columns (default: 'description')
        - csv: '1' to download the result as a CSV attachment

    :raises errors.ApiError: 70001 when required params are missing
    :return: JSON response or CSV attachment
    """
    logger.debug("List items")
    params = request.args
    # Validation
    if not params:
        raise errors.ApiError(70001, "Missing required key params")
    _needed_params = {'p', 'ipp'}
    missing = _needed_params - set(params.keys())
    if missing:
        # Report only the params that are actually missing, not the whole set.
        raise errors.ApiError(
            70001, "Missing required key params: {}".format(list(missing)))
    # `','.join(['description'])` was a no-op; a plain default is equivalent.
    cols = params.get('cols', 'description')
    _resp = Item.it_list(cols=cols, p=params['p'], ipp=params['ipp'])
    if params.get('csv', '0') == '1':
        csv_ = pd.DataFrame(_resp, dtype=str).to_csv(quoting=csv.QUOTE_ALL)
        return Response(
            csv_,
            mimetype="text/csv",
            headers={"Content-disposition": "attachment; filename=item_data.csv"})
    return jsonify({
        "status": "OK",
        "items": _resp
    })
def advance(self):
    """Advance the loop-building dialog one turn based on the parsed message."""
    # Pending sub-goals take priority; delegate to the base implementation.
    if self.todos:
        super().advance()
        return
    logger.debug(f"Advancing {self.__class__.__name__}...")
    self._message = None
    if self.context.current_message in [
        "close loop", "no", "done", "closed loop", "close the loop"
    ]:
        self.done = True
    elif re.match("close.{1,6}loop", self.context.current_message):
        # Fuzzy match catches variants like "close that loop".
        self.done = True
    elif not isinstance(self.context.parsed, BaseGoal):
        self._message = "I didn't quite catch that. What action did you want me to add?"
    elif self.context.parsed.error is not None:
        self._message = self.context.parsed.error
    elif self.context.parsed._message is not None:
        self._message = self.context.parsed._message
    else:
        action = self.context.parsed
        # Give the parsed ActionGoal access to this goal's action list.
        setattr(action, "actions", self.actions)
        if action.is_complete:
            action.complete()
            # NOTE(review): these branches *return* the confirmation string,
            # while the sibling advance() implementation assigns it to
            # self._message — confirm callers actually consume the return value.
            if len(self.actions) == 1:
                return "Added action to the loop! Anything else? If yes, what's next? If no, say \"close loop\"."
            else:
                return "Added action to the loop! Anything else? If no, say \"close loop\"."
        else:
            # Incomplete actions become sub-goals to finish on later turns.
            self.todos.append(action)
def sync_call(request_msg):
    """Send request_msg to the POS platform server and return its reply.

    Opens a fresh TCP connection (trying each resolved address in turn),
    sends the message, and waits for a single response.

    :param request_msg: payload to send to the server
    :return: the server's reply, or None when no connection could be made
             or the exchange failed
    """
    logger.debug('IN::>>> [%s]' % request_msg)
    s = None
    res_list = socket.getaddrinfo(POS_PLAT_SERVER_IP, POS_PLAT_SERVER_PORT,
                                  socket.AF_UNSPEC, socket.SOCK_STREAM)
    for res in res_list:
        af, socktype, proto, canonname, sa = res
        try:
            s = socket.socket(af, socktype, proto)
        except socket.error:
            s = None
            continue
        try:
            s.connect(sa)
        except socket.error:
            s.close()
            s = None
            continue
        break
    if s is None:
        logger.error('EXCEPT[%x]:: could not connect POS platform server !!!' % id(s))
        return
    try:
        s.send(request_msg)
        logger.info('REQ[%x]::>>> [%s]' % (id(s), request_msg))
        ans = s.recv(204800)
        logger.info('ACK[%x]::<<< [%s]' % (id(s), ans))
        return ans
    except Exception as e:
        # traceback.print_exc() returns None (it prints to stderr); log the
        # formatted traceback instead.  `e.message` does not exist on
        # Python 3 exceptions — use the exception object directly.
        logger.error(traceback.format_exc())
        logger.error('EXCEPT[%x]:: %s' % (id(s), e))
    finally:
        s.close()
def set_identify_and_initial_query(self):
    """Probe the repository's OAI-PMH endpoint.

    Sets harvest_identify_response from an Identify call, and
    harvest_test_recent_dates / sample_pmh_record from a sample query
    over the last 30 days.
    """
    if not self.pmh_url:
        self.harvest_identify_response = u"error, no pmh_url given"
        return
    # Initialize before the try: if get_my_sickle() itself raises, the except
    # block below referenced an unbound name and died with a NameError that
    # masked the real error.
    my_sickle = None
    try:
        # set timeout quick... if it can't do this quickly, won't be good for harvesting
        logger.debug(u"getting my_sickle for {}".format(self))
        my_sickle = self.get_my_sickle(self.pmh_url, timeout=10)
        data = my_sickle.Identify()
        self.harvest_identify_response = "SUCCESS!"
    except Exception as e:
        logger.exception(u"in set_identify_and_initial_query")
        self.error = u"error in calling identify: {} {}".format(
            e.__class__.__name__, unicode(e.message).encode("utf-8"))
        if my_sickle:
            self.error += u" calling {}".format(my_sickle.get_http_response_url())
        self.harvest_identify_response = self.error

    # Sample the last 30 days to verify recent records can be harvested.
    last = datetime.datetime.utcnow()
    first = last - datetime.timedelta(days=30)
    self.sample_pmh_record = None
    (pmh_input_record, pmh_records, error) = self.get_pmh_input_record(first, last)
    if error:
        self.harvest_test_recent_dates = error
    elif pmh_input_record:
        self.harvest_test_recent_dates = "SUCCESS!"
        self.sample_pmh_record = json.dumps(pmh_input_record.metadata)
    else:
        self.harvest_test_recent_dates = "error, no pmh_input_records returned"
def add_character(json_data):
    """Insert a Marvel character from its API payload, linking any comics
    already stored locally. Skips insertion when the id already exists."""
    char_id = json_data[u'id']
    thumb = json_data[u'thumbnail']
    comics_info = json_data[u'comics']
    comic_ids = [int(item[u'resourceURI'].split('/comics/')[1])
                 for item in comics_info[u'items']]
    existing = Character.query.filter_by(id=char_id).first()
    if not existing:
        character = Character(
            id=char_id,
            thumbnail=thumb[u'path'] + '.' + thumb[u'extension'],
            name=json_data[u'name'],
            description=json_data[u'description'],
            number_of_comics=comics_info[u'available'],
            number_of_stories=json_data[u'stories'][u'available'],
            number_of_series=json_data[u'series'][u'available'])
        for comic_id in comic_ids:
            comic = Comic.query.filter_by(id=comic_id).first()
            if comic:
                character.comics.append(comic)
        db.session.add(character)
        logger.debug('Added %s', character)
        db.session.commit()
def get_sarasora_current_results():
    """Serve the Sarasota predictions file with a JSON content type header."""
    if logger:
        logger.debug("get_sarasora_current_results Started.")
    results, ret_code = get_data_file(FL_SARASOTA_PREDICTIONS_FILE)
    headers = {"Content-Type": "Application-JSON"}
    return (results, ret_code, headers)
def update_comic(json_data):
    """Link an existing comic to the characters and creators referenced in
    the Marvel API payload; does nothing if the comic is unknown."""
    comic = Comic.query.filter_by(id=json_data[u'id']).first()
    if comic:
        character_ids = [int(c[u'resourceURI'].split('/characters/')[1])
                         for c in json_data[u'characters'][u'items']]
        creator_ids = [int(c[u'resourceURI'].split('/creators/')[1])
                       for c in json_data[u'creators'][u'items']]
        for character_id in character_ids:
            character = Character.query.filter_by(id=character_id).first()
            if character and character not in comic.characters:
                comic.characters.append(character)
        for creator_id in creator_ids:
            creator = Creator.query.filter_by(id=creator_id).first()
            if creator and creator not in comic.creators:
                comic.creators.append(creator)
        logger.debug('Updated %s', comic)
        db.session.commit()
def penalize_recommendations(recommendations, now):
    """Penalize recommended-event scores by event age.

    Heuristic: the most relevant event starts at correlation 1.0 and decays
    linearly; after X hours it is "old news" / obsolete (correlation 0.0).

    :param recommendations: Recommendation rows, all for the same user
    :param now: timestamp used for penalization and as correlation_timestamp
    :return: list of event dicts sorted by penalized correlation, descending
    """
    logger.debug("+ penalizing recommendations")
    results = []
    if recommendations:
        user_id = recommendations[0].user_id
        likes = Action.query.filter(and_(Action.action_type == 'LIKE',
                                         Action.user_id == user_id)).all()
        event_likes = [action.event.id for action in likes]
        for recommendation in recommendations:
            penalized_correlation = penalize(recommendation.base_correlation,
                                             recommendation.event.start_time,
                                             now)
            if penalized_correlation > Config.EVENT_RECOMMENDATION_THRESHOLD:
                event = recommendation.event
                result = {
                    "id": event.id,
                    "headline": event.headline,
                    "tweet_count": event.tweet_count,
                    "start_time": arrow.get(event.start_time).humanize(),
                    "image_url": event.image_url,
                    "summary": event.summary,
                    "correlation": penalized_correlation,
                    "correlation_timestamp": now,
                    # `in` already yields a bool; `True if ... else False`
                    # was redundant.
                    "is_liked": event.id in event_likes,
                }
                results.append(result)
    return sorted(results, key=lambda k: k['correlation'], reverse=True)
def __dev_fits(self, options, id):
    """Development stub for a camera exposure: sleep for the requested
    exposure time, then serve one of the sample FITS files (optionally
    cropped to the requested ROI) as the shot result.

    :param options: dict with 'exposure' (seconds) and optional 'roi'
        ({'x', 'y', 'width', 'height'}) — presumably pixel coords; TODO confirm
    :param id: identifier used to build the destination filename
    :return: map representation of the newly registered image
    """
    time.sleep(options['exposure'])
    sample_fits = [
        x for x in os.listdir(os.environ['SAMPLE_FITS_PATH'])
        if x.lower().endswith('.fits')
    ]
    # Deterministic-per-millisecond pick among the available samples.
    file_index = int(time.time() * 1000) % len(sample_fits)
    dest_filename = '{}.fits'.format(id)
    source_path = os.path.join(os.environ['SAMPLE_FITS_PATH'],
                               sample_fits[file_index])
    dest_path = os.path.join(self.settings.camera_tempdir, dest_filename)
    logger.debug('{} ==> {}'.format(source_path, dest_path))
    if 'roi' in options and options['roi']:
        roi = options['roi']
        with fits.open(source_path) as hdu:
            data = hdu[0].data
            full_height, full_width = hdu[0].shape
            logger.debug('shape: {}x{}'.format(full_width, full_height))
            # Cutout2D takes size as (ny, nx) and center as (x, y).
            cutout_size = (roi['height'], roi['width'])
            cutout_center = (roi['x'] + roi['width'] / 2,
                             roi['y'] + roi['height'] / 2)
            cutout = Cutout2D(data, cutout_center, cutout_size, copy=True)
            hdu[0].data = cutout.data
            hdu.writeto(dest_path)
    else:
        shutil.copyfile(source_path, dest_path, follow_symlinks=True)
    image = self.__new_image_to_list(dest_filename, id)
    return image.to_map(for_saving=False)
def showSummary():
    """Render the budget summary: all of the current user's entries, total
    income, and total expenses grouped per category."""
    entries = []
    sumIncome = 0
    # Seed every known category with zero so categories without expenses
    # still appear in the summary.
    expensePerCategory = {c.name: 0 for c in Category.objects().all()}
    userId = ObjectId(session.get('user')['_id']['$oid'])
    for entry in Entry.objects(owner=userId).all():
        if entry.category is not None:
            categoryName = Category.objects(id=entry.category.id).first().name
        else:
            categoryName = '-'
        e = {'_id': entry.id,
             'amount': entry.amount,
             'description': entry.description,
             'category': categoryName}
        entries.append(e)
        if e['amount'] > 0:
            sumIncome += e['amount']
        else:
            # Expenses are stored negative; subtracting yields a positive total.
            expensePerCategory[e['category']] -= e['amount']
    logger.debug('Currency: {0}'.format(session.get('currency')))
    return render_template('/budget/summary.html',
                           entries=entries,
                           sumIncome=sumIncome,
                           expensePerCategory=expensePerCategory)
def __download_file(self, file):
    """Download one astrometry index file to its destination path, emitting
    progress events through the event listener.

    :param file: dict with 'filename' and 'url' keys
    :raises FailedMethodError: when checksum verification fails
    """
    dest_path = settings.astrometry_path(file['filename'])
    if os.path.exists(dest_path):
        os.remove(dest_path)
    with urllib.request.urlopen(file['url']) as response, open(
            dest_path, 'wb') as out_file:
        file_progress = 0
        # Content-Length is fixed for the response: read it once instead of
        # re-parsing the header on every 128 KiB chunk.
        file_total = int(response.headers['Content-Length'])
        while True:
            chunk = response.read(128 * 1024)
            if not chunk:
                break
            chunk_length = len(chunk)
            self.downloaded += chunk_length
            file_progress += chunk_length
            out_file.write(chunk)
            self.event_listener.on_astrometry_index_downloader(
                'progress', {
                    'file': file['filename'],
                    'downloaded': file_progress,
                    'total': file_total,
                    'all_downloaded': self.downloaded,
                    'all_total': self.total,
                })
            # logger.debug('read {} bytes, {} total, {} remaining'.format(len(chunk), self.downloaded, self.total))
    logger.debug('file downloaded: {}'.format(file['filename']))
    if not self.__verify(file):
        raise FailedMethodError('Checksum verification failed')
def recommend(self, db_user):
    """Recommend events to a user and refresh their existing recommendations.

    :param db_user: user row whose term frequencies drive the correlation
    """
    logger.debug("+ recommend %s" % db_user.id)
    now = datetime.utcnow()
    # what is the latest event time that we care about
    latest_event_time = now - timedelta(
        seconds=Config.EARLIEST_EVENT_HOURS * 60 * 60)
    events_to_recommend = RecommendProcessor.get_events_to_recommend(
        db_user.id, latest_event_time)
    recommendations_to_update = \
        RecommendProcessor.get_recommendations_to_update(
            db_user, latest_event_time)
    user_tf = self.get_user_term_frequency(db_user.id)
    # New recommendations for fresh events.
    for event in events_to_recommend or []:
        self.recommend_event(event, db_user.id, user_tf, now)
    # Refresh the scores on recommendations that already exist.
    for recommendation in recommendations_to_update or []:
        self.update_recommendation(recommendation, db_user.id, user_tf, now)
    try:
        self.db_session.commit()
    except IntegrityError:
        self.db_session.rollback()
def add_creator(json_data):
    """Insert a Marvel creator from its API payload, linking any comics
    already stored locally. No-op when the creator id already exists."""
    creator_id = json_data[u'id']
    thumb = json_data[u'thumbnail']
    existing = Creator.query.filter_by(id=creator_id).first()
    if not existing:
        creator = Creator(
            id=creator_id,
            thumbnail=thumb[u'path'] + '.' + thumb[u'extension'],
            first_name=json_data[u'firstName'],
            last_name=json_data[u'lastName'],
            number_of_comics=json_data[u'comics'][u'available'],
            number_of_series=json_data[u'series'][u'available'],
            number_of_stories=json_data[u'stories'][u'available'])
        for item in json_data[u'comics'][u'items']:
            comic_pk = int(item[u'resourceURI'].split('/comics/')[1])
            comic = Comic.query.filter_by(id=comic_pk).first()
            if comic:
                creator.comics.append(comic)
        db.session.add(creator)
        logger.debug('Added %s', creator)
        db.session.commit()
def add_character(json_data):
    """Create a Character row from a Marvel API payload and attach any comics
    already present in the database. Skips creation when the id exists."""
    thumbnail_info = json_data[u'thumbnail']
    comics_data = json_data[u'comics']
    comic_ids = []
    for comic in comics_data[u'items']:
        comic_ids.append(int(comic[u'resourceURI'].split('/comics/')[1]))
    existing = Character.query.filter_by(id=json_data[u'id']).first()
    if not existing:
        character = Character(
            id=json_data[u'id'],
            thumbnail=thumbnail_info[u'path'] + '.' + thumbnail_info[u'extension'],
            name=json_data[u'name'],
            description=json_data[u'description'],
            number_of_comics=comics_data[u'available'],
            number_of_stories=json_data[u'stories'][u'available'],
            number_of_series=json_data[u'series'][u'available'])
        for comic_pk in comic_ids:
            linked = Comic.query.filter_by(id=comic_pk).first()
            if linked:
                character.comics.append(linked)
        db.session.add(character)
        logger.debug('Added %s', character)
        db.session.commit()
async def get_faces(page_size: int = 10, page: int = 1):
    """Return one page of enrolled faces from the database.

    :param page_size: number of faces per page, 1..100
    :param page: 1-based page number
    :raises ValidationError: for an out-of-range page size or page
    :return: JSONResponse with the page of faces and the total face count
    """
    log.debug("Calling get_faces.")
    if page_size > 100 or page_size <= 0:
        raise ValidationError(
            "Page size must be more than 0 and less or equals 100.")
    # Reject non-positive pages up front; previously `page - 1` turned
    # page=0 into offset -page_size, which slipped past the range check
    # and produced a negative OFFSET.
    if page <= 0:
        raise ValidationError(
            "Page and page count resulted out of range error.")
    session = Session()
    try:
        page -= 1  # convert to 0-based for OFFSET
        total_count = session.query(Face).count()
        if page * page_size >= total_count:
            raise ValidationError(
                "Page and page count resulted out of range error.")
        faces = session.query(Face).limit(page_size).offset(page * page_size).all()
        json_compatible_faces = jsonable_encoder(faces)
        result = {"faces": json_compatible_faces, "total_count": total_count}
        return JSONResponse(content={"status_code": 200, "result": result})
    finally:
        # Close the session on every path, including validation failures.
        session.close()
def validate_activity(self, activity):
    """Reject the activity if this child already has it registered."""
    logger.debug('Activity is %s', activity.data)
    logger.debug('CHILD ID %d', self.childid)
    existing = Activity.query.filter_by(activity=activity.data,
                                        childid=self.childid).first()
    if existing is not None:
        raise ValidationError('Activity already exists')
def connect(self):
    """Create the Elasticsearch client for the configured host."""
    logger.debug(self.host)
    # The previous `try: ... except AttributeError: raise` was a no-op
    # re-raise; letting any exception propagate naturally is equivalent.
    self.client = Elasticsearch([self.host])
def validate_childname(self, childname):
    """Reject the name if the current user already has a child with it."""
    duplicate = Child.query.filter_by(childname=childname.data,
                                      parentid=current_user.id).first()
    logger.debug('Child is %s', childname.data)
    if duplicate is not None:
        raise ValidationError('Please use a different childname')
def choose_question():
    """Pick a question for the requested topic and start an outbound Twilio
    call to the given number, then render the in-call page.

    Query params: topic_id, number, if_record ("on" enables recording).

    :return: rendered 'in_call.html'
    """
    topic_id = request.args.get('topic_id', '')
    phone_number = request.args.get('number', '')
    # Checkbox sends "on" when the caller asked for recording.
    record = ("on" == request.args.get('if_record', ''))
    logger.debug(record)
    question = pick_question(topic_id)
    url = "{}/handle_call?question_id={}&action=speak".format(
        app.config['NGROK_ROUTE'], question.id)
    # Python-2 `print` debug statements removed: syntax errors on Python 3
    # and redundant with the logger.debug call above.
    call = client.calls.create(
        to=phone_number,
        from_=app.config['TWILIO_NUMBER_1'],
        url=url,
        record=record,
        status_callback=app.config['NGROK_ROUTE'] + "/handle_recording",
        status_callback_method="POST"
    )
    return render_template(
        'in_call.html',
        topics=db.session.query(Topic).all(),
        is_current=True,
        call_sid=call.sid,
        question_id=question.id,
        languages=app.config["LANGUAGES"],
        answer=question.answer,
        answer_language=str(question.language)
    )
def get_reviews(prof_id):
    """GET /reviews/<string:prof_id> (retrieve all reviews for a professor)"""
    if prof_id is None:
        logger.debug('No string supplied during the call of reviews/prof_id endpoint')
        return (jsonify({"msg": "please provide necessary arguments"}),
                http.HTTPStatus.PRECONDITION_FAILED)
    return get_all_results_with_joins(Posts, Tags, PostTags, Modules, prof_id)

# @endpoint_output.route('/V1/predictions', methods=['GET'])
# @endpoint_output.route('/predictions', methods=['GET'])
# @jwt_required
# def get_multiple_predictions():
#     """GET /predictions
#     (retrieve multiple predictions based on a previously sent set of dispute predictors, retrieved by dispute_ids given
#     in &id=123 parameters)"""
#
#     args = get_args(request)
#     if not args:
#         logger.debug('No arguments supplied during the call of /predictions endpoint')
#         return jsonify({"msg": "please provide necessary arguments"}), http.HTTPStatus.BAD_REQUEST
#
#     return get_results(Prediction, args, 'prediction_time')
async def websocket_movement(websocket: WebSocket):
    """
    Websocket for customer positions

    Pushes each known customer position to this client exactly once: every
    position entry tracks the websocket consumers (by uuid) it was already
    sent to, and the loop only sends entries not yet listing this consumer.
    """
    logger.info("/ws/events contacted")
    await websocket.accept()
    # Unique id for this consumer, used to mark which positions were sent.
    ws_consumer_uuid = str(uuid.uuid4().hex)
    try:
        while True:
            for key, item in app.state.customer_positions.items():
                ci = item['customer']
                ws_consumers = item['ws_consumers']
                logger.debug(f"item: {item}")
                try:
                    # list.index raises ValueError when this consumer is NOT
                    # in the list — i.e. the position was never sent to us.
                    ws_consumers.index(ws_consumer_uuid)
                except ValueError:
                    await websocket.send_json(ci.dict())
                    # Mutation of the shared consumer list is guarded by the
                    # app-level lock.
                    async with app.state.customer_positions_lock:
                        ws_consumers.append(ws_consumer_uuid)
            await asyncio.sleep(1)
    except ConnectionClosedOK:
        logger.info(f"Connection {ws_consumer_uuid} closed.")
def post(self):
    """Return the customer's member cards, preferring the cache when warm.

    When `share` is truthy only cards with status < 3 are returned."""
    args = request.values
    logger.debug('[ApiCardMembers] in: args[%s]' % args)
    openid = args.get("openid")
    share = args.get("share")
    cards = get_cache_customer_cards(openid)
    if not cards:
        customer_cards = CustomerCard.query.filter(CustomerCard.customer_id == openid) \
            .order_by(CustomerCard.status.asc()).all()
        cards = []
        for item in customer_cards:
            cards.append({
                'globalId': item.id,
                'cardId': item.card_id,
                'merchantId': item.card.merchant.id,
                'cardCode': item.card_code,
                'amount': item.amount,
                'title': item.card.title,
                'logo': item.card.merchant.logo,
                'img': item.img or 'http://wx.cdn.pipapay.com/static/images/card_blue.png',
                'status': item.status,
                'expireDate': str(item.expire_date)
            })
        cache_customer_cards(openid, cards)
    if share:
        data = [card for card in cards if card['status'] < 3]
    else:
        data = cards
    logger.debug('[ApiCardMembers] out: result[0], data[%s]' % data)
    return {"result": 0, "data": data}
def add_creator(json_data):
    """Add a creator (with links to already-stored comics) from Marvel API
    data. Insertion is skipped when the creator id is already present."""
    thumbnail_info = json_data[u'thumbnail']
    creator_test = Creator.query.filter_by(id=json_data[u'id']).first()
    if creator_test is None:
        creator = Creator(id=json_data[u'id'],
                          thumbnail=thumbnail_info[u'path'] + '.' + thumbnail_info[u'extension'],
                          first_name=json_data[u'firstName'],
                          last_name=json_data[u'lastName'],
                          number_of_comics=json_data[u'comics'][u'available'],
                          number_of_series=json_data[u'series'][u'available'],
                          number_of_stories=json_data[u'stories'][u'available'])
        referenced = (int(c[u'resourceURI'].split('/comics/')[1])
                      for c in json_data[u'comics'][u'items'])
        for comic_pk in referenced:
            comic = Comic.query.filter_by(id=comic_pk).first()
            if comic is not None:
                creator.comics.append(comic)
        db.session.add(creator)
        logger.debug('Added %s', creator)
        db.session.commit()
def add_employee():
    """
    GET method: requests /api/departments-ids for the list of department ids
    and displays the new-employee form with those ids in a select element.

    POST method: forwards the submitted employee data to the server, which
    stores the new employee in the database.

    :return: GET: HTML page with form to write information about new employee
    :return: POST: redirect to employees page after adding new employee
    """
    if request.method == 'POST':
        payload = {
            'name': request.form.get('name', ''),
            'department_id': request.form.get('department_id', 'None'),
            'date_of_birthday': request.form.get('date_of_birthday', ''),
            'salary': request.form.get('salary', ''),
        }
        requests.post(ip + '/api/employee', data=payload)
        logger.debug('Employee`s data was sent to server (add)')
        return redirect('/employees')
    ids = requests.get(ip + '/api/departments-ids').json()
    logger.debug('Employee page was displayed')
    return render_template('add_employee.html', ids=ids['ids'])
def query_by(by):
    """ Endpoint to query items table by given values

        @Params:
            - by: <str> column to compare values with
            - keys: <str> can be item_uuid, gtin, name, description

        @Response:
            - resp: items list

        @Example:
            /query/gtin?keys=07501034691224,07501284858385
    """
    logger.info("Query Items by Item UUID...")
    params = request.args.to_dict()
    logger.debug(params)
    # Validate required params
    _needed_params = {'keys'}
    if not _needed_params.issubset(params):
        raise errors.ApiError(70001, "Missing required key params")
    # Complement optional params with defaults; setdefault replaces the
    # manual membership-test-then-assign loop.
    _opt_params = {'cols': '', 'p': 1, 'ipp': 50}
    for _o, _dft in _opt_params.items():
        params.setdefault(_o, _dft)
    _items = Item.query(by, **params)
    return jsonify({
        'status': 'OK',
        'items': _items
    })
def advance(self):
    """Advance the procedure-building dialog by one user turn."""
    # Pending sub-goals take priority; delegate to the base implementation.
    if self.todos:
        super().advance()
        return
    logger.debug(f"Advancing {self.__class__.__name__}...")
    self._message = None
    if self.context.current_message in ["done", "nothing", "no"]:
        # Check if user indicated that they are done with adding actions
        self.done = True
    elif not isinstance(self.context.parsed, BaseGoal):
        self._message = "I didn't quite catch that. What action did you want me to add?"
    elif self.context.parsed.error:
        self._message = self.context.parsed.error
    elif self.context.parsed._message:
        self._message = self.context.parsed._message
    else:
        action = self.context.parsed
        # Set the property "actions" for the ActionGoal so the ActionGoal can add its corresponding Action to the list
        setattr(action, "actions", self.actions)
        if action.is_complete:
            action.complete()
            self._message = "Added action to the procedure! Do you want to do anything else?"
        else:
            # Incomplete actions become sub-goals to be filled in on later turns.
            self.todos.append(action)
def add_person(self, data):
    """Look up a person by Spark id, creating and persisting a new record
    from the Spark API when none exists. Returns the person either way."""
    person = self.get_person(id=data['personId'])
    if person:
        logger.info("Person '" + data['personId'] + "' not added because they already exist")
        return person
    api_person = self._api.people.get(data['personId'])
    logger.debug("Adding person '" + data['personId'] + "'")
    person = {
        'sparkId': data['personId'],
        'displayName': api_person.displayName,
        'nickName': getattr(api_person, 'nickName', None),
        'lastName': api_person.lastName,
        'email': api_person.emails[0],
        'avatar': api_person.avatar,
        'project': self._project,
        'currentlyInQueue': False,
        'admin': False,
        'commands': 0,
        # Duration lists are stored in microseconds — TODO confirm with writers.
        'timesInQueue': [],
        'timesAtHead': [],
        'added_to_queue': [],
        'removed_from_queue': []
    }
    self._people.append(person)
    self._save()
    return person
def check_prices_geolocated():
    """ Get price data for alerts, from the given stores, items and retailers.
        Returns only the items that are off the variation in the items and
        stores specified.

        @Payload:
            - stores <list (uuids, retailer)>
            - items <list <tuple (uuid, price)>>
            - retailers <list (str)>
            - date <str (%Y-%m-%d)> : date to get the prices from (defaults to today, UTC)
            - variation
            - variation_type
    """
    logger.info('Alert geolocated endpoint...')
    params = request.get_json()
    if 'retailers' not in params:
        raise errors.AppError("invalid_request", "Retailers parameters missing")
    if 'items' not in params:
        raise errors.AppError("invalid_request", "items parameters missing")
    if 'date' not in params:
        params['date'] = datetime.datetime.utcnow().strftime('%Y-%m-%d')
    logger.debug('Params correct...')
    try:
        prices = Alert.get_geolocated(params)
    except Exception:
        # Narrowed from a bare `except:` (which also caught SystemExit and
        # KeyboardInterrupt) and now logs the underlying traceback before
        # mapping to the API error.
        # NOTE(review): 'server_serror' looks like a typo for 'server_error';
        # left unchanged because clients may match on the literal code.
        logger.exception('Alerts geolocation failed')
        raise errors.AppError('server_serror', "Alerts geolocation failed")
    return jsonify(prices)
def aggregate(self, collection_name, query_list):
    """Run an aggregation pipeline on the given MongoDB collection.

    :param collection_name: target collection name
    :param query_list: aggregation pipeline stages
    :return: list of result documents, or None when the aggregation fails
    """
    try:
        cursor = self.mongo_db[collection_name].aggregate(query_list)
        return list(cursor)
    except Exception:
        # logger.exception records the full traceback; the previous
        # bare print() plus a message-only debug line lost it.
        logger.exception('Exception while aggregating in MongoDB')
def login_wx():
    """
    Handle the supported login types (WeChat code-based login).

    :return: response with a fresh auth_token on success, or an error response
    """
    type = request.get_json().get("type")
    code = request.get_json().get("code")
    lack, lack_msg = check_args(type=type, code=code)
    if not lack:
        return response(msg=lack_msg)
    wx_data = get_wx_login_data(code)
    if wx_data.get('session_key') and wx_data.get('openid'):
        # Add or update the WeChat user binding
        open_id = wx_data.get("openid")
        bind_wx(wx_data.get('openid'), wx_data.get('session_key'))
        # Look up the user's role
        role_id = get_role_wx(open_id)
        jwt_data = {
            "role": role_id,
            "uid": open_id
        }
        auth_token = jwt_util.encode_auth_token(jwt_data).decode()
        logger.debug(f"用户id[{open_id}] 角色[{role_id}]正在访问login")
        return response(data={"auth_token": auth_token})
    else:
        return response(code=RespStatus.Auth_WX_Error.value,
                        msg=wx_data.get('errmsg'))
def renew_tor_connection(ip):
    """Ask the local Tor control port for a fresh circuit (new exit IP)."""
    logger.debug('Thread {} Resetting IP address {}'.format(
        current_thread().name, ip))
    with Controller.from_port(port=9051) as controller:
        controller.authenticate(password="******")
        # NEWNYM tells Tor to use a clean circuit for new connections.
        controller.signal(Signal.NEWNYM)
def bind_wx_():
    """Bind a student account to the current WeChat user and re-issue the token.

    Reads student_number/password from the JSON body, stores the new user
    bound to the WeChat open id carried in the auth token, upgrades the
    user's role, and returns a freshly signed auth_token.
    """
    # jwt_data = get_data_by_jwt(request)
    auth_token = g.get("auth_token")
    # logger.debug(f"通过g获取了auth_token:{jwt_data}")
    data = request.get_json()
    # Pull the student number and password from the request body
    stn = data.get('student_number')
    pwd = data.get('password')
    logger.debug(f"获取到用户名{stn} 密码{pwd}")
    lack, lack_msg = check_args(student_number=stn, password=pwd)
    if not lack:
        return response(code=RespStatus.LackArgs.value, msg=lack_msg)
    # Get the WeChat open_id from the decoded auth token
    open_id = auth_token['data'].get("uid")
    logger.debug(f"从auth_token中获取到信息是{auth_token}")
    # Insert the new user record
    add_new_user_wx(stn, pwd, open_id)
    # Upgrade the WeChat id's permission, then issue a new auth_token
    role = RoleStatus.WX_Auth.value
    update_user_permission_wx(open_id, role)
    role_id = get_role_wx(open_id)
    auth_token = {
        "role": role_id,
        "uid": open_id
    }
    logger.warning(f"更改用户权限之后的jwt_data为{auth_token}")
    auth_token = jwt_util.encode_auth_token(auth_token).decode()
    return response(msg="绑定学号成功", data={"auth_token": auth_token})
def __socket_loop(self, address, port):
    """Worker loop for the PHD2 control socket.

    Connects and reports the result, then multiplexes with select():
    incoming lines are dispatched to __handle_message and queued JSON
    method calls are flushed to the socket.  Runs until self.__connect
    is cleared or the connection drops.
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as connection:
        try:
            connection.connect((address, port))
            self.__put_result({'connected': True})
        except Exception as e:
            self.__put_error(PHD2ConnectionError(str(e), e))
            return
        self.__connected = True
        inout = [connection]
        # Line-oriented reads over the raw socket.
        fileobj = connection.makefile()
        try:
            while self.__connect:
                # 2 s timeout lets the loop notice __connect being cleared.
                infds, outfds, errfds = select.select(inout, inout, [], 2)
                if infds:
                    self.__handle_message(fileobj.readline())
                if outfds:
                    try:
                        message = self.methods_queue.get_nowait()
                        # logger.debug('PHD2Socket: >>> {}'.format(message))
                        connection.send('{}\r\n'.format(
                            json.dumps(message)).encode())
                    except Empty:
                        # No pending outbound method call; keep looping.
                        pass
        finally:
            # Always mark the connection closed and notify listeners.
            self.__connect = False
            self.__connected = False
            logger.debug('PHD2 socket disconnected')
            self.__put_event('disconnected')
def post(self):
    """Return recharge/expend totals and trade records for a card over the
    last 30 days."""
    args = request.values
    logger.debug('[ApiCardPayRecords] in: args[%s]' % args)
    card_id = args['cardId']
    # BETWEEN needs (lower, upper).  The original passed (today, today-30d),
    # which matches no rows at all.
    end = datetime.date.today()
    start = end - datetime.timedelta(30)
    records = CustomerTradeRecords.query.filter(
        CustomerTradeRecords.card_id == card_id,
        CustomerTradeRecords.time.between(start, end)).all()
    recharge_total = 0
    expend_total = 0
    for item in records:
        # type 0 is a recharge; anything else counts as spending.
        if item.type == 0:
            recharge_total += item.amount
        else:
            expend_total += item.amount
    data = {
        'rechargeTotal': recharge_total,
        'expendTotal': expend_total,
        'records': [{
            'merchantName': item.card.merchant.name,
            'date': str(item.time),
            'amount': item.amount
        } for item in records]
    }
    # Log the response payload (was logging the request args by mistake).
    logger.debug('[ApiCardPayRecords] out: result[0] data[%s]' % data)
    return {'result': 0, 'data': data}
def update_comic(json_data):
    """Attach the characters and creators referenced in the API payload to
    an existing comic; does nothing when the comic is not in the database."""
    comic = Comic.query.filter_by(id=json_data[u'id']).first()
    if comic:
        for character_item in json_data[u'characters'][u'items']:
            char_pk = int(character_item[u'resourceURI'].split('/characters/')[1])
            character = Character.query.filter_by(id=char_pk).first()
            if character and character not in comic.characters:
                comic.characters.append(character)
        for creator_item in json_data[u'creators'][u'items']:
            creator_pk = int(creator_item[u'resourceURI'].split('/creators/')[1])
            creator = Creator.query.filter_by(id=creator_pk).first()
            if creator and creator not in comic.creators:
                comic.creators.append(creator)
        logger.debug('Updated %s', comic)
        db.session.commit()
def post(self):
    """Mark a customer card as being gifted and record the share request.

    Sets the card status to 4 (gifting in progress) and stores a
    CustomerCardShare row describing the share.
    """
    args = request.values
    logger.debug('[ApiCardShare] in: args[%s]' % args)
    open_id = args['openId']
    card_id = args['cardId']
    card_code = args['cardCode']
    sign = args['sign']
    timestamp = args['timestamp']
    content = args['content']
    try:
        card = CustomerCard.query.filter_by(customer_id=open_id,
                                            card_id=card_id,
                                            card_code=card_code).first()
        card.status = 4  # card status 4: gifting in progress
        record = CustomerCardShare(share_customer_id=open_id,
                                   customer_card_id=card.id,
                                   timestamp=timestamp,
                                   content=content,
                                   sign=sign,
                                   status=0)
        db.session.add(card)
        db.session.add(record)
        db.session.commit()
        logger.info(
            '[ApiCardShare] customer[%s] result[0] card[%s] share ok'
            % (open_id, card_id))
        return {'result': 0}
    except Exception as e:
        # traceback.print_exc() returns None (it only prints); log the
        # formatted traceback.  `e.message` is Python-2-only — use str(e).
        logger.error(traceback.format_exc())
        logger.error(
            '[ApiCardShare] customer[%s] card[%s] share error:%s'
            % (open_id, card_id, e))
        return {'result': 255, 'data': str(e)}
def get_parameters(self, res_type: str) -> dict:
    """Collect SSM parameter values whose names contain *res_type*.

    Pages through ``describe_parameters``, keeps every parameter whose Name
    contains the given substring, then resolves each kept name through
    ``self.get_parameter``.

    :param res_type: substring to match against parameter names
    :return: dict mapping a running index to each matched parameter's value
    """
    ssm_client = _session()
    paginator = ssm_client.get_paginator('describe_parameters')
    # Lazy %-style logging; the old "'...' + res_type" concatenation raised
    # TypeError whenever res_type was not a str.
    logger.debug('PUSR01 - Query parameter hit by user %s', res_type)
    # Flatten the paginated responses instead of indexing nested lists.
    matched_names = [
        param['Name']
        for page in paginator.paginate()
        for param in page['Parameters']
        if res_type in param['Name']
    ]
    # TODO create list from these objects
    return {index: self.get_parameter(name)
            for index, name in enumerate(matched_names)}
def post(self):
    '''!Receive consent receipt

    Updates the stored receipt when one with the same consent_receipt_id
    already exists; otherwise validates the payload and creates a new
    Receipts row plus its account Mappings row.

    :return: status code 409 - already exists
    :return: status code 201 - created
    '''
    logger.info(json.loads(request.get_data()))
    # Pull every expected top-level field out of the request body.
    for item in self.receipt:
        self.receipt[item] = request.json.get(item)
    receipt = Receipts.query.filter_by(
        consent_receipt_id=self.receipt['consentReceipt']['consent_receipt_id']).first()
    # if receipt exists, update it
    if receipt is not None:
        logger.debug('Receipt already exist!')
        # TODO still return 409 after demo
        # abort(409, 'Receipt already exist!')
        receipt.rpt = self.receipt['rpt']
        # BUG FIX: a stray trailing comma here used to store a 1-tuple in rs_id.
        receipt.rs_id = self.receipt['consentReceipt']['rs_id']
        receipt.consent_receipt_id = self.receipt['consentReceipt']['consent_receipt_id']
        receipt.service_contract_id = self.receipt['consentReceipt']['service_contract_id']
        receipt.authorization_status = self.receipt['consentReceipt']['authorization_status']
        receipt.data_usage_license = self.receipt['consentReceipt']['data_usage_license']
        receipt.consent_summary = json.dumps(self.receipt['consentReceipt']['consent_summary'])
        receipt.update_time = datetime.datetime.now()
        db.session.add(receipt)
        db.session.commit()
        return {'message': 'updated'}, 201
    # Reject receipts with missing values.
    # BUG FIX: the original tested the dict *keys* (`if item is None`), which
    # are never None; the intent was clearly to validate the values.
    for item in self.receipt['consentReceipt']:
        if self.receipt['consentReceipt'][item] is None:
            logger.debug(item + 'can not be none!')
            abort(409, item + 'can not be none!')
    receipt = Receipts(
        self.receipt['rpt'],
        self.receipt['consentReceipt']['rs_id'],
        str(self.receipt['consentReceipt']['consent_receipt_id']),
        str(self.receipt['consentReceipt']['service_contract_id']),
        self.receipt['consentReceipt']['authorization_status'],
        str(self.receipt['consentReceipt']['data_usage_license']),
        json.dumps(self.receipt['consentReceipt']['consent_summary']))
    mapping = Mappings(
        self.receipt['consentReceipt']['account_id'],
        str(self.receipt['consentReceipt']['consent_receipt_id']),
        datetime.datetime.now()
    )
    db.session.add(mapping)
    db.session.add(receipt)
    db.session.commit()
    return {'message': 'created'}, 201
def add_order_to_auction(self, new_order):
    """Insert a positive-volume limit order into the matching sorted book.

    Market orders are logged and dropped; non-positive-volume orders are
    silently ignored. Books stay sorted via bisect.insort.
    """
    if not new_order.volume > 0:
        return
    if not new_order.is_limit():
        logger.debug('Discarding market order')
        return
    if new_order.is_buy():
        insort(self.buy_orders, new_order)
    elif new_order.is_sell():
        insort(self.sell_orders, new_order)
def process_statuses(self, statuses):
    """ Processes statuses of a user """
    logger.debug("@ processing user statuses %s" % self.db_user_id)
    # put statuses in queue
    for status in statuses:
        self.status_q.put(status)
    # NOTE(review): the collapsed source made the loop extent ambiguous; the
    # counter bump and commit are assumed to run once per batch, not once per
    # status — confirm against the original formatting.
    self.db_user.intent_own_tweets += 1
    self.db_session.add(self.db_user)
    self.db_session.commit()
def wrapper(*args, **kwargs):
    """Gate the wrapped view behind token authentication unless it opts out."""
    # Views flagged with authenticated=False bypass the check entirely.
    requires_auth = getattr(func, 'authenticated', True)
    if not requires_auth:
        return func(*args, **kwargs)
    # custom account lookup function
    account = verify_token()
    if not account:
        logger.debug('Error request, check http header!')
        abort(401, 'Error request, check http header!')
    return func(*args, **kwargs)
def importFromPlex(plex, db):
    """Import already-viewed movies and episodes from a Plex Media Server into
    the local Processed table, skipping items that were imported before.

    :param plex: client exposing getViewedMovies()/getViewedEpisodes()
    :param db: database handle with a SQLAlchemy-style session
    :return: True when both passes finish
    """
    logger.info("Importing viewed Movies from PMS")
    viewedMovies = plex.getViewedMovies()
    for movie in viewedMovies:
        # A session_id containing the Plex key means this movie was imported already.
        if db.session.query(models.Processed).filter(models.Processed.session_id.like("%" + movie.get('key') + "%")).first():
            logger.debug("skipping import of '%s' because there already is a entry in database" % movie.get("title"))
            continue
        el = models.Processed()
        # Start time = lastViewedAt minus the runtime (duration is in milliseconds).
        el.time = datetime.datetime.fromtimestamp(int(movie.get("lastViewedAt"))) - datetime.timedelta(seconds=(int(movie.get("duration")) / 1000))
        el.stopped = datetime.datetime.fromtimestamp(int(movie.get("lastViewedAt")))
        el.user = config.IMPORT_USERNAME
        el.platform = "Imported"
        el.title = movie.get("title")
        el.orig_title = movie.get("title")
        el.year = movie.get("year")
        el.summary = movie.get("summary")
        el.notified = 1  # already watched — never notify for imported rows
        el.progress = 100
        el.duration = movie.get("duration")
        el.xml = xml_to_string(movie)
        # "im_" prefix marks imported sessions and carries the Plex key for dedup.
        el.session_id = "im_%s_pt" % movie.get('key')
        db.session.merge(el)
        db.session.commit()
    logger.info("Importing viewed Episodes from PMS")
    for episode in plex.getViewedEpisodes():
        # e.g. "Show Title - Episode Title - s01e02"
        eptitle = "%s - %s - s%02de%02d" % (episode.get("grandparentTitle"), episode.get("title"), int(episode.get('parentIndex')), int(episode.get('index')))
        if db.session.query(models.Processed).filter(models.Processed.session_id.like("%" + episode.get('key') + "%")).first():
            logger.debug("skipping import of '%s' because there already is a entry in database" % eptitle)
            continue
        el = models.Processed()
        el.time = datetime.datetime.fromtimestamp(int(episode.get("lastViewedAt"))) - datetime.timedelta(seconds=(int(episode.get("duration")) / 1000))
        el.stopped = datetime.datetime.fromtimestamp(int(episode.get("lastViewedAt")))
        el.user = config.IMPORT_USERNAME
        el.platform = "Imported"
        el.title = eptitle
        el.orig_title = episode.get("grandparentTitle")
        el.orig_title_ep = episode.get("title")
        el.year = episode.get("year")
        el.summary = episode.get("summary")
        el.episode = episode.get('index')
        el.season = episode.get('parentIndex')
        el.notified = 1
        el.progress = 100
        el.duration = episode.get("duration")
        el.xml = xml_to_string(episode)
        el.session_id = "im_%s_pt" % episode.get('key')
        db.session.merge(el)
        db.session.commit()
    return True
def loop():
    """Run the recommendation pass forever, sleeping between iterations."""
    logger.debug("+ RECOMMENDATION THREAD: start")
    processor = RecommendProcessor()
    while True:
        user_ids = RecommendProcessor.get_users_to_recommend()
        processor.process_recommendations(user_ids)
        # sleep N seconds before the next pass
        logger.debug("+ sleep %s" % Config.RECOMMENDATION_PROCESS_INTERVAL)
        sleep(Config.RECOMMENDATION_PROCESS_INTERVAL)
def wrapper(*args, **kwargs):
    """Require a signed-in session user whose authLevel meets the decorator's level."""
    current = session.get('user', None)
    if current is None:
        # Stash the requested path so the login view can bounce back here.
        session['next'] = request.path
        logger.debug('Next: {0}'.format(session.get('next')))
        return redirect(url_for('auth.login'))
    if current['authLevel'] >= level:
        return func(*args, **kwargs)
    logger.info('A user tried to access a higher-level area.')
    abort(403)
def send_message(gcm_url, server_api_key, token, message):
    """POST a data message to a single device via Google Cloud Messaging.

    :param gcm_url: GCM endpoint URL
    :param server_api_key: API key placed in the Authorization header by _headers()
    :param token: target device registration token
    :param message: inserted verbatim as the JSON "data" value — must already be valid JSON
    """
    # NOTE(review): payload is built by string substitution, so a token
    # containing quotes or a non-JSON message produces invalid JSON — consider
    # json.dumps for safety.
    data = '{"to":"%s","data":%s}' % (token, message)
    r = requests.post(gcm_url, headers=_headers(server_api_key), data=to_utf8(data))
    logger.debug(r.content)
    if r.status_code != 200:
        logger.error('Error to GCM Message')
    else:
        # GCM reports a per-request success count in the response body.
        response = json.loads(r.content)
        if response['success'] == 1:
            logger.info('Success to GCM Message')
        else:
            logger.error('Failed to GCM Message')
def get_mb_current_sample_data():
    """Load the advisories sample file and return it wrapped in the
    status/contents envelope the app expects, as a (body, code, headers) tuple."""
    if logger:
        logger.debug("get_mb_current_sample_data Started.")
    raw, ret_code = get_data_file(SC_MB_ADVISORIES_FILE)
    # Wrap the results in the status and contents keys. The app expects this format.
    envelope = {
        "status": {"http_code": ret_code},
        "contents": simplejson.loads(raw),
    }
    body = simplejson.dumps(envelope)
    if logger:
        logger.debug("get_mb_current_sample_data Finished.")
    return (body, ret_code, {"Content-Type": "Application-JSON"})
def fetch_save(symbol):
    """Fetch the current Yahoo Finance quote for *symbol* and persist it.

    Upserts the quote into the symbol's timeseries document for today's date,
    keyed by HH:MM, and also saves it to the 'quotes' collection.
    """
    now = datetime.now()
    day_key = now.strftime('%Y%m%d')
    minute_key = now.strftime('%H:%M')
    timeseries = db['timeseries.%s' % symbol]
    client = Http(timeout=10)
    headers, body = client.request('http://finance.yahoo.com/q?s=' + symbol, 'GET')
    quote = process(body)
    timeseries.update({'_id': day_key}, {'$set': {minute_key: quote}}, upsert=True)
    logger.debug(quote)
    db['quotes'].save(quote)
def transmit_book_to_client(rkey=None):
    """Push the cumulative order book stored under *rkey* in redis to the
    websocket clients on the /client namespace."""
    logger.warning('RKEY:::::::::::::::%s' % rkey)
    cumulative_book = rcon.get(rkey)
    logger.warning('CUMULATIVE BOOK %s' % cumulative_book)
    logger.warning('CUMULATIVE BOOK %s' % type(cumulative_book))
    try:
        # Reuse the value fetched above instead of a second redis round-trip.
        buy_side, sell_side = json.loads(cumulative_book)
        socketio.emit('orderbook update',
                      {'buy_side': buy_side, 'sell_side': sell_side},
                      namespace='/client')
        logger.debug('Sent orderbook volume to client')
    # BUG FIX: `except TypeError, ValueError` caught only TypeError and bound
    # it to the name ValueError; the tuple form catches both (a missing key
    # yields None -> TypeError, malformed JSON -> ValueError).
    except (TypeError, ValueError):
        logger.exception('Failed to decode order book for key %s' % rkey)
def getRSIDbyExtid(ext_id):
    '''!Get rs_id by ext_id

    :param ext_id: external id to resolve through the Mappings table
    :return: status code 200 - rs_id
    :return: status code 404 - rs_id not found
    '''
    # Renamed local (was `map`) to stop shadowing the builtin.
    mapping = Mappings.query.filter_by(ext_id=ext_id).first()
    if mapping is not None:
        receipt = Receipts.query.filter_by(consent_receipt_id=mapping.receipt_id).first()
        if receipt is not None:
            return receipt.rs_id
    # Falls through here when either the mapping or its receipt is missing.
    logger.debug('rs_id not found with ext_id:' + ext_id)
    abort(404, 'rs_id not found with ext_id:' + ext_id)
def deleteEntry(id):
    """Delete expense entry *id* if it belongs to the session user, then
    redirect to the budget overview."""
    # Fetch the appropriate entry from the collection, scoped to the owner so
    # users can only delete their own entries.
    owner_id = ObjectId(session.get('user')['_id']['$oid'])
    entry_id = ObjectId(id)
    expense = Entry.objects(id=entry_id, owner=owner_id).first()
    logger.debug('Trying to delete ({0}, {1})'.format(entry_id, owner_id))
    if expense is None:
        flash('You are not authorized to delete this entry.')
    else:
        logger.debug('Trying to delete expense {0}'.format(expense.id))
        expense.delete()
        flash('Your entry has been deleted.')
    return redirect(url_for('budget.default'))
def login_view(self):
    """Render the admin login page; on a valid submission, sign the user in
    and redirect to the admin index."""
    logger.info('LOGIN')
    form = LoginForm(request.form)
    logger.debug(form)
    if helpers.validate_form_on_submit(form):
        login.login_user(form.get_user())
        if login.current_user.is_authenticated:
            return redirect(url_for('.index'))
    # Fall through to re-rendering the login form with its helper text.
    self._template_args['form'] = form
    self._template_args['link'] = '<p>Input login and pasword for admin.</p>'
    return super(CustomAdminIndexView, self).index()
def recommend_event(self, event, db_user_id, user_tf, now):
    """Score *event* against a user's term frequencies and add a
    Recommendation row to the session."""
    # get the event term frequency and correlate it with the user's profile
    tf_for_event = self.get_event_term_frequency(event.id)
    score = RecommendProcessor.calculate_correlation(user_tf, tf_for_event)
    logger.debug("%s %s: %s" % (event.id, event.headline, score))
    # put recommendation in db
    self.db_session.add(Recommendation(
        user_id=db_user_id,
        event_id=event.id,
        base_correlation=score,
        calculated_time=now,
    ))
def api():
    """JSON-RPC endpoint: accept json/json-rpc content types, execute the
    request through JsonRpc, and return the JSON-encoded result."""
    logger.debug("log:test")
    # Both content types map to the same handler.
    allowed_contents = {
        'application/json-rpc': "json-rpc",
        'application/json': "json-rpc",
    }
    if allowed_contents.get(request.content_type, None):
        # BUG FIX: request.get_json() already returns the parsed object, so
        # wrapping it in json.loads() raised TypeError. Also removed a stray
        # `print "11"` debug leftover.
        jsonData = request.get_json()
        jsonrpc = JsonRpc(jsonData)
        ret = jsonrpc.execute()
        return json.dumps(ret, cls=CjsonEncoder)
    else:
        return "404", 404
def update_character(json_data):
    """Link the comics referenced in a Marvel-API character payload to the
    already-stored Character row, then commit.

    :param json_data: dict with u'id' and u'comics'[u'items']
    """
    character_id = json_data[u'id']  # renamed from `id`, which shadowed the builtin
    character = Character.query.filter_by(id=character_id).first()
    logger.debug(character)
    if character:
        # The numeric comic id is the tail of each resourceURI.
        comic_ids = [
            int(comic[u'resourceURI'].split('/comics/')[1])
            for comic in json_data[u'comics'][u'items']]
        for comic_id in comic_ids:
            comic = Comic.query.filter_by(id=comic_id).first()
            # Only attach comics we know about and haven't linked yet.
            if comic and comic not in character.comics:
                character.comics.append(comic)
        logger.debug('Updated %s', character)
        db.session.commit()
def queue_daemon(self, rv_ttl=500): """ The daemon that listens for incoming orders. Must be run in a separate process. All received orders are stored in the database """ while True: logger.debug('Waiting for orders...') order_form_data = self.redis.blpop(prefixed(self.uuid)) order_form_data = loads(order_form_data[1]) new_order = Order(**order_form_data) self.store_order(new_order) try: response = self.process_order(new_order) logger.debug('Finished processing order.') except Exception, e: logger.exception(e) response = e