def post_menus(restaurant_id: int):
    """Create a new menu item for *restaurant_id* from the JSON request body.

    Returns a 201 response whose body echoes the created item and whose
    Location header points at the new resource.
    """
    session = DBSession()
    try:
        new_item = MenuItem(
            name=request.json["name"],
            description=request.json["description"],
            price=request.json["price"],
            course=request.json["course"],
            restaurant_id=restaurant_id,
        )
        session.add(new_item)
        session.commit()
        return (
            jsonify(
                {
                    "name": new_item.name,
                    "description": new_item.description,
                    "id": new_item.id,
                    "price": new_item.price,
                    "course": new_item.course,
                },
            ),
            201,
            {
                "Location": url_for("api.get_menus", restaurant_id=restaurant_id)
                + f"/{new_item.id}",
            },
        )
    finally:
        # Fix: the session was previously never closed, leaking a pooled
        # connection on every request.
        session.close()
def AddUser(self, req: AddUserRequest, context) -> AddUserResponse:
    """gRPC handler: persist a ProjectUser assignment row and return an
    empty AddUserResponse.
    """
    session = DBSession()
    try:
        user = ProjectUser(
            id=req.userProjectAssignmentId,
            user_id=req.userId,
            project_id=req.projectId,
        )
        session.add(user)
        session.commit()
        return AddUserResponse()
    finally:
        # Fix: previously the session was never closed, leaking a connection
        # per RPC call.
        session.close()
def create_new_restaurant():
    """GET: render the creation form; POST: create the restaurant, flash a
    confirmation, and redirect to the restaurant list."""
    session = DBSession()
    try:
        if request.method == "POST":
            new_restaurant = Restaurant(name=request.form["name"])
            session.add(new_restaurant)
            session.commit()
            flash(f"New restaurant {new_restaurant.name} created")
            return redirect(url_for("show_restaurants"))
        else:
            return render_template("1_new_restaurants.html")
    finally:
        # Fix: session was previously leaked on every request.
        session.close()
def create_new_menu_item(restaurant_id: int):
    """GET: render the new-item form; POST: create the menu item and
    redirect back to the restaurant's menu."""
    session = DBSession()
    try:
        if request.method == "POST":
            new_item = MenuItem(name=request.form["name"], restaurant_id=restaurant_id)
            session.add(new_item)
            session.commit()
            flash("New menu item created!")
            return redirect(url_for("show_menu", restaurant_id=restaurant_id))
        else:
            return render_template("5_new_menu_item.html", restaurant_id=restaurant_id)
    finally:
        # Fix: session was previously leaked on every request.
        session.close()
def spider_opened(self, spider):
    """Record a 'running' SpiderTask for this rule unless one is still open.

    A task counts as open while its end_time is NULL.
    """
    db = DBSession()
    candidate = SpiderTask(
        spider_name=self.name,
        spider_rule_id=self.rule_id,
        start_time=datetime.now(),
        status='running',
    )
    open_tasks = db.query(SpiderTask).filter(
        SpiderTask.spider_rule_id == candidate.spider_rule_id,
        SpiderTask.end_time == None)
    if open_tasks.count() == 0:
        db.add(candidate)
        db.commit()
    db.close()
def unsubscribe(message):
    """Mark the WeChat follower identified by message.source as unsubscribed.

    Creates the record (already unsubscribed) if the openid is unknown.
    """
    db = DBSession()
    follower = db.query(Wechat).filter(
        Wechat.openid == message.source).one_or_none()
    if follower is None:
        # First contact with this openid: create it directly as unsubscribed.
        follower = Wechat(openid=message.source,
                          subscribe=False,
                          unsubscribe_time=datetime.datetime.now(),
                          create_time=datetime.datetime.now())
        db.add(follower)
    else:
        follower.subscribe = False
        follower.unsubscribe_time = datetime.datetime.now()
    db.commit()
    db.close()
    return SuccessReply()
def post_register():
    """Register the logged-in user's display name from the submitted form.

    Requires an authenticated session ('user_id') and a 'name' form field;
    stores the name both in the DB and in the Flask session.
    """
    if 'user_id' not in session or \
       'name' not in request.form:
        # Fix: message previously read 'Invalied Data' (typo).
        return 'Invalid Data'
    name = request.form['name']
    user_id = session['user_id']  # renamed: `id` shadowed the builtin
    user = User()
    user.id = user_id
    user.name = name
    DBSession.add(user)
    DBSession.commit()
    session['name'] = name
    return redirect(url_for('application'))
def set_plot():
    """Create a plot record from the posted x/y/comment/name form fields,
    attributed to the logged-in user."""
    if 'x' not in request.form or \
       'y' not in request.form or \
       'comment' not in request.form or \
       'name' not in request.form:
        return 'Invalid Form'
    # Fix: session['user_id'] was read unconditionally below; an anonymous
    # request raised KeyError (HTTP 500). Guard it like post_register does.
    if 'user_id' not in session:
        return 'Invalid Form'
    plot = Restraurant()  # NOTE: model class name is misspelled upstream; kept as-is
    plot.x = request.form['x']
    plot.y = request.form['y']
    plot.comment = request.form['comment']
    plot.name = request.form['name']
    plot.created_by = session['user_id']
    DBSession.add(plot)
    DBSession.commit()
    return 'ok'
class MysqlPipeline(object):
    """Scrapy pipeline persisting Article items through one SQLAlchemy
    session per spider run."""

    def open_spider(self, spider):
        # One session for the whole crawl; released in close_spider.
        self.session = DBSession()

    def close_spider(self, spider):
        self.session.close()

    def process_item(self, item, spider):
        article = Article(title=item["title"],
                          url=item["url"],
                          body=item["body"],
                          text=item["text"],
                          publish_time=item["publish_time"],
                          source_site=item["source_site"])
        self.session.add(article)
        self.session.commit()
        # Scrapy contract: return the item so later pipelines still see it.
        return item
def CreateInstance(self, request: CreateInstanceRequest, context, claims: TokenClaims) -> CreateInstanceResponse:
    """Provision an OpenStack server with a boot volume for the calling user
    and mirror both resources into the local database.

    Returns a CreateInstanceResponse populated with the flavor details and
    the new instance id.
    """
    session = DBSession()
    user = get_user_from_claims(session, claims)
    client = create_client()
    # NOTE(review): result of list_flavors() is discarded — presumably a
    # warm-up/credential check; confirm it is actually needed.
    client.connection.list_flavors()
    # Creating the instance directly will not return the volume it also creates,
    # so the volume must be manually created
    volume = client.connection.create_volume(size=request.volume, image=request.imageName, bootable=True)
    os_instance = client.connection.create_server(
        request.name,
        image=request.imageName,
        flavor=request.flavorName,
        boot_volume=volume.id,
    )
    # manually get the flavor (create_server only returns the flavor id)
    flavor = client.connection.get_flavor_by_id(os_instance.flavor["id"])
    resp = CreateInstanceResponse()
    resp.flavor.name = flavor.name
    resp.flavor.cpu = flavor.vcpus
    resp.flavor.memory = flavor.ram
    resp.flavor.rootDisk = flavor.disk
    resp.instanceId = os_instance.id
    # Mirror the OpenStack resources locally so ownership can be tracked.
    db_volume = models.Volume(id=volume.id, size=request.volume, owner_id=user.id, instance_id=os_instance.id)
    db_instance = models.Instance(id=os_instance.id, image_name=request.imageName, owner_id=user.id)
    session.add(db_instance)
    session.add(db_volume)
    session.commit()
    # NOTE(review): the session is never closed and a failure mid-way leaves
    # orphaned OpenStack resources — confirm cleanup is handled elsewhere.
    return resp
def edit_menu_item(restaurant_id: int, menu_id: int):
    """GET: render the edit form; POST: rename the menu item and redirect
    back to the restaurant's menu."""
    session = DBSession()
    try:
        # NOTE(review): one_or_none() may return None for an unknown menu_id,
        # which then raises AttributeError below — confirm routing guarantees
        # the id exists.
        item_to_edit = session.query(MenuItem).filter_by(id=menu_id).one_or_none()
        if request.method == "POST":
            if request.form["name"]:
                item_to_edit.name = request.form["name"]
            session.add(item_to_edit)
            session.commit()
            flash(f"Menu item successfully edited to {item_to_edit.name}")
            return redirect(url_for("show_menu", restaurant_id=restaurant_id))
        else:
            return render_template(
                "6_edit_menu_item.html",
                restaurant_id=restaurant_id,
                item=item_to_edit,
            )
    finally:
        # Fix: session was previously leaked on every request.
        session.close()
def edit_restaurant(restaurant_id: int):
    """GET: render the edit form; POST: rename the restaurant and redirect
    to the restaurant list."""
    session = DBSession()
    try:
        # NOTE(review): one_or_none() may return None for an unknown id,
        # raising AttributeError below — confirm routing guarantees existence.
        restaurant_to_edit = (
            session.query(Restaurant).filter_by(id=restaurant_id).one_or_none()
        )
        if request.method == "POST":
            if request.form["name"]:
                restaurant_to_edit.name = request.form["name"]
            session.add(restaurant_to_edit)
            session.commit()
            flash(f"Restaurant successfully edited to {restaurant_to_edit.name}")
            return redirect(url_for("show_restaurants"))
        else:
            return render_template(
                "2_edit_restaurants.html",
                restaurant_id=restaurant_id,
                restaurant=restaurant_to_edit,
            )
    finally:
        # Fix: session was previously leaked on every request.
        session.close()
def cast_vote(person_id, movie_id):
    """
    Submit a vote, one user to one movie.
    Return whether the vote was cast.
    """
    prior_vote = (DBSession.query(Vote)
                  .filter_by(person_id=person_id, movie_id=movie_id)
                  .first())  # None when this person has not voted yet
    if prior_vote is None:
        # Record the new vote.
        DBSession.add(Vote(person_id=person_id, movie_id=movie_id))
        DBSession.commit()
        return json.dumps({"result": "OK", "message": "Vote registered."})
    payload = {
        "result": "ERROR",
        "message": "Person has already voted for this movie."
    }
    # HTTP status code 409 means "conflict"
    return json.dumps(payload), 409
def db_storage(recipients, message, user_id, sender):
    """ This method is responsible to store Sms and related fields provided by client in database """
    sms_ids = ''  # contains all message ids that are sended by client to recipient of same or different network.
    #smses_ids = '' # contains only message ids that are sended by client to recipient of some other network.
    sender = sender.decode(encoding='ascii')
    recipients = recipients.decode(encoding='ascii').splitlines()
    selected_package = selected_packages(user_id)
    for recipient in recipients:
        processed_fields = process_outgoing_sms(sender, user_id, recipient)
        # storing values to database
        S = Sms()
        S.sms_type = 'outgoing'
        S.sms_from = processed_fields['sender_number']
        S.sms_to = recipient
        S.schedule_delivery_time = datetime.date.today()
        # messages are valid for one day after the scheduled delivery date
        S.validity_period = datetime.date.today()+datetime.timedelta(days=1)
        S.msg = message
        S.timestamp = datetime.date.today()
        S.status = 'scheduled'
        S.msg_type = 'text'
        S.user_id = user_id
        S.package_name = selected_package['package_name']
        S.rates = selected_package['rates']
        S.target_network = processed_fields['target_network']  # process sms file would use it to send to respective network of which server is.
        S.client_type = 'smpp'
        DBSession.add(S)
        transaction.commit()
        # NOTE(review): fetching the "last" row by negative indexing assumes a
        # single writer; under concurrent inserts this can return another
        # client's row — confirm the single-writer assumption.
        sms = DBSession.query(Sms)[-1]  # to send id to the client for ancilliary operations and querying.
        sms_ids = sms_ids + str(sms.id) + '\n'
        if processed_fields['target_network'] != processed_fields['source_network']:
            # if destination and source network is different
            # connect to the destination's smpp server.
            connect_info(recipient, message, processed_fields['target_network'], sms.id, processed_fields['sender_number'])
            #smses_ids = smses_ids + str(s) + '\n'
    #updating_status(smses_ids)
    return(sms_ids)
def process_steps(test_id, tests_cache, registry, step_ques, finish_que, workers_last_activity, timers):
    """Drain the buffered step records for one test, fold them into the
    test's aggregated result, and finalize the test once every worker has
    reported FINISH_TEST or has gone silent past WORKERS_TIMEOUT.

    NOTE(review): Python 2 code (print statement, dict.iteritems).
    """
    #print "tick", test_id
    #TODO what if this gets interrupted by kill during io?
    #TODO dont forget actual finish time
    test = tests_cache[test_id]
    #new_rx = get_rx()
    #rx = new_rx - test['rx_snapshot']
    #test['rx_snapshot'] = new_rx
    #new_tx = get_tx()
    #tx = new_tx - test['tx_snapshot']
    #test['tx_snapshot'] = new_tx
    connect_errors = []
    while True:
        steps = step_ques[test_id].next()
        if steps:
            # Per-tick aggregation buffers, flushed into test['result'] below.
            buf_statuses = defaultdict(int)
            buf_resp_time = defaultdict(list)
            buf_conn_time = defaultdict(list)
            buf_errors = defaultdict(int)
            buf_concur_users_num_max = 0
            buf_concur_users_num_min = 0
            buf_concur_conns_num_min = 0
            buf_concur_conns_num_max = 0
            buf_start_session = 0
            buf_request_sent = 0
            # A tick carrying nothing but FINISH_TEST markers should not
            # append an (empty) data point to the result series.
            is_finish_only_step = all(
                len(data) == 1 and data[0]['type'] == stypes.FINISH_TEST
                for _node_id, data in steps.items())
            for node_id, data in steps.items():
                node_id = int(node_id)
                # Any message counts as worker liveness.
                workers_last_activity[test_id][node_id] = time.time()
                for rec in data:
                    data_type = rec['type']
                    if data_type == stypes.RESPONSE_STATUS:
                        grp, status = rec['value']
                        if int(status) not in (200, 201, 202):
                            #TODO move to test logic
                            test['resp_bad_statuses_total'] += 1
                        else:
                            test['resp_successful_total'] += 1
                        status = str(status) + " " + grp
                        tests_cache[test_id]['resp_statuses'].add(status)
                        buf_statuses[status] += 1
                    elif data_type == stypes.RESPONSE_TIME:
                        grp_name, resp_time = rec['value']
                        buf_resp_time[grp_name].append(resp_time)
                        tests_cache[test_id]['groups'].add(grp_name)
                    elif data_type == stypes.CONNECT_TIME:
                        test['conns_total'] += 1
                        grp_name, timelen = rec['value']
                        buf_conn_time[grp_name].append(timelen)
                        tests_cache[test_id]['groups'].add(grp_name)
                    elif data_type == stypes.CONCUR_USERS_NUM_MAX:
                        buf_concur_users_num_max += rec['value']
                    elif data_type == stypes.CONCUR_USERS_NUM_MIN:
                        buf_concur_users_num_min += rec['value']
                    elif data_type == stypes.CONCUR_CONNS_NUM_MIN:
                        buf_concur_conns_num_min += rec['value']
                    elif data_type == stypes.CONCUR_CONNS_NUM_MAX:
                        buf_concur_conns_num_max += rec['value']
                    elif data_type == stypes.START_SESSION:
                        buf_start_session += rec['value']
                    elif data_type == stypes.REQUEST_SENT:
                        test['reqs_total'] += rec['value']
                        buf_request_sent += rec['value']
                    elif data_type == stypes.CONNECT_ERROR:
                        test['conns_errors_total'] += 1
                        try:
                            tests_cache[test_id]['errors'].add(
                                "connect " + rec['value']['msg'])
                        except:
                            # Dump the offending record before re-raising.
                            print "rec:", rec
                            raise
                        buf_errors[rec['value']['msg']] += 1
                        if "not enough ports" not in rec['value']['msg']:
                            connect_errors.append(
                                u'%s\t%s\t%s' % (rec['value']['time'],
                                                 rec['value']['ip'],
                                                 rec['value']['msg']))
                    elif data_type == stypes.RESPONSE_ERROR:
                        if "timeout" in rec['value']:
                            test['resp_timeouts_total'] += 1
                        else:
                            test['resp_errors_total'] += 1
                        ern = "response " + rec['value']
                        tests_cache[test_id]['errors'].add(ern)
                        buf_errors[ern] += 1
                    elif data_type == stypes.FINISH_TEST:
                        finish_que[test_id][node_id] = True
                    else:
                        raise NotImplementedError(rec['type'])
            if not is_finish_only_step:
                res = test['result']
                # Median and median-absolute-deviation per group for
                # response times, then connect times.
                rt = {}
                rm = {}
                for grp, times in buf_resp_time.iteritems():
                    resp_time_med = util.get_median(times)
                    rt[grp] = resp_time_med
                    rm[grp] = util.get_median(
                        abs(t - resp_time_med) for t in times)
                res['resp_time'].append(rt)
                res['resp_time_meav'].append(rm)  #TODO rename to med_abs_dev
                rt = {}
                rm = {}
                for grp, times in buf_conn_time.iteritems():
                    resp_time_med = util.get_median(times)
                    rt[grp] = resp_time_med
                    rm[grp] = util.get_median(
                        abs(t - resp_time_med) for t in times)
                res['conn_time'].append(rt)
                res['conn_time_meav'].append(rm)  #TODO rename to med_abs_dev
                res['start_session'].append(buf_start_session)
                res['resp_status'].append(buf_statuses)
                res['req_sent'].append(buf_request_sent)
                res['errors'].append(buf_errors)
                res['concur_users_num_max'].append(buf_concur_users_num_max)
                res['concur_users_num_min'].append(buf_concur_users_num_min)
                res['concur_conns_num_min'].append(buf_concur_conns_num_min)
                res['concur_conns_num_max'].append(buf_concur_conns_num_max)
                #res['network_received'].append(float(rx) / 1024.0)
                #res['network_sent'].append(float(tx) / 1024.0)
        else:
            break
    if connect_errors and test['write_connect_error_log']:
        with open("conn_err_%s.log" % test_id, 'ab') as f:
            f.write("\n".join(connect_errors).encode('utf-8') + "\n")
    is_crashed = False
    now = time.time()
    for _node_id, ts_last in workers_last_activity[test_id].items():
        if now - ts_last >= WORKERS_TIMEOUT:
            # A silent worker marks the whole test as crashed/finished.
            is_crashed = True
            break
    is_finished = len(finish_que[test_id]) == test['worker_num']
    if is_crashed:
        is_finished = True
    if is_finished:
        tests_cache[test_id]['finished'] = (
            now - WORKERS_TIMEOUT) if is_crashed else now
        #DBSession.query(Test).filter_by(id = test_id).update({Test.data: dbdump(tests_cache[test_id])})
        t = Test.query.filter_by(id=test_id).first()
        if not t:
            raise Exception(
                "no test with id = %s (maybe it was deleted before finish timeout happened?)"
                % test_id)
        t.data = dbdump(tests_cache[test_id])
        DBSession.add(t)
        DBSession.commit()
        if registry.has_listeners:
            registry.notify(OnFinishTest(t, registry.settings))
        # Tear down all per-test bookkeeping and the timer greenlet.
        del tests_cache[test_id]
        del finish_que[test_id]
        del step_ques[test_id]
        del workers_last_activity[test_id]
        gevent.kill(timers[test_id])
        del timers[test_id]
        log.info("finished test #%s%s" % (test_id, " (timeout)" if is_crashed else ""))
def check_smart_trades_for_user(user):
    """Poll 3Commas smart trades for *user* in an endless ~5s loop; when a
    take-profit step is processed, raise the stop loss and remember the step
    in ProcessedTarget so it is handled only once."""
    while True:
        session = DBSession()
        try:
            account = session.query(Account).filter_by(user_name=user.username,
                                                       type='binance').first()
            if account.active == False:
                session.close()
                # NOTE(review): no sleep on this path — busy loop while the
                # account is inactive; confirm this is intended.
                continue
            apiKey = account.api_key
            apiSecret = account.api_secret
            account_name = account.name.strip()
            profile = session.query(Profile).filter_by(user_id=user.id).first()
            if profile:
                # auto_close_timer is stored as "HH:MM"; convert to minutes.
                auto_close_timer = profile.auto_close_timer.split(":")
                auto_close_timer = int(auto_close_timer[0]) * 60 + int(
                    auto_close_timer[1])
            else:
                auto_close_timer = 720  #minutes
            bot = ThreeCommas(apiKey, apiSecret)
            try:
                smart_trades = bot.get_smart_trades(account_name)
            except:
                # NOTE(review): if this raises, smart_trades stays unbound and
                # the check below raises NameError (caught by the outer
                # except) — confirm whether that is acceptable.
                traceback.print_exc()
                #send_debug_email("While fetching smart trades", user.user_name, traceback.format_exc())
            if smart_trades is None:
                print("Something wrong with: ", user.username)
                # NOTE(review): this continue skips session.close() — leak.
                time.sleep(5)
                continue
            for smart_trade in smart_trades:
                if smart_trade['status'] == "buy_order_placed":
                    # Cancel stale, still-unfilled buy orders after the
                    # configured auto-close window (timestamps are UTC).
                    created_at = datetime.datetime.strptime(
                        smart_trade["created_at"], '%Y-%m-%dT%H:%M:%S.%fZ')
                    now = datetime.datetime.utcnow()
                    minutes_diff = (now - created_at).total_seconds() / 60
                    if minutes_diff >= auto_close_timer:
                        bot.cancel_smart_trade(smart_trade['id'])
                    continue
                updated_targets = []
                processed_target = None
                for target in smart_trade['take_profit_steps']:
                    # Re-submit every step unchanged (price_method forced to
                    # 'bid'); remember the newest step not yet handled.
                    prev_target = {
                        'percent': target['percent'],
                        'price': target['price'],
                        'price_method': 'bid',
                        'position': target['position']
                    }
                    updated_targets.append(prev_target)
                    if target['status'] == "processed":
                        already_updated = session.query(
                            ProcessedTarget).filter_by(
                                user_name=user.username,
                                trade_id=smart_trade['id'],
                                step_id=target['id']).first()
                        if already_updated:
                            continue
                        processed_target = target
                if processed_target is not None:
                    # Move the stop loss up to the buy price (first target)
                    # or to the previous target's price.
                    stop_loss = None
                    if processed_target['position'] == 1:
                        stop_loss = smart_trade['buy_price']
                    else:
                        for step in smart_trade['take_profit_steps']:
                            if processed_target['position'] - 1 == step[
                                    'position']:
                                stop_loss = step['price']
                                break
                    update = bot.update_smart_trade(smart_trade['id'],
                                                    stop_loss,
                                                    updated_targets)
                    try:
                        if update['id']:
                            processed = ProcessedTarget()
                            processed.user_name = user.username
                            processed.trade_id = update['id']
                            processed.step_id = processed_target['id']
                            session.add(processed)
                            session.commit()
                            #send_update_email(user.user_name, "trade_id = {}, step = {}, stop_loss = {}".format(update['id'], processed_target['position'], stop_loss))
                    except:
                        traceback.print_exc()
                        # Even on failure, record the step so it is not
                        # retried forever.
                        processed = ProcessedTarget()
                        processed.user_name = user.username
                        processed.trade_id = smart_trade['id']
                        processed.step_id = processed_target['id']
                        session.add(processed)
                        session.commit()
                        #send_debug_email("While updating stop loss after fetching smart trades", user.user_name, json.dumps(update) + "\n" + traceback.format_exc())
        except:
            # Broad catch keeps the polling loop alive no matter what.
            traceback.print_exc()
            pass
        session.close()
        time.sleep(5)
def create_trade():
    """Create a Signal from the posted form and fan out smart-trade creation
    to every active user on daemon worker threads (batched by
    NUM_WORKER_THREADS)."""
    session = DBSession()
    try:
        pair = request.form.get('pair')
        if pair.endswith("BTC"):
            # Normalise e.g. "XXX_BTC" to "BTC_XXX".
            parts = pair.split("_")
            pair = parts[1] + "_" + parts[0]
        buy_price = request.form.get('buy_price')
        stop_loss = request.form.get('stop_loss')
        targets = []
        tp1 = request.form.get('tp1')
        tp2 = request.form.get('tp2')
        tp3 = request.form.get('tp3')
        tp4 = request.form.get('tp4')
        note = request.form.get('note')
        code = request.form.get('code')
        # Fix: the original compared with `is not ""` (identity), which only
        # worked via CPython string interning and is a SyntaxWarning on 3.8+.
        if tp1 is not None and tp1 != "":
            targets.append(tp1)
        else:
            return jsonify({"status": "error", "message": "Target 1 is required."})
        if tp2 is not None and tp2 != "":
            targets.append(tp2)
        if tp3 is not None and tp3 != "":
            targets.append(tp3)
        if tp4 is not None and tp4 != "":
            targets.append(tp4)
        # Dedupe: identical signal already recorded for this channel?
        signal = session.query(Signal).filter_by(pair=pair,
                                                 buy_price=buy_price,
                                                 stop_loss=stop_loss,
                                                 tp1=tp1,
                                                 tp2=tp2,
                                                 tp3=tp3,
                                                 tp4=tp4,
                                                 note=note,
                                                 channel=code).first()
        if signal:
            return jsonify({"status": "error", "message": "Trade already taken"})
        signal = Signal(pair=pair, buy_price=buy_price, stop_loss=stop_loss,
                        tp1=tp1, tp2=tp2, tp3=tp3, tp4=tp4, note=note,
                        channel=code)
        session.add(signal)
        session.commit()
        users = session.query(User).filter_by(is_active=True).all()
        threads = []
        stepSize = get_buy_step_size(pair.split("_")[1])
        for user in users:
            if len(threads) >= NUM_WORKER_THREADS:
                # Throttle: wait for the current batch before spawning more.
                print("waiting for threads to join")
                for thread in threads:
                    thread.join()
                threads = []
            thread = Thread(target=create_smart_trade,
                            args=(user, pair, buy_price, targets, stop_loss,
                                  note, signal.id, code, stepSize))
            thread.daemon = True
            thread.start()
            threads.append(thread)
        return jsonify({
            "status": "ok",
            "message": "Creating smart orders started."
        })
    finally:
        # Fix: the early error returns previously leaked the session.
        session.close()
def create_smart_trade(user, pair, buy_price, targets, stop_loss, note,
                       signal_id, code, stepSize):
    """Worker-thread body: size and place a 3Commas smart trade for one user
    and persist the API response as a Trade row.

    Silently returns when the account/channel is inactive or the user has no
    profile; trade count and balance limits may replace the order with an
    "Ignored" Trade record.
    """
    try:
        session = DBSession()
        account = session.query(Account).filter_by(user_name=user.username,
                                                   type='binance').first()
        if account.active == False:
            session.close()
            return
        apiKey = account.api_key
        apiSecret = account.api_secret
        bot = ThreeCommas(apiKey, apiSecret)
        account_name = account.name.strip()
        channel = session.query(Channel).filter_by(user_name=user.username,
                                                   code=code).first()
        if channel.active == False:
            session.close()
            return
        profile = session.query(Profile).filter_by(user_id=user.id).first()
        if profile:
            max_trades_per_coin = profile.max_trades_per_coin
            coin = pair.split("_")[-1]
            total_trades = bot.get_total_trades(coin, account_name)
            if total_trades >= max_trades_per_coin:
                # Record why the signal was skipped instead of trading.
                db_trade = Trade(
                    signal_id=signal_id,
                    channel=code,
                    user_name=user.username,
                    response=json.dumps({
                        "status": "Ignored",
                        "message": "max_trades_per_coin is reached for {}".format(pair)
                    }))
                session.add(db_trade)
                session.commit()
                session.close()
                return
        else:
            session.close()
            return
        risk = channel.risk_percent
        allowed = channel.allowed_percent
        base_asset = pair.split("_")[0]
        balance = bot.get_balance(account_name, base_asset)
        # Portfolio value this channel may use.
        total_value = float(balance['total_btc_value']) * (allowed / 100)
        if float(buy_price) > 0.0001:
            # Position size = (risk% of allowed value) / distance-to-stop,
            # converted to coin units and floored to 2 decimals.
            buy_amount_total_potfolio = math.floor(
                ((total_value * (float(risk) / 100)) /
                 (abs((float(stop_loss) / float(buy_price)) - 1)) /
                 float(buy_price)) * 100) / 100
            buy_amount_available_btc = math.floor(
                (float(balance['total_available']) / float(buy_price)) * 100) / 100
            min_amount = round(0.0011 * len(targets) / float(buy_price), 2)
        else:
            # Very cheap coins: same formulas but floored to whole units.
            buy_amount_total_potfolio = math.floor(
                (total_value * (float(risk) / 100)) /
                (abs((float(stop_loss) / float(buy_price)) - 1)) /
                float(buy_price))
            buy_amount_available_btc = math.floor(
                float(balance['total_available']) / float(buy_price))
            min_amount = round(0.0011 * len(targets) / float(buy_price), 0)
        buy_amount = buy_amount_total_potfolio
        if float(balance['total_available']
                 ) < buy_amount_total_potfolio * float(buy_price):
            # Not enough free balance for the risk-based size: fall back.
            buy_amount = buy_amount_available_btc
        buy_amount = max(buy_amount, min_amount)
        if user.username == "bot_refrence_user":
            # Reference account always trades the minimum notional size.
            if float(buy_price) > 0.00011:
                buy_amount = round((0.0011 * len(targets)) / float(buy_price), 2)
            else:
                buy_amount = math.ceil(
                    (0.0011 * len(targets)) / float(buy_price))
        # Snap the amount to the exchange lot-size step.
        buy_amount = format_value(buy_amount, stepSize)
        trade = bot.create_smart_trade(account_name=account_name,
                                       pair=pair,
                                       units_to_buy=buy_amount,
                                       buy_price=buy_price,
                                       targets=targets,
                                       stop_loss=stop_loss,
                                       note=note)
        db_trade = Trade(signal_id=signal_id,
                         channel=code,
                         user_name=user.username,
                         response=json.dumps(trade))
        session.add(db_trade)
        session.commit()
        session.close()
    except:
        # Worker thread must never propagate; log and release the session.
        traceback.print_exc()
        session.close()
def subscribe(message):
    """Handle a WeChat 'subscribe' event: upsert the follower's profile from
    the WeChat user-info API, then push a welcome template message."""
    template_id = 'SO8Yjl0gcmNI0HW-pzgkpJqlPz0GV28llZNVoBjsspA'
    user = myrobot.client.get_user_info(message.source)
    if user:
        session = DBSession()
        wechat = session.query(Wechat).filter(
            Wechat.openid == user['openid']).one_or_none()
        if wechat:
            # Returning follower: refresh every stored profile field.
            wechat.subscribe = True
            wechat.nickname = user['nickname']
            wechat.sex = user.get('sex', 0)
            wechat.city = user.get('city')
            wechat.country = user.get('country')
            wechat.province = user.get('province')
            wechat.language = user.get('language')
            wechat.headimgurl = user.get('headimgurl')
            wechat.subscribe_time = datetime.datetime.fromtimestamp(
                int(user['subscribe_time']))
            wechat.unionid = user.get('unionid')
            wechat.remark = user.get('remark')
            wechat.groupid = user.get('groupid')
            wechat.tagid_list = ','.join(
                [str(x) for x in user.get('tagid_list', [])])
            wechat.subscribe_scene = user.get('subscribe_scene')
            wechat.qr_scene_str = user.get('qr_scene_str')
        else:
            # First-time follower: create the full record.
            wechat = Wechat(
                openid=user['openid'],
                nickname=user['nickname'],
                subscribe=user['subscribe'],
                sex=user.get('sex', 0),
                city=user.get('city'),
                country=user.get('country'),
                province=user.get('province'),
                language=user.get('language'),
                headimgurl=user.get('headimgurl'),
                subscribe_time=datetime.datetime.fromtimestamp(
                    int(user['subscribe_time'])),
                unionid=user.get('unionid'),
                remark=user.get('remark'),
                groupid=user.get('groupid'),
                tagid_list=','.join([str(x)
                                     for x in user.get('tagid_list', [])]),
                subscribe_scene=user.get('subscribe_scene'),
                qr_scene_str=user.get('qr_scene_str'),
                create_time=datetime.datetime.now()
            )
            session.add(wechat)
        session.commit()
        session.close()
        # Send the welcome template message (Chinese copy kept verbatim).
        # NOTE(review): the response `r` is never checked.
        r = myrobot.client.post(
            url="https://api.weixin.qq.com/cgi-bin/message/template/send",
            data={
                "touser": message.source,
                "template_id": template_id,
                "data": {
                    "first": {
                        "value": "欢迎关注'千济方开发者开台'公众号",
                        "color": "#173177"
                    },
                    "keyword1": {
                        "value": "你可点击进入,进行账号绑定.",
                        "color": "#173177"
                    },
                    "keyword2": {
                        "value": "13888888888",
                        "color": "#173177"
                    },
                    "remark": {
                        "value": "如有疑问,请联系小孟",
                        "color": "#173177"
                    }
                },
                "miniprogram": {
                    "appid": "wx79edc80703771261",
                    #"pagepath": "pages/monitor/monitor"
                }
            }
        )
        return SuccessReply()
        # return 'Hello %s!' % user['nickname']
    else:
        return 'Hello My Friend!'
def find_zufang(area, url, start):
    """Scrape one paginated listing of rental posts for *area*, storing each
    valid post as a Dbzf row.

    Returns False on fetch failure, 403, or a missing listing table;
    True otherwise.
    """
    res = None
    proxy_url = proxy_api.get()
    # Fix: the key was 'https:' (trailing colon), so requests never matched
    # the scheme and the proxy was silently unused.
    proxies = {
        'https': "https://" + proxy_url
    }
    headers = HttpUtil.getHeaders()
    try:
        url = "%s?start=%s" % (url, start)
        logging.info("start spider [page:{url}] [proxy:{proxy}]".format(url=url, proxy=proxy_url))
        res = requests.get(url, headers=headers, proxies=proxies, timeout=10)
    except Exception as e:
        logging.error(e)
        return False
    if res is None:
        return False
    elif res.status_code == 403:
        # Proxy is burned; drop it from the pool.
        logging.info("403 forbidden : " + proxy_url)
        proxy_api.delete(proxy_url)
        return False
    else:
        bs = BeautifulSoup(res.text, "lxml")
        table = bs.find("table", {"class": "olt"})
        if table:
            rows = table.findAll("tr", {"class": ""})
            if rows:
                for row in rows:
                    session = DBSession()
                    try:
                        tds = row.findAll("td")
                        url = tds[0].find("a").get("href")
                        publish_time = None
                        # Fix: `content` was unbound when urlopen raised,
                        # causing a NameError at the check below.
                        content = None
                        try:
                            content = urlopen(url)
                        except Exception as e:
                            logging.error(e)
                        if content is not None:
                            try:
                                bsContentObj = BeautifulSoup(content.read(), 'lxml')
                                publish_time = str2datetime(
                                    bsContentObj.find("h3").find("span", {"class": "color-green"}).get_text())
                            except Exception as e:
                                logging.error(e)
                        urlSplited = url.split("/")
                        article_id = urlSplited[len(urlSplited) - 2]
                        title = tds[0].find("a").get("title")
                        author_url = tds[1].find("a").get("href")
                        if not validate(title):
                            continue
                        item = Dbzf(city='广州市', area=area, title=title,
                                    url=url, article_id=article_id,
                                    author_url=author_url,
                                    publish_time=publish_time,
                                    create_time=getNow())
                        try:
                            session.add(item)
                            session.commit()
                        except Exception as e:
                            logging.error(e)
                    finally:
                        # Fix: previously one session leaked per row.
                        session.close()
        else:
            logging.info("table not found")
            return False
    return True
# Ad-hoc demo/smoke-test block: build one hand-crafted outgoing Sms row and
# commit it via the transaction manager. The commented-out code below was
# earlier experimentation with prefix-to-network lookups and SMPP PDU decoding.
S = Sms()
S.sms_type = 'outgoing'
S.sms_from = '+9233365195924'
S.sms_to = '+923366767999'
S.schedule_delivery_time = d  # give date 2nd december
S.validity_period = d+datetime.timedelta(days=1)
S.msg = "dont disturb.."
S.timestamp = d
S.status = 'delivered'
S.msg_type = 'text'
S.user_id = 'ASMA'
S.package_name = 'dhamaka'
S.rates = 0.0
S.target_network = 'ufone'  # process sms file would use it to send to respective network of which server is.
S.client_type = 'smpp'
DBSession.add(S)
#user = DBSession.query(User_Number).filter_by(user_id='ASMA').first() # user refers to normal user
#cell_number = user.cell_number
#source_prefix = cell_number[0:6]
#dest_prefix = '+92300'
#source_network = DBSession.query(Prefix_Match).filter_by(prefix=source_prefix).first() # t_user refers to network
#dest_network = DBSession.query(Prefix_Match).filter_by(prefix=dest_prefix).first() # t_user refers to network
#print(source_network.network)
#print(dest_network.network)
transaction.commit()
#data = b'\x00\x00\x00/\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01SMPP3TEST\x00secret08\x00' + \
#b'SUBMIT1\x00P\x01\x01\x00'
#P = BindTransmitter.decode(data)
#print(DBSession.query(User).count())
#system=P.system_id.value.decode(encoding='ascii')
#for U in DBSession.query(User).all():
def process_steps (test_id, tests_cache, registry, step_ques, finish_que, workers_last_activity, timers):
    """Drain the buffered step records for one test, fold them into the
    test's aggregated result, and finalize the test once every worker has
    reported FINISH_TEST or has gone silent past WORKERS_TIMEOUT.

    NOTE(review): Python 2 code (print statement, dict.iteritems); this is a
    near-duplicate of the earlier process_steps definition in this file.
    """
    #print "tick", test_id
    #TODO what if this gets interrupted by kill during io?
    #TODO dont forget actual finish time
    test = tests_cache[test_id]
    #new_rx = get_rx()
    #rx = new_rx - test['rx_snapshot']
    #test['rx_snapshot'] = new_rx
    #new_tx = get_tx()
    #tx = new_tx - test['tx_snapshot']
    #test['tx_snapshot'] = new_tx
    connect_errors = []
    while True:
        steps = step_ques[test_id].next()
        if steps:
            # Per-tick aggregation buffers, flushed into test['result'] below.
            buf_statuses = defaultdict(int)
            buf_resp_time = defaultdict(list)
            buf_conn_time = defaultdict(list)
            buf_errors = defaultdict(int)
            buf_concur_users_num_max = 0
            buf_concur_users_num_min = 0
            buf_concur_conns_num_min = 0
            buf_concur_conns_num_max = 0
            buf_start_session = 0
            buf_request_sent = 0
            # A tick carrying nothing but FINISH_TEST markers should not
            # append an (empty) data point to the result series.
            is_finish_only_step = all(len(data) == 1 and
                                      data[0]['type'] == stypes.FINISH_TEST
                                      for _node_id, data in steps.items())
            for node_id, data in steps.items():
                node_id = int(node_id)
                # Any message counts as worker liveness.
                workers_last_activity[test_id][node_id] = time.time()
                for rec in data:
                    data_type = rec['type']
                    if data_type == stypes.RESPONSE_STATUS:
                        grp, status = rec['value']
                        if int(status) not in (200, 201, 202):
                            #TODO move to test logic
                            test['resp_bad_statuses_total'] += 1
                        else:
                            test['resp_successful_total'] += 1
                        status = str(status) + " " + grp
                        tests_cache[test_id]['resp_statuses'].add(status)
                        buf_statuses[status] += 1
                    elif data_type == stypes.RESPONSE_TIME:
                        grp_name, resp_time = rec['value']
                        buf_resp_time[grp_name].append(resp_time)
                        tests_cache[test_id]['groups'].add(grp_name)
                    elif data_type == stypes.CONNECT_TIME:
                        test['conns_total'] += 1
                        grp_name, timelen = rec['value']
                        buf_conn_time[grp_name].append(timelen)
                        tests_cache[test_id]['groups'].add(grp_name)
                    elif data_type == stypes.CONCUR_USERS_NUM_MAX:
                        buf_concur_users_num_max += rec['value']
                    elif data_type == stypes.CONCUR_USERS_NUM_MIN:
                        buf_concur_users_num_min += rec['value']
                    elif data_type == stypes.CONCUR_CONNS_NUM_MIN:
                        buf_concur_conns_num_min += rec['value']
                    elif data_type == stypes.CONCUR_CONNS_NUM_MAX:
                        buf_concur_conns_num_max += rec['value']
                    elif data_type == stypes.START_SESSION:
                        buf_start_session += rec['value']
                    elif data_type == stypes.REQUEST_SENT:
                        test['reqs_total'] += rec['value']
                        buf_request_sent += rec['value']
                    elif data_type == stypes.CONNECT_ERROR:
                        test['conns_errors_total'] += 1
                        try:
                            tests_cache[test_id]['errors'].add("connect " + rec['value']['msg'])
                        except:
                            # Dump the offending record before re-raising.
                            print "rec:", rec
                            raise
                        buf_errors[rec['value']['msg']] += 1
                        if "not enough ports" not in rec['value']['msg']:
                            connect_errors.append(u'%s\t%s\t%s' % (rec['value']['time'],
                                                                  rec['value']['ip'],
                                                                  rec['value']['msg']))
                    elif data_type == stypes.RESPONSE_ERROR:
                        if "timeout" in rec['value']:
                            test['resp_timeouts_total'] += 1
                        else:
                            test['resp_errors_total'] += 1
                        ern = "response " + rec['value']
                        tests_cache[test_id]['errors'].add(ern)
                        buf_errors[ern] += 1
                    elif data_type == stypes.FINISH_TEST:
                        finish_que[test_id][node_id] = True
                    else:
                        raise NotImplementedError(rec['type'])
            if not is_finish_only_step:
                res = test['result']
                # Median and median-absolute-deviation per group for
                # response times, then connect times.
                rt = {}
                rm = {}
                for grp, times in buf_resp_time.iteritems():
                    resp_time_med = util.get_median(times)
                    rt[grp] = resp_time_med
                    rm[grp] = util.get_median(abs(t - resp_time_med) for t in times)
                res['resp_time'].append(rt)
                res['resp_time_meav'].append(rm)  #TODO rename to med_abs_dev
                rt = {}
                rm = {}
                for grp, times in buf_conn_time.iteritems():
                    resp_time_med = util.get_median(times)
                    rt[grp] = resp_time_med
                    rm[grp] = util.get_median(abs(t - resp_time_med) for t in times)
                res['conn_time'].append(rt)
                res['conn_time_meav'].append(rm)  #TODO rename to med_abs_dev
                res['start_session'].append(buf_start_session)
                res['resp_status'].append(buf_statuses)
                res['req_sent'].append(buf_request_sent)
                res['errors'].append(buf_errors)
                res['concur_users_num_max'].append(buf_concur_users_num_max)
                res['concur_users_num_min'].append(buf_concur_users_num_min)
                res['concur_conns_num_min'].append(buf_concur_conns_num_min)
                res['concur_conns_num_max'].append(buf_concur_conns_num_max)
                #res['network_received'].append(float(rx) / 1024.0)
                #res['network_sent'].append(float(tx) / 1024.0)
        else:
            break
    if connect_errors and test['write_connect_error_log']:
        with open("conn_err_%s.log" % test_id, 'ab') as f:
            f.write("\n".join(connect_errors).encode('utf-8') + "\n")
    is_crashed = False
    now = time.time()
    for _node_id, ts_last in workers_last_activity[test_id].items():
        if now - ts_last >= WORKERS_TIMEOUT:
            # A silent worker marks the whole test as crashed/finished.
            is_crashed = True
            break
    is_finished = len(finish_que[test_id]) == test['worker_num']
    if is_crashed:
        is_finished = True
    if is_finished:
        tests_cache[test_id]['finished'] = (now - WORKERS_TIMEOUT) if is_crashed else now
        #DBSession.query(Test).filter_by(id = test_id).update({Test.data: dbdump(tests_cache[test_id])})
        t = Test.query.filter_by(id = test_id).first()
        if not t:
            raise Exception("no test with id = %s (maybe it was deleted before finish timeout happened?)" % test_id)
        t.data = dbdump(tests_cache[test_id])
        DBSession.add(t)
        DBSession.commit()
        if registry.has_listeners:
            registry.notify(OnFinishTest(t, registry.settings))
        # Tear down all per-test bookkeeping and the timer greenlet.
        del tests_cache[test_id]
        del finish_que[test_id]
        del step_ques[test_id]
        del workers_last_activity[test_id]
        gevent.kill(timers[test_id])
        del timers[test_id]
        log.info("finished test #%s%s" % (test_id, " (timeout)" if is_crashed else ""))
"Easy Listening", "Electronic", "Hip Hop/Rap", "Holiday", "Industrial", "Jazz", "New Age", "Opera", "Pop", "R&B/Soul", "Reggae", "Rock", "Soundtrack", "World", ] inspector = Inspector.from_engine(engine) # Create all tables if they don't exist if not "categories" in inspector.get_table_names(): Base.metadata.create_all(engine) session = DBSession() # add all the categories to the newly created categories table for category in categories: new_category = Category(name=category) session.add(new_category) session.commit() session.close()