def from_to(ses, fs, ts, pc, pn):
    """Search train entities whose station graph matches a from/to pair.

    ses -- open SQLAlchemy session
    fs  -- "from" phrase (may be empty string)
    ts  -- "to" phrase (may be empty string)
    pc  -- page size (rows per page)
    pn  -- zero-based page number

    Returns (entities, has_next_page).  When only one of fs/ts is given,
    falls back to the generic full-text search in full().
    """
    es = []
    has_next_p = False
    if fs != '' and ts == '':
        # Only "from" given: spaces/hyphens become SQL LIKE wildcards so
        # partial station names still match.
        es, has_next_p = full(ses, fs.replace(' ', '%').replace('-', '%'), pc, pn)
    elif fs == '' and ts != '':
        # Only "to" given: same wildcard treatment.
        es, has_next_p = full(ses, ts.replace(' ', '%').replace('-', '%'), pc, pn)
    else:
        # Both given: build "<from>%<to>" so the graph column matches only
        # when the "from" station precedes the "to" station in the route.
        prepared_fs = fs.replace(' ', '%').replace('-', '%')
        prepared_ts = ts.replace(' ', '%').replace('-', '%')
        prepared_ph = prepared_fs + '%' + prepared_ts
        try:
            # ref_id == None keeps only canonical (non-alias) entities.
            q = ses.query(orm.uatrains.E).\
                filter(orm.and_(
                    orm.or_(orm.uatrains.E.ua_graph.ilike('%' + prepared_ph.lower() + '%'),
                            orm.uatrains.E.ru_graph.ilike('%' + prepared_ph.lower() + '%'),
                            orm.uatrains.E.en_graph.ilike('%' + prepared_ph.lower() + '%')),
                    orm.uatrains.E.ref_id == None)).\
                order_by(orm.uatrains.E.etype.desc(),
                         orm.uatrains.E.vc.desc(),
                         orm.uatrains.E.ua_title)
            es = q.limit(pc).offset(pn * pc).all()
            # Peek at the next page to decide whether pagination continues.
            next_p_es = q.limit(pc).offset((pn + 1) * pc).all()
            if len(next_p_es) > 0:
                has_next_p = True
        except Exception:
            nlog.info(
                'Uatrains error',
                'Can\'t find entities by fs and ts\n' + traceback.format_exc())
    return es, has_next_p
def getStationSitemap(lng):
    """Build an XML sitemap listing one URL per station entity.

    lng -- language code; currently unused by the query or output, kept
           for interface compatibility with callers.

    Returns the sitemap as a single XML string.  On query failure an
    empty urlset is returned (the error is logged, not raised).
    """
    now = datetime.datetime.now().strftime("%Y-%m-%d")
    conn = orm.q_engine.connect()
    ses = orm.sescls(bind=conn)
    ss = None
    try:
        ss = ses.query(orm.uatrains.E).filter(
            orm.uatrains.E.etype == etype.station).all()
    except Exception:
        nlog.info('Uatrains error',
                  'Can\'t create sitemap\n' + traceback.format_exc())
    finally:
        # FIX: the original never closed the session/connection (leak).
        ses.close()
        conn.close()
    # Accumulate parts and join once instead of quadratic string +=.
    parts = ['<?xml version="1.0" encoding="UTF-8"?>' +
             '<urlset xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
             'xsi:schemaLocation="http://www.sitemaps.org/schemas/sitemap/0.9 ' +
             'http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd" ' +
             'xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">']
    if ss is not None:
        for s in ss:
            parts.append(
                '<url>' +
                '<loc>' + cherrypy.request.base + '/' + str(s.id) + '</loc>' +
                '<lastmod>' + now + '</lastmod>' +
                '<changefreq>daily</changefreq>' +
                '<priority>1.0</priority>' +
                '</url>')
    parts.append('</urlset>')
    return ''.join(parts)
def social(self, u, t, l=0, v=0):
    """Render social-sharing widgets for URL u with title t.

    l -- truthy (>0) to show button labels; v -- truthy (>0) for a
    vertical layout.  Returns the widget markup, or '' on any error.
    """
    cherrypy.response.headers['Access-Control-Allow-Origin'] = "*"
    try:
        quoted_url = urllib.parse.quote(u)
        show_label = int(l) > 0
        vertical = int(v) > 0
        return layout.getSocial(quoted_url, t, show_label, vertical)
    except:
        nlog.info('podelitsya error', traceback.format_exc())
        return ''
def grab_ptrain(msg):
    """Run the passenger-train grab task and log its wall-clock duration.

    msg -- the dispatch message (unused by the task itself).
    Errors are logged, never raised.
    """
    try:
        started_at = datetime.datetime.now()
        t0 = time.time()
        ventilator.run(task_drvs.passengers, 32)
        elapsed = datetime.timedelta(seconds=int(time.time() - t0))
        exec_log.info('uatrains bot - task runner - ptrain %s %s' %
                      (str(started_at), str(elapsed)))
    except:
        nlog.info('uatrains bot - task runner - ptrain - server error',
                  traceback.format_exc())
def index(self):
    """Render the ugently home page listing all stored user agents.

    Returns the rendered page, or the fallback string 'No data' when
    anything fails (the error is logged, not raised).
    """
    l = 'No data'
    conn = None
    ses = None
    try:
        conn = orm.q_engine.connect()
        ses = orm.sescls(bind=conn)
        user_agents = ses.query(orm.UserAgent).all()
        l = layout.getHome(user_agents)
    except:
        nlog.info('ugently - error', traceback.format_exc())
    finally:
        # FIX: the original closed ses/conn only on the success path,
        # leaking both whenever the query or rendering raised.
        if ses is not None:
            ses.close()
        if conn is not None:
            conn.close()
    return l
def news(self):
    """Render the news page for the current UI language.

    Fetches all news rows for the language, newest first; on failure an
    empty list is rendered and the error is logged.
    """
    lng = get_lng()
    conn = orm.q_engine.connect()
    ses = orm.sescls(bind=conn)
    items = []
    try:
        items = (ses.query(orm.uatrains.New)
                 .filter(orm.uatrains.New.lng == lng)
                 .order_by(orm.uatrains.New.date.desc())
                 .all())
    except:
        nlog.info('Uatrains error',
                  'Can\'t find news\n' + traceback.format_exc())
    ses.close()
    conn.close()
    return layout.getNews(items, lng)
def rnd(msg):
    """Reply on the froxly data-worker REP socket with a random free proxy.

    msg -- decoded request dict; msg['params']['url'] (when present)
    selects the per-URL proxy pool, otherwise the base pool is used.

    NOTE(review): `red`, `froxly_data_worker_socket`, `orm`,
    `data_server_common`, `red_keys` and `nlog` come from an enclosing
    scope not visible here — confirm against the defining module.
    """
    rnd_free_proxy = None

    def base_rnd():
        # Serve from the Redis base pool when populated; otherwise rebuild
        # it from the FreeProxy table (http proxies with status 200).
        if red.exists(red_keys.froxly_base_check_free_proxy) and \
           red.scard(red_keys.froxly_base_check_free_proxy) > 0:
            return red.srandmember(
                red_keys.froxly_base_check_free_proxy)
        else:
            conn = orm.null_engine.connect()
            ses = orm.sescls(bind=conn)
            free_proxies = ses.query(orm.FreeProxy).filter(
                orm.and_(orm.FreeProxy.protocol == 'http',
                         orm.FreeProxy.http_status == 200)).all()
            for free_proxy in free_proxies:
                sproxy = data_server_common.dbproxy2sproxy(free_proxy)
                red.sadd(red_keys.froxly_base_check_free_proxy, sproxy)
            ses.close()
            conn.close()
            return red.srandmember(
                red_keys.froxly_base_check_free_proxy)

    if msg is not None and msg['params'] is not None and 'url' in msg['params'] and \
       msg['params']['url'] is not None:
        # Per-URL pool requested.
        url_red_key = red_keys.froxly_url_free_proxy_prefix + msg[
            'params']['url']
        if red.exists(url_red_key) and red.scard(url_red_key) > 0:
            rnd_free_proxy = red.srandmember(url_red_key)
        else:
            # Empty per-URL pool: log it and fall back to the base pool.
            red.rpush(red_keys.froxly_rnd_free_proxy_log,
                      'No proxies for url: ' + msg['params']['url'])
            rnd_free_proxy = base_rnd()
    else:
        rnd_free_proxy = base_rnd()
    # A REP socket must always answer, even with a null result.
    if rnd_free_proxy is not None:
        froxly_data_worker_socket.send_unicode(
            json.dumps(
                {'result': json.loads(rnd_free_proxy.decode('utf-8'))}))
    else:
        froxly_data_worker_socket.send_unicode(
            json.dumps({'result': None}))
        nlog.info('froxly - rnd free proxy error', 'Random free proxy is None')
def ts(self, ph='', pn=0):
    """Train search page: trains whose graph AND title match phrase ph.

    ph -- search phrase; pn -- zero-based page number (string or int).
    Matches train/etrain/ptrain entity types, canonical rows only
    (ref_id is NULL).  Renders one 9-row page plus a has-next flag.
    """
    lng = get_lng()
    conn = orm.q_engine.connect()
    ses = orm.sescls(bind=conn)
    pn = int(pn)
    pc = 9  # page size
    ts = []
    has_next_p = False
    try:
        q = None
        # One branch per UI language: identical structure, but the graph,
        # title and ordering columns are language-specific.
        if lng == lngs.UA:
            q = ses.query(orm.uatrains.E).\
                filter(orm.and_(orm.or_(orm.uatrains.E.etype == etype.train,
                                        orm.uatrains.E.etype == etype.etrain,
                                        orm.uatrains.E.etype == etype.ptrain),
                                orm.uatrains.E.ua_graph.ilike('%' + ph.lower().replace(' ', '%') + '%'),
                                orm.uatrains.E.ua_title.ilike('%' + ph.lower() + '%'),
                                orm.uatrains.E.ref_id == None)).\
                order_by(orm.uatrains.E.vc.desc(), orm.uatrains.E.ua_title)
        if lng == lngs.RU:
            q = ses.query(orm.uatrains.E).\
                filter(orm.and_(orm.or_(orm.uatrains.E.etype == etype.train,
                                        orm.uatrains.E.etype == etype.etrain,
                                        orm.uatrains.E.etype == etype.ptrain),
                                orm.uatrains.E.ru_graph.ilike('%' + ph.lower().replace(' ', '%') + '%'),
                                orm.uatrains.E.ru_title.ilike('%' + ph.lower() + '%'),
                                orm.uatrains.E.ref_id == None)).\
                order_by(orm.uatrains.E.vc.desc(), orm.uatrains.E.ru_title)
        if lng == lngs.EN:
            q = ses.query(orm.uatrains.E).\
                filter(orm.and_(orm.or_(orm.uatrains.E.etype == etype.train,
                                        orm.uatrains.E.etype == etype.etrain,
                                        orm.uatrains.E.etype == etype.ptrain),
                                orm.uatrains.E.en_graph.ilike('%' + ph.lower().replace(' ', '%') + '%'),
                                orm.uatrains.E.en_title.ilike('%' + ph.lower() + '%'),
                                orm.uatrains.E.ref_id == None)).\
                order_by(orm.uatrains.E.vc.desc(), orm.uatrains.E.en_title)
        ts = q.limit(pc).offset(pn * pc).all()
        # Peek at the next page to decide whether pagination continues.
        next_p_ts = q.limit(pc).offset((pn + 1) * pc).all()
        if len(next_p_ts) > 0:
            has_next_p = True
    except Exception:
        nlog.info('Uatrains error',
                  'Can\'t find trains\n' + traceback.format_exc())
    ses.close()
    conn.close()
    return layout.getTrains(ts, ph, pn, has_next_p, lng)
def run():
    """ugently data worker: answer each REP request with a random user agent.

    User agents are served from a Redis set when it is populated;
    otherwise a single random row is fetched straight from Postgres.
    Replies with '' when no agent could be found (REP must always answer).
    """
    conn = None
    cur = None
    try:
        ctx = zmq.Context()
        ugently_data_worker_socket = ctx.socket(zmq.REP)
        ugently_data_worker_socket.connect(sockets.ugently_data_worker)
        red = redis.StrictRedis(unix_socket_path=sockets.redis)
        while True:
            msg = ugently_data_worker_socket.recv_unicode()
            rnd_user_agent = None
            if red.exists(red_keys.ugently_user_agent_value) and red.scard(
                    red_keys.ugently_user_agent_value) > 0:
                rnd_user_agent = red.srandmember(
                    red_keys.ugently_user_agent_value).decode('utf-8')
            else:
                # SECURITY(review): database credentials are hard-coded in
                # source; they should come from config/environment.
                conn = psycopg2.connect(
                    'host=localhost port=5432 dbname=werp user=werp password=0v}II587'
                )
                cur = conn.cursor()
                cur.execute(
                    "select value from user_agent order by random() limit 1;")
                rnd_user_agent = cur.fetchone()[0]
                cur.close()
                conn.close()
            if rnd_user_agent is not None:
                ugently_data_worker_socket.send_unicode(rnd_user_agent)
            else:
                # Still reply (empty) so the REP socket stays in a valid state.
                nlog.info('ugently - data worker error',
                          'Random user agent is None')
                ugently_data_worker_socket.send_unicode('')
    except:
        nlog.info('ugently - data worker fatal', traceback.format_exc())
        # Best-effort cleanup of whatever DB handles were left open.
        if cur is not None:
            cur.close()
        if conn is not None:
            conn.close()
def s(self, sid):
    """301-redirect a legacy station id (oid) to its newest entity page.

    Among all station entities sharing oid == sid, the one with the
    highest id wins; if lookup fails the redirect falls back to '/'.
    """
    conn = orm.q_engine.connect()
    ses = orm.sescls(bind=conn)
    id = None
    try:
        matches = ses.query(orm.uatrains.E).filter(
            orm.and_(orm.uatrains.E.oid == int(sid),
                     orm.uatrains.E.etype == etype.station)).all()
        # Newest record for this oid = the one with the largest id.
        id = max(matches, key=lambda station: station.id).id
    except:
        nlog.info('Uatrains error',
                  'Can\'t find station by sid = ' + str(sid) + '\n' +
                  traceback.format_exc())
    ses.close()
    conn.close()
    cherrypy.response.status = 301
    if id is not None:
        cherrypy.response.headers['Location'] = '/' + str(id)
    else:
        cherrypy.response.headers['Location'] = '/'
    return ''
def full(ses, ph, pc, pn):
    """Full-text entity search over titles, station graphs and train codes.

    ses -- open SQLAlchemy session
    ph  -- search phrase
    pc  -- page size (rows per page)
    pn  -- zero-based page number

    Returns (entities, has_next_page).  Only canonical rows (ref_id is
    NULL) are returned, best entity type / view count first.
    """
    es = []
    has_next_p = False
    # Spaces/hyphens become SQL LIKE wildcards so partial words still match.
    prepared_ph = ph.replace(' ', '%').replace('-', '%')
    try:
        # The `similar to` clause matches the entity's raw train-number
        # "value" field — NOTE(review): the pattern assumes numeric/Cyrillic
        # code segments separated by '/'; confirm against the data.
        q = ses.query(orm.uatrains.E).\
            filter(orm.and_(orm.or_(orm.uatrains.E.ua_title.ilike('%' + prepared_ph.lower() + '%'),
                                    orm.uatrains.E.ru_title.ilike('%' + prepared_ph.lower() + '%'),
                                    orm.uatrains.E.en_title.ilike('%' + prepared_ph.lower() + '%'),
                                    orm.uatrains.E.ua_graph.ilike('%' + prepared_ph.lower() + '%'),
                                    orm.uatrains.E.ru_graph.ilike('%' + prepared_ph.lower() + '%'),
                                    orm.uatrains.E.en_graph.ilike('%' + prepared_ph.lower() + '%'),
                                    orm.uatrains.E.value.op('similar to')('([0-9А-Яа-я]*/)?' + prepared_ph.lower() + \
                                        '([А-Яа-я]*)?(/[0-9А-Яа-я]*)?(/[0-9А-Яа-я]*)?')),
                            orm.uatrains.E.ref_id == None)).\
            order_by(orm.uatrains.E.etype.desc(),
                     orm.uatrains.E.vc.desc(),
                     orm.uatrains.E.ua_title)
        es = q.limit(pc).offset(pn * pc).all()
        # Peek at the next page to decide whether pagination continues.
        next_p_es = q.limit(pc).offset((pn + 1) * pc).all()
        if len(next_p_es) > 0:
            has_next_p = True
    except Exception:
        nlog.info('Uatrains error',
                  'Can\'t find entities\n' + traceback.format_exc())
    return es, has_next_p
def ss(self, ph='', pn=0):
    """Station search page: stations whose title (any language) matches ph.

    ph -- search phrase; pn -- zero-based page number (string or int).
    Renders one 9-row page of results plus a has-next-page flag.
    """
    lng = get_lng()
    conn = orm.q_engine.connect()
    ses = orm.sescls(bind=conn)
    pn = int(pn)
    per_page = 9
    stations = []
    has_next_p = False
    try:
        needle = '%' + ph.lower() + '%'
        query = (ses.query(orm.uatrains.E)
                 .filter(orm.uatrains.E.etype == etype.station)
                 .filter(orm.or_(orm.uatrains.E.ua_title.ilike(needle),
                                 orm.uatrains.E.ru_title.ilike(needle),
                                 orm.uatrains.E.en_title.ilike(needle)))
                 .order_by(orm.uatrains.E.vc.desc(), orm.uatrains.E.ua_title))
        stations = query.limit(per_page).offset(pn * per_page).all()
        # A non-empty peek at the following page means pagination continues.
        if query.limit(per_page).offset((pn + 1) * per_page).all():
            has_next_p = True
    except Exception:
        nlog.info('Uatrains error',
                  'Can\'t find stations\n' + traceback.format_exc())
    ses.close()
    conn.close()
    return layout.getStations(stations, ph, pn, has_next_p, lng)
def t(self, tid):
    """301-redirect a legacy train id (oid) to its newest canonical entity.

    Among all train entities sharing oid == tid, pick the highest-id
    candidate that is not an alias (ref_id is NULL); if lookup fails the
    redirect falls back to '/'.
    """
    conn = orm.q_engine.connect()
    ses = orm.sescls(bind=conn)
    id = None
    try:
        ts = ses.query(orm.uatrains.E).filter(
            orm.and_(orm.uatrains.E.oid == int(tid),
                     orm.uatrains.E.etype == etype.train)).all()
        t = ts[0]
        for train in ts:
            # FIX: the original tested `t.ref_id` (the current selection)
            # instead of the candidate's `train.ref_id`, so alias entities
            # could be selected and non-alias ones skipped.
            if t.id < train.id and train.ref_id is None:
                t = train
        id = t.id
    except:
        nlog.info('Uatrains error',
                  'Can\'t find train by tid = ' + str(tid) + '\n' +
                  traceback.format_exc())
    ses.close()
    conn.close()
    cherrypy.response.status = 301
    if id is not None:
        cherrypy.response.headers['Location'] = '/' + str(id)
    else:
        cherrypy.response.headers['Location'] = '/'
    return ''
def index(self, eid=None):
    """Render the uatrains home page: top trains, top stations, latest news.

    eid -- accepted for route compatibility; not used by this handler.
    Each of the three sections is fetched independently, so one failing
    query only empties its own section (errors are logged).
    """
    lng = get_lng()
    conn = orm.q_engine.connect()
    ses = orm.sescls(bind=conn)
    ts = []
    pc = 5  # rows per section
    try:
        q = None
        # Top trains: canonical rows only, most-viewed first; the ordering
        # title column depends on the UI language.
        if lng == lngs.UA:
            q = ses.query(orm.uatrains.E).\
                filter(orm.and_(orm.uatrains.E.etype == etype.train,
                                orm.uatrains.E.ref_id == None)).\
                order_by(orm.uatrains.E.vc.desc(), orm.uatrains.E.ua_title)
        if lng == lngs.RU:
            q = ses.query(orm.uatrains.E).\
                filter(orm.and_(orm.uatrains.E.etype == etype.train,
                                orm.uatrains.E.ref_id == None)).\
                order_by(orm.uatrains.E.vc.desc(), orm.uatrains.E.ru_title)
        if lng == lngs.EN:
            q = ses.query(orm.uatrains.E).\
                filter(orm.and_(orm.uatrains.E.etype == etype.train,
                                orm.uatrains.E.ref_id == None)).\
                order_by(orm.uatrains.E.vc.desc(), orm.uatrains.E.en_title)
        ts = q.limit(pc).all()
    except Exception:
        nlog.info('Uatrains error',
                  'Can\'t find trains\n' + traceback.format_exc())
    ss = []
    try:
        q = None
        # Top stations, same language-dependent ordering.
        if lng == lngs.UA:
            q = ses.query(orm.uatrains.E).filter(orm.uatrains.E.etype == etype.station).\
                order_by(orm.uatrains.E.vc.desc(), orm.uatrains.E.ua_title)
        if lng == lngs.RU:
            q = ses.query(orm.uatrains.E).filter(orm.uatrains.E.etype == etype.station).\
                order_by(orm.uatrains.E.vc.desc(), orm.uatrains.E.ru_title)
        if lng == lngs.EN:
            q = ses.query(orm.uatrains.E).filter(orm.uatrains.E.etype == etype.station).\
                order_by(orm.uatrains.E.vc.desc(), orm.uatrains.E.en_title)
        ss = q.limit(pc).all()
    except Exception:
        nlog.info('Uatrains error',
                  'Can\'t find stations\n' + traceback.format_exc())
    news = []
    try:
        # Three most recent news items for this language.
        news = ses.query(orm.uatrains.New).filter(orm.uatrains.New.lng == lng).\
            order_by(orm.uatrains.New.date.desc()).limit(3).all()
    except:
        nlog.info('Uatrains error',
                  'Can\'t find news\n' + traceback.format_exc())
    ses.close()
    conn.close()
    return layout.getHome(ts, ss, news, lng)
ugently_data_server_socket.bind(sockets.ugently_data_server) ugently_data_worker_socket = ctx.socket(zmq.DEALER) ugently_data_worker_socket.bind(sockets.ugently_data_worker) poller = zmq.Poller() poller.register(ugently_data_server_socket, zmq.POLLIN) poller.register(ugently_data_worker_socket, zmq.POLLIN) for wrk_num in range(WORKER_POOL): thr = threading.Thread(target=worker.run) thr.start() while True: socks = dict(poller.poll()) try: # frontend if ugently_data_server_socket in socks and socks[ ugently_data_server_socket] == zmq.POLLIN: req_msg = ugently_data_server_socket.recv_multipart() ugently_data_worker_socket.send_multipart(req_msg) # backend if ugently_data_worker_socket in socks and socks[ ugently_data_worker_socket] == zmq.POLLIN: res_msg = ugently_data_worker_socket.recv_multipart() ugently_data_server_socket.send_multipart(res_msg) except: nlog.info('ugently - data server error', traceback.format_exc()) except: nlog.info('ugently - data server fatal', traceback.format_exc())
nlog.info('uatrains bot - task runner - etrain - server error', traceback.format_exc()) def grab_ptrain(msg): try: start_dt = datetime.datetime.now() start_time = time.time() ventilator.run(task_drvs.passengers, 32) end_time = time.time() exec_delta = datetime.timedelta(seconds=int(end_time - start_time)) exec_log.info('uatrains bot - task runner - ptrain %s %s' % (str(start_dt), str(exec_delta))) except: nlog.info('uatrains bot - task runner - ptrain - server error', traceback.format_exc()) methods = {} methods[grab_etrain.__name__] = grab_etrain methods[grab_ptrain.__name__] = grab_ptrain while True: try: msg = json.loads(uatrains_bot_server_socket.recv_unicode()) if msg['method'] in methods: thr = threading.Thread(target=methods[msg['method']], args=(msg, )) thr.start() except: nlog.info('uatrains bot - server error', traceback.format_exc()) uatrains_bot_server_socket.send_unicode(json.dumps({'result': None})) except: nlog.info('uatrains bot - server fatal', traceback.format_exc())
import traceback
import zmq
from werp import orm
from werp import nlog

# req_logger daemon: PULL log lines from the IPC socket and persist each
# one as an orm.Log row.
ctx = zmq.Context()
puller = ctx.socket(zmq.PULL)
puller.bind("ipc:///home/www/sockets/req_logger.socket")
try:
    while True:
        message = puller.recv()
        conn = None
        ses = None
        try:
            conn = orm.q_engine.connect()
            ses = orm.sescls(bind=conn)
            ses.add(orm.Log(message.decode('utf-8').strip()))
            ses.commit()
        except Exception:
            # FIX: a single bad message or transient DB error used to kill
            # the whole daemon; now it is logged and the loop continues.
            nlog.info('req_logger - log error', traceback.format_exc())
        finally:
            # FIX: always release DB handles, even when commit fails.
            if ses is not None:
                ses.close()
            if conn is not None:
                conn.close()
except:
    # Fatal path (e.g. socket failure): log and exit.
    nlog.info('req_logger - log error', traceback.format_exc())
et.to_date >= datetime.datetime.now())): last_etrain = et similar_etrains = ses.query(orm.uatrains.E).\ filter(orm.and_(orm.or_(orm.uatrains.E.value == et.value, orm.uatrains.E.oid == et.oid), orm.uatrains.E.ua_title == et.ua_title, orm.or_(orm.uatrains.E.etype == 1, orm.uatrains.E.etype == 4))).all() if len(similar_etrains) > 1: for similar_et in similar_etrains: if (similar_et.from_date is not None and similar_et.to_date is not None and \ similar_et.from_date <= datetime.datetime.now() and \ similar_et.to_date >= datetime.datetime.now()) and \ ((last_etrain.from_date is None and last_etrain.to_date is None) or \ (last_etrain.to_date <= similar_et.from_date) or \ (last_etrain.id < similar_et.id)): last_etrain = similar_et if last_etrain.id > et.id: et.ref_id = last_etrain.id ses.commit() ses.close() conn.close() end_time = time.time() exec_delta = datetime.timedelta(seconds=int(end_time - start_time)) exec_log.info('uatrains bot etrain referer %s %s' % (str(start_dt), str(exec_delta))) except: if ses is not None: ses.close() if conn is not None: conn.close() nlog.info('uatrains bot - etrain referer fatal', traceback.format_exc())
def run():
    """froxly data worker: serve free-proxy bookkeeping requests over REP.

    Dispatches JSON messages of the form {'method': ..., 'params': ...}
    to the local handler functions via the `methods` table.  Every
    handler is responsible for sending exactly one reply on the REP
    socket (the dispatch loop's except clause replies on its behalf when
    a handler raises before replying).
    """
    try:
        ctx = zmq.Context()
        froxly_data_worker_socket = ctx.socket(zmq.REP)
        froxly_data_worker_socket.connect(sockets.froxly_data_worker)
        red = redis.StrictRedis(unix_socket_path=sockets.redis)

        def rnd(msg):
            # Pick a random free proxy: per-URL pool when msg names a URL,
            # otherwise the base pool; reply with it (or null) as JSON.
            rnd_free_proxy = None

            def base_rnd():
                # Serve from the Redis base pool when populated; otherwise
                # rebuild it from the FreeProxy table (http, status 200).
                if red.exists(red_keys.froxly_base_check_free_proxy) and \
                   red.scard(red_keys.froxly_base_check_free_proxy) > 0:
                    return red.srandmember(
                        red_keys.froxly_base_check_free_proxy)
                else:
                    conn = orm.null_engine.connect()
                    ses = orm.sescls(bind=conn)
                    free_proxies = ses.query(orm.FreeProxy).filter(
                        orm.and_(orm.FreeProxy.protocol == 'http',
                                 orm.FreeProxy.http_status == 200)).all()
                    for free_proxy in free_proxies:
                        sproxy = data_server_common.dbproxy2sproxy(free_proxy)
                        red.sadd(red_keys.froxly_base_check_free_proxy,
                                 sproxy)
                    ses.close()
                    conn.close()
                    return red.srandmember(
                        red_keys.froxly_base_check_free_proxy)

            if msg is not None and msg['params'] is not None and 'url' in msg['params'] and \
               msg['params']['url'] is not None:
                url_red_key = red_keys.froxly_url_free_proxy_prefix + msg[
                    'params']['url']
                if red.exists(url_red_key) and red.scard(url_red_key) > 0:
                    rnd_free_proxy = red.srandmember(url_red_key)
                else:
                    # Empty per-URL pool: note it, fall back to base pool.
                    red.rpush(red_keys.froxly_rnd_free_proxy_log,
                              'No proxies for url: ' + msg['params']['url'])
                    rnd_free_proxy = base_rnd()
            else:
                rnd_free_proxy = base_rnd()
            if rnd_free_proxy is not None:
                froxly_data_worker_socket.send_unicode(
                    json.dumps(
                        {'result': json.loads(rnd_free_proxy.decode('utf-8'))}))
            else:
                froxly_data_worker_socket.send_unicode(
                    json.dumps({'result': None}))
                nlog.info('froxly - rnd free proxy error',
                          'Random free proxy is None')

        def activate(msg):
            # Not implemented.  NOTE(review): sends no reply; the dispatch
            # loop's except clause ends up answering after the REP-state
            # error — confirm this is intended.
            pass

        def deactivate(msg):
            # Not implemented (see note on activate).
            pass

        def deactivate_for_url(msg):
            # Remove a proxy from a URL's pool, optionally logging a reason.
            # NOTE(review): indentation reconstructed from mangled source —
            # the 'reason' check is assumed to be inside the proxy/url
            # branch and the reply unconditional; confirm.
            if 'proxy' in msg['params'] and 'url' in msg['params']:
                red.srem(
                    red_keys.froxly_url_free_proxy_prefix + msg['params']['url'],
                    msg['params']['proxy'])
                if 'reason' in msg['params']:
                    red.sadd(
                        red_keys.froxly_url_free_proxy_log_prefix + msg['params']['url'],
                        '[' + str(datetime.datetime.now()) + '] ' +
                        msg['params']['reason'])
            froxly_data_worker_socket.send_unicode(json.dumps({'result': None}))

        def rnd_for_url(msg):
            # Same contract as rnd(); the per-URL selection lives there.
            rnd(msg)

        def clear_for_url(msg):
            # Drop a URL's entire proxy pool.
            if 'url' in msg['params']:
                red.delete(red_keys.froxly_url_free_proxy_prefix +
                           msg['params']['url'])
            froxly_data_worker_socket.send_unicode(json.dumps({'result': None}))

        # Method-name -> handler dispatch table.
        methods = {}
        methods[rnd.__name__] = rnd
        methods[activate.__name__] = activate
        methods[deactivate.__name__] = deactivate
        methods[deactivate_for_url.__name__] = deactivate_for_url
        methods[rnd_for_url.__name__] = rnd_for_url
        methods[clear_for_url.__name__] = clear_for_url
        while True:
            try:
                msg = json.loads(froxly_data_worker_socket.recv_unicode())
                if msg['method'] in methods:
                    methods[msg['method']](msg)
            except:
                # Keep the REP socket answerable even when a handler blew up.
                froxly_data_worker_socket.send_unicode(
                    json.dumps({'result': None}))
                nlog.info('froxly - data server error', traceback.format_exc())
    except:
        nlog.info('froxly - data worker fatal', traceback.format_exc())
froxly_requester_worker_socket = ctx.socket(zmq.DEALER) froxly_requester_worker_socket.bind(sockets.froxly_requester_worker) poller = zmq.Poller() poller.register(froxly_requester_server_socket, zmq.POLLIN) poller.register(froxly_requester_worker_socket, zmq.POLLIN) for wrk_num in range(data_server_common.REQUESTER_WORKER_POOL): thr = threading.Thread(target=worker.run) thr.start() while True: socks = dict(poller.poll()) try: # frontend if froxly_requester_server_socket in socks and socks[ froxly_requester_server_socket] == zmq.POLLIN: req_msg = froxly_requester_server_socket.recv_multipart() froxly_requester_worker_socket.send_multipart(req_msg) # backend if froxly_requester_worker_socket in socks and socks[ froxly_requester_worker_socket] == zmq.POLLIN: res_msg = froxly_requester_worker_socket.recv_multipart() froxly_requester_server_socket.send_multipart(res_msg) except: #froxly_data_server_socket.send_unicode(json.dumps({'result': None})) nlog.info('froxly - requester server error', traceback.format_exc()) except: nlog.info('froxly - requester server fatal', traceback.format_exc())
def _request_charset(req):
    """Charset for decoding the remote response; 'utf-8' unless the request names one."""
    if 'charset' in req['params'] and req['params']['charset'] is not None:
        return req['params']['charset']
    return 'utf-8'


def _read_http_response(s, remote_charset, res):
    """Read an HTTP response from socket s and fill res['result'] in place.

    Reads the 15-byte status line; on '200 OK' drains the socket until the
    peer closes (NOTE(review): relies on connection close to terminate —
    may block until timeout with keep-alive servers) and stores the body.
    Any other status is reported as http_status -1.  Closes the socket.
    """
    status_line = s.recv(15).decode(remote_charset, 'ignore')
    if status_line == 'HTTP/1.1 200 OK' or status_line == 'HTTP/1.0 200 OK':
        chunks = [status_line]
        buf = s.recv(1024)
        while buf:
            chunks.append(buf.decode(remote_charset, 'ignore'))
            buf = s.recv(1024)
        remote_res = ''.join(chunks)
        start_body = remote_res.find('\r\n\r\n')
        res['result']['data'] = remote_res[start_body + 4:]
        res['result']['http_status'] = 200
        res['result']['http_status_reason'] = None
    else:
        res['result']['http_status'] = -1
        res['result']['http_status_reason'] = status_line
    s.close()


def run():
    """froxly requester worker: fetch a URL through a random free proxy.

    For each REP request: pick a user agent and a proxy from the data
    servers, issue a plain GET through the proxy (SOCKS4 handshake when
    the proxy protocol is 'socks4/5'), and reply with
    {'result': {'data', 'http_status', 'http_status_reason'}}.  On
    failure the proxy is deactivated for that URL.
    """
    try:
        ctx = zmq.Context()
        froxly_requester_worker_socket = ctx.socket(zmq.REP)
        froxly_requester_worker_socket.connect(sockets.froxly_requester_worker)
        ugently_data_server_socket = ctx.socket(zmq.REQ)
        ugently_data_server_socket.connect(sockets.ugently_data_server)
        froxly_data_server_socket = ctx.socket(zmq.REQ)
        froxly_data_server_socket.connect(sockets.froxly_data_server)
        while True:
            req_msg = froxly_requester_worker_socket.recv_unicode()
            req_url = None
            # FIX: initialize so the except handler below cannot hit an
            # unbound name when the failure happens before proxy selection.
            rnd_proxy = None
            res = {
                'result': {
                    'data': None,
                    'http_status': None,
                    'http_status_reason': None
                }
            }
            try:
                req = json.loads(req_msg)
                url_obj = urllib.parse.urlparse(req['params']['url'])
                # REQ/REP exchange keeps the socket state valid.
                # NOTE(review): the fetched user agent is never sent with
                # the GET request — confirm whether that is intended.
                ugently_data_server_socket.send_unicode('')
                rnd_user_agent = ugently_data_server_socket.recv_unicode()
                rnd_proxy_req = {'method': 'rnd_for_url', 'params': None}
                if url_obj.netloc is not None and url_obj.netloc != '':
                    req_url = url_obj.scheme + '://' + url_obj.netloc
                    rnd_proxy_req['params'] = {'url': req_url}
                froxly_data_server_socket.send_unicode(
                    json.dumps(rnd_proxy_req))
                rnd_proxy = json.loads(
                    froxly_data_server_socket.recv_unicode())['result']
                timeout = timeouts.froxly_requester
                if 'timeout' in req['params']:
                    timeout = req['params']['timeout']
                if rnd_proxy['protocol'] == 'socks4/5':
                    s = socket.socket()
                    s.settimeout(timeout)
                    s.connect((rnd_proxy['proxy']['ip'],
                               int(rnd_proxy['proxy']['port'])))
                    ipaddr = socket.inet_aton(
                        socket.gethostbyname(url_obj.netloc))
                    # NOTE(review): destination port hard-coded to 80.
                    destport = 80
                    conn_req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr
                    conn_req = conn_req + chr(0x00).encode()
                    s.sendall(conn_req)
                    conn_resp = s.recv(8)
                    # FIX: original referenced an undefined name `resp` and
                    # its condition raised on the SOCKS4 *success* reply.
                    # Success is VN == 0x00 and CD == 0x5A; raise otherwise.
                    if conn_resp[0:1] != chr(0x00).encode() or \
                       conn_resp[1:2] != chr(0x5A).encode():
                        s.close()
                        raise Exception('Socks proxy error')
                    remote_req_str = 'GET ' + req['params']['url'] + ' HTTP/1.1' + \
                        '\r\nHost:' + url_obj.netloc + '\r\n\r\n'
                    s.sendall(remote_req_str.encode())
                    _read_http_response(s, _request_charset(req), res)
                else:
                    s = socket.socket()
                    s.settimeout(timeout)
                    s.connect((rnd_proxy['ip'], int(rnd_proxy['port'])))
                    remote_req_str = 'GET ' + req['params']['url'] + ' HTTP/' + rnd_proxy['protocol_version'] + \
                        '\r\nHost:' + url_obj.netloc + '\r\n\r\n'
                    s.sendall(remote_req_str.encode())
                    _read_http_response(s, _request_charset(req), res)
            except Exception as e:
                res['result']['http_status'] = -11
                res['result']['http_status_reason'] = str(e)
                # Deactivate the proxy for this URL, but only when we
                # actually obtained one before failing.
                if req_url is not None and rnd_proxy is not None:
                    sproxy = data_server_common.jproxy2sproxy(rnd_proxy)
                    deactivate_proxy_req = {
                        'method': 'deactivate_for_url',
                        'params': {
                            'url': req_url,
                            'proxy': sproxy,
                            'reason': res['result']['http_status_reason']
                        }
                    }
                    froxly_data_server_socket.send_unicode(
                        json.dumps(deactivate_proxy_req))
                    froxly_data_server_socket.recv_unicode()
            froxly_requester_worker_socket.send_unicode(json.dumps(res))
    except:
        nlog.info('froxly - requester worker fatal', traceback.format_exc())
newest_halt = halts[0] for halt in halts: if halt.c_date > newest_halt.c_date: newest_halt = halt for halt in halts: if halt.id != newest_halt.id: ses.delete(halt) ses.commit() #halts = ses.query(orm.uatrains.TrainStation).\ # filter(orm.uatrains.TrainStation.t_id == train.id).all() #halts_to_delete = [] #for halt in halts: # for h in halts: # if halt.order == h.order and halt.c_date >= h.c_date and h not in halts_to_delete: # halts_to_delete.append(h) #for h in halts_to_delete: # ses.delete(h) #ses.commit() ses.close() conn.close() end_time = time.time() exec_delta = datetime.timedelta(seconds=int(end_time - start_time)) exec_log.info('uatrains bot halt cleaner %s %s' % (str(start_dt), str(exec_delta))) except: if ses is not None: ses.close() if conn is not None: conn.close() nlog.info('uatrains bot - halt cleaner fatal', traceback.format_exc())
import json
import zmq
import traceback
from werp import nlog
from werp.common import sockets

# One-shot client: ask the uatrains bot server to run the passenger-train
# grab task, then wait for (and discard) the acknowledgement.
# (Removed unused module state: `conn`/`ses` were declared but never used.)
try:
    ctx = zmq.Context()
    uatrains_bot_server_socket = ctx.socket(zmq.REQ)
    uatrains_bot_server_socket.connect(sockets.uatrains_bot_server)
    uatrains_bot_server_socket.send_unicode(
        json.dumps({
            'method': 'grab_ptrain',
            'params': None
        }))
    uatrains_bot_server_socket.recv_unicode()
except:
    nlog.info('uatrains bot - ptrain task runner fatal',
              traceback.format_exc())
def default(self, eid=None, *a, **kw):
    """Catch-all page handler: render the entity page for numeric eid.

    Looks up the entity, bumps its view counter, and renders the
    type-specific layout (train / ptrain / station).  Alias entities
    (ref_id set) are not rendered; they 301-redirect to their target.
    Any failure leaves l == '' and falls through to a 301 to '/'.
    A missing eid delegates to index().
    """
    lng = get_lng()
    l = ''
    ref_id = None
    if eid is not None:
        prepared_eid = None
        try:
            # int(float(...)) also tolerates values like "123.0".
            prepared_eid = int(float(eid))
        except:
            nlog.info('Uatrains error',
                      'Can\'t parse eid = ' + str(prepared_eid) + '\n' +
                      traceback.format_exc())
        if prepared_eid is not None:
            conn = orm.q_engine.connect()
            ses = orm.sescls(bind=conn)
            e = None
            try:
                e = ses.query(orm.uatrains.E).filter(
                    orm.uatrains.E.id == prepared_eid).one()
                if e.ref_id is not None:
                    # Alias entity: remember the redirect target.
                    ref_id = e.ref_id
                else:
                    # Count the page view.
                    e.vc = e.vc + 1
                    ses.commit()
            except:
                nlog.info('Uatrains error',
                          'Can\'t find entity by eid = ' + str(prepared_eid) + '\n' +
                          traceback.format_exc())
            if e is not None and ref_id is None:
                # Re-query with the halts eagerly loaded for rendering.
                if e.etype == etype.train:
                    try:
                        t = ses.query(orm.uatrains.E).\
                            options(orm.joinedload_all(orm.uatrains.E.t_ss,
                                                       orm.uatrains.TrainStation.s)).\
                            filter(orm.uatrains.E.id == prepared_eid).one()
                        l = layout.getTrain(t, t.t_ss, lng)
                    except:
                        nlog.info('Uatrains error',
                                  'Can\'t find train by id = ' + str(prepared_eid) + '\n' +
                                  traceback.format_exc())
                elif e.etype == etype.ptrain:
                    try:
                        t = ses.query(orm.uatrains.E).\
                            options(orm.joinedload_all(orm.uatrains.E.t_ss,
                                                       orm.uatrains.TrainStation.s)).\
                            filter(orm.uatrains.E.id == prepared_eid).one()
                        l = layout.getPTrain(t, t.t_ss, lng)
                    except:
                        nlog.info('Uatrains error',
                                  'Can\'t find ptrain by id = ' + str(prepared_eid) + '\n' +
                                  traceback.format_exc())
                elif e.etype == etype.station:
                    try:
                        s = ses.query(orm.uatrains.E).\
                            options(orm.joinedload_all(orm.uatrains.E.s_ts,
                                                       orm.uatrains.TrainStation.t)).\
                            filter(orm.uatrains.E.id == prepared_eid).one()
                        l = layout.getStation(s, s.s_ts, lng)
                    except:
                        nlog.info('Uatrains error',
                                  'Can\'t find station by id = ' + str(prepared_eid) + '\n' +
                                  traceback.format_exc())
            ses.close()
            conn.close()
    else:
        l = self.index(eid)
    if l == '':
        # Nothing rendered: redirect to the alias target or the home page.
        cherrypy.response.status = 301
        if ref_id is not None:
            cherrypy.response.headers['Location'] = '/' + str(ref_id)
        else:
            cherrypy.response.headers['Location'] = '/'
    return l
import traceback
import zmq
import json
from werp import nlog
from werp.common import sockets
from werp.uatrains.engine import drv

# One-shot client: ask the froxly data server to list proxies for the
# passenger-schedule domain, then wait for (and discard) the reply.
try:
    ctx = zmq.Context()
    froxly_data_server_socket = ctx.socket(zmq.REQ)
    froxly_data_server_socket.connect(sockets.froxly_data_server)
    request = {'method': 'list_for_url',
               'params': {'url': drv.passengers.domain}}
    froxly_data_server_socket.send_unicode(json.dumps(request))
    froxly_data_server_socket.recv_unicode()
except:
    nlog.info('uatrains bot - ptrain check proxies error',
              traceback.format_exc())
trains = ses.query(orm.uatrains.E).\ filter(orm.or_(orm.uatrains.E.etype == 1, orm.uatrains.E.etype == 4, orm.uatrains.E.etype == 5)).all() for t in trains: if len(t.t_ss) > 0: ua_graph = '' ru_graph = '' en_graph = '' for ts in t.t_ss: ua_graph += (ts.s.ua_title.lower() if ts.s.ua_title is not None else '') + '; ' ru_graph += (ts.s.ru_title.lower() if ts.s.ru_title is not None else '') + '; ' en_graph += (ts.s.en_title.lower() if ts.s.en_title is not None else '') + '; ' t.ua_graph = ua_graph t.ru_graph = ru_graph t.en_graph = en_graph ses.commit() ses.close() conn.close() end_time = time.time() exec_delta = datetime.timedelta(seconds=int(end_time - start_time)) exec_log.info('uatrains bot grapher %s %s' % (str(start_dt), str(exec_delta))) except: if ses is not None: ses.close() if conn is not None: conn.close() nlog.info('uatrains bot - grapher fatal', traceback.format_exc())