def thread_fetch(self):
    """Worker loop: consume topics from self.q and fetch them into sqlite.

    Runs forever.  The sentinel topic 'feed' regenerates the static RSS
    file instead of fetching; every other topic is handed to fetchvc.
    """
    conn = sqlite3.connect(self.path + '/verycd.sqlite3.db')
    conn.text_factory = str  # return bytestrings instead of unicode objects
    while True:
        topic = self.q.get()
        if str(topic) == 'feed':
            # regenerate the feed XML; 'with' guarantees the file handle is
            # closed (original leaked it via open(...).write(...))
            with open(self.path + '/static/feed.xml', 'w') as feed_file:
                feed_file.write(feed(self.path, conn))
            self.q.task_done()
            continue
        try:
            fetchvc.fetch(topic, conn)
        except Exception:
            # best-effort: a failed fetch must not kill the worker thread.
            # 'except Exception' (not bare 'except:') so KeyboardInterrupt
            # and SystemExit still propagate.
            pass
        self.q.task_done()
def main(): """ The frame classifier """ svcf = svc_frame() svcf.load() """ Still handler """ s = still() """ Start the feed off """ f = feed() f.start() while (1): """ Load a still from the feed """ s.load(f.take_still()) print s.compress_make_linear() """ Classify """ print svcf.is_frame(s.compress_make_linear()) ts = time.time() stamp = datetime.datetime.fromtimestamp(ts).strftime( '%Y-%m-%d_%H-%M-%S.jpg') s.save(stamp) time.sleep(2)
def do_feed(self):
    """Return the object that will process the feed.

    Uses the ``eater`` validator when the pie namespace is among the
    dispatcher's default namespaces, otherwise a plain ``feed``.
    """
    from feed import feed
    # common case first: pie namespace not active -> plain feed handler
    if pie_namespace not in self.dispatcher.defaultNamespaces:
        return feed()
    from validators import eater
    return eater()
# 添加trade if ed.is_tradetime_now(): file_name = "/home/way/signal/chicang" # file_name1 = '/home/way/signal/eryue' # with open(file_name) as f: # yiyue = str(f.readlines()[0]) m = multiprocessing.Process(target=account_info, args=(account_flag,)) m.start() if account_flag.read_flag() == 2: if pre_signal(1) <= -2.31: while True: now_time = time.localtime() now = (now_time.tm_hour, now_time.tm_min, now_time.tm_sec) if now >= (9, 24, 56): chiyou_feed = feed.feed("chicang") chicang_trade = pre_trade.pre_trade(chiyou_feed.load(), user) chicang_trade.stop() account_flag.set_flag(4) break else: time.sleep(1) else: chiyou_feed = feed.feed("chicang") chicang_trade = trade.trade(chiyou_feed.load(), user) print("chiyou") p = multiprocessing.Process(target=chiyou, args=(chicang_trade, limit)) p.start() elif account_flag.read_flag() in (4, 8): zhenfu = cser.zhenfu()
@app.route('/read/thread_day')
def read_next_day():
    """Advance the active forum by one day and redirect to the index."""
    forum_list[forum_index].get_next_day()
    return redirect('/')


@app.route('/thread/<int:thread_id>')
def thread_page(thread_id):
    """Render every post of one thread, stripped to printable ASCII."""
    # the <int:thread_id> converter already yields an int, so the
    # original's extra int() cast was redundant
    thread = forum_list[forum_index].thread_list[thread_id]
    posts = [make_ascii(p.text) for p in thread.post_list()]
    return render_template('thread.html', posts=posts)


def make_ascii(text):
    """Drop non-printable characters from *text* and mark it HTML-safe.

    Uses ``''.join`` over a generator instead of ``filter()`` so the
    result is a str on both Python 2 and 3 -- on py3, ``filter()``
    returns an iterator which Markup would stringify as
    '<filter object ...>'.
    """
    return Markup(''.join(ch for ch in text if ch in string.printable))


if __name__ == '__main__':
    # one forum per feed database (base topic plus its '2' variant)
    for db_name in ('OnlineGames', 'OnlineGames2',
                    'Nutrition', 'Nutrition2',
                    'Innovation', 'Innovation2',
                    'POSA', 'POSA2'):
        forum_list.append(forum(feed(db_name)))
    for f in forum_list:
        # precompute the ranking heaps used by the UI
        heap.priority_list(f, analyze.evaluate, 'default')
        heap.priority_list(f, analyze.question_marks, 'question_marks')
        heap.priority_list(f, analyze.question_words, 'question_words')
        heap.priority_list(f, analyze.social_words, 'social_words')
        f.get_next_day()
    app.run(debug=True, host='0.0.0.0')
def setUp(self):
    """Point the test fixture at the OnlineGames database and build its feed."""
    database_name = "OnlineGames"
    self.database = database_name
    self.feed = feed.feed(database_name)
#gather info from subscribed csv file sub, ref, ignore = read_sub('subscribed') #read subscribed.txt # sub contains [[sub_status,folder_name,feed_url],[sub_status_2,folder_name_2,feed_url_2],[etc,etc,etc]] #as long as there is a subscription assert (len(sub) != 0) for each_sub in sub: sub_folder = each_sub[1] feed_url = each_sub[2] make_log(path_pods, sub_folder) episodes = read_episodes(path_pods, sub_folder) ## print('episodes: {}'.format(len(episodes))) title_episode_links = feed(feed_url) ## print('first episode title: {}'.format(title_episode_links[0][0])) clean_title = clean_titles(title_episode_links) new_titles = find_new(path_pods, sub_folder, clean_title, episodes) print('For: {}\n New: {}'.format(sub_folder, len(new_titles))) download_backlog = True if len(new_titles) > 1: get = input('Download backlog of {} episodes? (y or n): '.format( len(new_titles))) if get.lower() == 'y': download_backlog = True else: download_backlog = False
def update_outfeeds(self):
    """(Re)load outfeed definitions from config/hooks/outfeeds.

    Parses the per-feed '#start_param' options, loads each syncing feed's
    trackdb of already-delivered article ids into self.feed_db, starts
    feeds that are new, and shuts down feeds whose config file vanished.
    """
    self.log(self.logger.INFO, 'reading outfeeds..')
    counter_new = 0
    current_feedlist = list()
    self.feed_db = dict()
    for outfeed in os.listdir(os.path.join('config', 'hooks', 'outfeeds')):
        outfeed_file = os.path.join('config', 'hooks', 'outfeeds', outfeed)
        if not os.path.isfile(outfeed_file):
            continue
        sync_on_startup = False
        debuglevel = self.loglevel
        proxy_type = None
        proxy_ip = None
        proxy_port = None
        # 'with' guarantees the config file is closed even if parsing raises
        # (the original only called f.close() on the success path)
        with open(outfeed_file) as f:
            for line in f:
                lowerline = line.lower()
                if not lowerline.startswith('#start_param '):
                    continue
                if lowerline.startswith('#start_param sync_on_startup=true'):
                    sync_on_startup = True
                elif lowerline.startswith('#start_param debug='):
                    try:
                        debuglevel = int(lowerline.split('=')[1][0])
                    except (ValueError, IndexError):
                        # malformed debug value: keep the default level
                        pass
                elif lowerline.startswith('#start_param proxy_type='):
                    proxy_type = lowerline.split('=', 1)[1].rstrip()
                elif lowerline.startswith('#start_param proxy_ip='):
                    proxy_ip = lowerline.split('=', 1)[1].rstrip()
                elif lowerline.startswith('#start_param proxy_port='):
                    proxy_port = lowerline.split('=', 1)[1].rstrip()
        if ':' in outfeed:
            # filename is host:port (host part may itself contain ':')
            host = ':'.join(outfeed.split(':')[:-1])
            port = int(outfeed.split(':')[-1])
        else:
            # FIXME: how to deal with ipv6 and no default port?
            host = outfeed
            port = 119
        name = "outfeed-{0}-{1}".format(host, port)
        # open track db here, read, close
        if sync_on_startup:
            self.feed_db[name] = list()
            try:
                # 'with' closes the trackdb (the original never closed it)
                with open('{0}.trackdb'.format(name), 'r') as f:
                    for line in f:
                        self.feed_db[name].append(line.rstrip('\n'))
            except IOError as e:
                if e.errno != 2:  # 2 == ENOENT: a missing trackdb is fine
                    self.log(
                        self.logger.ERROR,
                        'cannot open: %s: %s' %
                        ('{0}.trackdb'.format(name), e.strerror))
        current_feedlist.append(name)
        proxy = None
        if proxy_type is not None and proxy_ip is not None:
            try:
                proxy_port = int(proxy_port)
                proxy = (proxy_type, proxy_ip, proxy_port)
                self.log(
                    self.logger.INFO,
                    "starting outfeed %s using proxy: %s" % (name, str(proxy)),
                    2)
            except (TypeError, ValueError):
                # missing or non-numeric proxy_port: run without a proxy
                pass
        if name not in self.feeds:
            try:
                self.log(self.logger.DEBUG, 'starting outfeed: %s' % name)
                self.feeds[name] = feed.feed(
                    self, self.logger, outstream=True, host=host, port=port,
                    sync_on_startup=sync_on_startup, proxy=proxy,
                    debug=debuglevel)
                self.feeds[name].start()
                counter_new += 1
            except Exception as e:
                self.log(self.logger.WARNING,
                         'could not start outfeed %s: %s' % (name, e))
    # shut down running outfeeds whose config file no longer exists
    counter_removed = 0
    for name in [n for n in self.feeds if n.startswith('outfeed')]:
        if name not in current_feedlist:
            self.feeds[name].shutdown()
            counter_removed += 1
    self.log(self.logger.INFO, 'outfeeds added: %i' % counter_new)
    self.log(self.logger.INFO, 'outfeeds removed: %i' % counter_removed)
def testGetHtml():
    """Smoke-test feed.getHtml against a live Wikipedia page."""
    wiki_url = "https://en.wikipedia.org/wiki/Article"
    fetcher = feed.feed()
    print(fetcher.getHtml(wiki_url))
def run(self):
    """Main loop of the SRNd daemon.

    Three phases, all inline below:
      1. start outfeeds and plugins,
      2. one-shot startup sync: queue every article of every whitelisted
         group to its matching outfeed/plugin sync targets,
      3. open the admin control socket and serve new infeed connections
         plus length-prefixed JSON control commands until self.running
         goes False.
    """
    self.running = True
    self.feeds = dict()
    self.update_outfeeds()
    if len(self.plugins) > 0:
        self.log(self.logger.INFO, 'starting plugins..')
        for plugin in self.plugins:
            self.plugins[plugin].start()
            # small stagger between plugin starts
            time.sleep(0.1)
    self.update_hooks()
    current_sync_targets = list()
    synclist = dict()
    groups = os.listdir('groups')
    # sync groups in random order
    random.shuffle(groups)
    for group in groups:
        group_dir = os.path.join('groups', group)
        if os.path.isdir(group_dir):
            self.log(self.logger.DEBUG, 'startup sync, checking %s..' % group)
            current_sync_targets = list()
            for group_item in self.hooks:
                # a trailing '*' in a hook entry is a prefix wildcard
                if (group_item[-1] == '*' and group.startswith(group_item[:-1])) or group == group_item:
                    # group matches whitelist
                    for current_hook in self.hooks[group_item]:
                        if current_hook.startswith('filesystem-'):
                            continue
                        # loop through matching hooks in whitelist
                        current_hook_blacklisted = False
                        for blacklist_group_item in self.hook_blacklist:
                            # loop through blacklist
                            if (blacklist_group_item[-1] == '*' and group.startswith(blacklist_group_item[:-1])) or group == blacklist_group_item:
                                # group matches blacklist
                                if current_hook in self.hook_blacklist[blacklist_group_item]:
                                    # current hook is blacklisted, don't add and try next whitelist_hook
                                    current_hook_blacklisted = True
                                    break
                        if not current_hook_blacklisted:
                            if current_hook.startswith('outfeeds-'):
                                # FIXME this doesn't look like its working with ipv6?
                                if current_hook[9:].find(':') == -1:
                                    self.log(self.logger.ERROR, 'outfeed filename should be in host:port format')
                                    break
                                # translate hook file name 'outfeeds-host:port'
                                # to the running feed's name 'outfeed-host-port'
                                parts = current_hook[9:].split(':')
                                name = 'outfeed-' + ':'.join(parts[:-1]) + '-' + parts[-1]
                                if name in self.feeds:
                                    if self.feeds[name].sync_on_startup and name not in current_sync_targets:
                                        self.log(self.logger.DEBUG, 'startup sync, adding %s' % name)
                                        current_sync_targets.append(name)
                                else:
                                    self.log(self.logger.WARNING, 'unknown outfeed detected. wtf? %s' % name)
                            elif current_hook.startswith('plugins-'):
                                name = 'plugin-' + current_hook[8:]
                                if name in self.plugins:
                                    if self.plugins[name].sync_on_startup and name not in current_sync_targets:
                                        self.log(self.logger.DEBUG, 'startup sync, adding %s' % name)
                                        current_sync_targets.append(name)
                                else:
                                    self.log(self.logger.WARNING, 'unknown plugin detected. wtf? %s' % name)
                            else:
                                self.log(self.logger.WARNING, 'unknown hook detected. wtf? %s' % current_hook)
            # got all whitelist matching hooks for current group which are not matched by blacklist as well in current_sync_targets. hopefully.
            if len(current_sync_targets) > 0:
                # send fresh articles first
                file_list = os.listdir(group_dir)
                file_list = [int(k) for k in file_list]
                file_list.sort()
                synclist[group] = {'targets': current_sync_targets, 'file_list': file_list}
    # drain synclist, 500 article links per group per pass
    while len(synclist) > 0:
        for group in synclist:
            # NOTE(review): empty_sync_group is re-created on every group
            # iteration, so only groups found empty since the last reset
            # survive to the deletion loop below -- looks fragile; confirm
            # this is intended
            empty_sync_group = list()
            if len(synclist[group]['file_list']) == 0:
                empty_sync_group.append(group)
            else:
                group_dir = os.path.join('groups', group)
                sync_chunk = synclist[group]['file_list'][:500]
                for link in sync_chunk:
                    link = str(link)
                    try:
                        # each group entry is a symlink pointing at the article file
                        message_id = os.path.basename(os.readlink(os.path.join(group_dir, link)))
                        if os.stat(os.path.join(group_dir, link)).st_size == 0:
                            self.log(self.logger.WARNING, 'empty article found in group %s with id %s pointing to %s' % (group_dir, link, message_id))
                            continue
                    except:
                        self.log(self.logger.ERROR, 'invalid link found in group %s with id %s' % (group_dir, link))
                        continue
                    for current_hook in synclist[group]['targets']:
                        if current_hook.startswith('outfeed-'):
                            try:
                                # skip articles already recorded in the feed's trackdb
                                self.feed_db[current_hook].index(message_id)
                            except ValueError:
                                self.feeds[current_hook].add_article(message_id)
                        elif current_hook.startswith('plugin-'):
                            self.plugins[current_hook].add_article(message_id)
                        else:
                            self.log(self.logger.WARNING, 'unknown sync_hook detected. wtf? %s' % current_hook)
                del synclist[group]['file_list'][:500]
        for group in empty_sync_group:
            del synclist[group]
    self.log(self.logger.DEBUG, 'startup_sync done. hopefully.')
    del current_sync_targets
    del self.feed_db
    #files = filter(lambda f: os.stat(os.path.join(group_dir, f)).st_size > 0, os.listdir(group_dir)
    #files = filter(lambda f: os.path.isfile(os.path.join('articles', f)), os.listdir('articles'))
    #files.sort(key=lambda f: os.path.getmtime(os.path.join('articles', f)))
    #for name in self.feeds:
    #  if name.startswith('outfeed-127.0.0.1'):
    #    for item in files:
    #      self.feeds[name].add_article(item)
    self.dropper.start()
    # setup admin control socket
    # FIXME: add path of linux socket to SRNd.conf
    s_addr = 'control.socket'
    try:
        os.unlink(s_addr)
    except OSError:
        if os.path.exists(s_addr):
            raise
    ctl_socket_server = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    ctl_socket_server.bind(s_addr)
    ctl_socket_server.listen(10)
    ctl_socket_server.setblocking(0)
    os.chmod(s_addr, 0o660)
    poller = select.poll()
    poller.register(self.socket.fileno(), select.POLLIN)
    poller.register(ctl_socket_server.fileno(), select.POLLIN)
    self.poller = poller
    self.ctl_socket_clients = dict()
    self.ctl_socket_handlers = dict()
    self.ctl_socket_handlers["status"] = self.ctl_socket_handler_status
    self.ctl_socket_handlers["log"] = self.ctl_socket_handler_logger
    self.ctl_socket_handlers["stats"] = self.ctl_socket_handler_stats
    self.start_up_timestamp = int(time.time())
    while self.running:
        result = poller.poll(-1)
        for fd, mask in result:
            if fd == self.socket.fileno():
                # new NNTP infeed connection
                try:
                    con = self.socket.accept()
                    name = 'infeed-{0}-{1}'.format(con[1][0], con[1][1])
                    if name not in self.feeds:
                        self.feeds[name] = feed.feed(self, self.logger, connection=con, debug=self.infeed_debug)
                        self.feeds[name].start()
                    else:
                        self.log(self.logger.WARNING, 'got connection from %s but its still in feeds. wtf?' % name)
                except socket.error as e:
                    if e.errno == 22:
                        # errno 22 is likely EINVAL -- original note: "wtf is
                        # this? add comments or use STATIC_VARS instead of
                        # strange numbers"
                        break
                    elif e.errno == 4:
                        # system call interrupted
                        continue
                    else:
                        raise e
                continue
            elif fd == ctl_socket_server.fileno():
                # new control-socket client: register it with the poller
                con, addr = ctl_socket_server.accept()
                con.setblocking(0)
                poller.register(con.fileno(), select.POLLIN)
                self.ctl_socket_clients[con.fileno()] = (con, os.fdopen(con.fileno(), 'w', 1))
                continue
            else:
                # data from an existing control-socket client: a 4-byte
                # big-endian length prefix followed by a JSON request
                try:
                    try:
                        data = os.read(fd, 4)
                    except:
                        data = ''
                    if len(data) < 4:
                        self.terminate_ctl_socket_connection(fd)
                        continue
                    length = self.decode_big_endian(data, 4)
                    data = os.read(fd, length)
                    if len(data) != length:
                        self.terminate_ctl_socket_connection(fd)
                        continue
                    try:
                        data = json.loads(data)
                    except Exception as e:
                        self.log(self.logger.WARNING, "failed to decode json data: %s" % e)
                        continue
                    self.log(self.logger.DEBUG, "got something to read from control socket at fd %i: %s" % (fd, data))
                    if not "command" in data:
                        self.ctl_socket_send_data(fd, {"type": "response", "status": "failed", "data": "no command given"})
                        continue
                    if not "data" in data:
                        data["data"] = ''
                    if data["command"] in self.ctl_socket_handlers:
                        try:
                            self.ctl_socket_send_data(fd, {"type": "response", "status": "success", "command": data["command"], "args": data["data"], "data": self.ctl_socket_handlers[data["command"]](data, fd)})
                        except Exception as e:
                            try:
                                self.ctl_socket_send_data(fd, {"type": "response", "status": "failed", "command": data["command"], "args": data["data"], "data": "internal SRNd handler returned exception: %s" % e})
                            except Exception as e1:
                                self.log(self.logger.INFO, "can't send exception message to control socket connection using fd %i: %s, original exception was %s" % (fd, e1, e))
                                self.terminate_ctl_socket_connection(fd)
                        continue
                    self.ctl_socket_send_data(fd, {"type": "response", "status": "failed", "command": data["command"], "args": data["data"], "data": "no handler for given command '%s'" % data["command"]})
                except Exception as e:
                    self.log(self.logger.INFO, "unhandled exception while processing control socket request using fd %i: %s" % (fd, e))
                    self.terminate_ctl_socket_connection(fd)
    ctl_socket_server.shutdown(socket.SHUT_RDWR)
    ctl_socket_server.close()
    self.socket.close()
def update_outfeeds(self):
    """(Re)load outfeed definitions from config/hooks/outfeeds.

    Parses the per-feed '#start_param' options, loads each syncing feed's
    trackdb of already-delivered article ids into self.feed_db, starts
    feeds that are new, and shuts down feeds whose config file vanished.
    """
    self.log(self.logger.INFO, 'reading outfeeds..')
    counter_new = 0
    current_feedlist = list()
    self.feed_db = dict()
    for outfeed in os.listdir(os.path.join('config', 'hooks', 'outfeeds')):
        outfeed_file = os.path.join('config', 'hooks', 'outfeeds', outfeed)
        if not os.path.isfile(outfeed_file):
            continue
        sync_on_startup = False
        debuglevel = self.loglevel
        proxy_type = None
        proxy_ip = None
        proxy_port = None
        # 'with' guarantees the config file is closed even if parsing raises
        # (the original only called f.close() on the success path)
        with open(outfeed_file) as f:
            for line in f:
                lowerline = line.lower()
                if not lowerline.startswith('#start_param '):
                    continue
                if lowerline.startswith('#start_param sync_on_startup=true'):
                    sync_on_startup = True
                elif lowerline.startswith('#start_param debug='):
                    try:
                        debuglevel = int(lowerline.split('=')[1][0])
                    except (ValueError, IndexError):
                        # malformed debug value: keep the default level
                        pass
                elif lowerline.startswith('#start_param proxy_type='):
                    proxy_type = lowerline.split('=', 1)[1].rstrip()
                elif lowerline.startswith('#start_param proxy_ip='):
                    proxy_ip = lowerline.split('=', 1)[1].rstrip()
                elif lowerline.startswith('#start_param proxy_port='):
                    proxy_port = lowerline.split('=', 1)[1].rstrip()
        if ':' in outfeed:
            # filename is host:port (host part may itself contain ':')
            host = ':'.join(outfeed.split(':')[:-1])
            port = int(outfeed.split(':')[-1])
        else:
            # FIXME: how to deal with ipv6 and no default port?
            host = outfeed
            port = 119
        name = "outfeed-{0}-{1}".format(host, port)
        # open track db here, read, close
        if sync_on_startup:
            self.feed_db[name] = list()
            try:
                # 'with' closes the trackdb (the original never closed it)
                with open('{0}.trackdb'.format(name), 'r') as f:
                    for line in f:
                        self.feed_db[name].append(line.rstrip('\n'))
            except IOError as e:
                if e.errno != 2:  # 2 == ENOENT: a missing trackdb is fine
                    self.log(
                        self.logger.ERROR,
                        'cannot open: %s: %s' %
                        ('{0}.trackdb'.format(name), e.strerror))
        current_feedlist.append(name)
        proxy = None
        if proxy_type is not None and proxy_ip is not None:
            try:
                proxy_port = int(proxy_port)
                proxy = (proxy_type, proxy_ip, proxy_port)
                self.log(
                    self.logger.INFO,
                    "starting outfeed %s using proxy: %s" % (name, str(proxy)),
                    2)
            except (TypeError, ValueError):
                # missing or non-numeric proxy_port: run without a proxy
                pass
        if name not in self.feeds:
            try:
                self.log(self.logger.DEBUG, 'starting outfeed: %s' % name)
                self.feeds[name] = feed.feed(
                    self, self.logger, outstream=True, host=host, port=port,
                    sync_on_startup=sync_on_startup, proxy=proxy,
                    debug=debuglevel)
                self.feeds[name].start()
                counter_new += 1
            except Exception as e:
                self.log(self.logger.WARNING,
                         'could not start outfeed %s: %s' % (name, e))
    # shut down running outfeeds whose config file no longer exists
    counter_removed = 0
    for name in [n for n in self.feeds if n.startswith('outfeed')]:
        if name not in current_feedlist:
            self.feeds[name].shutdown()
            counter_removed += 1
    self.log(self.logger.INFO, 'outfeeds added: %i' % counter_new)
    self.log(self.logger.INFO, 'outfeeds removed: %i' % counter_removed)
def run(self):
    """Main loop of the SRNd daemon.

    Three phases, all inline below:
      1. start outfeeds and plugins,
      2. one-shot startup sync: queue every article of every whitelisted
         group to its matching outfeed/plugin sync targets,
      3. open the admin control socket and serve new infeed connections
         plus length-prefixed JSON control commands until self.running
         goes False.
    """
    self.running = True
    self.feeds = dict()
    self.update_outfeeds()
    if len(self.plugins) > 0:
        self.log(self.logger.INFO, 'starting plugins..')
        for plugin in self.plugins:
            self.plugins[plugin].start()
            # small stagger between plugin starts
            time.sleep(0.1)
    self.update_hooks()
    current_sync_targets = list()
    synclist = dict()
    groups = os.listdir('groups')
    # sync groups in random order
    random.shuffle(groups)
    for group in groups:
        group_dir = os.path.join('groups', group)
        if os.path.isdir(group_dir):
            self.log(self.logger.DEBUG, 'startup sync, checking %s..' % group)
            current_sync_targets = list()
            for group_item in self.hooks:
                # a trailing '*' in a hook entry is a prefix wildcard
                if (group_item[-1] == '*' and group.startswith(group_item[:-1])) or group == group_item:
                    # group matches whitelist
                    for current_hook in self.hooks[group_item]:
                        if current_hook.startswith('filesystem-'):
                            continue
                        # loop through matching hooks in whitelist
                        current_hook_blacklisted = False
                        for blacklist_group_item in self.hook_blacklist:
                            # loop through blacklist
                            if (blacklist_group_item[-1] == '*' and group.startswith(blacklist_group_item[:-1])) or group == blacklist_group_item:
                                # group matches blacklist
                                if current_hook in self.hook_blacklist[blacklist_group_item]:
                                    # current hook is blacklisted, don't add and try next whitelist_hook
                                    current_hook_blacklisted = True
                                    break
                        if not current_hook_blacklisted:
                            if current_hook.startswith('outfeeds-'):
                                # FIXME this doesn't look like its working with ipv6?
                                if current_hook[9:].find(':') == -1:
                                    self.log(self.logger.ERROR, 'outfeed filename should be in host:port format')
                                    break
                                # translate hook file name 'outfeeds-host:port'
                                # to the running feed's name 'outfeed-host-port'
                                parts = current_hook[9:].split(':')
                                name = 'outfeed-' + ':'.join(parts[:-1]) + '-' + parts[-1]
                                if name in self.feeds:
                                    if self.feeds[name].sync_on_startup and name not in current_sync_targets:
                                        self.log(self.logger.DEBUG, 'startup sync, adding %s' % name)
                                        current_sync_targets.append(name)
                                else:
                                    self.log(self.logger.WARNING, 'unknown outfeed detected. wtf? %s' % name)
                            elif current_hook.startswith('plugins-'):
                                name = 'plugin-' + current_hook[8:]
                                if name in self.plugins:
                                    if self.plugins[name].sync_on_startup and name not in current_sync_targets:
                                        self.log(self.logger.DEBUG, 'startup sync, adding %s' % name)
                                        current_sync_targets.append(name)
                                else:
                                    self.log(self.logger.WARNING, 'unknown plugin detected. wtf? %s' % name)
                            else:
                                self.log(self.logger.WARNING, 'unknown hook detected. wtf? %s' % current_hook)
            # got all whitelist matching hooks for current group which are not matched by blacklist as well in current_sync_targets. hopefully.
            if len(current_sync_targets) > 0:
                # send fresh articles first
                file_list = os.listdir(group_dir)
                file_list = [int(k) for k in file_list]
                file_list.sort()
                synclist[group] = {'targets': current_sync_targets, 'file_list': file_list}
    # drain synclist, 500 article links per group per pass
    while len(synclist) > 0:
        for group in synclist:
            # NOTE(review): empty_sync_group is re-created on every group
            # iteration, so only groups found empty since the last reset
            # survive to the deletion loop below -- looks fragile; confirm
            # this is intended
            empty_sync_group = list()
            if len(synclist[group]['file_list']) == 0:
                empty_sync_group.append(group)
            else:
                group_dir = os.path.join('groups', group)
                sync_chunk = synclist[group]['file_list'][:500]
                for link in sync_chunk:
                    link = str(link)
                    try:
                        # each group entry is a symlink pointing at the article file
                        message_id = os.path.basename(os.readlink(os.path.join(group_dir, link)))
                        if os.stat(os.path.join(group_dir, link)).st_size == 0:
                            self.log(self.logger.WARNING, 'empty article found in group %s with id %s pointing to %s' % (group_dir, link, message_id))
                            continue
                    except:
                        self.log(self.logger.ERROR, 'invalid link found in group %s with id %s' % (group_dir, link))
                        continue
                    for current_hook in synclist[group]['targets']:
                        if current_hook.startswith('outfeed-'):
                            try:
                                # skip articles already recorded in the feed's trackdb
                                self.feed_db[current_hook].index(message_id)
                            except ValueError:
                                self.feeds[current_hook].add_article(message_id)
                        elif current_hook.startswith('plugin-'):
                            self.plugins[current_hook].add_article(message_id)
                        else:
                            self.log(self.logger.WARNING, 'unknown sync_hook detected. wtf? %s' % current_hook)
                del synclist[group]['file_list'][:500]
        for group in empty_sync_group:
            del synclist[group]
    self.log(self.logger.DEBUG, 'startup_sync done. hopefully.')
    del current_sync_targets
    del self.feed_db
    #files = filter(lambda f: os.stat(os.path.join(group_dir, f)).st_size > 0, os.listdir(group_dir)
    #files = filter(lambda f: os.path.isfile(os.path.join('articles', f)), os.listdir('articles'))
    #files.sort(key=lambda f: os.path.getmtime(os.path.join('articles', f)))
    #for name in self.feeds:
    #  if name.startswith('outfeed-127.0.0.1'):
    #    for item in files:
    #      self.feeds[name].add_article(item)
    self.dropper.start()
    # setup admin control socket
    # FIXME: add path of linux socket to SRNd.conf
    s_addr = 'control.socket'
    try:
        os.unlink(s_addr)
    except OSError:
        if os.path.exists(s_addr):
            raise
    ctl_socket_server = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    ctl_socket_server.bind(s_addr)
    ctl_socket_server.listen(10)
    ctl_socket_server.setblocking(0)
    os.chmod(s_addr, 0o660)
    poller = select.poll()
    poller.register(self.socket.fileno(), select.POLLIN)
    poller.register(ctl_socket_server.fileno(), select.POLLIN)
    self.poller = poller
    self.ctl_socket_clients = dict()
    self.ctl_socket_handlers = dict()
    self.ctl_socket_handlers["status"] = self.ctl_socket_handler_status
    self.ctl_socket_handlers["log"] = self.ctl_socket_handler_logger
    self.ctl_socket_handlers["stats"] = self.ctl_socket_handler_stats
    self.start_up_timestamp = int(time.time())
    while self.running:
        result = poller.poll(-1)
        for fd, mask in result:
            if fd == self.socket.fileno():
                # new NNTP infeed connection
                try:
                    con = self.socket.accept()
                    name = 'infeed-{0}-{1}'.format(con[1][0], con[1][1])
                    if name not in self.feeds:
                        self.feeds[name] = feed.feed(self, self.logger, connection=con, debug=self.infeed_debug)
                        self.feeds[name].start()
                    else:
                        self.log(self.logger.WARNING, 'got connection from %s but its still in feeds. wtf?' % name)
                except socket.error as e:
                    if e.errno == 22:
                        # errno 22 is likely EINVAL -- original note: "wtf is
                        # this? add comments or use STATIC_VARS instead of
                        # strange numbers"
                        break
                    elif e.errno == 4:
                        # system call interrupted
                        continue
                    else:
                        raise e
                continue
            elif fd == ctl_socket_server.fileno():
                # new control-socket client: register it with the poller
                con, addr = ctl_socket_server.accept()
                con.setblocking(0)
                poller.register(con.fileno(), select.POLLIN)
                self.ctl_socket_clients[con.fileno()] = (con, os.fdopen(con.fileno(), 'w', 1))
                continue
            else:
                # data from an existing control-socket client: a 4-byte
                # big-endian length prefix followed by a JSON request
                try:
                    try:
                        data = os.read(fd, 4)
                    except:
                        data = ''
                    if len(data) < 4:
                        self.terminate_ctl_socket_connection(fd)
                        continue
                    length = self.decode_big_endian(data, 4)
                    data = os.read(fd, length)
                    if len(data) != length:
                        self.terminate_ctl_socket_connection(fd)
                        continue
                    try:
                        data = json.loads(data)
                    except Exception as e:
                        self.log(self.logger.WARNING, "failed to decode json data: %s" % e)
                        continue
                    self.log(self.logger.DEBUG, "got something to read from control socket at fd %i: %s" % (fd, data))
                    if not "command" in data:
                        self.ctl_socket_send_data(fd, {"type": "response", "status": "failed", "data": "no command given"})
                        continue
                    if not "data" in data:
                        data["data"] = ''
                    if data["command"] in self.ctl_socket_handlers:
                        try:
                            self.ctl_socket_send_data(fd, {"type": "response", "status": "success", "command": data["command"], "args": data["data"], "data": self.ctl_socket_handlers[data["command"]](data, fd)})
                        except Exception as e:
                            try:
                                self.ctl_socket_send_data(fd, {"type": "response", "status": "failed", "command": data["command"], "args": data["data"], "data": "internal SRNd handler returned exception: %s" % e})
                            except Exception as e1:
                                self.log(self.logger.INFO, "can't send exception message to control socket connection using fd %i: %s, original exception was %s" % (fd, e1, e))
                                self.terminate_ctl_socket_connection(fd)
                        continue
                    self.ctl_socket_send_data(fd, {"type": "response", "status": "failed", "command": data["command"], "args": data["data"], "data": "no handler for given command '%s'" % data["command"]})
                except Exception as e:
                    self.log(self.logger.INFO, "unhandled exception while processing control socket request using fd %i: %s" % (fd, e))
                    self.terminate_ctl_socket_connection(fd)
    ctl_socket_server.shutdown(socket.SHUT_RDWR)
    ctl_socket_server.close()
    self.socket.close()
def do_feed(self):
    """Build and return a fresh feed handler instance."""
    from feed import feed as feed_factory
    handler = feed_factory()
    return handler
def testGetBody():
    """Smoke-test feed.getBody against a live Wikipedia page."""
    wiki_url = "https://en.wikipedia.org/wiki/Sexual_stimulation"
    fetcher = feed.feed()
    print(fetcher.getBody(wiki_url))
user.prepare('/data/pyquant/ht.json') user.keepalive() account_flag = flag.flag() limit = zxb_limit.zxb_limit(-2.31) #添加trade if ed.is_tradetime_now(): file_name = '/home/way/signal/chicang' file_name1 = '/home/way/signal/zhenfu' # with open(file_name) as f: # yiyue = str(f.readlines()[0]) m = multiprocessing.Process(target=account_info,args=(account_flag,)) m.start() if account_flag.read_flag() == 2: chiyou_feed = feed.feed('chicang') chicang_trade = trade.trade(chiyou_feed.load(), user) print('chiyou') p=multiprocessing.Process(target=chiyou, args=(chicang_trade, limit)) p.start() elif account_flag.read_flag() in (4,8): # with open(file_name1) as f: # yiyue = str(f.readlines()[0]) # s = stockpool.Stockpool() # # eryuedata = s.load(table=yiyue, day=120) # # base_info, data = tradable() #
def update_outfeeds(self):
    """(Re)load outfeed definitions from config/hooks/outfeeds.

    Parses the per-feed '#start_param' options, starts newly configured
    outfeeds, and shuts down running outfeeds whose config file has
    disappeared.
    """
    self.log(self.logger.INFO, "reading outfeeds..")
    counter_new = 0
    current_feedlist = list()
    for outfeed in os.listdir(os.path.join("config", "hooks", "outfeeds")):
        outfeed_file = os.path.join("config", "hooks", "outfeeds", outfeed)
        if not os.path.isfile(outfeed_file):
            continue
        sync_on_startup = False
        debuglevel = 2
        proxy_type = None
        proxy_ip = None
        proxy_port = None
        # 'with' guarantees the config file is closed even if parsing raises
        with open(outfeed_file) as f:
            for line in f:
                lowerline = line.lower()
                if not lowerline.startswith("#start_param "):
                    continue
                if lowerline.startswith("#start_param sync_on_startup=true"):
                    sync_on_startup = True
                elif lowerline.startswith("#start_param debug="):
                    try:
                        # BUG FIX: original referenced 'lownerline' (typo ->
                        # NameError) which the bare except silently ate, so a
                        # configured debug level never took effect
                        debuglevel = int(lowerline.split("=")[1][0])
                    except (ValueError, IndexError):
                        # malformed debug value: keep the default level
                        pass
                elif lowerline.startswith("#start_param proxy_type="):
                    proxy_type = lowerline.split("=", 1)[1].rstrip()
                elif lowerline.startswith("#start_param proxy_ip="):
                    proxy_ip = lowerline.split("=", 1)[1].rstrip()
                elif lowerline.startswith("#start_param proxy_port="):
                    proxy_port = lowerline.split("=", 1)[1].rstrip()
        if ":" in outfeed:
            # filename is host:port (host part may itself contain ':')
            host = ":".join(outfeed.split(":")[:-1])
            port = int(outfeed.split(":")[-1])
        else:
            # FIXME: how to deal with ipv6 and no default port?
            host = outfeed
            port = 119
        name = "outfeed-{0}-{1}".format(host, port)
        current_feedlist.append(name)
        proxy = None
        if proxy_type is not None and proxy_ip is not None:
            try:
                proxy_port = int(proxy_port)
                proxy = (proxy_type, proxy_ip, proxy_port)
                self.log(self.logger.INFO, "starting outfeed %s using proxy: %s" % (name, str(proxy)), 2)
            except (TypeError, ValueError):
                # missing or non-numeric proxy_port: run without a proxy
                pass
        if name not in self.feeds:
            try:
                self.feeds[name] = feed.feed(
                    self,
                    self.logger,
                    outstream=True,
                    host=host,
                    port=port,
                    sync_on_startup=sync_on_startup,
                    proxy=proxy,
                    debug=debuglevel,
                )
                self.feeds[name].start()
                counter_new += 1
            except Exception as e:
                self.log(self.logger.WARNING, "could not start outfeed %s: %s" % (name, e), 0)
    # shut down running outfeeds whose config file no longer exists
    counter_removed = 0
    for name in [n for n in self.feeds if n.startswith("outfeed")]:
        if name not in current_feedlist:
            self.feeds[name].shutdown()
            counter_removed += 1
    self.log(self.logger.INFO, "outfeeds added: %i" % counter_new)
    self.log(self.logger.INFO, "outfeeds removed: %i" % counter_removed)