def thread_db(obj):
    """Database polling loop: periodically sync users between the DB and the
    server pool until the pool's worker thread dies or the event is set.

    :param obj: factory (class) producing the DB-transfer instance.
    """
    import socket  # removed unused `import time`
    global db_instance
    # Prevent DB/network calls from hanging the loop indefinitely.
    timeout = 60
    socket.setdefaulttimeout(timeout)
    last_rows = []
    db_instance = obj()
    try:
        while True:
            load_config()
            db_instance.load_cfg()
            try:
                db_instance.push_db_all_user()
                rows = db_instance.pull_db_all_user()
                if rows:
                    db_instance.pull_ok = True
                db_instance.del_server_out_of_bound_safe(last_rows, rows)
                last_rows = rows
            except Exception:
                # Keep the loop alive on per-cycle failures; log the full trace.
                logging.error(traceback.format_exc())
            # Wake early when the event fires; stop if the pool thread died.
            if db_instance.event.wait(get_config().UPDATE_TIME) \
                    or not ServerPool.get_instance().thread.is_alive():
                break
    except KeyboardInterrupt:
        pass
    db_instance.del_servers()
    ServerPool.get_instance().stop()
    db_instance = None
def thread_db(obj):
    """MySQL-driven polling loop: sync users with the DB every
    MYSQL_UPDATE_TIME seconds until all worker threads are gone.

    :param obj: factory (class) producing the DB-transfer instance.
    """
    import socket  # removed unused `import time`
    global db_instance
    # Prevent DB/network calls from hanging the loop indefinitely.
    timeout = 60
    socket.setdefaulttimeout(timeout)
    last_rows = []
    db_instance = obj()
    shell.log_shadowsocks_version()
    try:
        # `resource` is Unix-only; keep this diagnostic best-effort so the
        # thread does not die with ImportError on other platforms.
        import resource
        logging.info('current process RLIMIT_NOFILE resource: soft %d hard %d' % resource.getrlimit(resource.RLIMIT_NOFILE))
    except ImportError:
        pass
    try:
        while True:
            load_config()
            try:
                db_instance.push_db_all_user()
                rows = db_instance.pull_db_all_user()
                db_instance.del_server_out_of_bound_safe(last_rows, rows)
                # Reset change-detection flags once the diff has been applied.
                db_instance.detect_text_ischanged = False
                db_instance.detect_hex_ischanged = False
                last_rows = rows
            except Exception:
                # Keep the loop alive on per-cycle failures; log the full trace.
                logging.error(traceback.format_exc())
            if db_instance.event.wait(get_config().MYSQL_UPDATE_TIME) or not db_instance.is_all_thread_alive():
                break
    except KeyboardInterrupt:
        pass
    db_instance.del_servers()
    ServerPool.get_instance().stop()
    db_instance = None
def thread_db(obj):
    """DB polling loop: sync users with the DB every UPDATE_TIME seconds
    until the server pool's worker thread dies or the event is set.

    :param obj: factory (class) producing the DB-transfer instance.
    """
    import socket  # removed unused `import time`
    global db_instance
    # Prevent DB/network calls from hanging the loop indefinitely.
    timeout = 60
    socket.setdefaulttimeout(timeout)
    last_rows = []
    db_instance = obj()
    ServerPool.get_instance()
    shell.log_shadowsocks_version()
    try:
        # `resource` is Unix-only; keep this diagnostic best-effort so the
        # thread does not die with ImportError on other platforms.
        import resource
        logging.info('current process RLIMIT_NOFILE resource: soft %d hard %d' % resource.getrlimit(resource.RLIMIT_NOFILE))
    except ImportError:
        pass
    try:
        while True:
            load_config()
            db_instance.load_cfg()
            try:
                db_instance.push_db_all_user()
                rows = db_instance.pull_db_all_user()
                if rows:
                    db_instance.pull_ok = True
                db_instance.del_server_out_of_bound_safe(last_rows, rows)
                last_rows = rows
            except Exception:
                # Keep the loop alive on per-cycle failures; log the full trace.
                logging.error(traceback.format_exc())
            if db_instance.event.wait(get_config().UPDATE_TIME) or not ServerPool.get_instance().thread.is_alive():
                break
    except KeyboardInterrupt:
        pass
    db_instance.del_servers()
    ServerPool.get_instance().stop()
    db_instance = None
def thread_db():
    """Entry point of the DbTransfer thread.

    Polls the database, merges in statically-configured additional ports,
    and pushes user diffs to the server pool until shutdown.
    """
    logging.debug('thread_db')
    import socket
    global db_instance
    # Prevent DB/network calls from hanging the loop indefinitely.
    timeout = 60
    socket.setdefaulttimeout(timeout)
    last_rows = []
    db_instance = DbTransfer()
    ServerPool.get_instance()
    shell.log_shadowsocks_version()
    try:
        # `resource` is Unix-only; this is best-effort diagnostics.
        import resource
        logging.info(
            'current process RLIMIT_NOFILE resource: soft %d hard %d' %
            resource.getrlimit(resource.RLIMIT_NOFILE))
    except Exception:
        pass
    # NOTE: the original issued an extra pull_db_all_user() here whose result
    # was discarded before ever being read; that dead DB round trip is removed.
    try:
        while True:
            load_config()
            try:
                db_instance.push_db_all_user()
                rows = db_instance.pull_db_all_user()
                if rows:
                    db_instance.pull_ok = True
                # Merge locally-configured extra ports as pseudo DB rows.
                config = shell.get_config(False)
                for port in config['additional_ports']:
                    val = config['additional_ports'][port]
                    val['port'] = int(port)
                    val['enable'] = 1
                    val['transfer_enable'] = 1024 ** 7  # effectively unlimited
                    val['u'] = 0
                    val['d'] = 0
                    if "password" in val:
                        val["passwd"] = val["password"]
                    rows.append(val)
                db_instance.del_server_out_of_bound_safe(last_rows, rows)
                last_rows = rows
            except Exception:
                # Keep the loop alive on per-cycle failures; log the full trace.
                logging.error(traceback.format_exc())
            if db_instance.event.wait(get_config().UPDATE_TIME) \
                    or not ServerPool.get_instance().thread.is_alive():
                break
    except KeyboardInterrupt:
        pass
    db_instance.del_servers()
    ServerPool.get_instance().stop()
    db_instance = None
def thread_db(obj):
    """DB polling loop: sync users (including statically-configured
    additional ports) with the server pool until shutdown.

    :param obj: factory (class) producing the DB-transfer instance.
    """
    import socket  # removed unused `import time`
    global db_instance
    # Prevent DB/network calls from hanging the loop indefinitely.
    timeout = 60
    socket.setdefaulttimeout(timeout)
    last_rows = []
    db_instance = obj()
    ServerPool.get_instance()
    shell.log_shadowsocks_version()
    try:
        # `resource` is Unix-only; this is best-effort diagnostics.
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed here.
        import resource
        logging.info('current process RLIMIT_NOFILE resource: soft %d hard %d' % resource.getrlimit(resource.RLIMIT_NOFILE))
    except Exception:
        pass
    try:
        while True:
            load_config()
            db_instance.load_cfg()
            try:
                db_instance.push_db_all_user()
                rows = db_instance.pull_db_all_user()
                if rows:
                    db_instance.pull_ok = True
                # Merge locally-configured extra ports as pseudo DB rows.
                config = shell.get_config(False)
                for port in config['additional_ports']:
                    val = config['additional_ports'][port]
                    val['port'] = int(port)
                    val['enable'] = 1
                    val['transfer_enable'] = 1024 ** 7  # effectively unlimited
                    val['u'] = 0
                    val['d'] = 0
                    if "password" in val:
                        val["passwd"] = val["password"]
                    rows.append(val)
                db_instance.del_server_out_of_bound_safe(last_rows, rows)
                last_rows = rows
            except Exception:
                # Keep the loop alive on per-cycle failures; log the full trace.
                logging.error(traceback.format_exc())
            if db_instance.event.wait(get_config().UPDATE_TIME) or not ServerPool.get_instance().thread.is_alive():
                break
    except KeyboardInterrupt:
        pass
    db_instance.del_servers()
    ServerPool.get_instance().stop()
    db_instance = None
def thread_db(obj):
    """Web-API driven sync loop: ping the panel API each cycle and, when it
    answers, push/pull users and apply the diff to the running servers.

    :param obj: factory (class) producing the DB-transfer instance.
    """
    import socket
    import webapi_utils
    global db_instance
    global webapi
    socket.setdefaulttimeout(60)
    db_instance = obj()
    webapi = webapi_utils.WebApi()
    shell.log_shadowsocks_version()
    try:
        import resource
        logging.info(
            "current process RLIMIT_NOFILE resource: soft %d hard %d"
            % resource.getrlimit(resource.RLIMIT_NOFILE))
    except:
        pass
    previous_rows = []
    try:
        while True:
            load_config()
            try:
                if webapi.getApi("ping") is None:
                    logging.error(
                        "something wrong with your http api, please check your config and website status and try again later."
                    )
                else:
                    db_instance.push_db_all_user()
                    current_rows = db_instance.pull_db_all_user()
                    db_instance.del_server_out_of_bound_safe(
                        previous_rows, current_rows)
                    previous_rows = current_rows
            except Exception:
                logging.error(traceback.format_exc())
            # Wait up to a minute; leave when asked to stop or when any
            # worker thread has died.
            stop_requested = db_instance.event.wait(60)
            if stop_requested or not db_instance.is_all_thread_alive():
                break
            if db_instance.has_stopped:
                break
    except KeyboardInterrupt:
        pass
    db_instance.del_servers()
    ServerPool.get_instance().stop()
    db_instance = None
def get_instance():
    """Return a (cached) Client configured with a random UA and proxy.

    Instances are memoized per user-agent in the module-level ``_instances``
    map, so repeated calls that pick the same UA reuse one Client.
    """
    config = load_config('client')
    user_agents = config['user-agents']
    proxies = config['proxies']
    # random.choice is the idiomatic equivalent of indexing with
    # randint(0, len-1); empty lists fall back to the defaults.
    user_agent = random.choice(user_agents) if user_agents else DEFAULT_UA
    proxy = random.choice(proxies) if proxies else None
    try:
        instance = _instances[user_agent]
    except KeyError:
        instance = Client(user_agent, proxy)
        _instances[user_agent] = instance
    return instance
def __init__(self):
    """Bind logging, panel configuration and the web API client."""
    config = load_config()
    self.config = config
    # Panel endpoint settings read from the loaded configuration.
    self.url_base = config.get('sspanel_url')
    self.key = config.get('key')
    self.node_id = config.get('node_id')
    self.logger = logging.getLogger()
    self.webapi = WebApi()
def __init__(self):
    """Set up configuration, logging, collaborators and run-state flags."""
    # Configuration and logging first.
    self.config = load_config()
    self.logger = logging.getLogger()
    # Collaborators: panel API client and the V2 core manager.
    self.mu_api = MuAPI()
    self.manager = V2Manager()
    # Run-state flags used by the polling loop.
    self.first_time_start = True
    self.has_stopped = False
    self.event = threading.Event()
def __init__(self, current_node_info=None, next_node_info=None):
    """Initialize the manager state and (outside docker) restart the core."""
    cfg = load_config()
    self.config = cfg
    self.logger = logging.getLogger()
    # API client for the locally running core.
    self.client = Client("127.0.0.1", cfg.get("api_port", "2333"))
    self.current_node_info = current_node_info
    self.next_node_info = next_node_info
    self.if_user_change = False
    self.logger.info('Manager initializing.')
    self.INBOUND_TAG = "MAIN_INBOUND"
    # Pending user diffs and currently known inbound/user state.
    self.users_to_be_removed = {}
    self.users_to_be_add = {}
    self.current_inbound_tags = set()
    self.users = {}
    # In docker the core's lifecycle is managed externally, so skip restart.
    if not cfg.get("docker", False):
        self.restart()
def thread_db(obj):
    """Speed-test loop: run a speed test every ``speedtest`` hours.

    Returns immediately when speed testing is disabled (setting is 0/absent).

    :param obj: factory (class) producing the speed-test instance.
    """
    config = load_config()
    # Guard clause: disabled unless the config requests an interval.
    if config.get("speedtest", 0) == 0:
        return
    global db_instance
    db_instance = obj()
    try:
        while True:
            try:
                db_instance.speedtest_thread()
            except Exception:
                import traceback
                logging.error(traceback.format_exc())
            # Sleep `speedtest` hours; a set event means "stop now".
            if db_instance.event.wait(config.get("speedtest") * 3600):
                break
            if db_instance.has_stopped:
                break
    except KeyboardInterrupt:
        pass
    db_instance = None
def get_instance():
    """Return a (cached) Client configured with a random UA and proxy.

    Instances are memoized per user-agent in the module-level ``_instances``
    map, so repeated calls that pick the same UA reuse one Client.
    """
    config = load_config('client')
    user_agents = config['user-agents']
    proxies = config['proxies']
    # random.choice is the idiomatic equivalent of indexing with
    # randint(0, len-1); empty lists fall back to the defaults.
    user_agent = random.choice(user_agents) if user_agents else DEFAULT_UA
    proxy = random.choice(proxies) if proxies else None
    try:
        instance = _instances[user_agent]
    except KeyError:
        instance = Client(user_agent, proxy)
        _instances[user_agent] = instance
    return instance
def __init__(self):
    """Prepare configuration, the web API client and shutdown signalling."""
    import threading
    self.config = load_config()
    self.webapi = WebApi()
    # Event is set to request shutdown of the polling loop.
    self.event = threading.Event()
    self.has_stopped = False
# header_info.py: Script to print relevant information about the image files.
# Marjorie Decleir
# Updated (to Python 3.6) on 24-10-2018

# Import the necessary packages.
import os
from astropy.io import fits
import configloader

# Load the configuration file.
config = configloader.load_config()

# Specify the galaxy and the path to the raw images.
galaxy = config['galaxy']
path = config['path'] + galaxy + "/Raw_images/"

# Print titles of columns.
print("filename" + "\t" + "\t" + "\t" + "#frames" + "\t" + "filter" + "\t" + "date" + "\n")

# For all files in the path directory:
for filename in sorted(os.listdir(path)):
    # If the file is not a raw image file, skip it and continue with the next.
    if not filename.endswith("rw.img"):
        continue
    # Open the image inside a context manager so the FITS file handle is
    # always closed — the original never closed it, leaking one handle per
    # image (and any missing header keyword would have skipped cleanup too).
    with fits.open(path + filename) as hdulist:
        # Number of individual frames = number of extensions (HDU 0 is the
        # primary header, hence the -1).
        number_of_frames = len(hdulist) - 1
        print(filename + "\t" + "\t" + str(number_of_frames) + "\t" +
              hdulist[0].header["FILTER"] + "\t" +
              hdulist[0].header["DATE-OBS"].split('T')[0])
# -*- coding: utf-8 -*-
"""Worker methods for Parker."""
import os.path

from consumemodel import get_instance as get_consume
from crawlmodel import get_instance as get_crawl
from consumestore import get_instance as get_consumestore
from redisset import get_instance as get_redisset
from queues import crawl_q, consume_q
from configloader import load_site_config, load_config
from fileops import IMG_DIR, DATA_DIR

_config = load_config("parker")

# Dispatch table mapping a model kind to its factory.
_get_instance = {'consume': get_consume,
                 'crawl': get_crawl}


def consumer(site, uri):
    """Consume URI using site config."""
    config = load_site_config(site)
    # BUGFIX: the original called the undefined name `_get_model`
    # (NameError at runtime); the only dispatch table in scope is
    # `_get_instance`. NOTE(review): verify the factory signature
    # against consumemodel.get_instance.
    model = _get_instance['consume'](config, uri)
    consumestore = get_consumestore(model=model,
                                    method=_config.get('storage', 'file'),
                                    bucket=_config.get('s3_data_bucket', None))
    consumestore.save_media()
    consumestore.save_data()


def crawler(site, uri=None):
    """Crawl URI using site config."""
    config = load_site_config(site)
# -*- coding: utf-8 -*- """HTTP client for Parker.""" import random import requests from configloader import load_config DEFAULT_UA = "Parker v0.1.0" _PERMITTED_STATUS_CODES = [200] _instances = dict() _config = load_config('client') def get_proxy(): """Return a random proxy from proxy config.""" proxies = _config['proxies'] return proxies[ random.randint(0, len(proxies) - 1) ] if len(proxies) > 0 else None def get_instance(): """Return an instance of Client.""" global _instances user_agents = _config['user-agents'] user_agent = user_agents[ random.randint(0, len(user_agents) - 1) ] if len(user_agents) > 0 else DEFAULT_UA
# -*- coding: utf-8 -*-
"""File functions for Parker."""
import json
import os

from configloader import load_config
from stringops import generate_chunks

_config = load_config("parker")

# Root of the on-disk storage tree; falls back to $PROJECT/var.
# NOTE(review): os.environ.get('PROJECT') may be None when the env var is
# unset, which would make os.path.join raise — confirm PROJECT is always set.
VAR_DIR = _config.get(
    "storage-directory",
    os.path.join(
        os.environ.get('PROJECT'),
        'var'
    )
)
LOG_DIR = os.path.join(VAR_DIR, 'log')
DATA_DIR = os.path.join(VAR_DIR, 'data')
IMG_DIR = os.path.join(VAR_DIR, 'img')


def create_dirs(path):
    """Create all directories in @path, ignoring 'already exists' errors."""
    try:
        os.makedirs(path)
    # BUGFIX: `except OSError, error:` is Python-2-only syntax; the `as`
    # form below is valid on both Python 2.6+ and Python 3.
    except OSError as error:
        if error.errno != 17:  # 17 == EEXIST: directory already present
            raise
def __init__(self):
    """Read panel endpoint settings and create the shared HTTP session."""
    self.config = load_config()
    # Panel credentials/endpoint from configuration.
    self.WEBAPI_TOKEN = self.config.get('key')
    self.WEBAPI_URL = self.config.get('sspanel_url')
    # One pooled session reused for all API calls.
    self.session_pool = requests.Session()
# -*- coding: utf-8 -*-
"""RQ queues for Parker."""
from rq import Queue
from redis import StrictRedis

from configloader import load_config

_redis_config = load_config('redis')


def _make_queue(name):
    """Build an RQ queue bound to the configured Redis instance."""
    return Queue(name, connection=StrictRedis(**_redis_config))


# Public queues: pages to crawl and pages to consume.
crawl_q = _make_queue('crawl')
consume_q = _make_queue('consume')
# -*- coding: utf-8 -*- """RedisSet object for Parker.""" import redis from configloader import load_config _instances = dict() _redis_config = load_config("redis") _redis = redis.StrictRedis(**_redis_config) _seconds_in_a_day = 60 * 60 * 24 _seconds_in_five_days = 5 * _seconds_in_a_day def get_instance(key, expire=None): """Return an instance of RedisSet.""" global _instances try: instance = _instances[key] except KeyError: instance = RedisSet( key, _redis, expire=expire ) _instances[key] = instance return instance class RedisSet(object):