def update_ip(ip_type, cache, dns, proxy_list):
    """Update the DNS records of all configured domains for one IP family.

    Args:
        ip_type: '4' for IPv4 (A records) or '6' for IPv6 (AAAA records).
        cache: mapping of previously pushed addresses, or False when
            caching is disabled.
        dns: DNS provider module used by change_dns_record.
        proxy_list: proxies tried when calling the provider.

    Returns:
        None when no domains are configured for this family (and after a
        completed update pass), False when the current address could not
        be determined, True on a cache hit (address unchanged).
    """
    ipname = 'ipv' + ip_type
    domains = get_config(ipname)
    if not domains:
        return None
    if not isinstance(domains, list):
        # Accept ';', ',' or space separated domain strings.
        domains = domains.strip('; ').replace(',', ';').replace(' ', ';').split(';')

    index_rule = get_config('index' + ip_type, "default")  # per-family index rule from config
    address = get_ip(ip_type, index_rule)
    if not address:
        error('Fail to get %s address!', ipname)
        return False
    elif cache and (address == cache[ipname]):
        print('.', end=" ")  # cache hit: address unchanged, nothing to push
        return True

    record_type = 'A' if ip_type == '4' else 'AAAA'
    # Track whether at least one domain updated successfully, so a partial
    # failure does not poison the cache for the next run.
    # https://github.com/NewFuture/DDNS/issues/16
    updated = False
    for domain in domains:
        if change_dns_record(dns, proxy_list, domain=domain, ip=address,
                             record_type=record_type):
            updated = True
    if cache is not False:
        # Cache the address only when an update succeeded; otherwise store
        # a falsy value so the update is retried on the next run.
        cache[ipname] = updated and address
def settings_view():
    """Render the settings page (GET) or apply submitted settings (POST).

    POST handles two independent forms: the wifi credentials form (which
    rewrites /boot/wpa_supplicant.conf and restarts networking) and the
    username/password form for the logged-in user.
    """
    if request.method == 'GET':
        wifi_name, wifi_password = parse_wpa_supplicant_conf(
            '/boot/wpa_supplicant.conf')
        # SSH public key is shown on the page so users can copy it.
        with open('/boot/id_ed25519.pub', 'r') as fp:
            ssh_key = fp.read()
        return render_template('settings.html.j2',
                               config=get_config(),
                               wifi_name=wifi_name,
                               wifi_password=wifi_password,
                               username=current_user.username,
                               ssh_pub_key=ssh_key)
    if request.method == 'POST':
        # save wifi form
        if 'wifi_name' in request.form:
            # Escape double quotes so the values stay valid inside the
            # quoted fields of wpa_supplicant.conf.
            wpa_config = render_template(
                'wpa_supplicant.conf.j2',
                wifi_name=request.form['wifi_name'].replace('"', '\\"'),
                wifi_password=request.form['wifi_password'].replace(
                    '"', '\\"'))
            # NOTE(review): writeable_config presumably remounts the config
            # location read-write while saving — confirm its implementation.
            with writeable_config():
                with open('/boot/wpa_supplicant.conf', 'w') as fp:
                    fp.write(wpa_config)
            # Restart networking so the new credentials take effect.
            os.system('systemctl restart wpa_supplicant_wlan0')
            os.system('systemctl restart network')
        # save username/password form
        if 'username' in request.form:
            current_user.username = request.form['username']
            current_user.update_password(request.form['password'])
            current_user.save()
    return redirect(url_for('settings.settings_view'))
def fetch_all_csv():
    """Fetch every .csv file from the configured S3 bucket.

    Returns:
        dict mapping table name (file basename without the .csv suffix)
        to a pandas DataFrame parsed from the object's contents.
    """
    cfg = get_config()

    # Target s3 bucket
    s3 = boto3.resource('s3')
    bucket = s3.Bucket(cfg['s3']['bucket'])
    logging.info("Fetching csv files from %s", cfg['s3']['bucket'])

    # Loop through and append all csv files as dataframe in dict
    data = dict()
    for obj in bucket.objects.all():
        # Skip our own result objects.
        if 'output' in obj.key:
            continue
        # endswith avoids false positives such as "foo.csv.bak" that a
        # plain substring test would match.
        if obj.key.endswith('.csv'):
            table = os.path.basename(obj.key)[:-4]
            logging.info(' ' + table)
            # Load string-typed data into memory buffer
            mem_buffer = StringIO(obj.get()['Body'].read().decode('latin1'))
            # Parse to dataframe
            data[table] = pd.read_csv(mem_buffer)
    return data
def camera_view():
    """Show the camera settings page and persist each submitted form."""
    settings = get_config()['image_settings']
    if request.method == 'GET':
        return render_template('camera.html.j2', settings=settings)
    if request.method == 'POST':
        # resolution/rotation form
        if 'rotation' in request.form:
            for field in ('resolution_x', 'resolution_y', 'rotation'):
                settings[field] = request.form[field]
            save_config(get_config())
        # exposure form
        if 'iso' in request.form:
            for field in ('iso', 'exposure_mode', 'exposure_compensation',
                          'metering_mode', 'drc'):
                settings[field] = request.form[field]
            save_config(get_config())
        # color form
        if 'awb_mode' in request.form:
            for field in ('awb_mode', 'awb_gain_red', 'awb_gain_blue',
                          'brightness', 'contrast', 'saturation'):
                settings[field] = request.form[field]
            save_config(get_config())
        # postprocessing form ('denoise' is a checkbox: absent means off)
        if 'sharpness' in request.form:
            settings['sharpness'] = request.form['sharpness']
            settings['denoise'] = 'on' if 'denoise' in request.form else 'off'
            save_config(get_config())
    return redirect(url_for('camera.camera_view'))
def scrape_rotoguru(table_cfg):
    """Scrape every configured rotoguru table using the account login."""
    credentials = get_config('accounts.yml')['rotoguru']
    scraper = RotoGuruScraper()
    for table_name, info in table_cfg['rotoguru'].items():
        # The configured URL template embeds the account credentials.
        url = info['url'] % (credentials['username'], credentials['password'])
        scraper.fetch(url=url,
                      column_list=info['columns'],
                      table_name=table_name)
def __init__(self):
    """Open the sqlite3 database named in the configuration file.

    Raises:
        SQLException: if the "sqllite3" section or its "name" option is
            missing from the configuration file.
    """
    try:
        self._name = get_config("sqllite3", "name")
    except NoOptionError:
        raise SQLException("配置文件中name参数不存在或参数名错误!")
    except NoSectionError:
        raise SQLException("配置文件中sqllite3模块不存在或者模块名称错误")
    # Open the database connection
    self._connect = sqlite3.connect(self._name)
    # Keep a cursor for executing queries
    self._cursor = self._connect.cursor()
def __init__(self, *args, **kwargs):
    """Set up the request handler and an authenticated Twitter client."""
    webapp2.RequestHandler.__init__(self, *args, **kwargs)
    cfg = get_config()["twitter"]
    # OAuth 1.0a: app credentials plus the user's access token pair.
    auth = tweepy.OAuthHandler(cfg["consumer_key"], cfg["consumer_secret"])
    auth.set_access_token(cfg["access_token"], cfg["access_token_secret"])
    self._twitter = tweepy.API(auth)
def get_valid_session():
    """Pop proxies from redis until one validates, then log in through it.

    Returns:
        A logged-in session, or None when the login attempt through the
        chosen proxy fails.
    """
    config = get_config()
    proxy = None
    while True:
        ip = r.spop('ip_list').decode('utf-8')
        # The 5th character distinguishes "https..." from "http..." entries.
        if ip[4] == 's':
            proxy = {'https': ip}
        else:
            proxy = {'http': ip}
        if is_valid_proxy(proxy):
            break
    try:
        # config[0] holds the (username, password) pair.
        sess = login(config[0][0], config[0][1], proxy)
        return sess
    except Exception:
        # Was a bare except, which would also swallow KeyboardInterrupt
        # and SystemExit; narrow it and log the failure.
        logger.info('代理不成功,登陆失败!!')
def load_data(): """ Load all data files from s3 bucket into dict """ cfg = get_config('config.yml') # Get s3 bucket bucket = boto3.resource('s3').Bucket(cfg['s3']['bucket']) # Load *.csv into dataframes data = dict() for obj in bucket.objects.all(): if '.csv' in obj.key: table = os.path.basename(obj.key)[:-4] logging.info("Loading %s", table) mem_buffer = StringIO(obj.get()['Body'].read().decode('latin1')) data[table] = pd.read_csv(mem_buffer) return data
def write_output(df, name):
    """Upload *df* as a date-stamped csv under the bucket's output folder."""
    cfg = get_config()
    bucket = cfg['s3']['bucket']

    # Output objects live at <output_dir>/<name>/<name><YYYYMMDD>.csv
    stamp = datetime.now().strftime("%Y%m%d")
    target_file = os.path.join(cfg['s3']['output_dir'], name,
                               name + stamp + '.csv')
    logging.info("Loading to S3 bucket %s/%s", bucket, target_file)

    # Serialize to an in-memory buffer and upload the whole body at once.
    mem_buffer = StringIO()
    df.to_csv(mem_buffer, index=False)
    boto3.resource('s3').Object(bucket, target_file).put(
        Body=mem_buffer.getvalue())
def get_todays_output(name):
    """Fetch today's output file for *name* and return it as a DataFrame."""
    cfg = get_config()
    bucket = cfg['s3']['bucket']

    # Output objects live at <output_dir>/<name>/<name><YYYYMMDD>.csv
    stamp = datetime.now().strftime("%Y%m%d")
    target_file = os.path.join(cfg['s3']['output_dir'], name,
                               name + stamp + '.csv')
    logging.info("Fetching from S3 %s/%s", bucket, target_file)

    # Download into memory, decode, and parse in one pass.
    body = boto3.resource('s3').Object(bucket,
                                       target_file).get()['Body'].read()
    return pd.read_csv(StringIO(body.decode('latin1')))
def get_ldap_server_url():
    """Return the LDAP server URL from the [main] config section."""
    cfg = config.get_config()
    return cfg.get('main', 'ldap_server')
def get_base_dn():
    """Return the LDAP base DN from the [main] config section."""
    cfg = config.get_config()
    return cfg.get('main', 'base_dn')
def test_get_config(self):
    """The shadowsocks section should expose the configured password."""
    shadowsocks = get_config()['shadowsocks']
    self.assertEqual(shadowsocks.get('password'), '123456')
def get_config(self, section, name):
    """Look up *name* under *section* in this instance's config."""
    value = get_config(self.config, section, name)
    return value
def get_admin_pass():
    """Return the admin password from the [main] config section."""
    cfg = config.get_config()
    return cfg.get('main', 'admin_pass')
from loguru import logger from util import util, config from db import dao import spacy import json import random import datetime import traceback from handlers import mom chaddi_config = config.get_config() BOT_USERNAME = "******" + chaddi_config["bot_username"] response_blacklist = [BOT_USERNAME] COMMAND_COST = 100 def handle(update, context): try: util.log_chat("aao", update) initiator_id = update.message.from_user["id"] if initiator_id is None: logger.error("[aao] initiator_id was None!") return if not util.paywall_user(initiator_id, COMMAND_COST):
column_list=info['columns'], table_name=table)


if __name__ == '__main__':
    # Run from the script's own directory so relative paths resolve.
    os.chdir(os.path.dirname(os.path.realpath(__file__)))

    # Add path to environ so locally installed tools are found.
    os.environ['PATH'] += os.pathsep + '/usr/local/bin'

    # Configure logging
    FORMAT = '[%(levelname)s %(asctime)s] %(message)s'
    logging.basicConfig(format=FORMAT, level=logging.INFO)

    # Load in table config
    TABLE_CFG = get_config('tables.yml')

    # Run every scraper in sequence.
    # Fangraphs
    scrape_fangraphs(TABLE_CFG)
    # Rotoguru
    scrape_rotoguru(TABLE_CFG)
    # Statcast
    scrape_statcast(TABLE_CFG)
    # Weather
    scrape_weather(TABLE_CFG)
    # Vegas
    scrape_vegas(TABLE_CFG)
def timelapse_view():
    """Render the timelapse settings page and persist submitted forms.

    GET renders the page with the current timelapse settings and the
    device's SSH public key. POST handles three independent forms
    (generic, destination, weekly schedule), saving the config after each.
    """
    settings = get_config()['timelapse']
    if request.method == 'GET':
        with open('/boot/id_ed25519.pub', 'r') as fp:
            ssh_key = fp.read()
        # Weekday settings are stored as comma-separated strings.
        return render_template(
            'timelapse.html.j2',
            settings=settings,
            weekday_enable=settings['weekday_enable'].split(','),
            weekday_from=settings['weekday_from'].split(','),
            weekday_to=settings['weekday_to'].split(','),
            ssh_pub_key=ssh_key)
    if request.method == 'POST':
        # generic form
        if 'enable_timelapse' in request.form:
            settings['enable'] = request.form['enable_timelapse']
            settings['delay'] = request.form['delay']
            save_config(get_config())
        # destination form
        if 'upload_mode' in request.form:
            settings['upload_mode'] = request.form['upload_mode']
            settings['upload_server'] = request.form['upload_server']
            settings['upload_path'] = request.form['upload_path']
            settings['upload_auth_user'] = request.form['upload_username']
            settings['upload_auth_password'] = request.form['upload_password']
            settings['upload_form_field'] = request.form['upload_form_field']
            # Pre-render the upload shell command. SSHPASS is consumed by
            # sshpass; the doubled braces keep {input_file}/{output_file}
            # as placeholders for a later .format() call.
            if settings['upload_mode'] == 'sftp':
                settings[
                    'upload_cmd'] = 'SSHPASS={password} sftp -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -q -i /boot/id_ed25519 {{input_file}} {user}@{host}:{path}/{{output_file}}'.format(
                        user=request.form['upload_username'],
                        host=request.form['upload_server'],
                        path=request.form['upload_path'],
                        password=request.form['upload_password'])
            if settings['upload_mode'] == 'scp':
                settings[
                    'upload_cmd'] = 'SSHPASS={password} scp -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -q -i /boot/id_ed25519 {{input_file}} {user}@{host}:{path}/{{output_file}}'.format(
                        user=request.form['upload_username'],
                        host=request.form['upload_server'],
                        path=request.form['upload_path'],
                        password=request.form['upload_password'])
            if settings['upload_mode'] in ['post', 'form']:
                settings['upload_url'] = "{server}{path}".format(
                    server=request.form['upload_server'],
                    path=request.form['upload_path'],
                )
            if settings['upload_mode'] == 'local':
                # check if we have a data partition on the sd card
                os.system('/usr/bin/umount /data')
                ret = os.system('/usr/bin/mount /data')
                if ret != 0:
                    # partition does not exist, try to create one
                    # /dev/mmcblk0p3
                    ret = os.system('/usr/bin/make_data_partition.sh')
                    ret = os.system('/usr/bin/mount /data')
            save_config(get_config())
        # schedule form
        if 'weekday_1_from' in request.form:
            # Seed defaults so older configs without schedule keys work.
            if 'weekday_enable' not in settings:
                settings['weekday_enable'] = 'off,off,off,off,off,off,off'
            if 'weekday_from' not in settings:
                settings[
                    'weekday_from'] = '00:00,00:00,00:00,00:00,00:00,00:00,00:00,'
            if 'weekday_to' not in settings:
                settings[
                    'weekday_to'] = '23:59,23:59,23:59,23:59,23:59,23:59,23:59,'
            weekday_enable = settings['weekday_enable'].split(',')
            weekday_from = settings['weekday_from'].split(',')
            weekday_to = settings['weekday_to'].split(',')
            # Unchecked checkboxes are absent from the form data, hence
            # the explicit 'off' fallback per weekday.
            for i in range(1, 8):
                setting = 'weekday_{}_enable'.format(i)
                if setting not in request.form:
                    weekday_enable[i - 1] = 'off'
                else:
                    weekday_enable[i - 1] = request.form[setting]
                setting = 'weekday_{}_from'.format(i)
                weekday_from[i - 1] = request.form[setting]
                setting = 'weekday_{}_to'.format(i)
                weekday_to[i - 1] = request.form[setting]
            settings['weekday_enable'] = ",".join(weekday_enable)
            settings['weekday_from'] = ",".join(weekday_from)
            settings['weekday_to'] = ",".join(weekday_to)
            save_config(get_config())
    return redirect(url_for('timelapse.timelapse_view'))
def main() -> None:
    """Parse CLI arguments, then list or back up repos from a gitea instance.

    :return: None
    """
    parser = ArgumentParser(
        description='Download git repos from a gitea instance')
    parser.add_argument('--config', '-c', help='config file',
                        default=DEFAULT_CONFIG_FILE)
    parser.add_argument('-v', "--verbose", help='increase verbosity',
                        action='store_true', default=False)
    parser.add_argument('--no-issues', help="don't download issues",
                        action='store_true', default=False)
    parser.add_argument('--always-ask', '-a', help='ask about every action',
                        action='store_true', default=False)
    # --folder and --list are mutually exclusive: either download or list.
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--folder', '-f', help='download git repos here',
                       default='backup/')
    group.add_argument('--list', '-l', help='list repos only (no download)',
                       action='store_true', default=False)
    args = parser.parse_args()
    config: Config = get_config(args.config)
    if args.verbose:
        config.print()
    print("detected gitea version " + str(get_version(config)))
    repos: List[Repo] = get_repos(config)
    # Drop repos the config explicitly excludes.
    repos = remove_exceptions(config.exceptions, repos, args.verbose)
    if args.list:
        # List Repos
        print("Repos:")
        for repo in repos:
            print("\t- " + repo.name)
    else:
        create_folder(args.folder, args.verbose)
        if args.verbose:
            print("downloading to " + args.folder)
        check_for_git()
        # clone each git repo
        for repo in repos:
            if args.always_ask:
                # NOTE(review): reconstructed indentation — in always-ask
                # mode the repo is downloaded only on confirmation and the
                # issue download below is skipped; confirm against upstream.
                if ask("download " + repo.name):
                    download_repo(args.folder, repo)
                continue
            else:
                download_repo(args.folder, repo)
            if not args.no_issues:
                working_on_issues(config, repo, args)
def get_group_ou():
    """Return the group OU from the [main] config section."""
    cfg = config.get_config()
    return cfg.get('main', 'group_ou')
def main(): """ 更新 """ init_config(__description__, __doc__, __version__) # Dynamicly import the dns module as configuration dns_provider = str(get_config('dns', 'dnspod').lower()) dns = getattr(__import__('dns', fromlist=[dns_provider]), dns_provider) dns.Config.ID = get_config('id') dns.Config.TOKEN = get_config('token') dns.Config.TTL = get_config('ttl') if get_config('debug'): ip.DEBUG = get_config('debug') basicConfig( level=DEBUG, format= '%(asctime)s <%(module)s.%(funcName)s> %(lineno)d@%(pathname)s \n[%(levelname)s] %(message)s' ) print("DDNS[", __version__, "] run:", os_name, sys.platform) if get_config("config"): print("Configuration was loaded from <==", path.abspath(get_config("config"))) print("=" * 25, ctime(), "=" * 25, sep=' ') proxy = get_config('proxy') or 'DIRECT' proxy_list = proxy if isinstance( proxy, list) else proxy.strip('; ').replace(',', ';').split(';') cache = get_config('cache', True) and Cache(CACHE_FILE) if cache is False: info("Cache is disabled!") elif get_config("config_modified_time") is None or get_config( "config_modified_time") >= cache.time: warning("Cache file is out of dated.") cache.clear() elif not cache: debug("Cache is empty.") update_ip('4', cache, dns, proxy_list) update_ip('6', cache, dns, proxy_list)
def __init__(self):
    """Initialize the instance with the parsed configuration."""
    self.cfg = get_config()
from views import login, settings, about, camera, timelapse, video

login_manager = LoginManager()
app = Flask(__name__)

# Register one blueprint per page/feature.
app.register_blueprint(login)
app.register_blueprint(settings)
app.register_blueprint(about)
app.register_blueprint(camera)
app.register_blueprint(timelapse)
app.register_blueprint(video)

login_manager.init_app(app)
# The session secret key comes from the application config.
with app.app_context():
    app.secret_key = get_config()['web']['secret_key']


@login_manager.user_loader
def load_user(user_id):
    # reload config in case the user info changed
    reload_config()
    return User()


@login_manager.unauthorized_handler
def unauthorized():
    # Unauthenticated requests are redirected to the login page.
    return redirect(url_for('login.login_view'))
from util.config import print_usage, get_config
from src.RPSMNetwork import RPSMNetwork
from src.h5_dataset import H5Dataset

# Machine Learning Project Using Tensorflow for Convolutional Neural Networks


def main(config):
    """The main function: build train/valid datasets and train the network."""
    h5_train = H5Dataset(config, 'train')
    h5_val = H5Dataset(config, 'valid')
    mynet = RPSMNetwork(h5_train, h5_val, config)
    mynet.train(config)


if __name__ == "__main__":
    # ----------------------------------------
    # Parse configuration
    config, unparsed = get_config()
    # If we have unparsed arguments, print usage and exit
    if len(unparsed) > 0:
        print_usage()
        exit(1)
    main(config)
import json
import time

from lxml import etree

from util.config import get_config
from util.connect_redis import get_redis
from util.log_util import get_logger
from util.get_login_session import login, headers

# Logger named after this file (strip directory and the ".py" suffix).
logger = get_logger('api.' + __file__.split('/')[-1][:-3])

if __name__ == '__main__':
    # Weibo feed-list endpoint; %d is substituted with the page number.
    # NOTE(review): the "¤t_page" fragment looks like a mangled
    # "&current_page" (HTML-entity decoding artifact) — verify against
    # the upstream source before relying on this URL.
    id_url = 'http://d.weibo.com/p/aj/v6/mblog/mbloglist?ajwvr=6&domain=102803_ctg1_1760_-_ctg1_1760&pagebar=0&tab=home¤t_page=%d&pre_page=1&page=1&pl_name=Pl_Core_NewMixFeed__3&id=102803_ctg1_1760_-_ctg1_1760&script_uri=/&feed_type=1&domain_op=102803_ctg1_1760_-_ctg1_1760'
    config = get_config()
    # config[0] holds the (username, password) pair for the account.
    sess = login(config[0][0], config[0][1])
    r = get_redis()
    for i in range(100):
        page = sess.get(id_url % i, headers=headers)
        data = json.loads(page.content.decode('utf-8'))['data']
        tree = etree.HTML(data)
        # Collect the mid (message id) of every feed card on the page.
        mid = tree.xpath('//div[@class="WB_cardwrap WB_feed_type S_bg2"]/@mid')
        for idx in mid:
            r.sadd("weibo_id", idx)
        if i % 10 == 0:
            logger.info("第%s页的微博id存入redis库中......" % (i + 1))
        # Throttle requests to avoid being rate-limited.
        time.sleep(0.2)
count,
        callback=partial(_add_url_to_threadpool, futures=lazy_results,
                         pool=wget_pool),
        sleep_min=10,
        sleep_max=20)
    #urls = pbrowser.ask_google(keyword, count, callback=(lambda new_url: lazy_results.extend( (new_url, wget(new_url)) )))
    _logger.info('%d results returned by Google' % len(urls))
    #return urls
    # Return both the materialized URL list and the lazily-filled results.
    return list(urls), lazy_results


# Call this interface to start a 'typical' crawl
def start_crawler(keyword, count):
    crawl(keyword, count)


# this function is for debugging lambda(s): echoes its arguments
def _debug(*args, **kw):
    print args
    print kw
    return args, kw


if __name__ == "__main__":
    # Script usage: crawler.py <keyword> <count>
    start_crawler(sys.argv[1], sys.argv[2])
else:
    # When imported as a module, pull logger and config from the package.
    from util.log import _logger
    from util.config import get_config
    _config = get_config('poseidon')
def get_admin_cn():
    """Return the admin CN from the [main] config section."""
    cfg = config.get_config()
    return cfg.get('main', 'admin_cn')