def get_old_articles_for_query(query):
    """
    Fetches items for a query that are older than the oldest item that has
    already been fetched.
    """
    logging.error("$$$$ nytArticles.get_old_articles_for_query[]")
    n_to_fetch = 10
    query.doc['n_fetched'] = query.doc['n_fetched'] + n_to_fetch
    n_requests = int(math.ceil(n_to_fetch / int(Config.get("nyt_article")["response_size"])))
    if n_requests == 0:
        n_requests = 1
    prior_cached = len(query.children)
    logging.error(prior_cached)
    prior_offset = int(int(prior_cached) / int(Config.get("nyt_article")["response_size"]))
    logging.error(prior_offset)
    req_pars = {
        'query_id': query.id(),
        'functions': [api.make_api_request, cache_response, fetch_comments]
    }
    for i in range(0, n_requests):
        request = {}
        for k, v in req_pars.iteritems():
            request[k] = v
        request['url'] = 'http://api.nytimes.com/svc/search/v1/article?query=text:' + query.doc['query_text'].replace(' ', '+') + '+comments:y'
        request['url'] = request['url'] + '&offset=' + str(int(prior_offset) + int(i))
        request['url'] = request['url'] + '&fields=body,byline,date,title,url,geo_facet,per_facet,des_facet,org_facet'
        request['url'] = request['url'] + '&api-key=' + Config.get('nyt_article')['key']
        # logging.critical('BSM[get_old_articles_for_query] Size:' + str(sys.getsizeof(pickle.dumps(request))))
        with voxpop.VPE.beanstalkd_lock:
            voxpop.VPE.get_beanstalkd().use("nytarticle")
            voxpop.VPE.get_beanstalkd().put(pickle.dumps(request), pri=100000)
    query.save()
    return query
def device_manager(context):
    global manager
    global publisher
    worker_address = Config.get('services', 'worker_address')
    worker_port = Config.get('services', 'worker_port')
    worker_address = "tcp://" + worker_address + ":" + worker_port
    publisher_address = Config.get('device_service', 'publisher_address')
    publisher_port = Config.get('device_service', 'publisher_port')
    publisher_address = "tcp://" + publisher_address + ":" + publisher_port
    service = Config.get('device_service', 'service_name')
    publisher = DeviceServicePublisher(context, publisher_address)
    manager = DeviceServiceManager(context, worker_address, service)
    try:
        IOLoop.instance().start()
    except KeyboardInterrupt:
        IOLoop.instance().stop()
        publisher.shutdown()
        manager.shutdown()
    return
def test_load_from_exports(self):
    # grab the file as needed
    Config._load_exports(fixtures.EXPORTS)
    assert hasattr(Config, "PATH")
    assert not hasattr(Config, "UNDEFINED")
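# A hedged sketch (not from the source) of the exports-style input that
# _load_exports appears to consume: shell `export NAME=value` lines mapped
# onto Config class attributes. The fixture content below is hypothetical;
# the real fixtures.EXPORTS is not shown.
EXPORTS = """
export PATH=/usr/local/bin
export EDITOR=vim
"""
Config._load_exports(EXPORTS)
# each exported name would then be available as a class attribute,
# e.g. Config.PATH == "/usr/local/bin"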
def setUpClass(cls):
    super(TestVolume, cls).setUpClass()
    cls.host = FixtureFactory(cls.api).create_host_with_depends(
        Config.get_instance().hosts[0].create()).host
    result = FixtureFactory(cls.api).create_host_with_depends(
        Config.get_instance().hosts[1].create())
    cls.host2 = result.host
    cls.cluster = result.cluster
    cls.datacenter = result.datacenter
def import_tournament(type, path, bracket, region, name):
    config = Config()
    mongo_client = MongoClient(host=config.get_mongo_url())
    if type == 'tio':
        scraper = TioScraper(path, bracket)
    elif type == 'challonge':
        scraper = ChallongeScraper(path)
    else:
        click.echo("Illegal type")
        return
    dao = Dao(region, mongo_client=mongo_client)
    player_map = get_player_alias_to_id_map(scraper, dao)
    # TODO pass in a map of overrides for specific players
    tournament = Tournament.from_scraper(type, scraper, player_map, region)
    if name:
        tournament.name = name
    dao.insert_tournament(tournament)
    click.echo("Generating new ranking...")
    rankings.generate_ranking(dao)
    click.echo("Done!")
def setUp(self):
    self.api = TestBase.api
    self.host = FixtureFactory(self.api).create_host_with_depends(
        Config.get_instance().hosts[0].create()).host
    result = FixtureFactory(self.api).create_host_with_depends(
        Config.get_instance().hosts[1].create())
    self.host2 = result.host
    self.cluster = result.cluster
    self.datacenter = result.datacenter
    self.vol = create_distributed_volume()
    VolumePopulator().fileForEachBrick(self.vol)
def test_load_from_json(self):
    # ensure that the json was parsed properly
    json_config = json.loads(fixtures.JSON)
    Config._load_json(fixtures.JSON)
    for key, value in json_config.iteritems():
        assert hasattr(Config, key)
        assert getattr(Config, key) == value
def get_connection():
    config = Config()
    mysql_config = {
        'user': config.get_db_user(),
        'password': config.get_db_password(),
        'host': config.get_db_host(),
        'database': config.get_db_name()
    }
    return mysql.connector.connect(**mysql_config)
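# Usage sketch for get_connection() (not from the source); cursor handling
# follows the standard mysql-connector-python API. The `users` table name
# is a hypothetical example.
conn = get_connection()
try:
    cursor = conn.cursor()
    cursor.execute("SELECT COUNT(*) FROM users")
    (count,) = cursor.fetchone()
    print(count)
finally:
    conn.close()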
def test_load_from_yaml(self):
    # parse the yaml file
    yaml_config = yaml.load(fixtures.YAML)
    Config._load_yaml(fixtures.YAML)
    for key, value in yaml_config.iteritems():
        assert hasattr(Config, key)
        assert getattr(Config, key) == value
def refresh_device(device):
    address = Config.get('services', 'client_address')
    port = Config.get('services', 'client_port')
    address = "tcp://" + address + ":" + port
    socket = zmq.Context().socket(zmq.REQ)
    socket.setsockopt(zmq.LINGER, 0)
    socket.connect(address)
    message = ['00000000', 'control', 'refresh', device]
    mdp_request(socket, 'device', message, 1)
class TestConfig(unittest.TestCase):

    def setUp(self):
        self.config = Config(TEMPLATE_CONFIG_FILE)

    def test_get_mongo_url(self):
        self.assertEquals(self.config.get_mongo_url(), 'mongodb://*****:*****@HOSTNAME/AUTH_DB')

    def test_get_db_host(self):
        self.assertEquals(self.config.get_db_host(), 'HOSTNAME')

    def test_get_auth_db_name(self):
        self.assertEquals(self.config.get_auth_db_name(), 'AUTH_DB')

    def test_get_db_user(self):
        self.assertEquals(self.config.get_db_user(), 'USER')

    def test_get_db_password(self):
        self.assertEquals(self.config.get_db_password(), 'PASSWORD')

    def test_challonge_api_key(self):
        self.assertEquals(self.config.get_challonge_api_key(), 'API_KEY')

    def test_get_fb_app_id(self):
        self.assertEquals(self.config.get_fb_app_id(), 'FB_APP_ID')

    def test_get_fb_app_token(self):
        self.assertEquals(self.config.get_fb_app_token(), 'FB_APP_TOKEN')
def fetch_comments_for_article_id(pars={}, **kwargs):
    if "article_id" not in pars:
        logging.error("**** nytCommunity.fetch_comments_for_article: NO ARTICLE ID PROVIDED")
        return False
    if "article_url" not in pars:
        logging.error("**** nytCommunity.fetch_comments_for_article: NO ARTICLE URL PROVIDED")
        return False
    logging.info(
        "$$$$ nytCommunity.fetch_comments_for_article_id[id:"
        + pars["article_id"] + ",url:" + pars["article_url"] + "]"
    )
    _url = "http://api.nytimes.com/svc/community/v2/comments/url/exact-match.json?"
    _url = _url + "url=" + urllib.quote_plus(pars["article_url"])
    _url = _url + "&sort=oldest"
    _url = _url + "&api-key=" + Config.get("nyt_community")["key"]
    request = {
        "url": _url,
        "article_id": pars["article_id"],
        "article_url": pars["article_url"],
        "functions": [api.make_api_request, fetch_remainder],
    }
    # logging.critical('BSM[fetch_comments_for_article_id] Size:' + str(sys.getsizeof(pickle.dumps(request))))
    with voxpop.VPE.beanstalkd_lock:
        voxpop.VPE.get_beanstalkd().use("nytcommunity")
        voxpop.VPE.get_beanstalkd().put(pickle.dumps(request), pri=100000)
    return True
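# Call sketch (not from the source): both keys are required or the function
# logs an error and returns False. The id and URL values are hypothetical.
queued = fetch_comments_for_article_id({
    "article_id": "abc123",
    "article_url": "http://www.nytimes.com/2012/01/01/example-article.html",
})
# queued is True once the request has been pickled onto the
# "nytcommunity" beanstalkd tube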
def __init__(self):
    # regular expressions:
    # regexp denoting successful collection index/directory listing save:
    self.cindex = re.compile(r'(index|listing).*saved')
    # regexp for timestamp string containing an 'http' hdf (or xml) target:
    self.timestamp = r'--\d{4}-\d{2}-\d{2} *\d{2}:\d{2}:\d{2}--'
    self.target = r'(http|ftp).*\.(hdf|nc).*$'
    self.regex_save_loc = ".*('/.*/.*') saved.*"
    self.timestamp_and_target = re.compile(
        self.timestamp + ' *' + '(' + self.target + ')')  # save target as group
    self.regex_saved_file_loc = re.compile(r'=> (\'.*\')')
    # regexp denoting target was successfully downloaded (saved):
    self.saved = re.compile(r'saved')
    # regexp denoting overall completion:
    self.finished = re.compile(r'FINISHED')
    # regex for 'Remote file no newer than local file ... not retrieving'
    self.regex_no_retrieve = re.compile(r'Remote file no newer than local file.*not retrieving\.')
    # regex for 'failed: Network is unreachable'
    self.regex_unreachable_network = re.compile(r'Connecting.*failed: Network is unreachable')
    self.download_list = []
    self.saved_file_locs = []
    self.summary = ('Wget Log Summary (' + str(date.today()) + '):'
                    + '*************************************************************************\n\n')
    self.config = Config()
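# Illustration (not from the source) of what timestamp_and_target is meant
# to capture; the log line is fabricated, and the enclosing class is assumed
# to be the WgetLogValidator referenced elsewhere in this codebase.
validator = WgetLogValidator()
line = '--2019-03-14 02:05:11--  http://example.com/data/MOD09.A2019.hdf'
m = validator.timestamp_and_target.search(line)
if m:
    print(m.group(1))  # -> 'http://example.com/data/MOD09.A2019.hdf'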
def inject_details():
    g.devices = db_session.query(Device).order_by(Device.creation_time.desc())\
        .all()
    g.connected_devices = []
    g.configured_devices = []
    g.new_devices = []
    g.grainbin_service = Config.getboolean('services', 'grainbin_service')
    if g.grainbin_service:
        g.grainbins = []
        g.configured_grainbins = []
        g.new_grainbins = []
    for dev in g.devices:
        if dev.connected:
            g.connected_devices.append(dev)
        if dev.user_configured:
            g.configured_devices.append(dev)
        else:
            g.new_devices.append(dev)
        if g.grainbin_service:
            if dev.user_configured:
                bins = dev.bins
                g.grainbins.extend(bins)
                for grainbin in bins:
                    if grainbin.user_configured:
                        g.configured_grainbins.append(grainbin)
                    else:
                        g.new_grainbins.append(grainbin)
    return dict()
class ChallongeScraper(object):

    def __init__(self, tournament_id):
        self.tournament_id = tournament_id
        self.config = Config(config_file_path=CONFIG_FILE_PATH)
        self.api_key = self.config.get_challonge_api_key()
        self.api_key_dict = {'api_key': self.api_key}
        self.raw_dict = None
        self.get_raw()

    def get_raw(self):
        if self.raw_dict is None:
            self.raw_dict = {}
            url = TOURNAMENT_URL % self.tournament_id
            self.raw_dict['tournament'] = self._check_for_200(
                requests.get(url, params=self.api_key_dict)).json()
            url = MATCHES_URL % self.tournament_id
            self.raw_dict['matches'] = self._check_for_200(
                requests.get(url, params=self.api_key_dict)).json()
            url = PARTICIPANTS_URL % self.tournament_id
            self.raw_dict['participants'] = self._check_for_200(
                requests.get(url, params=self.api_key_dict)).json()
        return self.raw_dict

    def get_name(self):
        return self.get_raw()['tournament']['tournament']['name'].strip()

    def get_date(self):
        return iso8601.parse_date(self.get_raw()['tournament']['tournament']['created_at'])

    def get_matches(self):
        player_map = dict(
            (p['participant']['id'],
             p['participant']['name'].strip() if p['participant']['name']
             else p['participant']['username'].strip())
            for p in self.get_raw()['participants'])
        matches = []
        for m in self.get_raw()['matches']:
            m = m['match']
            winner_id = m['winner_id']
            loser_id = m['loser_id']
            if winner_id is not None and loser_id is not None:
                winner = player_map[winner_id]
                loser = player_map[loser_id]
                match_result = MatchResult(winner=winner, loser=loser)
                matches.append(match_result)
        return matches

    def get_players(self):
        return [p['participant']['name'].strip() if p['participant']['name']
                else p['participant']['username'].strip()
                for p in self.get_raw()['participants']]

    def _check_for_200(self, response):
        if response.status_code != 200:
            raise Exception('Received status code of %d' % response.status_code)
        return response
def __init__(self, tournament_id):
    self.tournament_id = tournament_id
    self.config = Config(config_file_path=CONFIG_FILE_PATH)
    self.api_key = self.config.get_challonge_api_key()
    self.api_key_dict = {'api_key': self.api_key}
    self.raw_dict = None
    self.get_raw()
def _build_paths(self):
    rrd_location = Config.get('rrdtool', 'location')
    self.folder_path = rrd_location + self.folder
    self.file_path = self.folder_path + self.file_name
    if not os.path.exists(self.folder_path):
        os.makedirs(self.folder_path)
def __init__(self, parent):
    Gtk.Dialog.__init__(self, "Settings", parent, 0,
                        (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                         Gtk.STOCK_APPLY, Gtk.ResponseType.OK))
    self.set_default_size(200, 400)
    self.config = Config.get_config()
    self.__add_widget()
    self.show_all()
def __init__(self, configFile=None, debug=False):
    self.configFile = "core.config" if configFile is None else configFile
    self.debug = debug
    self.tripId = None
    self.start_time = None
    self.config = Config(self.configFile)
    self.error = []
    self.errorPollingInterval = self.config.tryGetWithDefault("errorPollingInterval", 15)
    self.pollingInterval = self.config.tryGetWithDefault("pollingInterval", 2)
def delete_device(device):
    address = Config.get('services', 'client_address')
    port = Config.get('services', 'client_port')
    address = "tcp://" + address + ":" + port
    socket = zmq.Context().socket(zmq.REQ)
    socket.setsockopt(zmq.LINGER, 0)
    socket.connect(address)
    message = ['00000000', 'control', 'remove', device.id]
    mdp_request(socket, 'device', message, 1)
    rrd = RRD(device.id, 'device')
    rrd.remove(remove_folder=True)
    db_session.query(Device).filter_by(id=device.id).delete()
    db_session.commit()
    return
def grainbin_manager(context):
    global manager
    worker_address = Config.get('services', 'worker_address')
    worker_port = Config.get('services', 'worker_port')
    worker_address = "tcp://" + worker_address + ":" + worker_port
    service = Config.get('grainbin_service', 'service_name')
    manager = GrainbinServiceManager(context, worker_address, service)
    try:
        IOLoop.instance().start()
    except KeyboardInterrupt:
        IOLoop.instance().stop()
        manager.shutdown()
    return
def run():
    from retriever.datafetch import FtpFetch, USGSFetch, NERSCFetch
    from validate.wgetlogvalidator import WgetLogValidator
    from utils.emailutils import GmailSend
    from config.config import Config
    import json

    config = Config()
    f = open(config.datapath(), "r")
    jsonobj = json.loads(f.read())
    f.close()

    # queue up fetch objects
    fetch_list = []
    if 'ftp' in jsonobj:
        for obj in jsonobj['ftp']:
            fetch_list.append(FtpFetch(obj['site'], obj['collection'],
                                       obj['file_name_root'], obj['save_dir'], obj['exts']))
    if 'usgs' in jsonobj:
        for obj in jsonobj['usgs']:
            fetch_list.append(USGSFetch(obj['modis_terra'], obj['collection'],
                                        obj['save_dir'], obj['min_year'], obj['exts']))
    if 'nersc' in jsonobj:
        for obj in jsonobj['nersc']:
            fetch_list.append(NERSCFetch(obj['collection'], obj['save_dir'],
                                         obj['min_year'], obj['exts']))

    # fetch
    logs = []
    for fobj in fetch_list:
        fobj.fetch()
        logs.append(fobj.log_loc)

    # validate logs
    logV = WgetLogValidator()
    logV.validate_logs(logs)
    subject = 'Daily Download Summary'
    text = logV.summary_str()
    mailObj = GmailSend(config.emailusr(), config.emailpwd(), config.emailusr(),
                        subject, text, config.emaillist())
    mailObj.send_email()
def farm_broker():
    global broker
    worker_address = Config.get('broker', 'worker_address')
    worker_port = Config.get('broker', 'worker_port')
    worker_address = "tcp://" + worker_address + ":" + worker_port
    client_address = Config.get('broker', 'client_address')
    client_port = Config.get('broker', 'client_port')
    client_address = "tcp://" + client_address + ":" + client_port
    context = zmq.Context()
    broker = FarmBroker(context, main_ep=worker_address, opt_ep=client_address)
    try:
        IOLoop.instance().start()
    except KeyboardInterrupt:
        IOLoop.instance().stop()
        broker.shutdown()
    return
def get_new_articles_for_query(query):
    """
    Fetches items for a query that are newer than the newest item that has
    already been fetched.
    """
    logging.error("$$$$ nytArticles.get_new_articles_for_query[]")
    # setup mirrors get_old_articles_for_query, whose request loop this
    # function shares; these bindings were missing from the original
    n_to_fetch = 10
    n_requests = int(math.ceil(n_to_fetch / int(Config.get("nyt_article")["response_size"])))
    if n_requests == 0:
        n_requests = 1
    prior_cached = len(query.children)
    req_pars = {
        'query_id': query.id(),
        'functions': [api.make_api_request, cache_response, fetch_comments]
    }
    for i in range(0, n_requests):
        request = {}
        for k, v in req_pars.iteritems():
            request[k] = v
        request['url'] = 'http://api.nytimes.com/svc/search/v1/article?query=text:' + query.doc['query_text'].replace(' ', '+') + '+comments:y'
        request['url'] = request['url'] + '&offset=' + str(prior_cached + (i * int(Config.get("nyt_article")["response_size"])))
        request['url'] = request['url'] + '&fields=body,byline,date,title,url,geo_facet,per_facet,des_facet,org_facet'
        request['url'] = request['url'] + '&api-key=' + Config.get('nyt_article')['key']
        # logging.critical('BSM[get_new_articles_for_query] Size:' + str(sys.getsizeof(pickle.dumps(request))))
        with voxpop.VPE.beanstalkd_lock:
            voxpop.VPE.get_beanstalkd().use("nytarticle")
            voxpop.VPE.get_beanstalkd().put(pickle.dumps(request), pri=100000)
    return query
def create(self, sources):
    """
    creates an rrd file

    sources is a list of data sources to add. Every item in the list must
    have a unique 'name' key that identifies the item
    """
    archives = Config.get('rrdtool', 'data_archives')
    data_archives = [x.strip() for x in archives.splitlines()]
    data_source = []
    for x in sources:
        if 'name' in x:
            data_source.append("DS:" + x['name'] + ":GAUGE:7200:-40:80")
    path = self.file_path
    rrdtool.create(path, "--step", '3600', data_source, data_archives)
    return
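# Call sketch (not from the source): per the docstring, each source is a
# dict with a unique 'name'. The names are hypothetical, and `rrd` is
# assumed to be an instance of the enclosing RRD class.
sources = [
    {'name': 'temperature'},
    {'name': 'humidity'},
]
rrd.create(sources)
# -> creates DS:temperature:GAUGE:7200:-40:80 and DS:humidity:GAUGE:7200:-40:80,
#    with the archive definitions taken from the 'rrdtool' config section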
def main():
    context = zmq.Context()
    broker = Process(target=farm_broker)
    broker.start()
    device_service = Process(target=device_manager, args=(context,))
    device_service.start()
    grainbin_enabled = Config.getboolean('services', 'grainbin_service')
    if grainbin_enabled:
        grainbin_service = Process(target=grainbin_manager, args=(context,))
        grainbin_service.start()
    try:
        if grainbin_enabled:
            grainbin_service.join()
        device_service.join()
        broker.join()
    except KeyboardInterrupt:
        print("Keyboard interrupt in main")
    return
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
from sqlalchemy.orm import sessionmaker

from config.config import Config

config = Config('./config/config.ini')
config.load_config()

engine = create_engine('mysql+mysqldb://{}:{}@{}/{}'.format(
    config.user, config.passwd, config.host, config.db))
Session = sessionmaker(bind=engine)
Base = declarative_base()
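# Usage sketch (not from the source) for the module above; `User` stands in
# for any model declared against Base.
session = Session()
try:
    users = session.query(User).all()
finally:
    session.close()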
#!/usr/bin/python3
from main.application import Application
from config.config import Config
from sensorlib.scale import Scale

config = Config()
config_data = config.get_config_data()
is_calibrated = config_data['SCALE'].getboolean("calibrated")
print(is_calibrated)

app = Application()
scale = Scale()

if __name__ == '__main__':
    if is_calibrated:
        app.start()
    else:
        scale.calibrate(5000)
def __init__(self, navigation, task_list):
    """ To initialize Houston """
    ################ INSTANCES ################
    self.gate = Gate(self)
    self.path_1 = Path(self)
    self.dice = Dice(self)
    self.path_2 = Path(self)
    self.chip_1 = Chip(self)
    self.chip_2 = Chip(self)
    self.roulette = Roulette(self)
    self.slots = Slots(self)
    self.pinger_a = PingerA(self)
    self.pinger_b = PingerB(self)
    self.cash_in = CashIn(self)
    #self.buoy = Buoy(self)
    self.navigation = navigation
    self.cvcontroller = CVController()
    self.config = Config()
    self.counts = Counter()

    ################ THRESHOLD VARIABLES ################
    self.task_timer = 300
    self.break_timer = 600

    ################ FLAG VARIABLES ################
    self.is_killswitch_on = False

    ################ TIMER/COUNTER VARIABLES ################
    self.last_time = time.time()

    ################ TASKS LIST ################
    self.tasks = task_list

    ################ DICTIONARIES ################
    """
    self.tasks values listed below
    'gate', 'path', 'dice', 'chip', 'path', 'chip', 'slots', 'pinger_b',
    'roulette', 'pinger_a', 'cash_in'
    """
    self.state_num = 0
    self.states = [
        self.gate, self.path_1, self.dice, self.chip_1, self.path_2,
        self.slots, self.chip_2, self.pinger_a, self.roulette,
        self.pinger_b, self.cash_in
    ]

    ################ AUV MOBILITY VARIABLES ################
    #self.rotational_movement = {-1: }
    self.height = 1
    self.queue_direction = []
    self.rotation = 15
    self.power = 120

    ################ TASK THREAD ################
    self.task_thread = None

    ################ ROS VARIABLES ################
    self.r = rospy.Rate(30)  # 30hz
    self.msg = CVIn()

    ################ CURRENT TASK VARIABLE ################
    self.current_task = None
import os
import sys

from pymongo import MongoClient

sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from config.config import Config

BASE_DIR = os.path.dirname(os.path.realpath(__file__))

conf = Config()
# mongo = MongoClient(host=f"mongodb://{conf.MONGO_REMOTE_IP}:27017")
mongo = MongoClient(host=f"mongodb://localhost:27017")
db = mongo['aircode']
col = db['shopnt_settop']

stids_info = list(col.find())

with open(os.path.join(BASE_DIR, 'data/sentence_from_settop.txt'), 'w') as f:
    for info in stids_info:
        sentence = ""
        stid = info['stid'][0]
        sentence += f"{stid}/셋탑아이디 "
        gender = info['gender']
        sentence += f"{gender}/성별 "
        orders = info['order']
        for order in orders:
            sentence += f"{order['prod_id']}/상품코드 "
            sentence += f"{order['prod_name']}/상품명 "
        f.write(f'{sentence}\n')
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.layers import Dense, LSTM, LeakyReLU, Dropout
from tensorflow.keras import backend
from tensorflow.keras.preprocessing.sequence import TimeseriesGenerator
from tensorflow.keras.models import load_model
import numpy as np
import tensorflow as tf
from keras_preprocessing.image import ImageDataGenerator
from tensorflow import keras
import pandas as pd

from config.config import Config
from utils.utils import create_train_test_set, create_pred_data

CONFIG = Config()
MODEL_CONFIG = CONFIG[CONFIG["parent_key"]]


class RecurrentNNWrapper(object):

    _model_name_ = "recurrent_nn"

    def __init__(self, model_details=None, verbose=False):
        self._verbose = verbose
        self._model_details = model_details
        self._model = None
        self._is_compiled = False

    def is_model_compiled(self):
        try:
def lfw_test(model, img_paths, identity_list, compair_list, batch_size):
    s = time.time()
    features, cnt = get_features(model, img_paths, batch_size=batch_size)
    t = time.time() - s
    print('total time is {}, average time is {}'.format(t, t / cnt))
    fe_dict = get_feature_dict(identity_list, features)
    acc, th = test_performance(fe_dict, compair_list)
    print('face verification accuracy: ', acc, 'threshold: ', th)
    result = ccf_test_performance(fe_dict, compair_list)
    print('ccf val verification result: ', result)
    return acc, result


if __name__ == '__main__':
    opt = Config()
    if opt.backbone == 'resnet18':
        model = resnet_face18(opt.use_se)
    elif opt.backbone == 'resnet34':
        model = resnet34()
    elif opt.backbone == 'resnet50':
        model = resnet50()

    model = DataParallel(model)
    # load_model(model, opt.test_model_path)
    model.load_state_dict(torch.load(opt.test_model_path))
    model.to(torch.device("cuda"))

    identity_list = get_lfw_list(opt.lfw_test_list)
    img_paths = [os.path.join(opt.lfw_root, each) for each in identity_list]
_id : { "type" : "$rooms.roomType" }, "count": { $sum: "$rooms.qty"}, "total_sales": { $sum: "$total_by_type"}} } ]); """ # python imports import os, sys sys.path.append(os.path.realpath("src")) import pymongo from config.config import Config from booksomeplace.domain.booking import BookingService # setting up the connection + collection service = BookingService(Config()) # formulate query target_year = 2019 pipeline = [{ '$match': { '$and': [{ 'bookingInfo.arrivalDate': { '$regex': '^' + str(target_year) }, 'bookingInfo.paymentStatus': 'confirmed' }] } }, { '$unwind': '$rooms' }, {
class StartpageWindow(wx.Frame):
    """Main class of sc2mafia the start page, the create game page.

    All GUI logic is here.
    """

    def __init__(self, parent, id, title, style):
        """Construct the GUI from config file, and bind GUI events to
        their handlers.
        """
        # read sc2mafia.cfg
        self.readFromCfg()
        # call the base class constructor
        wx.Frame.__init__(self, parent, wx.ID_ANY, title,
                          size=self.framesize, style=style)
        # load resources
        # self.resin = Resin(os.path.join(homepath,
        #                    'res', self.prefer.getValue("iconset")))
        self.resin = Resin()
        # icon
        self.SetIcon(self.resin.getIcon("notalon"))
        # tips
        self.tips = wx.CreateFileTipProvider("Tips.txt", 0)
        self.showtips = int(self.config.get("other", "showtips"))
        if self.showtips:
            wx.CallAfter(self.ShowTips)  # show the tips alongside the main frame
            # self.ShowTips()  # the opposite: show the main frame only after the tips
        # create the MenuBar
        self.createMenuBar()
        # create the ToolBar
        self.createToolBar()
        # create the StatusBar
        self.createStatusBar()
        # create the main display area
        self.createMainWindow()
        # bind the window-close handler
        self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
        # create the TaskBarIcon
        self.tbIcon = tbIcon(self)

    # ----------------- helpers for building the main frame ------------------ #
    def readFromCfg(self):
        """Read the config file."""
        self.config = Config(os.path.join(homepath, "sc2mafia.cfg"))
        self.framesize = (int(self.config.get("frame", "width")),
                          int(self.config.get("frame", "height")))

    def menuData(self):
        """Menu item data."""
        return (
            (u"程序", (u"退出", u"退出程序", self.OnCloseWindow)),
            (
                u"员工档案",
                (u"新建", u"在远程服务器新建一个员工档案", self.OnCreateStaff),
                (u"修改", u"修改一个员工档案", self.OnModifyStaff),
                (u"查看", u"查看所有员工档案", self.OnDisplayStaff),
                (u"筛选", u"筛选员工档案", self.OnFilterStaff),
            ),
            (u"工资", (u"工资单发送", "用邮件发送工资单", self.OnMailSalary)),
            (u"其他", (u"报销系统", u"在这里登录报销系统", self.OnOpenExpAccHtml)),
            (u"工具", (u"通讯录", u"快速查询通讯录的小工具", self.OnSearchAddr)),
            (
                u"帮助",
                (u"用户手册", u"用户手册", self.OnManual),
                ("", "", ""),
                (u"版权", u"本软件的版权信息", self.OnCopyRight),
                (u"关于作者", u"本软件作者的相关信息", self.OnAuthor),
                (u"关于本软件", u"本软件的相关信息", self.OnAbout),
            ),
        )

    def createMenuBar(self):
        """Create the menu bar."""
        menuBar = wx.MenuBar()
        for eachMenuData in self.menuData():
            menuLabel = eachMenuData[0]
            menuItems = eachMenuData[1:]
            menuBar.Append(self.createMenu(menuItems), menuLabel)
        self.SetMenuBar(menuBar)

    def createMenu(self, menuData):
        """Create a single menu -- factored out of createMenuBar."""
        menu = wx.Menu()
        for eachLabel, eachStatus, eachHandler in menuData:
            if not eachLabel:
                menu.AppendSeparator()
                continue
            menuItem = menu.Append(-1, eachLabel, eachStatus)
            self.Bind(wx.EVT_MENU, eachHandler, menuItem)
        return menu

    def toolBarData(self):
        """Toolbar item data."""
        return (
            (u"创建", "new.bmp", u"新建一个员工档案", self.OnCreateStaff),
            # (u"修改", self.OnModifyStaff),
            (u"查看", "display.bmp", u"浏览", self.OnDisplayStaff),
            (u"筛选", "search.bmp", u"筛选", self.OnFilterStaff),
            ("", "", "", ""),
            (u"通讯录", "addr.bmp", u"搜索通讯录", self.OnSearchAddr),
        )

    def createToolBar(self):
        """Create the toolbar."""
        toolBar = self.CreateToolBar()
        for each in self.toolBarData():
            self.createSimpleTool(toolBar, *each)
        toolBar.Realize()

    def createSimpleTool(self, toolbar, label, filename, help, handler):
        """Create a single tool button -- factored out of createToolBar."""
        if not label:
            toolbar.AddSeparator()
            return
        bmp = wx.Image(filename, wx.BITMAP_TYPE_BMP).ConvertToBitmap()
        tool = toolbar.AddSimpleTool(-1, bmp, label, help)
        self.Bind(wx.EVT_MENU, handler, tool)

    def createMainWindow(self):
        """Create the main display window."""
        pass

    # show the tip of the day
    def ShowTips(self):
        """Shows the tips window on startup; returns False if the option to
        turn off tips is checked.
""" # constructs the tip (wx.ShowTip), which returns whether or not the # user checked the 'show tips' box if wx.ShowTip(None, self.tips, True): btmp = 1 print 1 else: btmp = 0 print 0 self.config.set("other", "showtips", btmp) return self.config.get("other", "showtips") # ----------------------主框架的事件响应函数------------------------# def OnCreateStaff(self, event): pass def OnModifyStaff(self, event): pass def OnDisplayStaff(self, event): pass def OnFilterStaff(self, event): pass def OnMailSalary(self, event): pass def OnOpenExpAccHtml(self, event): pass def OnSearchAddr(self, event): pass def OnManual(self, event): pass def OnCopyRight(self, event): pass def OnAuthor(self, event): pass def OnAbout(self, event): pass def createStatusBar(self): self.statusBar = self.CreateStatusBar() self.statusBar.SetFieldsCount(3) self.statusBar.SetStatusWidths([-1, -2, -3]) # Sash位置变动 def OnSashPosChanged(self, event): """Handler for when the splitter sash, who divided the `tree` and the `content`, is moved. """ pass # pos = self.splitter.GetSashPosition() # self.config.setValue("sashposition", pos) def OnSelChanged(self): pass # 关闭主框架,清理资源 # TODO:devo def OnCloseWindow(self, event): self.tbIcon.Destroy() self.Destroy()
def test_get():
    config = Config(config_file_master)
    assert config.get('default.city') == 'Anyang'
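# A hypothetical fixture consistent with the dotted section.key lookup
# above; the real config_file_master is not shown in the source.
config_file_master_contents = """
[default]
city = Anyang
"""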
def __init__(self):
    self.config = Config()
# financial report: future revenue trend report

# python imports
import pprint
import json
from datetime import datetime, timedelta
import os, sys
sys.path.append(os.path.realpath("src"))
import pymongo
from config.config import Config
from booksomeplace.domain.booking import BookingService

# setting up the connection + collection
bookSvc = BookingService(Config())

# init vars
params = {
    'trend_city': None,
    'trend_stateProvince': 'England',
    'trend_locality': None,
    'trend_country': 'GB',
    'trend_postalCode': None,
}

# calculate dates for the report
date_format = '%Y-%m-%d %H:%M:%S'
factor = 0.10
today = datetime.now()
historic = timedelta(days=18 * 30)
hist_date = today - historic
today_str = today.strftime(date_format)
def __init__(self):
    self.config = Config()
    self.username = self.config.BITBUCKET_USERNAME
    self.app_password = self.config.BITBUCKET_APP_PASSWORD
    self.owner = self.config.BITBUCKET_OWNER
    self.client = Client(self.username, self.app_password, self.owner)
def __init__(self, ws, ws_forwarder):
    self.ws = ws
    self.main_config = Config()
    self.access_key = ""
    self.device_id = ""
    self.wsclient = ws_forwarder
def __init__(self):
    self.es = elastic()
    self.utils = Utils()
    self.config = Config()
class Data:
    """
    Data is a set of variables which are essentially global variables which
    hold information about the gcode file opened, the machine which is
    connected, and the user's settings. These variables are NOT thread-safe.
    The queue system should always be used for passing information between
    threads.
    """

    """
    Data available to all widgets
    """

    # Gcodes contains all of the lines of gcode in the opened file
    clients = []
    gcode = []
    gcodeFileUnits = "INCHES"
    sentCustomGCode = ""
    compressedGCode = None
    compressedGCode3D = None
    version = "1.27"
    stockFirmwareVersion = None
    customFirmwareVersion = None
    holeyFirmwareVersion = None
    controllerFirmwareVersion = 0

    '''
    Version Updater
    '''
    lastChecked = -1
    pyInstallCurrentVersion = 0.9335
    pyInstallUpdateAvailable = False
    pyInstallUpdateBrowserUrl = ""
    pyInstallUpdateVersion = 0
    pyInstallPlatform = "win"
    pyInstallType = "singlefile"
    pyInstallInstalledPath = ""

    # all of the available COM ports
    comPorts = []
    # This defines which COM port is used
    comport = ""
    # stores value to indicate whether or not fake_servo is enabled
    fakeServoStatus = False
    # The index of the next unread line of Gcode
    gcodeIndex = 0
    # Index of changes in z
    zMoves = []
    # Holds the current value of the feed rate
    feedRate = 20
    # holds the address of the g-code file so that the gcode can be refreshed
    gcodeFile = ""
    importFile = ""
    # holds the current gcode x,y,z position
    currentGcodePost = [0.0, 0.0, 0.0]
    # the current position of the cutting head
    currentpos = [0.0, 0.0, 0.0]
    target = [0.0, 0.0, 0.0]
    units = "MM"
    # Gcode positioning mode:
    # 0 = G90 (Absolute)
    # 1 = G91 (Relative)
    positioningMode = 0
    tolerance = 0.5
    gcodeShift = [0.0, 0.0]  # the amount that the gcode has been shifted
    currentTool = 0  # current tool.. upon startup, 0 is the same value as what the controller would have.
    currentZTarget = 0  # current target for Z-Axis move. Need to track so if user pauses, we can move back to that spot.
message = "" # used to update the client logger = Logger( ) # the module which records the machines behavior to review later config = Config() # Background image stuff, persist but not saved backgroundFile = None backgroundTexture = None backgroundManualReg = [] backgroundRedraw = False """ Flags """ # sets a flag if the gcode is being uploaded currently uploadFlag = 0 previousUploadStatus = 0 manualZAxisAdjust = False # this is used to determine the first time the position is received from the machine firstTimePosFlag = 0 # report if the serial connection is open connectionStatus = 0 # is the calibration process currently underway 0 -> false calibrationInProcess = False inPIDVelocityTest = False inPIDPositionTest = False PIDVelocityTestVersion = 0 PIDPositionTestVersion = 0 """ Pointers to Objects """ serialPort = None # this is a pointer to the program serial port object requestSerialClose = False # this is used to request the serialThread to gracefully close the port triangularCalibration = None # points to the triangular calibration object holeyCalibration = None # points to the triangular calibration object opticalCalibration = None # points to the optical calibration object opticalCalibrationImage = None # stores the current image opticalCalibrationImageUpdated = False # stores whether its been updated or not opticalCalibrationTestImage = None # stores the current image opticalCalibrationTestImageUpdated = False # stores whether its been updated or not cameraImage = None cameraImageUpdated = False continuousCamera = False gpioActions = None boardManager = None """ Colors """ fontColor = "[color=7a7a7a]" drawingColor = [0.47, 0.47, 0.47] posIndicatorColor = [0, 0, 0] targetIndicatorColor = [1, 0, 0] """ Misc UI bits that need to be saved between invocations (but not saved) """ zPush = None zPushUnits = "MM" zReadoutPos = 0.00 zPopupUnits = None zStepSizeVal = 0.1 """ Queues """ message_queue = LoggingQueue(logger) ui_controller_queue = queue.Queue() ui_queue1 = UIQueue() alog_streamer_queue = queue.Queue( 1000) # used for sending log to client screen.. limit to 1000 "items" log_streamer_queue = queue.Queue( 1000) # used for sending log to client screen.. limit to 1000 "items" console_queue = queue.Queue() # used for printing to terminal mcp_queue = queue.Queue( ) # used for sending messages to WebMCP(if enabled) webMCPActive = False # start false until WebMCP connects gcode_queue = queue.Queue() quick_queue = queue.Queue() """ Position and Error values """ xval = 0.0 yval = 0.0 zval = 0.0 xval_prev = -99990.0 yval_prev = -99990.0 zval_prev = -99990.0 leftError = 0.0 rightError = 0.0 leftError_prev = -99999.0 rightError_prev = -99999.9 """ Chain lengths as reported by controller """ leftChain = 1610 rightChain = 1610 """ Sled position computed from controller reported chain lengths """ computedX = 0 computedY = 0 """ Buffer size as reported by controller """ bufferSize = 127 pausedzval = 0.0 pausedPositioningMode = 0 pausedUnits = "INCHES" """ GCode Position Values """ previousPosX = 0.0 previousPosY = 0.0 previousPosZ = 0.0 """ Board data """ currentBoard = None shutdown = False hostAddress = "-" platform = "RPI" platformHome = "" def __init__(self): """ Initializations. """ self.logger.data = self self.config.data = self
import os

from config.config import Config
from linreg.plot import pipeline_plot

data_file_path = 'D:/Aaron/Bio/NU-Age/Data/'
figures_file_path = 'D:/Aaron/Bio/NU-Age/Linreg/Figures/'

otu_file = 'otus.txt'

# 'subject' for subject-control separation, 'all' - without separation
plot_type = 'subject'

if not os.path.isdir(figures_file_path):
    os.makedirs(figures_file_path)

config = Config(data_file_path, figures_file_path)
pipeline_plot(config, otu_file, plot_type)
model_cfg = '/home/baoxiong/Projects/LEMMA/experiments/src/settings/{}_fpv_plain_hoi.yaml'.format(model_name)
cfg.merge_from_file(model_cfg)
cfg.EXP.VIEW_TYPE = view_type
cfg.EXP.MODEL_TYPE = model_type
cfg = map_cfg(cfg)
cfg.OUTPUT_DIR = '/home/baoxiong/HDD/features'
cfg.TRAIN.CHECKPOINT_FILE_PATH = '/home/baoxiong/models/{}_{}_{}.pyth'.format(
    model_name, model_type, view_type)
cfg.MODEL.NUM_CLASSES = len(Metadata.hoi)
cfg.EXTRACT_FEATURE = args.extract
cfg.VIS = args.preds
cfg.EMBED_PATH = str(Path(Config().intermediate_path) / 'embeddings' / 'embedding.p')
cfg.TRAIN.CHECKPOINT_TYPE = 'pytorch'
cfg.DEBUG = args.debug
cfg.TEST.BATCH_SIZE = args.batch_size
cfg.DATA_LOADER.NUM_WORKERS = args.workers
cfg.NUM_GPUS = args.num_gpus
cfg.TRAIN.CACHE = args.cache

torch.multiprocessing.spawn(
    mpu.run,
    nprocs=cfg.NUM_GPUS,
    args=(
        cfg.NUM_GPUS,
        extract,
        args.init_method,
        cfg.SHARD_ID,
        cfg.NUM_SHARDS,
class ChallongeScraper(object):

    def __init__(self, tournament_id):
        self.tournament_id = tournament_id
        self.config = Config(config_file_path=CONFIG_FILE_PATH)
        self.api_key = self.config.get_challonge_api_key()
        self.api_key_dict = {'api_key': self.api_key}
        self.raw_dict = None
        self.get_raw()

    def get_raw(self):
        if self.raw_dict is None:
            self.raw_dict = {}
            url = TOURNAMENT_URL % self.tournament_id
            print(url)
            print(self.api_key_dict)
            rawrequest = requests.get(url, params=self.api_key_dict).request
            print(rawrequest.path_url)
            self.raw_dict['tournament'] = self._check_for_200(
                requests.get(url, params=self.api_key_dict)).json()
            url = MATCHES_URL % self.tournament_id
            self.raw_dict['matches'] = self._check_for_200(
                requests.get(url, params=self.api_key_dict)).json()
            url = PARTICIPANTS_URL % self.tournament_id
            self.raw_dict['participants'] = self._check_for_200(
                requests.get(url, params=self.api_key_dict)).json()
        return self.raw_dict

    def get_name(self):
        return self.get_raw()['tournament']['tournament']['name'].strip()

    def get_date(self):
        return iso8601.parse_date(
            self.get_raw()['tournament']['tournament']['created_at'])

    def get_matches(self):
        player_map = dict(
            (p['participant']['id'],
             p['participant']['name'].strip() if p['participant']['name']
             else p['participant']['username'].strip())
            for p in self.get_raw()['participants'])
        matches = []
        for m in self.get_raw()['matches']:
            m = m['match']
            winner_id = m['winner_id']
            loser_id = m['loser_id']
            if winner_id is not None and loser_id is not None:
                winner = player_map[winner_id]
                loser = player_map[loser_id]
                match_result = MatchResult(winner=winner, loser=loser)
                matches.append(match_result)
        return matches

    def get_players(self):
        return [p['participant']['name'].strip() if p['participant']['name']
                else p['participant']['username'].strip()
                for p in self.get_raw()['participants']]

    def _check_for_200(self, response):
        if response.status_code != 200:
            raise Exception('Received status code of %d' % response.status_code)
        return response
            unique_rowid.remove(i)
        snekdb.processStagedPayment(unique_rowid)
        # payment run complete
        print('Payment Run Completed!')
        # sleep 10 minutes between tx blasts
        time.sleep(600)
    else:
        time.sleep(150)


if __name__ == '__main__':
    data = Config()
    network = Network(data.network)
    u = Util(data.network)
    snekdb = SnekDB(data.database_user, data.network, data.delegate)
    exchange = Exchange(snekdb, data)
    client = u.get_client(network.api_port)
    build_network()
    dynamic = Dynamic(data.database_user, data.network, network.api_port)

    # get dot path for load_env and load
    dot = u.core + '/.env'
    load_dotenv(dot)

    while True:
        if data.multi == "Y":
            share_multipay()
        else:
def payments(request):
    # init vars
    loan = None
    amtPaid = 0.00
    amtDue = 0.00
    totalPaid = 0.00
    totalDue = 0.00
    message = ''
    borrower = None
    borrowerKey = None
    borrowerName = ''
    overpayment = 0.00
    loan_docs = []

    # init payments
    payment = Payment()
    payments = [payment.populate()]

    # init services
    publisher = Publisher()
    config = Config()
    userSvc = UserService(config, 'User', publisher)
    loanSvc = LoanService(config, 'Loan', publisher)
    utils = Utils()

    # set up attachments
    publisher.attach(publisher.EVENT_LOAN_UPDATE_BORROWER, userSvc.updateBorrowerListener)

    # check cache to see if there are any messages
    message = utils.read_cache('message', True)

    # get list of borrowers
    borrowers = userSvc.fetchBorrowerKeysAndNames()

    # if this is a POST request process the form data
    if request.method == 'POST':
        if 'borrowerKey' in request.POST:
            borrowerKey = request.POST['borrowerKey']
            utils.write_cache('borrowerKey', borrowerKey)
        if 'amount_paid' in request.POST:
            amtPaid = float(request.POST['amount_paid'])
    else:
        borrowerKey = utils.read_cache('borrowerKey')

    # use user service to get borrower's name
    if borrowerKey:
        borrower = userSvc.fetchUserByBorrowerKey(borrowerKey)
        # look for loan documents to be uploaded to GridFS
        grid = GridFS(loanSvc.getDatabase())
        if request.FILES and 'loan_doc' in request.FILES:
            fn = request.FILES['loan_doc']
            newFn = borrowerKey + '/' + os.path.basename(fn.name)
            grid.put(fn, filename=newFn)
            message = 'File uploaded'
        # get list of documents currently loaded
        loan_docs = []
        temp = grid.list()
        for name in temp:
            if name.find(borrowerKey) >= 0:
                loan_docs.append(os.path.basename(name))

    # if borrower instance is in play, process the payment
    if borrower:
        borrowerName = borrower.getFullName()
        totalDue = borrower.get('amountDue').to_decimal()
        totalPaid = borrower.get('amountPaid').to_decimal()
        # use loan service to get loan info
        loan = loanSvc.fetchLoanByBorrowerKey(borrowerKey)
        if loan:
            # collect info
            overpayment = loan.get('overpayment')
            loanInfo = loan.getLoanInfo()
            amtDue = loanInfo.getMonthlyPayment()
            # process payment
            if amtPaid > 0:
                if loanSvc.processPayment(borrowerKey, amtPaid, loan):
                    message = 'Payment processed'
                    loan = loanSvc.fetchLoanByBorrowerKey(borrowerKey)
                else:
                    message = 'Problem processing payment'
            # convert loan document from BSON to Decimal for presentation
            loan.convertBsonToDecimal()
            payments = loan.getPayments()
            overpayment = loan.get('overpayment')
            loanInfo = loan.getLoanInfo()
            amtDue = loanInfo.getMonthlyPayment()

    # render results
    params = {
        'loan': loan,
        'payments': payments,
        'borrowers': borrowers,
        'borrowerName': borrowerName,
        'borrowerKey': borrowerKey,
        'amountDue': amtDue,
        'overpayment': overpayment,
        'totalPaid': totalPaid,
        'totalDue': totalDue,
        'loan_docs': loan_docs,
        'message': message
    }
    main = render_to_string('maintenance.html', params)
    home = render_to_string('layout.html', {'contents': main})
    return HttpResponse(home)
class Houston():
    # implements(Task)

    def __init__(self, navigation, task_list):
        """ To initialize Houston """
        ################ INSTANCES ################
        self.gate = Gate(self)
        self.path_1 = Path(self)
        self.dice = Dice(self)
        self.path_2 = Path(self)
        self.chip_1 = Chip(self)
        self.chip_2 = Chip(self)
        self.roulette = Roulette(self)
        self.slots = Slots(self)
        self.pinger_a = PingerA(self)
        self.pinger_b = PingerB(self)
        self.cash_in = CashIn(self)
        #self.buoy = Buoy(self)
        self.navigation = navigation
        self.cvcontroller = CVController()
        self.config = Config()
        self.counts = Counter()

        ################ THRESHOLD VARIABLES ################
        self.task_timer = 300
        self.break_timer = 600

        ################ FLAG VARIABLES ################
        self.is_killswitch_on = False

        ################ TIMER/COUNTER VARIABLES ################
        self.last_time = time.time()

        ################ TASKS LIST ################
        self.tasks = task_list

        ################ DICTIONARIES ################
        """
        self.tasks values listed below
        'gate', 'path', 'dice', 'chip', 'path', 'chip', 'slots', 'pinger_b',
        'roulette', 'pinger_a', 'cash_in'
        """
        self.state_num = 0
        self.states = [
            self.gate, self.path_1, self.dice, self.chip_1, self.path_2,
            self.slots, self.chip_2, self.pinger_a, self.roulette,
            self.pinger_b, self.cash_in
        ]

        ################ AUV MOBILITY VARIABLES ################
        #self.rotational_movement = {-1: }
        self.height = 1
        self.queue_direction = []
        self.rotation = 15
        self.power = 120

        ################ TASK THREAD ################
        self.task_thread = None

        ################ ROS VARIABLES ################
        self.r = rospy.Rate(30)  # 30hz
        self.msg = CVIn()

        ################ CURRENT TASK VARIABLE ################
        self.current_task = None

    # print_task ##################################################################
    def print_tasks(self):
        counter = 0
        for i in self.tasks:
            print '{}: {}'.format(counter, i)
            counter += 1

    # do_task #####################################################################
    def start_all_tasks(self):
        if self.state_num > 10:
            print 'no more tasks to complete'
            return
        self.state = self.states[self.state_num]
        if not self.state.is_task_running:
            self.state.reset()
            print 'doing task: {}'.format(self.tasks[self.state_num])
            self.navigation.cancel_h_nav()
            self.navigation.cancel_m_nav()
            self.navigation.cancel_r_nav()
            self.task_thread_start(self.state, self.tasks[self.state_num],
                                   self.navigation, self.cvcontroller,
                                   self.power, self.rotation)
        else:
            print '\nTask is currently running.'
            print '\nPlease wait for task to finish or cancel'
        if self.state.is_complete:
            self.state_num += 1

    # do_one_task #################################################################
    def do_one_task(self, task_num):
        print ('\nattempting to run task number: {}'
               '\ntask: {}').format(task_num, self.tasks[task_num])
        self.state = self.states[task_num]
        if not self.state.is_task_running:
            self.state.reset()
            self.navigation.cancel_h_nav()
            self.navigation.cancel_m_nav()
            self.navigation.cancel_r_nav()
            self.task_thread_start(self.state, self.tasks[task_num],
                                   self.navigation, self.cvcontroller,
                                   self.power, self.rotation)
        else:
            print '\nTask is currently running.'
            print '\nPlease wait for task to finish or cancel'

    # stop_task ###################################################################
    def stop_task(self):
        # self.state = self.states[self.state_num]
        try:
            self.state.stop_task = True
        except:
            print 'no task currently running to stop'
        self.navigation.cancel_h_nav()
        self.navigation.cancel_m_nav()
        self.navigation.cancel_r_nav()

    # return_raw_frame ############################################################
    def return_raw_frame(self):
        if self.state.is_task_running:
            return self.cvcontroller.current_raw_frame()
        else:
            print 'camera is currently not running'

    # return_processed_frame ######################################################
    def return_processed_frame(self):
        if self.state.is_task_running:
            return self.cvcontroller.current_processed_frame()
        else:
            print 'camera is currently not running'

    # task_thread_start ###########################################################
    def task_thread_start(self, task_call, task_name, navigation, cvcontroller,
                          power, rotation):
        self.reset_thread()
        self.task_thread = Thread(target=task_call.start,
                                  args=(task_name, navigation, cvcontroller,
                                        power, rotation))
        self.task_thread.start()

    # reset_thread ################################################################
    def reset_thread(self):
        if self.task_thread:
            self.task_thread = None

    # TODO currently unused. will remove eventually
    # get_task ####################################################################
    def get_task(self):
        self.tasks = self.config.get_config('auv', 'tasks')
        # ['gate', 'path', 'dice', 'chip', 'path', 'chip', 'slots', 'pinger_b',
        #  'roulette', 'pinger_a', 'cash_in']

    # start #######################################################################
    def start(self):
        #self.get_task()
        # similar start to other classes, such as auv, and keyboard
        #self.is_killswitch_on = True
        self.navigation.start()

    # stop ########################################################################
    def stop(self):
        # similar stop to other classes, such as auv, and keyboard
        #self.is_killswitch_on = False
        self.navigation.stop()
import json

import gspread
from oauth2client.service_account import ServiceAccountCredentials

from config.config import Config

config = Config()

if not config.sheets_id:
    print("Please set a sheets id in config.ini")
if not config.sheets_worksheet:
    print("Please set a sheets worksheet in config.ini")


def addRecord(List):
    scope = [
        'https://spreadsheets.google.com/feeds',
        'https://www.googleapis.com/auth/drive'
    ]
    # get email and key from creds
    credentials = ServiceAccountCredentials.from_json_keyfile_name(
        'credentials.json', scope)
    # authenticate with Google
    file = gspread.authorize(credentials)
    # open sheet
    sheet = file.open_by_key(config.sheets_id).worksheet(config.sheets_worksheet)
    sheet.append_row(List)
def main():
    parser = argparse.ArgumentParser(
        description="netplot - plots programs accessing the network")
    parser.add_argument(
        "-i", "--iface",
        help="Interface to use, default is scapy's default interface")
    parser.add_argument(
        "-v", "--verbose", dest="verbose", action="store_true",
        help="Verbose output for each processed packet")
    parser.add_argument(
        "-vv", "--verbose-extra", dest="verbose_extra", action="store_true",
        help="Extra verbose output for each processed packet")
    parser.add_argument(
        "-d", "--resolve-domain", dest="collect_hosts", action="store_true",
        help="Resolve domains called instead of processes")
    parser.add_argument(
        "-r", "--raw", dest="raw", action="store_true",
        help="Disable both domain and process resolution")
    parser.add_argument(
        "-f", "--file", dest="filename", action="store",
        help="Read packets from input file instead of directly accessing network")
    parser.add_argument(
        "-m", "--missed", dest="show_missed", action="store_true",
        help="Show not supported protocols as missed packets")
    parser.add_argument(
        "-p", "--show-port", dest="show_port", action="store_true",
        help="Show which port each process is listening on and its PID. "
             "Not compatible with -r and -d")
    parser.add_argument(
        "-F", "--filter", dest="flt", action="store",
        help="Filter in BPF syntax (same as scapy)")
    parser.add_argument(
        "-x", "--incoming", dest="incoming", action="store_true",
        help="Process incoming packets instead of outgoing")
    parser.add_argument(
        "-b", "--both", dest="both", action="store_true",
        help="Process both incoming and outgoing packets")
    parser.add_argument(
        "-n", "--no-analysis", dest="no_analysis", action="store_true",
        help="Don't plot anything, just display collected entries (ideal for "
             "further processing). This ignores -m")
    args = parser.parse_args()

    global config, processor
    config = Config(args)
    if config.collect_hosts:
        processor = HostProcessor(config, DomainProvider(config))
    elif config.raw:
        processor = RawProcessor(config)
    else:
        processor = ProcessProcessor(config)
    sniff_packets()
def __new__(mcs, *args, **kwargs):
    x = super().__new__(mcs, *args, **kwargs)
    x.config = Config.from_json(x.__name__.lower())
    return x
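# Context sketch (not from the source): this __new__ belongs to a metaclass,
# so every class created with it loads its own JSON config keyed by the
# lowercased class name. ConfigMeta and Worker are hypothetical names.
class ConfigMeta(type):
    def __new__(mcs, *args, **kwargs):
        x = super().__new__(mcs, *args, **kwargs)
        x.config = Config.from_json(x.__name__.lower())
        return x

class Worker(metaclass=ConfigMeta):
    pass

# Worker.config was populated from the "worker" config at class-creation time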
    messenger.add_handler(GetAccountInfo.url_request, GetAccountInfo,
                          GetAccountInfo.methods, async=True)


if __name__ == '__main__':
    # TODO RESTMessenger goes into an infinite loop
    # TODO If you want to use it you must launch the messenger in a thread
    # TODO maybe create appropriate classes for requests
    # GUI server will work as a web server and won't make large requests
    curr_dir_path = os.path.dirname(os.path.abspath(__file__)) + '/'
    config_file_path = curr_dir_path + CONFIG_FILE
    config = Config(config_file_path)
    config.read_config()
    c2_config_dict = config.get_sub_config_by_key("c2_config")
    oss_bss_config_dict = config.get_sub_config_by_key("oss_bss_config")

    rest_messenger = RESTMessenger(server_config=c2_config_dict,
                                   client_config=oss_bss_config_dict,
                                   async=True, ssl=True)
    # rest_messenger.print_attributes()
    add_handlers(rest_messenger)

    auth_manager = AuthManager()
    handler_list = [LoginRESTMsg, LogoutRESTMsg, StatusRESTMsg]
    auth_manager.add_handler_list(handler_list)
def __init__(self):
    self.config = Config()
    self.utils = Utils()
def __init__(self):
    self.const = Constants()
    self.config = Config()
class LiquiTrader:
    """
    Needs:
        - self.exchange
        - Config
        - Buy/sell/dca strategies

    functions:
        - Analyze buy strategies
        - Analyze sell strategies
        - Analyze dca strategies
        - Handle possible sells
        - Handle buys
        - Handle dca buys
        - Get active config
        - Update config
        - Update strategies
    """

    def __init__(self, shutdown_handler):
        self.shutdown_handler = shutdown_handler
        self.market_change_24h = 0
        self.exchange = None
        self.statistics = {}
        self.config = None
        self.buy_strategies = None
        self.sell_strategies = None
        self.dca_buy_strategies = None
        self.trade_history = []
        self.indicators = None
        self.timeframes = None
        self.owned = []
        self.possible_trades = []
        self.below_max_pairs = False

    # ----
    def initialize_config(self):
        self.config = Config(self.update_config)
        self.config.load_general_settings()
        self.config.load_global_trade_conditions()
        self.config.load_pair_settings()
        self.indicators = self.config.get_indicators()
        self.timeframes = self.config.timeframes

    # ----
    def update_config(self, strategies=False):
        old_timeframes = self.timeframes
        self.config.load_general_settings()
        self.config.load_global_trade_conditions()
        self.config.load_pair_settings()
        self.timeframes = self.config.timeframes
        self.load_strategies()
        self.indicators = self.config.get_indicators()

        # todo fix and make more efficient, currently always updating
        timeframes_changed = False
        for tf in self.config.timeframes:
            if tf not in old_timeframes:
                timeframes_changed = True
        if timeframes_changed:
            print("timeframe_changed")
            self.exchange.reload_candles()

    # ----
    def initialize_exchange(self):
        general_settings = self.config.general_settings
        general_settings['starting_balance'] = float(general_settings['starting_balance'])

        from gui.gui_server import get_keys
        keys = get_keys()

        if general_settings["start_delay"]:
            time.sleep(int(general_settings["start_delay"]))

        if general_settings['exchange'].lower() == 'binance' and general_settings['paper_trading']:
            self.exchange = PaperBinance.PaperBinance(
                'binance', general_settings['market'].upper(),
                general_settings['starting_balance'], keys, self.timeframes)
            # use USDT in tests to decrease API calls (only ~12 pairs vs 100+)
        elif general_settings['exchange'].lower() == 'binance':
            self.exchange = BinanceExchange.BinanceExchange(
                'binance', general_settings['market'].upper(),
                general_settings['starting_balance'], keys, self.timeframes)
        elif general_settings['paper_trading']:
            self.exchange = GenericPaper.PaperGeneric(
                general_settings['exchange'].lower(), general_settings['market'].upper(),
                general_settings['starting_balance'], keys, self.timeframes)
        else:
            self.exchange = GenericExchange.GenericExchange(
                general_settings['exchange'].lower(), general_settings['market'].upper(),
                general_settings['starting_balance'], keys, self.timeframes)

        asyncio.get_event_loop().run_until_complete(self.exchange.initialize())

    # ----
    def run_exchange(self):
        import utils.runtime_handler
        # Enable custom traceback handling (to strip build path info)
        utils.runtime_handler.enable_traceback_hook()

        self.shutdown_handler.add_task()
        try:
            self.exchange.start()
        # Catch Twisted connection lost bullshit
        except Exception as _ex:
            exception_data = traceback.format_exc()
            if 'connectionLost' in exception_data:
                pass
            else:
                raise _ex

    # ----
    def stop_exchange(self):
        self.exchange.stop()
        self.shutdown_handler.remove_task()

    # ----
    # return total current value (pairs + balance)
    def get_tcv(self):
        pending = 0
        self.owned = []
        for pair, value in self.exchange.pairs.items():
            if 'total' not in value or 'close' not in value:
                continue
            if value['close'] is None:
                continue
            pending += value['close'] * value['total']
            if value['close'] * value['total'] > self.exchange.get_min_cost(pair):
                self.owned.append(pair)
            else:
                self.exchange.pairs[pair]['dca_level'] = 0
        return pending + self.exchange.balance

    # ----
    def load_strategies(self):
        # TODO get candle periods and indicators here or in load config
        # instantiate strategies
        buy_strategies = []
        for strategy in self.config.buy_strategies:
            buy_strategies.append(
                BuyCondition(strategy, pair_settings=self.config.pair_specific_settings))

        dca_buy_strategies = []
        for strategy in self.config.dca_buy_strategies:
            dca_buy_strategies.append(
                DCABuyCondition(strategy, pair_settings=self.config.pair_specific_settings))

        sell_strategies = []
        for strategy in self.config.sell_strategies:
            sell_strategies.append(
                SellCondition(strategy, pair_settings=self.config.pair_specific_settings))

        self.buy_strategies = buy_strategies
        self.sell_strategies = sell_strategies
        self.dca_buy_strategies = dca_buy_strategies

    # ----
    def get_possible_buys(self, pairs, strategies):
        possible_trades = {}
        tcv = self.get_tcv()
        for strategy in strategies:
            for pair in pairs:
                # strategy.evaluate(pairs[pair], statistics[pair])
                try:
                    result = strategy.evaluate(pairs[pair], self.statistics[pair], tcv)
                except Exception as ex:
                    print('exception in get possible buys: {}'.format(traceback.format_exc()))
                    self.exchange.reload_single_candle_history(pair)
                    continue
                if result is not None:
                    if pair not in possible_trades or possible_trades[pair] > result:
                        possible_trades[pair] = result
        self.possible_trades = possible_trades
        return possible_trades

    # ----
    def get_possible_sells(self, pairs, strategies):
        possible_trades = {}
        for strategy in strategies:
            for pair in pairs:
                # strategy.evaluate(pairs[pair], statistics[pair])
                result = strategy.evaluate(pairs[pair], self.statistics[pair])
                if result is not None:
                    if pair not in possible_trades or possible_trades[pair] < result:
                        possible_trades[pair] = result
        return possible_trades

    # ----
    @staticmethod
    def check_for_viable_trade(current_price, orderbook, remaining_amount,
                               min_cost, max_spread, dca=False):
        can_fill, minimum_fill = process_depth(orderbook, remaining_amount, min_cost)
        if can_fill is not None and in_max_spread(current_price, can_fill.price, max_spread):
            return can_fill
        elif minimum_fill is not None and in_max_spread(
                current_price, minimum_fill.price, max_spread) and not dca:
            return minimum_fill
        else:
            return None

    # ----
    # check min balance, max pairs, quote change, market change, trading enabled,
    # blacklist, whitelist, 24h change
    # todo add pair specific settings
    def handle_possible_buys(self, possible_buys):
        # Alleviate lookup cost
        exchange = self.exchange
        config = self.config
        exchange_pairs = exchange.pairs

        for pair in possible_buys:
            exch_pair = exchange_pairs[pair]
            if self.pair_specific_buy_checks(
                    pair, exch_pair['close'], possible_buys[pair],
                    exchange.balance, exch_pair['percentage'],
                    config.global_trade_conditions['min_buy_balance']):
                # amount we'd like to own
                target_amount = possible_buys[pair]
                # difference between target and current owned quantity.
                remaining_amount = target_amount - exch_pair['total']
                # lowest cost trade-able
                min_cost = exchange.get_min_cost(pair)
                current_price = exch_pair['close']

                # get orderbook; if time since last orderbook check is too soon, it will return None
                orderbook = exchange.get_depth(pair, 'BUY')
                if orderbook is None:
                    continue

                # get viable trade, returns None if none available
                price_info = self.check_for_viable_trade(
                    current_price, orderbook, remaining_amount, min_cost,
                    config.global_trade_conditions['max_spread'])

                # Check to see if amount remaining to buy is greater than min trade quantity for pair
                if price_info is None or price_info.amount * price_info.average_price < min_cost:
                    continue

                # place order
                order = exchange.place_order(pair, 'limit', 'buy',
                                             price_info.amount, price_info.price)
                # store order in trade history
                self.trade_history.append(order)
                self.save_trade_history()

    # ----
    def handle_possible_sells(self, possible_sells):
        # Alleviate lookup cost
        exchange = self.exchange
        exchange_pairs = exchange.pairs

        for pair in possible_sells:
            exch_pair = exchange_pairs[pair]
            # lowest cost trade-able
            min_cost = exchange.get_min_cost(pair)
            if exch_pair['total'] * exch_pair['close'] < min_cost:
                continue

            orderbook = exchange.get_depth(pair, 'sell')
            if orderbook is None:
                continue

            lowest_sell_price = possible_sells[pair]
            can_fill, minimum_fill = process_depth(orderbook, exch_pair['total'], min_cost)
            if can_fill is not None and can_fill.price > lowest_sell_price:
                price = can_fill
            elif minimum_fill is not None and minimum_fill.price > lowest_sell_price:
                price = minimum_fill
            else:
                continue

            current_value = exch_pair['total'] * price.average_price
            order = exchange.place_order(pair, 'limit', 'sell',
                                         exch_pair['total'], price.price)
            self.trade_history.append(order)
            self.save_trade_history()

    # ----
    def handle_possible_dca_buys(self, possible_buys):
        # Alleviate lookup cost
        exchange = self.exchange
        config = self.config
        exchange_pairs = exchange.pairs

        dca_timeout = float(config.global_trade_conditions['dca_timeout']) * 60
        for pair in possible_buys:
            exch_pair = exchange_pairs[pair]
            # lowest cost trade-able
            min_cost = exchange.get_min_cost(pair)
            if (exch_pair['total'] * exch_pair['close'] < min_cost
                    or time.time() - exch_pair['last_order_time'] < dca_timeout):
                continue

            if self.pair_specific_buy_checks(
                    pair, exch_pair['close'], possible_buys[pair],
                    exchange.balance, exch_pair['percentage'],
                    config.global_trade_conditions['dca_min_buy_balance'], True):
                current_price = exch_pair['close']
                # get orderbook; if time since last orderbook check is too soon, it will return None
                orderbook = exchange.get_depth(pair, 'BUY')
                if orderbook is None:
                    continue

                # get viable trade, returns None if none available
                price_info = self.check_for_viable_trade(
                    current_price, orderbook, possible_buys[pair], min_cost,
                    config.global_trade_conditions['max_spread'], True)

                # Check to see if amount remaining to buy is greater than min trade quantity for pair
                if price_info is None or price_info.amount * price_info.average_price < min_cost:
                    continue

                order = exchange.place_order(pair, 'limit', 'buy',
                                             possible_buys[pair], exch_pair['close'])
                if order['cost'] > min_cost:
                    exch_pair['dca_level'] += 1
                self.trade_history.append(order)
                self.save_trade_history()

    # ----
    def pair_specific_buy_checks(self, pair, price, amount, balance, change,
                                 min_balance, dca=False):
        # Alleviate lookup cost
        global_trade_conditions = self.config.global_trade_conditions

        min_balance = min_balance if not isinstance(min_balance, str) \
            else percentToFloat(min_balance) * self.get_tcv()
    self.below_max_pairs = self.is_below_max_pairs(
        len(self.owned), int(global_trade_conditions['max_pairs']))

    checks = [
        not exceeds_min_balance(balance, min_balance, price, amount),
        below_max_change(change, global_trade_conditions['max_change']),
        above_min_change(change, global_trade_conditions['min_change']),
        not is_blacklisted(pair, global_trade_conditions['blacklist']),
        is_whitelisted(pair, global_trade_conditions['whitelist'])
    ]

    if not dca:
        checks.append(self.exchange.pairs[pair]['total'] < 0.8 * amount)
        checks.append(self.below_max_pairs)

    return all(checks)

@staticmethod
def is_below_max_pairs(current_pairs, max_pairs):
    # max_pairs == 0 means "no limit"
    return current_pairs < max_pairs or max_pairs == 0

# ----

def global_buy_checks(self):
    # Alleviate lookup cost
    quote_change_info = self.exchange.quote_change_info
    market_change = self.config.global_trade_conditions

    self.market_change_24h = get_average_market_change(self.exchange.pairs)
    self.below_max_pairs = self.is_below_max_pairs(
        len(self.owned),
        int(self.config.global_trade_conditions['max_pairs']))
    self.check_24h_quote_change = in_range(
        quote_change_info['24h'],
        market_change['min_24h_quote_change'],
        market_change['max_24h_quote_change'])
    self.check_1h_quote_change = in_range(
        quote_change_info['1h'],
        market_change['min_1h_quote_change'],
        market_change['max_1h_quote_change'])
    self.check_24h_market_change = in_range(
        self.market_change_24h,
        market_change['min_24h_market_change'],
        market_change['max_24h_market_change'])

    return all((self.check_1h_quote_change, self.check_24h_market_change,
                self.check_24h_quote_change))

# ----

def do_technical_analysis(self):
    candles = self.exchange.candles

    for pair in self.exchange.pairs:
        if self.indicators is None:
            raise TypeError(
                '(do_technical_analysis) LiquiTrader.indicators cannot be None')

        try:
            self.statistics[pair] = run_ta(candles[pair], self.indicators)
        except Exception as ex:
            print('err in do ta', pair, ex)
            self.exchange.reload_single_candle_history(pair)
            continue

# ----

def save_trade_history(self):
    self.save_pairs_history()
    fp = 'tradehistory.json'
    with open(fp, 'w') as f:
        json.dump(self.trade_history, f)

# ----

def save_pairs_history(self):
    self.exchange.save()

# ----

def load_pairs_history(self):
    fp = 'pair_data.json'
    with open(fp, 'r') as f:
        pair_data = json.load(f)

    exchange_pairs = self.exchange.pairs
    for pair in exchange_pairs:
        if pair in pair_data:
            exch_pair = exchange_pairs[pair]
            # TODO @Kyle :: was the 'or' removed?
            if exch_pair['total_cost'] is None \
                    or self.config.general_settings['paper_trading']:
                exch_pair.update(pair_data[pair])
            else:
                exch_pair['dca_level'] = pair_data[pair]['dca_level']
                exch_pair['last_order_time'] = pair_data[pair]['last_order_time']

# ----

def load_trade_history(self):
    fp = 'tradehistory.json'
    with open(fp, 'r') as f:
        self.trade_history = json.load(f)

def pairs_to_df(self, basic=True, friendly=False, holding=False, fee=0.075):
    df = pd.DataFrame.from_dict(self.exchange.pairs, orient='index')

    times = []
    timezone = self.config.general_settings['timezone']
    try:
        arrow.utcnow().to(timezone)  # try to parse the timezone
    except arrow.parser.ParserError:
        print('Invalid timezone in config, defaulting to UTC')
        timezone = 'UTC'

    for t in df.last_order_time.values:
        times.append(arrow.get(t).to(timezone).datetime)
    df.last_order_time = pd.DatetimeIndex(times)

    if 'total_cost' in df and 'close' in df:
        df['current_value'] = df.close * df.total * (1 - (fee / 100))
        df['gain'] = (df.bid - df.avg_price) / df.avg_price * 100 - fee

        if holding:
            dust = 0.02 if self.config.general_settings['market'].upper() == 'ETH' else 0.002
            df = df[df.total_cost > dust]

        if friendly:
            try:
                if len(df) > 0:
                    df = prettify_dataframe(df, self.exchange.quote_price)
            except (ValueError, TypeError):
                pass
            try:
                df = df[DEFAULT_COLUMNS] if basic else df
            except KeyError:
                pass
            df.rename(columns=COLUMN_ALIASES, inplace=True)
        return df
    else:
        return df

# ----

def get_pending_value(self):
    df = self.pairs_to_df()
    if 'total_cost' in df:
        return df.total_cost.sum() + self.exchange.balance
    else:
        return 0

# ----

def get_pair(self, symbol):
    return self.exchange.pairs[symbol]

# ----

@staticmethod
def calc_gains_on_df(df):
    if 'bought_price' in df:
        df['total_cost'] = df.bought_price * df.filled
        df['gain'] = df['cost'] - df['total_cost']
        df['percent_gain'] = (df['cost'] - df['total_cost']) / df['total_cost'] * 100
        return df
    else:
        df['total_cost'] = 0
        df['gain'] = 0
        df['percent_gain'] = 0
        return df

# ----

def get_daily_profit_data(self):
    if len(self.trade_history) < 1:
        # pad with a zero-value order so an empty history still resamples
        df = pd.DataFrame(
            self.trade_history +
            [PaperBinance.create_paper_order(0, 0, 'sell', 0, 0, 0)])
    else:
        df = pd.DataFrame(self.trade_history)

    df = self.calc_gains_on_df(df[df.side == 'sell'])

    times = []
    # todo timezones
    df = df.set_index(pd.to_datetime(df.timestamp, unit='ms'))
    for t in df.timestamp.values:
        times.append(
            arrow.get(t / 1000).to(
                self.config.general_settings['timezone']).datetime)

    df.timestamp = pd.DatetimeIndex(times)
    df.index = pd.DatetimeIndex(times)
    df = df.resample('1d').sum()
    df['date'] = df.index
    return df

# ----

def get_pair_profit_data(self):
    df = pd.DataFrame(self.trade_history)
    df = self.calc_gains_on_df(df)
    if len(df) == 0 or 'symbol' not in df:
        return df
    return df.groupby('symbol').sum()[['total_cost', 'cost', 'amount', 'gain']]

# ----

def get_total_profit(self):
    df = pd.DataFrame(self.trade_history)
    if 'side' not in df:
        return 0
    df = df[df.side == 'sell']
    if len(df) == 0:
        return 0

    # 'filled' is the amount filled
    df['total_cost'] = df.bought_price * df.filled
    df['gain'] = df['cost'] - df['total_cost']
    return df.gain.sum()

# ----

def get_cumulative_profit(self):
    df = self.get_daily_profit_data().drop(['date'], axis=1).cumsum()
    df['date'] = df.index
    return df

# ----

def get_trailing_pairs(self):
    return {
        "buy": list(map(vars, self.buy_strategies)),
        "sell": list(map(vars, self.sell_strategies)),
        "dca": list(map(vars, self.dca_buy_strategies))
    }
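# --- Hedged usage sketch (not part of the original class) ---
# One plausible tick of the trading loop implied by the methods above:
# refresh TA, gate buys on the global conditions, evaluate strategies, then
# execute. 'trader' stands in for a fully initialized trader instance.
def run_tick(trader):
    trader.do_technical_analysis()
    pairs = trader.exchange.pairs

    if trader.global_buy_checks():
        buys = trader.get_possible_buys(pairs, trader.buy_strategies)
        trader.handle_possible_buys(buys)
        dca_buys = trader.get_possible_buys(pairs, trader.dca_buy_strategies)
        trader.handle_possible_dca_buys(dca_buys)

    # sells are always evaluated, regardless of global buy conditions
    sells = trader.get_possible_sells(pairs, trader.sell_strategies)
    trader.handle_possible_sells(sells)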
def generate_and_store_embeddings():
    """
    Generate and store sentence embeddings for titles and abstracts.
    """
    resources_dir = Config.get_config("resources_dir_key")
    embeddings_path = Config.get_config("embeddings_path_key")
    title_embeddings_filename = Config.get_config(
        "title_embeddings_filename_key")
    abstract_embeddings_filename = Config.get_config(
        "abstract_embeddings_filename_key")
    sentence_embeddings_filename = Config.get_config(
        "sentence_embeddings_filename_key")

    title_embeddings_path = os.path.join(resources_dir, embeddings_path,
                                         title_embeddings_filename)
    abstract_embeddings_path = os.path.join(resources_dir, embeddings_path,
                                            abstract_embeddings_filename)
    sentence_embeddings_path = os.path.join(resources_dir, embeddings_path,
                                            sentence_embeddings_filename)

    dataset_df = DatasetUtils.get_microsoft_cord_19_dataset()
    dataset_df[Config.get_config("title_key")].map(ModelUtils.format_abstract)
    dataset_df[Config.get_config("abstract_key")].map(
        ModelUtils.format_abstract)

    if os.path.exists(title_embeddings_path):
        logging.info(
            "Microsoft CORD-19 dataset title embeddings have already been generated.")
    else:
        # Read UID and title from dataset
        uid_title_mapping_dict = dict()
        for index, row in dataset_df.iterrows():
            uid_title_mapping_dict[row[Config.get_config("cord_uid_key")]] = \
                row[Config.get_config("title_key")]

        logging.info(
            "Generating sentence embeddings for Microsoft CORD-19 dataset titles ...")
        uid_title_embedding_mapping_dict = ModelUtils.multiprocessing(
            ModelUtils.get_sentence_embeddings_from_dict,
            uid_title_mapping_dict.items(), 4)
        ModelUtils.write_to_pickle_file(title_embeddings_path,
                                        uid_title_embedding_mapping_dict)
        logging.info(
            "Generating sentence embeddings for Microsoft CORD-19 dataset titles completed.")

    if os.path.exists(abstract_embeddings_path):
        logging.info(
            "Microsoft CORD-19 dataset abstract embeddings have already been generated.")
    else:
        # Read UID and abstract from dataset
        uid_abstract_mapping_dict = dict()
        for index, row in dataset_df.iterrows():
            uid_abstract_mapping_dict[row[Config.get_config("cord_uid_key")]] = \
                row[Config.get_config("abstract_key")]

        logging.info(
            "Generating sentence embeddings for Microsoft CORD-19 dataset abstracts ...")
        uid_abstract_embedding_mapping_dict = ModelUtils.multiprocessing(
            ModelUtils.get_sentence_embeddings_from_dict,
            uid_abstract_mapping_dict.items(), 4)
        ModelUtils.write_to_pickle_file(abstract_embeddings_path,
                                        uid_abstract_embedding_mapping_dict)
        logging.info(
            "Generating sentence embeddings for Microsoft CORD-19 dataset abstracts completed.")

    if os.path.exists(sentence_embeddings_path):
        logging.info(
            "Microsoft CORD-19 dataset sentence embeddings have already been generated.")
    else:
        logging.info(
            "Generating text embeddings for Microsoft CORD-19 dataset ..."
        )
        resources_dir = Config.get_config("resources_dir_key")
        dataset_dir = Config.get_config("dataset_dir_key")
        microsoft_dir = Config.get_config("microsoft_dir_key")
        paper_sentence_text_dir = Config.get_config(
            "paper_sentence_text_dir_key")
        # avoid shadowing the 'dir' builtin
        sentence_text_dir = os.path.join(resources_dir, dataset_dir,
                                         microsoft_dir,
                                         paper_sentence_text_dir)

        os.mkdir(sentence_embeddings_path)
        files = os.listdir(sentence_text_dir)
        for file in files:
            try:
                output_pkl_filename = file.split(
                    Config.get_config("text_extension_key"))[0] + \
                    Config.get_config("pickle_extension_key")
                output_pkl_filepath = os.path.join(sentence_embeddings_path,
                                                   output_pkl_filename)
                # skip files whose embeddings were already written
                if os.path.exists(output_pkl_filepath):
                    continue

                sentence_embeddings_mapping_dict = \
                    ModelUtils.generate_sentence_embeddings_from_file(file)
                if sentence_embeddings_mapping_dict:
                    print("Writing path = {}".format(output_pkl_filepath))
                    ModelUtils.write_to_pickle_file(
                        output_pkl_filepath, sentence_embeddings_mapping_dict)
            except Exception:
                print("Error occurred while generating sentence embeddings "
                      "for File = {}. Skipping.".format(file))
                traceback.print_exc()
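# --- Hedged usage sketch (not from the original source) ---
# The function is idempotent per artifact: each branch regenerates its pickle
# only when the target path is missing, so re-running after a crash resumes
# where it left off. A minimal entry point might look like this:
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    generate_and_store_embeddings()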
        propName: 1,
        address: 1,
        reviews: 1
    }
}
"""

# python imports
import os, sys
sys.path.append(os.path.realpath("src"))
import pymongo
from config.config import Config
from booksomeplace.domain.property import PropertyService

# setting up the connection + collection
service = PropertyService(Config())

# formulate query params
cutoff = 3
cat = 'cleanliness'
key = 'reviews.' + cat
query = {key: {'$lt': cutoff}}
proj = {'propName': 1, 'address': 1, 'reviews': 1}
sort = [('propName', pymongo.ASCENDING)]
result = service.fetch(query, proj, sort)

# display results
pattern = "{:20}\t{:40}\t{:5.2f}"
print('{:20}\t{:40}\t{:5}'.format('Property Name', 'Address', 'Score'))
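# A minimal sketch of the display loop the header above implies; it assumes
# each document carries 'propName', an 'address' sub-document, and a numeric
# score under reviews.<category> -- adjust to the real schema.
for doc in result:
    addr = doc.get('address', {})
    addr_text = addr.get('city', '') if isinstance(addr, dict) else str(addr)
    score = doc.get('reviews', {}).get(cat, 0.0)
    print(pattern.format(doc.get('propName', ''), addr_text, float(score)))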
import os
import sys

from pymongo import MongoClient

# add root directory to python path
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/../'))

from config.config import Config
from dao import Dao

# script to create a new user in the db
if __name__ == "__main__":
    if len(sys.argv) < 4:
        print("incorrect number of arguments!")
        print("usage: python create_user.py username password region1 [region2] [region3] ...")
        sys.exit()

    username = sys.argv[1]
    password = sys.argv[2]
    regions = sys.argv[3:]

    config = Config()
    mongo_client = MongoClient(host=config.get_mongo_url())

    dao = Dao(None, mongo_client)
    if dao.create_user(username, password, regions):
        print("user created: {}".format(username))
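# Example invocation, following the usage string above (the username,
# password, and region names are placeholders):
#
#   python create_user.py alice s3cret norcal socal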
            self.layer['target']: text_y
        })
        print("test: accuracy {}, recall {}, precision {}".format(
            acc, recall, precision))

        saver.save(sess,
                   self.config.root + '/event_detect/saver/cnn/cnn',
                   global_step=step)
        print('checkpoint saved')
        # print(sess.run([self.layer['class_prob']],
        #                feed_dict={self.layer['input']: input}))
        print(test_result)

    def classify(self, sess, input_):
        class_prediction, confidence = sess.run(
            [self.layer['class_prediction'], self.layer['class_prob']],
            feed_dict={self.layer['input']: input_})
        # keep only the positive-class probability; the scalar comparison
        # below assumes input_ holds a single sample
        confidence = confidence[:, 1]
        # apply the configured decision threshold
        if confidence > self.config.prob:
            class_prediction = 1
        else:
            class_prediction = 0
        return class_prediction, confidence


if __name__ == '__main__':
    cnn = CNN()
    cnn.train(Config().iteration, new_training=True)
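# --- Hedged usage sketch (not from the original source) ---
# Restores the checkpoint written by train() and classifies one sample.
# Assumes the TF1-style graph above; 'sample' stands for a single
# preprocessed input, since classify() thresholds one positive-class
# probability at a time.
#
#   import tensorflow as tf
#
#   cnn = CNN()
#   with tf.Session() as sess:
#       saver = tf.train.Saver()
#       checkpoint = tf.train.latest_checkpoint(
#           cnn.config.root + '/event_detect/saver/cnn')
#       saver.restore(sess, checkpoint)
#       label, prob = cnn.classify(sess, sample)
#       print('label={}, prob={}'.format(label, prob))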
def readFromCfg(self):
    """Read settings from the configuration file."""
    self.config = Config(os.path.join(homepath, "sc2mafia.cfg"))
    self.framesize = (int(self.config.get("frame", "width")),
                      int(self.config.get("frame", "height")))
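# A plausible sc2mafia.cfg for the lookups above; the section and option
# names come straight from the code, while the values are illustrative:
#
#   [frame]
#   width = 1024
#   height = 768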
dense_inv = cmocean_to_plotly(cmocean.cm.dense, 10)

# avoid shadowing the 'type' builtin
data_type = 'original'
path = get_path()
in_path = path
out_path = path + '/spearman/supp_fig_11/' + data_type
if not os.path.isdir(out_path):
    os.makedirs(out_path)

with open(path + '/original/cytokines.txt') as f:
    cytokines = f.read().splitlines()

config = Config(in_path, out_path)
common_subjects = config.get_common_subjects_with_adherence_and_cytokines(
    cytokines)

xs = cytokines

otus, _, _ = get_otus(data_type)
ys = otus

common_subjects_entire = common_subjects + common_subjects

subject_row_dict_T0 = config.otu_counts.subject_row_dict_T0
subject_row_dict_T1 = config.otu_counts.subject_row_dict_T1

common_otu_t0, common_otu_t1, common_otu_col_dict = config.separate_common_otus()
common_otu_entire = np.concatenate((common_otu_t0, common_otu_t1), axis=0)
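# Illustration of the stacking convention above (a sketch, not project code):
# after the concatenation, rows 0..n-1 of the combined matrix are the T0
# samples and rows n..2n-1 are the T1 samples for the same subjects, which is
# why the subject list is simply repeated to label both halves.
import numpy as np

t0 = np.arange(6).reshape(2, 3)        # 2 subjects x 3 OTUs at timepoint T0
t1 = np.arange(6, 12).reshape(2, 3)    # the same 2 subjects at timepoint T1
entire = np.concatenate((t0, t1), axis=0)
subjects = ['s1', 's2']
subjects_entire = subjects + subjects  # labels the rows of 'entire' in order
assert entire.shape == (4, 3) and len(subjects_entire) == 4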