def initialize(self, client: "Client") -> None:
    """
    Initialize this EventHandler

    Args:
        client: Ready Discord client object

    Raises:
        RuntimeError when passed discord client is not ready
    """
    if not client.is_ready():
        raise RuntimeError(
            "Discord client passed into EventHandler was not ready for use"
        )
    self.client = client
    # Make sure each of the entries in these arrays have an entry in the
    # function_map dictionary for their relevant functions
    self.msg_author_triggers: List[str] = []
    self.msg_contains_triggers: List[str] = []
    # Built-in first-word commands handled by the bot
    self.msg_first_word_triggers = [
        "danr", "spam", "choose", "waifu", "imouto", "oneechan", "oneesan"
    ]
    # Optional nag whenever a message contains the word "linux"
    if get_config("linux_nag") == "true":
        self.msg_contains_triggers.append("linux")
    # Functions which handle messages taking in the params (client, message)
    self.function_map = {
        "danr": handle_danr,
        "spam": handle_spam,
        "linux": lib.misc_functions.linux_saying,
        "choose": lib.misc_functions.handle_choose,
        "waifu": handle_waifu,
        "imouto": handle_waifu,
        "oneechan": handle_waifu,
        "oneesan": handle_waifu,
    }
    # Cleverbot integration: reply when the bot itself is mentioned.
    # Both mention formats (<@!id> and <@id>) map to the same handler.
    try:
        if get_config("cleverbot_integration") == "true":
            self.clever = Cleverbot(get_config("cleverbot_api_key"))
            self_mention_1 = "<@!{}>".format(self.client.user.id)
            self_mention_2 = "<@{}>".format(self.client.user.id)
            self.msg_first_word_triggers.append(self_mention_1)
            self.msg_first_word_triggers.append(self_mention_2)
            self.function_map[
                self_mention_1] = self.clever.handle_cleverbot
            self.function_map[
                self_mention_2] = self.clever.handle_cleverbot
    except Exception:
        # Best effort: a bad/missing API key must not stop startup
        print("WARNING: Error processing cleverbot integration")
    # Reminder integration ("remind" first-word trigger), also best effort
    try:
        if get_config("remind_enabled") == "true":
            self.remind = Reminder(self.client)
            self.msg_first_word_triggers.append("remind")
            self.function_map["remind"] = self.remind.handle_remind
    except Exception:
        print("WARNING: Error processing reminder integration")
    # Pull user-configured triggers from settings into the three lists
    self.add_triggers("author_triggers", self.msg_author_triggers)
    self.add_triggers("contains_triggers", self.msg_contains_triggers)
    self.add_triggers("first_word_triggers",
                      self.msg_first_word_triggers)
class Solr:
    """Thin helper around a Solr core's HTTP select/update API."""

    # Maximum number of documents fetched per query
    limit = 100
    host = get_config('solr', 'host')
    port = get_config('solr', 'port')
    core = get_config('solr', 'core')
    solr_url = 'http://' + host + ':' + port + '/solr/' + core + '/'

    @classmethod
    def facet_duplicate_doc(cls):
        """
        This function returns duplicate documents by Solr facet.
        :return: Document list
        """
        url = cls.solr_url + 'select?facet.field=TC_ID&facet=on&q=*:*&rows=0&alt=json'
        print(url)
        req = request.Request(url)
        res = request.urlopen(req)
        res = res.read()
        res = json.loads(res)
        return res['facet_counts']['facet_fields']['TC_ID']

    @classmethod
    def query_doc_list(cls, tc_id):
        """
        This function returns documents by given TC_ID
        :param tc_id: test case identifier to query on
        :return: decoded JSON response
        """
        # BUG FIX: cls.limit is an int; concatenating it directly to a
        # str raised TypeError -- it must be converted with str()
        url = (cls.solr_url + 'select?q=TC_ID:' + tc_id +
               '&rows=' + str(cls.limit) + '&alt=json')
        print(url)
        req = request.Request(url)
        res = request.urlopen(req)
        res = res.read()
        res = json.loads(res)
        return res

    @classmethod
    def delete_doc(cls, solr_id):
        """
        This function delete document by given solr id
        :param solr_id: Solr document _id to delete
        :return: raw response body
        """
        url = cls.solr_url + 'update?commitWithin=1000&overwrite=true&wt=json'
        data = """<add><delete><query>_id:"%s"</query></delete><commit/></add>""" % solr_id
        header = {'Content-type': 'text/xml; charset=utf-8'}
        req = request.Request(url=url, data=data.encode(encoding='utf-8'),
                              headers=header)
        res = request.urlopen(req)
        res = res.read()
        return res
def startServers(servers):
    """
    Ensure the 'docknarr' image exists (building it if needed) and start
    one CS:GO container per server description.

    Args:
        servers: iterable of dicts with at least 'name' and 'ip' keys
    """
    client = docker.from_env()
    cs_cfg = config.get_config('cs')
    d_cfg = config.get_config('docker')
    # Build the image on first run
    try:
        client.images.get('docknarr')
    except docker.errors.ImageNotFound:
        print("No image found, building...")
        df = './Dockerfile'
        path = os.path.dirname(df)
        client.images.build(path=path, dockerfile='Dockerfile', quiet=False)
    for s in servers:
        sname = s['name']
        ip = s['ip']
        token = getToken(sname)
        env = {
            'TICKRATE': cs_cfg['tickrate'],
            'GSLT': token,
            'MAP': cs_cfg['map'],
            'MAXPLAYERS': cs_cfg['maxplayers'],
            'MAPGROUP': cs_cfg['mapgroup'],
            'GAMEMODE': cs_cfg['gamemode'],
            'GAMETYPE': cs_cfg['gametype'],
            'HOSTNAME': sname,
            'RCONPW': cs_cfg['rcon_pw'],
            'IP': ip
        }
        vols = {
            d_cfg['demovol']: {
                'bind': '/home/cs/serverfiles/csgo/matches',
                'mode': 'rw'
            }
        }
        try:
            client.containers.get(sname)
            print("Container {} already running...".format(sname))
        # BUG FIX: was a bare `except:`; containers.get raises NotFound
        # when the container does not exist -- catch only that so real
        # API errors (and KeyboardInterrupt) are not swallowed
        except docker.errors.NotFound:
            client.containers.run(
                image='docknarr',
                name=sname,
                network_mode='host',
                entrypoint=d_cfg['entrypoint'],
                volumes=vols,
                environment=env,
                privileged=True,
                detach=True
            )
def main(args):
    """
    Build entry point: load configuration, set up logging, check out the
    versions repository, then hand off to the build manager.

    Returns:
        The build manager's exit status; 2 when the settings file cannot
        be parsed; the repository error's errno on checkout failure.
    """
    try:
        conf = config.get_config().CONF
    except OSError:
        print("Failed to parse settings")
        return 2
    log_helper.LogHelper(logfile=conf.get('default').get('log_file'),
                         verbose=conf.get('default').get('verbose'))
    try:
        version = conf.get('default').get('build_version')
        # 'latest' tracks master; anything else is treated as a tag name
        version = 'refs/heads/master' if version == 'latest' else (
            'refs/tags/' + version)
        setup_versions_repository(config.VERSIONS_REPOSITORY,
                                  config.COMPONENTS_DIRECTORY,
                                  version)
        # rediscovery software if it was not set
        conf['default']['packages'] = conf['default']['packages'] if (
            conf.get('default').get('packages')) else (
            config.discover_software())
    except exception.RepositoryError as e:
        return e.errno
    build_manager = manager.BuildManager()
    return build_manager()
def main():
    """Run the postulant validation flow, logging progress and errors."""
    logger.info(MSG_STARTING)
    try:
        app_config = get_config()
        logger.info(app_config['app']['name'])

        logger.info(MSG_GET_PAYLOAD)
        incoming = get_payload()
        if app_config['debug']['postulant']:
            logger.info(incoming)

        logger.info(MSG_GET_FIELDS_PAYLOAD)
        expected_fields = get_fieds_payload()

        logger.info(MSG_VALIDATE_PAYLOAD)
        validate_payload(expected_fields, incoming)

        validate_code_response_200(incoming['code'])
    except QACliException as error:
        logger.error(str(error))
    logger.info(MSG_ENDING)
def main():
    """Parse CLI args, read configuration and run the MIDI input handler."""
    @atexit.register
    def kthxbye():
        print("Exit")

    arg_parser = argparse.ArgumentParser(description='VoctoMIDI')
    arg_parser.add_argument(
        '--config-file',
        default=None,
        help='Add another config file to the read list.'
    )
    cli_args = arg_parser.parse_args()

    cfg = get_config(cli_args.config_file)
    server_host = cfg.get("server", "host")
    server_port = cfg.get("server", "port", fallback=9999)
    feedback = cfg.get("midi", "feedback", fallback="false").lower() == "true"
    device = cfg.get("midi", "device")
    mapping = {int(key): value for key, value in cfg.items("eventmap")}

    handler = MidiInputHandler(server_host, server_port, feedback, mapping)
    handler.init(device)
    handler.run()
def main():
    """VoctoMIDI entry point: parse args, load config, run the handler."""
    @atexit.register
    def kthxbye():
        print("Exit")

    cli = argparse.ArgumentParser(description='VoctoMIDI')
    cli.add_argument('--config-file', default=None,
                     help='Add another config file to the read list.')
    options = cli.parse_args()

    settings = get_config(options.config_file)
    server_host = settings.get("server", "host")
    server_port = settings.get("server", "port", fallback=9999)
    feedback_enabled = (
        settings.get("midi", "feedback", fallback="false").lower() == "true")
    device_name = settings.get("midi", "device")
    events = {int(num): name for num, name in settings.items("eventmap")}

    handler = MidiInputHandler(server_host, server_port, feedback_enabled,
                               events)
    handler.init(device_name)
    handler.run()
def ack():
    """
    Health-check the air conditioner API server and publish the result
    over MQTT on '<project_id>/air_conditioner_api_mqtt_agent/ack'.
    """
    config = get_config()
    # was `taeget_path` -- renamed to fix the typo (local only)
    target_path = ('http://' + config['air_conditioner_api_server']['host'] +
                   ':' + str(config['air_conditioner_api_server']['port']))
    # ping the API; any request failure just marks the API as down
    try:
        response = requests.get(f'{target_path}/ping')
        api_state = response.status_code == 200
    except requests.RequestException:
        # BUG FIX: was a bare `except:`; only network/HTTP failures
        # should be treated as "API down"
        api_state = False
    publish(
        topic='/'.join([
            config['project_id'],
            'air_conditioner_api_mqtt_agent',
            'ack'
        ]),
        message=json.dumps({
            "mqttAgent": True,
            "airConditionerApi": api_state
        }),
        qos=0,
        retain=False,
        keepalive=60,
    )
def checkout_from(cls, remote_repo_url, repo_path):
    """
    Checkout a repository from a remote URL into a local path.

    Honors the common.http_proxy configuration entry by passing svn
    proxy options on the command line.

    Raises:
        exception.RepositoryError: if the checkout command fails
    """
    LOG.info("Checking out repository from '%s' into '%s'" %
             (remote_repo_url, repo_path))
    command = 'svn checkout '
    CONF = config.get_config().CONF
    proxy = CONF.get('common').get('http_proxy')
    if proxy:
        url = urlparse.urlparse(proxy)
        host = url.scheme + '://' + url.hostname
        port = url.port
        options = ("servers:global:http-proxy-host='%s'" % host,
                   "servers:global:http-proxy-port='%s'" % port)
        proxy_conf = ['--config-option ' + option for option in options]
        command += ' '.join(proxy_conf) + ' '
    command += '%(remote_repo_url)s %(local_target_path)s' % \
        {'remote_repo_url': remote_repo_url,
         'local_target_path': repo_path}
    try:
        utils.run_command(command)
        return SvnRepository(remote_repo_url, repo_path)
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit before re-raising as a
        # RepositoryError
        message = "Failed to clone repository"
        LOG.exception(message)
        raise exception.RepositoryError(message=message)
def get_git_repository(remote_repo_url, parent_dir_path): """ Get a local git repository located in a subdirectory of the parent directory, named after the file name of the URL path (git default), updating the main remote URL, if needed. If the local repository does not exist, clone it from the remote URL. """ # infer git repository name from its URL url_parts = urlparse.urlparse(remote_repo_url) name = os.path.basename(os.path.splitext(url_parts.path)[0]) repo_path = os.path.join(parent_dir_path, name) if os.path.exists(repo_path): MAIN_REMOTE_NAME = "origin" repo = GitRepository(repo_path) if any(remote.name == MAIN_REMOTE_NAME for remote in repo.remotes): previous_url = repo.remotes[MAIN_REMOTE_NAME].url if previous_url != remote_repo_url: LOG.debug("Removing previous {name}'s repository remote with " "URL '{previous_url}'".format( name=name, previous_url=previous_url)) repo.delete_remote(MAIN_REMOTE_NAME) if not any(remote.name == MAIN_REMOTE_NAME for remote in repo.remotes): LOG.debug( "Creating {name}'s repository remote with URL '{url}'".format( name=name, url=remote_repo_url)) repo.create_remote(MAIN_REMOTE_NAME, remote_repo_url) return repo else: CONF = config.get_config().CONF return GitRepository.clone_from( remote_repo_url, repo_path, proxy=CONF.get('common').get('http_proxy'))
def setup(self):
    """Load configuration and fetch the authenticated Github user."""
    self.config = get_config()
    # NOTE(review): 'enviroment' is misspelled, but it is a config key
    # read at runtime -- fixing it requires changing the config file too.
    base_url = "{}://{}".format(
        self.config['enviroment']['prod']['protocol'],
        self.config['enviroment']['prod']['base_url'],
    )
    api = GithubApi(base_url)
    # dropped the unused `r` alias from `self.response = r = ...`
    self.response = api.get_user()
async def add_random_reaction(message: "Message") -> None:
    """
    Add a reaction sequence to a message

    Args:
        message: Relevant discord message object to add reactions for
    """
    candidates = get_config("random_reaction_values").split(",")
    chosen = random.choice(candidates)
    for emoji in chosen:
        await message.add_reaction(emoji)
def init_weixin_api(request):
    """
    Initialize the Weixin API module by logging in with the configured
    test environment.

    :param request: pytest request object carrying the --testenv option
    """
    print("\nWeixin API logining...")
    environment = request.config.getoption('--testenv')
    settings = get_config(environment)
    init.wx_api_login(settings)
    print("Initializing weixin api sets...")
def __get_sync_id(self):
    """
    Fetch the next sync id range for this handler's SYNC_TYPE.

    Returns:
        dict with 'start_id' and 'end_id', or None when there is
        nothing to sync.
    """
    # get detail sync_id
    self.oSyncId = SyncIdList().getInitSyncId(self.SYNC_TYPE)
    if self.oSyncId is None:
        return None
    # lock status (skipped under TESTING so fixtures stay untouched)
    if not get_config().TESTING:
        self.oSyncId.lockStatus(SyncIdList.STATUS_DOING)
    # set sync id
    return {'start_id': self.oSyncId.start_id,
            'end_id': self.oSyncId.end_id}
def process_call_reuqest():
    """Build the Twilio redirect that connects a caller to the on-call
    engineer's phone number."""
    conf = config.get_config()
    pagerduty = pd_api.PagerDuty(conf['pd_subdomain'], conf['pd_api_key'])
    oncall_number = pagerduty.get_oncall_phone_number()
    oncall_message = 'This is the on-call phone service. Press any key to connect'
    resp = twilio.twiml.Redirect()
    resp.body(phone_api.Twillio.contruct_twimlet_api(oncall_number,
                                                     oncall_message))
    return str(resp)
def __init__(self, name, ticker, period, live_mode, periods_needed=200):
    """
    - name: string, the name of the bot
    - ticker: string, the ticker formatted like that: ASSET1/ASSET2
    - period: string, the period on which the loop will be set, and the resolution of the candles
    - live_mode: bool, should we launch the live loop and start trading live
    - periods_needed: int, the number of candles you will get every loop, optional
    """
    self.live_mode = live_mode
    self.name = name
    self.ticker = ticker
    self.period_text = period
    self.periods_needed = periods_needed
    self.offset_seconds = 10
    # idiom fix: `x not in y` instead of `not x in y`
    if self.name not in config.get_config():
        print("❌ Cannot instantiate bot: no config entry")
        exit(1)
    self.config = config.get_config()[self.name]
    if "capitalAllowed" not in self.config:
        print("❌ Cannot instantiate bot: no 'capitalAllowed' property")
        exit(1)
    try:
        self.logger = Logger(self.name, live_mode)
    except Exception:
        # narrowed from a bare `except:`; the error is re-raised either way
        print("❌ Cannot connect to the log DB, are you sure it's running?")
        raise
    # Use a separate data namespace when backtesting
    if self.live_mode:
        self.data = Data(self.name)
    else:
        self.data = Data(self.name + "-test")
    self.exchange = Exchange(self.logger, self.data,
                             self.config['capitalAllowed'], live_mode,
                             self.ticker, self.period_text)
    try:
        self.period = period_matching[period]
    except KeyError:
        # narrowed: only an unknown period key should print this hint
        print("Available periods: 1m, 3m, 5m, 15m, 30m, 1h, 2h, 3h, 4h, 1d, 1w")
        raise
    self.logger.log("ℹ️", f"Bot {self.name} started with a period of {period}")
    self.logger.log("ℹ️", f"Capital allowed: {self.config['capitalAllowed']}%")
    self.setup()
    if self.live_mode:
        self.preloop()
def add_triggers(self, config_entry: str, trigger_array: List[str]) -> None:
    """
    Read bot triggers from a settings configuration entry and make them
    active for the bot.

    Args:
        config_entry: settings config key of the entries to read
        trigger_array: list to add the parsed triggers into
    """
    entries = get_config(config_entry).split(",")
    for entry in entries:
        try:
            kind = get_config(section=entry, key="type")
            if kind != "message":
                raise RuntimeError(
                    "Response type {} not supported".format(kind))
            self.function_map[entry] = lib.misc_functions.send_simple_message
            trigger_array.append(entry)
        except Exception:
            print("WARNING: Error adding trigger for {}".format(entry))
async def send_simple_message(message: "Message", trigger_type: str,
                              trigger: str) -> None:
    """
    Send a simple message response

    Args:
        message: Discord message object related to this request
        trigger_type: the trigger type that called this function
            ('author', 'first_word', or 'contains')
        trigger: the relevant string from the message that triggered
            this call
    """
    response_text = get_config(section=trigger, key="message")
    await message.channel.send(response_text)
def off_control() -> None:
    """Send the power-off IR command for the configured product via
    irrp.py."""
    config = get_config()
    root = config['project_root_path'].rstrip('/')
    run_air_conditioner(
        irrppy_path=f"{root}/irrp.py",
        command_file_path=f"{root}/signal/{config['product_name']}/other.json",
        gpio=config['ir_led_gpio_bcm'],
        command='off',
    )
def init_admin_api(request):
    """
    Initialize the admin API module: log in and build the shared Common
    API set.

    :param request: pytest request object carrying the --testenv option
    """
    print("\nAPI logining...")
    environment = request.config.getoption('--testenv')
    settings = get_config(environment)
    init.api_login(settings)
    print("Initializing api sets...")
    Config.COMMON = Common()
async def db():
    """
    Async test fixture: yield a Mongo database backed by the test
    config, dropping the database before and after use so every test
    starts clean.
    """
    app = init_app()
    app['config'] = get_config(TEST_CONFIG_PATH)
    await init_mongo(app)
    db = app['db']
    # drop before the test in case a previous run left data behind
    await db.command('dropDatabase', 1)
    yield db
    # drop after so nothing leaks into the next test
    await db.command('dropDatabase', 1)
def main(): """Our main function that does all the work""" new_files = [] c = get_config() ser = series() for s in ser: # load the correct module provider = 'Providers.%s' % s['provider'] __import__(provider) episodes = sys.modules[provider].episodes print '\n' print 'Searching for episodes in %s using provider %s' % (s['name'], s['provider']) epis = episodes(s) new = new_episodes(epis, c['cachefile'], s['name']) for ep in new: # cgecj iyt data us wgat we expect if type(ep) != tuple: raise Exception('Your regular expression needs to pull a URL and unique episode number') print 'Found new episode!', ep # extract our ep_number and torrent url for e in ep: if e.isdigit(): ep_number = e elif not e.isdigit(): torrent = e # add prefix if we have it configured try: tor = '%s%s' % (s['prefix'], torrent) except KeyError, e: tor = torrent # some sites tend to use ampersand urls tor = tor.replace('amp;', '') # download our torrent file file_name = '%s-%s%s' % (s['name'], ep_number, tor, c['download_path']) print 'Attempting to Download %s' % file_name download = download_torrent(s['name'], ep_number, tor, c['download_path']) print download add_cache(c['cachefile'], (torrent, s['name'] + ep_number)) newfiles.append(file_name)
def __getInitStrateryData(self):
    """
    Fetch pending strategy-request rows and lock them for processing.

    Returns:
        The list of strategy rows, or None when there is nothing to
        process or the lock could not be acquired.
    """
    # fetch pending rows from the strategy request table
    strategyData = StrategyRequest().getInitData()
    dataLen = len(strategyData)
    if dataLen == 0:
        logger.info("no strategy data to deal with")
        return None
    # mark the rows as locked for cleaning (skipped in DEBUG mode)
    if not get_config().DEBUG:
        res = StrategyRequest().lock(strategyData, 1)
        if not res:
            logger.info("strategy lock fail")
            return None
    logger.info("there's %s data to deal with" % dataLen)
    return strategyData
def createSignByMd5(params):
    """
    Sign a parameter dict: MD5 the sorted 'k=v&k=v' string, then MD5 the
    first 30 hex chars concatenated with the configured SECRET_KEY.

    :param params: dict of string keys/values to sign
    :return: hex digest signature string
    """
    ordered = sorted(params.items(), key=lambda kv: kv[0])
    params_str = '&'.join('='.join(pair) for pair in ordered)

    first_pass = hashlib.md5()
    first_pass.update(params_str.encode('utf-8'))
    digest = first_pass.hexdigest()

    secret = get_config().SECRET_KEY
    second_pass = hashlib.md5()
    second_pass.update((digest[:30] + secret).encode('utf-8'))
    return second_pass.hexdigest()
def control_by_temperature(type_: str, temperature: int,
                           airflow_level: str) -> None:
    """Send a '<type_>' IR command at the given temperature and airflow
    level via irrp.py."""
    _type_validation(type_)
    _airflow_level_validation(airflow_level)
    config = get_config()
    root = config['project_root_path'].rstrip('/')
    run_air_conditioner(
        irrppy_path=f"{root}/irrp.py",
        command_file_path=f"{root}/signal/{config['product_name']}/{type_}.json",
        gpio=config['ir_led_gpio_bcm'],
        command=f'{temperature}:{airflow_level}',
    )
def conf(request):
    '''
    Global configuration fixture.

    On first use (flag unset) the environment and both API modules are
    initialized; later calls just return the parsed configuration.

    :param request: pytest request object carrying CLI options
    '''
    flag = Config.get_flag()
    env = request.config.getoption('--testenv')
    cfg = get_config(env)
    if flag == 1:
        # already initialized by an earlier invocation
        return cfg
    else:
        cfg = init_conf(request)
        init_admin_api(request)
        init_weixin_api(request)
        #cfg.Ui = Ui(cfg)
        #cfg.Api = Api(cfg)
        #cfg.Param = Param(cfg)
        return cfg
def dehumidify_control(dehumidification_level: int,
                       airflow_level: str) -> None:
    """Send a dehumidify IR command at the given level and airflow via
    irrp.py."""
    _dehumidification_level_validation(dehumidification_level)
    _airflow_level_validation(airflow_level)
    config = get_config()
    root = config['project_root_path'].rstrip('/')
    level_cmd = DEHUMIDIFICATION_LEVELS[dehumidification_level]
    run_air_conditioner(
        irrppy_path=f"{root}/irrp.py",
        command_file_path=(
            f"{root}/signal/{config['product_name']}/dehumidify.json"),
        gpio=config['ir_led_gpio_bcm'],
        command=f'{level_cmd}:{airflow_level}',
    )
def main():
    """Fetch a web page, extract its readable content and save it to
    disk."""
    arg_parser = argparse.ArgumentParser(
        description='Collect useful information from given url.')
    arg_parser.add_argument('url', help='Url of a web page.')
    parsed = arg_parser.parse_args()

    # load content by given url
    response = requests.get(parsed.url)
    response.raise_for_status()

    # extract useful data
    reader = Reader(get_config())
    pretty_content = reader.from_string(response.text)

    # save retrieved info
    file_dst = save_by_url(parsed.url, pretty_content)
    print('Useful content save to: "{}".'.format(file_dst))
def run_boss_task():
    """
    Scrape zhipin.com job listings for each category name, paging
    through results until there is no next page.
    """
    search_name = get_category_name()
    driver_path = config.get_config("selenium", "web_driver")
    driver = webdriver.Chrome(driver_path)
    driver.get("https://www.zhipin.com/shenzhen/")
    try:
        # wait for the page's search input to load
        WebDriverWait(driver, 10).until(
            EC.presence_of_element_located(
                (By.XPATH, '//*[@class="ipt-wrap"]/input')))
        for i in search_name:
            search_input = driver.find_element_by_xpath(
                '//*[@class="ipt-wrap"]/input')
            search_input.clear()
            search_input.send_keys(i)
            driver.find_element_by_xpath(
                '//button[@class="btn btn-search"]').click()
            # fetch the data, clean it => store it => click through to
            # the next page in a loop
            while True:
                WebDriverWait(driver, 10).until(
                    EC.presence_of_element_located(
                        (By.XPATH, '//*[@class="ipt-wrap"]/input')))
                extract_params(driver, i)
                try:
                    print("点击-----")
                    href = driver.find_element_by_xpath(
                        '//div[@class="page"]/a[@ka="page-next"]'
                    ).get_attribute("href")
                    # an href of "javascript:;" means the next-page link
                    # is disabled, i.e. this was the last page
                    if href == "javascript:;":
                        raise Exception("没有下一页了")
                    driver.find_element_by_xpath(
                        '//div[@class="page"]/a[@ka="page-next"]').click()
                except Exception as e:
                    print("没有下一页了")
                    break
    except Exception as e:
        print(e)
    finally:
        print("退出操作")
def off():
    """Turn the air conditioner off via the API server and publish the
    resulting state over MQTT."""
    config = get_config()
    server = config['air_conditioner_api_server']
    target_path = 'http://' + server['host'] + ':' + str(server['port'])
    # off request
    response = requests.delete(f'{target_path}/off')
    publish(
        topic='/'.join(
            [config['project_id'], 'air_conditioner_api_mqtt_agent',
             'state']),
        # TODO get correct state
        message=json.dumps({
            'isRunning': False,
            'type': 'cool',
            'temperature': 26,
            'dehumidificationLevel': None,
            'airflowLevel': '1'
        }),
        qos=0,
        retain=True,
        keepalive=60,
    )
def _getDomain(self, create_time): ''' 获取域名 @param datetime $create_time 时间格式 @return str 域名 ''' #@todo from lib.config import get_config cf = get_config() if cf.TESTING: return 'http://182.92.80.211:8091' now = datetime.now() ta = now - create_time if ta.days > 1: #domain = "http://124.193.149.180:8100" #domain = "http://123.207.141.180" domain = "http://10.139.36.194" else: domain = "http://open.xianhuahua.com" return domain
def init_conf(request):
    '''
    Initialize the test environment from pytest CLI options and record
    them on the global Config object.

    :param request: pytest request object carrying CLI options
    :return: parsed configuration for the selected environment
    '''
    print("\nInitializing environment...")
    env = request.config.getoption('--testenv')
    run_flag = request.config.getoption('--runflag')
    #debug = request.config.getoption('--mydebug')
    debug_level = request.config.getoption('--dbg')
    run_author = request.config.getoption('--author')
    print("DEBUG_LEVEL:%s"% debug_level)
    print("RUN_AUTHOR:%s"% run_author)
    cfg = get_config(env)
    # mark initialization done so later fixture calls skip this setup
    Config.set_flag(1)
    Config.ENV = env
    Config.RUN_AUTHOR = run_author
    #os.environ["RUNFLAG"]=run_flag
    Config.RUN_FLAG = run_flag
    Config.DEBUG_LEVEL = debug_level
    print("Env:%s" % env)
    return cfg
def _hg_download(source, directory):
    """
    Clones a mercurial [source] to [directory] and returns a source dict.

    The revision cloned is the source's commit_id when set, otherwise its
    branch. Honors the common.http_proxy config entry and retries the
    clone on timeouts.

    Raises:
        ValueError: when neither commit_id nor branch is provided
    """
    CONF = config.get_config().CONF
    proxy = CONF.get('common').get('http_proxy')
    hg_source = source['hg']
    repo_name = os.path.basename(hg_source['src'])
    dest = os.path.join(directory, repo_name)
    command = 'hg '
    commit_id = hg_source.get('commit_id')
    branch = hg_source.get('branch')
    if commit_id is None and branch is None:
        raise ValueError('invalid hg source dict: missing both `commit_id` '
                         'and `branch`')
    # BUG FIX: the command used hg_source['branch'] unconditionally, so a
    # source specifying only commit_id cloned the literal revision "None"
    revision = commit_id if commit_id is not None else branch
    command += 'clone "{}" -r "{}" "{}" '.format(hg_source['src'],
                                                 revision,
                                                 dest)
    if proxy:
        command += '--config http_proxy.host="{}" '.format(proxy)
    # the remaining "{}" placeholder is filled with the ssh timeout below
    command += "--ssh '/usr/bin/env ssh -o ConnectTimeout={}'"

    def _clone_repository(timeout):
        cmd = command.format(timeout)
        utils.run_command(cmd)

    def _is_timeout_error(exc):
        if not isinstance(exc, exception.SubprocessError):
            return False
        return ('timed out' in exc.stdout or 'timed out' in exc.stderr)

    utils.retry_on_timeout(_clone_repository,
                           is_timeout_error_f=_is_timeout_error,
                           initial_timeout=60)
    source['hg']['dest'] = dest
    return source
def __init__(self, client: "Client"):
    """
    Constructor for the reminder client

    Args:
        client: Ready Discord client object

    Raises:
        RuntimeError when passed discord client is not ready
    """
    if not client.is_ready():
        raise RuntimeError(
            "Discord client passed into Reminder client was not ready for use"
        )
    self.client = client
    self.event_loop = asyncio.get_event_loop()
    # Interval (from config) controlling how often reminders are saved
    self.save_time = int(get_config("remind_save_time"))
    self.file = os.path.join(os.getcwd(), "reminders.bin")
    # Create the reminders file if it doesn't exist
    if not os.path.exists(self.file):
        open(self.file, "a").close()
    self.init_from_file()
    # Background thread running this instance's thread_loop
    self.runner = threading.Thread(target=self.thread_loop)
    self.runner.start()
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Copy of Data Commons Python Client API Core without pandas dependency.""" import base64 import collections import json import logging import zlib import urllib import requests import lib.config as libconfig cfg = libconfig.get_config() API_ROOT = cfg.API_ROOT # --------------------------------- CONSTANTS --------------------------------- # REST API endpoint paths API_ENDPOINTS = { 'query': '/query', 'translate': '/translate', 'search': '/search', 'get_property_labels': '/node/property-labels', 'get_property_values': '/node/property-values', 'get_triples': '/node/triples', 'get_places_in': '/node/places-in', 'get_place_obs': '/bulk/place-obs',
# Global namespace import os import sys from qtflow import get_flowcontrol from instruments import get_instruments from lib import config as _config from data import Data from scripts import Scripts, Script config = _config.get_config() data = Data.get_named_list() instruments = get_instruments() frontpanels = {} sliders = {} scripts = Scripts() flow = get_flowcontrol() msleep = flow.measurement_idle mstart = flow.measurement_start mend = flow.measurement_end def version(): version_file = os.path.join(config['execdir'], 'VERSION') try: f = file(version_file,'r') str = f.readline() str = str.rstrip('\n\r') f.close() except:
from functools import total_ordering import fcntl import logging import os import urllib2 import yaml from lib import config from lib import exception from lib import package_source from lib import repository from lib import utils from lib.versions_repository import get_versions_repository from lib.constants import REPOSITORIES_DIR CONF = config.get_config().CONF LOG = logging.getLogger(__name__) BUILD_CACHE_DIR = os.path.join(CONF.get('work_dir'), "cache") PACKAGES_REPOS_TARGET_PATH = os.path.join(CONF.get('work_dir'), REPOSITORIES_DIR) @total_ordering class Package(object): __created_packages = dict() @classmethod def get_instance(cls, package_name, *args, **kwargs): """
Contains: - a data class (HDF5Data) which is essentially a wrapper of a h5py data object, adapted for usage with qtlab - name generators in the style of qtlab Data objects - functions to create standard data sets """ import gobject import os import time import h5py import logging import numpy as np from lib.config import get_config config = get_config() in_qtlab = config.get('qtlab', False) from lib.network.object_sharer import SharedGObject, cache_result if in_qtlab: import qt import data class DateTimeGenerator(data.DateTimeGenerator): def new_filename(self, data_obj): base, ext = os.path.splitext(data.DateTimeGenerator.new_filename( self, data_obj)) return base + '.hdf5'
import tempfile

# create universal qkit-config. Every module in qkit can rely on these
# entries to exist; this is independent from qtlab
cfg = {}
cfg['datadir'] = tempfile.gettempdir()
cfg['qtlab'] = False
cfg['plot_engine'] = 'qkit.gui.qviewkit_2.main'

# if qtlab is used (qt_cfg exists and qt_cfg['qtlab']): qkit config
# entries are overridden by the qtlab ones
try:
    from lib.config import get_config
    qt_cfg = get_config()
    in_qt = qt_cfg.get('qtlab', False)
except ImportError:
    in_qt = False

if in_qt:
    for entry in qt_cfg.get_all():
        if entry in cfg.keys():
            cfg[entry] = qt_cfg[entry]

# there can also be a local config file for qkit (qkit/config/local.py)
# with variable cfg_local = {...}
try:
    from qkit.config.local import cfg_local
    # BUG FIX: `cfg_local.iterkeys()` is Python 2 only and raises
    # AttributeError on Python 3, which is NOT caught by the
    # `except ImportError` below; iterating the dict works on both
    for entry in cfg_local:
        cfg[entry] = cfg_local[entry]
except ImportError:
    pass
logging.info('Closing client') qt.config.save(delay=0) try: gtk.main_quit() except: pass sys.exit() if __name__ == "__main__": srv = share_gtk.start_client(args.host, port=args.port, nretry=60) logging.debug('Connected to %s', srv.get_instance_name()) # Be sure to talk to the qtlab instance that we just connected to if srv: import lib.config as cfg cfg.get_config()['instance_name'] = srv.get_instance_name() # Close when requested flow = objsh.helper.find_object('%s:flow' % srv.get_instance_name()) flow.connect('close-gui', _close_gui_cb) # Or if disconected objsh.helper.register_event_callback('disconnected', _close_gui_cb) if args.module: logging.info('Importing module %s', args.module) __import__(args.module, globals()) if args.disable_io: os.close(sys.stdin.fileno())
# qtclient.py, module that should replace qt.py in clients # The name of this file might be somewhat confusing due to the existence # of client_qt.py which contains code to start a client based on the Qt # toolkit. from lib.network.object_sharer import helper import time import types from lib import config config = config.get_config() class constants(): FLAG_GET = 0x01 FLAG_SET = 0x02 FLAG_GETSET = 0x03 FLAG_GET_AFTER_SET = 0x04 FLAG_SOFTGET = 0x08 FLAG_PERSIST = 0x10 flow = helper.find_remote_object('%s:flow' % config['instance_name']) if flow is None: flow = helper.find_remote_object('flow') if flow is None: raise ValueError('Unable to locate qt.flow object (%s), client failed to start' % config['instance_name']) else: print 'Connected to undefined qtlab instance' for i in range(100): status = flow.get_status() if not (status is None or status == "starting"):
# but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import logging import os from lib import config from lib import exception from lib.constants import REPOSITORIES_DIR from lib.package import Package CONF = config.get_config().CONF LOG = logging.getLogger(__name__) class PackagesManager(object): def __init__(self, packages_names): self.packages_names = packages_names self.packages = [] def prepare_packages(self, packages_class=Package, download_source_code=True, **packages_keyword_args): """ Load packages data and optionally download files. Use packages keyword args parameter to pass extra parameters to
from lib.network.object_sharer import helper
import time
import types
from lib import config
config = config.get_config()

class constants():
    # Bitmask flags for instrument-parameter capabilities
    FLAG_GET = 0x01
    FLAG_SET = 0x02
    FLAG_GETSET = 0x03
    FLAG_GET_AFTER_SET = 0x04
    FLAG_SOFTGET = 0x08
    FLAG_PERSIST = 0x10

# Wait (up to ~200s) for the remote flow object to leave the "starting"
# state before looking up the rest of the shared objects
flow = helper.find_object('%s:flow' % config['instance_name'])
for i in range(100):
    status = flow.get_status()
    if not (status is None or status == "starting"):
        break
    print 'Status: %r, waiting...' % status
    time.sleep(2)

# Proxies for the remote instrument/plot/data collections and the
# remote python interpreter, keyed by the configured instance name
instruments = helper.find_object('%s:instruments1' % config['instance_name'])
plots = helper.find_object('%s:namedlist_plot' % config['instance_name'])
data = helper.find_object('%s:namedlist_data' % config['instance_name'])
interpreter = helper.find_object('%s:python_server' % config['instance_name'])
frontpanels = {}
sliders = {}
from pprint import pprint
from datetime import datetime
import pdb


def delete_paragraph(paragraph):
    """Remove *paragraph* from its python-docx document in place."""
    # detach the underlying lxml element from its parent
    p = paragraph._element
    p.getparent().remove(p)
    # drop the back-references so the orphaned element can be collected
    p._p = p._element = None


if __name__ == '__main__':
    now = datetime.now().strftime('%Y%m%d_%H%M')
    cfg = get_config("config.yml")
    # pprint(cfg)
    # Zabbix connection settings
    server = cfg.get("zabbix_server")
    username = cfg.get("zabbix_user")
    password = cfg.get("zabbix_pw")
    # report inputs: graph directory, time window and docx template
    graph_path = cfg.get("graph_dir")
    period = cfg.get('period')
    stime = cfg.get('start_time')
    template_name = cfg.get('template_name')
    client_name = template_name.split('.')[0]
    template = cfg.get('template_dir') + template_name
    output_path = cfg.get('output_dir')
    # output file is "<client>_<timestamp>.docx"
    output_name = client_name + '_' + now + '.docx'
save_checkpoint(os.path.join( path2log, 'model_{}.pt'.format(total_it)), epoch=epoch_it, it=total_it, model=model, optimizer=optimizer, scheduler=scheduler, config=cfg, best_val=val_losses['total_loss']) # After the epoch if finished update the scheduler scheduler.step() # Quit after the maximum number of epochs is reached logger.info( 'Training completed after {} Epochs ({} it) with best val metric ({})={}' .format(epoch_it, it, model_selection_metric, metric_val_best)) if __name__ == "__main__": # Parse the command line arguments parser = argparse.ArgumentParser() parser.add_argument('config', type=str, help='Path to the config file.') args = parser.parse_args() # Combine the two config files cfg = config.get_config(args.config, 'configs/default.yaml') main(cfg, args.config)
from terminaltables import AsciiTable from exceptions import NotImplementedError, StandardError from lib import config as cfg from lib import init from lib.service import Service, Service_tree from lib.stack import Stack #from lib.tree import Tree from lib.container import Container from lib.endpoint import Endpoint #from lib.stage import Stage from docker.errors import APIError import lib.lib_docker as docker config = cfg.get_config() logging = cfg.get_logger() rules = cfg.get_rules_config() session = init.init() # Service functions def create_service(name): """Creates a new service Arguments: name: Name of the new service Constraints: name: unique """ # HERE NEEDS CHECK OF STATE