def create_main_window(self): config = Config.load() w = QtGui.QWidget() w.setWindowTitle('Dota Replay Uploader 0.1') w.setWindowFlags(QtCore.Qt.Dialog) w.setMinimumSize(W, H) w.setMaximumSize(W, H) l = QtGui.QVBoxLayout() self.status_box = sb = QtGui.QListWidget(w) sb.setWrapping(True) l.addWidget(sb, 1) b = QtGui.QPushButton("&Start TFT...", w) QtCore.QObject.connect( b, QtCore.SIGNAL('clicked()'), self.start_tft) l.addWidget(b) self.cb_autostart = cb = QtGui.QCheckBox( w.tr("Start &automatically"), w) cb.setChecked(Config.get('tft_autostart', False)) QtCore.QObject.connect( cb, QtCore.SIGNAL('clicked()'), self.trigger_autostart) l.addWidget(cb) w.setLayout(l) return w
def paintEvent(self,e): qp=QtGui.QPainter() qp.begin(self) qp.setRenderHints(QtGui.QPainter.Antialiasing | QtGui.QPainter.SmoothPixmapTransform) qp.setPen(QtGui.QColor(int(self.r),int(self.g),int(self.b))) qp.setBrush(QtGui.QColor(int(self.r),int(self.g),int(self.b))) qp.drawRoundedRect(QtCore.QRectF(10,0,self.width-10,self.height),2,2) qp.setPen(QtGui.QColor(250,250,250,255)) qp.setBrush(QtGui.QColor(250,250,250,255)) qp.drawRect(9,0,5,self.height) icon=QtGui.QIcon("/usr/share/duck-launcher/icons/win.svg") icon.paint(qp, -10,self.size/2-10, 20,20) # qp.setFont(QtGui.QFont(Config.get()["font"],12)) t_rect=QtCore.QRectF(10,0,self.width-10,30) qp.drawText(t_rect,QtCore.Qt.AlignCenter,self.app["name"]) qp.drawLine(26,30,self.width-20,30) #open if self.drawButtonRect==True and self.buttonRect!=None: qp.setPen(QtGui.QColor(0,0,0,0)) qp.setBrush(QtGui.QColor(254,254,255,60)) qp.drawRect(self.buttonRect) qp.setPen(QtGui.QColor(10,10,10,145)) qp.setFont(QtGui.QFont(Config.get()["font"],10)) o_rect=QtCore.QRectF(50,30,self.width-10,30) qp.drawText(o_rect,QtCore.Qt.AlignVCenter,"Open") removeIcon=QtGui.QIcon("/usr/share/duck-launcher/icons/open.svg") removeIcon.paint(qp, 25,34,20,20) #remove qp.setPen(QtGui.QColor(12,10,10,140)) r_rect=QtCore.QRectF(50,60,self.width-10,30) qp.drawText(r_rect, QtCore.Qt.AlignVCenter,"Remove") removeIcon=QtGui.QIcon("/usr/share/duck-launcher/icons/remove.svg") removeIcon.paint(qp, 25,64,20,20)
def run_clerk(self): """ This function deploys one test per user by creating an executable string which is sent to the RabbitMQ instance. :return: :rtype: """ math = WebUseMath() config = Config() newconfig = config.init_db_config() grouplist = newconfig.get_account().get_groups() path = newconfig.get_script_path() logging.info("Interval: " + str(newconfig.get_interval())) positiondict = {} while True: for i in grouplist: userconfig = self.interpreterServer.getUserConfig(i, "couchdb") ip = userconfig["ipaddress"] worklist = [] tenant_name = userconfig["tenant_name"] executable_string = "/root/uptime_challenge_master/testscript/clerk.pl -n " + tenant_name worklist.append(executable_string) groupdict = {} groupdict.update({i: worklist}) logging.critical(str(i) + " " + str(worklist)) newconfig.create_work_queue(newconfig.get_queue_name(), groupdict) worklist = [] queue = Queues() queue.receive_one_message_from_q("clerk_reportq", str(newconfig.get_interval()))
def convert (fin, fout): n = 1 cin = Config(fin) # Sort the vservers vservers = list(cin['vserver']) vservers.sort(domain_cmp) for vserver in vservers: if vserver == 'default': prio = 1 else: n += 1 prio = n # Rename the virtual server entry to = "vserver!%03d" % (prio) orig = "vserver!%s" % (vserver) cin.rename (orig, to) # Add the 'nick' and 'domain' properties cin['%s!nick'%(to)] = vserver if not cin['%s!domain'%(to)]: cin['%s!domain!1'%(to)] = vserver save_result (str(cin), fout)
def createCanvas() : pygame.init() Config.loadConfig() #,pygame.FULLSCREEN|pygame.HWSURFACE,32 canvas = pygame.display.set_mode((800,600)) menu1(canvas) return
def loadWorkingDirectoryClicked(self): conf = Config.getConfig() newDir = QtGui.QFileDialog.getExistingDirectory(self, "Choose Working Directory", conf["DefaultDesignDir"], QtGui.QFileDialog.DontResolveSymlinks) if (len(newDir) <= 0): return self.workingDir = WorkingDir(newDir) if self.workingDir.exists is False: QtGui.QMessageBox.warning(self, "Not a Design Working Directory", "The directory you have selected is not a " "Wavellite working directory.") return self.txtWorkingDir.setText(self.workingDir.fileDir) self.txtDeviceName.setText(self.workingDir.device["Name"]) self.actionAdd_Design_Sub_Directories.setEnabled(True) self.actionCopy_Templates.setEnabled(True) self.actionCreate_Wafer.setEnabled(True) self.actionCreate_Unit.setEnabled(True) self.btnOpenWorkingDir.setEnabled(True) self.actionZip_it_Up.setEnabled(True) self.actionDesignAttributes.setEnabled(True) conf["LastWorkingDir"] = self.workingDir.fileDir Config.saveConfig(conf)
def fetch_config(self): """ This function is the main method where everything is governed on the manager. :return: :rtype: """ math = WebUseMath() config = Config() newconfig = config.init_db_config() grouplist = newconfig.get_account().get_groups() path = newconfig.get_script_path() logging.info("Interval: " + str(newconfig.get_interval())) positiondict = {} while True: for i in grouplist: userconfig = self.interpreterServer.getUserConfig(i, "couchdb") ip = userconfig["ipaddress"] worklist = [] worklist = [{"ip": ip, "sentance": userconfig["Sentance"], "filepath": userconfig["filepath"], "file": userconfig["file"], "timestamp": time.time()}] groupdict = {} groupdict.update({i: worklist}) logging.critical(str(i) + " " + str(worklist)) newconfig.create_work_queue(newconfig.get_queue_name(), groupdict) worklist = [] queue = Queues() queue.receive_one_message_from_q("purser_report_q", str(newconfig.get_interval()))
def __init__(self, name): if not AbstractCheck.known_checks.get(name): AbstractCheck.known_checks[name] = self self.name = name self.verbose = False self.network_enabled = Config.getOption("NetworkEnabled", False) self.network_timeout = Config.getOption("NetworkTimeout", 10)
def onReadWeather(self, reqPkt): isError = False rspParams = [] stationParamList = reqPkt.getParamByName("StationName") if stationParamList == None: rspParams = [wsp.ParameterLine( "ErrorDesc", "Cannot find StationName")] print (" ** No station name found") return (True, rspParams) stationName = stationParamList[0].getValue().capitalize() if Config.getStations().has_key(stationName): station = Config.getStations() [stationName] print (station) else: print " ** Cannot find %s in %s" % (stationName, Config.getStations()) rspParams = [wsp.ParameterLine( "ErrorDesc", "Unknown station: %s" % stationName)] return (True, rspParams) curTemp = curPres = curHumi = airIndex = None try: curTemp = station.getTemperature() if (curTemp == None): paramLine=wsp.ParameterLine("ErrorDesc.Temperature", "no sensor data") isError = True else: paramLine=createParamLine(" Temperature=", "%.3f", curTemp) except Probes.ProbeError, err: paramLine = wsp.ParameterLine("ErrorDesc.Temperature", err.strerror) isError = True
def SetAllSoundFxObjectVolumes(self, volume = None): #MFH - single function to go through all sound objects (and iterate through all sound lists) and set object volume to the given volume #MFH TODO - set every sound object's volume here... if volume is None: self.sfxVolume = Config.get("audio", "SFX_volume") self.crowdVolume = Config.get("audio", "crowd_volume") volume = self.sfxVolume self.starDingSound.setVolume(volume) self.bassDrumSound.setVolume(volume) self.T1DrumSound.setVolume(volume) self.T2DrumSound.setVolume(volume) self.T3DrumSound.setVolume(volume) self.CDrumSound.setVolume(volume) for s in self.acceptSounds: s.setVolume(volume) for s in self.cancelSounds: s.setVolume(volume) #self.cancelSounds.setVolume(volume) self.rockSound.setVolume(volume) self.starDeActivateSound.setVolume(volume) self.starActivateSound.setVolume(volume) self.battleUsedSound.setVolume(volume) self.rescueSound.setVolume(volume) self.coOpFailSound.setVolume(volume) self.crowdSound.setVolume(self.crowdVolume) self.starReadySound.setVolume(volume) self.clapSound.setVolume(volume) self.failSound.setVolume(volume) self.starSound.setVolume(volume) self.startSound.setVolume(volume) self.selectSound1.setVolume(volume) self.selectSound2.setVolume(volume) self.selectSound3.setVolume(volume)
def DownloadConfig(self, frset, frnode): """ Upload configuration set or single configuration file to the specified configuration set. 'frset' is the configuration set from whioch data is downloaded 'frnode' is the node whose configuration is to be downloaded as a simple XML file, or "(None)" is the entire configuration set is to be downloaded as a ZIP file. """ if frnode == "(None)": frnodes = Config.listNodes(frset) zipstr = StringIO.StringIO() zipfil = zipfile.ZipFile(zipstr, mode='w') for n in frnodes: dattim = time.localtime()[0:6] zipinf = zipfile.ZipInfo(filename=n+".xml", date_time=dattim) zipfil.writestr(zipinf, Config.read(frset, n)) zipfil.close() cherrypy.response.headerMap.update( {"Content-Type": "application/octet-stream", "Content-disposition": "attachment; filename=%s.zip"%frset}) result = zipstr.getvalue() zipstr.close() return result else: data = Config.read(frset, frnode) cherrypy.response.headerMap.update( {"Content-Type": "application/xml", "Content-disposition": "attachment; filename=%s.xml"%frnode}) return data
def MoveConfig(self, frset, frnode, toset, tonode): """ Move/rename node configuration on gateway server """ if frset == toset and frnode == tonode: return self.wbConfigError("Cannot move node configuration to itself") # Check target node, and ensure it is a 3-digit string if not WebBrickCfgEdit.NodeNumRegex.match(tonode): return self.wbConfigError( "Invalid target node %s (must be a number)"%(tonode)) n = int(tonode) if n <= 0 or n > 255: return self.wbConfigError( "Invalid target node %n (must be in range 1-255)"%(n)) tonode = formatInt("%03d")(n) if Config.exists(toset, tonode): return self.wbConfigError( "Invalid target node %s/%s already exists"%(toset,tonode)) # Now copy err = Config.move(frset, frnode, toset, tonode) if err: return self.wbConfigError(err) return self.wbConfigMessage( "Moved WebBrick configuration %s/%s to %s/%s" % (frset, frnode, toset, tonode))
def main(): # Read the command line parameter try: cfg_file = sys.argv[1] except: print "Incorrect parameters: %s CONFIG_FILE" % (sys.argv[0]) raise SystemExit # Parse the configuration file cfg = Config(cfg_file) # Update the configuration file if needed ver_config = int (cfg.get_val('config!version', '000099028')) ver_release = int (config_version_get_current()) print "Upgrading '%s' from %d to %d.." % (cfg_file, ver_config, ver_release), # Convert it updated = config_version_update_cfg (cfg) print ["Not upgraded.", "Upgraded."][updated] # Save it if updated: print "Saving new configuration..", cfg.save() print "OK"
def POST(self, reqpath): # check request path if reqpath not in ['raspi', 'Raspi', 'RasPi']: raise cherrypy.HTTPError(405, 'Unsupported request path') #return 'TestMode' # get incoming XML incomingXML = getxmlreq() cherrypy.session['xmlreq'] = incomingXML if (Config.debug()): print("Incoming request xml =%s" %incomingXML) # build the response XML outgoingXML = setxmlres(incomingXML) # build cherrypy POST response cherrypy.response.status = '201 Created' cherrypy.response.headers['Location'] = '/' + reqpath if (Config.debug()): print("Outgoing response xml =%s" % outgoingXML) return outgoingXML
def __init__(self, naubino = None, pos = (0, 0), name = None): super(Naub, self).__init__() self.bind(time = lambda *_: self.property("pos").dispatch(self)) self.register_event_type('on_remove') self.register_event_type('on_remove_merged') mass = Config.naub_mass() radius = self.radius inertia = pymunk.moment_for_circle(mass, radius, radius) body = pymunk.Body(mass, inertia) body.position = pos body.data = self shape = pymunk.Circle(body, radius) shape.friction = Config.naub_friction() shape.elasticity = Config.naub_elasticity() self.alive = True self.tag = None # fill with whatever you like self.body = body self.shape = shape self.cycle_check = 0 self.cycle_number = 0 self.pointer_joints = {} self.naubs_joints = {} self.__naubino = None self.name = name or id(self) self.naubino = naubino
def __init__(self, owner, name, number): self.logClassInits = Config.get("game", "log_class_inits") if self.logClassInits == 1: Log.debug("Player class init (Player.py)...") self.owner = owner self.controls = Controls() self.reset() self.playerstring = "player" + str(number) self.whichPart = Config.get(self.playerstring, "part") self.bassGrooveEnableMode = Config.get("game", "bass_groove_enable") self.currentTheme = 1 #MFH - need to store selected practice mode and start position here self.practiceMode = False self.practiceSection = None self.startPos = 0.0 self.hopoFreq = None self.stars = 0 self.totalStreakNotes = 0 self.totalNotes = 0
def startup(): host = Config.get('Server','IP') port = Config.getint('Server','Listen') signal.signal(signal.SIGTERM, sig_handler) signal.signal(signal.SIGINT, sig_handler) pid = os.getpid() f = open('server.pid', 'wb') f.write(str(pid)) f.close() application = tornado.web.Application([ (r"/s/(.*)", ShortUrlHandler), (r"/api/(.*)", APIHandler), (r"/static/(.*)", tornado.web.StaticFileHandler, {"path": "./static"}), ]) global http_server print "Startup server on %s:%d, PID:%d ..." % (host,port,pid) http_server = tornado.httpserver.HTTPServer(application) http_server.bind(port, host) http_server.start(num_processes = Config.getint('Server','NumProcesses')) # tornado将按照cpu核数来fork进程 tornado.ioloop.IOLoop.instance().start()
def __init__(self, asch): self.asch = asch # asynScheduler print ("ProbePoller:__init__(): set freq %f sec" % Config.getPollFreq_s()) self.myTimer = asch.createTimer('ProbePoller', self) self.myTimer.startTmr(Config.getPollFreq_s(), True)
def main (argv): # set dynamic defaults Config.stat_year = time.gmtime (time.time())[0] # process command line arguments files = Config.parse_args (argv[1:]) if not files: print >>sys.stderr, '%s: %s' % (sys.argv[0], 'missing input files') print >>sys.stderr, Config.usage_help() sys.exit (1) # read, parse and sort input import LogParser print >>sys.stderr, '%s: sorting %u files...' % (sys.argv[0], len (files)) sort_pool = LogParser.log_file_sort_pool (files) print >>sys.stderr, '%s: parsing %u sorted files...' % (sys.argv[0], len (sort_pool)) lparser = LogParser.log_file_parse_pool (sort_pool) # collect statistics stats = Statistics.Statistics (int (Config.stat_year)) import TopVisits, DailyVisits, GeoHour stats.gauges += [ TopVisits.TopVisits (stats), DailyVisits.DailyVisits (stats), GeoHour.GeoHour (stats) ] stats.walk_hits (lparser) print >>sys.stderr, '%s: generating report...' % sys.argv[0] stats.done() # generate report print "Hits:\t%s" % stats.hits print "Visits:\t%s" % stats.visits destdir = './logreport' if not os.path.isdir (destdir) or not os.access (destdir, os.X_OK): try: os.mkdir (destdir) except OSError, ex: die (5, "failed to create or access directory %s: %s" % (destdir, ex.strerror))
def close_task(): counter = 0 while Config.is_element_present(Config.task_closeButton): try: Config.find_element(Config.task_closeButton).click() except ElementNotClickableException: break counter+=1 if counter >=3: break
def main(): # Load config options from config file and command line args cfg = Config() cfg.load_options() if cfg.background is 1: # Detach a process from the controlling terminal and run it in the # background as a daemon. try: pid = os.fork() except OSError, e: raise Exception, "%s [%d]" % (e.strerror, e.errno) if (pid == 0): # The first child. os.setsid() try: pid = os.fork() # Fork a second child. except OSError, e: raise Exception, "%s [%d]" % (e.strerror, e.errno) if (pid == 0): # The second child. os.chdir('/') os.umask(0) else: sys.exit(0) # Exit parent (the first child) of the second child.
def buildHeader(root): objectServerID = root.find("./OBJECTSERVERID").text objectID = root.find("./OBJECTID").text objectType = root.find("./OBJECTTYPE").text objectFlags = root.find("./OBJECTFLAGS").text if (Config.debug()): print(("objectServerID = %s" % objectServerID)) header ="<XMLCOMMAND><OBJECTID>" header += objectID header +="</OBJECTID><OBJECTSERVERID>" header += objectServerID header += "</OBJECTSERVERID><OBJECTTYPE>" header += objectType header += "</OBJECTTYPE>" header += "<OBJECTFLAGS>" header += objectFlags header += "</OBJECTFLAGS>" header += "<RASPICONNECTSERVERVERSIONNUMBER>" header += Config.version_number().decode('utf-8') header += "</RASPICONNECTSERVERVERSIONNUMBER>" return header
def select_view(view): #Selects a view inside a list if view != "": if Config.find_element([By.CSS_SELECTOR, "a#" + view]).get_attribute("class") != "view topbar-icon selected": close_task() Config.find_element([By.CSS_SELECTOR, "a#" + view]).click()
def spam_naub_bunch(self): naubs_n = Config.naubs_per_bunch() naubs_max = Config.max_naubs() for i in xrange(naubs_n): if len(self.naubino.naubs) > naubs_max: return self.spam_naub_pair()
def __init__(self, configFile = None): if configFile is None: settings = Config.read() else: settings = Config.read(configFile) self.meanRad = settings['NODE']['MIN_RADIUS'] # Dimensional units in nm self.minRad = settings['NODE']['MEAN_RADIUS'] self.maxRad = settings['NODE']['MAX_RADIUS'] self.sigmaRad = settings['NODE']['SIGMA_RADIUS'] self.bBoxDims = np.array([settings['FOREST']['DIMENSIONS']['MAX_X'],settings['FOREST']['DIMENSIONS']['MAX_Y'],settings['FOREST']['DIMENSIONS']['MAX_Z']]) self.pDensity = settings['FOREST']['SURFACE_DENSITY'] #Unitless fraction covered by particles self.bBoxArea = self.bBoxDims[0]*self.bBoxDims[1] self.areaC = 0 self.ANGULAR_MEAN = settings['SPRINGS']['ANGLES']['MEAN_PHI'] self.ANGULAR_SIGMA = settings['SPRINGS']['ANGLES']['SIGMA_PHI'] self.THETA_MIN = settings['SPRINGS']['ANGLES']['MIN_THETA'] self.THETA_MAX = settings['SPRINGS']['ANGLES']['MAX_THETA'] self.NODE_MASS = settings['NODE']['MASS'] self.SEGMENT_LENGTH = 8 self.TIME_STEP = .1 self.GROWTH_SPEED = 1 self.MAX_SEGMENTS = 20 #Defines the end condition self.segmentNum = 0 self.TORSION_K = 1 self.TORSION_DAMP = 1 self.LINEAR_K = 1 self.LINEAR_DAMP = 1
def activityList(self, page): if not page or page == '': raise Common.InvalidPageException("# brand activityList: not get JHS brand home.") self.ju_brand_page = page # 保存html文件 page_datepath = 'act/marketing/' + time.strftime("%Y/%m/%d/%H/", time.localtime(self.begin_time)) Config.writefile(page_datepath,'brand.htm',self.ju_brand_page) # 数据接口URL list self.top_brands = self.brand_temp.activityTopbrandTemp(page) b_url_valList = self.brand_temp.activityListTemp(page) if b_url_valList != []: # 从接口中获取的数据列表 bResult_list = [] json_valList = [] for b_url_val in b_url_valList: b_url, f_name, f_catid = b_url_val json_valList.append((b_url,Config.ju_brand_home,(f_catid,f_name))) bResult_list = self.jsonpage.get_json(json_valList) act_valList = [] if bResult_list and bResult_list != []: a_val = (Config.JHS_Brand,'',self.begin_time,) act_valList = self.jsonpage.parser_brandjson(bResult_list,a_val) if act_valList != []: print '# get brand act num:',len(act_valList) self.run_brandAct(act_valList) else: print '# err: not get brandjson parser val list.' else: print '# err: not find activity json data URL list.'
def on_stop(self): print "[Duck Settings] Quiting and saving configuration" #update config file dbus.mainloop.glib.DBusGMainLoop(set_as_default=True) # Get the session bus bus = dbus.SessionBus() # Get the remote object remote_object = bus.get_object("org.duck.Launcher","/DBusWidget") # Get the remote interface for the remote object iface = dbus.Interface(remote_object, "org.duck.Launcher") Dict={ "r":iface.getR1(), "g":iface.getG1(), "b":iface.getB1(), "r2":iface.getR2(), "g2":iface.getG2(), "b2":iface.getB2(), "alpha":iface.getAlpha(), "font":iface.getFont(), "animation-speed":iface.getAnimationSpeed(), "size":iface.getLauncherWidth(), "dock-apps":iface.getDockApps(), "icon-size":iface.getIconSize(), "blocks":iface.getBlocks(), "init-manager":iface.getInit() } d_cfg.check_dict(Dict)
def run_leeshore(self): """ This function deploys one leeshore-test per user every second hour. s :return: :rtype: """ day = int(sys.argv[1]) config = Config() newconfig = config.init_db_config() grouplist = newconfig.get_account().get_groups() path = newconfig.get_script_path() runinterval = int(newconfig.get_interval()) / len(grouplist) logging.info("Interval: " + str(newconfig.get_interval())) positiondict = {} while True: for i in grouplist: userconfig = self.interpreterServer.getUserConfig(i, "couchdb") if datetime.datetime.today().weekday() == day or userconfig["leeshore_enabled"] == 0: logging.critical("Today is a day off for leeshore") time.sleep(runinterval) else: tenant_name = userconfig["tenant_name"] ip = userconfig["ipaddress"] executable_string = "" executable_string = "/root/uptime_challenge_master/testscript/leeshore_short.pl -n " + tenant_name worklist = [] worklist.append(executable_string) groupdict = {} groupdict.update({i: worklist}) newconfig.create_work_queue(newconfig.get_queue_name(), groupdict) worklist = [] queue = Queues() queue.receive_one_message_from_q("leeshore_reportq", str(runinterval))
def post(self): username = self.get_argument('username') password = self.get_argument('password') if username == Config.read_config('config.properties', 'admin', 'admin_username') and password == Config.read_config('config.properties', 'admin', 'admin_passwd'): self.set_secure_cookie('user', username) self.redirect('/admin')
def init(engine): # define configuration keys for all available mods for m in getAvailableMods(engine): Config.define("mods", "mod_" + m, bool, False, text = m, options = {False: _("Off"), True: _("On")}) # init all active mods for m in getActiveMods(engine): activateMod(engine, m)
#coding:utf-8 import tensorflow as tf import sys,time import numpy as np import cPickle, os import random import Config import Model config_tf = tf.ConfigProto() config_tf.gpu_options.allow_growth = True config_tf.inter_op_parallelism_threads = 1 config_tf.intra_op_parallelism_threads = 1 config = Config.Config() char_to_idx, idx_to_char = cPickle.load(open(config.model_path+'.voc', 'r')) config.vocab_size = len(char_to_idx) is_sample = config.is_sample is_beams = config.is_beams beam_size = config.beam_size len_of_generation = config.len_of_generation start_sentence = config.start_sentence def run_epoch(session, m, data, eval_op, state=None): """Runs the model on the given data.""" x = data.reshape((1,1)) prob, _state, _ = session.run([m._prob, m.final_state, eval_op], {m.input_data: x, m.initial_state: state})
# # check appdata file format violation # # http://people.freedesktop.org/~hughsient/appdata/ # from Filter import addDetails, printError from Pkg import getstatusoutput import AbstractCheck import Config STANDARD_BIN_DIRS = ['/bin/', '/sbin/', '/usr/bin/', '/usr/sbin/'] DEFAULT_APPDATA_CHECKER = ('appstream-util', 'validate-relax') appdata_checker = Config.getOption("AppDataChecker", DEFAULT_APPDATA_CHECKER) class AppDataCheck(AbstractCheck.AbstractFilesCheck): def __init__(self): # desktop file need to be in $XDG_DATA_DIRS # $ echo $XDG_DATA_DIRS/applications # /var/lib/menu-xdg:/usr/share AbstractCheck.AbstractFilesCheck.__init__( self, "AppDataCheck", "/usr/share/appdata/.*\.appdata.xml$") def check_file(self, pkg, filename): root = pkg.dirName() f = root + filename try: st = getstatusoutput(appdata_checker + (f, ))
__author__ = "github.com/AndrewSazonov" __version__ = '0.0.1' import os, sys import Functions, Config CONFIG = Config.Config() def setupExePath(): d = { 'macos': os.path.join(CONFIG.setup_full_name, 'Contents', 'MacOS', CONFIG.setup_name), 'ubuntu': CONFIG.setup_full_name, 'windows': CONFIG.setup_full_name } return os.path.join(CONFIG.dist_dir, d[CONFIG.os]) def runInstallerSilently(): try: message = f'install {CONFIG.app_name}' silent_script = CONFIG['ci']['scripts']['silent_install'] silent_script_path = os.path.join(CONFIG.scripts_dir, silent_script) Functions.installSilently(installer=setupExePath(), silent_script=silent_script_path) except Exception as exception: Functions.printFailMessage(message, exception)
import Config from Common.Utils.UserInput import read from Common.ModelCalculatorCompareFixedAndFloat import run from Common.ModelCalculatorWrapper import ConvertR2N as conversionHandler from Common.Utils.InputGenerator import getRandomDistribution as distributionFunc run(read(default=1000),Config.Logger(),conversionHandler,distributionFunc)
count = self.db.selectScalar( "SELECT count(*) FROM organizations WHERE id=%s", (orgId, )) if count > 0: finalCopyHolders.append((orgName, orgId)) else: print("Organization does not exist for ", orgName, orgId) errorCount2 += 1 if errorCount1 > 0 or errorCount2 > 0: print("Add missing organizations and try again") sys.exit() sql = "INSERT INTO lpts_organizations (lpts_organization, organization_role, organization_id) VALUES (%s, 2, %s)" self.db.executeBatch(sql, finalLicensors) sql = "INSERT INTO lpts_organizations (lpts_organization, organization_role, organization_id) VALUES (%s, 1, %s)" self.db.executeBatch(sql, finalCopyHolders) if (__name__ == '__main__'): config = Config() lptsReader = LPTSExtractReader(config) db = SQLUtility(config) test = MatchOrganizationNames(config, db, lptsReader) test.addTables() test.processAlansLicensorMatch() test.processAlansCopyrightMatch() test.processLicensor() test.processCopyHolder() test.displayOutput() test.updateDatabase() db.close()
def __init__(self, resource, svg): self.logClassInits = Config.get("game", "log_class_inits") if self.logClassInits == 1: Log.debug("Data class init (Data.py)...") self.logLoadings = Config.get("game", "log_loadings") self.logImageNotFound = Config.get("log", "log_image_not_found") self.resource = resource self.svg = svg self.sfxVolume = Config.get("audio", "SFX_volume") self.crowdVolume = Config.get("audio", "crowd_volume") #Get theme themename = Config.get("coffee", "themename") self.themeLabel = themename self.themeCoOp = False self.players = None self.players = Player.loadPlayers() #myfingershurt: check for existance of theme path themepath = os.path.join(Version.dataPath(), "themes") self.themepath = themepath self.path = Version.dataPath() if not self.checkImgDrawing( os.path.join("themes", themename, "notes.png")): #myfingershurt: here need to ensure an existing theme is selected themes = [] defaultTheme = None #myfingershurt allthemes = os.listdir(themepath) for name in allthemes: if self.checkImgDrawing( os.path.join("themes", name, "notes.png")): themes.append(name) if name == "MegaLight": #myfingershurt defaultTheme = name #myfingershurt i = len(themes) if defaultTheme != "MegaLight": #myfingershurt defaultTheme = themes[0] #myfingershurt #not a valid theme if notes.png isn't there! Force default theme: Config.set("coffee", "themename", defaultTheme) #re-init Data with new default themename = defaultTheme self.themeLabel = themename if not os.path.exists( os.path.join(Version.dataPath(), "themes", themename, "vocals")): self.vocalPath = "vocals" else: self.vocalPath = os.path.join("themes", themename, "vocals") if self.checkImgDrawing(os.path.join("themes", themename, "spfill.png")): self.theme = 0 elif self.checkImgDrawing( os.path.join("themes", themename, "overdrive fill.png")): self.theme = 2 self.themeCoOp = True else: self.theme = 1 if self.checkImgDrawing( os.path.join("themes", themename, "coop_rockmeter.png")): self.themeCoOp = True self.fontScreenBottom = 0.75 #from our current viewport's constant 3:4 aspect ratio (which is always stretched to fill the video resolution) self.loadPartImages() #myfingershurt: multi-OS compatibility file access fixes using os.path.join() # load font customization images #Worldrave - Use new defined Star3 and star4. Using star1 and star2 as a fallback. #MFH - no more custom glyphs, these are wasting memory. #MFH - but we do need these star1-4 images anyway. Leaving them loaded here in the Data object. self.loadImgDrawing(self, "star1", os.path.join("themes", themename, "star1.png"), textureSize=(128, 128)) self.loadImgDrawing(self, "star2", os.path.join("themes", themename, "star2.png"), textureSize=(128, 128)) #MFH - let's not rely on errors here if we don't have to... 
if not self.loadImgDrawing(self, "star3", os.path.join("themes", themename, "star3.png"), textureSize=(128, 128)): self.star3 = self.star1 if not self.loadImgDrawing(self, "star4", os.path.join("themes", themename, "star4.png"), textureSize=(128, 128)): self.star4 = self.star2 if self.loadImgDrawing(self, "starPerfect", os.path.join("themes", themename, "starperfect.png"), textureSize=(128, 128)): self.perfectStars = True self.maskStars = False else: self.starPerfect = self.star2 self.fcStars = False self.starFC = self.star2 self.maskStars = True self.perfectStars = False #self.perfectStars = False if self.perfectStars: if self.loadImgDrawing(self, "starFC", os.path.join("themes", themename, "starfc.png"), textureSize=(128, 128)): self.fcStars = True else: #self.starFC = None self.starFC = self.starPerfect self.fcStars = False #self.loadImgDrawing(self, "left", "left.png", textureSize = (128, 128)) #self.loadImgDrawing(self, "right", "right.png", textureSize = (128, 128)) # load misc images self.loadImgDrawing(self, "loadingImage", os.path.join("themes", themename, "loadingbg.png"), textureSize=(256, 256)) self.loadImgDrawing(self, "choiceImage", os.path.join("themes", themename, "editor.png")) if self.loadImgDrawing( self, "submenuSelect", os.path.join("themes", themename, "submenuselect.png")): subSelectImgW = self.submenuSelect.width1() self.submenuSelectFound = True self.subSelectWFactor = 640.000 / subSelectImgW self.subSelectImgH = self.submenuSelect.height1() else: self.submenuSelectFound = False self.loadImgDrawing( self, "submenuSelect", os.path.join("themes", themename, "menu", "selected.png")) self.subSelectWFactor = 0 self.loadAllImages(self, os.path.join("themes", themename, "common")) # load all the data in parallel # asciiOnly = not bool(Language.language) or Language.language == "Custom" # reversed = _("__lefttoright__") == "__righttoleft__" and True or False asciiOnly = True reversed = False scale = 1 scale2 = .5 # evilynux - Load bigger fonts so they're nicer when scaled, scaling readjusted fontSize = [44, 108, 34, 32, 30] if asciiOnly: font = resource.fileName("default.ttf") bigFont = resource.fileName("title.ttf") else: Log.debug("Main font International.ttf used!") font = \ bigFont = resource.fileName("international.ttf") # load fonts w, h = [int(s) for s in Config.get("video", "resolution").split("x")] aspectRatio = float(w) / float(h) if os.path.isdir(os.path.join(self.themepath, "fonts")): self.fontDict = {} for file in os.listdir(os.path.join(self.themepath, "fonts")): splitext = os.path.splitext(file) if splitext[1] == ".ttf": self.fontDict[splitext[0]] = Font(os.path.join( self.themepath, "fonts", file), 64, scale=1, reversed=False, systemFont=False, aspectRatio=aspectRatio) else: font1 = lambda: Font(font, fontSize[0], scale=scale * .5, reversed=reversed, systemFont=not asciiOnly, aspectRatio=aspectRatio) font2 = lambda: Font(bigFont, fontSize[1], scale=1, reversed=reversed, systemFont=not asciiOnly) if self.theme == 1: # evilynux - No outline for GH3 font3 = lambda: Font(pauseFont, fontSize[2], scale=scale2, reversed=reversed, systemFont=not asciiOnly, outline=False, aspectRatio=aspectRatio) else: font3 = lambda: Font(pauseFont, fontSize[2], scale=scale2, reversed=reversed, systemFont=not asciiOnly, aspectRatio=aspectRatio) font4 = lambda: Font(scoreFont, fontSize[3], scale=scale2, reversed=reversed, systemFont=not asciiOnly, outline=False, aspectRatio=aspectRatio) font5 = lambda: Font(streakFont, fontSize[3], scale=scale2, reversed=reversed, systemFont=not 
asciiOnly, outline=False, aspectRatio=aspectRatio) if self.theme == 1: font6 = lambda: Font( loadingFont, fontSize[3], scale=scale2 * 1.4, reversed=reversed, systemFont=not asciiOnly, outline=False, shadow=True, aspectRatio=aspectRatio ) #Worldrave - Added shadow to Loading Phrases in GH-Based Theme's else: font6 = lambda: Font(loadingFont, fontSize[3], scale=scale2, reversed=reversed, systemFont=not asciiOnly, outline=False, aspectRatio=aspectRatio) if self.theme == 2: font7 = lambda: Font( songFont, fontSize[4], scale=scale2, reversed=reversed, systemFont=not asciiOnly, outline=False, aspectRatio=aspectRatio ) #kk69: loads font specific for song name in Guitar Scene =) else: font7 = lambda: Font( songFont, fontSize[0], scale=scale2, reversed=reversed, systemFont=not asciiOnly, outline=False, aspectRatio=aspectRatio ) #kk69: loads font specific for song name in Guitar Scene =) font8 = lambda: Font(songListFont, fontSize[3], scale=scale2, reversed=reversed, systemFont=not asciiOnly, outline=False, aspectRatio=aspectRatio) #MFH font9 = lambda: Font(shadowfont, fontSize[3], scale=scale2, reversed=reversed, systemFont=not asciiOnly, outline=False, shadow=True, aspectRatio=aspectRatio) #blazingamer font10 = lambda: Font( streakFont2, fontSize[2], scale=scale2 * 1.08, reversed=reversed, systemFont=not asciiOnly, outline=False, shadow=True, aspectRatio=aspectRatio ) #blazingamer - Worldrave modified size to accuracy. resource.load(self, "font", font1, synch=True) resource.load(self, "bigFont", font2, synch=True) #MFH - seems like these should be up here... menuFont = resource.fileName( os.path.join("themes", themename, "menu.ttf")) pauseFont = resource.fileName( os.path.join("themes", themename, "pause.ttf")) scoreFont = resource.fileName( os.path.join("themes", themename, "score.ttf")) if self.fileExists(os.path.join("themes", themename, "Streak.ttf")): streakFont = resource.fileName( os.path.join("themes", themename, "streak.ttf")) else: streakFont = resource.fileName( os.path.join("themes", themename, "score.ttf")) if self.fileExists(os.path.join("themes", themename, "Song.ttf")): songFont = resource.fileName( os.path.join("themes", themename, "song.ttf")) else: songFont = resource.fileName( os.path.join("themes", themename, "menu.ttf") ) #kk69: use menu font when song font is not present if self.fileExists(os.path.join("themes", themename, "loading.ttf")): loadingFont = resource.fileName( os.path.join("themes", themename, "loading.ttf")) else: loadingFont = resource.fileName("default.ttf") if self.fileExists( os.path.join("themes", themename, "songlist.ttf")): songListFont = resource.fileName( os.path.join("themes", themename, "songlist.ttf")) else: songListFont = menuFont if self.fileExists( os.path.join("themes", themename, "songlist.ttf")): shadowfont = resource.fileName( os.path.join("themes", themename, "songlist.ttf")) else: shadowfont = menuFont #blazingamer if self.fileExists( os.path.join("themes", themename, "streakphrase.ttf")): streakFont2 = resource.fileName( os.path.join("themes", themename, "streakphrase.ttf")) else: streakFont2 = menuFont #blazingamer:Reorganized if self.theme == 0: font1 = lambda: Font(menuFont, fontSize[2], scale=scale * .5, reversed=reversed, systemFont=not asciiOnly, aspectRatio=aspectRatio) font2 = lambda: Font(menuFont, fontSize[2], scale=scale * .5, reversed=reversed, systemFont=not asciiOnly, outline=False, aspectRatio=aspectRatio) resource.load(self, "lfont", font2, synch=True) resource.load(self, "font", font1, synch=True) elif self.theme == 1: 
font1 = lambda: Font( menuFont, fontSize[3], scale=scale * .5, reversed=reversed, systemFont=not asciiOnly, outline=False, aspectRatio=aspectRatio ) #Worldrave - Removed outline from options text on GH-Based theme's. No other drawbacks noticed. font2 = lambda: Font(menuFont, fontSize[3], scale=scale * .5, reversed=reversed, systemFont=not asciiOnly, outline=False, aspectRatio=aspectRatio) resource.load(self, "lfont", font2, synch=True) resource.load(self, "font", font1, synch=True) elif self.theme == 2: font1 = lambda: Font(menuFont, fontSize[4], scale=scale * .5, reversed=reversed, systemFont=not asciiOnly, outline=False, aspectRatio=aspectRatio) resource.load(self, "font", font1, synch=True) resource.load(self, "pauseFont", font3, synch=True) resource.load(self, "scoreFont", font4, synch=True) resource.load(self, "streakFont", font5, synch=True) resource.load(self, "songFont", font7, synch=True) resource.load(self, "streakFont2", font10, synch=True) #blazingamer resource.load(self, "songListFont", font8, synch=True) resource.load(self, "shadowfont", font9, synch=True) resource.load(self, "loadingFont", font6, synch=True) self.fontDict = {"font": self.font, "bigFont": self.bigFont, "pauseFont": self.pauseFont, "scoreFont": self.scoreFont, \ "streakFont": self.streakFont, "songFont": self.songFont, "streakFont2": self.streakFont2, \ "songListFont": self.songListFont, "shadowfont": self.shadowfont, "loadingFont": self.loadingFont} try: self.fontDict['lfont'] = self.lfont except AttributeError: pass if self.fileExists( os.path.join("themes", themename, "sounds", "starding.ogg")): self.loadSoundEffect( self, "starDingSound", os.path.join("themes", themename, "sounds", "starding.ogg")) self.starDingSoundFound = True else: Log.debug("Star ding sound not found, loading another sound.") self.loadSoundEffect(self, "starDingSound", os.path.join("sounds", "clapsound.ogg")) self.starDingSoundFound = False if self.fileExists( os.path.join("themes", themename, "sounds", "starlost.ogg")): self.loadSoundEffect( self, "starLostSound", os.path.join("themes", themename, "sounds", "starlost.ogg")) self.starLostSoundFound = True else: if self.fileExists(os.path.join("sounds", "starlost.ogg")): self.loadSoundEffect(self, "starLostSound", os.path.join("sounds", "starlost.ogg")) self.starLostSoundFound = True else: Log.debug("Star lost sound not found, loading another sound.") self.loadSoundEffect(self, "starLostSound", os.path.join("sounds", "clapsound.ogg")) self.starLostSoundFound = False if self.fileExists(os.path.join("sounds", "bassdrum.ogg")): self.loadSoundEffect(self, "bassDrumSound", os.path.join("sounds", "bassdrum.ogg")) self.bassDrumSoundFound = True else: Log.debug("Bass drum sound not found, loading another sound.") self.loadSoundEffect(self, "bassDrumSound", os.path.join("sounds", "clapsound.ogg")) self.bassDrumSoundFound = False #Faaa Drum sound if self.fileExists(os.path.join("sounds", "tom01.ogg")): self.loadSoundEffect(self, "T1DrumSound", os.path.join("sounds", "tom01.ogg")) self.T1DrumSoundFound = True else: Log.debug("Drum sound tom01 not found, loading another sound.") self.loadSoundEffect(self, "T1DrumSound", os.path.join("sounds", "clapsound.ogg")) self.T1DrumSoundFound = False if self.fileExists(os.path.join("sounds", "tom02.ogg")): self.loadSoundEffect(self, "T2DrumSound", os.path.join("sounds", "tom02.ogg")) self.T2DrumSoundFound = True else: Log.debug("Drum sound tom02 not found, loading another sound.") self.loadSoundEffect(self, "T2DrumSound", os.path.join("sounds", 
"clapsound.ogg")) self.T2DrumSoundFound = False if self.fileExists(os.path.join("sounds", "tom03.ogg")): self.loadSoundEffect(self, "T3DrumSound", os.path.join("sounds", "tom03.ogg")) self.T3DrumSoundFound = True else: Log.debug("Drum sound tom03 not found, loading another sound.") self.loadSoundEffect(self, "T3DrumSound", os.path.join("sounds", "clapsound.ogg")) self.T3DrumSoundFound = False if self.fileExists(os.path.join("sounds", "crash.ogg")): self.loadSoundEffect(self, "CDrumSound", os.path.join("sounds", "crash.ogg")) self.CDrumSoundFound = True else: Log.debug("Drum sound crash not found, loading another sound.") self.loadSoundEffect(self, "CDrumSound", os.path.join("sounds", "clapsound.ogg")) self.CDrumSoundFound = False # load sounds resource.load(self, "screwUpsounds", self.loadScrewUpsounds) resource.load(self, "screwUpsoundsBass", self.loadScrewUpsoundsBass) resource.load( self, "screwUpsoundsDrums", self.loadScrewUpsoundsDrums) #myfingershurt: drum screw up sounds resource.load(self, "acceptSounds", self.loadAcceptSounds) #myfingershurt resource.load(self, "cancelSounds", self.loadBackSounds) #myfingershurt resource.load(self, "symcsounds", self.loadScrewUpsounds) self.loadSoundEffect( self, "selectSound1", os.path.join("themes", themename, "sounds", "select1.ogg")) self.loadSoundEffect( self, "selectSound2", os.path.join("themes", themename, "sounds", "select2.ogg")) self.loadSoundEffect( self, "selectSound3", os.path.join("themes", themename, "sounds", "select3.ogg")) self.loadSoundEffect( self, "startSound", os.path.join("themes", themename, "sounds", "start.ogg")) self.loadSoundEffect( self, "starSound", os.path.join("themes", themename, "sounds", "starpower.ogg")) if self.fileExists( os.path.join("themes", themename, "sounds", "failsound.ogg")): self.loadSoundEffect( self, "failSound", os.path.join("themes", themename, "sounds", "failsound.ogg")) else: #MFH: Fallback on general failsound.ogg self.loadSoundEffect(self, "failSound", os.path.join("sounds", "failsound.ogg")) Log.warn( themename + "/sounds/failsound.ogg not found -- using general failsound.ogg instead." ) #myfingershurt: integrating Capo's starpower clap sounds self.loadSoundEffect(self, "clapSound", os.path.join("sounds", "clapsound.ogg")) if self.fileExists( os.path.join("themes", themename, "sounds", "starpowerready.ogg")): self.loadSoundEffect( self, "starReadySound", os.path.join("themes", themename, "sounds", "starpowerready.ogg")) else: #MFH: Fallback on starpower.ogg self.loadSoundEffect( self, "starReadySound", os.path.join("themes", themename, "sounds", "starpower.ogg")) Log.warn( themename + "/sounds/starpowerready.ogg not found -- using starpower.ogg instead." ) #MFH - fallback on sounds/crowdcheers.ogg, and then starpower.ogg. Note if the fallback crowdcheers was used or not. if self.fileExists( os.path.join("themes", themename, "sounds", "crowdcheers.ogg")): self.loadSoundEffect(self, "crowdSound", os.path.join("themes", themename, "sounds", "crowdcheers.ogg"), crowd=True) self.cheerSoundFound = 2 elif self.fileExists(os.path.join("sounds", "crowdcheers.ogg")): self.loadSoundEffect(self, "crowdSound", os.path.join("sounds", "crowdcheers.ogg"), crowd=True) self.cheerSoundFound = 1 Log.warn( themename + "/sounds/crowdcheers.ogg not found -- using data/sounds/crowdcheers.ogg instead." 
) else: #MFH: Fallback on starpower.ogg self.loadSoundEffect( self, "crowdSound", os.path.join("themes", themename, "sounds", "starpower.ogg")) self.cheerSoundFound = 0 Log.warn( themename + "/sounds/crowdcheers.ogg not found -- using starpower.ogg instead." ) if self.fileExists( os.path.join("themes", themename, "sounds", "staractivate.ogg")): self.loadSoundEffect( self, "starActivateSound", os.path.join("themes", themename, "sounds", "staractivate.ogg")) else: #MFH: Fallback on starpower.ogg self.loadSoundEffect( self, "starActivateSound", os.path.join("themes", themename, "sounds", "starpower.ogg")) Log.warn( themename + "/sounds/staractivate.ogg not found -- using starpower.ogg instead." ) if self.fileExists( os.path.join("themes", themename, "sounds", "battleused.ogg")): self.loadSoundEffect( self, "battleUsedSound", os.path.join("themes", themename, "sounds", "battleused.ogg")) elif self.fileExists( os.path.join("themes", themename, "sounds", "staractivate.ogg")): self.loadSoundEffect( self, "battleUsedSound", os.path.join("themes", themename, "sounds", "staractivate.ogg")) Log.warn( themename + "/sounds/battleused.ogg not found -- using staractive.ogg instead." ) else: #Fallback on starpower.ogg self.loadSoundEffect( self, "battleUsedSound", os.path.join("themes", themename, "sounds", "starpower.ogg")) Log.warn( themename + "/sounds/battleused.ogg not found -- using starpower.ogg instead." ) if self.fileExists( os.path.join("themes", themename, "sounds", "stardeactivate.ogg")): self.loadSoundEffect( self, "starDeActivateSound", os.path.join("themes", themename, "sounds", "stardeactivate.ogg")) self.starDeActivateSoundFound = True else: #MFH: Fallback on starpower.ogg - required to load, but will not be played. self.loadSoundEffect( self, "starDeActivateSound", os.path.join("themes", themename, "sounds", "starpower.ogg")) self.starDeActivateSoundFound = False Log.warn(themename + "/sounds/stardeactivate.ogg not found -- sound disabled.") if self.fileExists( os.path.join("themes", themename, "sounds", "rescue.ogg")): self.loadSoundEffect( self, "rescueSound", os.path.join("themes", themename, "sounds", "rescue.ogg")) elif self.fileExists( os.path.join("themes", themename, "sounds", "staractivate.ogg")): self.loadSoundEffect( self, "rescueSound", os.path.join("themes", themename, "sounds", "staractivate.ogg")) Log.warn( themename + "/sounds/rescue.ogg not found -- using staractivate.ogg instead." ) else: self.loadSoundEffect( self, "rescueSound", os.path.join("themes", themename, "sounds", "starpower.ogg")) Log.warn( themename + "/sounds/rescue.ogg not found -- using starpower.ogg instead.") if self.fileExists( os.path.join("themes", themename, "sounds", "coopfail.ogg")): self.loadSoundEffect( self, "coOpFailSound", os.path.join("themes", themename, "sounds", "coopfail.ogg")) elif self.fileExists( os.path.join("themes", themename, "sounds", "stardeactivate.ogg")): self.loadSoundEffect( self, "coOpFailSound", os.path.join("themes", themename, "sounds", "stardeactivate.ogg")) Log.warn( themename + "/sounds/coopfail.ogg not found -- using stardeactivate.ogg instead" ) elif self.fileExists( os.path.join("themes", themename, "sounds", "out.ogg")): #MFH - not all themes have out.ogg! 
self.loadSoundEffect( self, "coOpFailSound", os.path.join("themes", themename, "sounds", "out.ogg")) Log.warn(themename + "/sounds/coopfail.ogg not found -- using out.ogg instead") else: self.loadSoundEffect( self, "coOpFailSound", os.path.join("themes", themename, "sounds", "back1.ogg")) Log.warn( themename + "/sounds/coopfail.ogg not found -- using back1.ogg instead") #myfingershurt: adding You Rock sound effect if self.fileExists( os.path.join("themes", themename, "sounds", "rocksound.ogg")): self.loadSoundEffect( self, "rockSound", os.path.join("themes", themename, "sounds", "rocksound.ogg")) else: self.loadSoundEffect(self, "rockSound", os.path.join("sounds", "rocksound.ogg"))
client_socket = ClientSocket.ClientSocket(HOST, PORT) sender = Sender.Sender(client_socket) plot_frame1 = tk.Frame(root) # plot_frame.pack_propagate(0) plot_frame1.pack(fill='both', side='top', expand='True') plot_frame2 = tk.Frame(root) # plot_frame.pack_propagate(0) plot_frame2.pack(fill='both', side='top', expand='True') x_plot = Plot.Plot(plot_frame1, "both", "left", "True") y_plot = Plot.Plot(plot_frame1, "both", "right", "True") z_plot = Plot.Plot(plot_frame2, "both", "left", "True") throttle_plot = Plot.Plot(plot_frame2, "both", "right", "True") config = Config.Config("config.ini") controller = Controller.Controller(root, "x", "bottom", "False", sender, config) plot_handler = PlotHandler.PlotHandler(x_plot, y_plot, z_plot, throttle_plot) recorder = Recorder.Recorder(args.recordpath) def d(event): plot_handler.redraw() root.bind('<Configure>', d) if args.mode == 0: recorder.start_recording() receiver = Receiver.Receiver(client_socket, plot_handler, recorder,
def test_debug(): settings = {'debug': True} config = Config(settings) assert config.debug is True
import PyCA.Core as ca from mpi4py import MPI import os import sys # This is meant to be a general computation configuration spec # Nothing in here should be specific to the algorithm being run # The idea is that subsections conforming to this spec could be held in a # central spot such as ~/.pyca/ and be included on the fly by command-line # overrides such as # ./Matching.py study.yaml computeConfig="~/.pyca/mpicluster.yaml" # or by simply changing a single line within study.yaml ComputeConfigSpec = { 'useCUDA': Config.Param(default=False, comment="Use GPU if available"), 'gpuID': Config.Param( default=0, comment= "Used when useCUDA is true. This is when a node has multiple GPUs and \ user needs to specify which GPU to use. Can be any integer in [0,#GPUs-1]. Typically not useful with MPI processes." ), 'useMPI': Config.Param( default=False, comment= "Use MPI for multiple nodes. Is overriden and set to True if you spawn the mpi processes from outside." ), 'interactive': Config.Param(default=False,
import my_modules import Config # Parameters import model_definition # DNN definitions ################################### # Print environment chainer.print_runtime_info() # GPU setting cuda.check_cuda_available() DEVICE_NUM = 2 xp = cuda.cupy DEVICE_INFO = cuda.get_device_from_id(DEVICE_NUM) ############################################################################### # load parameters sp_param, dnn_param, training_param = Config.load_config() # Dev set of normal sounds toy_type = 'ToyCar' toy_type = 'ToyConveyor' toy_type = 'ToyTrain' obs_dir = './exp1_dataset_' + toy_type + '/train_normal/' # save dir dnn_dir = './dnn_dir/' # model file name model_fn = toy_type + ".h5" # analysis condition rho = 0.1 # FPR = 10% # anomaly list anomaly_cond_xlsx_dir = './anomaly_conditions/' xlsx_fn = anomaly_cond_xlsx_dir + toy_type + '_anomay_condition.xlsx' anm_cnd = pd.read_excel(xlsx_fn)
def run(self, tarfile, targets): sourcedir = os.path.join( self.dest_dir, "opkg-%s-%s" % (self.opkgName, self.opkgDesc.getVersion('upstream'))) # Rename tar to follow Debian non-native package rule debtarfile = os.path.join( self.dest_dir, "opkg-%s_%s.orig.tar.gz" % (self.opkgName, self.opkgDesc.getVersion('upstream'))) os.rename(tarfile, debtarfile) # Uncompress tar if os.path.exists(sourcedir): Tools.rmDir(sourcedir) if not Tools.untar(debtarfile, self.dest_dir): Logger().error("Error while extracting tar file: %s" % debtarfile) raise SystemExit(1) # Create debian dir debiandir = os.path.join(sourcedir, "debian") os.makedirs(debiandir) # Compile template files debDesc = DebDescription(self.opkgDesc, self.dist) templateDir = os.path.abspath(Config().get(self.configSection, "templatedir")) tmplList = [ os.path.join(templateDir, t) for t in Tools.ls(templateDir) ] Logger().debug("Templates: %s" % tmplList) for template in tmplList: if re.search("\.tmpl", template): (head, tail) = os.path.split(template) (base, ext) = os.path.splitext(tail) Tools.cheetahCompile(debDesc, template, os.path.join(debiandir, base)) else: shutil.copy(template, debiandir) Logger().info("Copy %s to %s" % (template, debiandir)) # GV: For the rules file, we need to do some simple updates and # I do not know cheetah enough to do that quickly... there we # execute a sed command (yes, it is far from perfect). rulescript = debiandir + "/rules" cmd = "/bin/sed s/OPKGNAME/" + self.opkgName + "/g < " + debiandir + "/rules.in > " + rulescript os.system(cmd) Logger().info("Executing %s" % (cmd)) os.chmod(rulescript, 0744) for part in ['api', 'server', 'client']: fl = debDesc.getPackageFiles(part) installFile = os.path.join(debiandir, debDesc.getInstallFile(part)) filelist = open(installFile, "a") for f in fl: filelist.write("%s /%s/\n" % (f['sourcedest'], f['dest'])) filelist.close() # Build targets cmd = "%s -rfakeroot -sa" % self.buildCmd if 'source' in targets and 'binary' in targets: opts = "" elif 'source' in targets: opts = "-S" elif 'binary' in targets: opts = "-B" if Tools.command("%s %s" % (cmd, opts), sourcedir): Logger().info("Packages succesfully generated") else: Logger().error("Packages generation failed") raise SystemExit(1)
#!/usr/bin/env python # -*- coding: ISO-8859-1 -*- # # Copyright (C) 2007 by Frank Lübeck # import sys, os, time # get paths right homedir = os.path.abspath(sys.path[0]) okudir = os.path.join(homedir, '..') os.environ["OKUSONHOME"] = okudir sys.path = [os.path.join(okudir, 'server')] + sys.path # read config import Config Config.ReadConfig() Config.PostProcessing() # read exercises and sheets from fmTools import Utils, AsciiData Utils.Error('Reading exercises and sheets...', prefix='Info: ') import Exercises for d in Config.conf['ExerciseDirectories']: Exercises.ReadExercisesDirectory(d) for d in Config.conf['SheetDirectories']: Exercises.ReadSheetsDirectory(d) # list of triples (number, name, sheet) sheets = Exercises.SheetList() # read data
def main(): scen = "SAT12-HAND" config = Cfg.Config().get_config_dic() net = Net.Network(config) data = get_data(scen, config) cross_validation(scen, config, data, net, True)
def post(self, *args, **kwargs): """ Handles the creation of a new engine based on a slightly modified config, supplied via HTTP Post. The request requires 1 URL parameter: new_config_url, which points to a URL (can be local) from where to download the config. This call is blocking. WARNING All configs must have the following added: extract_output_layer_name = "<OUTPUT LAYER ID>" :param args: :param kwargs: :return: ASCII encoded hash of the new engine based on the provided config. """ # Overview: Clean up, create engine, download full config, create hash, and add it to the main list # TODO: Recheck clean up function global _max_amount_engines if (len(_engines) + 1) > _max_amount_engines: self._remove_oldest_engine() data = json.loads(self.request.body) print('Received new config for new engine', file=log.v3) hash_engine = hashlib.new('ripemd160') hash_engine.update(json.dumps(args) + str(datetime.datetime.now())) hash_temp = hash_engine.hexdigest() # Download new config file and save to temp folder. urlmanager = urllib.URLopener() basefile = "configs/" config_file = basefile + str(datetime.datetime.now()) + ".config" try: urlmanager.retrieve(data["new_config_url"], config_file) except (urllib2.URLError, urllib2.HTTPError): self.write('Error: Loading in config file from URL') return # Load and setup config try: config = Config.Config() config.load_file(config_file) except Exception: self.write('Error: Processing config file') return if config.value('task', 'daemon') != 'train': config.set( key='task', value='daemon' ) # Assume we're only using for classification or training try: _devices[hash_temp] = self._init_devices(config=config) except Exception: self.write('Error: Loading devices failed') return new_engine = Engine.Engine(_devices[hash_temp]) try: new_engine.init_network_from_config(config=config) except Exception: self.write('Error: Loading engine failed') return _engines[hash_temp] = new_engine _engine_usage[hash_temp] = datetime.datetime.now() _configs[hash_temp] = config print( 'Finished loading new config in, server running. Currently number of active engines: %i' % len(_engines), file=log.v3) self.write(hash_temp)
# Author : Frederic Lepied # Created on : Wed Oct 27 21:17:03 1999 # Purpose : verify source package correctness. ############################################################################# import re import AbstractCheck import Config from Filter import addDetails, printError, printWarning DEFAULT_VALID_SRC_PERMS = (int("644", 8), int("755", 8)) source_regex = re.compile('\\.(tar|patch|tgz|diff)$') compress_ext = Config.getOption("CompressExtension", "bz2") valid_src_perms = Config.getOption("ValidSrcPerms", DEFAULT_VALID_SRC_PERMS) class SourceCheck(AbstractCheck.AbstractCheck): def __init__(self): AbstractCheck.AbstractCheck.__init__(self, 'SourceCheck') def check_source(self, pkg): # process file list spec_file = None for fname, pkgfile in pkg.files().items(): if fname.endswith('.spec'): if spec_file: printError(pkg, 'multiple-specfiles', spec_file, fname)
# Web service running on the PSA interacting with the PSC # # import falcon import json import Config import logging import subprocess from execInterface import execInterface from getConfiguration import getConfiguration from psaExceptions import psaExceptions from dumpLogFile import dumpLogFile #old conf = Config.Configuration() date_format = "%m/%d/%Y %H:%M:%S" log_format = "[%(asctime)s.%(msecs)d] [%(module)s] %(message)s" logging.basicConfig(filename=conf.LOG_FILE, level=logging.DEBUG, format=log_format, datefmt=date_format) #older logging #logging.basicConfig(filename=conf.LOG_FILE,level=logging.DEBUG,format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p') pscAddr = conf.PSC_ADDRESS configsPath = conf.PSA_CONFIG_PATH psaID = conf.PSA_ID #confID = conf.CONF_ID
def getHostnameFromBucket(bucket): # return Config.Config().host_bucket % { 'bucket' : bucket } return Config.Config().host_base
import rospy import numpy as np from std_msgs.msg import Bool, Float32MultiArray from geometry_msgs.msg import PoseStamped, Pose from nav_msgs.msg import Path from rosgraph_msgs.msg import Clock from sensor_msgs.msg import Imu import Config import time import math import csv DR_NETWORK = False vehicle = Config.Vehicle() LoamPath = Path() DrPath = Path() # Loam_x = 0 # Loam_y = 0 current_velocity = 0 curr_steering = 0 curr_velocity = 0 loam_x = 0 loam_y = 0 forward_v = True start_vel = None IMU_pub = rospy.Publisher('Corrected_vel', Float32MultiArray)
def __init__(self): self.conf = Config()
from flask import Flask, request, abort from linebot import WebhookHandler, LineBotApi from linebot.exceptions import InvalidSignatureError from linebot.models import TextMessage, MessageEvent, TextSendMessage, MessageAction, QuickReplyButton, QuickReply import Config import stock_api import stock_message from database.database import create_database from database.sqlite.sqlite_api import add_user_stock app = Flask(__name__) line_bot_api = LineBotApi(Config.get_channel_access_token()) handler = WebhookHandler(Config.get_channel_secret()) @app.route('/') def hello_world(): return 'Hello world' @app.route("/callback", methods=['POST']) def callback(): # get X-Line-Signature header value signature = request.headers['X-Line-Signature'] # get request body as text body = request.get_data(as_text=True) app.logger.info("Request body: " + body)
def __init__(self):
    c = Config.Config()
    # Directory where captured images are saved, read from the CONFIG section.
    self.saveImageDir = c.get("CONFIG", "SAVE_IMAGE_DIR")
    # Logger obtained from the shared LoggingQueue producer.
    self.log = LoggingQueue.LoggingProducer().getlogger()
#!/bin/env python from Config import * from DBInterface import * config = Config("InfoTable", Config.Audio) db = DBInterface(":memory:", config) db.clear() db.printStats() ### from TagLoader import * l = TagLoaderFactory.GetLoader("zztop.flac") entry = l.getTags("zztop.flac") db.addEntries((entry, )) ### db.printStats() db.dumpDB() print db.getEntries({}) fname = {} fname["filename"] = "zztop.flac" print db.getEntries(fname) print db.getEntries(fname, ["album"]) fname["filename"] = "fump.mp3" print db.getEntries(fname)
Now = datetime.datetime.now() FileName = "SAVE/" FileName += Now.strftime("%Y-%m-%d_%H-%M-%S_") # Get Vehicle VIN for report filename. FileName += ThisELM327.DoPID("0902").replace(' ', '') + ".pdf" # Save PDF Report. Result = SavePdfReport(FileName) # Display PDF saved message. ThisDisplay.CurrentTab["CONFIRM"] = Confirm.Confirm(ThisDisplay.ThisSurface, "CONFIRM_PDF", "OBDII Report Saved:\n" + Result, ThisDisplay.GetDisplayWidth()/1.5, True) # If reset plot button is pressed. elif ButtonGadgit["BUTTON"] == "RESET": ThisDisplay.Plots["PLOT"].ClearData() # If configure button is pressed. elif ButtonGadgit["BUTTON"] == "CONFIG": # Display configuration dialog. ThisDisplay.CurrentTab["CONFIGURE"] = Config.Config(ThisDisplay.ThisSurface, "CONFIGURE", "CONFIGURE") # If save config button is pressed. elif ButtonGadgit["BUTTON"] == "SAVE_CONFIG": ThisDisplay.CurrentTab.pop("CONFIGURE", None) ApplyConfig() elif ButtonGadgit["BUTTON"] == "SELECT_FONT": # Remember which gadgit the select is for. SelectGadgit = ButtonGadgit["GADGIT"] # Get a list of mono space font names. SelectText = ThisDisplay.CurrentTab["CONFIGURE"].GetFontNameList() # Display a font name selection dialog. ThisDisplay.CurrentTab["SELECT"] = Select.Select(ThisDisplay.ThisSurface, "SELECT_FONT_NAME", SelectText) elif ButtonGadgit["BUTTON"] == "SELECT_VEHICLE": # Remember which gadgit the select is for. SelectGadgit = ButtonGadgit["GADGIT"] # Get a list of vehicle trouble code file names.
"""Generalized optimization configs and routines""" import Config def _valspec(_): """Hook to do extra validation""" pass OptimLongitudinalConfigSpec = { 'Niter': Config.Param(default=100, comment="Number gradient descent iterations to run"), 'stepSizeGroup': Config.Param( default=0.01, comment= "Gradient descent step size for initial momenta for group geodesic"), 'stepSizeResidual': Config.Param( default=0.01, comment= "Gradient descent step size for initial momenta for residual geodesic" ), 'maxPert': Config.Param(default=None, comment="Maximum perturbation. Rough method of automatic " + "step-size selection."), 'method': Config.Param(default="FIXEDGD",
import Config

from discord.ext import commands
from discord.ext.commands import MemberConverter

bot = commands.Bot(command_prefix='!', description='Comitete System')


@bot.command(name="say")
async def say(ctx, arg, *args):
    # Resolve the first argument to a guild member; if it refers to the bot
    # itself, delete the invoking message and repeat the remaining words.
    author = await MemberConverter().convert(ctx, arg)
    if author.id == bot.user.id:
        async for i in ctx.message.channel.history(limit=1):
            await i.delete()
        await ctx.send(' '.join(args))


@bot.event
async def on_ready():
    print('Logged in as')
    print(bot.user.name)
    print(bot.user.id)


# `botname` was undefined in the original; the value below is a placeholder and
# must match whatever Config.GetAccessToken() expects.
botname = 'comitete'
bot.run(Config.GetAccessToken(botname))
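# A minimal sketch of what the Config module imported above could provide; the
# environment-variable storage convention is an assumption, only the
# GetAccessToken(botname) signature is taken from the call above.
# Config.py (illustrative)
import os


def GetAccessToken(botname):
    # e.g. DISCORD_TOKEN_COMITETE for botname 'comitete' (assumed convention)
    return os.environ['DISCORD_TOKEN_' + botname.upper()]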
# Imports inferred from the usage below; the exact module layout for Config and
# AnendbFileSystem is an assumption based on how they are instantiated.
import re
import sys
import pprint
import subprocess

from Config import Config
from AnendbFileSystem import AnendbFileSystem


class Loader:
    """
    This class loads the insert-instruction data files into the relational database.

    Relational databases can be very messy and crazy, and this class doesn't rely on
    the database administrators. At the same time it isn't the best example of
    generalization: it is meant to be edited every time you need to load a new
    relational database. As you can see in the code, everything is right in your face.
    Read the code, make the changes (keep a backup) and run it.
    """

    def __init__( self ):
        # THIS --> IS <-- THE EXPECTED SET OF RELATIONAL DATABASE TABLES AND COLUMNS!!!
        # AND!!!! THE COLUMN ORDER MATTERS!!
        self.files = [
                { 'file': 'ecsInsert.psql', 'table': 'ecs', 'columns': [ 'id', 'ec' ] },
                { 'file': 'organismsInsert.psql', 'table': 'organisms', 'columns': [ 'id', 'code', 'name', 'internal_id', 'taxonomy_id' ] },
                { 'file': 'pathwaySuperClassesInsert.psql', 'table': 'pathway_super_classes', 'columns': [ 'id', 'name' ] },
                { 'file': 'pathwayClassesInsert.psql', 'table': 'pathway_classes', 'columns': [ 'id', 'super_class_id', 'name' ] },
                { 'file': 'pathwayNamesInsert.psql', 'table': 'pathway_maps', 'columns': [ 'id', 'class_id', 'identification', 'name' ] },
                { 'file': 'taxonomiesInsert.psql', 'table': 'taxonomies', 'columns': [ 'id', 'taxonomy', 'tax_id', 'tax_type' ] },
                { 'file': 'organismTaxonomiesInsert.psql', 'table': 'organism_taxonomies', 'columns': [ 'id', 'organism_id', 'taxonomy_id' ] },
                { 'file': 'proteinsInsert.psql', 'table': 'proteins', 'columns': [ 'id', 'identification', 'full_fasta_header', 'description', 'organism_id', 'sequence' ] },
                { 'file': 'proteinEcsInsert.psql', 'table': 'protein_ecs', 'columns': [ 'id', 'protein_id', 'ec_id' ] },
                { 'file': 'proteinMapsInsert.psql', 'table': 'protein_maps', 'columns': [ 'id', 'protein_id', 'map_id' ] },
                { 'file': 'organismEcsInsert.psql', 'table': 'organism_ecs', 'columns': [ 'id', 'organism_id', 'ec_id' ] },
                { 'file': 'organismMapsInsert.psql', 'table': 'organism_maps', 'columns': [ 'id', 'organism_id', 'map_id' ] },
                { 'file': 'accessionsInsert.psql', 'table': 'accessions', 'columns': [ 'id', 'accession' ] },
                { 'file': 'proteinAccessionsInsert.psql', 'table': 'protein_accessions', 'columns': [ 'id', 'protein_id', 'accession_id' ] },
                { 'file': 'ecMapsInsert.psql', 'table': 'ec_maps', 'columns': [ 'id', 'ec_id', 'map_id' ] },
        ]

    def start( self ):
        """
        Currently this only loads the destination directory that holds the
        insert-instruction files.
        """
        self.config = Config()
        self.afs = AnendbFileSystem()
        self.config.loadConfiguration()
        self.conf = self.config.getConfigurations()

    def checkPsqlCanExecuteCommand( self ):
        """
        Check if this loader can execute 'psql' commands.
        If not, nothing is going to work.

        Returns:
            (boolean): True (you can do stuff in PostgreSQL), False (you're screwed:
            call for help, it's a loader, don't expect to be happy here. You don't even
            know if at this moment you're really using PostgreSQL).
        """
        # Database user name from the configuration file.
        username = self.getConfiguration( 'database', 'user' )

        # This command only lists the tables from the database.
        command = 'psql -U ' + username + " -c '\\dt'"

        result = subprocess.call( command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE )

        # Zero (from the subprocess package) means the command executed without errors.
        if result == 0:
            return True
        else:
            return False

    def checkYouHaveTheRightTables( self ):
        """
        This method won't check foreign keys, table columns nor any kind of constraints.
        It only checks that you have the right table names in your database. If you don't
        have exactly the expected tables, you're screwed. Call for help.
        Read the code comments in this method to understand better what's happening.
        """

        expectedTables = []
        foundTables = []

        # Fill a list containing all the expected tables from the self.files dictionary.
        # And self.files is THE truth in this class.
        for myFile in self.files:
            expectedTables.append( myFile['table'] )

        # Get the data needed to connect to PostgreSQL.
        username = self.getConfiguration( 'database', 'user' )

        # Get all tables from the EXPECTED relational database you've created.
        sqlTables = "SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_type='BASE TABLE';"

        # Actually fetch the data described above by executing the 'psql' command.
        p = subprocess.Popen( [ "psql", '-U', username, '-t', '-c', sqlTables ], stdout=subprocess.PIPE )

        # Store the result of the command above.
        out, err = p.communicate()

        # The result is raw text, so split it into lines.
        tables = out.split('\n')

        # And we have to remove empty lines (the result has them!).
        reRemoveEmptyLines = re.compile('^$')

        # Iterate through the tables reported by the actual, real, relational database you've created.
        for t in tables:
            # Ignore empty lines.
            if reRemoveEmptyLines.search( t ):
                continue

            # Remove blank spaces and the useless '.' characters.
            tableName = re.sub( ' ', '', t )
            tableName = re.sub( '\\.', '', tableName )

            # Now we fill the list of tables found in your relational database.
            foundTables.append( tableName )

        # TODO: fix lack of expected tables.
        # -------------------------------------------------------------------------------------------
        # Some tables don't belong to the Loader but do exist: tables that have to be populated
        # manually, or tables that are critical but are not populated by this Loader.
        # If you're doing smart things in your relational database, keep this in mind.
        expectedTables.append( 'source_databases' )     # populated manually.
        expectedTables.append( 'protein_pdbs' )         # important, will be filled later.
        expectedTables.append( 'ec_reaction_classes' )  # populated manually.
        expectedTables.append( 'clusters' )             # filled by an outside process.
        expectedTables.append( 'clustering_methods' )   # filled by an outside process.
        # END of the not-so-fancy workaround. But loaders... come on... you know... loaders...
        # If you're a developer, you're horrified. If you're a database administrator, you're
        # horrified. But if you're the person who deals with loaders and big data, well... you know...
        # -------------------------------------------------------------------------------------------

        # Remove possible duplications (we never know).
        expectedTables = set(expectedTables)
        expectedTables = list(expectedTables)

        # If all the expected tables are exactly the same as the tables we're going to fill using
        # this class, everything is fine: you created a database with exactly the tables this class
        # expects. Congratulations!!
        if set(foundTables) == set(expectedTables):
            return True
        else:
            return False

    def getConfiguration( self, section=None, option=None ):
        """
        Read a value from the configuration file.

        Args:
            section(str): Section of the keggimporter.conf file.
            option(str): Option to read from the keggimporter.conf file.

        Returns:
            (str): Configuration value from the keggimporter.conf file for the
            specified section and option.
        """

        return self.conf.get( section, option )

    def setConfigurationFile( self, conf_file=None ):
        """
        Set the current keggimporter.conf file.

        Args:
            conf_file(str): Full path for the keggimporter.conf
        """

        self.config.configurationFile = conf_file
        self.config.loadConfiguration()
        self.conf = self.config.getConfigurations()

    def loadFiles( self ):
        """
        This is the serious business. This method inserts into the relational database
        all the data gathered by the KeggReader package.
        """

        if not self.checkPsqlCanExecuteCommand():
            print( "ERROR:" )
            print( "I cannot execute the 'psql' command properly, so I won't try to load data." )
            print( "Maybe it means you don't have a proper $HOME/.pgpass file." )
            print( "Try to create a .pgpass file in your home directory with the following data: " )
            print( "localhost:5432:kegg2017:darkmatter" )
            print( "BUT it's only a suggestion. Call your database administrator to know what it should be." )
            sys.exit()

        if not self.checkYouHaveTheRightTables():
            print( "ERROR:" )
            print( "You don't have the correct tables created in your PostgreSQL database." )
            print( "Maybe it means you didn't execute the correct restore procedure using the 'sql' file provided by this package." )
            print( "Maybe there's a different set of tables in your database." )
            print( "You have to have exactly the table names expected by this module, no more table names, no less table names." )
            print( "Check if you created any kind of test table or something." )
            print( "Again, you have to have exactly the set of tables expected by this module." )
            print( "Here's the list: " )
            for myTable in self.files:
                print( myTable['table'] )
            print( '--------------------------------------------------------------------------------' )
            print( "\n" )
            sys.exit()

        # Load the data source directory and the PostgreSQL user.
        dataSource = self.getConfiguration( 'directories', 'inserts' )
        username = self.getConfiguration( 'database', 'user' )

        # ------------------------------------------------------------------------ #
        # ---------------- THAT'S THE STUFF WE'RE LOOKING FOR -------------------- #
        # ------------------------------------------------------------------------ #
        # This part of the code actually inserts the data into the relational DB.  #
        # ------------------------------------------------------------------------ #

        # Load each data file into the relational database.
        for loadFile in self.files:

            # Print to the screen which table is being populated.
            pprint.pprint( '-----------------------------------------------' )
            pprint.pprint( loadFile['file'] )
            pprint.pprint( '-----------------------------------------------' )
            print( "\n" )
            # END of telling what's happening.

            # Find the source insert data file.
            fileToLoad = dataSource + '/' + loadFile['file']

            # Find the relational database table name to be filled.
            table = loadFile['table']

            # Column names are joined into the comma-separated list psql needs.
            columns = ','.join( loadFile['columns'] )

            # Actually execute the command that inserts the data into the relational database.
            process = subprocess.Popen( "psql -U " + username + " -c \"\\copy " + table + "(" + columns + ") from '" + fileToLoad + "';\"", shell=True )

            # Some tables take a long time to be inserted and the loop would otherwise pile
            # processes on top of each other, so we wait to keep the table insertion order correct.
            process.wait()
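# -----------------------------------------------------------------------------
# A minimal sketch of how the Loader above is meant to be driven, plus the shape
# of the configuration it reads. The section/option names ('database'/'user',
# 'directories'/'inserts') come from the getConfiguration() calls in the code;
# the concrete values and the configuration file path are assumptions.
#
# Example keggimporter.conf:
#
#   [database]
#   user = kegg2017
#
#   [directories]
#   inserts = /data/keggimporter/inserts
#
# psql is driven non-interactively, so a matching ~/.pgpass entry is also needed
# (standard format: host:port:database:user:password).
# -----------------------------------------------------------------------------
if __name__ == '__main__':
    loader = Loader()
    loader.start()
    loader.setConfigurationFile('/etc/keggimporter.conf')  # assumed path
    loader.loadFiles()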
def output(self): config = Config.get() return (TimestampedLocalTarget( config.get("GeneratedFiles", "percentile_popularity_with_id")))
def __init__(self): super(Provider, self).__init__() config_model = Config.Config() self.api_key = config_model.ameritrade_api_key
from MonitorData import * from Config import * a = Config() Data = MonitorData(1, 10) Data.listSpitzVMs(a.subscription_id, a.certificate_path, a.linux_user, a.linux_pass, verbose=True)
def connect_ftp(show_err=True): ftp.encoding = 'utf-8' # 解决中文乱码 err_counter = 0 connect_flag = False while err_counter <= 3: try: ftp.connect(self._settings['ftp']['server'], self._settings['ftp']['port']) # 连接 FTP ftp.login(self._settings['ftp']['user'], self._settings['ftp']['pwd']) # 登陆 connect_flag = True break except ftplib.error_temp as e: if show_err: if 'Too many connections' in str(e): detail = self._video_filename + ' 当前FTP連接數過多, 5分鐘后重試, 最多重試三次: ' + str( e) err_print(self._sn, 'FTP狀態', detail, status=1) else: detail = self._video_filename + ' 連接FTP時發生錯誤, 5分鐘后重試, 最多重試三次: ' + str( e) err_print(self._sn, 'FTP狀態', detail, status=1) err_counter = err_counter + 1 for i in range(5 * 60): time.sleep(1) except BaseException as e: if show_err: detail = self._video_filename + ' 在連接FTP時發生無法處理的異常:' + str( e) err_print(self._sn, 'FTP狀態', detail, status=1) break if not connect_flag: err_print(self._sn, '上傳失败', self._video_filename, status=1) return connect_flag # 如果连接失败, 直接放弃 ftp.voidcmd('TYPE I') # 二进制模式 if self._settings['ftp']['cwd']: try: ftp.cwd(self._settings['ftp']['cwd']) # 进入用户指定目录 except ftplib.error_perm as e: if show_err: err_print(self._sn, 'FTP狀態', '進入指定FTP目錄時出錯: ' + str(e), status=1) if bangumi_tag: # 番剧分类 try: ftp.cwd(bangumi_tag) except ftplib.error_perm: try: ftp.mkd(bangumi_tag) ftp.cwd(bangumi_tag) except ftplib.error_perm as e: if show_err: err_print(self._sn, 'FTP狀態', '創建目錄番劇目錄時發生異常, 你可能沒有權限創建目錄: ' + str(e), status=1) # 归类番剧 ftp_bangumi_dir = Config.legalize_filename( self._bangumi_name) # 保证合法 try: ftp.cwd(ftp_bangumi_dir) except ftplib.error_perm: try: ftp.mkd(ftp_bangumi_dir) ftp.cwd(ftp_bangumi_dir) except ftplib.error_perm as e: if show_err: detail = '你可能沒有權限創建目錄(用於分類番劇), 視頻文件將會直接上傳, 收到異常: ' + str( e) err_print(self._sn, 'FTP狀態', detail, status=1) # 删除旧的临时文件夹 nonlocal first_connect if first_connect: # 首次连接 remove_dir(tmp_dir) first_connect = False # 标记第一次连接已完成 # 创建新的临时文件夹 # 创建临时文件夹是因为 pure-ftpd 在续传时会将文件名更改成不可预测的名字 # 正常中斷传输会把名字改回来, 但是意外掉线不会, 为了处理这种情况 # 需要获取 pure-ftpd 未知文件名的续传缓存文件, 为了不和其他视频的缓存文件混淆, 故建立一个临时文件夹 try: ftp.cwd(tmp_dir) except ftplib.error_perm: ftp.mkd(tmp_dir) ftp.cwd(tmp_dir) return connect_flag
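# -----------------------------------------------------------------------------
# A hedged sketch of the upload step that typically follows connect_ftp():
# connect, work out how much of the file is already on the server, then stream
# the rest with storbinary using REST to resume. self._video_path and the block
# size are assumptions; ftp and self._video_filename come from the code above.
# -----------------------------------------------------------------------------
if connect_ftp():
    try:
        uploaded = ftp.size(self._video_filename) or 0  # resume point if the file already exists
    except ftplib.error_perm:
        uploaded = 0
    with open(self._video_path, 'rb') as f:  # assumed attribute holding the local file path
        f.seek(uploaded)
        ftp.storbinary('STOR ' + self._video_filename, f, blocksize=64 * 1024, rest=uploaded)
    ftp.quit()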