def __setitem__(self, key, val):
    if hasattr(self.__fieldFormat__, key):
        # Disabled null check:
        # if val is None:
        #     self.__addReport(key, (val, ERR_FIELD_VALUE, "null value"))
        #     return
        try:
            attr = getattr(self.__fieldFormat__, key)
            if isinstance(attr, dict):
                val = attr.get('fmt')(val)
                if attr.has_key('cond'):
                    msg = self.__getValidityMessage(attr['cond'], val)
                    if msg:
                        self.__addReport(key, (val, ERR_FIELD_VALUE, msg))
            else:
                val = attr(val)
            if val is None:
                self.__addReport(key, (val, ERR_FIELD_VALUE, "null value"))
                return
        except Exception, e:
            log.warn("Param type error : %s - %s, %s" % (key, val, e))
            self.__addReport(key, (val, ERR_FIELD_VALUE, "value type error"))
            return
    if self.has_key(key):
        if val == self[key]:
            return val
    if hasattr(self, "__fieldConvert__"):
        key = self.__fieldConvert__.get(key, key)
    return super(BaseContainer, self).__setitem__(key, val)
def show_fcompilers(dist=None):
    """Print list of available compilers (used by the "--help-fcompiler"
    option to "config_fc").
    """
    if dist is None:
        from dist import Distribution
        dist = Distribution()
        dist.script_name = os.path.basename(sys.argv[0])
        dist.script_args = ['config_fc'] + sys.argv[1:]
        dist.cmdclass['config_fc'] = config_fc
        dist.parse_config_files()
        dist.parse_command_line()
    compilers = []
    compilers_na = []
    compilers_ni = []
    for compiler in fcompiler_class.keys():
        v = 'N/A'
        try:
            c = new_fcompiler(compiler=compiler)
            c.customize(dist)
            v = c.get_version()
        except DistutilsModuleError:
            pass
        except Exception, msg:
            log.warn(msg)
        if v is None:
            compilers_na.append(("fcompiler=" + compiler, None,
                                 fcompiler_class[compiler][2]))
        elif v == 'N/A':
            compilers_ni.append(("fcompiler=" + compiler, None,
                                 fcompiler_class[compiler][2]))
        else:
            compilers.append(("fcompiler=" + compiler, None,
                              fcompiler_class[compiler][2] + ' (%s)' % v))
def process(to_process, config_path, ACTION):
    print_actions(to_process, ACTION)
    prompt = ('<Magenta>P</Magenta>rocess, <Magenta>E</Magenta>dit, '
              '<Magenta>C</Magenta>ancel? [Default: Edit]')
    act = cinput(prompt, default='E').lower()
    while act not in ['p', 'e', 'c']:
        log.warn('Invalid action.')
        act = cinput(prompt, default='E').lower()
    if act == 'c':
        log.info('Aborting')
        return False
    if act == 'p':
        for i in range(len(to_process)):
            print(('({0:d}/{1:d}) Processing {2}.'.format(
                i + 1, len(to_process), to_process[i].source))[:80], end='\r')
            try:
                getattr(action, ACTION + 'Action')(to_process[i])
            except:
                log.fail('Invalid action {}. Aborting.'.format(ACTION))
                return False
        log.info('All files processed. Thanks for using yasR.')
        return True
    if act == 'e':
        return process(
            process_edition(to_process, config_path, ACTION),
            config_path, ACTION)
def get_bitmap(pathname, size, project):
    """
    Args:
        pathname: path of image
        size: (w, h), or None to use the original image size
        project: can be None
    Returns:
        bitmap, or None
    """
    path = get_image_path(pathname, project)
    if not path:
        log.warn("icon file not found: %s", pathname)
        return None
    # The cache is keyed by (path, size), so look up the tuple, not the path.
    if (path, size) in _bmp_cache:
        return _bmp_cache[(path, size)]
    else:
        # http://trac.wxwidgets.org/ticket/15331
        prev = wx.Log.GetLogLevel()
        wx.Log.SetLogLevel(wx.LOG_Error)
        img = wx.Image(path)
        wx.Log.SetLogLevel(prev)
        if size:
            img = img.Scale(size[0], size[1], wx.IMAGE_QUALITY_HIGH)
        _bmp_cache[(path, size)] = img.ConvertToBitmap()
        return _bmp_cache[(path, size)]
def find_distribution_cvs(self, locations):
    log.trace('entry', [locations])
    for dir in locations:
        log.debug('Trying to find %s' % dir)
        if not self.cvs_dir_check(dir):
            log.debug('Failed cvs_dir_check on "%s" - skipping.' % dir)
            continue
        # Once we have a dir, let's do something with it.
        message = "Looking for %s in %s (don't worry if this fails)" % (
            self.cvs_path, dir)
        log.debug(message)
        outmsg.verbose(message)
        try:
            cvs.Checkout(self.cvs_tag, dir, self.cvs_root,
                         os.path.join(tmpdir(), dir), self.cvs_timestamp)
        except cvs.cvs_error:
            ## Ignore cvs errors, report them as missing distributions instead
            log.warn('Failed to check out %s - skipping' % dir)
            return
        dir = os.path.join(os.curdir, tmpdir(), dir)
        try:
            for f in os.listdir(dir):
                if string.lower(f) not in ["cvs"]:
                    return dir
        except OSError:
            pass
    log.trace('exit')
    return None
def getPkgOpen(subpkgs):
    """Fetch the Open module.

    Module repository: http://ufo.sogou-inc.com/git/open.git
    """
    targetfolder = os.path.join(conf.getConfig()['path'], 'static', 'js')
    utils.createfolder(TEMP_FOLDER)
    if not os.path.exists(targetfolder):
        utils.createfolder(targetfolder)
    subpkgs = subpkgs or conf.getConfig()['PKG_OPEN']
    subpkgs.insert(0, 'common')
    os.system('git clone http://ufo.sogou-inc.com/git/open.git ' +
              os.path.join(TEMP_FOLDER, 'open'))
    successpkg = []
    for pkg in subpkgs:
        source = os.path.join(TEMP_FOLDER, 'open', pkg)
        if not os.path.exists(source):
            log.warn('Sub package ' + pkg + ' does not exist in Open.')
            continue
        utils.copyfiles(source, os.path.join(targetfolder, 'open', pkg))
        successpkg.append(pkg)
    utils.removefolder(TEMP_FOLDER)
    log.success('Added Open packages: ' + ','.join(successpkg))
def get(self):
    """GET request; takes path as an argument."""
    path = self.get_path()
    base = util.get_base_path_adjustment()
    files = []
    try:
        file_list = os.listdir(os.path.join(base, path))
        for i in file_list:
            try:
                filename = os.path.join(base, path, i)
                is_file = os.path.isfile(filename)
                confirm = ''
                if is_file and os.path.getsize(filename) > 10485760:
                    confirm = 'large'
                if is_file and not util.is_text_file(filename):
                    confirm = 'binary'
                files.append({
                    'name': i,
                    'is_file': is_file,
                    'confirm': confirm
                })
            except IOError as error:
                log.warn(error)
    except Exception as error:  # pylint: disable=W0703
        log.warn(error)
    self.do_output(files, base)
def create_module_filter_list(self):
    """
    Creates the list of modules in which coverage information should be
    collected. This list is created by querying frida for the loaded modules
    and comparing them to the modules the user selected in the project
    settings.

    Must be called after frida was attached to the target and before any
    coverage is collected.
    """
    if self.modules is None:
        log.warn("filterModules: self.modules is None!")
        return False
    self.watched_modules = []
    for module in self.modules:
        if module["path"] in self.project.modules:
            self.watched_modules.append(module)
    if len(self.watched_modules) == 0:
        paths = "\n".join([m["path"] for m in self.modules])
        log.warn("filterModules: No module was selected! Possible choices:\n"
                 + paths)
        return False
    else:
        paths = "\n".join([m["path"] for m in self.watched_modules])
        log.info("Filter coverage to only include the following modules:\n"
                 + paths)
        return True
def _move_extra_results(self, src, dest, rv):
    assert src
    assert dest
    if os.path.isdir(src) and os.path.isdir(dest):
        for f in os.listdir(src):
            sp = os.path.join(src, f)
            dp = os.path.join(dest, f)
            self._move_extra_results(sp, dp, rv)
        os.rmdir(src)
    else:
        sf = state.File(name=dest)
        if sf == self.delegate:
            dest = os.path.join(sf.tmpfilename("out"), sf.basename())
            debug("rename %r %r\n", src, dest)
            os.rename(src, dest)
            sf.copy_deps_from(self.target)
        else:
            sf.dolock().trylock()
            if sf.dolock().owned == state.LOCK_EX:
                try:
                    sf.build_starting()
                    debug("rename %r %r\n", src, dest)
                    os.rename(src, dest)
                    sf.copy_deps_from(self.target)
                    sf.build_done(rv)
                finally:
                    sf.dolock().unlock()
            else:
                warn("%s: discarding (parallel build)\n", dest)
                unlink(src)
def detach(self):
    try:
        self._frida_script_obj.unload()
    except frida.InvalidOperationError as e:
        log.warn("Could not unload frida script: " + str(e))
    self.frida_session.detach()
def get_module_map(self):
    if self.frida_script is None:
        log.warn("getModuleMap: self.frida_script is None!")
        return None
    try:
        module_map = self.frida_script.get_maps()
    except frida.core.RPCException as e:
        log.info("RPCException: " + repr(e))
        return None
    self.modules = []
    for image in module_map:
        idx = image['id']
        path = image['path']
        base = int(image['base'], 0)
        end = int(image['end'], 0)
        size = image['size']
        m = {
            'id': idx,
            'path': path,
            'base': base,
            'end': end,
            'range': range(base, end),
            'size': size
        }
        self.modules.append(m)
    return self.modules
def _flush(self, buf):
    if self.flushcmd == "redo" and self.recursive:
        f = buf[:-1]
        olddepth = vars.DEPTH
        vars.DEPTH = vars.DEPTH + ' '
        try:
            main([os.path.join(self.target.dirname(), f)])
        finally:
            vars.DEPTH = olddepth
        self.doing = buf
    elif self.flushcmd == "redo_done" and self.doing == buf:
        self.doing = None
    elif self.flushcmd == "std" and self.doing is None:
        sys.stdout.write(buf)
        sys.stdout.flush()
    elif self.flushcmd == "err" and self.doing is None:
        sys.stderr.write(buf)
        sys.stderr.flush()
    elif self.flushcmd == "log" and not self.recursive:
        LOGFILE.write(buf)
        LOGFILE.flush()
    elif self.flushcmd == "redo_err" and self.recursive:
        err(" " + buf)
    elif self.flushcmd == "redo_warn" and self.recursive:
        warn(" " + buf)
def set_band_dic(self):
    # Each entry: {'Band': ..., 'Radiance file': ..., 'Scaling factor': ...,
    #              'Solar flux file': ...}
    list_o = []
    print self.dim_file
    xmldoc = minidom.parse(self.dim_file)
    if self.dim_data_exist:
        input_list = glob.glob(os.path.join(self.dim_data, '*radiance*img'))
        for k, rec in enumerate(input_list):
            band_info = {}
            file_name = os.path.basename(rec)
            radical = file_name.replace('.hdr', '')
            scaling_factor = (
                xmldoc.getElementsByTagName('Spectral_Band_Info')[k]
            ).getElementsByTagName('SCALING_FACTOR')[0].firstChild.data
            bd = np.int(
                (os.path.basename(rec).split('_')[0]).replace('Oa', ''))
            band_info['Band'] = bd
            band_info['Radiance file'] = input_list[k]
            band_info['Scaling factor'] = scaling_factor
            band_info['Solar flux file'] = glob.glob(
                os.path.join(self.dim_data,
                             'solar_flux_band_' + str(bd) + '.img'))[0]
            list_o.append(band_info)
        self.band_dict = list_o
        self.band_dict_exist = True
        return list_o
    else:
        log.warn(' Dim data does not exist')
def send_fuzz_payload_in_process(self, payload, original_corpus_file, corpus):
    """
    Send fuzzing payload to target process by invoking the target function
    directly in frida
    """
    # Call function under fuzz:
    encoded = payload.hex()
    cov = None
    try:
        cov = self.frida_script.fuzz_internal(encoded)
    except frida.core.RPCException as e:
        log.info("RPCException: " + repr(e))
        truncated_payload = str(binascii.hexlify(payload))[:25]
        log.warn("had payload: " + truncated_payload + "[...]")
        if original_corpus_file not in self.corpus_blacklist:
            log.info("adding %s to corpus blacklist due to crash."
                     % original_corpus_file)
            self.corpus_blacklist.append(original_corpus_file)
        # save crash file
        crash_file = self.project.crash_dir + \
            time.strftime("/%Y%m%d_%H%M%S_crash")
        with open(crash_file + "_" + str(self.project.pid), "wb") as f:
            f.write(bytes(str(self.project.seed) + "\n", "utf8"))
            f.write(bytes(repr(e), "utf8") + bytes('\n', "utf8"))
            f.write(binascii.hexlify(payload))
        log.info("Payload is written to " + crash_file)
        self.project.crashes += 1
def getMetaData(fullname, playlists):
    log.info('accessing metadata...')
    index = 0
    tagInfo = []
    for track in playlists:
        name = playlists[track]
        if os.path.isfile(name):
            try:
                filename = os.path.basename(name)
                log.success('-------------------------')
                tag = TinyTag.get(name)
                if tag.title != '' or tag.artist != '':
                    song = str(tag.title + ':' + tag.artist)
                    tagInfo.append(song)
                    log.warn('tag info:', filename.encode("ascii", "ignore"))
                    log.info('Artist:', tag.artist)
                    log.info('Album:', tag.album)
                    log.info('Title:', tag.title.encode("ascii", "ignore"))
                    log.info('Track number:', tag.track)
                    index += 1
                else:
                    log.warn('WARN: no id3 info provided')
            except Exception as e:
                log.err("An error occurred while getting metadata of the file:", name)
                log.err("Error:", e)
        else:
            log.err("The file: %s does not exist, check the path or filename" % (name))
    print
    log.err('track processing:', str(index))
    saveMetaData(fullname, tagInfo, index)
    return tagInfo
def yield_events(self):
    """Yields events from LHE file."""
    if len(self.header) <= 0:
        self.load_header()
    while len(self.footer) <= 0:
        for line in self.infile.xreadlines():  # read event
            self.event.append(line)
            if "</event>" in line:
                self.events_counter += 1
                yield self.event
                self.event = []
                break
        for line in self.infile.xreadlines():  # read what's after the event
            if "<event>" in line:
                self.event.append(line)
                break
            self.footer.append(line)
    if len(self.event) > 0:
        log.warn("Last event has no closing tag!")
        self.events_counter += 1
        yield self.event
        self.event = []
def admin_analysis():
    if not request.json:
        abort(400)
    if request.method == 'POST':
        try:
            analysis = request.json['analysis']
            source_id = analysis['source_id']
            dimensions = analysis['dimensions']
            metric = analysis['metric']
            query = analysis['query']
            result = dal.new_analysis(source_id, dimensions, metric, query)
            return responsify(result)
        except Exception as e:
            log.warn(e)
            abort(400)
    if request.method == 'PUT':
        try:
            analysis = request.json['analysis']
            uid = analysis['analysis_id']
            result = dal.update_analysis(uid, analysis)
            return responsify(result)
        except Exception as e:
            log.warn(e)
            abort(400)
def handleDB(it):
    for jj in range(10):
        ll, origSet = getFive()
        if not ll:
            if it > 2:
                log.info('empty turning off')
                # time.sleep(120)  # wait for master to finish
                turnOff()
            return
        browser = selenium.getBrowser()
        log.info(str(len(ll)) + str(ll) + str(jj))
        try:
            i = 0
            for uns in ll:
                if i > 6:
                    break
                i += 1
                log.info('hashh', uns.hashh)
                res = unsubscribe(uns, browser)
                if not res:
                    log.info('failed confirmation', uns.hashh)
                    addEmailToSqlAnalytics(uns, False)
                else:
                    log.info('confirmed unsub')
                    commit('insert into usercount (another) values (1)')
                    addEmailToSqlAnalytics(uns, True)
                # browser = selenium.refreshBrowser(browser)
        except Exception as e:
            log.warn(e)
        log.info('deleting from unsubs ' + str(origSet))
        for ss in origSet:
            commit('delete from unsubs where hash=%s', ss)
        selenium.closeBrowser(browser)
def done(fn):
    if fn.cancelled():
        log.warn('{}: cancelled'.format(fn.arg))
    elif fn.done():
        error = fn.exception()
        if error:
            log.error('error returned: {}'.format(error))
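# Usage sketch for the `done` callback above (illustrative, not from the
# original source): it assumes `fn` is a concurrent.futures.Future with an
# `arg` attribute attached before submission, because the callback reads
# `fn.arg`. The names `work` and `item` are hypothetical.
from concurrent.futures import ThreadPoolExecutor

def work(item):
    return item * 2  # placeholder task

with ThreadPoolExecutor() as pool:
    future = pool.submit(work, 21)
    future.arg = 21                   # `done` reads fn.arg when logging
    future.add_done_callback(done)    # invoked when the future finishes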
def import_band(self, list):
    gv = self.gv
    # Browse the dicts and run gdal_translate to convert each band to a
    # GeoTIFF in <out>
    list_o = list
    for k, band_info in enumerate(list):
        band = band_info['Band']
        log.info(" processing of band: " + str(band))
        input_image = band_info['Radiance file']
        new_file = os.path.join(self.tiff_import_dir,
                                'band_' + str(band) + '.tif')
        # Check if the file was already imported and geocoded
        geo_file = os.path.join(self.tiff_import_dir,
                                'band_' + str(band) + '_geo.tif')
        if not os.path.exists(geo_file):
            cmd = gv.gdal_translate_bin + ' '
            cmd += '-of GTiff '
            cmd += '-ot Float32 '
            cmd += input_image + ' '
            cmd += new_file + ' '
            os.system(cmd)
        else:
            log.warn(' Geo Radiance File already processed')
            log.info(' File exists: ' + new_file)
        band_info['Radiance file'] = new_file
        log.info(' Radiance file: ' + (list_o[k])['Radiance file'])
def build_asm2(dirs, logLevel=1):
    from os.path import join, exists, split
    for dir in dirs:
        path = join(current_dir, dir)
        if not exists(path):
            log.warn(f'Unknown directory “{path}”!', logLevel)
            return
        directories, files = _getDirsAndFiles(path)
        build_asm2(directories, logLevel + 1)
        for file in glob.glob(join(path, '*.s'), recursive=False):
            log.info('Compiling file “{}”...'.format(file), logLevel)
            path2, filename = split(file)
            command = [AS] + AS_FLAGS + [
                '-o', join(path2, filename.replace('.s', '.o'))
            ] + [file]
            if props['debug']:
                log.debug('Command used: {}'.format(' '.join(command)), logLevel)
            cmd = subprocess.Popen(command, stdout=subprocess.PIPE)
            cmd.wait()
            if cmd.returncode != 0:
                log.error('Subcommand returned with a non-0 exit code.', logLevel)
                exit(cmd.returncode)
def save_to_files(self, directory: str) -> None:
    """
    Persist this Vocabulary to files so it can be reloaded later.
    Each namespace corresponds to one file.

    Parameters
    ----------
    directory : ``str``
        The directory where we save the serialized vocabulary.
    """
    os.makedirs(directory, exist_ok=True)
    if os.listdir(directory):
        warn("vocabulary serialization directory %s is not empty", directory)
    with codecs.open(os.path.join(directory, NAMESPACE_PADDING_FILE),
                     'w', 'utf-8') as namespace_file:
        for namespace_str in self._non_padded_namespaces:
            print(namespace_str, file=namespace_file)
    for namespace, mapping in self._index_to_token.items():
        # Each namespace gets written to its own file, in index order.
        with codecs.open(os.path.join(directory, namespace + '.txt'),
                         'w', 'utf-8') as token_file:
            num_tokens = len(mapping)
            start_index = 1 if mapping[0] == self._padding_token else 0
            for i in range(start_index, num_tokens):
                print(mapping[i].replace('\n', '@@NEWLINE@@'), file=token_file)
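# Round-trip note (an assumption about the surrounding class, which is not
# shown here): a vocabulary saved this way is typically restored from the
# same directory by a companion loader, e.g.:
#
#     vocab.save_to_files('/tmp/vocab')
#     vocab = Vocabulary.from_files('/tmp/vocab')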
def init(args):
    if os.path.exists(args.project):
        log.warn("Project '%s' already exists!" % args.project)
        return
    log.info("Creating project '%s'!" % args.project)
    if not project.createProject(args.project):
        log.warn("Could not create project!")
def _set_dataclass_value(self, data, name, converter):
    # TODO: Support nesting unlimited levels
    # https://github.com/jacebrowning/datafiles/issues/22
    nested_data = data.get(name)
    if nested_data is None:
        return
    log.debug(f'Converting nested data to Python: {nested_data}')
    dataclass = getattr(self._instance, name)
    if dataclass is None:
        for field in dataclasses.fields(converter.DATACLASS):
            if field.name not in nested_data:  # type: ignore
                nested_data[field.name] = None  # type: ignore
        dataclass = converter.to_python_value(nested_data,
                                              target_object=dataclass)
    try:
        mapper = dataclass.datafile
    except AttributeError:
        log.warn(f"{dataclass} has not yet been patched to have 'datafile'")
        mapper = create_mapper(dataclass)
    for name2, converter2 in mapper.attrs.items():
        _value = nested_data.get(  # type: ignore
            name2, mapper._get_default_field_value(name2))
        value = converter2.to_python_value(
            _value, target_object=getattr(dataclass, name2))
        log.debug(f"'{name2}' as Python: {value!r}")
        setattr(dataclass, name2, value)
    log.debug(f"Setting '{name}' value: {dataclass!r}")
    setattr(self._instance, name, dataclass)
def main():
    args = parse_args()
    if args.command != "init":
        # Load project
        if not project.loadProject(args.project):
            log.warn("Error: Could not load project '%s'!" % args.project)
            return
    if args.seed:
        project.getInstance().seed = args.seed
    if args.command in ["fuzz", "replay"]:
        # Create Fuzzer and attach to target
        fuzzer = FridaFuzzer(project.getInstance())
        if not fuzzer.attach():
            return
        # Invoke subcommand function with instantiated fuzzer
        args.func(args, fuzzer)
        log.info("Detach Fuzzer ...")
        fuzzer.detach()
    else:
        # Invoke subcommand function
        args.func(args)
    log.info("Done")
    return
def userFunction(self):
    print "++++++ Helper2FileClass: userFunction"
    error(' ++ Helper2FileClass userFunction: Error logging\n')
    warn(' ++ Helper2FileClass userFunction: warn logging\n')
    debug(' ++ Helper2FileClass userFunction: debug logging\n')
    info(' ++ Helper2FileClass userFunction: info logging\n')
    output(' ++ Helper2FileClass userFunction: output logging\n')
def get_head_commit(self):
    args = ['rev-parse', '--verify', 'HEAD^{commit}']
    try:
        return self._gitout(args)
    except OSError as err:
        warn("get_head_commit() failed: " + str(err))
        return None
def __init__(self):
    print "++++++ Helper2FileClass __init__"
    error(' ++ Helper2FileClass __init__: Error logging\n')
    warn(' ++ Helper2FileClass __init__: warn logging\n')
    debug(' ++ Helper2FileClass __init__: debug logging\n')
    info(' ++ Helper2FileClass __init__: info logging\n')
    output(' ++ Helper2FileClass __init__: output logging\n')
def generate_def(dll, dfile):
    """Given a dll file location, get all its exported symbols and dump them
    into the given def file. The .def file will be overwritten"""
    dump = dump_table(dll)
    for i in range(len(dump)):
        if _START.match(dump[i]):
            break
    else:
        # No line matched (or dump was empty): there is no symbol table.
        raise ValueError("Symbol table not found")
    syms = []
    for j in range(i + 1, len(dump)):
        m = _TABLE.match(dump[j])
        if m:
            syms.append((int(m.group(1).strip()), m.group(2)))
        else:
            break
    if len(syms) == 0:
        log.warn('No symbols found in %s' % dll)
    d = open(dfile, 'w')
    d.write('LIBRARY %s\n' % os.path.basename(dll))
    d.write(';CODE PRELOAD MOVEABLE DISCARDABLE\n')
    d.write(';DATA PRELOAD SINGLE\n')
    d.write('\nEXPORTS\n')
    for s in syms:
        # d.write('@%d %s\n' % (s[0], s[1]))
        d.write('%s\n' % s[1])
    d.close()
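# Illustrative use of generate_def (an assumption, not from the original
# source): dump_table() relies on an external symbol-dump tool, so this only
# works where that toolchain is on PATH. The file names are hypothetical.
#
#     generate_def('python27.dll', 'python27.def')
#     # -> writes a LIBRARY/EXPORTS .def file next to the current directory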
def init(conf_path=None, mutt_conf_path=None, read_muttrc=True):
    global rc, config_path
    rc = muttrc.Muttrc(defaults=mutt_defaults)
    if read_muttrc:
        if not mutt_conf_path:
            paths = [os.path.expanduser(p)
                     for p in ['~/.muttrc', '~/.mutt/muttrc']]
            mutt_conf_path = paths[0] if os.path.exists(paths[0]) else paths[1]
        rc.parse(mutt_conf_path)
    defaults['editor_type'] = default_editor_type(rc.get('editor'))
    rc.merge_defaults(defaults)
    # mailboxes command is reused
    del rc.mailboxes[:]
    if conf_path:
        config_path = os.path.realpath(conf_path)
    if not os.path.exists(config_path):
        save_default()
        log.error('config file did not exist, created default one in: %s',
                  config_path)
    else:
        rc.parse(config_path)
    options = rc.variables
    options['output_file'] = rc.expand_path(options['output_file'])
    if options['gen_sig'] and (not options['sig_dashes']
                               or options['sig_on_top']):
        log.error('config error: if gen_sig=yes, these settings are required: '
                  'sig_dashes=yes and sig_on_top=no')
    if options['exclude_mails_to_me'] and not rc.alternates:
        log.warn('config conflict: exclude_mails_to_me is set, '
                 'but no "alternates" are specified!')
        options['exclude_mails_to_me'] = False
    if options['only_include_mails_from_me'] and not rc.alternates:
        log.warn('config conflict: only_include_mails_from_me is set, '
                 'but no "alternates" are specified!')
        options['only_include_mails_from_me'] = False
def parse_tag_grep(cls, file, settings):
    for line in open(file).readlines():
        attr = {}
        srcfile, comment = None, None
        grep_nH_rs_match = grep_nH_rs.match(line)
        if grep_nH_rs_match:
            srcfile, linenr, comment = grep_nH_rs_match.groups()
            attr['linenr'] = linenr
        rad_cmnt_rs_match = rad_cmnt_rs.match(line)
        if rad_cmnt_rs_match:
            groups = list(rad_cmnt_rs_match.groups())
            srcfile, comment = groups.pop(0), groups.pop()
            fa = groups[0].count(':')
            fr = map(int, groups[0].strip(':').split('-'))
            if fa == 4:
                inner_char_range = fr
                attr['inner_range'] = inner_char_range
        if not srcfile:
            log.warn('No match for %r', line)
            continue
        attr['issue_id'] = try_parse_issue_id(settings.project_slug, comment)
        yield Comment(srcfile, text=comment, **attr)
def terminate(self, signal_number, stack_frame):
    """Received a shutdown signal; raise an exception on the main thread."""
    log.warn("Terminating on signal %r", signal_number)
    exception = SystemExit(
        "Terminating on signal %(signal_number)r" % vars())
    self._main_let.throw(exception)
def _build_import_library_x86():
    """ Build the import libraries for Mingw32-gcc on Windows
    """
    lib_name = "python%d%d.lib" % tuple(sys.version_info[:2])
    lib_file = os.path.join(sys.prefix, 'libs', lib_name)
    out_name = "libpython%d%d.a" % tuple(sys.version_info[:2])
    out_file = os.path.join(sys.prefix, 'libs', out_name)
    if not os.path.isfile(lib_file):
        log.warn('Cannot build import library: "%s" not found' % (lib_file))
        return
    if os.path.isfile(out_file):
        log.debug('Skip building import library: "%s" exists' % (out_file))
        return
    log.info('Building import library (ARCH=x86): "%s"' % (out_file))
    from numpy.distutils import lib2def
    def_name = "python%d%d.def" % tuple(sys.version_info[:2])
    def_file = os.path.join(sys.prefix, 'libs', def_name)
    nm_cmd = '%s %s' % (lib2def.DEFAULT_NM, lib_file)
    nm_output = lib2def.getnm(nm_cmd)
    dlist, flist = lib2def.parse_nm(nm_output)
    lib2def.output_def(dlist, flist, lib2def.DEF_HEADER, open(def_file, 'w'))
    dll_name = "python%d%d.dll" % tuple(sys.version_info[:2])
    args = (dll_name, def_file, out_file)
    cmd = 'dlltool --dllname %s --def %s --output-lib %s' % args
    status = os.system(cmd)
    # for now, fail silently
    if status:
        log.warn('Failed to build import library for gcc. Linking will fail.')
    # if not success:
    #     msg = "Couldn't find import library, and failed to build it."
    #     raise DistutilsPlatformError, msg
    return
def item_from_table_ex(loot_table, loot_level=1):
    if loot_table is None or loot_table not in table:
        log.warn("loot", "Couldn't find loot table {}", loot_table)
        return None
    loot_table = table[loot_table]
    chances = {k: v.get('weight', 10) for k, v in loot_table.items()}
    item_id = main.random_choice(chances)
    choice = loot_table[item_id]
    import items
    prototype = items.table()[item_id]
    material = choice.get('material', None)
    quality = choice.get('quality', None)
    if prototype['category'] == 'weapon':
        if material is None:
            material = choice.get('material', choose_weapon_material(loot_level))
        if quality is None:
            quality = choice.get('quality', choose_quality(loot_level))
    if prototype['category'] == 'armor':
        if material is None:
            material = choice.get('material', choose_armor_material(loot_level))
        if quality is None:
            quality = choice.get('quality', choose_quality(loot_level))
    if quality is None:
        quality = ''
    return main.create_item(item_id, material, quality)
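# `main.random_choice` is not shown in this file; a plausible implementation
# of the weighted pick it performs (a sketch under that assumption, not the
# project's actual code) would be:
import random

def random_choice(chances):
    """Pick a key from `chances` with probability proportional to its weight."""
    total = sum(chances.values())
    roll = random.uniform(0, total)
    upto = 0
    for key, weight in chances.items():
        upto += weight
        if roll <= upto:
            return key
    return key  # floating-point edge case: fall back to the last key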
def main():
    result = check_who()
    pprint(result)
    invader, myself, unknown = [], [], []
    for data in result:
        try:
            city = data.get('city').lower()
        except:
            log.warn('Get city info failed! (%s)' % data)
            unknown.append(data)
            continue
        myplace = False
        for each in MYPLACE:
            if each == city:
                myplace = True
                break
        if myplace:
            # only the city needs to be shown when it's me
            myself.append([each, data.get('ipAddress')])
        else:
            invader.append(data)
    if invader:
        log.warn('Invader found: %s' % pformat(invader))
    if myself:
        log.info('Yourself found: %s' % pformat(myself))
    if unknown:
        log.info('unknown: %s' % pformat(unknown))
def __init__(self):
    self.base_path = Path().home() / '.config' / 'alacritty'
    if not self.base_path.exists():
        raise ConfigError(f'Config directory not found: {self.base_path}')
    self.config_file = self.base_path / 'alacritty.yml'
    if not self.config_file.is_file():
        log.warn('Config file not found')
        self.config_file.touch()
        print('Created config file =>', end=' ')
        log.color_print(self.config_file, log.Color.BLUE)
    self.config = self._load(self.config_file)
    if self.config is None:
        self.config = {}
        log.warn('Alacritty config file was empty')
    self.resources = {
        'themes': {
            'type': 'Themes directory',
            'path': self.base_path / 'themes',
            'exists': lambda: self.resources['themes']['path'].is_dir(),
            'create': lambda: self.resources['themes']['path'].mkdir()
        },
        'fonts': {
            'type': 'Fonts file',
            'path': self.base_path / 'fonts.yaml',
            'exists': lambda: self.resources['fonts']['path'].is_file(),
            'create': lambda: self.resources['fonts']['path'].touch()
        }
    }
def change_font(self, font: str):
    if 'font' not in self.config:
        self.config['font'] = {}
        log.warn('"font" prop was not present in alacritty.yml')
    fonts_file = self._resource_path('fonts')
    fonts = self._load(fonts_file)
    if fonts is None:
        raise ConfigError(f'File "{fonts_file}" is empty')
    if 'fonts' not in fonts:
        raise ConfigError(f'No font config found in "{fonts_file}"')
    fonts = fonts['fonts']
    if font not in fonts:
        raise ConfigError(f'Config for font "{font}" not found')
    font_types = ['normal', 'bold', 'italic']
    if isinstance(fonts[font], str):
        # a plain string is expanded into all three variants
        font_name = fonts[font]
        fonts[font] = {}
        for t in font_types:
            fonts[font][t] = font_name
    if not isinstance(fonts[font], Mapping):
        raise ConfigError(f'Font "{font}" has wrong format')
    for t in font_types:
        if t not in fonts[font]:
            raise ConfigError(f'Font "{font}" does not have "{t}" property')
        if t not in self.config['font']:
            self.config['font'][t] = {'family': 'tmp'}
        self.config['font'][t]['family'] = fonts[font][t]
    log.ok(f'Font {font} applied')
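# The structure change_font expects from fonts.yaml, shown as the Python
# object _load would return (illustrative; the font names are hypothetical).
# A plain string is expanded to all three variants; a mapping must provide
# 'normal', 'bold' and 'italic' explicitly.
fonts_example = {
    'fonts': {
        'Fira': 'Fira Code',  # string form: used for normal, bold and italic
        'Iosevka': {
            'normal': 'Iosevka Term',
            'bold': 'Iosevka Term Bold',
            'italic': 'Iosevka Term Italic',
        },
    }
}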
def slotTick(self):
    try:
        if not self.method():
            self.stop()
            del self
    except BaseException, e:
        log.warn('TIMER FAILED: %s' % str(e))
def userFunction(self):
    print "------ MainFileClassA: userFunction"
    error(' -- MainFileClassA userFunction: Error logging\n')
    warn(' -- MainFileClassA userFunction: warn logging\n')
    debug(' -- MainFileClassA userFunction: debug logging\n')
    info(' -- MainFileClassA userFunction: info logging\n')
    output(' -- MainFileClassA userFunction: output logging\n')
def get(self):
    """GET request; takes an argument file as the full path of the file
    to load into the editor.
    """
    server = self.get_connection()
    if server:
        details = self.get_connection_details()
        filename = self.get_file()
        text = ''
        saved = False
        try:
            tmp_path = tempfile.mkstemp()
            tmp_path = tmp_path[1]
            server.get(os.path.join(details['path'], filename), tmp_path)
            file_handler = open(tmp_path, 'r')
            text = file_handler.read()
            file_handler.close()
            os.remove(tmp_path)
            saved = True
        except Exception as error:  # pylint: disable=W0703
            log.warn(error)
        self.do_output(text, self.generate_prefix(details),
                       self.get_connection_id(),
                       '&connection=' + self.get_connection_id(), saved)
        server.close()
def __init__(self,
             file_uri: str,
             encoding: str = DEFAULT_ENCODING,
             cache_dir: str = None) -> None:
    self.uri = file_uri
    self._encoding = encoding
    self._cache_dir = cache_dir
    self._archive_handle: Any = None  # only if the file is inside an archive

    main_file_uri, path_inside_archive = parse_embeddings_file_uri(file_uri)
    main_file_local_path = cached_path(main_file_uri, cache_dir=cache_dir)

    if zipfile.is_zipfile(main_file_local_path):  # ZIP archive
        self._open_inside_zip(main_file_uri, path_inside_archive)
    elif tarfile.is_tarfile(main_file_local_path):  # TAR archive
        self._open_inside_tar(main_file_uri, path_inside_archive)
    else:  # all the other supported formats, including uncompressed files
        if path_inside_archive:
            raise ValueError('Unsupported archive format: %s' % main_file_uri)

        # All the python packages for compressed files share the same
        # interface of io.open
        extension = get_file_extension(main_file_uri)
        package = {
            '.txt': io,
            '.vec': io,
            '.gz': gzip,
            '.bz2': bz2,
            '.lzma': lzma,
        }.get(extension, None)
        if package is None:
            warn('The embeddings file has an unknown file extension "%s". '
                 'We will assume the file is an (uncompressed) text file',
                 extension)
            package = io
        self._handle = package.open(main_file_local_path, 'rt',
                                    encoding=encoding)  # type: ignore

    # To use this with tqdm we'd like to know the number of tokens. It's
    # possible that the first line of the embeddings file contains this:
    # if it does, we want to start iteration from the 2nd line, otherwise we
    # want to start from the 1st.
    # Unfortunately, once we read the first line, we cannot move back the
    # file iterator because the underlying file may be "not seekable";
    # we use itertools.chain instead.
    first_line = next(self._handle)  # this moves the iterator forward
    self.num_tokens = EmbeddingsTextFile._get_num_tokens_from_first_line(
        first_line)
    if self.num_tokens:
        # the first line is a header line: start iterating from the 2nd line
        self._iterator = self._handle
    else:
        # the first line is not a header line: start iterating from the 1st line
        self._iterator = itertools.chain([first_line], self._handle)
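# _get_num_tokens_from_first_line is referenced but not shown here. A sketch
# of the header heuristic it implies (an assumption about its behavior, not
# the library's verbatim code): word2vec-style files start with a line that
# is one or two integers ("<num_tokens> [dim]"); anything else is a vector.
from typing import Optional

def _get_num_tokens_from_first_line(line: str) -> Optional[int]:
    fields = line.split(' ')
    if 1 <= len(fields) <= 2:
        try:
            int_fields = [int(x) for x in fields]
        except ValueError:
            return None  # first line looks like an embedding, not a header
        return max(int_fields)  # header: the larger number is the token count
    return None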
def handle_regnick(self):
    """Handles identifying to NickServ."""
    ret = "id %s" % tools.password
    log.warn("NICK-REG", "%s" % self.nick)
    self.respond(ret, None, "nickserv")
def OnNotifyMic1(self, ins):
    ssid = self.uid2ssid.get(ins.user.uid)
    # the user changed ssid but has not logged in yet
    if ssid and ssid != ins.subsid:
        log.warn("register ssid not match. uid:%d" % ins.user.uid)
        return True
    return None
def load_level_info(default):
    try:
        return load(level_info_path, level_info_file)
    except:
        log.warn("Couldn't load level file, returning default level {}".format(
            default))
        return {"level": default}
def get(self):
    """GET request; takes path as an argument."""
    server = self.get_connection()
    if server:
        details = self.get_connection_details()
        path = self.get_path()
        base = details['path']
        files = []
        try:
            file_list = server.listdir(os.path.join(base, path))
            for i in file_list:
                try:
                    filename = os.path.join(base, path, i)
                    is_file = True
                    command = ''.join([
                        'if test -d ',
                        filename.replace(' ', '\\ '),
                        '; then echo -n 1; fi'
                    ])
                    if len(server.execute(command)):
                        is_file = False
                    confirm = ''
                    files.append({
                        'name': i,
                        'is_file': is_file,
                        'confirm': confirm
                    })
                except Exception as error:  # pylint: disable=W0703
                    log.warn(error)
        except Exception as error:  # pylint: disable=W0703
            log.warn(error)
        self.do_output(files, base, self.generate_prefix(details),
                       '&connection=' + self.get_connection_id())
        server.close()
def __init__(self):
    print "------ MainFileClassA __init__"
    error(' -- MainFileClassA __init__: Error logging\n')
    warn(' -- MainFileClassA __init__: warn logging\n')
    debug(' -- MainFileClassA __init__: debug logging\n')
    info(' -- MainFileClassA __init__: info logging\n')
    output(' -- MainFileClassA __init__: output logging\n')
def tail_log(self):
    """Tail log."""
    if not self._log_filename:
        log.warn("Log not enabled.")
        return False
    self.ssh.run_command("tail " + self._log_filename)
def runloop(self):
    while self.running:
        item = self.q.get()
        try:
            item()
        except:
            log.warn(traceback.format_exc())
        self.q.task_done()
def socket_add(socket, method, condition=IO_READ):
    """The first argument specifies a socket, the second argument has to be
    a function that is called whenever there is data ready in the socket."""
    global _qt_socketIDs
    if _get_fd(socket) in map(lambda s: _get_fd(s),
                              _qt_socketIDs[condition].keys()):
        log.warn('Socket %d already registered for condition %d'
                 % (_get_fd(socket), condition))
        return
    _qt_socketIDs[condition][socket] = Socket(socket, method, condition)
def __init__(self):
    print "***** HelperFileClass __init__"
    error(' ** HelperFileClass __init__: Error logging\n')
    warn(' ** HelperFileClass __init__: warn logging\n')
    debug(' ** HelperFileClass __init__: debug logging\n')
    info(' ** HelperFileClass __init__: info logging\n')
    output(' ** HelperFileClass __init__: output logging\n')
    obj_help = Helper2FileClass()
def run():
    global server, running
    log.info('HTTP', 'Starting server at http://localhost:%s' % config.http_port)
    server = HTTPServer(('', config.http_port), Handler)
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        log.warn('HTTP', 'Stopping by <Control-C>')
def parse_event(lines):
    """Takes event lines from LHE file and returns list of particles."""
    header = parse_event_header(lines[1])
    if header["npart"] != len(lines) - 3:
        log.warn("Warning: Number of particles=%i and "
                 "number of lines=%i don't agree!"
                 % (header["npart"], len(lines)))
    return list(parse_particle_line(line) for line in lines[2:-1])
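# The indexing above relies on the LHE <event> block layout (illustrative,
# field values hypothetical): lines[0] is "<event>", lines[1] the event
# header, lines[2:-1] one particle per line, and lines[-1] "</event>" --
# hence the expected particle count of len(lines) - 3.
example_event = [
    "<event>",
    " 2  661  0.50000E+00 ...",  # header: npart, process id, weight, ...
    "  2  -1 ...",               # particle line 1
    " -2  -1 ...",               # particle line 2
    "</event>",
]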
def run(self, chunksize=1):
    try:
        self.pool.map(self.map_func, self.inputs, chunksize=chunksize)
    except KeyboardInterrupt:
        warn("^C pressed")
        self.pool.terminate()
    except:
        import traceback
        traceback.print_exc()