def getRstHtml(self, includeStyleSheet=True, includeTitle=True, includeMeta=False, styleForWebKit=False, webenv=False):
    """Render the current edit box's reStructuredText as HTML via docutils.

    NOTE(review): the keyword arguments are accepted for interface parity
    with getHtml() but are not used by this docutils rendering path.
    """
    from docutils.core import publish_string
    source = convertToUnicode(self.editBoxes[self.ind].toPlainText())
    # WpGen directives: expand %HTMLDIR%, un-escape the escaped form.
    source = source.replace('%HTMLDIR%', 'html')
    source = source.replace('%\\HTMLDIR%', '%HTMLDIR%')
    settings = {
        'no_generator': True,
        'no_source_link': True,
        'tab_width': 4,
        'file_insertion_enabled': False,
        'raw_enabled': False,
        'stylesheet_path': None,
        'traceback': True,
        'halt_level': 5,
        'output_encoding': 'unicode',
        # Bundled default stylesheet/template.
        'stylesheet': pathJoin(templatesDir, "default.css"),
        'template': pathJoin(templatesDir, "template.html"),
    }
    return publish_string(source, writer_name='html', settings_overrides=settings)
def copytree(src, dst, symlinks=False):
    """Recursively copy the tree at src into dst, then replicate the source
    directory's mode and timestamps onto dst.

    symlinks: when True, symlinks are recreated instead of followed.
    Errors are printed, not raised, so the copy continues best-effort.
    """
    entries = listdir(src)
    if pathIsdir(dst):
        # Copy *into* an existing destination directory.
        dst = pathJoin(dst, pathBasename(src))
        if not pathIsdir(dst):
            mkdir(dst)
    else:
        makedirs(dst)
    for entry in entries:
        srcname = pathJoin(src, entry)
        dstname = pathJoin(dst, entry)
        try:
            if symlinks and pathIslink(srcname):
                # Recreate the link rather than copying its target.
                symlink(readlink(srcname), dstname)
            elif pathIsdir(srcname):
                copytree(srcname, dstname, symlinks)
            else:
                copyfile(srcname, dstname)
        except (IOError, OSError) as err:
            print("[Directories] Error %d: Copying tree '%s' to '%s'! (%s)" % (err.errno, srcname, dstname, err.strerror))
    try:
        st = os_stat(src)
        try:
            chmod(dst, S_IMODE(st.st_mode))
        except (IOError, OSError) as err:
            print("[Directories] Error %d: Setting modes from '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))
        try:
            utime(dst, (st.st_atime, st.st_mtime))
        except (IOError, OSError) as err:
            print("[Directories] Error %d: Setting times from '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))
    except (IOError, OSError) as err:
        print("[Directories] Error %d: Obtaining stats from '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))
def photos(folder, img):
    """Render a single photo page with its neighbouring thumbnails.

    NOTE(review): folder/img come straight from the URL and are joined
    unchecked into the DB lookup key -- the original "# security?" concern
    stands; confirm upstream routing constrains them.
    """
    imgUrl = folder + '/' + img
    conn = None
    c = None
    try:
        conn = get_db()
        conn.row_factory = sqlite3.Row
        c = conn.cursor()
        img = db.dict_from_row(db.select_a_photo(c, imgUrl))  # mainPhoto
        phs = db.select_all_photo_thumbnail(c)
        phs = calc_neighbours(phs, img, 2, func=lambda x: x['url'])
        phs = [{'title': title,
                'url': pathJoin('/', PHOTOS, url),
                'thumbnail': pathJoin(IMAGES, THUMBNAILS, PHOTOS, url)}
               for url, title in phs]
        img['thumbnail'] = pathJoin(IMAGES, THUMBNAILS, PHOTOS, img['url'])
        img['url'] = pathJoin(IMAGES, PHOTOS, img['url'])
    except Exception as e:
        print('\x1b[31m', e, '\x1b[0m')
        abort(500)
    finally:
        # Bug fix: if get_db()/cursor() raised, c (or conn) was never bound
        # and the original finally clause raised NameError, masking the
        # real error; guard each close.
        if c is not None:
            c.close()
        if conn is not None:
            conn.close()
    return render_template('photos.html.j2', img=img, photos=phs)
def saveValue(self,name,value):
    '''
    Save a value with the name name and the value value.
    '''
    debug.add('saving value '+str(name),value)
    # Unknown name: allocate a fresh '<n>.value' backing file first.
    if name not in self.names:
        debug.add('name not in self.names')
        counter = 0
        newName = str(counter) + '.value'
        # Probe until a filename not already present in the table dir is found.
        while newName in listdir(self.path):
            counter += 1
            newName = str(counter) + '.value'
        debug.add('newname', newName)
        # Record the name -> path association in this instance...
        self.namePaths[name] = pathJoin(self.path, newName)
        # ...and persist the association map to disk.
        writeFile(pathJoin(self.path, 'names.table'), pickle(self.namePaths))
        debug.add('namePaths', self.namePaths)
        # Keep the derived attributes in sync.
        self.names = self.namePaths.keys()
        self.length = len(self.names)
    # Write the pickled value into its backing file and return the result.
    return writeFile(self.namePaths[name], pickle(value))
def saveValue(self, name, value):
    '''
    Save a value with the name name and the value value.
    '''
    debug.add('saving value ' + str(name), value)
    if name in self.names:
        # Known name: overwrite its existing backing file.
        return writeFile(self.namePaths[name], pickle(value))
    debug.add('name not in self.names')
    # Find the first '<counter>.value' filename not present on disk.
    counter = 0
    newName = str(counter) + '.value'
    while newName in listdir(self.path):
        counter += 1
        newName = str(counter) + '.value'
    debug.add('newname', newName)
    # Register the file association and persist the metadata map.
    self.namePaths[name] = pathJoin(self.path, newName)
    writeFile(pathJoin(self.path, 'names.table'), pickle(self.namePaths))
    debug.add('namePaths', self.namePaths)
    # Refresh derived attributes.
    self.names = self.namePaths.keys()
    self.length = len(self.names)
    # Finally write the value itself.
    fileData = writeFile(self.namePaths[name], pickle(value))
    return fileData
def plotFromPath(path, npy_file, y_lim=(0.5, 1)):
    """Plot mean +/- std (errorbar) of the metric matrix stored at
    path/npy_file, with Polish axis labels.

    path: directory containing "data.txt" (tab-separated parameter\\tvalue
        lines) and the .npy matrix.
    npy_file: "accord.npy" or "neigh.npy" -- selects the y-label and how
        many columns are kept.
    y_lim: kept for interface compatibility; the plt.ylim call was already
        commented out, so it is unused.  The default is now an immutable
        tuple (the original mutable list default is a shared-state hazard).
    """
    trans_dict = {"accord.npy": u"Indeks Zgodności",
                  "neigh.npy": u"Korygowany Współczynnik Przekrywania"}
    matrix = np.load(pathJoin(path, npy_file))
    # Truncate to the expected number of repetitions per metric.
    if npy_file == "accord.npy":
        matrix = matrix[:, :20]
    if npy_file == "neigh.npy":
        matrix = matrix[:, :190]
    means = np.mean(matrix, axis=1)
    errors = np.std(matrix, axis=1)
    data_dict = {}
    # Context manager closes the handle -- the original leaked it.
    with open(pathJoin(path, "data.txt")) as data_file:
        for line in data_file:
            parameter, value = line.rstrip("\n").split("\t")
            data_dict[parameter] = value
    plt.errorbar(np.arange(len(matrix)), means, yerr=errors)
    plt.xlim(-1, len(matrix) + 1)
    plt.ylabel(trans_dict[npy_file], fontsize=16)
    plt.xlabel(u"Ilość skupisk", fontsize=16)
    plt.tick_params(labelsize=14)
    # HACK: eval() on file contents -- acceptable only for trusted local
    # data; prefer ast.literal_eval if data.txt could ever be untrusted.
    plt.xticks(np.arange(len(matrix)), eval(data_dict["list_of_cluster_n"]))
def __init__(self, log_save_dir):
    """Prepare the log directory layout under log_save_dir.

    Archives anything left in <log_save_dir>/latest from a previous run
    (via move_old_log) and creates a fresh uniquely-named directory inside
    'latest' for this run's logs.
    """
    self.addition_log = []
    self.latest = pathJoin(log_save_dir, "latest")
    makedirs_if_not_exist(self.latest)
    # Rotate logs left over from a previous run out of 'latest'.
    self.move_old_log()
    # New per-run directory inside 'latest'.
    self.log_save_dir = pathJoin(self.latest, get_new_dirname())
    makedirs_if_not_exist(self.log_save_dir)
def getRecordingFilename(basename, dirname=None):
    """Build an unused recording path for basename inside dirname (or the
    default recording location), appending _001, _002, ... while a matching
    .ts file already exists.

    Relative dirname values are resolved under the default recording
    location; dirname=None uses the default location directly.
    """
    # Replace filesystem-hostile and control characters with underscores.
    non_allowed_characters = "/.\\:*?<>|\""
    basename = basename.replace("\x86", "").replace("\x87", "")
    filename = "".join("_" if c in non_allowed_characters or ord(c) < 32 else c
                       for c in basename)
    # Max filename length for ext4 is 255 (minus 8 characters for .ts.meta).
    filename = filename[:247]
    if dirname is None:
        dirname = defaultRecordingLocation()
    elif not dirname.startswith("/"):
        dirname = pathJoin(defaultRecordingLocation(), dirname)
    filename = pathJoin(dirname, filename)
    path = filename
    i = 1
    # Probe for a free name: base, then base_001, base_002, ...
    while pathIsfile(path + ".ts"):
        path = "%s_%03d" % (filename, i)
        i += 1
    return path
def isPluginInstalled(pluginname, pluginfile="plugin", pluginType=None):
    """Return True when <plugins>/<type>/<pluginname>/<pluginfile>.py[c]
    exists for some plugin category.

    pluginType=None searches every category directory; otherwise only the
    named category is checked (and it must actually exist).
    """
    path = resolveFilename(SCOPE_PLUGINS)
    pluginfolders = [name for name in listdir(path)
                     if pathIsdir(pathJoin(path, name)) and name not in ["__pycache__"]]
    if pluginType is not None and pluginType not in pluginfolders:
        return False
    candidates = [pluginType] if pluginType else pluginfolders
    # Accept either compiled or source form of the plugin file.
    for fileext in (".pyc", ".py"):
        for plugintype in candidates:
            if pathIsfile(pathJoin(path, plugintype, pluginname, pluginfile + fileext)):
                return True
    return False
def lsof():
    """List open files of all running processes.

    Returns (pid, executable, open-file-target) tuples gathered from
    /proc/<pid>/fd.  Processes that vanish (or deny access) mid-scan are
    silently skipped.
    """
    results = []
    for pid in listdir("/proc"):
        if not pid.isdigit():
            continue  # skip non-process entries in /proc
        try:
            prog = readlink(pathJoin("/proc", pid, "exe"))
            fdDir = pathJoin("/proc", pid, "fd")
            for fd in listdir(fdDir):
                results.append((pid, prog, readlink(pathJoin(fdDir, fd))))
        except OSError:
            # Process exited or is inaccessible; ignore it.
            pass
    return results
def setUp(self):
    """Create a scratch log tree (abc/f1..f3 next to this test file) and a
    Reporter pointed at the 'test1' save directory, pre-loaded with it."""
    this_dir = abspath(dirname(__file__))
    self.log_save_dir = pathJoin(this_dir, "test1")
    self.log_dir = pathJoin(this_dir, "abc")
    makedirs(self.log_dir)
    self.log_files = [pathJoin(self.log_dir, "f1")]
    self.log_files += [pathJoin(self.log_dir, "f2")]
    self.log_files += [pathJoin(self.log_dir, "f3")]
    # Touch each log file so it exists but is empty.
    for file in self.log_files:
        open(file, "a").close()
    self.reporter = Reporter(self.log_save_dir)
    self.reporter.add_log(self.log_dir)
def get_logs(self, logcat_tag=("HopeBay", "TeraService")):
    """Collect device logs (tombstones, HCFS binary and log, per-tag and
    full logcat, dmesg) into a fresh version-stamped directory.

    logcat_tag: iterable of logcat tags captured individually.  The default
        is now an immutable tuple -- the original mutable-list default was
        shared between calls (classic mutable-default-argument hazard).

    Returns the created logs directory path.
    """
    version = self.get_version()
    logs_dir = pathJoin(self.report_dir, get_new_dirname(version))
    if not dirExists(logs_dir):
        os.makedirs(logs_dir)
    self.get_tombstones(logs_dir)
    self.get_hcfs_bin(logs_dir)
    self.get_hcfs_log(logs_dir)
    # One logcat capture per requested tag, plus the unfiltered one.
    for tag in logcat_tag:
        self.get_logcat(pathJoin(logs_dir, tag + "_logcat"), tag)
    self.get_logcat(pathJoin(logs_dir, "logcat"))
    self.get_dmesg(pathJoin(logs_dir, "dmesg"))
    return logs_dir
def getFullFilepath(self, guild: discord.Guild, imageDetails: dict, imageType="icons", mkdir=False):
    """Return the on-disk path for a guild image.

    guild: the guild the image belongs to (its id names the folder).
    imageDetails: dict holding at least the "filename" key.
    imageType: "icons" or "banners"; anything else raises ValueError.
    mkdir: when True, create the containing directory (parents included).
    """
    if imageType not in ("icons", "banners"):
        raise ValueError("Image type not defined!")
    directory = pathJoin(self.dataFolder, str(guild.id), imageType)
    if mkdir:
        Path(directory).mkdir(parents=True, exist_ok=True)
    return pathJoin(directory, imageDetails["filename"])
def copyfile(src, dst):
    """Copy src to dst (into dst if dst is an existing directory), then
    replicate src's mode and timestamps onto dst.

    Returns 0 on success, -1 when the data copy failed.  Errors are
    printed, not raised.
    """
    f1 = None
    f2 = None
    status = 0
    try:
        f1 = open(src, "rb")
        if pathIsdir(dst):
            dst = pathJoin(dst, pathBasename(src))
        f2 = open(dst, "w+b")
        # Stream in 16 KiB chunks to bound memory usage.
        while True:
            buf = f1.read(16 * 1024)
            if not buf:
                break
            f2.write(buf)
    except (IOError, OSError) as err:
        print("[Directories] Error %d: Copying file '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))
        status = -1
    finally:
        # Bug fix: close in a finally block so the handles are also released
        # when an exception type other than IOError/OSError propagates (the
        # original fell through to the closes only on the handled paths).
        if f1 is not None:
            f1.close()
        if f2 is not None:
            f2.close()
    try:
        st = os_stat(src)
        try:
            chmod(dst, S_IMODE(st.st_mode))
        except (IOError, OSError) as err:
            print("[Directories] Error %d: Setting modes from '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))
        try:
            utime(dst, (st.st_atime, st.st_mtime))
        except (IOError, OSError) as err:
            print("[Directories] Error %d: Setting times from '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))
    except (IOError, OSError) as err:
        print("[Directories] Error %d: Obtaining stats from '%s' to '%s'! (%s)" % (err.errno, src, dst, err.strerror))
    return status
def deleteValue(self, name):
    '''
    Delete a value with name name.
    '''
    debug.add('deleting value ', name)
    # Unknown names cannot be deleted.
    if name not in self.names:
        return False
    # Drop the metadata entry and remember where the file lives.
    filePath = self.namePaths.pop(name)
    # Persist the shrunken name->path map before touching the file.
    writeFile(pathJoin(self.path, 'names.table'), pickle(self.namePaths))
    # Keep derived attributes in sync.
    self.names = self.namePaths.keys()
    self.length = len(self.names)
    if not pathExists(filePath):
        # Metadata removed but there was no backing file to delete.
        return False
    removeFile(filePath)
    return True
def compress(srcPath, tgtPath):
    """Zip srcPath (a file or a directory tree) into tgtPath.

    shutil.make_archive isn't used, due to its forcing of the zip extension
    and due to the need for maintaining a compression standard
    (ZIP_DEFLATED).

    Raises Exception when srcPath does not exist.
    """
    if not exists(srcPath):
        # Bug fix: the original did `raise('No such path: ...')` -- calling
        # raise on a plain string is a TypeError in Python 3.
        raise Exception('No such path: %s' % srcPath)
    ensureParent(tgtPath)
    from zipfile import ZipFile, ZIP_DEFLATED
    cwd = os.getcwd()  # The CWD circus is to reduce the relpath calls.
    try:
        # Context manager closes the archive even if a write fails.
        with ZipFile(tgtPath, 'w', ZIP_DEFLATED) as zipFileObj:
            if isContainer(srcPath):
                os.chdir(srcPath)
                for root, dummy1, files in os.walk('.', followlinks=True):
                    for name in files:
                        zipFileObj.write(pathJoin(root, name))
            else:
                parent, name = pathSplit(srcPath)
                os.chdir(parent)
                zipFileObj.write(name)
    finally:
        # Bug fix: always restore the working directory, even on error (the
        # original left the process chdir-ed into srcPath on failure).
        os.chdir(cwd)
def __init__(self, bot: Red):
    """Wire up Config, file logging, and the background image-check loop."""
    self.bot = bot
    self.config = Config.get_conf(self, identifier=5842647, force_registration=True)
    # Register default (empty) settings.
    self.config.register_guild(**BASE_GUILD)
    # Initialize logger, and save to cog folder.
    self.dataFolder = data_manager.cog_data_path(cog_instance=self)
    self.logger = logging.getLogger("red.luicogs.ServerManage")
    if self.logger.level == 0:
        # Prevents the self.logger from being loaded again in case of module reload.
        self.logger.setLevel(logging.INFO)
        logPath = pathJoin(self.dataFolder, "info.log")
        handler = logging.FileHandler(filename=logPath, encoding="utf-8", mode="a")
        handler.setFormatter(
            logging.Formatter("%(asctime)s %(message)s", datefmt="[%d/%m/%Y %H:%M:%S]"))
        self.logger.addHandler(handler)
    # On cog load, we want the loop to run once, so pretend the last check
    # happened a day ago.
    self.lastChecked = datetime.now() - timedelta(days=1)
    self.bgTask = self.bot.loop.create_task(self.imageLoop())
def recGlob(path, pattern):
    """Recursively collect files under path whose basenames match the
    fnmatch pattern; returns a list of full paths."""
    return [pathJoin(dirpath, filename)
            for dirpath, _, files in walk(path)
            for filename in fnmatch.filter(files, pattern)]
def get_tombstones(self, path):
    """Pull every tombstone file from the device into path, naming each
    local copy <day>-<time>-<name>."""
    out, _ = self.exec_shell("su 0 ls -l /data/tombstones/")
    # Sample line:
    # -rw------- system system 491314 2016-12-09 09:56 tombstone_09
    for line in out.rstrip().split("\r\n"):
        # Collapse repeated spaces, then unpack the seven ls -l columns.
        fields = [f for f in line.split(" ") if f]
        _, _, _, _, day, time, tombstone = fields
        dest = pathJoin(path, day + "-" + time + "-" + tombstone)
        self.pull_as_root("/data/tombstones/" + tombstone, dest)
def get_ndk_path():
    """Scan every environment variable (treated as a colon-separated path
    list) for a directory containing ndk-build; raise when none is found."""
    for value in os.environ.values():
        for path in value.split(":"):
            if fileExists(pathJoin(path, "ndk-build")):
                return path
    raise Exception("Unable to find ndk path")
def downloadFile(self, title, link):
    """Generate a PDF of link's print page and save it under the adapter's
    prefix folder inside tempDir.

    Returns the saved path, or None when no print page could be generated
    or saving failed.
    """
    printPage = self.adapter.print_page(link)
    pdfFile = self.generate_print_page(title, link)
    if not pdfFile:
        return None
    folder_to_save = pathJoin(self.tempDir, self.adapter.getPrefixName())
    if not os.path.exists(folder_to_save):
        os.makedirs(folder_to_save)
    pdf_file_to_save = pathJoin(folder_to_save, pdfFile)
    result = self.pdf_saver.save_url(printPage, pdf_file_to_save)
    if result:
        # Bug fix: `print result` is a Python 2 statement and a syntax
        # error under Python 3; the call form works on both.
        print(result)
        self.saved_files.append(result)
    return result
def get_sdk_path():
    """Scan every environment variable (treated as a colon-separated path
    list) for a directory holding platforms/android-23; raise when none is
    found."""
    for value in os.environ.values():
        for path in value.split(":"):
            if dirExists(pathJoin(path, "platforms", "android-23")):
                return path
    raise Exception("Unable to find sdk path")
def error(self, errStr, cfgPath):
    """ Error handler GUI implementation """
    # Two failure modes are handled:
    #  - yandex-disk is not installed (errStr == ''): show the message only.
    #  - yandex-disk is not configured (errStr != ''): offer to run ya-setup.
    text1 = _('Yandex.Disk Indicator: daemon start failed')
    if errStr == '':
        buttons = Gtk.ButtonsType.OK
        text2 = (_(
            'Yandex.Disk utility is not installed.\n' +
            'Visit www.yandex.ru, download and install Yandex.Disk daemon.'))
    else:
        buttons = Gtk.ButtonsType.OK_CANCEL
        text2 = (_(
            'Yandex.Disk daemon failed to start because it is not' +
            ' configured properly\n\n' + errStr + '\n\n' +
            ' To configure it up: press OK button.\n Press Cancel to exit.'))
    dialog = Gtk.MessageDialog(None, 0, Gtk.MessageType.INFO, buttons, text1)
    dialog.format_secondary_text(text2)
    dialog.set_icon(APPLOGO)
    response = dialog.run()
    if errStr != '' and response == Gtk.ResponseType.OK:
        # Launch Set-up utility
        LOGGER.debug('starting configuration utility')
        retCode = call([pathJoin(APPINSTPATH, 'ya-setup'), cfgPath])
    else:
        retCode = 1
    dialog.destroy()
    # 0 when error is not critical or fixed (daemon has been configured via ya-setup)
    return retCode
def update(self, vals, yddir):
    """Refresh menu items from daemon status values.

    vals: dict of daemon state ('status', 'laststatus', 'progress',
        'error', 'path', 'used'/'total'/'free'/'trash', 'lastitems', plus
        the statchg/szchg/lastchg change flags).
    yddir: the daemon's sync folder path ('' when not configured).

    NOTE(review): indentation reconstructed from collapsed source; the
    nesting of the identity-row block inside the started/stopped branch
    matches the surrounding logic but should be confirmed upstream.
    """
    self.folder = yddir
    # Update status data on first run or when status has changed
    if vals['statchg'] or vals['laststatus'] == 'unknown':
        self.status.set_label(
            _('Status: ') + self.YD_STATUS[vals['status']] +
            (vals['progress'] if vals['status'] == 'busy' else ' '.
             join((':', vals['error'], shortPath(vals['path'])))
             if vals['status'] == 'error' else ''))
    # Update pseudo-static items on first run or when daemon has stopped or started
    if 'none' in (vals['status'], vals['laststatus']) or vals['laststatus'] == 'unknown':
        started = vals['status'] != 'none'
        self.status.set_sensitive(started)
        # zero-space UTF symbols are used to detect requered action without
        # need to compare translated strings
        self.daemon_ss.set_label((
            '\u2060' + _('Stop Yandex.Disk daemon')) if started else (
            '\u200B' + _('Start Yandex.Disk daemon')))
        if self.ID != '':
            # Set daemon identity row in multidaemon mode
            self.yddir.set_label(self.ID + _(' Folder: ') +
                                 (shortPath(yddir) if yddir else '< NOT CONFIGURED >'))
        self.open_folder.set_sensitive(
            yddir != '')  # Activate Open YDfolder if daemon configured
    # Update sizes data on first run or when size data has changed
    if vals['szchg'] or vals['laststatus'] == 'unknown':
        self.used.set_label(
            _('Used: ') + vals['used'] + '/' + vals['total'])
        self.free.set_label(
            _('Free: ') + vals['free'] + _(', trash: ') + vals['trash'])
    # Update last synchronized sub-menu on first run or when last data has changed
    if vals['lastchg'] or vals['laststatus'] == 'unknown':
        # Disable showing synchronized sub menu while updating it - temp fix for #197
        self.lastItems.destroy()
        self.lastItems = Gtk.Menu()  # Create new/empty Sub-menu:
        for filePath in vals['lastitems']:  # Create new sub-menu items
            # Create menu label as file path (shorten it down to 50 symbols
            # when path length > 50 symbols), with replaced underscore (to
            # disable menu acceleration feature of GTK menu).
            widget = Gtk.MenuItem.new_with_label(shortPath(filePath))
            filePath = pathJoin(yddir, filePath)  # Make full path to file
            if pathExists(filePath):
                widget.set_sensitive(
                    True)  # If it exists then it can be opened
                widget.connect("activate", self.openPath, filePath)
            else:
                widget.set_sensitive(
                    False)  # Don't allow to open non-existing path
            self.lastItems.append(widget)
        self.last.set_submenu(self.lastItems)
        # Switch off last items menu sensitivity if no items in list
        self.last.set_sensitive(vals['lastitems'])
        LOGGER.debug("Sub-menu 'Last synchronized' has %s items",
                     str(len(vals['lastitems'])))
    self.show_all()  # Renew menu
def pathSelect(self, path):
    """Config-screen callback for the timeshift path chooser.

    path is None when the selection was cancelled; otherwise it is stored
    (normalised with a trailing separator) as the TimeshiftPath choice.
    """
    if path is not None:
        # Joining with "" guarantees a trailing path separator.
        path = pathJoin(path, "")
        self.buildChoices("TimeshiftPath", config.usage.timeshift_path, path)
        self["config"].invalidateCurrent()
        self.changedEntry()
def defaultRecordingLocation(candidate=None):
    """Return the preferred recording directory, with a trailing slash.

    Preference order: candidate (when it exists), the /hdd symlink target
    or /media/hdd, then the best mounted disk under /media (local /dev
    devices before remote mounts).  A 'movie' subdirectory is used when
    present.  May return a falsy value when nothing suitable is mounted.
    """
    if candidate and pathExists(candidate):
        return candidate
    # First, try whatever /hdd points to, or /media/hdd.
    try:
        path = readlink("/hdd")
    except OSError:
        path = "/media/hdd"
    if not pathExists(path):
        # Find the largest local disk.
        from Components import Harddisk
        mounts = [m for m in Harddisk.getProcMounts() if m[1].startswith("/media/")]
        # Search local devices first, use the larger one
        path = bestRecordingLocation([m for m in mounts if m[0].startswith("/dev/")])
        # If we haven't found a viable candidate yet, try remote mounts.
        if not path:
            path = bestRecordingLocation([m for m in mounts if not m[0].startswith("/dev/")])
    if path:
        # If there's a movie subdir, we'd probably want to use that.
        movie = pathJoin(path, "movie")
        if pathIsdir(movie):
            path = movie
        if not path.endswith("/"):
            path += "/"  # Bad habits die hard, old code relies on this.
    return path
def setupdom(plugin=None):
    """Parse and return the setup.xml DOM for a plugin, or the skin's
    default setup.xml when plugin is None.

    Bug fix: the original used the Python 2 `file()` builtin, which no
    longer exists under Python 3; open() in a context manager replaces it
    and guarantees the handle is closed even if parsing raises.
    """
    if plugin:
        setupFileName = resolveFilename(SCOPE_PLUGINS, pathJoin(plugin, "setup.xml"))
    else:
        setupFileName = resolveFilename(SCOPE_SKIN, "setup.xml")
    with open(setupFileName, "r") as setupfile:
        setupfiledom = xml.etree.cElementTree.parse(setupfile)
    return setupfiledom
def getLogos():
    """Return metadata for every logo image in the logos folder: the stem
    name, the link text stored in the matching .txt file, and the image
    filename."""
    entries = [f for f in os.listdir(pathJoin(FULL_IMAGES, LOGOS)) if f.endswith(IMGEXT)]
    logos = []
    for logo in entries:
        stem = splitext(logo)[0]
        # Each logo has a sibling .txt file holding its target link.
        linkFile = pathlib.Path(app.static_folder + '/' + IMAGES + LOGOS + stem + '.txt')
        logos.append({'name': stem, 'link': linkFile.read_text(), 'filename': logo})
    return logos
def resolve(basePath, relation):
    r"""Resolves a relative path of the given basePath.

    Args:
        basePath (path): The path of the base.
        relation (str): The relation. Ex: '..', '../ops' etc.

    Returns:
        str: The absolute, normalised result of joining the two.
    """
    joined = pathJoin(basePath, relation)
    return abspath(joined)
def build_release(cwd):
    """Run the Gradle assembleRelease task in cwd and return the built APK
    path; raise (with the task's stdout/stderr) if the APK is missing."""
    # _exec_cmd(android update sdk --all --no-ui --filter
    # extra-android-m2repository", cwd)
    out, err = exec_task("assembleRelease", cwd)
    release_apk = pathJoin(cwd, RELEASE_APK_RELATIVE)
    if not fileExists(release_apk):
        raise Exception("Fail to build apk:" + str((out, err)))
    return release_apk
def __init__(self, *_, local_path, local_tar=None, remote_tar=None, host_path=None, container_path=None, pypath=False, name=None, excludes=None, file_mask=None, compress=True, exclude_vcs=True, exclude_from=None, **tar_options): self.local_path = local_path self.host_path = host_path self.container_path = container_path or host_path self.pypath = pypath name = name or str(uuid4()) self.name = name self.compress = compress self.file_mask = file_mask or "." # file_mask can Not be None or "". self.excludes = excludes or "--exclude='*__pycache__' --exclude='*.git' --exclude='*.idea' --exclude='*.egg-info'" from .jaynes import RUN local_path = os.path.expandvars(local_path) local_abs = os.path.join(RUN.config_root, local_path) tar_options = ' '.join([ f"--{key.replace('_', '-')}={value}" for key, value in tar_options.items() ]) if exclude_vcs: tar_options += " --exclude-vcs" if exclude_from: ignore_file_path = os.path.join(RUN.config_root, exclude_from) tar_options += f" --exclude-from='{ignore_file_path}'" if local_tar is None: tar_name = f"{name}.tar" self.temp_dir = get_temp_dir() self.local_tar = pathJoin(self.temp_dir, tar_name) else: tar_name = os.path.basename(local_tar) self.temp_dir = os.path.dirname(local_tar) self.local_tar = local_tar self.remote_tar = remote_tar or f"$TMPDIR/{tar_name}" self.local_script = f""" type gtar >/dev/null 2>&1 && alias tar=`which gtar` mkdir -p {self.temp_dir} # Do not use absolute path in tar. tar {self.excludes} {tar_options} -c{"z" if compress else ""}f {self.local_tar} -C {local_abs} {self.file_mask} """ self.host_setup = f"""
def openFile():
    """Open (append mode, UTF-8) and return the log file handle, or None
    when file logging is disabled.

    Bug fix: the original returned None immediately after creating a
    missing LOG_DIR, so the very first call after startup never opened the
    log file; the early return has been removed.
    """
    if not LOG_ACTIVE:
        debugInfo("Dosyaya raporlama kapalı!")
        return
    if not pathExist(LOG_DIR):
        pathMakedirs(LOG_DIR)
    return open(pathJoin(LOG_DIR, LOG_FILE), "a+", encoding="utf-8")
def lookup(self, path):
    """Return the first existing file named path under the directories in
    self.pathList; raise FileNotFound when no include dir contains it."""
    for stem in self.pathList:
        candidate = pathJoin(stem, path)
        if pathExists(candidate):
            log('incdir: {0} -> {1}'.format(path, candidate))
            return candidate
    raise FileNotFound(path)
def main(folder, file_name):
    """Load all UAI graphical models from folder, shuffle them with a fixed
    seed, keep only those with at most 180 variables, prepare them across
    n_cpus, and pickle the collection to file_name.

    NOTE(review): Python 2 code -- cPickle and list-returning filter().
    Under Python 3, filter() would yield a lazy iterator (consumed by
    prepareGMs before the dump) and cPickle is gone; left as-is.
    """
    gms = [mgm.GraphicalModel.loadFromUAI(pathJoin(folder, f)) for f in os.listdir(folder)]
    # Fixed seed makes the shuffle reproducible across runs.
    random.seed(31337)
    random.shuffle(gms)
    gms = filter(lambda x: x.n_vars <= 180, gms)
    lp.prepareGMs(gms, n_cpus)
    cPickle.dump(gms, open(file_name, "w"), protocol=2)
def getSize(path, pattern=".*"):
    """Return the size in bytes of a file, or the total size of all files
    under a directory that match pattern (via crawlDirectory).  Unknown
    paths yield 0."""
    if pathIsdir(path):
        # crawlDirectory yields (directory, filename, ...) entries.
        return sum(pathGetsize(pathJoin(entry[0], entry[1]))
                   for entry in crawlDirectory(path, pattern))
    if pathIsfile(path):
        return pathGetsize(path)
    return 0
def rmtree(tgtPath):
    r"""Removes a dir tree. It can unlink junctions (even as sub-dirs),
    without removing the descendant files.
    """
    absPath = abspath(tgtPath)
    # NOTE(review): os.walk keeps descending while the recursive rmtree
    # calls below remove sub-dirs; by the time walk would yield a removed
    # subdir its contents are already gone.  The interleaving is
    # order-sensitive, so the logic is intentionally left untouched.
    for root, Dirs, Files in os.walk(absPath):
        for file in Files:
            unlink(pathJoin(root, file))
        for dir in Dirs:
            dir = pathJoin(root, dir)
            if isLinkedDir(dir):
                # Junction/symlinked dir: detach the link, keep its target.
                rmlink(dir)
            else:
                rmtree(dir)
    rmdir(tgtPath)
def photogallery():
    """Render the photo gallery page from all photo thumbnails in the DB."""
    conn = None
    c = None
    try:
        conn = get_db()
        c = conn.cursor()
        phs = db.select_all_photo_thumbnail(c)
        phs = [{'title': title,
                'url': pathJoin('/', PHOTOS, url),
                'thumbnail': pathJoin(IMAGES, THUMBNAILS, PHOTOS, url)}
               for url, title in phs]
    except Exception as e:
        print('\x1b[31m', e, '\x1b[0m')
        abort(500)
    finally:
        # Bug fix: if get_db()/cursor() raised, c (or conn) was never bound
        # and the original finally clause raised NameError instead of
        # letting abort(500) propagate; guard each close.
        if c is not None:
            c.close()
        if conn is not None:
            conn.close()
    return render_template('photogallery.html.j2', photos=phs)
def iconAnimation( ):
    # Changes busy icon by loop (triggered by self.timer)
    # As it called from timer (main GUI thread) there is no need to use idle_add here
    # NOTE(review): this function references `self` but takes no
    # parameters -- it only works as a closure nested inside a method where
    # `self` is in scope; it is not callable standalone.  Confirm against
    # the enclosing definition.
    # Set next animation icon
    self.ind.set_icon_full(
        pathJoin(self.themePath, 'yd-busy' + str(self._seqNum) + '.png'), '')
    # Calculate next icon number
    self._seqNum = self._seqNum % 5 + 1  # 5 icon numbers in loop (1-2-3-4-5-1-2-3...)
    return True  # True required to continue triggering by timer
def __init__(self, bot: Red):
    """Wire up file logging, the legacy JSON tag store, Config V3 guild
    settings, and the allowed-roles sync task."""
    self.bot = bot
    saveFolder = data_manager.cog_data_path(cog_instance=self)
    self.logger = logging.getLogger("red.luicogs.Tags")
    if self.logger.level == 0:
        # Prevents the self.logger from being loaded again in case of module reload.
        self.logger.setLevel(logging.INFO)
        logPath = pathJoin(saveFolder, "info.log")
        handler = logging.FileHandler(filename=logPath, encoding="utf-8", mode="a")
        handler.setFormatter(
            logging.Formatter("%(asctime)s %(message)s", datefmt="[%d/%m/%Y %H:%M:%S]"))
        self.logger.addHandler(handler)
    # if tags.json doesnt exist, create it
    universal_path = pathJoin(str(saveFolder), "tags.json")
    if not isfile(universal_path):
        with open(universal_path, "w+") as f:
            empty = dict()
            json.dump(empty, f)
    # Legacy JSON-backed config that holds the tags themselves.
    self.config = Config(
        str(saveFolder),
        "tags.json",
        encoder=TagEncoder,
        object_hook=tagDecoder,
        loop=bot.loop,
        load_later=True,
    )
    self.configV3 = ConfigV3.get_conf(self, identifier=5842647, force_registration=True)
    self.configV3.register_guild(
        **BASE_GUILD)  # Register default (empty) settings.
    self.lock = Lock()
    # Cache the tag sub-command names for name-collision checks.
    tagGroup = self.get_commands()[0]
    self.tagCommands = tagGroup.all_commands.keys()
    self.syncLoopCreated = False
    if self.bot.guilds:
        self.bot.loop.create_task(self.syncAllowedRoles())
def get_device_serial_num():
    """Read the device serial number from device.ini (section [one_device],
    key serial_num) located next to this module.

    Returns the serial string, or None when the value is empty.

    Compatibility fix: the original's `import ConfigParser` only works on
    Python 2; a try/except import supports both interpreters.
    """
    try:
        import configparser  # Python 3
    except ImportError:
        import ConfigParser as configparser  # Python 2 fallback
    from os.path import abspath
    from os.path import dirname
    from os.path import join as pathJoin
    config = configparser.ConfigParser()
    this_dir = abspath(dirname(__file__))
    config.read(pathJoin(this_dir, "device.ini"))
    serial_num = config.get("one_device", "serial_num")
    # Empty string is treated as "no serial configured".
    return serial_num if serial_num else None
def setIconTheme(self, theme):
    """ Determine paths to icons according to current theme """
    # global APPINSTPATH, APPCONFPATH
    theme = 'light' if theme else 'dark'  # Determine theme from application configuration settings
    defaultPath = pathJoin(APPINSTPATH, 'icons', theme)
    userPath = pathJoin(APPCONFPATH, 'icons', theme)
    # Map every status to its icon, preferring a user-supplied file over
    # the bundled default.
    self.icon = {}
    userIcon = None
    for status in ['idle', 'error', 'paused', 'none', 'no_net', 'busy']:
        if status in {'paused', 'none', 'no_net'}:
            name = 'yd-ind-pause.png'
        elif status == 'busy':
            name = 'yd-busy1.png'
        else:
            name = 'yd-ind-' + status + '.png'
        userIcon = pathJoin(userPath, name)
        self.icon[status] = userIcon if pathExists(userIcon) else pathJoin(defaultPath, name)
    # userIcon corresponds to the busy icon on exit from the loop above;
    # its existence decides which theme path the animation frames use.
    self.themePath = userPath if pathExists(userIcon) else defaultPath
def test_get_log_old_log(self):
    """After a second Reporter run, the first run's logs must have been
    archived into exactly one timestamped directory (everything under
    log_save_dir except 'latest') that still contains abc/f1..f3."""
    self.reporter.get_logs()
    # Recreate the source logs consumed by the first get_logs() pass.
    makedirs(self.log_dir)
    for file in self.log_files:
        open(file, "a").close()
    new_reporter = Reporter(self.log_save_dir)
    new_reporter.add_log(self.log_dir)
    new_reporter.get_logs()
    # Everything except 'latest' should be a single archived run.
    old_full_logs = [pathJoin(self.log_save_dir, x)
                     for x in listdir(self.log_save_dir) if x != "latest"]
    self.assertEqual(1, len(old_full_logs))
    old_full_log_dir = old_full_logs[0]
    log_dir = pathJoin(old_full_log_dir, "abc")
    log_files = [pathJoin(log_dir, "f1")]
    log_files += [pathJoin(log_dir, "f2")]
    log_files += [pathJoin(log_dir, "f3")]
    self.assertTrue(dirExists(log_dir))
    # NOTE(review): str(map(...)) in the failure message prints a map
    # object on Python 3; only the diagnostic text is affected, not the
    # assertion itself.
    self.assertTrue(all(map(fileExists, log_files)),
                    str(map(fileExists, log_files)) + ":" + str(log_files))
def index_upload():
    """Handle the image upload form.

    POST with a non-empty file: save it under IMAGE_UPLOAD and render the
    image page.  POST without a file: re-render the form with an error.
    GET: render the empty form.
    """
    if request.method == "POST":
        k = False
        if request.files and request.files['myImage'].filename != '':
            upload = request.files['myImage']  # FileStorage object, not a filename
            print(upload)
            # SECURITY fix: the client controls the filename; strip any
            # directory components so '../../etc/x'-style names cannot
            # escape the upload folder.
            from os.path import basename
            safe_name = basename(upload.filename.replace("\\", "/"))
            upload.save(pathJoin(app.config['IMAGE_UPLOAD'], safe_name))
            k = True
            path = "../static/uploaded_images"
            path = pathJoin(path, safe_name)
            return render_template("img.html", fileName=path)
        else:
            k = False
            return render_template("index.html", val=k, msg="Plz Upload Again", sz=0)
    else:
        return render_template('index.html', msg="Upload The Image", sz=0)
def itemExists(resolveList, base):
    """Search each directory in resolveList for base, also accepting the
    .png/.svg counterpart of the requested name.

    Returns the first existing full path, or the original base name when
    nothing was found.

    Bug fix: the original reused `base` as the inner loop variable, so the
    not-found return value leaked the *last tried alternative* (e.g. the
    .svg name for a .png request) instead of the requested name.
    """
    baseList = [base]
    # A raster/vector sibling is an acceptable substitute.
    if base.endswith(".png"):
        baseList.append("%s%s" % (base[:-3], "svg"))
    elif base.endswith(".svg"):
        baseList.append("%s%s" % (base[:-3], "png"))
    for item in resolveList:
        for candidate in baseList:
            file = pathJoin(item, candidate)
            if pathExists(file):
                return file
    return base
def setLimit(self, limit):
    '''
    Set the limit of values that are stored in this table.
    This ignores protected values.
    '''
    # Record the new limit on the instance, then mirror it to the
    # limit.table file inside the table directory.
    self.limit = limit
    return writeFile(pathJoin(self.path, 'limit.table'), limit)
def setLimit(self,limit):
    '''
    Set the limit of values that are stored in this table.
    This ignores protected values.
    '''
    # target file for the persisted limit
    targetPath = pathJoin(self.path, 'limit.table')
    # update this instance first
    self.limit = limit
    # persist the new limit and report the write result
    success = writeFile(targetPath, limit)
    return success
def __init__(self,path): ''' DB table to store things as files and directories. This is designed to reduce ram usage when reading things from large databases. Specifically this is designed for caches. # variables # .path The path on the filesystem where the table is stored. .names Gives you a list containing the names of all stored values as strings. .namePaths Gives you a dict where the keys are the names and the value is the path of that value database file .length The length of names stored in this table ''' # path of the root of the cache, this is where files # will be stored on the system self.path=path # create the paths if they do not exist if not pathExists(self.path): makedirs(self.path) debug.add('table path',self.path) # the path prefix is for tables stored in tables self.pathPrefix='' # tables are stored as files tempTable=[] # array of all the value names stored on table namesPath=pathJoin(self.path,'names.table') # if no namepaths exist create them if not pathExists(pathJoin(namesPath)): # write the new value to the system writeFile(namesPath,pickle(dict())) # load the name paths self.namePaths=unpickle(loadFile(namesPath)) debug.add('self.namePaths',self.namePaths) # create a array of all the names of values stored self.names=self.namePaths.keys() debug.add('self.names',self.names) # length of all the values stored on the table self.length=len(self.names) debug.add('self.length',self.length) # the protected list is a array of names that are # protected from limit cleaning protectedPath=pathJoin(self.path,'protected.table') if pathExists(pathJoin(protectedPath)): # load the list self.protectedList=unpickle(loadFile(protectedPath)) else: # create a blank list self.protectedList=[] # limit value to limit the number of values # load the limit value from file if it exists limitPath=pathJoin(self.path,'limit.table') if pathExists(pathJoin(limitPath)): self.limit=unpickle(loadFile(limitPath)) else: self.limit=None
def createTestDirectoryStructure(outdir):
    """Ensure an 'input' subdirectory exists under outdir.

    There is a fundamental race between testing for and creating a
    directory -- mkdir() raises if it already exists and there is no
    portable way to inspect the cause -- so creation is attempted in a
    short loop (at most two retries) instead of a single test-then-create.
    """
    testDir = pathJoin(outdir, 'input')
    retries = 0
    while not os.path.exists(testDir) and retries < 2:
        try:
            os.mkdir(testDir)
        except OSError:
            retries += 1  # Try again
def __call__(self, root, filename):
    """Run jpegoptim on root/filename, logging its output, and return
    filename.

    Bug fixes: the original read p.stderr into a variable named `out`
    (logged at debug level) and p.stdout into `err` (logged as error) --
    the streams were swapped.  The path is now also shell-quoted, since it
    was interpolated raw into a shell=True command string.
    """
    from subprocess import Popen, PIPE
    import shlex
    target = pathJoin(root, filename)
    command = 'jpegoptim --preserve --verbose {0}'.format(shlex.quote(target))
    p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
    logging.debug('starting jpegoptim process')
    while p.poll() is None:
        out = p.stdout.read()
        err = p.stderr.read()
        if out != '':
            logging.debug(out)
        if err != '':
            logging.error(err)
    logging.debug('jpegoptim process finished')
    return filename
def getHtml(self, includeStyleSheet=True, includeTitle=True, includeMeta=False, styleForWebKit=False, webenv=False):
    """Render the current editor tab's text to HTML via its markup class.

    includeStyleSheet -- embed/link CSS into the generated headers
    includeTitle      -- pass the document title as a fallback <title>
    includeMeta       -- add a generator <meta> tag
    styleForWebKit    -- add a body font rule matching the editor font
    webenv            -- forwarded to the markup's get_whole_html
    Returns an HTML string; on markup-module failure returns a red error
    paragraph instead of raising.
    """
    # reStructuredText is rendered through docutils directly (getRstHtml)
    if self.markups[self.ind].name == 'reStructuredText':
        return self.getRstHtml(includeStyleSheet, includeTitle, includeMeta, styleForWebKit, webenv)
    # no markup class could parse the file: return an inline error message
    if self.markups[self.ind] is None:
        markupClass = self.getMarkupClass()
        errMsg = self.tr('Could not parse file contents, check if '
            'you have the <a href="%s">necessary module</a> installed!')
        try:
            # link to the module's home page when the class advertises one
            errMsg %= markupClass.attributes[markups.MODULE_HOME_PAGE]
        except:
            # Remove the link if markupClass doesn't have the needed attribute
            errMsg = errMsg.replace('<a href="%s">', '')
            errMsg = errMsg.replace('</a>', '')
        return '<p style="color: red">%s</p>' % errMsg
    text = convertToUnicode(self.editBoxes[self.ind].toPlainText())
    # WpGen directives: expand %HTMLDIR%, and un-escape %\HTMLDIR%
    text = text.replace('%HTMLDIR%', 'html')
    text = text.replace('%\\HTMLDIR%', '%HTMLDIR%')
    headers = ''
    if includeStyleSheet:
        fontline = ''
        if styleForWebKit:
            # mirror the editor font in the preview; default QFont if unset
            fontname = self.font.family() if self.font else 'Sans'
            fontsize = (self.font if self.font else QFont()).pointSize()
            fontline = 'body { font-family: %s; font-size: %spt }\n' % \
                (fontname, fontsize)
        headers += '<style type="text/css">\n' + fontline + self.ss + '</style>\n'
    # prefer a document-specific stylesheet; fall back to the template default
    cssFileName = self.getDocumentTitle(baseName=True)+'.css'
    defaultCssFile = pathJoin(templatesDir, "default.css")
    if QFile(cssFileName).exists():
        headers += '<link rel="stylesheet" type="text/css" href="%s">\n' \
            % QUrl.fromLocalFile(QFileInfo(cssFileName).absoluteFilePath()).toString()
    elif QFile(defaultCssFile).exists():
        headers += '<link rel="stylesheet" type="text/css" href="%s">\n' \
            % QUrl.fromLocalFile(QFileInfo(defaultCssFile).absoluteFilePath()).toString()
    if includeMeta:
        headers += '<meta name="generator" content="%s %s">\n' % \
            (app_name, app_version)
    fallbackTitle = self.getDocumentTitle() if includeTitle else ''
    return self.markups[self.ind].get_whole_html(text, custom_headers=headers, include_stylesheet=includeStyleSheet, 
        fallback_title=fallbackTitle, webenv=webenv)
def loadOldGroups(self):
    """Import groups and memberships from CLIENT_HOME/old_groups.txt.

    Each line is 'group_name member1 member2 ...'. Missing groups are
    created, then membership is set for every line. Returns a textual
    action log.

    Fixes over the original:
      * `from string import split, strip` — these functions were removed
        in Python 3; str methods are used instead (identical semantics
        with an explicit ' ' separator);
      * the file handle was never closed — `with` closes it deterministically.
    """
    from os.path import join as pathJoin
    out = ''
    with open(pathJoin(CLIENT_HOME, 'old_groups.txt'), 'r') as groups_file:
        for group_line in groups_file:
            group_line_elements = group_line.strip().split(' ')
            group_name = group_line_elements[0]
            group_members = group_line_elements[1:]
            if self.getGroupById(group_name, default=None) is None:
                out = out + 'adding group %s\n' % group_name
                self.userFolderAddGroup(group_name)
            out = out + 'setting group %s membership to %s\n' % (group_name, group_members)
            self.setUsersOfGroup(group_members, group_name)
    return out
def get_all_casings_file(file):
    """Yield every upper/lower-case variant of the basename of *file*,
    rejoined with its directory part. Raises ValueError on empty input.

    >>> set(get_all_casings_file("/a/b/c/abc")) == set(["/a/b/c/abc", "/a/b/c/Abc", "/a/b/c/aBc", "/a/b/c/abC", "/a/b/c/ABc", "/a/b/c/AbC", "/a/b/c/aBC", "/a/b/c/ABC"])
    True
    >>> set(get_all_casings_file("abc")) == set(["abc", "Abc", "aBc", "abC", "ABc", "AbC", "aBC", "ABC"])
    True
    >>> set(get_all_casings_file(""))
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    ValueError: Argument should have value.
    >>> set(get_all_casings_file(None))
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    ValueError: Argument should have value.
    """
    if not file:
        raise ValueError("Argument should have value.")
    # only the basename is re-cased; the directory part is left untouched
    directory = dirname(file)
    for cased_name in all_casings(basename(file)):
        yield pathJoin(directory, cased_name)
def test_get_log_first_time(self):
    # First collection: exactly one timestamped dir under latest/,
    # containing abc/f1..f3.
    self.reporter.get_logs()
    latest_dir = pathJoin(self.log_save_dir, "latest")
    sub_latest_dir = [pathJoin(latest_dir, entry) for entry in listdir(latest_dir)]
    self.assertEqual(1, len(sub_latest_dir))
    final_dir = sub_latest_dir[0]
    final_log_dir = pathJoin(final_dir, "abc")
    final_log_files = [pathJoin(final_log_dir, name) for name in ("f1", "f2", "f3")]
    self.assertTrue(dirExists(final_log_dir))
    self.assertTrue(all(map(fileExists, final_log_files)), str(
        map(fileExists, final_log_files)) + ":" + str(final_log_files))
def setProtected(self, name):
    '''
    Mark *name* as protected from limit-based removal.

    Appends the name to the on-disk protected list (creating it if
    absent) and — fixing the original — also refreshes the in-memory
    copy, which `__init__` loads once and which otherwise went stale
    after every call.
    '''
    # on-disk location of the protected-names list
    filePath = pathJoin(self.path, 'protected.table')
    if pathExists(filePath):
        # load the existing protected list from the file
        protectedList = unpickle(loadFile(filePath))
    else:
        # no list yet: start a fresh one
        protectedList = []
    protectedList.append(name)
    # keep the in-memory attribute in sync with what is written to disk
    self.protectedList = protectedList
    # persist the updated list
    writeFile(filePath, pickle(protectedList))
def deleteValue(self, name):
    '''
    Delete the value stored under *name*.

    Removes the metadata entry and the backing file; returns True when
    the file was removed, False when the name is unknown or its file is
    already gone (metadata is still cleaned up in that case).
    '''
    debug.add('deleting value ', name)
    # unknown names are rejected up front
    if name not in self.names:
        return False
    filePath = self.namePaths[name]
    # drop the metadata entry and persist the updated mapping
    del self.namePaths[name]
    writeFile(pathJoin(self.path, 'names.table'), pickle(self.namePaths))
    # refresh the derived attributes
    self.names = self.namePaths.keys()
    self.length = len(self.names)
    # metadata is gone either way; report whether the file existed
    if not pathExists(filePath):
        return False
    removeFile(filePath)
    return True
def test_get_log_not_first_time(self):
    # Collect once, recreate the source logs, then collect again with a
    # fresh Reporter; latest/ must contain exactly one run with abc/f1..f3.
    self.reporter.get_logs()
    makedirs(self.log_dir)
    for log_file in self.log_files:
        open(log_file, "a").close()
    new_reporter = Reporter(self.log_save_dir)
    new_reporter.add_log(self.log_dir)
    new_reporter.get_logs()
    latest_dir = pathJoin(self.log_save_dir, "latest")
    sub_latest_dir = [pathJoin(latest_dir, entry) for entry in listdir(latest_dir)]
    self.assertEqual(1, len(sub_latest_dir))
    final_dir = sub_latest_dir[0]
    final_log_dir = pathJoin(final_dir, "abc")
    final_log_files = [pathJoin(final_log_dir, name) for name in ("f1", "f2", "f3")]
    self.assertTrue(dirExists(final_log_dir))
    self.assertTrue(all(map(fileExists, final_log_files)), str(
        map(fileExists, final_log_files)) + ":" + str(final_log_files))
def move_old_log(self):
    """Move every entry under self.latest into its parent directory.

    Fix: the original wrapped the work in map(), which is lazy in
    Python 3 — the mapped callable was never invoked, so nothing was
    ever moved. An explicit loop always executes the moves.
    """
    latest_parent = pathJoin(self.latest, "..")
    mover = move_to(latest_parent)
    for entry in listdir_abs(self.latest):
        mover(entry)
import makeUtils
import timeout
import config
from ..adb import adb
from ..androidUtils import fileUtils
from ..dockerBuildUtils import dockerBuildUtils

# module-level logger, namespaced as a child of the project config logger
logger = config.get_logger().getChild(__name__)

# directory containing this module
THIS_DIR = abspath(dirname(__file__))
# path of the HCFS API shared library on the phone
HCFS_LIB_PHONE_PATH = "/system/lib64/libHCFS_api.so"
# local destination for the library pulled from the phone
HCFS_LIB = THIS_DIR + "/libHCFS_api.so"
# adapter binary name, its local build output path, and its on-phone path
ADAPTER_NAME = "adapter"
ADAPTER_BIN_LOCAL = pathJoin(THIS_DIR, ADAPTER_NAME)
ADAPTER_BIN = "/data/" + ADAPTER_NAME


def setup():
    """Prepare the HCFS adapter: clean, check the build environment,
    fetch the library from the phone, build, and install.

    NOTE(review): cleanup/get_hcfs_lib_from_phone/build_hcfs_adapter/
    install_hcfs_adapter_to_phone are defined elsewhere in this module —
    their exact behavior is not visible here.
    """
    logger.info("adapter setup")
    cleanup()
    check_build_env()
    get_hcfs_lib_from_phone()
    build_hcfs_adapter()
    install_hcfs_adapter_to_phone()


def check_build_env():
    """Verify the build prerequisites; currently only checks that adb
    is available."""
    logger.info("check_build_env")
    adb.check_availability()
def test_get_log_not_first_time(self):
    # Re-collection including the adb/HCFS logs: latest/ holds exactly one
    # run containing abc/f1..f3, the HCFS phone logs, and the ADB log.
    self.reporter.get_logs()
    makedirs(self.log_dir)
    for log_file in self.log_files:
        open(log_file, "a").close()
    new_reporter = Reporter(self.log_save_dir)
    new_reporter.add_log(self.log_dir)
    self.hcfs_logs_dir = pathJoin(
        basename(adb.report_dir), basename(adb.get_logs()))
    new_reporter.add_log(adb.report_dir)
    new_reporter.get_logs()
    latest_dir = pathJoin(self.log_save_dir, "latest")
    sub_latest_dir = [pathJoin(latest_dir, entry) for entry in listdir(latest_dir)]
    self.assertEqual(1, len(sub_latest_dir))
    final_dir = sub_latest_dir[0]
    final_log_dir = pathJoin(final_dir, "abc")
    hcfs_log_dir = pathJoin(final_dir, self.hcfs_logs_dir)
    adb_log_dir = pathJoin(hcfs_log_dir, "..")
    final_log_files = [pathJoin(final_log_dir, name) for name in ("f1", "f2", "f3")]
    final_log_files += [pathJoin(hcfs_log_dir, name) for name in (
        "hcfs_android_log", "API_logcat", "HopeBay_logcat",
        "TeraService_logcat", "logcat", "dmesg")]
    final_log_files += [pathJoin(adb_log_dir, "ADB")]
    self.assertTrue(dirExists(final_log_dir))
    self.assertTrue(all(map(fileExists, final_log_files)), str(
        map(fileExists, final_log_files)) + ":" + str(final_log_files))
def test_get_log_old_log(self):
    # After a second collection, the previous run must be archived outside
    # latest/ with its full contents (abc/f1..f3, HCFS logs, ADB log).
    self.reporter.get_logs()
    makedirs(self.log_dir)
    for log_file in self.log_files:
        open(log_file, "a").close()
    new_reporter = Reporter(self.log_save_dir)
    new_reporter.add_log(self.log_dir)
    adb.get_logs()
    new_reporter.add_log(adb.report_dir)
    new_reporter.get_logs()
    old_full_logs = [pathJoin(self.log_save_dir, entry)
                     for entry in listdir(self.log_save_dir) if entry != "latest"]
    self.assertEqual(1, len(old_full_logs))
    old_full_log_dir = old_full_logs[0]
    log_dir = pathJoin(old_full_log_dir, "abc")
    hcfs_log_dir = pathJoin(old_full_log_dir, self.hcfs_logs_dir)
    adb_log_dir = pathJoin(hcfs_log_dir, "..")
    log_files = [pathJoin(log_dir, name) for name in ("f1", "f2", "f3")]
    log_files += [pathJoin(hcfs_log_dir, name) for name in (
        "hcfs_android_log", "API_logcat", "HopeBay_logcat",
        "TeraService_logcat", "logcat", "dmesg")]
    log_files += [pathJoin(adb_log_dir, "ADB")]
    self.assertTrue(dirExists(log_dir))
    self.assertTrue(all(map(fileExists, log_files)), str(
        map(fileExists, log_files)) + ":" + str(log_files))