def main():
    """Prevent running two instances of autobisectjs concurrently - we don't want to confuse hg."""
    options = parseOpts()
    # repo_dir is only available when build options were supplied.
    repo_dir = options.build_options.repo_dir if options.build_options else None

    # Pick the lock directory up-front so the with-statement stays readable.
    lock_path = (
        sm_compile_helpers.get_lock_dir_path(Path.home(), options.nameOfTreeherderBranch, tbox_id="Tbox")
        if options.useTreeherderBinaries
        else sm_compile_helpers.get_lock_dir_path(Path.home(), repo_dir))

    with LockDir(lock_path):
        if options.useTreeherderBinaries:
            print_("TBD: We need to switch to the autobisect repository.", flush=True)
            sys.exit(0)
        # Bisect using local builds.
        findBlamedCset(options, repo_dir, compile_shell.makeTestRev(options))
        # Last thing we do while we have a lock.
        # Note that this only clears old *local* cached directories, not remote ones.
        rm_old_local_cached_dirs(sm_compile_helpers.ensure_cache_dir(Path.home()))
def get_options():
    """Parse and validate command-line options for building v8_data.lib."""
    default_mc_path = str(Path.home() / "multichain")
    parser = ArgumentParser(description="Build v8_data.lib from .bin and .dat files")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="print debug messages to log")
    parser.add_argument("-m", "--multichain", metavar="DIR", default=default_mc_path,
                        help="MultiChain path prefix (default: %(default)s)")
    parser.add_argument("-o", "--platform", default=sys.platform,
                        help="override platform definition (default: %(default)s)")
    options = parser.parse_args()

    if options.verbose:
        logger.setLevel(logging.DEBUG)
    # Fail fast if the prefix directory is missing.
    if not Path(options.multichain).exists():
        parser.error("{!r}: MultiChain path does not exist".format(options.multichain))

    logger.info("{} - {}".format(logger.name, parser.description))
    logger.info(" Path: {}".format(options.multichain))
    logger.info(" Platform: {}".format(options.platform))
    return options
class HgHelpersTests(TestCase):
    """TestCase class for functions in hg_helpers.py"""
    trees_location = Path.home() / "trees"

    def test_get_cset_hash_in_bisectmsg(self):
        """Test that we are able to extract the changeset hash from bisection output."""
        extract = hg_helpers.get_cset_hash_from_bisect_msg
        self.assertEqual(extract("x 12345:abababababab"), "abababababab")
        self.assertEqual(extract("x 12345:123412341234"), "123412341234")
        self.assertEqual(extract("12345:abababababab y"), "abababababab")
        self.assertEqual(
            extract("Testing changeset 41831:4f4c01fb42c3 (2 changesets remaining, ~1 tests)"),
            "4f4c01fb42c3")
        with self.assertRaisesRegex(
                ValueError,
                (r"^Bisection output format required for hash extraction unavailable. "
                 "The variable msg is:")):
            extract("1a2345 - abababababab")

    # pylint: disable=no-member
    @pytest.mark.skipif(
        not (trees_location / "mozilla-central" / ".hg" / "hgrc").is_file(),
        reason="requires a Mozilla Mercurial repository")
    def test_hgrc_repo_name(self):
        """Test that we are able to extract the repository name from the hgrc file."""
        self.assertEqual(
            hg_helpers.hgrc_repo_name(self.trees_location / "mozilla-central"),
            "mozilla-central")
def get_authorized_session(self):
    """Build an AuthorizedSession for the Photos Library API.

    Tries to load cached OAuth credentials from disk first; falls back to the
    interactive auth flow, then re-saves the (possibly refreshed) credentials.
    """
    token_file = Path("%s/Documents/Libra/oauth_session.json" % Path.home())
    scopes = [
        'https://www.googleapis.com/auth/photoslibrary',
        'https://www.googleapis.com/auth/photoslibrary.readonly'
    ]

    # NOTE(review): `if token_file:` tests Path truthiness, which is always
    # True for a non-empty path — kept as-is to preserve behavior.
    creds = None
    if token_file:
        try:
            creds = Credentials.from_authorized_user_file(token_file, scopes)
        except OSError as err:
            logging.debug("Error opening auth token file - {0}".format(err))
        except ValueError:
            logging.debug("Error loading auth tokens - Incorrect format")

    if not creds:
        creds = self.auth(scopes)

    session = AuthorizedSession(creds)
    if token_file:
        try:
            self.save_credentials(creds, token_file)
        except OSError as err:
            logging.debug("Could not save auth tokens - {0}".format(err))
    return session
def get_shell_cache_dir(self):
    """Retrieve the shell cache directory of the intended js binary.

    Returns:
        Path: Full path to the shell cache directory of the intended js binary
    """
    cache_root = sm_compile_helpers.ensure_cache_dir(Path.home())
    return cache_root / self.get_shell_name_without_ext()
def get_s3_tar_with_ext_full_path(self):
    """Retrieve the path to the tarball downloaded from S3.

    Returns:
        Path: Full path to the tarball in the local shell cache directory
    """
    cache_root = sm_compile_helpers.ensure_cache_dir(Path.home())
    return cache_root / self.get_s3_tar_name_with_ext()
def add_standard_args(argparser):
    """Attach the shared --datadir / --quiet / --debug options to *argparser*.

    The default datadir honors the ZCLI_DATADIR environment variable and
    falls back to ~/.zcash.
    """
    env_datadir = os.environ.get('ZCLI_DATADIR')
    default_datadir = Path(env_datadir) if env_datadir else Path.home() / '.zcash'

    argparser.add_argument(
        '--datadir',
        dest='DATADIR',
        type=Path,
        default=default_datadir,
        help='Node datadir. Default: {!r}'.format(str(default_datadir)),
    )

    # --quiet and --debug are mutually exclusive; default verbosity is "standard".
    verbosity = argparser.add_mutually_exclusive_group()
    argparser.set_defaults(VERBOSITY='standard')
    verbosity.add_argument(
        '--quiet',
        dest='VERBOSITY',
        action='store_const',
        const='quiet',
        help='Suppress status updates (but not failures) on stderr.',
    )
    verbosity.add_argument(
        '--debug',
        dest='VERBOSITY',
        action='store_const',
        const='debug',
        help='Include debug output on stderr.',
    )
def user_shared_data_dir(self):
    """Return the XDG-style per-user shared data directory (~/.local/share)."""
    return '{home_path}/.local/share'.format(home_path=str(Path.home()))
def parseOpts(args):  # pylint: disable=invalid-name,missing-docstring,missing-return-doc,missing-return-type-doc
    parser = OptionParser()
    parser.disable_interspersed_args()
    parser.add_option("--compare-jit", action="store_true", dest="use_compare_jit",
                      default=False,
                      help="After running the fuzzer, run the FCM lines against the engine "
                           "in two configurations and compare the output.")
    parser.add_option("--random-flags", action="store_true", dest="randomFlags",
                      default=False,
                      help="Pass a random set of flags (e.g. --ion-eager) to the js engine")
    parser.add_option("--repo", action="store", dest="repo",
                      help="The hg repository (e.g. ~/trees/mozilla-central/), for bisection")
    # if you run loop directly w/o --build, lithium_helpers.pinpoint will try to guess
    parser.add_option("--build", action="store", dest="build_options_str",
                      help="The build options, for bisection", default=None)
    parser.add_option("--valgrind", action="store_true", dest="valgrind",
                      default=False,
                      help="use valgrind with a reasonable set of options")
    options, args = parser.parse_args(args)

    # optparse does not recognize pathlib - we will need to move to argparse
    default_repo = Path.home() / "trees" / "mozilla-central"
    options.repo = Path(options.repo) if options.repo else default_repo

    if options.valgrind and options.use_compare_jit:
        print("Note: When running compare_jit, the --valgrind option will be ignored")

    # kill js shell if it runs this long.
    # jsfunfuzz will quit after half this time if it's not ilooping.
    # higher = more complex mixing, especially with regression tests.
    # lower = less time wasted in timeouts and in compare_jit testcases that are thrown away due to OOMs.
    options.timeout = int(args[0])

    # FIXME: We can probably remove args[1]  # pylint: disable=fixme
    options.knownPath = "mozilla-central"
    options.jsEngine = Path(args[2])
    options.engineFlags = args[3:]
    return options
def get_cache(self):
    """Load and return the cached media-items JSON from ~/Documents/Libra.

    Raises:
        SystemError: if the cache file does not exist (app not configured).
    """
    cache_file = Path("%s/Documents/Libra/cache.json" % Path.home())
    if not cache_file.exists():
        raise SystemError("Application not properly configured. No cache found.")
    with open(str(cache_file), 'r') as cache:
        return json.load(cache)
def get_shell_cache_js_bin_path(self):
    """Retrieve the full path to the js binary located in the shell cache.

    Returns:
        Path: Full path to the js binary in the shell cache
    """
    cache_root = sm_compile_helpers.ensure_cache_dir(Path.home())
    return cache_root / self.get_shell_name_without_ext() / self.get_shell_name_with_ext()
def _get_config_path(): xdg_config_home = os.environ.get('XDG_CONFIG_HOME') if xdg_config_home: config_home = Path(xdg_config_home) else: config_home = Path.home().joinpath('.config') return config_home.joinpath('jiraprompt')
class CompileShellTests(unittest.TestCase):
    """TestCase class for functions in compile_shell.py"""

    # Paths
    mc_hg_repo = Path.home() / "trees" / "mozilla-central"
    shell_cache = Path.home() / "shell-cache"

    @pytest.mark.slow
    @lru_cache(maxsize=None)
    def test_shell_compile(self):
        """Test compilation of shells depending on the specified environment variable.

        Returns:
            Path: Path to the compiled shell.
        """
        assert self.mc_hg_repo.is_dir()  # pylint: disable=no-member
        # Change the repository location by uncommenting this line and specifying the right one
        # "-R ~/trees/mozilla-central/")
        default_parameters_debug = (
            "--enable-debug --disable-optimize --enable-more-deterministic "
            "--build-with-valgrind --enable-oom-breakpoint")
        # Remember to update the corresponding BUILD build parameters in .travis.yml as well
        build_opts = os.environ.get("BUILD", default_parameters_debug)
        opts_parsed = js.build_options.parse_shell_opts(build_opts)
        hg_hash_of_default = util.hg_helpers.get_repo_hash_and_id(opts_parsed.repo_dir)[0]

        # Ensure exit code is 0
        assert not js.compile_shell.CompiledShell(opts_parsed, hg_hash_of_default).run(
            ["-b", build_opts])

        file_name = None
        if default_parameters_debug in build_opts:
            # Test compilation of a debug shell with determinism, valgrind and OOM breakpoint support.
            file_name = "js-dbg-optDisabled-64-dm-vg-oombp-linux-" + hg_hash_of_default
        elif "--disable-debug --disable-profiling --without-intl-api" in build_opts:
            # Test compilation of an opt shell with both profiling and Intl support disabled.
            # This set of builds should also have the following: 32-bit with ARM, with asan, and with clang
            file_name = "js-64-profDisabled-intlDisabled-linux-" + hg_hash_of_default

        js_bin_path = self.shell_cache / file_name / file_name
        if platform.system() == "Windows":
            # BUG FIX: Path.with_suffix returns a *new* Path; the original call
            # discarded the result, so the ".exe" suffix was never applied.
            js_bin_path = js_bin_path.with_suffix(".exe")
        assert js_bin_path.is_file()
        return js_bin_path
def _machine_stats(self):
    """
    :return: machine stats dictionary, all values expressed in megabytes
    """
    per_cpu = [float(v) for v in psutil.cpu_percent(percpu=True)]
    stats = {"cpu_usage": sum(per_cpu) / float(len(per_cpu))}

    bytes_per_megabyte = 1024 ** 2

    def bytes_to_megabytes(x):
        return x / bytes_per_megabyte

    virtual_memory = psutil.virtual_memory()
    stats["memory_used_gb"] = bytes_to_megabytes(virtual_memory.used) / 1024
    stats["memory_free_gb"] = bytes_to_megabytes(virtual_memory.available) / 1024
    stats["disk_free_percent"] = 100.0 - psutil.disk_usage(Text(Path.home())).percent

    with warnings.catch_warnings():
        if logging.root.level > logging.DEBUG:
            # If the logging level is bigger than debug, ignore
            # psutil.sensors_temperatures warnings
            warnings.simplefilter("ignore", category=RuntimeWarning)
        sensor_stat = (psutil.sensors_temperatures()
                       if hasattr(psutil, "sensors_temperatures") else {})
        if "coretemp" in sensor_stat and len(sensor_stat["coretemp"]):
            stats["cpu_temperature"] = max(float(t.current) for t in sensor_stat["coretemp"])

    # update cached measurements
    net_stats = psutil.net_io_counters()
    stats["network_tx_mbs"] = bytes_to_megabytes(net_stats.bytes_sent)
    stats["network_rx_mbs"] = bytes_to_megabytes(net_stats.bytes_recv)
    io_stats = psutil.disk_io_counters()
    stats["io_read_mbs"] = bytes_to_megabytes(io_stats.read_bytes)
    stats["io_write_mbs"] = bytes_to_megabytes(io_stats.write_bytes)

    # check if we can access the gpu statistics
    if self._gpustat:
        try:
            gpu_stat = self._gpustat.new_query()
            for i, g in enumerate(gpu_stat.gpus):
                # only monitor the active gpu's, if none were selected, monitor everything
                if self._active_gpus and i not in self._active_gpus:
                    continue
                stats["gpu_%d_temperature" % i] = float(g["temperature.gpu"])
                stats["gpu_%d_utilization" % i] = float(g["utilization.gpu"])
                stats["gpu_%d_mem_usage" % i] = (
                    100. * float(g["memory.used"]) / float(g["memory.total"]))
                # already in MBs
                stats["gpu_%d_mem_free_gb" % i] = (
                    float(g["memory.total"] - g["memory.used"]) / 1024)
                stats["gpu_%d_mem_used_gb" % i] = float(g["memory.used"]) / 1024
        except Exception:
            # something happened and we can't use gpu stats,
            self._gpustat_fail += 1
            if self._gpustat_fail >= 3:
                self._task.get_logger().report_text(
                    'TRAINS Monitor: GPU monitoring failed getting GPU reading, '
                    'switching off GPU monitoring')
                self._gpustat = None
    return stats
def make_cdb_cmd(prog_full_path, crashed_pid):
    """Construct a command that uses the Windows debugger (cdb.exe) to turn a
    minidump file into a stack trace.

    Args:
        prog_full_path (Path): Full path to the program
        crashed_pid (int): PID of the program

    Returns:
        list: cdb command list
    """
    assert platform.system() == "Windows"
    # Look for a minidump.
    dump_name = Path.home() / "AppData" / "Local" / "CrashDumps" / (
        "%s.%s.dmp" % (prog_full_path.name, crashed_pid))

    if platform.uname()[2] == "10":
        # Windows 10
        debugging_folder = (Path(os.getenv("PROGRAMFILES(X86)")) /
                            "Windows Kits" / "10" / "Debuggers" / "x64")
    else:
        debugging_folder = (Path(os.getenv("PROGRAMW6432")) /
                            "Debugging Tools for Windows (x64)")

    # 64-bit cdb.exe seems to also be able to analyse 32-bit binary dumps.
    cdb_path = debugging_folder / "cdb.exe"
    if not cdb_path.is_file():  # pylint: disable=no-member
        print()
        print("WARNING: cdb.exe is not found - all crashes will be interesting.")
        print()
        return []

    if not is_win_dumping_to_default():
        return []

    loops = 0
    max_loops = 300
    while True:
        if dump_name.is_file():
            dbggr_cmd_path = Path(__file__).parent / "cdb_cmds.txt"
            assert dbggr_cmd_path.is_file()  # pylint: disable=no-member
            cdb_cmd_list = ["$<" + str(dbggr_cmd_path)]
            # See bug 902706 about -g.
            return [cdb_path, "-g", "-c", ";".join(cdb_cmd_list), "-z", str(dump_name)]
        time.sleep(0.200)
        loops += 1
        if loops > max_loops:
            # Windows may take some time to generate the dump.
            print("make_cdb_cmd waited a long time, but %s never appeared!" % str(dump_name))
            return []
def setup_logging(level=None,
                  config_path=_default_config_path,
                  to_file=True,
                  log_file_path=None,
                  env_key='LOG_CFG'):
    """
    Setup logging configuration

    Args:
        level: Optional log level name (case-insensitive); falls back to the
            module default when omitted.
        config_path: The yaml config file path.
        to_file: Whether to also log to a file.
        log_file_path: Optional explicit log file path; a timestamp is inserted
            before its extension so repeated runs don't clobber each other.
        env_key: An environment variable. You can set it in cmd before execute
            your python file to specify a config file in cmd.
            Usage: `LOG_CFG=my_logging.yaml python my_server.py`
    """
    system_info = SysInfo()
    if system_info.is_linux or system_info.is_macos:
        default_log_file_path = f"/tmp/miracle_debug_{time_string()}.log"
    else:
        default_log_file_path = str(Path.home() / "Downloads" / f"miracle_debug_{time_string()}.log")

    if log_file_path is not None:
        _parts = os.path.splitext(log_file_path)
        log_file_path = _parts[0] + "_" + time_string() + _parts[1]
    if level is not None:
        level = level.upper()

    path = config_path
    value = os.getenv(env_key, None)
    if value:
        path = value

    if os.path.exists(path):
        # Load the yaml file config
        with open(path, 'r') as f:
            config = yaml.load(f, Loader=yaml.Loader)
        # Modify the loaded logging config
        if level is not None:
            config["root"]["level"] = level
        if to_file:
            config["root"]["handlers"].append("file_handler")
            log_file_path = default_log_file_path if log_file_path is None else log_file_path
            config["handlers"]["file_handler"]["filename"] = str(log_file_path)
        # Parse the config dict into real logging config
        logging.config.dictConfig(config)
    else:
        # Init the basic stream handler.
        # BUG FIX: the original read `_default_level if level is not None else level`,
        # which replaced an explicitly supplied level with the default, and left a
        # missing level as None (KeyError in _str_logging_level). Fall back to the
        # default only when no level was supplied.
        level = _default_level if level is None else level
        logging.basicConfig(level=_str_logging_level[level])
        # Add the file log support
        if to_file:
            log_file_path = default_log_file_path if log_file_path is None else log_file_path
            file_handler = logging.FileHandler(log_file_path)
            logging.getLogger().addHandler(file_handler)
def make_collector():
    """Creates a jsfunfuzz collector specifying ~/sigcache as the signature cache dir

    Returns:
        Collector: jsfunfuzz collector object
    """
    sig_cache = Path.home() / "sigcache"
    sig_cache.mkdir(exist_ok=True)  # pylint: disable=no-member
    return Collector(sigCacheDir=str(sig_cache), tool="jsfunfuzz")
def _machine_stats(self):
    """
    :return: machine stats dictionary, all values expressed in megabytes
    """
    per_cpu = [float(v) for v in psutil.cpu_percent(percpu=True)]
    stats = {"cpu_usage": sum(per_cpu) / float(len(per_cpu))}

    bytes_per_megabyte = 1024 ** 2

    def bytes_to_megabytes(x):
        return x / bytes_per_megabyte

    virtual_memory = psutil.virtual_memory()
    # stats["memory_used_gb"] = bytes_to_megabytes(virtual_memory.used) / 1024
    used_bytes = (self._get_process_used_memory()
                  if self._process_info else virtual_memory.used)
    stats["memory_used_gb"] = bytes_to_megabytes(used_bytes) / 1024
    stats["memory_free_gb"] = bytes_to_megabytes(virtual_memory.available) / 1024
    stats["disk_free_percent"] = 100.0 - psutil.disk_usage(Text(Path.home())).percent

    with warnings.catch_warnings():
        if logging.root.level > logging.DEBUG:
            # If the logging level is bigger than debug, ignore
            # psutil.sensors_temperatures warnings
            warnings.simplefilter("ignore", category=RuntimeWarning)
        sensor_stat = (psutil.sensors_temperatures()
                       if hasattr(psutil, "sensors_temperatures") else {})
        if "coretemp" in sensor_stat and len(sensor_stat["coretemp"]):
            stats["cpu_temperature"] = max(float(t.current) for t in sensor_stat["coretemp"])

    # update cached measurements
    net_stats = psutil.net_io_counters()
    stats["network_tx_mbs"] = bytes_to_megabytes(net_stats.bytes_sent)
    stats["network_rx_mbs"] = bytes_to_megabytes(net_stats.bytes_recv)
    io_stats = psutil.disk_io_counters()
    stats["io_read_mbs"] = bytes_to_megabytes(io_stats.read_bytes)
    stats["io_write_mbs"] = bytes_to_megabytes(io_stats.write_bytes)

    # check if we can access the gpu statistics
    if self._gpustat:
        # noinspection PyBroadException
        try:
            stats.update(self._get_gpu_stats())
        except Exception:
            # something happened and we can't use gpu stats,
            self._gpustat_fail += 1
            if self._gpustat_fail >= 3:
                self._task.get_logger().report_text(
                    'ClearML Monitor: GPU monitoring failed getting GPU reading, '
                    'switching off GPU monitoring')
                self._gpustat = None
    return stats
def ensureBuild(options):  # pylint: disable=invalid-name,missing-docstring,missing-return-doc,missing-return-type-doc
    if options.existingBuildDir:
        # Pre-downloaded treeherder builds
        bDir = options.existingBuildDir  # pylint: disable=invalid-name
        bType = "local-build"  # pylint: disable=invalid-name
        bSrc = bDir  # pylint: disable=invalid-name
        bRev = ""  # pylint: disable=invalid-name
        manyTimedRunArgs = []  # pylint: disable=invalid-name
    elif not options.useTreeherderBuilds:
        options.build_options = build_options.parse_shell_opts(options.build_options)
        options.timeout = options.timeout or (
            300 if options.build_options.runWithVg else JS_SHELL_DEFAULT_TIMEOUT)

        with LockDir(sm_compile_helpers.get_lock_dir_path(
                Path.home(), options.build_options.repo_dir)):
            bRev = hg_helpers.get_repo_hash_and_id(options.build_options.repo_dir)[0]  # pylint: disable=invalid-name
            cshell = compile_shell.CompiledShell(options.build_options, bRev)
            updateLatestTxt = (options.build_options.repo_dir == "mozilla-central")  # pylint: disable=invalid-name
            compile_shell.obtainShell(cshell, updateLatestTxt=updateLatestTxt)

            bDir = cshell.get_shell_cache_dir()  # pylint: disable=invalid-name
            # Strip out first 3 chars or else the dir name in fuzzing jobs becomes:
            #   js-js-dbg-opt-64-dm-linux
            bType = build_options.computeShellType(options.build_options)[3:]  # pylint: disable=invalid-name
            bSrc = (  # pylint: disable=invalid-name
                "Create another shell in shell-cache like this one:\n"
                '%s -u -m %s -b "%s -R %s" -r %s\n\n'
                "==============================================\n"
                "| Fuzzing %s js shell builds\n"
                "| DATE: %s\n"
                "==============================================\n\n" % (
                    "python" + re.search("python.*[2-3]",
                                         os.__file__).group(0).replace("/", "").split("python")[-1],
                    "funfuzz.js.compile_shell",
                    options.build_options.build_options_str,
                    options.build_options.repo_dir,
                    bRev,
                    cshell.get_repo_name(),
                    time.asctime(),
                ))
            manyTimedRunArgs = mtrArgsCreation(options, cshell)  # pylint: disable=invalid-name
            print("buildDir is: %s" % bDir)
            print("buildSrc is: %s" % bSrc)
    else:
        print("TBD: We need to switch to the fuzzfetch repository.")
        sys.exit(0)

    return BuildInfo(bDir, bType, bSrc, bRev, manyTimedRunArgs)
def grab_mac_crash_log(crash_pid, log_prefix, use_log_files):
    """Find the required crash log in the given crash reporter directory.

    Args:
        crash_pid (str): PID value of the crashed process
        log_prefix (str): Prefix (may include dirs) of the log file
        use_log_files (bool): Boolean that decides whether *-crash.txt log files should be used

    Returns:
        str: Absolute (if use_log_files is False) or relative (if use_log_files is True) path to crash log file
    """
    assert parse_version(platform.mac_ver()[0]) >= parse_version("10.6")
    for base_dir in [Path.home(), Path("/")]:
        # Sometimes the crash reports end up in the root directory.
        # This possibly happens when the value of <value>:
        #     defaults write com.apple.CrashReporter DialogType <value>
        # is none, instead of server, or some other option.
        # It also happens when ssh'd into a computer.
        # And maybe when the computer is under heavy load.
        # See http://en.wikipedia.org/wiki/Crash_Reporter_%28Mac_OS_X%29
        reports_dir = base_dir / "Library" / "Logs" / "DiagnosticReports"
        # Find a crash log for the right process name and pid, preferring
        # newer crash logs (which sort last).
        if reports_dir.is_dir():  # pylint: disable=no-member
            crash_logs = sorted(reports_dir.iterdir(), reverse=True)  # pylint: disable=no-member
        else:
            crash_logs = []
        # BUG FIX: the original iterated "(x for x in crash_logs if crash_logs)";
        # the condition tested the whole list (always true when non-empty), not
        # each element, so the filter was a no-op - iterate the list directly.
        for file_name in crash_logs:
            full_report_path = reports_dir / file_name
            try:
                with io.open(str(full_report_path), "r", encoding="utf-8", errors="replace") as f:
                    first_line = f.readline()
                if first_line.rstrip().endswith("[%s]" % crash_pid):
                    if use_log_files:
                        # Copy, don't rename, because we might not have permissions
                        # (especially for the system rather than user crash log directory)
                        # Use copyfile, as we do not want to copy the permissions metadata over
                        crash_log = (log_prefix.parent /
                                     (log_prefix.stem + "-crash")).with_suffix(".txt")
                        shutil.copyfile(str(full_report_path), str(crash_log))
                        subprocess.run(["chmod", "og+r", str(crash_log)],  # pylint: disable=no-member
                                       cwd=os.getcwdu() if sys.version_info.major == 2 else os.getcwd(),
                                       check=True,
                                       timeout=9)
                        return str(crash_log)
                    return str(full_report_path)
            except OSError:
                # Maybe the log was rotated out between when we got the list
                # of files and when we tried to open this file. If so, it's
                # clearly not The One.
                pass
    return None
def beet_import(album_file_path):
    """Import *album_file_path* into the beets library.

    Raises:
        BeetImportError: if beets reported "Skipping." for the album.
    """
    music_root = Path.home() / Path("Personal/Media/Music/By Artist")
    command = [
        "beet",
        "--library=%s" % (music_root / "beet.db"),
        "--directory=%s" % music_root,
        "import",
        "--quiet",
        ".",
    ]

    def skipping_should_not_be_in_output(exit_code, lines):
        # Success iff beets never reported "Skipping." for the album.
        return all("Skipping." not in line for line in lines)

    succeeded = execute(command,
                        in_folder=str(album_file_path),
                        return_decider=skipping_should_not_be_in_output)
    if not succeeded:
        raise BeetImportError()
def parse_args(args):
    """Build the CLI argument parser and parse *args*."""
    parser = argparse.ArgumentParser(description=main.__doc__)
    parser.add_argument(
        '--datadir',
        dest='DATADIR',
        type=Path,
        default=Path.home() / '.zcash',
        help='Node datadir.',
    )
    parser.add_argument(
        '--statsdir',
        dest='STATSDIR',
        type=Path,
        default=Path.home() / 'zc-priv-stats',
        help='Stats db dir.',
    )
    return parser.parse_args(args)
def main():
    """Assemble License_texts.pdf from the individual license text files and
    prepend the cover page.
    """
    # NOTE: the original wrapped these pure path constructions in
    # `try: ... except: raise OSError` - the constructions cannot fail, and the
    # blanket except would only have masked the real error, so it was removed.
    path_omd = Path("%s/git/check_mk/omd/" % Path.home())
    path_license_texts = path_omd / "license_sources/license_texts/"
    path_pdf = path_omd / "License_texts.pdf"
    path_cover = path_omd / "license_sources/licenses_cover.pdf"

    registerFont(TTFont('Calibri', 'Calibri.ttf'))

    toc = TableOfContents()
    toc.levelStyles = [
        PS(fontName='Calibri', fontSize=14, name='TOCHeading1',
           leftIndent=20, firstLineIndent=-20, spaceBefore=5, leading=16),
        PS(fontSize=12, name='TOCHeading2',
           leftIndent=40, firstLineIndent=-20, leading=12),
    ]
    title = PS(name='Title', fontSize=24, leading=16)
    h1 = PS(name='Heading1', fontSize=16, leading=16)
    normal = PS(name='Normal', fontSize=8)
    spacer = Spacer(width=0, height=2 * cm)

    story = [Paragraph('<b>Content</b>', title), spacer, toc]
    for file_path in sorted(path_license_texts.iterdir()):
        with file_path.open(encoding="utf-8") as txt_file:
            headline = "<b>%s</b>" % txt_file.readline().replace("\n", "<br /><br />\n")
            text_content = txt_file.read().replace("\n", "<br />\n")
        story.append(PageBreak())
        story.append(heading(headline, h1))
        story.append(Paragraph(text_content, normal))

    # NOTE: the original also built an unused SimpleDocTemplate with page
    # margins and immediately rebound `doc`; only MyDocTemplate is used.
    doc = MyDocTemplate(str(path_pdf))
    doc.multiBuild(story)

    # Prepend the cover page, then write the merged result back over path_pdf.
    pdf_merger = PdfFileMerger()
    pdf_merger.append(PdfFileReader(str(path_cover)))
    pdf_merger.append(PdfFileReader(str(path_pdf)))
    pdf_merger.write(str(path_pdf))
def temp_dir(directory_name):
    """Context manager providing a temp directory that will be deleted on exit.

    Args:
        directory_name(:obj:`str`): directory name
    """
    dir_path = Path.home() / "tmp" / "personio" / directory_name
    try:
        dir_path.mkdir(parents=True, exist_ok=True)
        yield dir_path
    finally:
        # Clean up even when the body raised.
        if dir_path.exists():
            shutil.rmtree(ustr(dir_path))
def updateRepos():  # pylint: disable=invalid-name
    """Update Mercurial and Git repositories located in ~ and ~/trees ."""
    home_dir = Path.home()
    for tree in (home_dir, home_dir / "trees"):
        for name in sorted(os.listdir(str(tree))):
            name_path = Path(tree) / name
            # A repo is either a known name or a funfuzz-* checkout.
            looks_like_repo = name in REPOS or (name.startswith("funfuzz") and "-" in name)
            if name_path.is_dir() and looks_like_repo:
                logger.info("Updating %s ...", name)
                updateRepo(name_path)
def _machine_stats(self):
    """
    :return: machine stats dictionary, all values expressed in megabytes
    """
    cpu_usage = psutil.cpu_percent(percpu=True)
    stats = {"cpu_usage": sum(cpu_usage) / len(cpu_usage)}

    virtual_memory = psutil.virtual_memory()
    stats["memory_used"] = BytesSizes.megabytes(virtual_memory.used)
    stats["memory_free"] = BytesSizes.megabytes(virtual_memory.available)
    stats["disk_free_percent"] = 100 - psutil.disk_usage(Text(Path.home())).percent

    sensor_stat = (psutil.sensors_temperatures()
                   if hasattr(psutil, "sensors_temperatures") else {})
    if "coretemp" in sensor_stat and len(sensor_stat["coretemp"]):
        stats["cpu_temperature"] = max(t.current for t in sensor_stat["coretemp"])

    # update cached measurements
    net_stats = psutil.net_io_counters()
    stats["network_tx_mbs"] = BytesSizes.megabytes(net_stats.bytes_sent)
    stats["network_rx_mbs"] = BytesSizes.megabytes(net_stats.bytes_recv)
    io_stats = psutil.disk_io_counters()
    stats["io_read_mbs"] = BytesSizes.megabytes(io_stats.read_bytes)
    stats["io_write_mbs"] = BytesSizes.megabytes(io_stats.write_bytes)

    # check if we need to monitor gpus and if we can access the gpu statistics
    if self._active_gpus is not False and self._gpustat:
        try:
            gpu_stat = self._gpustat.new_query()
            for i, g in enumerate(gpu_stat.gpus):
                # only monitor the active gpu's, if none were selected, monitor everything
                if self._active_gpus and i not in self._active_gpus:
                    continue
                stats["gpu_temperature_{:d}".format(i)] = g["temperature.gpu"]
                stats["gpu_utilization_{:d}".format(i)] = g["utilization.gpu"]
                stats["gpu_mem_usage_{:d}".format(i)] = (
                    100.0 * g["memory.used"] / g["memory.total"])
                # already in MBs
                stats["gpu_mem_free_{:d}".format(i)] = g["memory.total"] - g["memory.used"]
                stats["gpu_mem_used_%d" % i] = g["memory.used"]
        except Exception as ex:
            # something happened and we can't use gpu stats,
            log.warning("failed getting machine stats: %s", report_error(ex))
            self._failure()
    return stats
def write_cache_from_objs(self, objs_list: list):
    """Persist *objs_list* to cache.json and refresh the local media cache dir.

    Raises:
        SystemError: if the cache file does not exist (app not configured).
    """
    cache_file = Path("%s/Documents/Libra/cache.json" % Path.home())
    if not cache_file.exists():
        raise SystemError("Application not properly configured.")
    with open(str(cache_file), 'w') as cache:
        json.dump({'mediaItems': objs_list}, cache, sort_keys=True, indent=4)

    # Clear out previously cached media files before re-downloading.
    cache_path = Path("%s/Documents/Libra/cached" % Path.home())
    for the_file in os.listdir(cache_path):
        file_path = os.path.join(cache_path, the_file)
        try:
            if os.path.isfile(file_path):
                os.unlink(file_path)
            # elif os.path.isdir(file_path): shutil.rmtree(file_path)
        except Exception as e:
            print(e)

    # Download each item at full ("=d") quality, named <id>.<extension>.
    for obj in objs_list:
        urlretrieve(
            str(obj['baseUrl'] + "=d"),
            "%s/%s.%s" % (cache_path, obj['id'], str(obj['mimeType']).split("/")[1]))
def set_up(self):
    """Create ~/Documents/Libra (plus its "cached" subdir) and seed it with the
    bundled messages.json / cache.json templates from the current directory.
    """
    app_dir = Path("%s/Documents/Libra/" % Path.home())
    if not app_dir.exists():
        os.mkdir(app_dir)
        os.mkdir("%s/cached" % app_dir)
    with open("messages.json", 'r') as msg_ex:
        msg_contents = json.load(msg_ex)
    with open('%s/messages.json' % app_dir, 'w') as msg_out:
        json.dump(msg_contents, msg_out, sort_keys=True, indent=4)
    with open("cache.json", 'r') as cch_ex:
        cch_contents = json.load(cch_ex)
    # BUG FIX: the destination was the literal string "%s/cache.json" (the
    # "% my_path" interpolation was missing), so the cache template was written
    # to a file literally named "%s" in a "cache.json"-less cwd path instead of
    # the app directory.
    with open("%s/cache.json" % app_dir, 'w') as cch_out:
        json.dump(cch_contents, cch_out, sort_keys=True, indent=4)
def auth(scopes):
    """Run the installed-app OAuth flow and return the resulting credentials."""
    home = Path.home()
    flow = InstalledAppFlow.from_client_secrets_file(
        '%s/Documents/Libra/client_secret.json' % home, scopes=scopes)
    return flow.run_local_server(
        host='localhost',
        port=8080,
        authorization_prompt_message="",
        success_message='The auth flow is complete; you may close this window.',
        open_browser=True)
def _machine_stats():
    """
    :return: machine stats dictionary, all values expressed in megabytes
    """
    per_cpu = [float(v) for v in psutil.cpu_percent(percpu=True)]
    stats = {"cpu_usage": sum(per_cpu) / float(len(per_cpu))}

    megabyte = 1024 ** 2

    def bytes_to_megabytes(x):
        return x / megabyte

    virtual_memory = psutil.virtual_memory()
    stats["memory_used_gb"] = bytes_to_megabytes(virtual_memory.used) / 1024
    stats["memory_free_gb"] = bytes_to_megabytes(virtual_memory.available) / 1024
    stats["disk_free_percent"] = 100.0 - psutil.disk_usage(Text(Path.home())).percent

    sensor_stat = (psutil.sensors_temperatures()
                   if hasattr(psutil, "sensors_temperatures") else {})
    if "coretemp" in sensor_stat and len(sensor_stat["coretemp"]):
        stats["cpu_temperature"] = max(float(t.current) for t in sensor_stat["coretemp"])

    # update cached measurements
    net_stats = psutil.net_io_counters()
    stats["network_tx_mbs"] = bytes_to_megabytes(net_stats.bytes_sent)
    stats["network_rx_mbs"] = bytes_to_megabytes(net_stats.bytes_recv)
    io_stats = psutil.disk_io_counters()
    stats["io_read_mbs"] = bytes_to_megabytes(io_stats.read_bytes)
    stats["io_write_mbs"] = bytes_to_megabytes(io_stats.write_bytes)

    # check if we can access the gpu statistics
    if gpustat:
        gpu_stat = gpustat.new_query()
        for i, g in enumerate(gpu_stat.gpus):
            stats["gpu_%d_temperature" % i] = float(g["temperature.gpu"])
            stats["gpu_%d_utilization" % i] = float(g["utilization.gpu"])
            stats["gpu_%d_mem_usage" % i] = (
                100. * float(g["memory.used"]) / float(g["memory.total"]))
            # already in MBs
            stats["gpu_%d_mem_free_gb" % i] = float(g["memory.total"] - g["memory.used"]) / 1024
            stats["gpu_%d_mem_used_gb" % i] = float(g["memory.used"]) / 1024
    return stats
#!/usr/bin/env python2 # coding=utf-8 from __future__ import print_function from peewee import fn, Entity from pathlib2 import Path import subprocess from termcolor import colored from pyjoplin.models import Note, NoteIndex, database as db from pyjoplin.configuration import config from pyjoplin import notification path_repo = Path.home() / 'Backup/joplin' printc = lambda x: print(colored(x, 'cyan')) notes = Note.select().order_by(Note.title) print("Listing empty notes:") for note in notes: if not note.body: printc('Empty: %s %s' % (note.id, note.title)) notefile = '%s.md' % note.id cmd = 'git rev-list HEAD -- %s ' % notefile out = subprocess.check_output(cmd, shell=True, cwd=str(path_repo)) for sha in out.strip().split('\n'): cmd = 'git show %s:%s' % (sha, notefile) note_content = subprocess.check_output(cmd, shell=True, cwd=str(path_repo)) filtered_lines = list()