Example #1
from pathlib import Path
from statistics import mean, median
from subprocess import run


def run_experiment(compiler, opt_levels, directory, number_of_experiments,
                   csv_file, csv_writer, paths, configs):
    for config in configs:
        for opt_level in opt_levels:
            for path in paths:
                try:
                    exe_path = Path(str(path).replace('.c', '.exe'))
                    name, permutation = exe_path.stem.rsplit('_', 1)
                    print(
                        f"Running: {name}, permutation: {permutation}, optimization: {opt_level}, config: {config}"
                    )
                    run(f'{compiler} {path} -O{opt_level} -o {exe_path}',
                        shell=True,
                        check=True)
                    runtimes = []
                    for i in range(1, number_of_experiments + 1):
                        output = run(f'{exe_path} {config}',
                                     shell=True,
                                     check=True,
                                     capture_output=True)
                        runtimes.append(float(output.stdout.decode('ascii')))
                    csv_writer.writerow(
                        [name, config, permutation, opt_level] + runtimes +
                        [median(runtimes), mean(runtimes)])
                    csv_file.flush()
                    # fsync()
                except Exception as e:
                    # use path.name here: "name" may be unbound if the
                    # filename parse above was what raised
                    print(f"{path.name} failed with exception: {e}. Skipping")
                finally:
                    if exe_path.exists():
                        exe_path.unlink()
    for path in paths:
        path.unlink()
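A hypothetical driver for run_experiment, just to show how the pieces fit together; the compiler, file layout, and configs below are illustrative assumptions, not part of the original example.

import csv
from pathlib import Path

directory = Path('benchmarks')
paths = sorted(directory.glob('*.c'))  # expects names like matmul_0.c, matmul_1.c, ...
with open('results.csv', 'w', newline='') as csv_file:
    csv_writer = csv.writer(csv_file)
    run_experiment('gcc', opt_levels=[0, 2, 3], directory=directory,
                   number_of_experiments=5, csv_file=csv_file,
                   csv_writer=csv_writer, paths=paths,
                   configs=['1024'])  # argv string passed to each compiled binary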
Example #2
def _do_generate_directory(self, sources: Tuple[pathlib.Path, ...],
                           destination: pathlib.Path):
    destination_base = destination
    logger.info("Generating %s from %s", destination,
                ', '.join(map(str, sources)))
    for base, source in yield_all_sources(*sources):
        destination = destination_base / source.relative_to(base)
        if source.is_symlink():
            self._create_symlink(source, destination)
        elif source.is_dir():
            if destination.exists():
                logger.debug("Clearing directory %s", destination)
                # Clear destination directory contents
                if destination.is_symlink() or not destination.is_dir():
                    raise GeneratorError(
                        "Destination {} is not a directory".format(
                            destination))
                for path in destination.iterdir():
                    if path.is_dir():
                        shutil.rmtree(str(path))
                    else:
                        path.unlink()
            else:
                logger.debug("Creating directory %s", destination)
                destination.mkdir(self.dir_mode, exist_ok=True)
            shutil.copystat(str(source),
                            str(destination),
                            follow_symlinks=False)
            self._setstat(destination, self.dir_mode)
        else:
            if source.suffix == self.TEMPLATE_SUFFIX:
                destination = destination.with_name(destination.stem)
                self._generate_template_to_file(base, source, destination)
            else:
                self._copy_to_file(source, destination)
Example #3
    def serve_content(self, content, alias=None, as_file=True):
        uri = None

        if as_file:
            filepath, filename = self.get_random_path_at_webroot()
            try:
                with open(filepath, 'w') as out:
                    out.write(content)
                self.served_files.add(filepath)
            except Exception:
                if path.isfile(filepath):
                    # os.path has no unlink(); os.unlink is needed here
                    # (assumes "import os" at module top)
                    os.unlink(filepath)

                raise

            uri = self.wwwroot + '/' + filename
        else:
            uri = self.random_path()
            self.app.add_handlers('.*', [(uri, StaticTextHandler, {
                'content': content
            })])

        if alias:
            self.aliases[uri] = alias

        return uri
Example #4
def remove_file(path: Union[str, Path]):
    path = make_path(path)
    if path.exists() and path.is_file():
        LOGGER.debug("delete file: %s", str(path))
        path.unlink()
    else:
        LOGGER.debug("file does not exist/path is not a file, do nothing")
Example #6
def delete_empty_file(local_filename):
    path = Path(local_filename)
    try:
        size = path.stat().st_size
    except FileNotFoundError:
        return
    if size == 0:
        path.unlink()
Example #7
def _postsetup_autoremove_kernels(self) -> None:
    """Allow apt to autoremove kernels."""
    # this file is one APT creates to make sure we don't "autoremove" our
    # currently in-use kernel, which doesn't really apply to
    # debootstraps/Docker images that don't even have kernels installed
    path = self._target / 'etc' / 'apt' / 'apt.conf.d' / '01autoremove-kernels'
    try:
        path.unlink()
    except FileNotFoundError:
        pass
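Since Python 3.8 the try/except is unnecessary: Path.unlink() accepts a missing_ok flag that swallows the FileNotFoundError for you.

from pathlib import Path

path = Path('/etc/apt/apt.conf.d/01autoremove-kernels')
path.unlink(missing_ok=True)  # no error if the file is already gone (Python 3.8+)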
Example #8
    def download_file(self, url: str, sha256sum: str) -> Optional[Path]:
        path = self._file_url_to_local_path(url)

        # Avoid downloading again if we have the file and it matches the hash.
        if path.exists():
            existing_hash = utils.hash(str(path))
            if existing_hash == sha256sum:
                return None
            else:
                logger.info(
                    "Checksum mismatch with local file {}: "
                    "expected {} got {}, will re-download.".format(
                        path, sha256sum, existing_hash
                    )
                )
                path.unlink()

        # Hack: download from another host
        if self.customized_host is not None and url.startswith("https://files.pythonhosted.org/"):
            url = url.replace("https://files.pythonhosted.org/", self.customized_host)

        logger.info(f"Downloading: {url}")

        dirname = path.parent
        if not dirname.exists():
            dirname.mkdir(parents=True)

        # Even more special handling for the serial of package files here:
        # We do not need to track a serial for package files
        # as PyPI generally only allows a file to be uploaded once
        # and then maybe deleted. Re-uploading (and thus changing the hash)
        # is only allowed in extremely rare cases with intervention from the
        # PyPI admins.
        # Py3 sometimes has requests lib return bytes. Need to handle that
        r = self.mirror.master.get(url, required_serial=None, stream=True)
        checksum = hashlib.sha256()
        with utils.rewrite(path, "wb") as f:
            for chunk in r.iter_content(chunk_size=64 * 1024):
                checksum.update(chunk)
                f.write(chunk)
            existing_hash = checksum.hexdigest()
            if existing_hash == sha256sum:
                # Good case: the file we got matches the checksum we expected
                pass
            else:
                # Bad case: the file we got does not match the expected
                # checksum. Even if this should be the rare case of a
                # re-upload this will fix itself in a later run.
                raise ValueError(
                    "Inconsistent file. {} has hash {} instead of {}.".format(
                        url, existing_hash, sha256sum
                    )
                )
        return path
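The core pattern above, hashing each chunk as it is written so the file never needs a second read for verification, also works outside this mirror code; a minimal standalone sketch with requests (function name and parameters are illustrative):

import hashlib
from pathlib import Path

import requests

def download_verified(url: str, dest: Path, sha256sum: str) -> Path:
    checksum = hashlib.sha256()
    with requests.get(url, stream=True) as r, open(dest, 'wb') as f:
        r.raise_for_status()
        for chunk in r.iter_content(chunk_size=64 * 1024):
            checksum.update(chunk)  # hash while writing: a single pass over the data
            f.write(chunk)
    if checksum.hexdigest() != sha256sum:
        dest.unlink()  # drop the corrupt download so the next run retries
        raise ValueError(f"{url}: hash mismatch")
    return dest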
Example #9
def unlink_parent_dir(path: Path) -> None:
    """ Remove a file and if the dir is empty remove it """
    logger.info(f"unlink {str(path)}")
    path.unlink()

    parent_path = path.parent
    try:
        parent_path.rmdir()
        logger.info(f"rmdir {str(parent_path)}")
    except OSError as oe:
        logger.debug(f"Did not remove {str(parent_path)}: {str(oe)}")
Example #10
    async def download_file(self,
                            url: str,
                            sha256sum: str,
                            chunk_size: int = 64 * 1024) -> Optional[Path]:
        path = self._file_url_to_local_path(url)

        # Avoid downloading again if we have the file and it matches the hash.
        if path.exists():
            existing_hash = utils.hash(str(path))
            if existing_hash == sha256sum:
                return None
            else:
                logger.info(
                    f"Checksum mismatch with local file {path}: expected {sha256sum} "
                    + f"got {existing_hash}, will re-download.")
                path.unlink()

        logger.info(f"Downloading: {url}")

        dirname = path.parent
        if not dirname.exists():
            dirname.mkdir(parents=True)

        # Even more special handling for the serial of package files here:
        # We do not need to track a serial for package files
        # as PyPI generally only allows a file to be uploaded once
        # and then maybe deleted. Re-uploading (and thus changing the hash)
        # is only allowed in extremely rare cases with intervention from the
        # PyPI admins.
        r_generator = self.mirror.master.get(url, required_serial=None)
        response = await r_generator.asend(None)

        checksum = hashlib.sha256()

        with utils.rewrite(path, "wb") as f:
            while True:
                chunk = await response.content.read(chunk_size)
                if not chunk:
                    break
                checksum.update(chunk)
                f.write(chunk)

            existing_hash = checksum.hexdigest()
            if existing_hash != sha256sum:
                # Bad case: the file we got does not match the expected
                # checksum. Even if this should be the rare case of a
                # re-upload this will fix itself in a later run.
                raise ValueError(
                    f"Inconsistent file. {url} has hash {existing_hash} " +
                    f"instead of {sha256sum}.")

        return path
Example #11
async def run(self) -> None:
    path = self.node.path
    if os.path.lexists(str(path)):
        path.unlink()
    self.logger.debug(
        "create directory <ITALIC>%(path)s<UPRIGHT>%(parents)s",
        dict(
            path=self.node.relative_path,
            parents=' with parents' if self.parents else '')
    )
    path.mkdir(mode=0o755, parents=self.parents)  # rwxr-xr-x
    self.node.modified = True
    self.node.updated = True
Example #12
def delete_files(directory, filenames, files_to_keep=()):
    ensure_overwritable(*[directory.joinpath(f) for f in filenames])
    # We implement the "files to keep" logic using inodes rather than names so
    # we can safely handle case-insensitive filesystems
    inodes_to_keep = set()
    for filename in files_to_keep:
        try:
            stat = directory.joinpath(filename).stat()
            inodes_to_keep.add((stat.st_dev, stat.st_ino))
        except FileNotFoundError:
            pass
    for filename in filenames:
        path = directory / filename
        try:
            stat = path.stat()
        except FileNotFoundError:
            continue
        inode = (stat.st_dev, stat.st_ino)
        if inode not in inodes_to_keep:
            path.unlink()
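A hypothetical call to illustrate the inode logic above; the directory layout is assumed.

from pathlib import Path

delete_files(Path('output'),
             filenames=['model.bin', 'Model.bin', 'log.txt'],
             files_to_keep=['model.bin'])
# On a case-insensitive filesystem 'Model.bin' and 'model.bin' resolve to the
# same inode, so the kept file survives even though both names are listed.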
Example #13
def main(verbose, web, db):
    """
    remove blacklisted projects from the mirror
    """

    web = os.path.expanduser(web)
    if db is None:
        db = os.path.join(web, "pypi.db")
    else:
        db = os.path.expanduser(db)

    bl = pickle.load(open("blacklist.cache", "rb"))[0]
    conn = sqlite3.connect(db)
    web = pathlib.Path(web)

    total = 0
    count = 0
    for name, url, size in conn.execute("select name,url,size from file"):
        if name not in bl:
            continue

        url = urlparse(url).path[1:]
        path = web / url
        if path.exists():
            path.unlink()
            try:
                path.parent.rmdir()
            except OSError:
                # not empty, should not occur
                pass

            total += size
            count += 1

            if verbose:
                print(f"removed: {path} ({size} bytes)")

    print(f"files removed: {count}")
    print(f"space freed: {hf.format_size(total)}")
Example #14
    def record_video(self):
        print('Recording')
        url = 'http://127.0.0.1:8080/stream/video.mjpeg'
        local_filename = url.split('/')[-1]
        # NOTE the stream=True parameter
        r = requests.get(url, stream=True)

        filename = time.strftime("%Y%m%d-%H%M%S") + '.mp4'
        save_path = '/home/pi/Downloads/tesis/video'
        completed_video = os.path.join(save_path, filename)

        i = 0
        with open(completed_video, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
                    i = i + 1
                    #f.flush() commented by recommendation from J.F.Sebastian
                    if not self.recording or i == 10000:
                        print('Sending')
                        # close the stream file before re-opening it for reading
                        f.close()
                        with open(completed_video, 'rb') as video_file:
                            video = video_file.read()
                        encode_video = base64.encodebytes(video)

                        # renamed from "json" to avoid shadowing the stdlib module
                        payload = {
                            'ip_address': '10.10.10.110',
                            'date': time.strftime('%Y-%m-%d %H:%M:%S'),
                            'video': encode_video
                        }

                        r = self.api.post(payload, 'createvideo')
                        print(r.json())
                        pathlib.Path(completed_video).unlink()
                        break
        return local_filename
Example #15
def connect_db(db_file=None):
    # enclosing definition restored; the name and signature are inferred from
    # the connect_db(db_file) call in the __main__ block below
    if db_file is None:
        db_file = p.abspath(p.join(p.dirname(__file__), "sounds.db"))

    global engine, db_session
    # db_file is already absolute, so "sqlite:///" plus the path yields the
    # correct four-slash absolute form; the original "sqlite:////" produced five
    engine = create_engine("sqlite:///%s" % (db_file,), convert_unicode=True)
    # check if db file exists, else create db
    if not p.isfile(db_file):
        create_db(engine)
    db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))


def create_db(engine):
    Base.metadata.create_all(bind=engine, checkfirst=True)


if __name__ == "__main__":
    db_file = p.abspath(p.join(os.getcwd(), "sounds.db"))
    if p.isfile(db_file):
        os.unlink(db_file)  # os.path (aliased as p) has no unlink(); os.unlink does

    connect_db(db_file)
    print("Engine: ", engine)
    create_db(engine)

    store_poetry("to be or not to be")
    db_session.commit()

    all_poetry = db_session.query(Poetry).all()
    for pp in all_poetry:
        print("%s spoken at %s" % (pp.poetry, pp.sounded_at))
Example #16
def sync_handler(sync_val: str, repository_val=None, requested_bundle_name=None):

    sync_val = sync_val.lower()

    if sync_val == "all" or sync_val == "force":
        # Firstly, check our remote connection before doing anything
        remote_state, remote_url = pbgit.check_remote_connection()
        if not remote_state:
            pbtools.error_state(
                f"Remote connection was not successful. Please verify that you have a valid git remote URL & internet connection. Current git remote URL: {remote_url}")
        else:
            pblog.info("Remote connection is up")

        pblog.info("------------------")

        pblog.info(f"Executing {sync_val} sync command")
        pblog.info(f"PBpy Library Version: {pbpy_version.ver}")
        pblog.info(f"PBSync Program Version: {pbsync_version.ver}")

        pblog.info("------------------")

        detected_git_version = pbgit.get_git_version()
        needs_git_update = False
        if detected_git_version == pbconfig.get('supported_git_version'):
            pblog.info(f"Current Git version: {detected_git_version}")
        else:
            pblog.error("Git is not updated to the supported version in your system")
            pblog.error(f"Supported Git Version: {pbconfig.get('supported_git_version')}")
            pblog.error(f"Current Git Version: {detected_git_version}")
            pblog.error("Please install the supported Git version from https://github.com/microsoft/git/releases")
            pblog.error("Visit https://github.com/ProjectBorealisTeam/pb/wiki/Prerequisites for installation instructions")
            if os.name == "nt":
                webbrowser.open(f"https://github.com/microsoft/git/releases/download/v{pbconfig.get('supported_git_version')}/Git-{pbconfig.get('supported_git_version')}-64-bit.exe")
            needs_git_update = True

        if os.name == "nt":
            # find Git/cmd/git.exe
            git_paths = [path for path in pbtools.whereis("git") if "cmd" in path.parts]

            if len(git_paths) > 0:
                bundled_git_lfs = False

                is_admin = pbuac.isUserAdmin()

                delete_paths = []

                for git_path in git_paths:
                    # find Git from Git/cmd/git.exe
                    git_root = git_path.parents[1]
                    possible_lfs_paths = ["cmd/git-lfs.exe", "mingw64/bin/git-lfs.exe", "mingw64/libexec/git-core/git-lfs.exe"]
                    for possible_lfs_path in possible_lfs_paths:
                        path = git_root / possible_lfs_path
                        if path.exists():
                            try:
                                if is_admin:
                                    path.unlink()
                                else:
                                    delete_paths.append(str(path))
                            except FileNotFoundError:
                                pass
                            except OSError:
                                pblog.error(f"Git LFS is bundled with Git, overriding your installed version. Please remove {path}.")
                                bundled_git_lfs = True

                if not is_admin and len(delete_paths) > 0:
                    pblog.info("Requesting permission to delete bundled Git LFS which is overriding your installed version...")
                    quoted_paths = [f'"{path}"' for path in delete_paths]
                    delete_cmdline = ["cmd.exe", "/c", "DEL", "/q", "/f"] + quoted_paths
                    try:
                        ret = pbuac.runAsAdmin(delete_cmdline)
                    except OSError:
                        pblog.error("User declined permission. Automatic delete failed.")

                for delete_path in delete_paths:
                    path = pathlib.Path(delete_path)
                    if path.exists():
                        bundled_git_lfs = True
                        pblog.error(f"Git LFS is bundled with Git, overriding your installed version. Please remove {path}.")

                if bundled_git_lfs:
                    pbtools.error_state()

        detected_lfs_version = pbgit.get_lfs_version()
        if detected_lfs_version == pbconfig.get('supported_lfs_version'):
            pblog.info(f"Current Git LFS version: {detected_lfs_version}")
        else:
            pblog.error("Git LFS is not updated to the supported version in your system")
            pblog.error(f"Supported Git LFS Version: {pbconfig.get('supported_lfs_version')}")
            pblog.error(f"Current Git LFS Version: {detected_lfs_version}")
            pblog.error("Please install the supported Git LFS version from https://git-lfs.github.com")
            if os.name == "nt":
                supported_lfs_version = pbconfig.get('supported_lfs_version').split("/")[1]
                webbrowser.open(f"https://github.com/git-lfs/git-lfs/releases/download/v{supported_lfs_version}/git-lfs-windows-v{supported_lfs_version}.exe")
            needs_git_update = True

        if needs_git_update:
            pbtools.error_state()

        pblog.info("------------------")

        # Do not execute if Unreal Editor is running
        if pbtools.get_running_process("UE4Editor") is not None:
            pbtools.error_state("Unreal Editor is currently running. Please close it before running PBSync. It may be listed only in Task Manager as a background process. As a last resort, you should log off and log in again.")

        current_branch = pbgit.get_current_branch_name()
        # repo was already fetched in StartProject.bat
        if current_branch != "promoted":
            pblog.info("Fetching recent changes on the repository...")
            fetch_base = ["git", "fetch", "origin"]
            branches = {"promoted", "master", "trunk", current_branch}
            fetch_base.extend(branches)
            pbtools.get_combined_output(fetch_base)

        # Do some housekeeping for git configuration
        pbgit.setup_config()

        # Check if we have correct credentials
        pbgit.check_credentials()

        pblog.info("------------------")

        # Execute synchronization part of script if we're on the expected branch, or force sync is enabled
        is_on_expected_branch = pbgit.compare_with_current_branch_name(pbconfig.get('expected_branch_name'))
        if sync_val == "force" or is_on_expected_branch:
            pbtools.resolve_conflicts_and_pull()

            pblog.info("------------------")

            project_version = pbunreal.get_project_version()
            if project_version is not None:
                pblog.info(f"Current project version: {project_version}")
            else:
                pbtools.error_state(
                    "Something went wrong while fetching project version. Please request help in #tech-support.")

            if pbhub.is_pull_binaries_required():
                pblog.info("Binaries are not up to date, trying to pull new binaries...")
                ret = pbhub.pull_binaries(project_version)
                if ret == 0:
                    pblog.info("Binaries were pulled successfully")
                elif ret < 0:
                    pbtools.error_state("Binaries pull failed, please view log for instructions.")
                elif ret > 0:
                    pbtools.error_state("An error occurred while pulling binaries. Please request help in #tech-support to resolve it, and please do not run StartProject.bat until the issue is resolved.", True)
            else:
                pblog.info("Binaries are up-to-date")
        else:
            pblog.warning(f"Current branch is not supported for repository synchronization: {pbgit.get_current_branch_name()}. Auto synchronization "
                          "will be disabled")

        pblog.info("------------------")

        pblog.info("Checking for engine updates...")
        if pbgit.sync_file("ProjectBorealis.uproject") != 0:
            pbtools.error_state(
                "Something went wrong while updating the .uproject file. Please request help in #tech-support.")

        engine_version = pbunreal.get_engine_version(False)

        pblog.info("Trying to register current engine build if it exists. Otherwise, the build will be downloaded...")

        symbols_needed = pbunreal.is_versionator_symbols_enabled()
        bundle_name = pbconfig.get("ue4v_default_bundle")

        if pbunreal.run_ue4versionator(bundle_name, symbols_needed) != 0:
            pblog.error(f"Something went wrong while registering engine build {bundle_name}-{engine_version}. Please request help in #tech-support.")
            sys.exit(1)
        else:
            pblog.info(f"Engine build {bundle_name}-{engine_version} successfully registered")

        # Clean old engine installations, do that only in expected branch
        if is_on_expected_branch:
            if pbunreal.clean_old_engine_installations():
                pblog.info("Old engine installations are successfully cleaned")
            else:
                pblog.warning("Something went wrong while cleaning old engine installations. You may want to clean them manually.")

        pblog.info("------------------")

        if pbunreal.check_ue4_file_association():
            try:
                os.startfile(os.path.normpath(os.path.join(os.getcwd(), "ProjectBorealis.uproject")))
            except NotImplementedError:
                pblog.info("You may now launch ProjectBorealis.uproject with Unreal Engine 4.")
        else:
            pbtools.error_state(".uproject extension is not correctly set into Unreal Engine. Make sure you have Epic Games Launcher installed. If problem still persists, please get help in #tech-support.")

    elif sync_val == "engineversion":
        if repository_val is None:
            pblog.error("--repository <URL> argument should be provided with --sync engine command")
            sys.exit(1)
        engine_version = pbunreal.get_latest_available_engine_version(str(repository_val))
        if engine_version is None:
            pblog.error("Error while trying to fetch latest engine version")
            sys.exit(1)
        if not pbunreal.set_engine_version(engine_version):
            pblog.error("Error while trying to update engine version in .uproject file")
            sys.exit(1)
        pblog.info(f"Successfully changed engine version as {str(engine_version)}")

    elif sync_val == "ddc":
        pbunreal.generate_ddc_data()

    elif sync_val == "binaries":
        project_version = pbunreal.get_project_version()
        ret = pbhub.pull_binaries(project_version, True)
        if ret == 0:
            pblog.info(f"Binaries for {project_version} pulled & extracted successfully")
        else:
            pblog.error(f"Failed to pull binaries for {project_version}")
            sys.exit(1)

    elif sync_val == "engine":
        # Pull engine build with ue4versionator & register it
        if requested_bundle_name is None:
            requested_bundle_name = pbconfig.get("ue4v_default_bundle")

        engine_version = pbunreal.get_engine_version(False)
        if pbunreal.run_ue4versionator(requested_bundle_name) != 0:
            pblog.error(f"Something went wrong while registering engine build {requested_bundle_name}-{engine_version}")
            sys.exit(1)
        else:
            pblog.info(f"Engine build {requested_bundle_name}-{engine_version} successfully registered")
Example #17
async def EnsureDeleteFile(ver: AbstractFileVersion, fb: AbstractFeedbackUI):
    # the parameter is named "ver"; the original body referenced an
    # undefined "version"
    path = pathlib.Path(ver.GetAbsolutePath())
    if path.exists():
        fb.feedback("Deleting file...")
        path.unlink()
        fb.feedback("Done.")