Example #1
def load_S3_SL_1_RBT_zip(s3_file_name: str) -> Dataset:
    from tempfile import mkdtemp
    ds_list = []
    with fs.open_fs(s3_file_name) as zip_fs:
        metadata = read_metadata(zip_fs)
        tmp_dir = mkdtemp()
        copy.copy_dir(zip_fs, '/', OSFS('/'), tmp_dir)
        matches = glob(f'{tmp_dir}/**/*.nc')
        for match in matches:
            with xr.load_dataset(match,
                                 decode_times=False,
                                 decode_cf=False,
                                 decode_coords=False,
                                 mask_and_scale=False,
                                 decode_timedelta=False,
                                 chunks={}) as array:
                source = s3_band_name(path.basename(match))
                ds = rename_dimensions_and_variables(array, source)
                if source == "viscal":
                    ds['viscal.calibration_time'] = xr.apply_ufunc(
                        convert_date, ds['viscal.calibration_time'])
                    ds['viscal.ANX_time'] = xr.apply_ufunc(
                        convert_date, ds['viscal.ANX_time'])
                ds_list.append(ds)
        ds = xr.merge(ds_list)
        ds = ds.chunk(chunks='auto')
        shutil.rmtree(tmp_dir)
        return Dataset(data=ds, metadata=metadata)
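For reference, a minimal sketch (not the original project's code) of the extract-then-clean-up pattern above, assuming a local archive at the placeholder path 'sample.zip' and a POSIX root filesystem:

from tempfile import mkdtemp
import shutil

import fs
from fs.copy import copy_dir
from fs.osfs import OSFS

tmp_dir = mkdtemp()
with fs.open_fs("zip://sample.zip") as zip_fs:  # placeholder archive
    # Copy the whole archive contents into the temporary directory
    copy_dir(zip_fs, "/", OSFS("/"), tmp_dir)
# ... read the extracted files here ...
shutil.rmtree(tmp_dir)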
Example #2
    def build_static_directory(self, obj):
        """
        Builds an object's static subdirectory.
        """
        # The location of static files in the dynamic page directory
        source_dir = os.path.join(obj.page_directory_path, 'static')

        # The location in the build directory where we want to copy them
        target_dir = path.join(
            bigbuild.get_build_directory(),
            obj.get_static_url().lstrip("/")
        )

        # An internal django-bakery trick to gzip them if we need to
        if settings.BAKERY_GZIP:
            cmd = Build()
            cmd.set_options()
            cmd.copytree_and_gzip(
                source_dir,
                target_dir
            )
        else:
            # Or a more vanilla way of copying the files with Python
            logger.debug("Copying {}{} to {}{}".format("osfs://", source_dir, self.fs_name, target_dir))
            copy.copy_dir("osfs:///", smart_text(source_dir), self.fs, smart_text(target_dir))
Example #3
def load_model(filesystem: S3FS, config: PredictionConfig) -> ResUnetA:
    """ Copy the model locally if not existing and load it """
    if not os.path.exists(f'{config.temp_model_path}/{config.model_name}'):
        if not filesystem.exists(
                f'{config.model_path}/{config.model_name}/checkpoints/'):
            filesystem.makedirs(
                f'{config.model_path}/{config.model_name}/checkpoints/')
        copy_dir(filesystem,
                 f'{config.model_path}/{config.model_name}/checkpoints/',
                 f'{config.temp_model_path}/{config.model_name}',
                 'checkpoints')
        copy_file(filesystem,
                  f'{config.model_path}/{config.model_name}/model_cfg.json',
                  f'{config.temp_model_path}/{config.model_name}',
                  'model_cfg.json')

    input_shape = dict(
        features=[None, config.height, config.width, config.n_channels])

    with open(f'{config.temp_model_path}/{config.model_name}/model_cfg.json',
              'r') as jfile:
        model_cfg = json.load(jfile)

    # initialise model from config, build, compile and load trained weights
    model = ResUnetA(model_cfg)
    model.build(input_shape)
    model.net.compile()
    model.net.load_weights(
        f'{config.temp_model_path}/{config.model_name}/checkpoints/model.ckpt')

    return model
Example #4
 def build_object(self, obj):
     """
     Build the provided object
     """
     # If it is a Page object ...
     if isinstance(obj, Page):
         # ... use the default bakery build
         super(PageDetailView, self).build_object(obj)
         # ... and build the static directory as well.
         self.build_static_directory(obj)
     # If it is an ArchivedPage object ...
     elif isinstance(obj, ArchivedPage):
         # ... do a copy and paste from the archive to the build directory
         target = obj.build_directory_path
         if settings.BAKERY_GZIP:
             cmd = Build()
             cmd.set_options()
             cmd.copytree_and_gzip(
                 obj.archive_static_directory_path,
                 target
             )
         else:
             logger.debug("Copying {}{} to {}{}".format(
                 "osfs://",
                 obj.archive_static_directory_path,
                 self.fs_name,
                 target
             ))
             copy.copy_dir(
                 "osfs:///",
                 smart_text(obj.archive_static_directory_path),
                 self.fs,
                 smart_text(target)
             )
Example #5
 def export_jsonlite(self, url: str):
     self.connection.commit()
     self.connection.close()
     remote_fs = open_fs(url)
     copy.copy_dir(self.local_fs, ".", remote_fs, ".")
     self.connection = sqlite3.connect(self.db_file)
     self.connection.row_factory = sqlite3.Row
Example #6
    def _try_copy_to_filesystem(self, source: str, target: str, filesystem: Optional[Filesystem]) -> None:
        other_filesystem = cast(PyFilesystemBased, filesystem).internal_fs
        if self.internal_fs.isdir(source):
            fscp.copy_dir(self.internal_fs, source,
                          other_filesystem, target)
            return

        fscp.copy_file(self.internal_fs, source,
                       other_filesystem, target)
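A minimal, self-contained sketch of the same directory-or-file dispatch, using in-memory filesystems in place of the wrapped filesystems above:

from fs import open_fs
from fs.copy import copy_dir, copy_file

src = open_fs("mem://")
src.makedirs("data")
src.writetext("data/a.txt", "hello")
dst = open_fs("mem://")

source = target = "data"
# Directories go through copy_dir, anything else through copy_file
if src.isdir(source):
    copy_dir(src, source, dst, target)
else:
    copy_file(src, source, dst, target)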
Example #7
    def build_static(self, *args, **options):
        """
        Builds the static files directory as well as robots.txt and favicon.ico
        """
        logger.debug("Building static directory")
        if self.verbosity > 1:
            self.stdout.write("Building static directory")
        management.call_command("collectstatic",
                                interactive=False,
                                verbosity=0)

        # Set the target directory inside the filesystem.
        target_dir = path.join(self.build_dir, settings.STATIC_URL.lstrip('/'))
        target_dir = smart_text(target_dir)

        exclude_dirs = getattr(settings, 'BAKERY_STATIC_EXCLUDE_DIRS', None)
        if not exclude_dirs:
            # explicitly set to None to make sure we don't get an empty list/tuple
            exclude_dirs = None

        if os.path.exists(self.static_root) and settings.STATIC_URL:
            if getattr(settings, 'BAKERY_GZIP', False):
                self.copytree_and_gzip(self.static_root, target_dir,
                                       exclude_dirs)
            # if gzip isn't enabled, just copy the tree straight over
            else:
                logger.debug("Copying {}{} to {}{}".format(
                    "osfs://", self.static_root, self.fs_name, target_dir))
                walker = Walker(exclude_dirs=exclude_dirs)
                copy.copy_dir("osfs:///",
                              self.static_root,
                              self.fs,
                              target_dir,
                              walker=walker)

        # If they exist in the static directory, copy the robots.txt
        # and favicon.ico files down to the root so they will work
        # on the live website.
        robots_src = path.join(target_dir, 'robots.txt')
        if self.fs.exists(robots_src):
            robots_target = path.join(self.build_dir, 'robots.txt')
            logger.debug("Copying {}{} to {}{}".format(self.fs_name,
                                                       robots_src,
                                                       self.fs_name,
                                                       robots_target))
            self.fs.copy(robots_src, robots_target)

        favicon_src = path.join(target_dir, 'favicon.ico')
        if self.fs.exists(favicon_src):
            favicon_target = path.join(self.build_dir, 'favicon.ico')
            logger.debug("Copying {}{} to {}{}".format(self.fs_name,
                                                       favicon_src,
                                                       self.fs_name,
                                                       favicon_target))
            self.fs.copy(favicon_src, favicon_target)
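For reference, a minimal sketch of copying a tree while skipping directories with fs.walk.Walker; the in-memory filesystems and the 'admin' directory name are placeholders, not django-bakery settings:

from fs import open_fs
from fs.copy import copy_dir
from fs.walk import Walker

walker = Walker(exclude_dirs=["admin"])  # skip any directory named 'admin'
with open_fs("mem://") as src, open_fs("mem://") as dst:
    src.makedirs("admin")
    src.writetext("admin/secret.txt", "private")
    src.makedirs("css")
    src.writetext("css/site.css", "body {}")
    copy_dir(src, "/", dst, "/", walker=walker)
    assert dst.exists("css/site.css")
    assert not dst.exists("admin/secret.txt")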
Example #8
 def build_media(self):
     """
     Build the media files.
     """
     logger.debug("Building media directory")
     if self.verbosity > 1:
         self.stdout.write("Building media directory")
     if os.path.exists(self.media_root) and settings.MEDIA_URL:
         target_dir = path.join(self.fs_name, self.build_dir, settings.MEDIA_URL.lstrip('/'))
         logger.debug("Copying {}{} to {}{}".format("osfs://", self.media_root, self.fs_name, target_dir))
         copy.copy_dir("osfs:///", smart_text(self.media_root), self.fs, smart_text(target_dir))
Example #9
 def build_media(self):
     """
     Build the media files.
     """
     logger.debug("Building media directory")
     if self.verbosity > 1:
         self.stdout.write("Building media directory")
     if os.path.exists(self.media_root) and settings.MEDIA_URL:
         target_dir = path.join(self.fs_name, self.build_dir,
                                settings.MEDIA_URL.lstrip('/'))
         logger.debug("Copying {}{} to {}{}".format("osfs://",
                                                    self.media_root,
                                                    self.fs_name,
                                                    target_dir))
         copy.copy_dir("osfs:///", smart_text(self.media_root), self.fs,
                       smart_text(target_dir))
Example #10
 def add(self, object_path):
     """Add pre-constructed object from object_path."""
     self.open_root_fs()
     self.check_root_structure()
     # Sanity check
     o = Object()
     o.open_fs(object_path)
     inventory = o.parse_inventory()
     identifier = inventory['id']
     # Now copy
     path = self.object_path(identifier)
     logging.info("Copying from %s to %s", object_path, fs.path.join(self.root, path))
     try:
         copy_dir(o.obj_fs, '/', self.root_fs, path)
         logging.info("Copied")
     except Exception as e:
         logging.error("Copy failed: %s", str(e))
         raise StoreException("Add object failed!")
Example #11
    def handle(self, *args, **options):
        """
        Make it happen.
        """
        # Loop through the slugs
        for slug in options['slug']:
            # Pull the object
            try:
                p = PageList()[slug]
            except Exception:
                raise CommandError("Slug provided (%s) does not exist" % slug)

            if not isinstance(p, Page):
                raise CommandError("Slug (%s) is not a Page object" % slug)

            # Build it
            PageArchiveView().build_object(p)

            # If the retired directory exists, kill it
            if os.path.exists(p.archive_static_directory_path):
                shutil.rmtree(p.archive_static_directory_path)  # pragma: no cover

            # Save that directory to the retired folder
            copy.copy_dir(
                self.fs,
                smart_text(p.build_directory_path),
                "osfs:///",
                smart_text(p.archive_static_directory_path)
            )

            # Save the metadata to the retired folder
            frontmatter_path = os.path.join(p.archive_static_directory_path, 'metadata.md')
            p.write_frontmatter(frontmatter_path)

            # Copy the dynamic page folder to its archival location
            shutil.copytree(p.page_directory_path, p.archive_dynamic_directory_path)

            # Delete the page folder
            if not options['keep_page']:
                p.delete()

        # Update the cache
        call_command("cachepages")
Example #12
    def build_static(self, *args, **options):
        """
        Builds the static files directory as well as robots.txt and favicon.ico
        """
        logger.debug("Building static directory")
        if self.verbosity > 1:
            self.stdout.write("Building static directory")
        management.call_command(
            "collectstatic",
            interactive=False,
            verbosity=0
        )

        # Set the target directory inside the filesystem.
        target_dir = path.join(
            self.build_dir,
            settings.STATIC_URL.lstrip('/')
        )
        target_dir = smart_text(target_dir)

        if os.path.exists(self.static_root) and settings.STATIC_URL:
            if getattr(settings, 'BAKERY_GZIP', False):
                self.copytree_and_gzip(self.static_root, target_dir)
            # if gzip isn't enabled, just copy the tree straight over
            else:
                logger.debug("Copying {}{} to {}{}".format("osfs://", self.static_root, self.fs_name, target_dir))
                copy.copy_dir("osfs:///", self.static_root, self.fs, target_dir)

        # If they exist in the static directory, copy the robots.txt
        # and favicon.ico files down to the root so they will work
        # on the live website.
        robots_src = path.join(target_dir, 'robots.txt')
        if self.fs.exists(robots_src):
            robots_target = path.join(self.build_dir, 'robots.txt')
            logger.debug("Copying {}{} to {}{}".format(self.fs_name, robots_src, self.fs_name, robots_target))
            self.fs.copy(robots_src, robots_target)

        favicon_src = path.join(target_dir, 'favicon.ico')
        if self.fs.exists(favicon_src):
            favicon_target = path.join(self.build_dir, 'favicon.ico')
            logger.debug("Copying {}{} to {}{}".format(self.fs_name, favicon_src, self.fs_name, favicon_target))
            self.fs.copy(favicon_src, favicon_target)
Example #13
    def backup_scenario(self, confirm=True):
        """Copy scenario data to backup disk and remove original

        :param bool confirm: prompt before deleting each batch of files
        """
        src_fs = dst_fs = get_ssh_fs()
        items = [
            (self._join(*server_setup.INPUT_DIR), f"{self.scenario_id}_*"),
            (self._join(*server_setup.OUTPUT_DIR), f"{self.scenario_id}_*"),
            (self._data_access.tmp_folder(self.scenario_id), "**"),
        ]
        for folder, pattern in items:
            print(f"--> Moving files matching {pattern} from {folder}")
            src_path = self._join(server_setup.DATA_ROOT_DIR, folder)
            dst_path = self._join(server_setup.BACKUP_DATA_ROOT_DIR, folder)
            walker = Walker(filter=[pattern])
            try:
                copy_dir(src_fs, src_path, dst_fs, dst_path, walker=walker)
            except FSError as e:
                print(f"Operation failed: {e}")

            self._data_access.remove(self._join(folder, pattern),
                                     confirm=confirm)
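A minimal, runnable sketch of the same Walker filter pattern, with in-memory filesystems and a made-up scenario id standing in for the SSH filesystem and server paths:

from fs import open_fs
from fs.copy import copy_dir
from fs.walk import Walker

src_fs = open_fs("mem://")
src_fs.makedirs("input")
src_fs.writetext("input/123_result.txt", "keep")
src_fs.writetext("input/999_other.txt", "skip")
dst_fs = open_fs("mem://")

# Only files whose names match the pattern are copied
walker = Walker(filter=["123_*"])
copy_dir(src_fs, "input", dst_fs, "backup", walker=walker)
assert dst_fs.exists("backup/123_result.txt")
assert not dst_fs.exists("backup/999_other.txt")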
Example #14
    def install_from_fs(self,
                        name: str,
                        source_fs: FS,
                        path: str = ".",
                        output_name: str = None):
        """
        Copy a module directory from where it is located to the installation directory.
        :param name: Name of the module
        :param source_fs: FS object pointing to the source location
        :param path: Path to the module directory from the source location root
        :param output_name: Name to give to the module's directory at installation
        """
        path_to_module = join(path, name)

        if name not in source_fs.listdir(path):
            raise InvalidModuleError(
                name,
                "Module directory not found - Given path should be the parent directory"
            )
        if "__manifest__.py" not in source_fs.listdir(path_to_module):
            raise InvalidModuleError(
                name,
                "Manifest not found - Given path should be the parent directory"
            )

        self.pretty_print(output_name,
                          "Copying from {}".format(
                              source_fs.desc(path_to_module)),
                          level=VERBOSE_FULL)
        copy_dir(source_fs, path_to_module, self.install_dir,
                 output_name or name)

        self.pretty_print(output_name,
                          "Installed and up to date.",
                          status=LOG_STATUS_OK,
                          level=VERBOSE_NORMAL)
Example #15
def run_vectorisation(config: VectorisationConfig) -> List[str]:
    """ Run vectorisation process on entire AOI for the given time intervals """
    filesystem = prepare_filesystem(config)

    LOGGER.info(f'Copy tiff files locally to {config.predictions_dir}')
    for time_interval in config.time_intervals:
        if not os.path.exists(f'{config.predictions_dir}/{time_interval}'):
            if not filesystem.exists(
                    f'{config.tiffs_folder}/{time_interval}/'):
                filesystem.makedirs(f'{config.tiffs_folder}/{time_interval}/')
            copy_dir(filesystem, f'{config.tiffs_folder}/{time_interval}/',
                     f'{config.predictions_dir}/', f'{time_interval}')

    LOGGER.info(f'Move files to utm folders')
    for time_interval in config.time_intervals:
        for utm in config.utms:
            utm_dir = f'{config.predictions_dir}/{time_interval}/utm{utm}'
            os.makedirs(utm_dir, exist_ok=True)
            tiffs_to_move = glob(
                f'{config.predictions_dir}/{time_interval}/*-{utm}.tiff')
            for tiff in tiffs_to_move:
                tiff_name = os.path.basename(tiff)
                os.rename(tiff, f'{utm_dir}/{tiff_name}')

    LOGGER.info(f'Create weights file {config.weights_file}')
    with rasterio.open(config.weights_file,
                       'w',
                       driver='gTIFF',
                       width=config.shape[0],
                       height=config.shape[1],
                       count=1,
                       dtype=np.float32) as dst:
        dst.write_band(1, get_weights(config.shape, config.buffer))

    rows = []
    for time_interval in config.time_intervals:
        for utm in config.utms:
            start = time.time()
            LOGGER.info(f'Running contours for {time_interval}/{utm}!')

            contours_dir = f'{config.contours_dir}/{time_interval}/utm{utm}/'
            LOGGER.info(f'Create contour folder {contours_dir}')
            os.makedirs(contours_dir, exist_ok=True)

            predictions_dir = f'{config.predictions_dir}/{time_interval}/utm{utm}/'
            tifs = glob(f'{predictions_dir}*.tiff')
            output_vrt = f'{config.vrt_dir}/vrt_{time_interval}_{utm}.vrt'
            write_vrt(tifs, config.weights_file, output_vrt)

            results = process_rows(output_vrt,
                                   contours_dir,
                                   max_workers=config.max_workers,
                                   size=config.chunk_size,
                                   buffer=config.chunk_overlap,
                                   threshold=config.threshold,
                                   cleanup=config.cleanup,
                                   skip_existing=config.skip_existing)

            failed = [(file, excp) for file, finished, excp in results
                      if not finished]
            if len(failed):
                LOGGER.warning('Some rows failed:')
                LOGGER.warning('\n'.join(
                    [f'{file}: {excp}' for file, excp in failed]))
                # raise Exception(f'{len(failed)} rows failed! ')
                LOGGER.warning(f'{len(failed)} rows failed! ')

            rows.append({
                'time_interval': time_interval,
                'utm': utm,
                'vrt_file': output_vrt,
                'rows': [file for file, finished, _ in results if finished],
                'chunk_size': config.chunk_size,
                'chunk_overlap': config.chunk_overlap,
                'contours_dir': config.contours_dir
            })

            LOGGER.info(
                f'Row contours processing for {time_interval}/{utm} done in {(time.time() - start) / 60} min!\n\n'
            )

    list_of_merged_files = multiprocess(merging_rows,
                                        rows,
                                        max_workers=config.max_workers)

    return list_of_merged_files
Example #16
    if "renpy.zip" not in cwdfs.listdir("/"):
        puts("Downloading Ren'Py")
        r = requests.get(
            "https://www.renpy.org/dl/6.99.12.4/renpy-6.99.12.4-sdk.zip",
            stream=True)
        r.raise_for_status()
        with cwdfs.open("renpy.zip", 'wb') as fd:
            total_length = int(r.headers.get('content-length'))
            for chunk in progress.bar(r.iter_content(chunk_size=1024),
                                      expected_size=(total_length / 1024) + 1):
                fd.write(chunk)

    puts("Extracting Ren'Py")
    with ZipFS("./renpy.zip") as zipfs:
        fscopy.copy_dir(zipfs, "renpy-6.99.12.4-sdk", tempfs, "renpy")
    cwdfs.remove("renpy.zip")

puts("ModTemplate setup")

with indent(2):
    if "modtemplate.zip" not in cwdfs.listdir("/"):
        puts("Downloading ModTemplate")
        r = requests.get(
            "https://github.com/Monika-After-Story/DDLCModTemplate/releases/download/v1.1.0/DDLCModTemplate_1.1.0.zip",
            stream=True)
        r.raise_for_status()
        with cwdfs.open("modtemplate.zip", 'wb') as fd:
            total_length = int(r.headers.get('content-length'))
            for chunk in progress.bar(r.iter_content(chunk_size=1024),
                                      expected_size=(total_length / 1024) + 1):
                fd.write(chunk)
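A minimal sketch of the extraction step, assuming a local archive 'bundle.zip' that contains a top-level 'pkg' directory:

from fs.zipfs import ZipFS
from fs.tempfs import TempFS
from fs.copy import copy_dir

# Copy one directory out of the archive into a temporary filesystem
with ZipFS("bundle.zip") as zip_fs, TempFS() as temp_fs:
    copy_dir(zip_fs, "pkg", temp_fs, "pkg")
    print(temp_fs.listdir("pkg"))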
Example #17
    def run(self):
        args = self.args
        application = WSGIApplication(
            self.location,
            self.get_settings(),
            args.server,
            disable_autoreload=True,
            master_settings=self.master_settings,
        )
        archive = application.archive

        filesystems = archive.filesystems

        fs = None
        if args.fs:
            try:
                fs = filesystems[args.fs]
            except KeyError:
                self.console.error("No filesystem called '%s'" % args.fs)
                return -1

        if args.tree is not None:
            if fs is None:
                self.console.error("Filesystem required")
                return -1
            with fs.opendir(args.tree or "/") as tree_fs:
                tree.render(tree_fs, max_levels=None)
            return

        if args.listdir:
            if fs is None:
                self.console.error("Filesystem required")
                return -1

            dir_fs = fs.opendir(args.listdir)
            file_paths = []
            dir_paths = []
            for info in dir_fs.scandir("/"):
                if info.is_dir:
                    dir_paths.append(info.name)
                else:
                    file_paths.append(info.name)

            _ls(self.console, file_paths, dir_paths)

        elif args.cat:
            if fs is None:
                self.console.error("Filesystem required")
                return -1
            contents = fs.gettext(args.cat)
            self.console.cat(contents, args.cat)

        elif args.open:
            if fs is None:
                self.console.error("Filesystem required")
                return -1

            try:
                filepath = fs.getsyspath(args.open)
            except NoSysPath:
                self.console.error(
                    "No system path for '%s' in filesystem '%s'" % (args.open, args.fs)
                )
                return -1

            import subprocess

            system = sys.platform
            if system == "darwin":
                subprocess.call(("open", filepath))
            elif system == "win32":
                subprocess.call(("start", filepath), shell=True)
            elif system == "linux2":
                subprocess.call(("xdg-open", filepath))
            else:
                self.console.error(
                    "Moya doesn't know how to open files on this platform (%s)"
                    % os.name
                )

        elif args.syspath:
            if fs is None:
                self.console.error("Filesystem required")
                return -1
            if not fs.exists(args.syspath):
                self.console.error(
                    "No file called '%s' found in filesystem '%s'"
                    % (args.syspath, args.fs)
                )
                return -1
            try:
                syspath = fs.getsyspath(args.syspath)
            except NoSysPath:
                self.console.error(
                    "No system path for '%s' in filesystem '%s'"
                    % (args.syspath, args.fs)
                )
            else:
                self.console(syspath).nl()

        elif args.copy:
            if fs is None:
                self.console.error("Filesystem required")
                return -1
            if len(args.copy) == 1:
                src = "/"
                dst = args.copy[0]
            elif len(args.copy) == 2:
                src, dst = args.copy
            else:
                self.console.error("--copy requires 1 or 2 arguments")
                return -1

            if fs.isdir(src):
                src_fs = fs.opendir(src)
                from fs.copy import copy_dir

                with open_fs(dst, create=True) as dst_fs:
                    if not args.force and not dst_fs.isempty("/"):
                        response = raw_input(
                            "'%s' is not empty. Copying may overwrite directory contents. Continue? "
                            % dst
                        )
                        if response.lower() not in ("y", "yes"):
                            return 0
                    copy_dir(src_fs, "/", dst_fs, "/")
            else:
                with fs.open(src, "rb") as read_f:
                    if os.path.isdir(dst):
                        dst = os.path.join(dst, os.path.basename(src))
                    try:
                        os.makedirs(dst)
                        with open(dst, "wb") as write_f:
                            while 1:
                                chunk = read_f.read(16384)
                                if not chunk:
                                    break
                                write_f.write(chunk)
                    except IOError as e:
                        self.error("unable to write to {}".format(dst))

        elif args.extract:
            if fs is None:
                self.console.error("Filesystem required")
                return -1
            src_path, dst_dir_path = args.extract
            src_fs = fs
            dst_fs = open_fs(dst_dir_path, create=True)

            if not args.force and dst_fs.exists(src_path):
                response = raw_input(
                    "'%s' exists. Do you want to overwrite? " % src_path
                )
                if response.lower() not in ("y", "yes"):
                    return 0

            dst_fs.makedirs(dirname(src_path), recreate=True)
            with src_fs.open(src_path, "rb") as read_file:
                dst_fs.setfile(src_path, read_file)

        elif args.serve:

            from .serve import Serve

            Serve.run_server(
                args.host, args.port, fs, show_access=True, develop=False, debug=True
            )

        else:
            table = [
                [
                    Cell("Name", bold=True),
                    Cell("Type", bold=True),
                    Cell("Location", bold=True),
                ]
            ]

            if fs is None:
                list_filesystems = filesystems.items()
            else:
                list_filesystems = [(args.fs, fs)]

            def get_type_name(fs):
                name = type(fs).__name__
                return name[:-2].lower() if name.endswith("FS") else name.lower()

            for name, fs in sorted(list_filesystems):
                if isinstance(fs, MultiFS):
                    location = "\n".join(
                        mount_fs.desc("/") for name, mount_fs in fs.iterate_fs()
                    )
                    fg = "yellow"
                elif isinstance(fs, MountFS):
                    mount_desc = []
                    for path, dirmount in fs.mount_tree.items():
                        mount_desc.append("%s->%s" % (path, dirmount.fs.desc("/")))
                    location = "\n".join(mount_desc)
                    fg = "magenta"
                else:
                    try:
                        syspath = fs.getsyspath("/")
                    except NoSysPath:
                        try:
                            location = fs.desc("/")
                        except FSError as e:
                            location = text_type(e)
                            fg = "red"
                        else:
                            fg = "blue"
                    else:
                        location = syspath
                        fg = "green"
                table.append(
                    [
                        Cell(name),
                        Cell(get_type_name(fs)),
                        Cell(location, bold=True, fg=fg),
                    ]
                )
            self.console.table(table, header=True)
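For reference, a minimal sketch of the --copy branch above: open a sub-directory of a filesystem and mirror its contents into a destination opened by FS URL (an in-memory source and a placeholder 'out' directory, not Moya's actual filesystems):

from fs import open_fs
from fs.copy import copy_dir

app_fs = open_fs("mem://")
app_fs.makedirs("static/css")
app_fs.writetext("static/css/site.css", "body {}")

# Open the source sub-directory and copy everything under it
src_fs = app_fs.opendir("static")
with open_fs("osfs://out", create=True) as dst_fs:
    copy_dir(src_fs, "/", dst_fs, "/")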