def _unpack(archive_file):
    """Unpacks and extracts files from an archive

    This function will unpack and extra files from the file archive_file. It
    will return the directory to which the files were unpacked.

    An AttributeError is raised when the archive is not supported (when the
    name does not end with '.zip' or '.tar.gz')

    Returns string.
    """
    logger.info("Unpacking archive '{0}'".format(archive_file))
    if archive_file.endswith('.zip'):
        archive = ZipFile(archive_file)
        rootdir = archive.namelist()[0]
    elif archive_file.endswith('.tar.gz'):
        archive = tarfile.open(archive_file)
        rootdir = archive.getnames()[0]
    else:
        raise AttributeError("Unsupported archive. Can't unpack.")

    logger.info("Archive root folder is '{0}'".format(rootdir))

    try:
        shutil.rmtree(rootdir)
    except OSError:
        pass
    logger.info("Extracting to '{0}'".format(rootdir))
    archive.extractall()
    archive.close()
    return rootdir
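
A minimal usage sketch for _unpack follows. The imports, the logger setup, and the 'release.tar.gz' path are assumptions; the snippet above only shows the function body.

import logging
import shutil
import tarfile
from zipfile import ZipFile

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Hypothetical archive path; files are extracted into the current working
# directory and the archive's root folder is returned.
extracted_dir = _unpack('release.tar.gz')
print(extracted_dir)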
Example #2
def _compressed(filename, substring=None):
    contents = []
    try:  # will work for various zip file formats
        f_ = ZipFile(filename)
        contents = f_.namelist()
        prefix = 'zip'
    except BadZipFile:
        pass

    try:  # will work for .tar, .tar.gz...
        f_ = tarfile.open(filename)
        contents = f_.getnames()
        prefix = 'tar'
    except ReadError:
        pass

    if substring is None:
        substring = '*'  # match everything

    files = []
    for item in contents:
        if item.startswith('._'):  # skip hidden files
            continue

        if fnmatch(item, substring) or substring in item:
            files.append('/vsi{}/'.format(prefix) +
                         os.path.join(filename, item))
    return files
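
A hedged usage sketch for _compressed: the imports below mirror what the function uses, and 'srtm_tiles.zip' with the '*_dem.tif' pattern are placeholders. The returned entries are GDAL virtual file system paths (/vsizip/ or /vsitar/).

import os
import tarfile
from fnmatch import fnmatch
from tarfile import ReadError
from zipfile import BadZipFile, ZipFile

# Hypothetical archive: list members matching '*_dem.tif' as GDAL virtual paths,
# e.g. '/vsizip/srtm_tiles.zip/n40_w106_dem.tif'.
for vsi_path in _compressed('srtm_tiles.zip', substring='*_dem.tif'):
    print(vsi_path)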
Example #4
def tree_in_zip(path, stream=None):
    print('Trying {}'.format(path))
    if stream is not None and ZIP_FILE.search(path) is not None:
        # The file is not the first in the loop and it is a .zip.
        zf = ZipFile(stream)
    elif stream is None and ZIP_FILE.search(path) is not None:
        # The first file is a .zip.
        zf = ZipFile(path)
    else:
        if stream is not None and TAR_FILE.search(path) is not None:
            # The file is not the first in the loop and it is a .tar(.xx); try to read it as an archive.
            try:
                zf = TarFile.open(fileobj=stream, encoding="utf-8", mode='r:*')
            except ReadError:
                pass
        elif stream is None and TAR_FILE.search(path) is not None:
            # The first file is a .tar(.xx); try to read it as an archive.
            try:
                zf = TarFile.open(path, mode='r:*')
            except ReadError:
                pass
    try:
        if type(zf) == ZipFile:
            # Search for nested archives (.zip/.tar). The except clause keeps the
            # walk from stopping at a corrupted archive.
            try:
                for name in zf.namelist():
                    # Count extensions of interest in the archive.
                    count_extensions(name, EXTENSIONS, NR_EXTENSIONS)
                    # Fill in the manifest dictionary.
                    manifesto_maker(zf, name, MANIFESTO)
                    if ZIP_FILE.search(name) is not None:
                        print('- Found {}'.format(name))
                        yield from tree_in_zip(path + '/' + name, BytesIO(zf.read(name)))
                    elif TAR_FILE.search(name) is not None:
                        print('- Found {}'.format(name))
                        yield from tree_in_zip(path + '/' + name, BytesIO(zf.read(name)))
                    else:
                        yield path + '/' + name
            except BadZipFile:
                pass
        elif type(zf) == TarFile:
            # No inner try needed; the file was already checked at the beginning.
            for name in zf.getnames():
                count_extensions(name, EXTENSIONS, NR_EXTENSIONS)
                manifesto_maker(zf, name, MANIFESTO)
                if ZIP_FILE.search(name) is not None:
                    print('- Found {}'.format(name))
                    # TarFile has no read(); extractfile() returns a file object for the member.
                    yield from tree_in_zip(path + '/' + name, BytesIO(zf.extractfile(name).read()))
                elif TAR_FILE.search(name) is not None:
                    print('- Found {}'.format(name))
                    yield from tree_in_zip(path + '/' + name, BytesIO(zf.extractfile(name).read()))
                else:
                    yield path + '/' + name
        else:
            # The file is neither .tar nor .zip; skip it.
            pass
    except UnboundLocalError:
        # zf was never assigned (unsupported or unreadable archive); skip it.
        pass
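
tree_in_zip relies on module-level names that the snippet does not show (the ZIP_FILE and TAR_FILE regexes, EXTENSIONS, NR_EXTENSIONS, MANIFESTO, count_extensions, manifesto_maker). The sketch below fills those in with placeholder stubs, purely as an assumption about their shape, and then drives the generator on a hypothetical 'bundle.zip'.

import re
from collections import defaultdict
from io import BytesIO
from tarfile import TarFile, ReadError
from zipfile import BadZipFile, ZipFile

# Assumed module-level globals (the originals are not shown in the snippet).
ZIP_FILE = re.compile(r'\.zip$', re.IGNORECASE)
TAR_FILE = re.compile(r'\.tar(\.(gz|bz2|xz))?$', re.IGNORECASE)
EXTENSIONS = ('.csv', '.xml')        # placeholder extensions of interest
NR_EXTENSIONS = defaultdict(int)     # per-extension counters
MANIFESTO = {}                       # manifest of archive members

def count_extensions(name, extensions, counters):
    # Placeholder stub: count members whose name ends with an extension of interest.
    for ext in extensions:
        if name.lower().endswith(ext):
            counters[ext] += 1

def manifesto_maker(archive, name, manifest):
    # Placeholder stub: remember which archive object each member was seen in.
    manifest[name] = repr(archive)

# Walk a hypothetical outer archive and print every leaf path.
for leaf in tree_in_zip('bundle.zip'):
    print(leaf)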
Example #5
def unpack_archive(path):
    tmp_dir = mkdtemp()
    file_extension = get_extention(path)

    if file_extension in ['.zip', '.cbz']:
        archive = ZipFile(path, 'r')
        namelist = archive.namelist()
    elif file_extension in [".tar", ".cbt"]:
        archive = tarfile.open(path, 'r')
        namelist = archive.getnames()
    else:
        raise ValueError("Unsupported archive: {}".format(path))

    for member in namelist:
        filename = os.path.basename(member)
        # skip directory entries (their basename is empty)
        if not filename:
            continue

        # extract each file into the temporary directory
        if isinstance(archive, ZipFile):
            source = archive.open(member)
        else:
            source = archive.extractfile(member)  # TarFile members are read via extractfile()
        target = open(os.path.join(tmp_dir, filename), "wb")
        with source, target:
            copyfileobj(source, target)

    archive.close()
    return tmp_dir
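
A usage sketch for unpack_archive. The imports match what the function needs; get_extention is assumed to be a small helper in the same module (spelling kept from the snippet), and 'album.cbz' is a placeholder file name.

import os
import tarfile
from shutil import copyfileobj
from tempfile import mkdtemp
from zipfile import ZipFile

def get_extention(path):
    # Assumed helper: return the lower-cased file extension, e.g. '.cbz'.
    return os.path.splitext(path)[1].lower()

pages_dir = unpack_archive('album.cbz')  # hypothetical comic-book archive
print(sorted(os.listdir(pages_dir)))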
Example #6
class ReleaseArchive:
    """Wrapper class for extracting a Raiden release from its archive.

    Supplies a context manager and file-type detection, which allows choosing
    the correct library for opening the archive automatically.
    """
    def __init__(self, path: pathlib.Path):
        self.path = path
        if self.path.suffix == '.gz':
            self._context = TarFile.open(self.path, 'r:*')
        else:
            self._context = ZipFile(self.path, 'r')
        self.validate()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def __del__(self):
        # Only try closing our context if it exists.
        # An error may occur while trying to assign the context, hence it may
        # end up not being assigned to the instance.
        if hasattr(self, '_context'):
            self.close()

    @property
    def files(self):
        """Return a list of files present in the archive.

        Depending on the file extension, we choose the correct method to access
        this list.
        """
        if self.path.suffix == '.gz':
            return self._context.getnames()
        else:
            return self._context.namelist()

    @property
    def binary(self):
        """Return the name of the first file of our list of files.

        Since the archive must only contain a single file, this is automatically
        assumed to be our binary; this assumption *is not* checked for correctness.
        """
        return self.files[0]

    def validate(self):
        """Confirm there is only one file present in the archive."""
        if len(self.files) != 1:
            raise ValueError(
                f'Release archive has unexpected content. '
                f'Expected 1 file, found {len(self.files)}: {", ".join(self.files)}',
            )

    def unpack(self, target_dir: pathlib.Path):
        """Unpack this release's archive to the given `target_dir`.

        We also set the x bit on the extracted binary.
        """
        self._context.extract(self.binary, target_dir)
        target_dir.chmod(0o770)
        return target_dir

    def close(self):
        """Close the context, if possible."""
        if self._context and hasattr(self._context, 'close'):
            self._context.close()
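
Since ReleaseArchive is a context manager, the archive is closed automatically when the block exits. A hedged usage sketch, with a placeholder archive path and target directory:

import pathlib
from tarfile import TarFile
from zipfile import ZipFile

archive_path = pathlib.Path('raiden-v1.0.0-linux-x86_64.tar.gz')  # placeholder release archive
install_dir = pathlib.Path('./raiden-bin')                        # placeholder target directory

with ReleaseArchive(archive_path) as release:
    print(release.files)          # must contain exactly one entry, see validate()
    release.unpack(install_dir)   # extracts the binary and chmods the target dir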
Example #7
    def compute_fields(self):
        """Other keyword args get passed in as a matter of course, like BBOX, time, and elevation, but this basic driver
        ignores them"""
        
        super(SpatialiteDriver, self).compute_fields()

        if not hasattr(self, "src_ext") and self.resource.resource_file :
            self.src_ext = self.resource.resource_file.split('.')[-1]

        # convert any other kind of file to spatialite.  this way the sqlite driver can be used with any OGR compatible
        # file
        if self.src_ext.endswith('zip'):
            archive = ZipFile(self.cached_basename + self.src_ext)
            projection_found = False
            for name in archive.namelist():
                xtn = name.split('.')[-1].lower()
                if xtn in {'shp', 'shx', 'dbf', 'prj'} and "__MACOSX" not in name:
                    projection_found = projection_found or xtn == 'prj'
                    with open(self.cached_basename + '.' + xtn, 'wb') as fout:
                        with archive.open(name) as fin:
                            chunk = fin.read(65536)
                            while chunk:
                                fout.write(chunk)
                                chunk = fin.read(65536)

            if not projection_found:
                with open(self.cached_basename + '.prj', 'w') as f:
                    srs = osr.SpatialReference()
                    srs.ImportFromEPSG(4326)
                    f.write(srs.ExportToWkt())

            in_filename = self.get_filename('shp')
            out_filename = self.get_filename('sqlite')
            if os.path.exists(out_filename):
                os.unlink(out_filename)

            sh.ogr2ogr(
                '-skipfailures',
                '-t_srs', 'epsg:3857',
                '-f', 'SQLite',
                '-dsco', 'SPATIALITE=YES',
                out_filename, in_filename
            )
        elif self.src_ext.endswith('gz'):
            # TarFile.open (rather than the bare constructor) transparently handles the gzip compression
            archive = TarFile.open(self.cached_basename + self.src_ext)
            projection_found = False
            for name in archive.getnames():
                xtn = name.split('.')[-1].lower()
                if xtn in {'shp', 'shx', 'dbf', 'prj'} and "__MACOSX" not in name:
                    projection_found = projection_found or xtn == 'prj'
                    with open(self.cached_basename + '.' + xtn, 'wb') as fout:
                        with archive.extractfile(name) as fin:  # tar members are read via extractfile()
                            chunk = fin.read(65536)
                            while chunk:
                                fout.write(chunk)
                                chunk = fin.read(65536)

            if not projection_found:
                with open(self.cached_basename + '.prj', 'w') as f:
                    srs = osr.SpatialReference()
                    srs.ImportFromEPSG(4326)
                    f.write(srs.ExportToWkt())

            in_filename = self.get_filename('shp')
            out_filename = self.get_filename('sqlite')
            if os.path.exists(out_filename):
                os.unlink(out_filename)

            sh.ogr2ogr(
                '-skipfailures',
                '-t_srs', 'epsg:3857',
                '-f', 'SQLite',
                '-dsco', 'SPATIALITE=YES',
                out_filename, in_filename
            )
        elif not self.src_ext.endswith('sqlite'):
            in_filename = self.get_filename(self.src_ext)
            out_filename = self.get_filename('sqlite')

            if os.path.exists(out_filename):
                os.unlink(out_filename)

            sh.ogr2ogr(
                '-skipfailures',
                '-t_srs', 'epsg:3857',
                '-f', 'SQLite',
                '-dsco', 'SPATIALITE=YES',
                out_filename, in_filename
            )

        connection = self._connection()
        table, geometry_field, _, _, srid, _ = connection.execute("select * from geometry_columns").fetchone() # grab the first layer with a geometry
        self._srid = srid

        dataframe = self.get_filename('dfx')
        if os.path.exists(dataframe):
            os.unlink(dataframe)

        c = connection.cursor()
        c.execute("select AsText(Extent(w.{geom_field})) from {table} as w".format(
            geom_field=geometry_field,
            table=table
        ))

        try:
            xmin, ymin, xmax, ymax = GEOSGeometry(c.fetchone()[0]).extent
        except TypeError:
            xmin = ymin = xmax = ymax = 0.0

        crs = osr.SpatialReference()
        crs.ImportFromEPSG(srid)
        self.resource.native_srs = crs.ExportToProj4()

        e4326 = osr.SpatialReference()
        e4326.ImportFromEPSG(4326)
        crx = osr.CoordinateTransformation(crs, e4326)
        x04326, y04326, _ = crx.TransformPoint(xmin, ymin)
        x14326, y14326, _ = crx.TransformPoint(xmax, ymax)
        self.resource.bounding_box = Polygon.from_bbox((x04326, y04326, x14326, y14326))
        self.resource.native_bounding_box = Polygon.from_bbox((xmin, ymin, xmax, ymax))
        self.resource.three_d = False

        self.resource.save()
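
The conversion above shells out through the sh module; for reference, a minimal sketch of the equivalent call using the standard library's subprocess, assuming the GDAL ogr2ogr binary is on PATH and using placeholder file names:

import subprocess

in_filename = 'layer.shp'        # placeholder input
out_filename = 'layer.sqlite'    # placeholder output

# Same flags as the sh.ogr2ogr(...) call in compute_fields above.
subprocess.run(
    ['ogr2ogr', '-skipfailures', '-t_srs', 'epsg:3857',
     '-f', 'SQLite', '-dsco', 'SPATIALITE=YES',
     out_filename, in_filename],
    check=True,
)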
    def test_zip_content(self):
        os_str = ''
        build_dir = os.environ['NORDICSEMI_NRF51_BLE_DRIVER_BUILD_PATH']
        dynamic_library_prefix = ''
        dynamic_library_suffix = ''
        python_bindings_suffix = ''

        if platform.system() == 'Windows':
            os_str = 'win'
            dynamic_library_suffix = '.dll'
            python_bindings_suffix = '.pyd'

        if platform.system() == 'Linux':
            os_str = 'linux'
            dynamic_library_prefix = 'lib'
            dynamic_library_suffix = '.so'
            python_bindings_suffix = '.so'

        if platform.system() == 'Darwin':
            os_str = 'darwin'
            dynamic_library_prefix = 'lib'
            dynamic_library_suffix = '.dylib'
            python_bindings_suffix = '.so'

        archive_name = 'nrf51-ble-driver'

        archive_filter = archive_name + '*'

        if platform.system() == 'Windows':
            archive_filter = archive_filter + '.zip'
        else:
            archive_filter = archive_filter + '.tar.gz'

        files_in_archive_dir = os.listdir(build_dir)

        archive_matches = fnmatch.filter(files_in_archive_dir, archive_filter)

        self.assertTrue(len(archive_matches) > 0)
        archive_name = archive_matches[0]
        archive_path = posixpath.join(build_dir, archive_name)

        archive_base_name = archive_name
        archive_base_name = archive_base_name.replace(".zip", "")
        archive_base_name = archive_base_name.replace(".tar.gz", "")

        expected_file_list = ['README.md',
                              'license.txt',
                              'S130_license_agreement.pdf',

                              'driver/examples/Makefile.common',
                              'driver/examples/stdbool.h',
                              'driver/examples/advertising/main.c',
                              'driver/examples/advertising/gcc/Makefile',
                              'driver/examples/heart_rate_collector/main.c',
                              'driver/examples/heart_rate_collector/gcc/Makefile',
                              'driver/examples/heart_rate_monitor/main.c',
                              'driver/examples/heart_rate_monitor/gcc/Makefile',
                              'driver/examples/heart_rate_relay/main.c',
                              'driver/examples/heart_rate_relay/gcc/Makefile',
                              'driver/examples/multi_link/main.c',
                              'driver/examples/multi_link/gcc/Makefile',

                              'driver/include/ble.h',
                              'driver/include/ble_err.h',
                              'driver/include/ble_gap.h',
                              'driver/include/ble_gatt.h',
                              'driver/include/ble_gattc.h',
                              'driver/include/ble_gatts.h',
                              'driver/include/ble_hci.h',
                              'driver/include/ble_l2cap.h',
                              'driver/include/ble_ranges.h',
                              'driver/include/ble_types.h',
                              'driver/include/nrf_error.h',
                              'driver/include/nrf_svc.h',
                              'driver/include/sd_rpc.h',

                              'driver/lib/{0}s130_nrf51_ble_driver{1}'.format(dynamic_library_prefix, dynamic_library_suffix),

                              'firmware/connectivity_115k2_with_s130_1.0.0.hex',

                              'python/ble_driver_util.py',
                              'python/s130_nrf51_ble_driver.py',
                              'python/_s130_nrf51_ble_driver{0}'.format(python_bindings_suffix),
                              'python/examples/advertising/main.py',
                              'python/examples/heart_rate_monitor/main.py',
                              'python/examples/heart_rate_collector/main.py',
                              'python/examples/heart_rate_relay/main.py',
                              'python/examples/multi_link/main.py']

        if platform.system() != 'Darwin':
            expected_file_list.extend(['firmware/connectivity_1m_with_s130_1.0.0.hex'])

        if platform.system() == 'Windows':
            expected_file_list.extend(['driver/examples/advertising/msvc/advertising.vcxproj',
                                       'driver/examples/heart_rate_collector/msvc/heart_rate_collector.vcxproj',
                                       'driver/examples/heart_rate_monitor/msvc/heart_rate_monitor.vcxproj',
                                       'driver/examples/heart_rate_relay/msvc/heart_rate_relay.vcxproj',
                                       'driver/examples/multi_link/msvc/multi_link.vcxproj',
                                       'driver/lib/s130_nrf51_ble_driver.lib'])
        else:
            expected_file_list.extend(['driver',
                                       'driver/examples',
                                       'driver/examples/advertising',
                                       'driver/examples/advertising/gcc',
                                       'driver/examples/heart_rate_collector',
                                       'driver/examples/heart_rate_collector/gcc',
                                       'driver/examples/heart_rate_monitor',
                                       'driver/examples/heart_rate_monitor/gcc',
                                       'driver/examples/heart_rate_relay',
                                       'driver/examples/heart_rate_relay/gcc',
                                       'driver/examples/multi_link',
                                       'driver/examples/multi_link/gcc',
                                       'driver/include',
                                       'driver/lib',
                                       'firmware',
                                       'python',
                                       'python/examples',
                                       'python/examples/advertising',
                                       'python/examples/heart_rate_collector',
                                       'python/examples/heart_rate_monitor',
                                       'python/examples/heart_rate_relay',
                                       'python/examples/multi_link'])

        # Build a real list so the append below also works on Python 3,
        # where map() returns an iterator without an append method.
        expected_file_list = [archive_base_name + '/' + x for x in expected_file_list]

        if platform.system() != 'Windows':
            expected_file_list.append(archive_base_name)

        actual_file_list = []

        if platform.system() == 'Windows':
            archive = ZipFile(archive_path)
            actual_file_list.extend(archive.namelist())
        else:
            archive = tarfile.open(archive_path)
            actual_file_list.extend(archive.getnames())

        expected_file_list = sorted(expected_file_list)
        actual_file_list = sorted(actual_file_list)

        self.assertEqual(actual_file_list, expected_file_list)
Example #9
    def compute_fields(self):
        """Other keyword args get passed in as a matter of course, like BBOX, time, and elevation, but this basic driver
        ignores them"""

        super(SpatialiteDriver, self).compute_fields()

        if not hasattr(self, "src_ext") and self.resource.resource_file:
            self.src_ext = self.resource.resource_file.name.split(".")[-1]
        elif not hasattr(self, "src_ext"):
            self.src_ext = "sqlite"

        # convert any other kind of file to spatialite.  this way the sqlite driver can be used with any OGR compatible
        # file

        if self.src_ext.endswith("zip"):
            archive = ZipFile(self.cached_basename + self.src_ext)
            projection_found = False
            for name in archive.namelist():
                xtn = name.split(".")[-1].lower()
                if xtn in {"shp", "shx", "dbf", "prj"} and "__MACOSX" not in name:
                    projection_found = projection_found or xtn == "prj"
                    with open(self.cached_basename + "." + xtn, "wb") as fout:
                        with archive.open(name) as fin:
                            chunk = fin.read(1024768)
                            while chunk:
                                fout.write(chunk)
                                chunk = fin.read(1024768)

            if not projection_found:
                with open(self.cached_basename + ".prj", "w") as f:
                    srs = osr.SpatialReference()
                    srs.ImportFromEPSG(4326)
                    f.write(srs.ExportToWkt())

            in_filename = self.get_filename("shp")
            out_filename = self.get_filename("sqlite")
            if os.path.exists(out_filename):
                os.unlink(out_filename)

            # ogr2ogr -skipfailures -overwrite -f SQLite -dsco USE_SPATIALITE=YES -lco OVERWRITE=YES -dsco OGR_SQLITE_SYNCHRONOUS=OFF -gt 131072 -t_srs epsg:3857
            sh.ogr2ogr(
                "-explodecollections",
                "-skipfailures",
                "-overwrite",
                "-gt",
                "131072",
                "-t_srs",
                "epsg:3857",
                "-f",
                "SQLite",
                "-dsco",
                "SPATIALITE=YES",
                out_filename,
                in_filename,
            )
            self.resource.resource_file = File(open(out_filename), name=self.resource.slug.split("/")[-1] + ".sqlite")

        elif self.src_ext.endswith("gz"):
            # TarFile.open (rather than the bare constructor) transparently handles the gzip compression
            archive = TarFile.open(self.cached_basename + self.src_ext)
            projection_found = False
            for name in archive.getnames():
                xtn = name.split(".")[-1].lower()
                if xtn in {"shp", "shx", "dbf", "prj"} and "__MACOSX" not in name:
                    projection_found = projection_found or xtn == "prj"
                    with open(self.cached_basename + "." + xtn, "wb") as fout:
                        with archive.extractfile(name) as fin:  # tar members are read via extractfile()
                            chunk = fin.read(65536)
                            while chunk:
                                fout.write(chunk)
                                chunk = fin.read(65536)

            if not projection_found:
                with open(self.cached_basename + ".prj", "w") as f:
                    srs = osr.SpatialReference()
                    srs.ImportFromEPSG(4326)
                    f.write(srs.ExportToWkt())

            in_filename = self.get_filename("shp")
            out_filename = self.get_filename("sqlite")
            if os.path.exists(out_filename):
                os.unlink(out_filename)

            sh.ogr2ogr(
                "-explodecollections",
                "-skipfailures",
                "-overwrite",
                "-gt",
                "131072",
                "-t_srs",
                "epsg:3857",
                "-f",
                "SQLite",
                "-dsco",
                "SPATIALITE=YES",
                out_filename,
                in_filename,
            )
            self.resource.resource_file = File(open(out_filename), name=self.resource.slug.split("/")[-1] + ".sqlite")

        elif not self.src_ext.endswith("sqlite"):
            in_filename = self.get_filename(self.src_ext)
            out_filename = self.get_filename("sqlite")

            if os.path.exists(out_filename):
                os.unlink(out_filename)

            sh.ogr2ogr(
                "-explodecollections",
                "-skipfailures",
                "-overwrite",
                "-gt",
                "131072",
                "-t_srs",
                "epsg:3857",
                "-f",
                "SQLite",
                "-dsco",
                "SPATIALITE=YES",
                out_filename,
                in_filename,
            )
            self.resource.resource_file = File(open(out_filename), name=self.resource.slug.split("/")[-1] + ".sqlite")

        connection = self._connection()
        table, geometry_field, _, _, srid, _ = connection.execute(
            "select * from geometry_columns"
        ).fetchone()  # grab the first layer with a geometry
        self._srid = srid if srid else 3857

        dataframe = self.get_filename("dfx")
        if os.path.exists(dataframe):
            os.unlink(dataframe)

        c = connection.cursor()
        c.execute(
            "select AsText(Extent(w.{geom_field})) from {table} as w".format(geom_field=geometry_field, table=table)
        )

        try:
            xmin, ymin, xmax, ymax = GEOSGeometry(c.fetchone()[0]).extent
        except TypeError:
            xmin = ymin = xmax = ymax = 0.0

        crs = osr.SpatialReference()
        crs.ImportFromEPSG(srid)
        self.resource.native_srs = crs.ExportToProj4()
        c.execute("create index if not exists {table}_ogc_fid on {table} (OGC_FID)".format(table=table))
        c.close()

        e4326 = osr.SpatialReference()
        e4326.ImportFromEPSG(4326)
        crx = osr.CoordinateTransformation(crs, e4326)
        x04326, y04326, _ = crx.TransformPoint(xmin, ymin)
        x14326, y14326, _ = crx.TransformPoint(xmax, ymax)
        self.resource.bounding_box = Polygon.from_bbox((x04326, y04326, x14326, y14326))
        self.resource.native_bounding_box = Polygon.from_bbox((xmin, ymin, xmax, ymax))
        self.resource.three_d = False

        self.resource.save()
Example #10
def gdalopen(filename, substring='*_dem.tif'):
    """
    Wrapper function around :func:`gdal.Open`.

    :func:`gdal.Open` returns ``None`` if no file is read. This wrapper
    function will try to read the `filename` directly but if ``None``
    is returned, several other options, including compressed file
    formats ('zip', 'gzip', 'tar') and url retrieval are attempted.
    """

    ds = gdal.Open(filename)
    if not ds:
        # try to open a compressed file container and provide more info
        # in case the read is unsuccessful
        contents = []  # stays empty if the file is neither a readable zip nor tar
        try:
            f_ = ZipFile(filename)
            contents = f_.namelist()
            prefix_ = 'zip'

        except BadZipFile:
            try:
                f_ = tarfile.open(filename)
                contents = f_.getnames()
                prefix_ = 'tar'
            except ReadError:
                # gzip:
                prefix_ = 'gzip'
                filename_ = '/vsi{}/'.format(prefix_) + filename

                ds = gdal.Open(filename_)
                if ds:
                    return ds

                # url:
                prefix_ = 'curl'
                filename_ = '/vsi{}/'.format(prefix_) + filename

                ds = gdal.Open(filename_)
                if ds:
                    return ds

        for item in contents:
            if fnmatch(item, substring) or substring in item:
                filename_ = ('/vsi{}/'.format(prefix_) +
                             os.path.join(filename, item))
                ds = gdal.Open(filename_)
                if ds:
                    return ds

        msg = ("No file with matching substring '{}' was found in {}, "
               'which contains:\n'
               '   {}\n'
               "try:\n{}").format(
                   substring, filename, contents, '\n'.join([
                       '/vsi{}/'.format(prefix_) +
                       os.path.join(filename, item) for item in contents
                   ]))

        raise OSError(msg)

    return ds
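
A usage sketch for gdalopen; it assumes the GDAL Python bindings (osgeo.gdal) together with the zipfile/tarfile/fnmatch imports used above, and 'srtm_tiles.zip' is a placeholder archive of DEM tiles.

import os
import tarfile
from fnmatch import fnmatch
from tarfile import ReadError
from zipfile import BadZipFile, ZipFile

from osgeo import gdal

# Open the first '*_dem.tif' found directly or inside the hypothetical archive.
ds = gdalopen('srtm_tiles.zip', substring='*_dem.tif')
print(ds.RasterXSize, ds.RasterYSize)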
Example #11
def installDependencies():
    if sys.platform == "win32":
        if not os.path.exists("installed"):
            print("Installing dependencies for Windows")

            # Retrieve ffmpeg
            response = urllib.request.Request(
                FFMPEG_WIN_URL, headers={'User-Agent': 'Mozilla/5.0'})
            data = urllib.request.urlopen(response).read()
            f = ZipFile(BytesIO(data))
            print(f.namelist())
            if os.path.exists(FFMPEG_WIN_FOLDER):
                shutil.rmtree(FFMPEG_WIN_FOLDER)
            f.extractall()

            # Retrieve Dolphin (FM)
            response = urllib.request.Request(
                FM_WIN_URL, headers={'User-Agent': 'Mozilla/5.0'})
            data = urllib.request.urlopen(response).read()
            f = ZipFile(BytesIO(data))
            print(f.namelist())
            if os.path.exists(FM_WIN_FOLDER):
                shutil.rmtree(FM_WIN_FOLDER)
            f.extractall()

            # Retrieve Slippi playback configuration
            response = urllib.request.Request(
                FM_WIN_PLAYBACK_CONFIG_URL,
                headers={'User-Agent': 'Mozilla/5.0'})
            with open(FM_WIN_PLAYBACK_CONFIG_FOLDER + ".tar.gz",
                      "wb") as out_file:
                out_file.write(urllib.request.urlopen(response).read())
            f = tarfile.open(FM_WIN_PLAYBACK_CONFIG_FOLDER + ".tar.gz",
                             mode='r:gz')
            print(f.getnames())
            try:
                shutil.rmtree(FM_WIN_PLAYBACK_CONFIG_FOLDER)
            except Exception:
                os.makedirs(FM_WIN_PLAYBACK_CONFIG_FOLDER)
            f.extractall(FM_WIN_PLAYBACK_CONFIG_FOLDER)
            f.close()
            os.remove(FM_WIN_PLAYBACK_CONFIG_FOLDER + ".tar.gz")

            # Overwrite playback configuration onto dolphin
            recursive_overwrite(
                os.path.join(FM_WIN_PLAYBACK_CONFIG_FOLDER, "Binaries"),
                FM_WIN_FOLDER)
            shutil.rmtree(FM_WIN_PLAYBACK_CONFIG_FOLDER)

            # Overwrite GFX and Dolphin ini from slp-to-mp4
            recursive_overwrite(THIS_USER_DIR,
                                os.path.join(FM_WIN_FOLDER, "User"))
            recursive_overwrite(os.path.join(FM_WIN_FOLDER, "User"),
                                THIS_USER_DIR)

            # Create the frames folder that Dolphin dumps into. Dolphin will not dump frames without it
            if not os.path.isdir(os.path.join(THIS_USER_DIR, "Dump",
                                              "Frames")):
                os.makedirs(os.path.join(THIS_USER_DIR, "Dump", "Frames"))

            # Create a file to indicate that dependencies are installed and should not be reinstalled
            with open("installed", 'a'):
                os.utime("installed", None)
Example #12
    def compute_fields(self):
        """Other keyword args get passed in as a matter of course, like BBOX, time, and elevation, but this basic driver
        ignores them"""

        super(SpatialiteDriver, self).compute_fields()

        if not hasattr(self, "src_ext") and self.resource.resource_file:
            self.src_ext = self.resource.resource_file.split('.')[-1]

        # convert any other kind of file to spatialite.  this way the sqlite driver can be used with any OGR compatible
        # file
        if self.src_ext.endswith('zip'):
            archive = ZipFile(self.cached_basename + self.src_ext)
            projection_found = False
            for name in archive.namelist():
                xtn = name.split('.')[-1].lower()
                if xtn in {'shp', 'shx', 'dbf', 'prj'
                           } and "__MACOSX" not in name:
                    projection_found = projection_found or xtn == 'prj'
                    with open(self.cached_basename + '.' + xtn, 'wb') as fout:
                        with archive.open(name) as fin:
                            chunk = fin.read(65536)
                            while chunk:
                                fout.write(chunk)
                                chunk = fin.read(65536)

            if not projection_found:
                with open(self.cached_basename + '.prj', 'w') as f:
                    srs = osr.SpatialReference()
                    srs.ImportFromEPSG(4326)
                    f.write(srs.ExportToWkt())

            in_filename = self.get_filename('shp')
            out_filename = self.get_filename('sqlite')
            if os.path.exists(out_filename):
                os.unlink(out_filename)

            sh.ogr2ogr('-skipfailures', '-t_srs', 'epsg:3857', '-f', 'SQLite',
                       '-dsco', 'SPATIALITE=YES', out_filename, in_filename)
        elif self.src_ext.endswith('gz'):
            # TarFile.open (rather than the bare constructor) transparently handles the gzip compression
            archive = TarFile.open(self.cached_basename + self.src_ext)
            projection_found = False
            for name in archive.getnames():
                xtn = name.split('.')[-1].lower()
                if xtn in {'shp', 'shx', 'dbf', 'prj'
                           } and "__MACOSX" not in name:
                    projection_found = projection_found or xtn == 'prj'
                    with open(self.cached_basename + '.' + xtn, 'wb') as fout:
                        with archive.extractfile(name) as fin:  # tar members are read via extractfile()
                            chunk = fin.read(65536)
                            while chunk:
                                fout.write(chunk)
                                chunk = fin.read(65536)

            if not projection_found:
                with open(self.cached_basename + '.prj', 'w') as f:
                    srs = osr.SpatialReference()
                    srs.ImportFromEPSG(4326)
                    f.write(srs.ExportToWkt())

            in_filename = self.get_filename('shp')
            out_filename = self.get_filename('sqlite')
            if os.path.exists(out_filename):
                os.unlink(out_filename)

            sh.ogr2ogr('-skipfailures', '-t_srs', 'epsg:3857', '-f', 'SQLite',
                       '-dsco', 'SPATIALITE=YES', out_filename, in_filename)
        elif not self.src_ext.endswith('sqlite'):
            in_filename = self.get_filename(self.src_ext)
            out_filename = self.get_filename('sqlite')

            if os.path.exists(out_filename):
                os.unlink(out_filename)

            sh.ogr2ogr('-skipfailures', '-t_srs', 'epsg:3857', '-f', 'SQLite',
                       '-dsco', 'SPATIALITE=YES', out_filename, in_filename)

        connection = self._connection()
        table, geometry_field, _, _, srid, _ = connection.execute(
            "select * from geometry_columns").fetchone(
            )  # grab the first layer with a geometry
        self._srid = srid if srid else 3857

        dataframe = self.get_filename('dfx')
        if os.path.exists(dataframe):
            os.unlink(dataframe)

        c = connection.cursor()
        c.execute(
            "select AsText(Extent(w.{geom_field})) from {table} as w".format(
                geom_field=geometry_field, table=table))

        try:
            xmin, ymin, xmax, ymax = GEOSGeometry(c.fetchone()[0]).extent
        except TypeError:
            xmin = ymin = xmax = ymax = 0.0

        crs = osr.SpatialReference()
        crs.ImportFromEPSG(srid)
        self.resource.native_srs = crs.ExportToProj4()

        e4326 = osr.SpatialReference()
        e4326.ImportFromEPSG(4326)
        crx = osr.CoordinateTransformation(crs, e4326)
        x04326, y04326, _ = crx.TransformPoint(xmin, ymin)
        x14326, y14326, _ = crx.TransformPoint(xmax, ymax)
        self.resource.bounding_box = Polygon.from_bbox(
            (x04326, y04326, x14326, y14326))
        self.resource.native_bounding_box = Polygon.from_bbox(
            (xmin, ymin, xmax, ymax))
        self.resource.three_d = False

        self.resource.save()
Example #13
    def test_zip_content(self):
        os_str = ''
        build_dir = os.environ['NORDICSEMI_NRF51_BLE_DRIVER_BUILD_PATH']
        dynamic_library_prefix = ''
        dynamic_library_suffix = ''
        python_bindings_suffix = ''

        if platform.system() == 'Windows':
            os_str = 'win'
            dynamic_library_suffix = '.dll'
            python_bindings_suffix = '.pyd'

        if platform.system() == 'Linux':
            os_str = 'linux'
            dynamic_library_prefix = 'lib'
            dynamic_library_suffix = '.so'
            python_bindings_suffix = '.so'

        if platform.system() == 'Darwin':
            os_str = 'darwin'
            dynamic_library_prefix = 'lib'
            dynamic_library_suffix = '.dylib'
            python_bindings_suffix = '.so'

        archive_name = 'nrf51-ble-driver'

        archive_filter = archive_name + '*'

        if platform.system() == 'Windows':
            archive_filter = archive_filter + '.zip'
        else:
            archive_filter = archive_filter + '.tar.gz'

        files_in_archive_dir = os.listdir(build_dir)

        archive_matches = fnmatch.filter(files_in_archive_dir, archive_filter)

        self.assertTrue(len(archive_matches) > 0)
        archive_name = archive_matches[0]
        archive_path = posixpath.join(build_dir, archive_name)

        archive_base_name = archive_name
        archive_base_name = archive_base_name.replace(".zip", "")
        archive_base_name = archive_base_name.replace(".tar.gz", "")

        expected_file_list = [
            'README.md', 'license.txt', 'S130_license_agreement.pdf',
            'driver/examples/Makefile.common', 'driver/examples/stdbool.h',
            'driver/examples/advertising/main.c',
            'driver/examples/advertising/gcc/Makefile',
            'driver/examples/heart_rate_collector/main.c',
            'driver/examples/heart_rate_collector/gcc/Makefile',
            'driver/examples/heart_rate_monitor/main.c',
            'driver/examples/heart_rate_monitor/gcc/Makefile',
            'driver/examples/heart_rate_relay/main.c',
            'driver/examples/heart_rate_relay/gcc/Makefile',
            'driver/examples/multi_link/main.c',
            'driver/examples/multi_link/gcc/Makefile', 'driver/include/ble.h',
            'driver/include/ble_err.h', 'driver/include/ble_gap.h',
            'driver/include/ble_gatt.h', 'driver/include/ble_gattc.h',
            'driver/include/ble_gatts.h', 'driver/include/ble_hci.h',
            'driver/include/ble_l2cap.h', 'driver/include/ble_ranges.h',
            'driver/include/ble_types.h', 'driver/include/nrf_error.h',
            'driver/include/nrf_svc.h', 'driver/include/sd_rpc.h',
            'driver/lib/{0}s130_nrf51_ble_driver{1}'.format(
                dynamic_library_prefix, dynamic_library_suffix),
            'firmware/connectivity_115k2_with_s130_1.0.0.hex',
            'python/ble_driver_util.py', 'python/s130_nrf51_ble_driver.py',
            'python/_s130_nrf51_ble_driver{0}'.format(python_bindings_suffix),
            'python/examples/advertising/main.py',
            'python/examples/heart_rate_monitor/main.py',
            'python/examples/heart_rate_collector/main.py',
            'python/examples/heart_rate_relay/main.py',
            'python/examples/multi_link/main.py'
        ]

        if platform.system() != 'Darwin':
            expected_file_list.extend(
                ['firmware/connectivity_1m_with_s130_1.0.0.hex'])

        if platform.system() == 'Windows':
            expected_file_list.extend([
                'driver/examples/advertising/msvc/advertising.vcxproj',
                'driver/examples/heart_rate_collector/msvc/heart_rate_collector.vcxproj',
                'driver/examples/heart_rate_monitor/msvc/heart_rate_monitor.vcxproj',
                'driver/examples/heart_rate_relay/msvc/heart_rate_relay.vcxproj',
                'driver/examples/multi_link/msvc/multi_link.vcxproj',
                'driver/lib/s130_nrf51_ble_driver.lib'
            ])
        else:
            expected_file_list.extend([
                'driver', 'driver/examples', 'driver/examples/advertising',
                'driver/examples/advertising/gcc',
                'driver/examples/heart_rate_collector',
                'driver/examples/heart_rate_collector/gcc',
                'driver/examples/heart_rate_monitor',
                'driver/examples/heart_rate_monitor/gcc',
                'driver/examples/heart_rate_relay',
                'driver/examples/heart_rate_relay/gcc',
                'driver/examples/multi_link', 'driver/examples/multi_link/gcc',
                'driver/include', 'driver/lib', 'firmware', 'python',
                'python/examples', 'python/examples/advertising',
                'python/examples/heart_rate_collector',
                'python/examples/heart_rate_monitor',
                'python/examples/heart_rate_relay',
                'python/examples/multi_link'
            ])

        # Build a real list so the append below also works on Python 3,
        # where map() returns an iterator without an append method.
        expected_file_list = [
            archive_base_name + '/' + x for x in expected_file_list
        ]

        if platform.system() != 'Windows':
            expected_file_list.append(archive_base_name)

        actual_file_list = []

        if platform.system() == 'Windows':
            archive = ZipFile(archive_path)
            actual_file_list.extend(archive.namelist())
        else:
            archive = tarfile.open(archive_path)
            actual_file_list.extend(archive.getnames())

        expected_file_list = sorted(expected_file_list)
        actual_file_list = sorted(actual_file_list)

        self.assertEqual(actual_file_list, expected_file_list)