Example 1
 def set(self, key, value):
     fs = None
     try:
         if not key:
             return
         key = key.format(accountDBID=utils.getAccountDBID())
         fullFileName = os.path.join(self.cache_dir, '{0}.dat'.format(key))
         dirName = os.path.dirname(fullFileName)
         pkg = os.path.basename(dirName)
         fileName = os.path.basename(fullFileName)
         isZip = pkg.lower().endswith('.zip')
         save = True
         if isZip:
             fs = ZipFS(dirName, mode='a', compression='stored')
             if fs.exists(fileName):
                 log('[WARNING] archive "{}" already contains file "{}"; the new data will not be saved.'
                     .format(pkg, fileName))
                 save = False
         else:
             fs = OSFS(dirName, create=True)
         if save:
             fs.setcontents(fileName, cPickle.dumps(value))
     except Exception:
         err(traceback.format_exc())
     finally:
         if fs is not None:
             fs.close()
Example 2
 def get(self, key, default):
     fs = None
     try:
         fullFileName = os.path.join(self.cache_dir, '{0}.dat'.format(key))
         dirName = os.path.dirname(fullFileName)
         pkg = os.path.basename(dirName)
         fileName = os.path.basename(fullFileName)
         isZip = pkg.lower().endswith('.zip')
         if os.path.exists(dirName):
             if isZip:
                 fs = ZipFS(dirName, mode='r', compression='stored')
             else:
                 fs = OSFS(dirName, create=True)
             if fs.exists(fileName):
                 try:
                     #log(fileName)
                     #log(cPickle.loads(fs.getcontents(fileName)))
                     return cPickle.loads(fs.getcontents(fileName))
                 except Exception:
                     if isZip:
                         log('[WARNING] Broken file: %s' % fullFileName)
                     else:
                         log('[WARNING] Remove broken file: %s' % fullFileName)
                         fs.remove(fileName)
                     raise
         return default
     except Exception:
         err(traceback.format_exc())
         return default
     finally:
         if fs is not None:
             fs.close()
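Examples 1 and 2 together form a pickle-based cache that writes to a plain directory (OSFS) or into a .zip package (ZipFS), depending on the parent directory's extension. Below is a minimal sketch of the same round trip against the pyfilesystem2 (fs 2.x) API; the function names and scratch path are invented for illustration, and note that fs 2.x has no ZipFS append mode, so write=True always creates a fresh archive.

import os
import pickle
from fs.osfs import OSFS
from fs.zipfs import ZipFS

def cache_set(cache_dir, key, value):
    # Mirror of Example 1: pick the filesystem from the parent dir's extension.
    dir_name, file_name = os.path.split(os.path.join(cache_dir, '{0}.dat'.format(key)))
    if dir_name.lower().endswith('.zip'):
        fs_ = ZipFS(dir_name, write=True)   # creates a new archive (no append in fs 2.x)
    else:
        fs_ = OSFS(dir_name, create=True)
    with fs_:
        fs_.writebytes(file_name, pickle.dumps(value))

def cache_get(cache_dir, key, default=None):
    # Mirror of Example 2: return `default` when the directory or file is missing.
    dir_name, file_name = os.path.split(os.path.join(cache_dir, '{0}.dat'.format(key)))
    if not os.path.exists(dir_name):
        return default
    fs_ = ZipFS(dir_name) if dir_name.lower().endswith('.zip') else OSFS(dir_name)
    with fs_:
        return pickle.loads(fs_.readbytes(file_name)) if fs_.exists(file_name) else default

cache_set('/tmp/demo-cache', 'answer', 42)      # invented scratch path
assert cache_get('/tmp/demo-cache', 'answer') == 42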
Example 3
def test_write_new_datapackage():
    first = load_datapackage(
        ZipFS(str(fixture_dir / "merging" / "merging_first.zip")))
    second = load_datapackage(
        ZipFS(str(fixture_dir / "merging" / "merging_second.zip")))
    with tempfile.TemporaryDirectory() as td:
        temp_fs = OSFS(td)
        result = merge_datapackages_with_mask(
            first_dp=first,
            first_resource_group_label="sa-data-vector",
            second_dp=second,
            second_resource_group_label="sa-data-array",
            mask_array=np.array([1, 0, 1, 0, 1, 0, 1, 0, 1, 0], dtype=bool),
            output_fs=temp_fs,
        )
        result = load_datapackage(OSFS(td))

        assert isinstance(result, DatapackageBase)
        assert not isinstance(result.fs, MemoryFS)
        assert len(result.resources) == 5

        for suffix in {"indices", "data", "distributions", "flip"}:
            try:
                d, r = result.get_resource(f"sa-data-vector.{suffix}")
            except KeyError:
                continue

            assert r["name"] == f"sa-data-vector.{suffix}"
            assert r["path"] == f"sa-data-vector.{suffix}.npy"
            assert r["group"] == "sa-data-vector"
            assert r["nrows"] == 5

            if suffix == "data":
                assert np.allclose(d, np.array([0, 2, 4, 6, 8]))

            try:
                d, r = result.get_resource(f"sa-data-array.{suffix}")
            except KeyError:
                continue

            assert r["name"] == f"sa-data-array.{suffix}"
            assert r["path"] == f"sa-data-array.{suffix}.npy"
            assert r["group"] == "sa-data-array"
            assert r["nrows"] == 5

            if suffix == "data":
                assert d.shape == (5, 10)
                assert np.allclose(d[:, 0], np.array([1, 3, 5, 7, 9]) + 10)
Example 4
    def process(self, **extra_metadata):
        """
Process intermediate data from a Python dictionary to a `stats_arrays <https://pypi.python.org/pypi/stats_arrays/>`_ array, which is a `NumPy <http://numpy.scipy.org/>`_ `Structured <http://docs.scipy.org/doc/numpy/reference/generated/numpy.recarray.html#numpy.recarray>`_ `Array <http://docs.scipy.org/doc/numpy/user/basics.rec.html>`_. A structured array (also called record array) is a heterogeneous array, where each column has a different label and data type.

Processed arrays are saved in the ``processed`` directory.

If the uncertainty type is no uncertainty, undefined, or not specified, then the 'amount' value is used for 'loc' as well. This is needed for the random number generator.

Doesn't return anything, but writes a file to disk.

        """
        data = self.load()
        dp = create_datapackage(
            fs=ZipFS(str(self.filepath_processed()), write=True),
            name=self.filename_processed(),
            sum_intra_duplicates=True,
            sum_inter_duplicates=False,
        )
        dp.add_persistent_vector_from_iterator(
            matrix=self.matrix,
            name=clean_datapackage_name(str(self.name) + " matrix data"),
            dict_iterator=(self.process_row(row) for row in data),
            nrows=len(data),
            **extra_metadata)
        dp.finalize_serialization()
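The `dict_iterator` here is expected to yield one mapping per matrix element, with "row", "col", and "amount" keys (see `process_row` above and Example 24 below). A minimal sketch of the same call with an in-memory iterator, assuming bw_processing is importable as bwp; the file and matrix names are invented:

import bw_processing as bwp
from fs.zipfs import ZipFS

dp = bwp.create_datapackage(fs=ZipFS("demo.zip", write=True))   # invented file name
dp.add_persistent_vector_from_iterator(
    matrix="demo_matrix",                                       # invented matrix label
    name="demo matrix data",
    dict_iterator=iter([
        {"row": 0, "col": 0, "amount": 1.0},   # one mapping per matrix element
        {"row": 1, "col": 0, "amount": 4.2},
    ]),
    nrows=2,
)
dp.finalize_serialization()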
Example 5
def test_ordering():
    dps = [
        load_datapackage(ZipFS(dirpath / "b-second.zip")),
        load_datapackage(ZipFS(dirpath / "a-first.zip")),
    ]
    for dp in dps:
        dp.rehydrate_interface("w-fourth", Interface())
        print(list(dp.groups))

    mm = MappedMatrix(packages=dps, matrix="matrix-a")
    assert [grp.label for grp in mm.groups] == [
        "y-second",
        "w-fourth",
        "y-second",
        "w-fourth",
    ]
Example 6
def copy_dataset_to_mem_fs(mem_fs, dataset_zip_file_path):
    tf.logging.info('Copying dataset to in-memory filesystem.')
    dataset_path, dataset_zip_filename = os.path.split(dataset_zip_file_path)
    with fs.open_fs(dataset_path) as host_fs:  # Could be local or GCS
        with host_fs.open(dataset_zip_filename, 'rb') as zip_file:
            with ZipFS(zip_file) as zip_fs:
                fs.copy.copy_dir(zip_fs, '.', mem_fs, '.')
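A usage sketch for the helper above, with pyfilesystem2's MemoryFS as the destination; the dataset path is invented, and per the comment in the source it may be local or a GCS URL:

from fs.memoryfs import MemoryFS

mem_fs = MemoryFS()
copy_dataset_to_mem_fs(mem_fs, '/data/dataset.zip')     # invented path
print(mem_fs.listdir('.'))                              # archive contents, now in RAM
mem_fs.close()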
Example 7
    def get_fs(cls, registry, fs_name, fs_name_params, fs_path, writeable, create_dir):

        zip_fs, zip_path = registry.parse(fs_path)
        if zip_path is None:
            raise OpenerError('File required for zip opener')
        if zip_fs.exists(zip_path):
            if writeable:
                open_mode = 'r+b'
            else:
                open_mode = 'rb'
        else:
            open_mode = 'w+'
        if zip_fs.hassyspath(zip_path):
            zip_file = zip_fs.getsyspath(zip_path)
        else:
            zip_file = zip_fs.open(zip_path, mode=open_mode)

        _username, _password, fs_path = _parse_credentials(fs_path)

        from fs.zipfs import ZipFS
        if zip_file is None:
            zip_file = fs_path

        mode = 'r'
        if writeable:
            mode = 'a'

        allow_zip_64 = fs_name.endswith('64')

        zipfs = ZipFS(zip_file, mode=mode, allow_zip_64=allow_zip_64)
        return zipfs, None
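Example 7 is the opener hook behind zip:// URLs in the legacy fs 0.x API; with it registered, an archive can be opened by URL rather than by constructing ZipFS directly. A sketch against that 0.x API, with an invented path:

from fs.opener import fsopendir

zfs = fsopendir('zip://./archive.zip')   # dispatched through get_fs() above
print(zfs.listdir('/'))
zfs.close()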
Example 8
def empty_biosphere():
    # Flow 1: The flow
    # Activity 1: The activity

    dp = create_datapackage(
        fs=ZipFS(str(fixture_dir / "empty_biosphere.zip"), write=True),
    )

    data_array = np.array([1, 2, 3])
    indices_array = np.array([(2, 1), (1, 1), (2, 2)], dtype=INDICES_DTYPE)
    flip_array = np.array([1, 0, 0], dtype=bool)
    dp.add_persistent_vector(
        matrix="technosphere_matrix",
        data_array=data_array,
        name="eb-technosphere",
        indices_array=indices_array,
        nrows=3,
        flip_array=flip_array,
    )

    data_array = np.array([1])
    indices_array = np.array([(1, 0)], dtype=INDICES_DTYPE)
    dp.add_persistent_vector(
        matrix="characterization_matrix",
        data_array=data_array,
        name="eb-characterization",
        indices_array=indices_array,
        global_index=0,
        nrows=1,
    )

    dp.finalize_serialization()
Example 9
def create_ordering_datapackages():
    dp = create_datapackage(
        fs=ZipFS(str(dirpath / "a-first.zip"), write=True),
        name="test-fixture-a",
        id_="fixture-a",
    )
    add_data(dp)
    dp.finalize_serialization()

    dp = create_datapackage(
        fs=ZipFS(str(dirpath / "b-second.zip"), write=True),
        name="test-fixture-b",
        id_="fixture-b",
    )
    add_data(dp)
    dp.finalize_serialization()
Example 10
def test_processed_array():
    database = DatabaseChooser("a database")
    database.write(
        {
            ("a database", "2"): {
                "type": "process",
                "exchanges": [
                    {
                        "input": ("a database", "2"),
                        "amount": 42,
                        "uncertainty_type": 7,
                        "type": "production",
                    }
                ],
            }
        }
    )
    package = load_datapackage(ZipFS(database.filepath_processed()))
    print(package.resources)
    array = package.get_resource("a_database_technosphere_matrix.data")[0]

    assert array.shape == (1,)
    assert array[0] == 42

    array = package.get_resource("a_database_technosphere_matrix.distributions")[0]
    assert array.shape == (1,)
    assert array[0]["uncertainty_type"] == 7
Example 11
def generate_local_sa_biosphere_datapackage(cutoff=1e-4, const_factor=10):

    lca = setup_bw_project_archetypes()
    uncertain_biosphere_exchanges = filter_uncertain_biosphere_exchanges(
        lca, cutoff)

    dp = bwp.create_datapackage(
        fs=ZipFS(str(DATA_DIR / "local-sa-biosphere.zip"), write=True),
        name="local sa biosphere",
    )

    amounts = np.array([exc.amount for exc in uncertain_biosphere_exchanges])
    num_samples = len(amounts)
    # Tile `amounts` into an (n, n) matrix so that multiplying by the diagonal
    # broadcasts; the 1-D result of np.tile(amounts, num_samples) would not.
    data_array = np.tile(amounts, (num_samples, 1)) * np.diag(
        np.ones(num_samples) * const_factor)

    indices_array = np.array(
        [(exc.input.id, exc.output.id)
         for exc in uncertain_biosphere_exchanges],
        dtype=bwp.INDICES_DTYPE,
    )

    # All inputs -> all True
    flip_array = np.ones(len(indices_array), dtype=bool)

    dp.add_persistent_array(
        matrix="biosphere_matrix",
        data_array=data_array,
        name="local sa biosphere",
        indices_array=indices_array,
        flip_array=flip_array,
    )

    dp.finalize_serialization()
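A quick standalone check of the diagonal construction above, with toy amounts: only the diagonal survives the product, so each sample perturbs exactly one exchange by the constant factor.

import numpy as np

amounts = np.array([2.0, 3.0, 5.0])
n = len(amounts)
data = np.tile(amounts, (n, 1)) * np.diag(np.ones(n) * 10)
assert np.allclose(np.diag(data), amounts * 10)               # perturbed values
assert np.count_nonzero(data - np.diag(np.diag(data))) == 0   # off-diagonal is zero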
Example 12
def test_process_without_exchanges_still_in_processed_array():
    database = DatabaseChooser("a database")
    database.write({("a database", "foo"): {}})

    package = load_datapackage(ZipFS(database.filepath_processed()))
    array = package.get_resource("a_database_technosphere_matrix.data")[0]
    assert array[0] == 1
    assert array.shape == (1,)
Example 13
def do_build(dataplicity_path):
    """Build firmware in project directory"""
    with fsopendir(dataplicity_path) as src_fs:
        version = firmware.get_version(src_fs)
        print "Building version {:010}...".format(version)
        filename = "firmware-{}.zip".format(version)
        firmware_path = join('__firmware__', filename)
        src_fs.makedir('__firmware__', allow_recreate=True)

        with src_fs.open(firmware_path, 'wb') as zip_file:
            dst_fs = ZipFS(zip_file, 'w')
            firmware.build(src_fs, dst_fs)
            dst_fs.close()

        size = src_fs.getsize(firmware_path)

    print "Wrote {} ({:,} bytes)".format(firmware_path, size)
Example 14
    def test_binary_write_read(self):
        # GIVEN zipfs
        z = self.z

        # WHEN binary data is written to a test file in zipfs
        f = z.open('test.data', 'wb')
        f.write(self.test_content)
        f.close()
        z.close()

        # THEN the same binary data is retrieved when opened again
        z = ZipFS('test.zip', 'r')
        f = z.open('test.data', 'rb')
        content = f.read()
        f.close()
        z.close()
        self.assertEqual(content, self.test_content)
Example 15
def do_build(dataplicity_path):
    """Build firmware in project directory"""
    with fsopendir(dataplicity_path) as src_fs:
        version = firmware.get_version(src_fs)
        print("Building version {:010}...".format(version))
        filename = "firmware-{}.zip".format(version)
        firmware_path = join('__firmware__', filename)
        src_fs.makedir('__firmware__', allow_recreate=True)

        with src_fs.open(firmware_path, 'wb') as zip_file:
            dst_fs = ZipFS(zip_file, 'w')
            firmware.build(src_fs, dst_fs)
            dst_fs.close()

        size = src_fs.getsize(firmware_path)

    print("Wrote {} ({:,} bytes)".format(firmware_path, size))
Example 17
def test_group_ordering_consistent():
    dp = load_datapackage(ZipFS(dirpath / "test-fixture.zip"))
    assert list(dp.groups) == [
        "sa-data-vector-from-dict",
        "sa-data-vector",
        "sa-data-array",
        "sa-vector-interface",
        "sa-array-interface",
    ]
Example 18
def test_database_process_adds_correct_geo(add_biosphere):
    database = Database("food")
    database.write(food)

    package = load_datapackage(ZipFS(database.filepath_processed()))
    data = package.get_resource("food_inventory_geomapping_matrix.indices")[0]

    assert geomapping["CA"] in data["col"].tolist()
    assert geomapping["CH"] in data["col"].tolist()
Example 19
def test_default_metadata():
    first = load_datapackage(
        ZipFS(str(fixture_dir / "merging" / "merging_first.zip")))
    second = load_datapackage(
        ZipFS(str(fixture_dir / "merging" / "merging_second.zip")))
    result = merge_datapackages_with_mask(
        first_dp=first,
        first_resource_group_label="sa-data-vector",
        second_dp=second,
        second_resource_group_label="sa-data-array",
        mask_array=np.array([1, 0, 1, 0, 1, 0, 1, 0, 1, 0], dtype=bool),
    )

    assert result.metadata["name"]
    assert result.metadata["id"]
    assert not result.metadata["combinatorial"]
    assert not result.metadata["sequential"]
    assert not result.metadata["seed"]
Example 20
def test_integration_test_new_zipfile():
    with tempfile.TemporaryDirectory() as td:
        dp = create_datapackage(
            fs=ZipFS(str(Path(td) / "foo.zip"), write=True),
            name="test-fixture",
            id_="fixture-42",
        )
        add_data(dp)
        dp.finalize_serialization()

        check_metadata(dp)
        check_data(dp)

        loaded = load_datapackage(ZipFS(str(Path(td) / "foo.zip"),
                                        write=False))

        check_metadata(loaded, False)
        check_data(loaded)
Example 21
    def GetZipFile(self, _zipfile):
        # print("ZipFileManager::GetZipFile")
        projectsFS = self.m_ZipFileList.get(_zipfile)
        if projectsFS is None:
            print("ZipFileManager::GetZipFile " + str(_zipfile))
            # projectsFS = ZipFS(_zipfile, mode='r')
            projectsFS = ZipFS(_zipfile)
            self.m_ZipFileList[_zipfile] = projectsFS
        return projectsFS
Example 22
    def upload_docs(self, lib_name, lib_version):
        args = self.args

        archive, lib = build.build_lib(args.location, ignore_errors=True)
        lib_name = lib.long_name

        from ..docgen.extracter import Extracter

        extract_fs = TempFS('moyadoc-{}'.format(lib_name))

        extracter = Extracter(archive, extract_fs)
        extracter.extract_lib(lib_name)

        _fh, temp_filename = tempfile.mkstemp('moyadocs')
        with ZipFS(temp_filename, 'w') as docs_zip_fs:
            fs.copy.copy_dir(extract_fs, '/', docs_zip_fs, '/')

        package_filename = "{}-{}.docs.zip".format(lib_name, lib_version)

        upload_info = self.call('package.get-upload-info')
        docs_url = upload_info['docs_url']

        self.console("uploading '{}'...".format(package_filename)).nl()

        with io.open(temp_filename, 'rb') as package_file:
            files = [('file', (package_filename, package_file,
                               'application/octet-stream'))]
            data = {
                "auth": self.auth_token,
                "package": lib_name,
                "version": lib_version
            }

            response = requests.post(docs_url,
                                     verify=False,
                                     files=files,
                                     data=data,
                                     hooks={})

        if response.status_code != 200:
            raise CommandError(
                "upload failed -- server returned {} response".format(
                    response.status_code))

        message = decode_utf8_bytes(
            response.headers.get('moya-upload-package-message', ''))
        result = decode_utf8_bytes(
            response.headers.get('moya-upload-package-result', ''))

        if result == 'success':
            self.server_response(message, fg="green")
        else:
            raise CommandError('upload error ({})'.format(message))
        if result == "success":
            pass
        else:
            self.console.error("upload failed")
Example 23
def test_add_suffix():
    first = load_datapackage(
        ZipFS(str(fixture_dir / "merging" / "merging_same_1.zip")))
    second = load_datapackage(
        ZipFS(str(fixture_dir / "merging" / "merging_same_2.zip")))
    with pytest.warns(UserWarning):
        result = merge_datapackages_with_mask(
            first_dp=first,
            first_resource_group_label="same",
            second_dp=second,
            second_resource_group_label="same",
            mask_array=np.array([1, 0, 1, 0, 1, 0, 1, 0, 1, 0], dtype=bool),
        )

    assert isinstance(result, DatapackageBase)
    assert len(result.resources) == 5

    for suffix in {"indices", "data", "distributions", "flip"}:
        try:
            d, r = result.get_resource(f"same_true.{suffix}")
        except KeyError:
            continue

        assert r["name"] == f"same_true.{suffix}"
        assert r["path"] == f"same_true.{suffix}.npy"
        assert r["group"] == "same_true"
        assert r["nrows"] == 5

        if suffix == "data":
            assert np.allclose(d, np.array([0, 2, 4, 6, 8]))

        try:
            d, r = result.get_resource(f"same_false.{suffix}")
        except KeyError:
            continue

        assert r["name"] == f"same_false.{suffix}"
        assert r["path"] == f"same_false.{suffix}.npy"
        assert r["group"] == "same_false"
        assert r["nrows"] == 5

        if suffix == "data":
            assert d.shape == (5, 10)
            assert np.allclose(d[:, 0], np.array([1, 3, 5, 7, 9]) + 10)
Example 24
def process_delta_database(name, tech, bio, dependents):
    """A modification of ``bw2data.backends.base.SQLiteBackend.process`` to skip retrieving data from the database."""
    print("Tech:", tech)
    print("Bio:", bio)

    db = bd.Database(name)
    db.metadata["processed"] = datetime.datetime.now().isoformat()

    # Create geomapping array, from dataset integer ids to locations
    inv_mapping_qs = ActivityDataset.select(
        ActivityDataset.id, ActivityDataset.location
    ).where(ActivityDataset.database == name, ActivityDataset.type == "process")

    # self.filepath_processed checks if data is dirty,
    # and processes if it is. This causes an infinite loop.
    # So we construct the filepath ourselves.
    fp = str(db.dirpath_processed() / db.filename_processed())

    dp = bwp.create_datapackage(
        fs=ZipFS(fp, write=True),
        name=bwp.clean_datapackage_name(name),
        sum_intra_duplicates=True,
        sum_inter_duplicates=False,
    )
    dp.add_persistent_vector_from_iterator(
        matrix="inv_geomapping_matrix",
        name=bwp.clean_datapackage_name(name + " inventory geomapping matrix"),
        dict_iterator=(
            {
                "row": row[0],
                "col": bd.geomapping[
                    bd.backends.utils.retupleize_geo_strings(row[1])
                    or bd.config.global_location
                ],
                "amount": 1,
            }
            for row in inv_mapping_qs.tuples()
        ),
        nrows=inv_mapping_qs.count(),
    )

    dp.add_persistent_vector_from_iterator(
        matrix="biosphere_matrix",
        name=bwp.clean_datapackage_name(name + " biosphere matrix"),
        dict_iterator=bio,
    )
    dp.add_persistent_vector_from_iterator(
        matrix="technosphere_matrix",
        name=bwp.clean_datapackage_name(name + " technosphere matrix"),
        dict_iterator=tech,
    )
    dp.finalize_serialization()

    db.metadata["depends"] = sorted(dependents.difference({name}))
    db.metadata["dirty"] = False
    db._metadata.flush()
Example 25
def test_data_is_the_same_object_when_not_proxy():
    dp = load_datapackage(fs_or_obj=ZipFS(str(dirpath / "test-fixture.zip")))
    fdp = dp.filter_by_attribute("matrix", "sa_matrix")

    arr1, _ = dp.get_resource("sa-data-array.data")
    arr2, _ = fdp.get_resource("sa-data-array.data")

    assert np.allclose(arr1, arr2)
    assert arr1 is arr2
    assert np.shares_memory(arr1, arr2)
Example 26
def test_metadata_is_the_same_object():
    dp = load_datapackage(fs_or_obj=ZipFS(str(dirpath / "test-fixture.zip")))
    fdp = dp.filter_by_attribute("matrix", "sa_matrix")

    for k, v in fdp.metadata.items():
        if k != "resources":
            assert id(v) == id(dp.metadata[k])

    for resource in fdp.resources:
        assert any(obj for obj in dp.resources if obj is resource)
Example 27
def generic_zipfile_filesystem(
    *, dirpath: Path, filename: str, write: bool = True
) -> ZipFS:
    assert isinstance(dirpath, Path), "`dirpath` must be a `pathlib.Path` instance"
    if not dirpath.is_dir():
        raise ValueError("Destination directory `{}` doesn't exist".format(dirpath))
    return ZipFS(dirpath / filename, write=write)
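A usage sketch for generic_zipfile_filesystem, writing into a temporary directory (fs 2.x API; file names invented):

import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as td:
    zfs = generic_zipfile_filesystem(dirpath=Path(td), filename="out.zip")
    zfs.writetext("hello.txt", "hi")    # anything written lands inside out.zip
    zfs.close()                         # the archive is finalized on close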
Example 28
def test_database_process_adds_default_geo(add_biosphere):
    database = Database("food")
    new_food = copy.deepcopy(food)
    for v in new_food.values():
        del v["location"]
    database.write(new_food)

    package = load_datapackage(ZipFS(database.filepath_processed()))
    data = package.get_resource("food_inventory_geomapping_matrix.indices")[0]

    assert np.allclose(data["col"], geomapping[config.global_location])
Example 29
def test_fdp_can_load_proxy_first():
    dp = load_datapackage(fs_or_obj=ZipFS(str(dirpath / "test-fixture.zip")),
                          proxy=True)
    fdp = dp.filter_by_attribute("matrix", "sa_matrix")
    arr2, _ = fdp.get_resource("sa-data-array.data")
    arr1, _ = dp.get_resource("sa-data-array.data")

    assert np.allclose(arr1, arr2)
    assert arr1.base is not arr2
    assert arr2.base is not arr1
    assert not np.shares_memory(arr1, arr2)
Example 30
def test_integration_test_fixture_zipfile():
    loaded = load_datapackage(
        ZipFS(
            str(
                Path(__file__).parent.resolve() / "fixtures" /
                "test-fixture.zip"),
            write=False,
        ))

    check_metadata(loaded, False)
    check_data(loaded)
Example 31
def sensitivity_dps():
    class VectorInterface:
        def __next__(self):
            return np.array([1, 2, 3])

    class ArrayInterface:
        @property
        def shape(self):
            return (3, 100)

        def __getitem__(self, args):
            return np.ones((3, )) * args[1]

    dp_1 = bwp.load_datapackage(ZipFS(dirpath / "sa-1.zip"))
    dp_1.rehydrate_interface("a", ArrayInterface())

    dp_2 = bwp.load_datapackage(ZipFS(dirpath / "sa-2.zip"))
    dp_2.rehydrate_interface("d", VectorInterface())

    return dp_1, dp_2
Example 32
    def run(self):
        args = self.args
        device_class = args.device_class
        conf_path = constants.CONF_PATH

        if not os.path.exists(conf_path):
            sys.stderr.write('{} does not exist.\n'.format(conf_path))
            sys.stderr.write("please run 'dataplicity init' first\n")
            return -1

        print "reading conf from {}".format(conf_path)
        cfg = settings.read(conf_path)
        serial = cfg.get('device', 'serial')
        auth_token = cfg.get('device', 'auth')
        server_url = cfg.get('server', 'url', constants.SERVER_URL)

        remote = jsonrpc.JSONRPC(server_url)

        print "downloading firmware..."
        with remote.batch() as batch:
            batch.call_with_id('register_result',
                               'device.register',
                               auth_token=auth_token,
                               name=args.name or serial,
                               serial=serial,
                               device_class_name=device_class)
            batch.call_with_id('auth_result',
                               'device.check_auth',
                               device_class=device_class,
                               serial=serial,
                               auth_token=auth_token)
            batch.call_with_id('firmware_result', 'device.get_firmware')
        batch.get_result('register_result')
        batch.get_result('auth_result')
        fw = batch.get_result('firmware_result')

        if not fw['firmware']:
            sys.stderr.write('no firmware available!\n')
            return -1
        version = fw['version']

        firmware_bin = b64decode(fw['firmware'])
        firmware_file = StringIO(firmware_bin)
        firmware_fs = ZipFS(firmware_file)

        dst_fs = OSFS(constants.FIRMWARE_PATH, create=True)

        firmware.install(device_class, version, firmware_fs, dst_fs)

        fw_path = dst_fs.getsyspath('/')
        print "installed firmware {} to {}".format(version, fw_path)

        firmware.activate(device_class, version, dst_fs)
        print "activated {}".format(version)
Example 33
 def test_url_on_sys_path(self):
     t = TempFS()
     zpath = t.getsyspath("modules.zip")
     z = ZipFS(zpath, "w")
     self._init_modules(z)
     z.close()
     z = ZipFS(zpath, "r")
     assert z.isfile("fsih_hello.py")
     z.close()
     sys.path.append("zip://" + zpath)
     FSImportHook.install()
     try:
         self._check_imports_are_working()
     finally:
         sys.path_hooks.remove(FSImportHook)
         sys.path.pop()
         t.close()
Example 34
def install_encoded(device_class, version, firmware_b64, activate_firmware=True, firmware_path=None):
    """Install firmware from a b64 encoded zip file"""
    # TODO:  implement this in a less memory hungry way
    # decode from b64
    firmware_bin = base64.b64decode(firmware_b64)
    # Make a file-like object
    firmware_file = BytesIO(firmware_bin)
    # Open zip
    firmware_fs = ZipFS(firmware_file)
    # Open firmware dir
    dst_fs = OSFS(firmware_path or constants.FIRMWARE_PATH, create=True, dir_mode=0o755)
    # Install
    install_path = install(device_class, version, firmware_fs, dst_fs)
    # Move symlink to active firmware
    if activate_firmware:
        activate(device_class, version, dst_fs, fw_path=firmware_path)

    # Clean up any temporary files
    firmware_fs.close()
    dst_fs.close()

    # Return install_path
    return install_path
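A usage sketch for install_encoded, building the b64 payload from an in-memory zip. The archive content and paths are invented; whether install() accepts this payload depends on dataplicity's firmware layout, which isn't shown here. The ZipFS(file_obj, 'w') call follows the legacy fs 0.x style used in the source.

import base64
from io import BytesIO
from fs.zipfs import ZipFS

# Build a tiny firmware archive in memory, then encode it as the server would.
buf = BytesIO()
zfs = ZipFS(buf, 'w')
zfs.setcontents('firmware.conf', b'[firmware]\nversion = 1\n')   # invented content
zfs.close()
firmware_b64 = base64.b64encode(buf.getvalue())

install_path = install_encoded('default', 1, firmware_b64,
                               firmware_path='/tmp/firmware')    # invented paths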
Example 35
 def test_url_on_sys_path(self):
     t = TempFS()
     zpath = t.getsyspath("modules.zip")
     z = ZipFS(zpath,"w")
     self._init_modules(z)
     z.close()
     z = ZipFS(zpath,"r")
     assert z.isfile("fsih_hello.py")
     z.close()
     sys.path.append("zip://" + zpath)
     FSImportHook.install()
     try:
         self._check_imports_are_working()
     finally:
         sys.path_hooks.remove(FSImportHook)
         sys.path.pop()
         t.close()
Example 36
def extract_world_archive(event, context, flog):
    flog.info('Starting world archive extraction...')

    bucket_name = event['bucket']['name']
    object_key = event['object']['key']

    flog.debug('Event object: %s::%s', bucket_name, object_key)

    # TODO: error handling
    api_key = os.path.splitext(os.path.split(object_key)[1])[0]
    world = World.select().where(World.api_key == api_key).get()
    user = world.user

    flog.info('Extracting for user::world: %s:%s', user.guid, world.guid)

    object_fd = fsopen('s3://{bucket}/{key}'.format(
        bucket=bucket_name,
        key=object_key,
    ), 'rb')
    archive_fs = ZipFS(object_fd, 'r')
    dest_fs = fsopendir('s3://{bucket}/'.format(bucket=bucket_name))
    dest_prefix = 'worlds/{user_guid}/{world_guid}/'.format(
        user_guid=user.guid,
        world_guid=world.guid,
    )

    for fn in archive_fs.walkfiles(wildcard='level.dat'):
        level_dat_fn = fn
        break
    flog.debug('Found level.dat at: %s', level_dat_fn)

    archive_fs = archive_fs.opendir(os.path.dirname(level_dat_fn))

    flog.info('Extracting level.dat')
    # TODO: make sure these paths are actually safe
    dest_fs.setcontents(
        safe_path_join(dest_prefix, 'level.dat'),
        archive_fs.getcontents('level.dat'))
    for region_fn in archive_fs.walkfiles(wildcard='*.mca'):
        flog.info('Extracting file: %s', region_fn)
        dest_fs.setcontents(
            safe_path_join(dest_prefix, region_fn),
            archive_fs.getcontents(region_fn))

    flog.info('Finished world archive extraction')
Example 37
 def set(self, key, value):
     fs = None
     try:
         fullFileName = os.path.join(self.cache_dir, '{0}.dat'.format(key))
         dirName = os.path.dirname(fullFileName)
         pkg = os.path.basename(dirName)
         fileName = os.path.basename(fullFileName)
         isZip = pkg.lower().endswith('.zip')
         save = True
         if isZip:
             fs = ZipFS(dirName, mode='a', compression='stored')
             if fs.exists(fileName):
                 log('[WARNING] archive "{}" already contains file "{}"; the new data will not be saved.'.format(pkg, fileName))
                 save = False
         else:
             fs = OSFS(dirName, create=True)
         if save:
             fs.setcontents(fileName, cPickle.dumps(value))
     except Exception:
         err(traceback.format_exc())
     finally:
         if fs is not None:
             fs.close()