Example #1
    def testMakeArchiveSubdir1(self):
        # test archiving a subdir with a file and extracting relative

        base = self.tempdir
        rar = opj(base, "testarchive1.rar")
        a = archive.Archive(rar, base)

        drel_top = "A"
        drel = opj(drel_top, "1", "a")
        dabs = opj(base, drel)

        frel = opj(drel, "testfile-1")
        fabs = opj(base, frel)

        file_helper.mkdir_p(dabs)
        file_helper.create_file(fabs, "content-1")

        a.add_dir(drel_top)

        self.assertEqual(a.run(), 0)
        self.assertEqual(file_helper.file_exists(rar), True)

        file_helper.remove_file(fabs)
        self.assertEqual(file_helper.file_exists(fabs), False)

        # test content of archive by extracting:
        b = archive.Archive(rar, base)
        syscode = b.extract(target_path=self.tempdir)
        self.assertEqual(syscode, 0)

        self.assertEqual(file_helper.file_head(fabs), "content-1")
Example #2
    def testMakeArchive(self):
        # Part A creates a single file, archives it, then deletes it
        # Part B extracts the file again from the archive and checks the content

        base = self.tempdir

        frel = "testfile"
        fabs = opj(base, "testfile")
        rar = opj(base, "testarchive1.rar")

        # Part A:
        file_helper.create_file(fabs, "content123test")

        a = archive.Archive(rar, base)
        a.add_file(frel)

        self.assertEqual(a.run(), 0)

        self.assertEqual(file_helper.file_exists(rar), True)

        file_helper.remove_file(fabs)
        self.assertEqual(file_helper.file_exists(fabs), False)

        # Part B:
        b = archive.Archive(rar, base)
        syscode = b.extract(target_path=base)
        self.assertEqual(syscode, 0)

        self.assertEqual(file_helper.file_head(fabs), "content123test")
Example #3
    def test_bot(self):
        arc = archive.Archive(":memory:")
        bot = archive_bot.ArchiveBot(arc)

        # Add a new entry
        bot.handle_message("!add somelink tags: [a, b, c]", no_extras)
        print(bot.handle_message("!get 1", no_extras))

        # Update old entry
        bot.handle_message("!update 1 tags: [d]", no_extras)
        print(bot.handle_message("!get 2", no_extras))

        # Add a bare entry
        assert not "error" in bot.handle_message("!add link", no_extras)

        # Add a complete entry
        assert not "error" in bot.handle_message(
            "!add link name: link tags: [a, b, c] read: someone",
            {
                "author": ["none"],
                "file": ("filename.txt", b"some content")
            },
        )

        # Find entries
        assert not "error" in bot.handle_message("!find", no_extras)

        assert "error" in bot.handle_message('!add "broken link"', no_extras)
Example #4
    def get_path(source):
        uri = urlparse.urlparse(source)

        if uri.scheme and uri.path.endswith(".git"):
            po = remote.GitRepo(source)
        elif uri.scheme.startswith("svn"):
            po = remote.SvnRepo(source)
        elif uri.scheme in ("http", "https", "ftp"):
            po = remote.Web(source)
        elif os.path.isfile(source):
            if any(source.endswith(ext) for ext in ('.zip', '.gz', '.tar')):
                po = archive.Archive(source)
            elif source.endswith('.crx'):
                po = archive.ChromeExtensionCrx(source)
            elif source.endswith('.xpi'):
                po = archive.FirefoxExtensionXpi(source)
            elif source.endswith('.apk'):
                po = decompilable.Apk(source)
            elif source.endswith('.jar'):
                po = decompilable.Jar(source)
            else:
                # we do not know what it is
                logger.debug("unknown: %s" % source)
                c = Cigma()
                try:
                    with open(source, 'rb') as f:
                        data = f.read(384)
                        magic = c.cigma(data=data)
                        logger.debug(magic)
                        d_magic = magic["magic"]
                        if not d_magic:
                            po = local.LocalPath(source)
                        else:
                            mimetype = d_magic.get('mimetype', "")
                            if mimetype.startswith(
                                    "application/x-executable-32"):
                                po = decompilable.Application32Bits(source)
                            elif mimetype.startswith(
                                    "application/x-executable-64"):
                                po = decompilable.Application64Bits(source)
                            elif mimetype.startswith("application/"):
                                petype = PeFileFormat.petype(data)
                                logger.debug(petype)
                                if petype == PeFileFormat.TYPE_PE64:
                                    po = decompilable.Application64Bits(source)
                                elif petype == PeFileFormat.TYPE_DOTNET:
                                    po = decompilable.ApplicationDotNet(source)
                                else:
                                    po = decompilable.Application32Bits(source)
                            else:
                                po = local.LocalPath(source)
                except Exception as e:
                    logger.exception("exception!")
                    po = local.LocalPath(source)
        elif os.path.isdir(source):
            po = local.LocalPath(source)

        logger.debug("scheme: %s" % uri.scheme)
        logger.debug(po)
        return po
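
For reference, the dispatch above might behave like this on a few hypothetical inputs (the URLs and paths are invented; the resulting classes follow the branches of get_path):

    get_path("https://github.com/user/repo.git")  # remote.GitRepo
    get_path("svn://svn.example.org/project")     # remote.SvnRepo
    get_path("https://example.org/page.html")     # remote.Web
    get_path("/tmp/app.apk")                      # decompilable.Apk, if the file exists
    get_path("/tmp/src")                          # local.LocalPath, if the directory exists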
Example #5
def load_prev_image():
    global a
    global position

    if position <= 1:
        load_archive(archive_index - 1)
        load_next_image()
    else:
        position -= 1

        try:
            a = iter(archive.Archive(archives[archive_index]))
        except archive.core.Error:
            # This is really bad :/
            # Skip to next archive.
            load_archive(archive_index + 1)
            return  # the next archive is already loaded; stop here

        element = a.__next__()

        for i in range(1, position):
            element = a.__next__()

        buff = element.read()

        if len(buff) > 0:
            load_image(buff, element.filename)
        else:
            load_prev_image()
Example #6
    def test_archive(self):
        arc = archive.Archive(":memory:")

        link = "https://www.google.com/"
        tags = {"tag1", "tag2"}

        arc.add(name="name", link=link, tags=tags)

        self.assertEqual((link, tags), arc.get(1, ["link", "tags"]))

        new_tag = {"new tag1", "new tag2"}
        arc.update(1, {"tags": new_tag})

        self.assertEqual((new_tag,), arc.get(2, ["tags"]))

        # Broken links are not allowed
        self.assertRaises(Exception, arc.add, link="broken link")

        # Complex update
        arc.update(
            2,
            {
                "tags": {"add": ["tag3", "tag4"], "sub": ["new tag1"]},
                "name": ["new name"],
            },
        )

        result = arc.get(3, ["name", "tags"])
        self.assertEqual(result[0], "new name")

        self.assertEqual(result[1], {"new tag2", "tag4", "tag3"})
Example #7
def collect(pkg_name):
    """collect the needed package file from system
    create a zip file of these files

    :param pkg_name: the name of needed package
    """
    collect_deb = collector.Collector(pkg_name)
    collect_deb.extract_installed_package()
    collect_deb.package_exist()
    collect_deb.collector_file()  # /usr/...

    # /DEBIAN/...
    control_info = collect_deb.get_control()
    if control_info is not None:
        os.mkdir("donut/DEBIAN")
        with open("donut/DEBIAN/control", 'w') as file:
            file.write(control_info)
        collect_deb.fix_control("donut/DEBIAN/control")
    collect_deb.extract_md5()

    time_setter = archive.Archive("{}/donut".format(os.getcwd()), pkg_name,
                                  "zip", "1 Jan 18")
    time_setter.set_default_time()  # time-sync
    time_setter.pack_it()  # zip them; this file is asserted to be the same on any machine
    collect_deb.clean()
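
A hypothetical invocation, reusing the package name that Example #9 below works with:

    collect("gcc-8-base")  # stages gcc-8-base under ./donut and packs a time-normalized zip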
Example #8
 def testMakeArchiveFail1(self):
     # create an archive of a non-existing file; this is the only file in the archive!
     base = self.tempdir
     a = archive.Archive(opj(base, "testarchive1.rar"), base)
     a.add_file("this-file-does-not-exist")
     syscode = a.run()
     self.assertNotEqual(syscode, 0)
Example #9
def build_deb(pkg_name):
    """unzip the packed file and build deb package
    """
    zip_mag = archive.Archive("{}/donut".format(os.getcwd()), pkg_name, "zip", "1 Jan 18")
    zip_mag.un_zip("{}/Coding/gcc-8-base_donut_decoded.zip".format(os.getcwd()),
                   "{}/donut".format(os.getcwd()))
    collect_deb = collector.Collector(pkg_name)
    collect_deb.build("{}/donut".format(os.getcwd()), "{}/{}_donut.deb".format(os.getcwd(), pkg_name))
Example #10
def load_archive(idx):
    global a
    global position
    global archive_index

    # When going back, clamp to the first archive instead of exiting.
    if idx < 0:
        idx = 0

    # Don't read further than you can!
    if idx >= len(archives):
        print("Done :)")
        sys.exit(0)

    try:
        a = iter(archive.Archive(archives[idx]))
    except archive.core.Error:
        # Shouldn't happen since we already checked the existence of
        # the file. If this happens, at least don't crash!
        load_archive(idx + 1)
        return  # the recursive call has set up the next archive

    if idx < archive_index:
        # Here it's a bit hard, since we need to
        # know how many entries there are. pyarchive
        # doesn't look like it provides a 'len' field, so...
        try:
            while True:
                a.__next__()
                position += 1
        except StopIteration:
            position -= 2

        # Go one step back...
        try:
            a = iter(archive.Archive(archives[idx]))
        except archive.core.Error:
            # Shouldn't happen.
            # Go back to the one that didn't fail.
            load_archive(idx + 1)
            return  # the recursive call handled the fallback

        for x in range(0, position):
            a.__next__()
    else:
        position = 0

    archive_index = idx
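
The counting loop above is needed because this Archive type exposes no length, so the only way to learn the entry count is to exhaust an iterator and then re-open the archive. A minimal helper along the same lines, assuming only that archive.Archive(path) is iterable as these examples demonstrate:

    def archive_len(path):
        # O(n): exhaust a fresh iterator, since the archive has no 'len'.
        count = 0
        for _ in archive.Archive(path):
            count += 1
        return count

An equivalent one-liner is sum(1 for _ in archive.Archive(path)).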
Example #11
    def test_update(self):
        arc = archive.Archive(":memory:")
        bot = archive_bot.ArchiveBot(arc)

        bot.handle_message('!add "somelink.com" tags: [a, b]', no_extras)

        assert not "error" in bot.handle_message(
            '!update 1 tags: +[c, d] -e link: "otherlink.com" name: name',
            no_extras)
Example #12
 def archive(self, inputs):
     v_inputs = inputs
     self.input_validation(v_inputs)
     print(v_inputs)
     archive = a.Archive(source=v_inputs['arc_path'],
                         dest=v_inputs['arc_dest'],
                         format_='zip')
     archive.publish(key=v_inputs['key'], toc=True)
     print('Archive complete.')
Example #13
 def testNames(self):
     ar = archive.Archive('test.tgz')
     self.assertSetEqual(
         set(a.filename for a in ar),
         set([
             'archive/__init__.py',
             'archive/core.pyx',
             'setup.py',
         ]))
Example #14
 def testData(self):
     ar = archive.Archive('test.tgz')
     self.assertEqual(
         {a.filename: md5(a.read()).hexdigest()
          for a in ar}, {
              'archive/__init__.py': '29a6a1e050bd42fe24cd17b138d4b08d',
              'archive/core.pyx': '1bd9e27890beb0b576a2122e7b57ca8c',
              'setup.py': 'de88961c0eca3d7875894eae7d551d18',
          })
Example #15
    def test_find(self):
        arc = archive.Archive(":memory:")

        arc.add(name="link", link="link.com", tags=["atag"])
        arc.add(name="other", link="other", tags=["other"])

        assert len(arc.find({"name": "link"})) == 1
        assert len(arc.find({"link": "link.com"})) == 1
        assert len(arc.find({"tags": ["atag"]})) == 1
        assert len(arc.find({"keyword": "link"})) == 1
Example #16
def ArchiveAndUpload(info, archive_latest=False):
    print '@@@BUILD_STEP dartium_generate_archive@@@'
    cwd = os.getcwd()

    dartium_bucket = info.name
    dartium_bucket = dartium_bucket.replace('multivm', 'multivm-dartium')
    drt_bucket = dartium_bucket.replace('dartium', 'drt')
    chromedriver_bucket = dartium_bucket.replace('dartium', 'chromedriver')
    dartium_archive = dartium_bucket + '-' + info.version
    drt_archive = drt_bucket + '-' + info.version
    chromedriver_archive = chromedriver_bucket + '-' + info.version
    dartium_zip, drt_zip, chromedriver_zip = archive.Archive(
        SRC_PATH,
        info.mode,
        dartium_archive,
        drt_archive,
        chromedriver_archive,
        is_win_ninja=info.is_win_ninja)

    status = 0
    # Upload bleeding-edge builds to old dartium-archive bucket
    if info.channel == 'be':
        status = (OldUpload('dartium',
                            dartium_bucket,
                            os.path.abspath(dartium_zip),
                            archive_latest=archive_latest)
                  or OldUpload('drt',
                               drt_bucket,
                               os.path.abspath(drt_zip),
                               archive_latest=archive_latest)
                  or OldUpload('chromedriver',
                               chromedriver_bucket,
                               os.path.abspath(chromedriver_zip),
                               archive_latest=archive_latest))

    # Upload to new dart-archive bucket using GCSNamer, but not incremental
    # or perf builder builds.
    if not info.is_incremental and not info.is_build:
        Upload('dartium',
               os.path.abspath(dartium_zip),
               info,
               archive_latest=archive_latest)
        Upload('drt',
               os.path.abspath(drt_zip),
               info,
               archive_latest=archive_latest)
        Upload('chromedriver',
               os.path.abspath(chromedriver_zip),
               info,
               archive_latest=archive_latest)

    os.chdir(cwd)
    if status != 0:
        print '@@@STEP_FAILURE@@@'
    return status
Example #17
    def test_files(self):
        arc = archive.Archive(":memory:")
        bot = archive_bot.ArchiveBot(arc)

        file = ("filename.txt", b"some content here")

        # Send pseudo-file
        print(bot.handle_message("!add file", {"file": file}))

        # Get file
        print(bot.handle_message("!get 1", {}))
Example #18
 def testMakeArchiveFailSubdirs(self):
     # create an archive in a non-existing directory
     base = self.tempdir
     aname = opj(self.tempdir, "dir/doesnt/exist", "testarchive1.rar")
     a = archive.Archive(aname, base)
     file_helper.create_file(opj(self.tempdir,
                                 "testfile"))  # empty file is ok
     a.add_file("testfile")
     syscode = a.run()
     # should fail, because the target dir does not exist:
     self.assertNotEqual(syscode, 0)
Example #19
    def __call__(self):
        # TODO: Exception safety
        # TODO: If a branch name is given, checkout the branch from remote
        # TODO: Reset back to 'origin/master' afterwards
        git.reset(self.revision)
        revhash = git.id()

        # TODO: Use same content streams for all architectures
        upload_jobs = []
        for arch in arches.arches:
            date = time.strftime('%Y%m%d')
            filename = '%s-snapshot-%s-%s-%s' % (self.build_type, date,
                                                 revhash[:10], arch)

            try:
                archive_stream = StringIO.StringIO()
                archive_obj = archive.Archive(arch, archive_stream)

                if arch.startswith('darwin'):
                    result, uuid = autobuild.obtain(self.amqp_connection,
                                                    revhash, arch,
                                                    [self.build_type])
                    name, stream = result[0]
                    archive_obj.add(name, stream.read())
                else:
                    if self.build_type == 'openclonk':
                        for name, stream in contentiter.ContentIter(
                                groupcontent.snapshot):
                            archive_obj.add(name, stream.read())

                    arch_iter = architer.ArchIter(self.amqp_connection, arch,
                                                  revhash, self.build_type)
                    for name, stream in arch_iter:
                        archive_obj.add(name, stream.read())
                    uuid = arch_iter.uuid

                archive_filename = archive_obj.get_filename(filename)
                archive_obj.close()
                archive_stream.seek(0)

                upload_jobs.append(
                    (archive_filename, archive_stream, uuid, arch))
            except autobuild.AutobuildException as ex:
                # make an entry for "failed build"
                archive_filename = archive_obj.get_filename(filename)
                upload_jobs.append((archive_filename, None, ex.uuid, arch))

        uploader = upload.Uploader(self.log, self.dry_release)
        for archive_filename, archive_stream, uuid, arch in upload_jobs:
            if archive_stream is not None:  # Needed to skip mape osx build(?)
                uploader.nightly_file(self.build_type, archive_filename,
                                      archive_stream, uuid, revhash[:10], arch)

        return True
Example #20
    def testExclude(self):
        # test that a file excluded via exclude() is left out of the archive

        base = self.tempdir
        rar = opj(base, "testarchive1.rar")
        a = archive.Archive(rar, base)

        drel_top = "A"
        drel = opj(drel_top, "1", "a")
        dabs = opj(base, drel)
        file_helper.mkdir_p(dabs)

        frel = opj(drel, "testfile-1")
        fabs = opj(base, frel)
        file_helper.create_file(fabs, "content-2")

        frel2 = opj(drel, "testfile-2")
        fabs2 = opj(base, frel2)
        file_helper.create_file(fabs2, "content-2")

        a.add_dir(drel_top)
        a.exclude(frel)  # exclude the first file!

        self.assertEqual(a.run(), 0)
        self.assertEqual(file_helper.file_exists(rar), True)

        file_helper.remove_file(fabs)
        self.assertEqual(file_helper.file_exists(fabs), False)

        file_helper.remove_file(fabs2)
        self.assertEqual(file_helper.file_exists(fabs2), False)

        # test content of archive by extracting:
        b = archive.Archive(rar, base)
        syscode = b.extract(target_path=self.tempdir)
        self.assertEqual(syscode, 0)

        # the first file was excluded, so it must not exist:
        self.assertEqual(file_helper.file_exists(fabs), False)
        # the second file must be there:
        self.assertEqual(file_helper.file_head(fabs2), "content-2")
Example #21
    def acquire_toolchains(self, required, registered, path, debug_calls):
        return_paths = []
        for toolchain in required:
            if toolchain in registered:
                # if the toolchain is registered as 'local', return an empty string
                if registered[toolchain]["remote"] is False:
                    return_paths.append("")
                    continue

                toolchain_location = registered[toolchain]["server"]

                with self.cd(path + "/bin"):
                    # if the archive exists but the catalog does not, it is
                    # probably a malformed archive - delete it
                    if os.path.isfile(os.path.basename(toolchain_location)):
                        if not os.path.isdir(registered[toolchain]["path"]):
                            self.print_message(self.logtype.INFO,
                                               "Toolchain archive seems to ",
                                               "be corrupted, redownloading")
                            os.remove(os.path.basename(toolchain_location))

                    if not os.path.isfile(
                            os.path.basename(toolchain_location)):
                        call = "wget " + toolchain_location
                        if self.call_tool(call) != 0:
                            self.print_message(self.logtype.ERROR,
                                               "Error while downloading",
                                               "toolchain")
                            raise NameError("Required toolchains: " +
                                            ", ".join(required))
                        try:
                            a = archive.Archive(
                                os.path.basename(toolchain_location))
                            a.extract()
                        except Exception as ext:
                            # the downloaded file is corrupted, delete it
                            os.remove(os.path.basename(toolchain_location))

                            self.print_message(
                                self.logtype.ERROR, "Error while unpacking",
                                os.path.basename(toolchain_location),
                                "toolchain.", str(ext), "- deleting.")

                            raise NameError("Required toolchains: " +
                                            ", ".join(required))

                return_paths.append(path + "/bin/" +
                                    registered[toolchain]["path"])
            else:
                self.print_message(self.logtype.ERROR, toolchain,
                                   "toolchain is not registered")
                raise NameError("Required toolchains: " + ", ".join(required))
        return return_paths
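
The download-extract-cleanup flow above reduces to a small pattern worth naming: if extraction fails, delete the archive so a corrupt download is never reused on the next run. A stripped-down sketch (names hypothetical, assuming the same archive.Archive API used above):

    import os
    import subprocess

    def fetch_and_extract(url):
        fname = os.path.basename(url)
        if subprocess.call(["wget", url]) != 0:
            raise RuntimeError("download failed: " + url)
        try:
            archive.Archive(fname).extract()
        except Exception:
            os.remove(fname)  # never leave a corrupt archive behind
            raise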
Example #22
        def archive_(aid):
            nowebp = int(request.args.get('nowebp', 0))
            fn = capp.archives[aid]['filename']
            ar = archive.Archive(fn)

            return render_template("archive.html",
                                   aid=aid,
                                   fn=fn,
                                   archive=ar,
                                   basename=os.path.basename,
                                   enumerate=enumerate,
                                   nowebp=nowebp)
Example #23
 def test_upload_two(self, s3_client):
     e = util.Encrypt(self.encryption_token)
     s3_client.return_value.download_fileobj.side_effect = ClientError(
         {}, "download_fileobj")
     ar = archive.Archive()
     try:
         self.make_dirs(self.srcdir, N=2, M=100)
         ar.upload_many(self.srcdir)
         self.assertEqual(s3_client.return_value.upload_fileobj.call_count,
                          2)
     finally:
         ar.close()
Example #24
    def testMakeArchiveFileInSubdir(self):
        # test archiving a file in a subdir and extracting relative

        base = self.tempdir
        rar = opj(base, "testarchive1.rar")

        # Part A:
        a = archive.Archive(rar, base)

        drel = opj("A", "1", "a")  # relative path to directory
        dabs = opj(base, drel)  # absolute
        frel = opj("A", "1", "a", "testfile-1")
        fabs = opj(base, frel)

        file_helper.mkdir_p(dabs)
        file_helper.create_file(fabs, "content-1")
        self.assertEqual(file_helper.file_exists(fabs), True)

        a.add_file(frel)  # must add relative paths

        # run and check if rar archive exists:

        self.assertEqual(a.run(), 0)
        self.assertEqual(file_helper.file_exists(rar), True)

        # remove original file and check:
        file_helper.remove_file(fabs)
        self.assertEqual(file_helper.file_exists(fabs), False)

        # Part B:
        b = archive.Archive(rar, base)
        syscode = b.extract(target_path=base)
        self.assertEqual(syscode, 0)

        self.assertEqual(file_helper.file_exists(fabs), True)

        # check content:
        self.assertEqual(file_helper.file_head(fabs), "content-1")
Example #25
 def __init__(self, filename):
     super().__init__()
     arch = archive.Archive(filename)
     pkgs = {}  # 'pkgs' instead of 'all' to avoid shadowing the builtin
     for f in arch:
         entry = parse_properties(f)
         pkgname = f.filename.rsplit('/', 1)[0]
         if pkgname in pkgs:
             pkgs[pkgname].update(entry)
         else:
             pkgs[pkgname] = entry
     for namever, props in pkgs.items():
         p = Package(**props)
         self.add_package(p)
Example #26
def extract_to_temp(
        file: FileStorage,
        ignore_filter: IgnoreFilterManager,
        handle_ignore: IgnoreHandling = IgnoreHandling.keep) -> str:
    """Extracts the contents of file into a temporary directory.

    :param file: The archive to extract.
    :param ignore_filter: The files and directories that should be ignored.
    :param handle_ignore: Determines how ignored files should be handled.
    :returns: The pathname of the new temporary directory.
    """
    tmpfd, tmparchive = tempfile.mkstemp()

    try:
        os.remove(tmparchive)
        tmparchive += os.path.basename(
            secure_filename('archive_' + file.filename))
        tmpdir = tempfile.mkdtemp()
        file.save(tmparchive)

        if handle_ignore == IgnoreHandling.error:
            arch = archive.Archive(tmparchive)
            wrong_files = ignore_filter.get_ignored_files_in_archive(arch)
            if wrong_files:
                raise IgnoredFilesException(invalid_files=wrong_files)
            arch.extract(to_path=tmpdir, method='safe')
        else:
            archive.extract(tmparchive, to_path=tmpdir, method='safe')
            if handle_ignore == IgnoreHandling.delete:
                ignore_filter.delete_from_dir(tmpdir)
    except (tarfile.ReadError, zipfile.BadZipFile):
        raise APIException(
            'The given archive could not be extracted',
            "The given archive doesn't seem to be an archive",
            APICodes.INVALID_ARCHIVE,
            400,
        )
    except (InvalidFile, archive.UnsafeArchive) as e:
        raise APIException(
            'The given archive contains invalid files',
            str(e),
            APICodes.INVALID_FILE_IN_ARCHIVE,
            400,
        )
    finally:
        os.close(tmpfd)
        os.remove(tmparchive)

    return tmpdir
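
A hypothetical call site (the names upload and filters are illustrative, not from the original module): upload would be a werkzeug FileStorage, e.g. pulled from request.files, and filters an IgnoreFilterManager:

    import shutil

    tmpdir = extract_to_temp(upload, filters,
                             handle_ignore=IgnoreHandling.delete)
    try:
        ...  # work with the extracted tree
    finally:
        shutil.rmtree(tmpdir)  # the caller owns the temporary directory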
Example #27
    def test_find(self):
        arc = archive.Archive(":memory:")
        bot = archive_bot.ArchiveBot(arc)

        # One single complex entry
        tags = ", ".join("somerandomtag" + str(e) for e in range(20))
        bot.handle_message("!add test name: name tags: [%s]" % tags, no_extras)

        # Add a bunch of entries to force multiple pages
        for _ in range(20):
            bot.handle_message("!add test", no_extras)

        assert not "error" in bot.handle_message("!find", no_extras)
        assert not "error" in bot.handle_message("!find page: 1", no_extras)
        assert not "error" in bot.handle_message("!find page: 2", no_extras)
Example #28
 def testIncremental(self):
     ar = archive.Archive('test.tgz')
     for ent in ar:
         if ent.filename == 'setup.py':
             break
     else:
         raise AssertionError("No setup.py")
     buf = b""
     while True:
         chunk = ent.read(100)
         if not chunk:
             break
         buf += chunk
     self.assertEqual(
         md5(buf).hexdigest(), 'de88961c0eca3d7875894eae7d551d18')
Example #29
    def test_add_get(self):
        arc = archive.Archive(":memory:")

        id1 = arc.add(name="link", link="link.com", tags=["a", "b"])

        assert not "error" in arc.get(id1)

        # Adding repeated tags should save just a single tag
        id2 = arc.add(name="link", link="link.com", tags=["a", "a"])

        # Select just the tags
        self.assertEqual(arc.get(id2, ["tags"]), ({"a"},))

        id3 = arc.add(file=("filename.txt", b"some content"))
        arc.get(id3)
Example #30
 def __init__(s, path, software):
     new = not os.path.isfile(path)
     s.archive = archive.Archive(path)
     s.metadata = {
         "createdOn": time.time(),
         "createdBy": getpass.getuser(),
         "name": os.path.basename(os.path.splitext(path)[0]),
         "software": software
     }
     with timer.Timer("Loading"):
         with s.archive:
             s.metadata = Dict(
                 dict(s.metadata,
                      **decode(s.archive.get("metadata.json",
                                             "{}"))))  # Metadata
             s.ref = Dict(
                 reference.Reference(
                     decode(s.archive.get("reference.json",
                                          "{}"))))  # Reference file
             s.data = Dict(decode(s.archive.get("data.json",
                                                "{}")))  # Storage
             if new:
                 s.metadata.diff = True
                 s.data.diff = True
                 s.ref.diff = True
             tree = dict((a, a.split("/")) for a in s.archive.keys())
             clipIDs = set(b[1] for a, b in tree.items() if b[0] == "clips")
             s.clips = Dict({})
             if clipIDs:
                 for ID in clipIDs:
                     c = Clip(ID)
                     c.metadata = Dict(
                         dict(
                             c.metadata,
                             **decode(
                                 s.archive.get(
                                     "clips/%s/metadata.json" % ID, "{}"))))
                     c.data = decode(
                         s.archive.get("clips/%s/data.json" % ID, "{}"))
                     thumbs = sorted([
                         a for a, b in tree.items() if b[0] == "clips"
                         and b[1] == ID and b[2] == "thumbs"
                     ])
                     if thumbs:
                         for th in thumbs:
                             c.thumbs.append(s.cache(th))
                     s.clips[ID] = c
             s.clips.diff