Example #1
    def open_video(self) -> None:
        if not exists(self.original_location):
            raise Exception('Video File not readable')
        if not is_video(self.original_location):
            raise Exception("file is no video (.mp4, .mov, .avi, .mkv)")

        self._video = cv2.VideoCapture(self.original_location)
Example #2
def update_repo(storage, sign):
    filelists = {}
    primary = {}
    revision = "0"

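    # Load the existing repomd.xml plus its filelists and primary metadata, if present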
    if storage.exists('repodata/repomd.xml'):
        data = storage.read_file('repodata/repomd.xml')

        filelists, primary, revision = parse_repomd(data)

        data = storage.read_file(filelists['location'])
        filelists = parse_filelists(gunzip_string(data))

        data = storage.read_file(primary['location'])
        primary = parse_primary(gunzip_string(data))

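    # (location, mtime) pairs for packages already recorded in the primary metadata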
    recorded_files = set()
    for package in primary.values():
        recorded_files.add((package['location'], float(package['file_time'])))

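    # (path, mtime) pairs for the .rpm files currently present in storage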
    existing_files = set()
    expr = r'^.*\.rpm$'
    for file_path in storage.files('.'):
        match = re.match(expr, file_path)

        if not match:
            continue

        mtime = storage.mtime(file_path)

        existing_files.add((file_path, mtime))

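    # Anything in storage that is not yet recorded must be added to the metadata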
    files_to_add = existing_files - recorded_files

    for file_path, mtime in files_to_add:
        print("Adding: '%s'" % file_path)

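        # Fetch the package into a temp dir to read its RPM header, checksum and size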
        tmpdir = tempfile.mkdtemp()
        storage.download_file(file_path, os.path.join(tmpdir, 'package.rpm'))

        rpminfo = rpmfile.RpmInfo()
        header = rpminfo.parse_file(os.path.join(tmpdir, 'package.rpm'))
        sha256 = file_checksum(os.path.join(tmpdir, 'package.rpm'), "sha256")

        statinfo = os.stat(os.path.join(tmpdir, 'package.rpm'))
        size = statinfo.st_size

        shutil.rmtree(tmpdir)

        nerv, prim = header_to_primary(header, sha256, mtime, file_path,
                                       rpminfo.header_start, rpminfo.header_end,
                                       size)
        _, flist = header_to_filelists(header, sha256)

        primary[nerv] = prim
        filelists[nerv] = flist

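    # Bump the revision and regenerate the filelists, primary and repomd documents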
    revision = str(int(revision) + 1)

    filelists_str = dump_filelists(filelists)
    primary_str = dump_primary(primary)

    repomd_str = generate_repomd(filelists_str, primary_str, revision)

    filelists_gz = gzip_string(filelists_str)
    primary_gz = gzip_string(primary_str)
    filelists_gz_sha256 = string_checksum(filelists_gz, 'sha256')
    primary_gz_sha256 = string_checksum(primary_gz, 'sha256')
    filelists_name = 'repodata/%s-filelists.xml.gz' % filelists_gz_sha256
    primary_name = 'repodata/%s-primary.xml.gz' % primary_gz_sha256

    storage.write_file(filelists_name, filelists_gz)
    storage.write_file(primary_name, primary_gz)
    storage.write_file('repodata/repomd.xml', repomd_str)

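    # Optionally produce a GPG signature for repomd.xml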
    if sign:
        repomd_str_signed = gpg_sign_string(repomd_str)
        storage.write_file('repodata/repomd.xml.asc', repomd_str_signed)
Example #3

def analyse_folder(args):
    for file in get_files(args.input):
        analyse_recording(os.path.join(args.input, file), args)


if __name__ == "__main__":
    args = setup_parser().parse_args()

    coloredlogs.install(fmt="%(asctime)s %(levelname)s %(message)s",
                        level=logging.DEBUG if args.debug else logging.INFO)

    if not exists(args.input):
        logging.error("File / Folder does not exist")
        sys.exit(-1)

    if is_folder(args.input):
        analyse_folder(args)
    else:
        analyse_recording(args.input, args)

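    # Move recordings that failed analysis, except split-video parts, to the unsure folder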
    for recording in recordings:
        if (recording.has_errors()
                and recording.error != 'video is split'
                and not recording.is_part_of_split):
            move_error_file(recording.original_location, args.unsure,
                            args.dry_run)

    print_summary(recordings)
Example #4
        if not fences or fences["metadata"].get("returnert", 0) == 0:
            time.sleep(sleep_time)
            continue

        # TODO: Check if returned JSON has paging. If so, fetch the rest of
        #       the geofence objects
        vegobjekt_ids = []
        try:
            ic.connect()
            log.debug("Connect to interchange.")
            for fence in fences.get("objekter"):
                # Sample all vegobjekt IDs to check whether any have been
                # deleted from NVDB
                vegobjekt_ids.append(int(fence.get("id", 0)))

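                # Geofences not yet in storage are new: build a DATEX II document and push it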
                if not storage.exists(fence):
                    datex_obj = datex2.create_doc(fence)
                    #datex_obj.name = unicode("TestÆØÅ-New")

                    msg = u"New geofence: id={}, version={}, name={}".format(
                        fence.get("id"), datex_obj.version, datex_obj.name)
                    log.info(msg)
                    try:
                        slack_notify(msg, slack_url)
                    except Exception:
                        log.warn("Unable to send slack notification")

                    try:
                        ic.send_obj(datex_obj)
                    except ConnectionError as ce:
                        raise ce