Example #1
def retrieve_playlist_tmp_files(playlist):
    """
    Retrieve all files for a given playlist into the temporary folder.
    """
    # Collect the ids of the shot previews that are stored as mp4 movies.
    preview_file_ids = []
    for shot in playlist["shots"]:
        if shot.get("preview_file_id"):
            preview_file = files_service.get_preview_file(
                shot["preview_file_id"])
            if preview_file is not None and preview_file["extension"] == "mp4":
                preview_file_ids.append(preview_file["id"])

    file_paths = []
    for preview_file_id in preview_file_ids:
        if config.FS_BACKEND == "local":
            file_path = file_store.get_local_movie_path(
                "previews", preview_file_id)
        else:
            # Download the movie from the object storage into a local cache
            # file, unless a non-empty cached copy already exists.
            file_path = os.path.join(
                config.TMP_DIR,
                "cache-previews-%s.mp4" % preview_file_id,
            )
            if (not os.path.exists(file_path)
                    or os.path.getsize(file_path) == 0):
                with open(file_path, "wb") as tmp_file:
                    for chunk in file_store.open_movie("previews",
                                                       preview_file_id):
                        tmp_file.write(chunk)

        # Copy the movie into the temporary folder under its final name.
        file_name = names_service.get_preview_file_name(preview_file_id)
        tmp_file_path = os.path.join(config.TMP_DIR, file_name)
        copyfile(file_path, tmp_file_path)
        file_paths.append((tmp_file_path, file_name))
    return file_paths
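
A minimal usage sketch for the function above: the playlist is a dict with a "shots" list where each entry may carry a "preview_file_id", and shots without one are skipped. The id below is a hypothetical placeholder, and the call assumes a configured application (config, file_store and the services are available):

# Usage sketch; the id is a hypothetical placeholder and a configured
# application (config, file_store, services) is assumed.
playlist = {
    "shots": [
        {"preview_file_id": "a5f5e48e-0000-4c0a-b7a1-000000000001"},
        {"preview_file_id": None},  # skipped: no preview attached
    ]
}
for tmp_file_path, file_name in retrieve_playlist_tmp_files(playlist):
    print("%s -> %s" % (file_name, tmp_file_path))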
Example #2
def _run_remote_job_build_playlist(app, job, previews, params, movie_file_path,
                                   full):
    # Keep only the previews that are stored as mp4 movies.
    preview_ids = [
        preview["id"] for preview in previews if preview["extension"] == "mp4"
    ]
    # Pack the ids into a compressed, base64-encoded JSON string so they fit
    # into a single job parameter.
    input_bytes = zlib.compress(bytes(json.dumps(preview_ids), "utf-8"))
    input_string = base64.b64encode(input_bytes).decode("ascii")
    bucket_prefix = config.FS_BUCKET_PREFIX
    # Build the job parameters under a new name so the `params` argument,
    # which is still read here, is not shadowed.
    job_params = {
        "version": "1",
        "bucket_prefix": bucket_prefix,
        "output_filename": Path(movie_file_path).name,
        "output_key": file_store.make_key("playlists", job["id"]),
        "input": input_string,
        "width": params.width,
        "height": params.height,
        "fps": params.fps,
        "full": str(full).lower(),
    }
    nomad_job = config.JOB_QUEUE_NOMAD_PLAYLIST_JOB
    remote_job.run_job(app, config, nomad_job, job_params)

    # Once the remote job has finished, fetch the rendered playlist movie
    # from the object storage.
    with open(movie_file_path, "wb") as movie_file:
        for chunk in file_store.open_movie("playlists", job["id"]):
            movie_file.write(chunk)

    return movie_file_path
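
The remote job receives the preview ids through the "input" parameter. A sketch of how the worker side would presumably unpack it, simply reversing the encoding above (the function name is illustrative):

import base64
import json
import zlib

def decode_input(input_string):
    # Reverse of the encoding above: base64 -> zlib -> JSON list of ids.
    input_bytes = base64.b64decode(input_string)
    return json.loads(zlib.decompress(input_bytes).decode("utf-8"))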
Example #3
    def get(self, instance_id):
        if not self.is_exist(instance_id):
            abort(404)

        if not self.is_allowed(instance_id):
            abort(403)

        try:
            # Stream the movie from the file store with the proper mimetype.
            return send_file(file_store.open_movie("previews", instance_id),
                             mimetype="video/mp4")
        except FileNotFound:
            current_app.logger.error("File was not found for: %s",
                                     instance_id)
            abort(404)
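
From the client side, the endpoint returns the movie as a streamed video/mp4 response. A sketch using requests; the URL is hypothetical and depends on where the resource is mounted:

import requests

# Hypothetical URL; the actual route depends on how the resource is mounted.
instance_id = "a5f5e48e-0000-4c0a-b7a1-000000000001"
url = "https://example.com/movies/previews/%s" % instance_id
response = requests.get(url, stream=True)
response.raise_for_status()
with open("preview.mp4", "wb") as movie_file:
    for chunk in response.iter_content(chunk_size=4096):
        movie_file.write(chunk)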
Example #4
def _execute_nomad_job(job, previews, params, movie_file_path):
    # Imported here so that only the rq worker needs these dependencies.
    import nomad
    import zlib
    import json
    # Keep only the previews that are stored as mp4 movies.
    preview_ids = [
        preview["id"] for preview in previews if preview["extension"] == "mp4"
    ]
    # Pack the ids into a compressed, base64-encoded JSON string so they fit
    # into a single job parameter.
    input_bytes = zlib.compress(bytes(json.dumps(preview_ids), "utf-8"))
    input_string = base64.b64encode(input_bytes).decode("ascii")
    bucket_prefix = config.FS_BUCKET_PREFIX
    # Build the job parameters under a new name so the `params` argument,
    # which is still read here, is not shadowed.
    job_params = {
        "version": "1",
        "bucket_prefix": bucket_prefix,
        "output_filename": Path(movie_file_path).name,
        "output_key": file_store.make_key("playlists", job["id"]),
        "input": input_string,
        "width": params.width,
        "height": params.height,
        "fps": params.fps,
        "FS_BACKEND": config.FS_BACKEND,
    }
    # Pass the object storage credentials on to the remote job.
    if config.FS_BACKEND == "s3":
        job_params.update({
            "S3_ENDPOINT": config.FS_S3_ENDPOINT,
            "AWS_DEFAULT_REGION": config.FS_S3_REGION,
            "AWS_ACCESS_KEY_ID": config.FS_S3_ACCESS_KEY,
            "AWS_SECRET_ACCESS_KEY": config.FS_S3_SECRET_KEY,
        })
    elif config.FS_BACKEND == "swift":
        job_params.update({
            "OS_USERNAME": config.FS_SWIFT_USER,
            "OS_PASSWORD": config.FS_SWIFT_KEY,
            "OS_AUTH_URL": config.FS_SWIFT_AUTHURL,
            "OS_TENANT_NAME": config.FS_SWIFT_TENANT_NAME,
            "OS_REGION_NAME": config.FS_SWIFT_REGION_NAME,
        })

    # Read these from the environment rather than from 'app.config': only the
    # rq worker uses them, not the web app.
    nomad_job = os.getenv("JOB_QUEUE_NOMAD_PLAYLIST_JOB", "zou-playlist")
    nomad_host = os.getenv("JOB_QUEUE_NOMAD_HOST", "zou-nomad-01.zou")
    # The whole parameter dict is dispatched as a base64-encoded JSON payload.
    data = json.dumps(job_params).encode("utf-8")
    payload = base64.b64encode(data).decode("utf-8")
    ncli = nomad.Nomad(host=nomad_host, timeout=5)

    response = ncli.job.dispatch_job(nomad_job, payload=payload)
    nomad_jobid = response["DispatchedJobID"]

    # Poll the job summary until the dispatched job completes or fails.
    while True:
        summary = ncli.job.get_summary(nomad_jobid)
        # A dispatched playlist job has a single task group.
        task_group = list(summary["Summary"])[0]
        status = summary["Summary"][task_group]
        if status["Failed"] != 0 or status["Lost"] != 0:
            logger.debug("Nomad job %r failed: %r", nomad_jobid, status)
            out, err = _get_nomad_job_logs(ncli, nomad_jobid)
            out = textwrap.indent(out, "\t")
            err = textwrap.indent(err, "\t")
            raise Exception("Job %s is 'Failed' or 'Lost':\nStatus: "
                            "%s\nerr:\n%s\nout:\n%s" %
                            (nomad_jobid, status, err, out))
        if status["Complete"] == 1:
            logger.debug("Nomad job %r: complete", nomad_jobid)
            break
        # No explicit timeout here: both the rq job wrapping this call and
        # the Nomad job itself have their own timeouts.
        time.sleep(1)

    # fetch movie from object storage
    with open(movie_file_path, "wb") as movie_file:
        for chunk in file_store.open_movie("playlists", job["id"]):
            movie_file.write(chunk)
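
The S3 parameters above use the standard AWS environment variable names, so once the remote job exports them, a client library can pick them up without extra wiring. A sketch assuming the worker uses boto3 (not shown in the original code):

import os

import boto3

# Sketch of a worker-side client, assuming boto3. Credentials and region are
# read automatically from AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY and
# AWS_DEFAULT_REGION; only the custom endpoint must be passed explicitly.
s3_client = boto3.client("s3", endpoint_url=os.environ["S3_ENDPOINT"])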