Exemplo n.º 1
0
def run_job(_, config):
    """
    Pop one pending download and run its post-processing pipeline:
    convert -> upload -> tag.

    Args:
        _: Unused scheduler argument (jobs are invoked with (scheduler, config)).
        config: Application configuration forwarded to storage/processing helpers.

    Returns:
        5 when no download is queued (sentinel consumed by the scheduler side —
        TODO confirm its meaning); otherwise None.
    """
    db = DBStorage(config)
    download = db.pop_download()
    if download is None:
        # Nothing queued; bail out early.
        return 5

    associated_job = db.get_job(download.jobname)

    # Conversion happens first so the uploaded artifact is already in the
    # target format.
    new_path = process_job(associated_job.convert_to, download.filename, config)

    # PEP 8 rename: fileId -> file_id.
    file_id = upload_job(associated_job.upload, new_path, config)

    # Tagging needs the uploaded file's id, so it must run after the upload.
    tag_job(file_id, associated_job.tags, config)
Exemplo n.º 2
0
    def __init__(self, config):
        """Store config/DB handles and build the blocking scheduler."""
        self.config = config
        self.db = DBStorage(config)

        # Jobs to run on the regular interval trigger.
        self.jobs = self.JOB_LIST

        logger.debug("Starting Orchestration.")

        # NOTE(review): APScheduler 3.x expects plural `jobstores=`/`executors=`
        # dicts keyed by alias; the singular keywords below are likely ignored,
        # leaving the default job store and thread-pool executor in use —
        # confirm against the APScheduler version pinned for this project.
        self.sched = BlockingScheduler(
            jobstore=MemoryJobStore(),
            executor=ProcessPoolExecutor(5),
            job_defaults={
                "coalesce":
                True,  # Combine multiple waiting instances of the same job into one.
                "max_instances":
                1,  # Max concurrent instances of the same job; kept at 1 so uploads never overlap.
            },
        )
Exemplo n.º 3
0
class ChannelHandler(BaseHandler):
    """
    Handles creating channels.
    """

    def init_with_config(self, config: Configuration):
        """Attach the database storage used for channel records."""
        super().init_with_config(config)
        self.db = DBStorage(config)

    def init_args(self, subparser: _SubParsersAction):
        """Register the channel sub-command's CLI arguments."""
        parser = super().init_args(subparser)

        parser.add_argument(
            "action", help="Define what action to take.", choices=["list", "add"]
        )
        parser.add_argument("--name", action="store", help="Name of the channel")
        parser.add_argument("--ip", action="store", help="IP Address of the channel")

        # TODO: expose --teletext, --country, --timezone, --language and
        # --source once the backend supports them.

    def run(self, args: Namespace):
        """Dispatch the parsed arguments to the requested channel action."""
        if args.action == "add":
            self._add_channel(args)
        if args.action == "list":
            print(self.db.get_channels())

    def _add_channel(self, args: Namespace) -> None:
        """Validate the mandatory flags, then persist the channel."""
        if not args.name:
            logger.critical("--name is required. Try again.")
            return
        if not args.ip:
            logger.critical("--ip is required. Try again.")
            return
        self.db.add_channel(args.name, args.ip)
Exemplo n.º 4
0
    def init_with_config(self, config: Configuration):
        """Initialise the Drive backend, DB storage and search helper from *config*."""
        super().init_with_config(config)

        self.backend = DriveStorage(config)
        self.db = DBStorage(config)
        self.search = SearchHandler(config=config)
Exemplo n.º 5
0
class PermissionHandler(BaseHandler):
    """
    This class handles adding permissions to uploaded files.

    This has two main drawbacks as of now,
        1. No control over the tag types. User can upload any data as tag.
        2. Updating tags is not implemented.

    NOTE: Mechanism to tag file while uploading has not yet been implemented.
    """

    def init_with_config(self, config: Configuration):
        """Set up the Drive backend, DB storage and search helper."""
        super().init_with_config(config)

        self.backend = DriveStorage(config)
        self.db = DBStorage(config)
        self.search = SearchHandler(config=config)

    def init_args(self, subparser: _SubParsersAction) -> None:
        """Register the permission sub-command's CLI arguments."""
        parser = super().init_args(subparser)

        parser.add_argument(
            "action", help="Define what action to take.", choices=["list", "add"]
        )
        parser.add_argument(
            "--for_tags", help="Define which tags to add permissions for ", nargs="+"
        )
        parser.add_argument(
            "--share_with", help="email id of the person to share with."
        )
        parser.add_argument(
            "--not_persistent",
            action="store_true",
            help="If provided, future uploads wont be shared.",
        )

    def execute_command(self):
        # Intentionally a no-op: this handler is driven through run() only.
        pass

    def run(self, args: Namespace):
        """Add or list permissions according to the parsed CLI arguments.

        For "add": optionally persists the rule, then shares every matching
        uploaded file with the given e-mail as a reader.
        """
        if args.action == "add":
            if not args.for_tags:
                logger.critical("--for_tags is required. Try again.")
            elif not args.share_with:
                logger.critical("--share_with is required. Try again.")
            else:
                tags = args.for_tags
                email = args.share_with
                role = "reader"  # Shared users only get read access.

                # Persist the rule unless the user opted out, so future
                # uploads with these tags are shared automatically.
                if not args.not_persistent:
                    self.db.add_permissions(tags, email, role)
                # do_and=False presumably means OR-match across the tags —
                # confirm against SearchHandler.execute_command.
                response = self.search.execute_command(
                    name=None, tags=tags, do_and=False
                )

                for item in response:
                    # Renamed from `id` to avoid shadowing the builtin.
                    file_id = item[1]
                    self.backend.add_permissions_user(
                        fileid=file_id, email=email, role=role
                    )

        if args.action == "list":
            if args.for_tags is None:
                print(self.db.get_permissions())
            else:
                for tag in args.for_tags:
                    print(self.db.get_permissions(tag))
Exemplo n.º 6
0
class Server(object):
    """
    This is the main background Orchestration class. This will run every REFRESH_TIMER
    seconds and perform tasks. Each task is assumed to follow these guidelines:
        * Available in the JOB_LIST variable or is a recording job.
        * Independent from any other function in the list.
        * Takes the scheduler and config as the only arguments (as of now).
        * Raises anything when it fails.
        * Raises nothing for a successful run.

    In case a task fails, it logs appropriately. No mechanism (as yet) has been
    provided to log errors from the scheduler side. That duty lies with the task itself.

    The core of this server is a scheduler (APScheduler). It makes sure that
    jobs (functions) run at the specified REFRESH_TIMER. Event listeners are put
    in place to react to failures/successes.
    """
    # Regular (non-recording) jobs scheduled on the interval trigger.
    JOB_LIST = [run_job]
    # Interval, in seconds, between runs of the regular jobs.
    REFRESH_TIMER = 5

    def __init__(self, config):
        """Store config/DB handles and build the blocking scheduler."""
        self.config = config
        self.db = DBStorage(config)

        self.jobs = self.JOB_LIST

        logger.debug("Starting Orchestration.")

        # NOTE(review): APScheduler 3.x expects plural `jobstores=`/`executors=`
        # dicts keyed by alias; the singular keywords below are likely ignored,
        # leaving the default job store and thread-pool executor in use —
        # confirm against the APScheduler version pinned for this project.
        self.sched = BlockingScheduler(
            jobstore=MemoryJobStore(),
            executor=ProcessPoolExecutor(5),
            job_defaults={
                "coalesce":
                True,  # Combine multiple waiting instances of the same job into one.
                "max_instances":
                1,  # Max concurrent instances of the same job; kept at 1 so uploads never overlap.
            },
        )

    def add_regular_jobs(self):
        """Schedule every job in JOB_LIST to run every REFRESH_TIMER seconds."""
        for item in self.jobs:
            j = self.sched.add_job(
                item,
                args=[self.sched, self.config],
                trigger="interval",
                seconds=self.REFRESH_TIMER,
            )
            # critical level so the message is visible regardless of log config.
            logger.critical("Added job {}: {}".format(j.id, j.func))

    def add_recording_jobs(self):
        """Schedule one cron-triggered recording job per job stored in the DB."""
        job_list = self.db.get_job()

        for job in job_list:
            # `start` is stored as a 6-field cron string:
            # minute hour day month day_of_week year.
            cron = job.start.split()

            j = self.sched.add_job(
                record_video,
                args=[job, self.config],
                trigger="cron",
                minute=cron[0],
                hour=cron[1],
                day=cron[2],
                month=cron[3],
                day_of_week=cron[4],
                year=cron[5],
            )
            logger.critical("Added job {}: {}".format(j.id, j.func))

    @staticmethod
    def endjob_listener(event):
        """Log the outcome (failure/success + return value) of every finished job."""
        if event.exception:
            logger.critical("Job {}: FAILED".format(event.job_id))
        else:
            logger.critical("Job {}: SUCCEEDED with return value {}".format(
                event.job_id, event.retval))

    def run_server(self):
        """Register all jobs and block on the scheduler until interrupted."""
        self.add_regular_jobs()
        self.add_recording_jobs()

        self.sched.add_listener(self.endjob_listener,
                                EVENT_JOB_ERROR | EVENT_JOB_EXECUTED)

        try:
            self.sched.start()
        except KeyboardInterrupt:
            # ("recieved" typo lives in the runtime string; left untouched here.)
            logger.info("Interrupt recieved.")
            self.sched.shutdown()
            logger.debug("Orchestration shut down.")
Exemplo n.º 7
0
    def init_with_config(self, config: Configuration):
        """Create the DB storage handle used by this handler."""
        super().init_with_config(config)

        self.db = DBStorage(config)
Exemplo n.º 8
0
class JobHandler(BaseHandler):
    """
    Handles creating new jobs. Each job is defined, at minimum, by the channel name,
    start time of the job, and the duration of the recording.

    Additionally, one can specify what actions can be performed on the recorded items.
    These actions currently are `upload`, `convert-to` and `tag`.
    `upload` makes sure that the file is uploaded to google drive.
    `convert-to` converts the file from one format to another.
    `tag` add the specified tags to the uploaded file.

    No matter what order you specify, these actions are performed in the order
    of convert-to -> upload -> tag. If one action is not specified, others will
    still carry on with the exception of tag. If upload is not specified, tag will
    not be performed.
    """

    def init_with_config(self, config: Configuration):
        """Create the DB storage handle used for job bookkeeping."""
        super().init_with_config(config)

        self.db = DBStorage(config)

    def init_args(self, subparser: _SubParsersAction):
        """Register the job sub-command's CLI arguments."""
        parser = super().init_args(subparser)

        parser.add_argument(
            "action", help="Define what action to take.", choices=["list", "add"]
        )

        parser.add_argument("--channel", action="store", help="Name of the channel")
        parser.add_argument("--name", action="store", help="Name of the program")

        parser.add_argument(
            "--start", action="store", help="Start time of the job in cron format"
        )
        parser.add_argument(
            "--duration",
            action="store",
            help="How long do you want to record. In minutes",
            type=int,
        )

        parser.add_argument(
            "--upload",
            action="store_true",
            help="Set if you want to upload to Google Drive",
        )
        parser.add_argument(
            "--convert_to",
            action="store",
            help="Format you want to convert the file to.",
        )
        parser.add_argument(
            "--tags",
            action="store",
            nargs="*",
            # Fixed copy-pasted help text (previously repeated the --start help).
            help="Tags to add to the uploaded file",
        )

    def run(self, args: Namespace):
        """Validate the parsed CLI arguments and add or list jobs.

        Returns:
            -1 on any validation failure for "add"; otherwise None.
        """
        if args.action == "add":
            # The four core options are mandatory for a new job.
            if not all([args.channel, args.name, args.start, args.duration]):
                logger.critical(
                    "All of the following options are required: --channel, --name, --start, --duration"
                )
                return -1

            # The channel must already be registered.
            if len(self.db.get_channels(name=args.channel)) == 0:
                logger.critical(
                    "Provided channel name does not exist. Please provide a correct one."
                )
                return -1
            # Job names act as unique keys in the DB.
            if self.db.get_job(jobname=args.name) is not None:
                logger.critical(
                    "There is already a job by that name. Choose a different name."
                )
                return -1

            self.db.add_job(
                args.name,
                args.channel,
                args.start,
                args.duration,
                args.upload,
                args.convert_to,
                args.tags,
            )

        if args.action == "list":
            for items in self.db.get_job():
                print(items)