def run_master():  # pragma: no cover
    """Parse command line options, then run :func:`load_master` and the app.

    Database drop/create switches are applied before the application is
    started; ``--drop-all`` is only exposed when the app is in debug mode.
    """
    from pyfarm.master.application import app, api

    parser = ArgumentParser()
    if app.debug:
        # Destructive option, only made available while debugging.
        parser.add_argument(
            "--drop-all", "-D", action="store_true",
            help="drop the existing tables before starting")

    parser.add_argument(
        "--create-all", "-C", action="store_true",
        help="create all tables before starting")
    parser.add_argument("--confirm-drop")
    parser.add_argument(
        "--allow-agent-loopback-addresses", action="store_true")
    args = parser.parse_args()

    if app.debug and args.drop_all:
        db.drop_all()

    if args.allow_agent_loopback_addresses:
        config["allow_agents_from_loopback"] = True

    if args.create_all:
        db.create_all()

    load_setup(app)
    load_master(app, api)
    app.run(
        host=config.get("flask_listen_address"),
        debug=config.get("debug"))
class ValidatePriorityMixin(object):
    """
    Mixin which validates the ``priority`` and ``attempts`` columns of a
    model against the configured limits.

    ``priority`` must fall inside the inclusive range defined by the
    ``queue_min_priority`` and ``queue_max_priority`` configuration values
    and ``attempts`` may never be negative.  ``None`` is accepted for both
    columns.
    """
    MIN_PRIORITY = config.get("queue_min_priority")
    MAX_PRIORITY = config.get("queue_max_priority")

    # Fail fast at class-definition time if the configuration is
    # inconsistent.  The check is strict, so the two values may not be
    # equal either (the message previously claimed `<=` was allowed).
    if MAX_PRIORITY <= MIN_PRIORITY:
        raise AssertionError(
            "`queue_min_priority` must be < `queue_max_priority`")

    @validates("priority")
    def validate_priority(self, key, value):
        """Ensure ``value`` is ``None`` or within [MIN_PRIORITY, MAX_PRIORITY].

        :raises ValueError: if the value is outside the configured range
        """
        if value is None or self.MIN_PRIORITY <= value <= self.MAX_PRIORITY:
            return value

        err_args = (key, self.MIN_PRIORITY, self.MAX_PRIORITY, value)
        raise ValueError("%s must be between %s and %s, got %s instead" %
                         err_args)

    @validates("attempts")
    def validate_attempts(self, key, value):
        """Ensure the number of attempts is ``None`` or non-negative.

        :raises ValueError: if the value is less than zero
        """
        if value is None or value >= 0:
            return value

        raise ValueError("%s cannot be less than zero" % key)
# ---- Example #3 ----
def run_master():  # pragma: no cover
    """Runs :func:`load_master` then runs the application"""
    from pyfarm.master.application import app, api

    debug_mode = app.debug
    parser = ArgumentParser()
    if debug_mode:
        # Only expose the destructive flag while debugging.
        parser.add_argument("--drop-all", "-D", action="store_true",
                            help="drop the existing tables before starting")
    parser.add_argument("--create-all", "-C", action="store_true",
                        help="create all tables before starting")
    parser.add_argument("--confirm-drop")
    parser.add_argument("--allow-agent-loopback-addresses",
                        action="store_true")
    options = parser.parse_args()

    # Apply the database/configuration switches before the app starts.
    if debug_mode and options.drop_all:
        db.drop_all()
    if options.allow_agent_loopback_addresses:
        config["allow_agents_from_loopback"] = True
    if options.create_all:
        db.create_all()

    load_setup(app)
    load_master(app, api)
    app.run(host=config.get("flask_listen_address"),
            debug=config.get("debug"))
# ---- Example #4 ----
class JobTagRequirement(db.Model, UtilityMixins):
    """
    Model representing a dependency of a job on a tag

    If a job has a tag requirement, it will only run on agents that have that
    tag.
    """
    __tablename__ = config.get("table_job_tag_req")
    # A job may require a given tag at most once.
    __table_args__ = (UniqueConstraint("tag_id", "job_id"), )

    # Surrogate primary key.
    id = id_column()

    tag_id = db.Column(db.Integer,
                       db.ForeignKey("%s.id" % config.get("table_tag")),
                       nullable=False,
                       doc="Reference to the required tag")

    job_id = db.Column(IDTypeWork,
                       db.ForeignKey("%s.id" % config.get("table_job")),
                       nullable=False,
                       doc="Foreign key to :class:`Job.id`")

    negate = db.Column(
        db.Boolean,
        nullable=False,
        default=False,
        doc="If true, an agent that has this tag can not work on this job")

    # Deleting a job (or detaching a requirement from it) removes the
    # orphaned requirement rows as well.
    job = db.relationship("Job",
                          backref=db.backref("tag_requirements",
                                             lazy="dynamic",
                                             cascade="all, delete-orphan"))

    tag = db.relationship("Tag")
# ---- Example #5 ----
class GPU(db.Model, UtilityMixins, ReprMixin):
    """Model which stores the full name of a graphics card model."""
    __tablename__ = config.get("table_gpu")
    # Each graphics card model may only appear once.
    __table_args__ = (UniqueConstraint("fullname"), )

    # Surrogate primary key.
    id = id_column(db.Integer)

    fullname = db.Column(db.String(config.get("max_gpu_name_length")),
                         nullable=False,
                         doc="The full name of this graphics card model")
# ---- Example #6 ----
 def test_jobgroup_schema(self):
     """The /jobgroups/schema endpoint must match ``JobGroup.to_schema()``.

     Over the wire the user and main job type are identified by name
     rather than by id, so the expected dict is adjusted accordingly.
     """
     response = self.client.get("/api/v1/jobgroups/schema")
     self.assert_ok(response)
     schema = JobGroup.to_schema()
     # BUG FIX: the original `"******" % config.get(...)` had no format
     # placeholder and would raise TypeError; reconstructed from the
     # matching VARCHAR pattern used for `main_jobtype` below.
     schema["user"] = "VARCHAR(%s)" % config.get("max_username_length")
     del schema["user_id"]
     schema["main_jobtype"] = \
         "VARCHAR(%s)" % config.get("job_type_max_name_length")
     del schema["main_jobtype_id"]
     self.assertEqual(response.json, schema)
# ---- Example #7 ----
class AgentMacAddress(db.Model):
    """Model mapping an agent to one of its MAC addresses."""
    __tablename__ = config.get("table_agent_mac_address")
    # Each (agent, mac address) pair may only be stored once.
    __table_args__ = (UniqueConstraint("agent_id", "mac_address"), )

    # Part of the composite primary key; references the owning agent row.
    agent_id = db.Column(
        IDTypeAgent,
        db.ForeignKey("%s.id" % config.get("table_agent")),
        primary_key=True, nullable=False)
    # Part of the composite primary key; the MAC address itself.
    mac_address = db.Column(
        MACAddress,
        primary_key=True, nullable=False, autoincrement=False)
# ---- Example #8 ----
class TaskLog(db.Model, UtilityMixins, ReprMixin):
    """Table which represents a single task log entry"""
    __tablename__ = config.get("table_task_log")
    __table_args__ = (UniqueConstraint("identifier"),)

    # Surrogate primary key.
    id = id_column(db.Integer)

    identifier = db.Column(
        db.String(255),
        nullable=False,
        doc="The identifier for this log")

    agent_id = db.Column(
        IDTypeAgent,
        db.ForeignKey("%s.id" % config.get("table_agent")),
        nullable=True,
        doc="The agent this log was created on")

    created_on = db.Column(
        db.DateTime,
        default=datetime.utcnow,
        doc="The time when this log was created")

    #
    # Relationships
    #
    agent = db.relationship(
        "Agent",
        backref=db.backref("task_logs", lazy="dynamic"),
        # BUG FIX: the concatenated doc string was missing a space
        # ("TaskLog`and the").
        doc="Relationship between an :class:`TaskLog` "
            "and the :class:`pyfarm.models.Agent` it was "
            "created on")

    task_associations = db.relationship(
        TaskTaskLogAssociation, backref="log",
        doc="Relationship between tasks and their logs."
    )

    def num_queued_tasks(self):
        """Return the number of associated tasks with no state (queued)."""
        return TaskTaskLogAssociation.query.filter_by(
            log=self, state=None).count()

    def num_running_tasks(self):
        """Return the number of associated tasks in state ``RUNNING``."""
        return TaskTaskLogAssociation.query.filter_by(
            log=self, state=WorkState.RUNNING).count()

    def num_failed_tasks(self):
        """Return the number of associated tasks in state ``FAILED``."""
        return TaskTaskLogAssociation.query.filter_by(
            log=self, state=WorkState.FAILED).count()

    def num_done_tasks(self):
        """Return the number of associated tasks in state ``DONE``."""
        return TaskTaskLogAssociation.query.filter_by(
            log=self, state=WorkState.DONE).count()
# ---- Example #9 ----
class JobType(db.Model, UtilityMixins, ReprMixin):
    """
    Stores the unique information necessary to execute a task
    """
    __tablename__ = config.get("table_job_type")
    __table_args__ = (UniqueConstraint("name"), )
    REPR_COLUMNS = ("id", "name")

    # Surrogate primary key.
    id = id_column(IDTypeWork)

    name = db.Column(
        db.String(config.get("job_type_max_name_length")),
        nullable=False,
        doc="The name of the job type.  This can be either a human "
        "readable name or the name of the job type class itself.")

    description = db.Column(
        db.Text,
        nullable=True,
        doc="Human readable description of the job type.  This field is not "
        "required and is not directly relied upon anywhere.")

    success_subject = db.Column(
        db.Text,
        nullable=True,
        doc="The subject line to use for notifications in case of "
        "success.  Some substitutions, for example for the job title, "
        "are available.")

    success_body = db.Column(
        db.Text,
        nullable=True,
        # Fixed duplicated "in in" in the original doc string.
        doc="The email body to use for notifications in case of success.  "
        "Some substitutions, for example for the job title, are available.")

    fail_subject = db.Column(
        db.Text,
        nullable=True,
        doc="The subject line to use for notifications in case of failure.  "
        "Some substitutions, for example for the job title, are available.")

    fail_body = db.Column(
        db.Text,
        nullable=True,
        # Fixed duplicated "in in" and the wrong word "success" -- this
        # body is used for failure notifications.
        doc="The email body to use for notifications in case of failure.  "
        "Some substitutions, for example for the job title, are available.")

    @validates("name")
    def validate_name(self, key, value):
        """Reject empty job type names; all other values pass through."""
        if value == "":
            raise ValueError("Name cannot be empty")

        return value
# ---- Example #10 ----
class Tag(db.Model, UtilityMixins):
    """
    Model which provides tagging for :class:`.Job` and class:`.Agent` objects
    """
    __tablename__ = config.get("table_tag")
    # Tag values are unique across the table.
    __table_args__ = (UniqueConstraint("tag"), )

    # Surrogate primary key.
    id = id_column()

    tag = db.Column(db.String(config.get("max_tag_length")),
                    nullable=False,
                    doc="The actual value of the tag")
# ---- Example #11 ----
def create_app():
    """An entry point specifically for uWSGI or similar to use"""
    from pyfarm.master.application import app, api

    # Optionally reset and/or create the schema based on the dev
    # configuration flags (drop first, then create).
    for flag, action in (("dev_db_drop_all", db.drop_all),
                         ("dev_db_create_all", db.create_all)):
        if config.get(flag):
            action()

    load_setup(app)
    load_master(app, api)
    return app
def create_app():
    """Application factory used by uWSGI (or similar) deployments."""
    from pyfarm.master.application import app, api

    drop_requested = config.get("dev_db_drop_all")
    create_requested = config.get("dev_db_create_all")

    # Drop before create so both flags together rebuild the schema.
    if drop_requested:
        db.drop_all()
    if create_requested:
        db.create_all()

    load_setup(app)
    load_master(app, api)
    return app
# ---- Example #13 ----
class SoftwareVersion(db.Model, UtilityMixins):
    """
    Model to represent a version for a given software
    """
    __tablename__ = config.get("table_software_version")
    # A software may not have two identical versions or two versions
    # sharing the same rank.
    __table_args__ = (UniqueConstraint("software_id", "version"),
                      UniqueConstraint("software_id", "rank"))

    # Surrogate primary key.
    id = id_column()

    software_id = db.Column(db.Integer,
                            db.ForeignKey("%s.id" %
                                          config.get("table_software")),
                            nullable=False,
                            doc="The software this version belongs to")

    version = db.Column(
        db.String(config.get("max_tag_length")),
        default="any",
        nullable=False,
        doc="The version of the software.  This value does not "
        "follow any special formatting rules because the "
        "format depends on the 3rd party.")

    rank = db.Column(
        db.Integer,
        nullable=False,
        doc="The rank of this version relative to other versions of "
        "the same software. Used to determine whether a version "
        "is higher or lower than another.")

    default = db.Column(db.Boolean,
                        default=False,
                        nullable=False,
                        # Fixed missing space in the concatenated doc
                        # string ("registeredon new nodes").
                        doc="If true, this software version will be "
                        "registered on new nodes by default.")

    discovery_code = db.Column(
        db.UnicodeText,
        nullable=True,
        doc="Python code to discover if this software version is installed "
        "on a node")

    discovery_function_name = db.Column(
        db.String(config.get("max_discovery_function_name_length")),
        nullable=True,
        doc="The name of a function in `discovery_code` to call when "
        "checking for the presence of this software version on an agent.\n"
        "The function should return either a boolean (true if present, "
        "false if not) or a tuple of a boolean and a dict of named "
        "parameters describing this installation.")
# ---- Example #14 ----
class Role(db.Model):
    """
    Stores role information that can be used to give a user access
    to individual resources.
    """
    __tablename__ = config.get("table_role")

    id = db.Column(db.Integer, primary_key=True, nullable=False)

    active = db.Column(db.Boolean,
                       default=True,
                       doc="Enables or disables a role.  Disabling a role "
                       "will prevent any users of this role from accessing "
                       "PyFarm")

    name = db.Column(db.String(config.get("max_role_length")),
                     unique=True,
                     nullable=False,
                     doc="The name of the role")

    expiration = db.Column(
        db.DateTime,
        doc="Role expiration.  If this value is set then the role, and "
        "anyone assigned to it, will no longer be able to access "
        "PyFarm past the expiration.")

    description = db.Column(db.Text, doc="Human description of the role.")

    @classmethod
    def create(cls, name, description=None):
        """
        Creates a role by the given name or returns an existing
        role if it already exists.
        """
        # `name` may already be a Role instance, in which case there is
        # nothing to create or look up.
        if isinstance(name, Role):
            return name

        role = Role.query.filter_by(name=name).first()

        if role is None:
            # No role by this name yet; create and persist it.
            role = cls(name=name, description=description)
            db.session.add(role)
            db.session.commit()

        return role

    def is_active(self):
        """Return whether this role is enabled and, if an expiration is
        set, not yet expired (compared against ``datetime.utcnow()``)."""
        if self.expiration is None:
            return self.active
        return self.active and datetime.utcnow() < self.expiration
# ---- Example #15 ----
class JobTypeSoftwareRequirement(db.Model, UtilityMixins):
    """
    Model representing a dependency of a job on a software tag, with optional
    version constraints
    """
    __tablename__ = config.get("table_job_type_software_req")
    # A job type version may require a given software at most once.
    __table_args__ = (UniqueConstraint("software_id", "jobtype_version_id"), )

    # Part of the composite primary key together with jobtype_version_id.
    software_id = db.Column(db.Integer,
                            db.ForeignKey("%s.id" %
                                          config.get("table_software")),
                            primary_key=True,
                            doc="Reference to the required software")

    jobtype_version_id = db.Column(
        IDTypeWork,
        db.ForeignKey("%s.id" % config.get("table_job_type_version")),
        primary_key=True,
        doc="Foreign key to :class:`JobTypeVersion.id`")

    min_version_id = db.Column(
        db.Integer,
        db.ForeignKey("%s.id" % config.get("table_software_version")),
        nullable=True,
        doc="Reference to the minimum required version")

    max_version_id = db.Column(
        db.Integer,
        db.ForeignKey("%s.id" % config.get("table_software_version")),
        nullable=True,
        doc="Reference to the maximum required version")

    #
    # Relationships
    #
    # Deleting a job type version removes its orphaned requirements too.
    jobtype_version = db.relationship("JobTypeVersion",
                                      backref=db.backref(
                                          "software_requirements",
                                          lazy="dynamic",
                                          cascade="all, delete-orphan"))

    software = db.relationship("Software")

    # Both relationships target SoftwareVersion, so the foreign key for
    # each must be named explicitly.
    min_version = db.relationship("SoftwareVersion",
                                  foreign_keys=[min_version_id])

    max_version = db.relationship("SoftwareVersion",
                                  foreign_keys=[max_version_id])
# ---- Example #16 ----
    def rerun_failed(self):
        """
        Makes this job rerun all its failed tasks.  Tasks that are done or are
        currently running are left untouched
        """
        num_restarted = 0
        for task in self.tasks:
            if task.state == _WorkState.FAILED:
                # Reset the task to a queued state so the scheduler will
                # pick it up again.
                task.state = None
                task.agent = None
                task.failures = 0
                db.session.add(task)
                num_restarted += 1

        self.completion_notify_sent = False
        self.update_state()
        db.session.add(self)

        if config.get("enable_statistics"):
            task_event_count = TaskEventCount(
                job_queue_id=self.job_queue_id,
                num_restarted=num_restarted)
            task_event_count.time_start = datetime.utcnow()
            task_event_count.time_end = datetime.utcnow()
            # BUG FIX: the event count was previously committed without
            # ever being added to the session, so it was never persisted.
            db.session.add(task_event_count)
            db.session.commit()

        for child in self.children:
            child.rerun_failed()
class WorkStateChangedModel(db.Model, WorkStateChangedMixin):
    """Minimal model combining a work state with timing columns;
    presumably used to exercise :class:`WorkStateChangedMixin` in the
    test suite (table suffix ``_state_change_test``) -- TODO confirm."""
    __tablename__ = "%s_state_change_test" % config.get("table_prefix")
    id = db.Column(Integer, primary_key=True, autoincrement=True)
    state = db.Column(WorkStateEnum)
    attempts = db.Column(Integer, nullable=False, default=0)
    time_started = db.Column(DateTime)
    time_finished = db.Column(DateTime)
class ValidationModel(db.Model, ValidateWorkStateMixin, ValidatePriorityMixin):
    """Minimal model combining the state and priority validation mixins;
    presumably used by the test suite (table suffix
    ``_validation_mixin_test``) -- TODO confirm."""
    __tablename__ = "%s_validation_mixin_test" % config.get("table_prefix")
    # Required by ValidateWorkStateMixin to know the valid state values.
    STATE_ENUM = WorkState
    id = db.Column(Integer, primary_key=True, autoincrement=True)
    state = db.Column(WorkStateEnum)
    attempts = db.Column(Integer)
    priority = db.Column(Integer)
# ---- Example #19 ----
    def validate_resource(self, key, value):
        """
        Validation that ensures that the value provided for either
        :attr:`.ram` or :attr:`.cpus` is a valid value with a given range
        """
        assert isinstance(value, int), "%s must be an integer" % key
        # The allowed range comes from the agent_min_*/agent_max_* config.
        lower = config.get("agent_min_%s" % key)
        upper = config.get("agent_max_%s" % key)

        if not lower <= value <= upper:
            raise ValueError(
                "value for `%s` must be between %s and %s" % (
                    key, lower, upper))

        return value
# ---- Example #20 ----
def schema():
    """
    Returns the basic schema of :class:`.Agent`

    .. http:get:: /api/v1/pathmaps/schema HTTP/1.1

        **Request**

        .. sourcecode:: http

            GET /api/v1/pathmaps/schema HTTP/1.1
            Accept: application/json

        **Response**

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Content-Type: application/json

            {
                "id": "INTEGER",
                "path_linux": "VARCHAR(512)",
                "path_windows": "VARCHAR(512)",
                "path_osx": "VARCHAR(512)",
                "tag": "VARCHAR(64)"
            }

    :statuscode 200: no error
    """
    schema_out = PathMap.to_schema()
    # Clients identify the tag by value, not by foreign key.
    schema_out.pop("tag_id")
    schema_out["tag"] = "VARCHAR(%s)" % config.get("max_tag_length")
    return jsonify(schema_out)
# ---- Example #21 ----
class AgentCount(db.Model):
    """Statistics table recording how many agents were in each state
    (online, running, offline, disabled) at a given point in time."""
    __bind_key__ = 'statistics'  # stored in the separate statistics database
    __tablename__ = config.get("table_statistics_agent_count")

    counted_time = db.Column(
        db.DateTime,
        primary_key=True,
        nullable=False,
        autoincrement=False,
        doc="The point in time at which these counts were done")

    num_online = db.Column(
        db.Integer,
        nullable=False,
        doc="The number of agents that were in state `online` at counted_time")

    num_running = db.Column(
        db.Integer,
        nullable=False,
        doc="The number of agents that were in state `running` at counted_time"
    )

    num_offline = db.Column(
        db.Integer,
        nullable=False,
        doc="The number of agents that were in state `offline` at counted_time"
    )

    num_disabled = db.Column(
        db.Integer,
        nullable=False,
        doc="The number of agents that were in state `disabled` at "
        "counted_time")
# ---- Example #22 ----
    def rerun_failed(self):
        """
        Makes this job rerun all its failed tasks.  Tasks that are done or are
        currently running are left untouched
        """
        num_restarted = 0
        for task in self.tasks:
            if task.state == _WorkState.FAILED:
                # Requeue the task: clear its state, agent and failures.
                task.state = None
                task.agent = None
                task.failures = 0
                db.session.add(task)
                num_restarted += 1

        self.completion_notify_sent = False
        self.update_state()
        db.session.add(self)

        if config.get("enable_statistics"):
            task_event_count = TaskEventCount(job_queue_id=self.job_queue_id,
                                              num_restarted=num_restarted)
            task_event_count.time_start = datetime.utcnow()
            task_event_count.time_end = datetime.utcnow()
            # BUG FIX: without adding the new row to the session the
            # commit below never persisted the statistics entry.
            db.session.add(task_event_count)
            db.session.commit()

        for child in self.children:
            child.rerun_failed()
class TaskCount(db.Model):
    """Statistics table recording, per job queue, how many tasks were in
    each state (queued, running, done, failed) at a point in time."""
    __bind_key__ = 'statistics'  # stored in the separate statistics database
    __tablename__ = config.get("table_statistics_task_count")

    # Surrogate primary key.
    id = id_column(db.Integer)

    counted_time = db.Column(
        db.DateTime,
        nullable=False,
        default=datetime.utcnow,
        doc="The point in time at which these counts were done")

    # No foreign key reference, because this table is stored in a separate db
    # Code reading it will have to check for referential integrity manually.
    job_queue_id = db.Column(db.Integer,
                             nullable=True,
                             doc="ID of the jobqueue these stats refer to")

    total_queued = db.Column(db.Integer,
                             nullable=False,
                             doc="Number of queued tasks at `counted_time`")

    total_running = db.Column(db.Integer,
                              nullable=False,
                              doc="Number of running tasks at `counted_time`")

    total_done = db.Column(db.Integer,
                           nullable=False,
                           doc="Number of done tasks at `counted_time`")

    total_failed = db.Column(db.Integer,
                             nullable=False,
                             doc="Number of failed tasks at `counted_time`")
# ---- Example #24 ----
    def validate_resource(self, key, value):
        """
        Validation that ensures that the value provided for either
        :attr:`.ram` or :attr:`.cpus` is a valid value with a given range
        """
        assert isinstance(value, int), "%s must be an integer" % key
        minimum = config.get("agent_min_%s" % key)
        maximum = config.get("agent_max_%s" % key)

        # Reject anything outside the configured inclusive range.
        out_of_range = value < minimum or maximum < value
        if out_of_range:
            raise ValueError("value for `%s` must be between %s and %s"
                             % (key, minimum, maximum))

        return value
class MixinModel(db.Model, UtilityMixins):
    """Model combining assorted column types and self-referential
    relationships; presumably used to exercise :class:`UtilityMixins` in
    the test suite (table suffix ``_mixin_test``) -- TODO confirm."""
    __tablename__ = "%s_mixin_test" % config.get("table_prefix")
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    a = db.Column(db.Integer)
    b = db.Column(db.String(512))
    c = db.Column(IPv4Address)
    d = db.Column(db.Integer, nullable=False)
    # Self-referential many-to-many relationships through association
    # tables defined elsewhere in the file.
    e = db.relationship("MixinModel", secondary=MixinModelRelation1)
    f = db.relationship("MixinModel", secondary=MixinModelRelation2)
# ---- Example #26 ----
class PathMap(db.Model, ReprMixin, UtilityMixins):
    """
    Defines a table which is used for cross-platform
    file path mappings.

    Each row stores equivalent paths for Linux, Windows and OS X and may
    optionally be restricted to agents carrying a specific tag.
    """
    __tablename__ = config.get("table_path_map")

    # Surrogate primary key.
    id = id_column(db.Integer)

    path_linux = db.Column(
        db.String(config.get("max_path_length")),
        nullable=False,
        doc="The path on linux platforms")

    path_windows = db.Column(
        db.String(config.get("max_path_length")),
        nullable=False,
        doc="The path on Windows platforms")

    path_osx = db.Column(
        db.String(config.get("max_path_length")),
        nullable=False,
        doc="The path on Mac OS X platforms")

    tag_id = db.Column(
        db.Integer,
        db.ForeignKey("%s.id" % config.get("table_tag")),
        nullable=True,
        doc="The tag an agent needs to have for this path map "
            "to apply to it. "
            "If this is NULL, this path map applies to all "
            "agents, but is overridden by applying path maps "
            "that do specify a tag.")

    #
    # Relationships
    #
    tag = db.relationship(
        "Tag",
        backref=db.backref("path_maps", lazy="dynamic"),
        doc="Relationship attribute for the tag this path map "
            "applies to.")
class TypeModel(db.Model):
    """Model with one column per custom column type; presumably used by
    the test suite to round-trip the custom types (table suffix
    ``_test_types``) -- TODO confirm."""
    __tablename__ = "%s_test_types" % config.get("table_prefix")
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    ipv4 = db.Column(IPv4Address)
    mac = db.Column(MACAddress)
    json_dict = db.Column(JSONDict)
    json_list = db.Column(JSONList)
    agent_addr = db.Column(UseAgentAddressEnum)
    agent_state = db.Column(AgentStateEnum)
    work_state = db.Column(WorkStateEnum)
    uuid = db.Column(UUIDType)
class TaskEventCount(db.Model):
    """Statistics table counting task events (created, deleted, restarted,
    started, failed, done) per job queue within a time period."""
    __bind_key__ = 'statistics'  # stored in the separate statistics database
    __tablename__ = config.get("table_statistics_task_event_count")

    # Surrogate primary key.
    id = id_column(db.Integer)

    # Start/end of the time period these counts cover.
    time_start = db.Column(db.DateTime,
                           nullable=False,
                           default=datetime.utcnow)

    time_end = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)

    # No foreign key reference, because this table is stored in a separate db
    # Code reading it will have to check for referential integrity manually.
    job_queue_id = db.Column(db.Integer,
                             nullable=True,
                             doc="ID of the jobqueue these stats refer to")

    num_new = db.Column(
        db.Integer,
        nullable=False,
        default=0,
        doc="Number of tasks that were newly created during the time period")

    num_deleted = db.Column(
        db.Integer,
        nullable=False,
        default=0,
        doc="Number of tasks that were deleted during the time period")

    num_restarted = db.Column(
        db.Integer,
        nullable=False,
        default=0,
        doc="Number of tasks that were restarted during the time period")

    num_started = db.Column(
        db.Integer,
        nullable=False,
        default=0,
        doc="Number of tasks that work was started on during the time period")

    num_failed = db.Column(
        db.Integer,
        nullable=False,
        default=0,
        doc="Number of tasks that failed during the time period")

    num_done = db.Column(
        db.Integer,
        nullable=False,
        default=0,
        doc="Number of tasks that were finished successfully during the time "
        "period")
# ---- Example #29 ----
    def alter_frame_range(self, start, end, by):
        """
        Change this job's frame range to ``start``..``end`` stepping by
        ``by``.

        Tasks for frames outside the new range are deleted, tasks for
        frames already present are kept, and tasks are created for newly
        required frames.

        :raises ValueError: if ``end`` is smaller than ``start``
        """
        # We have to import this down here instead of at the top to break a
        # circular dependency between the modules
        from pyfarm.scheduler.tasks import delete_task

        if end < start:
            raise ValueError("`end` must be greater than or equal to `start`")

        self.by = by

        required_frames = []
        current_frame = start
        while current_frame <= end:
            required_frames.append(current_frame)
            current_frame += by

        existing_tasks = Task.query.filter_by(job=self).all()
        # BUG FIX: `frames_to_create` previously aliased `required_frames`,
        # so removing a frame for the first matching task also removed it
        # from the membership test below.  For tiled jobs (several tasks
        # per frame) every task after the first one for a frame was then
        # wrongly deleted.
        required = set(required_frames)
        frames_to_create = list(required_frames)
        num_created = 0
        for task in existing_tasks:
            if task.frame not in required:
                delete_task.delay(task.id)
            elif task.frame in frames_to_create:
                # This frame already has at least one task; no need to
                # create new ones for it.
                frames_to_create.remove(task.frame)

        for frame in frames_to_create:
            if self.num_tiles:
                # NOTE(review): this creates `num_tiles - 1` tasks per
                # frame -- confirm the off-by-one is intended.
                for tile in range_(self.num_tiles - 1):
                    num_created += 1
                    task = Task()
                    task.job = self
                    task.frame = frame
                    task.tile = tile
                    task.priority = self.priority
                    db.session.add(task)
            else:
                num_created += 1
                task = Task()
                task.job = self
                task.frame = frame
                task.priority = self.priority
                db.session.add(task)

        if frames_to_create:
            # New work was added; requeue the job unless it is running.
            if self.state != WorkState.RUNNING:
                self.state = None

        if config.get("enable_statistics"):
            task_event_count = TaskEventCount(num_new=num_created,
                                              job_queue_id=self.job_queue_id)
            task_event_count.time_start = datetime.utcnow()
            task_event_count.time_end = datetime.utcnow()
            db.session.add(task_event_count)
# ---- Example #30 ----
class JobNotifiedUser(db.Model):
    """
    Defines the table containing users to be notified of certain
    events pertaining to jobs.
    """
    __tablename__ = config.get("table_job_notified_users")

    # Part of the composite primary key together with job_id.
    user_id = db.Column(db.Integer,
                        db.ForeignKey("%s.id" % config.get("table_user")),
                        primary_key=True,
                        doc="The id of the user to be notified")

    job_id = db.Column(IDTypeWork,
                       db.ForeignKey("%s.id" % config.get("table_job")),
                       primary_key=True,
                       doc="The id of the associated job")

    on_success = db.Column(
        db.Boolean,
        nullable=False,
        default=True,
        doc="True if a user should be notified on successful "
        "completion of a job")

    on_failure = db.Column(
        db.Boolean,
        nullable=False,
        default=True,
        doc="True if a user should be notified of a job's failure")

    on_deletion = db.Column(
        db.Boolean,
        nullable=False,
        default=False,
        doc="True if a user should be notified on deletion of "
        "a job")

    user = db.relationship("User",
                           backref=db.backref("subscribed_jobs",
                                              lazy="dynamic"))
# ---- Example #31 ----
    def alter_frame_range(self, start, end, by):
        """
        Change this job's frame range to ``start``..``end`` stepping by
        ``by``, deleting tasks outside the new range, keeping existing
        in-range tasks and creating tasks for newly required frames.

        :raises ValueError: if ``end`` is smaller than ``start``
        """
        # We have to import this down here instead of at the top to break a
        # circular dependency between the modules
        from pyfarm.scheduler.tasks import delete_task

        if end < start:
            raise ValueError("`end` must be greater than or equal to `start`")

        self.by = by

        required_frames = []
        current_frame = start
        while current_frame <= end:
            required_frames.append(current_frame)
            current_frame += by

        existing_tasks = Task.query.filter_by(job=self).all()
        # BUG FIX: `frames_to_create` used to be the SAME list object as
        # `required_frames`; removing a frame below therefore also changed
        # the membership test, causing extra tasks on the same frame (for
        # tiled jobs) to be deleted by mistake.
        required = set(required_frames)
        frames_to_create = list(required_frames)
        num_created = 0
        for task in existing_tasks:
            if task.frame not in required:
                delete_task.delay(task.id)
            elif task.frame in frames_to_create:
                # Frame is already covered by an existing task.
                frames_to_create.remove(task.frame)

        for frame in frames_to_create:
            if self.num_tiles:
                # NOTE(review): `num_tiles - 1` tasks are created per
                # frame -- confirm the off-by-one is intended.
                for tile in range_(self.num_tiles - 1):
                    num_created += 1
                    task = Task()
                    task.job = self
                    task.frame = frame
                    task.tile = tile
                    task.priority = self.priority
                    db.session.add(task)
            else:
                num_created += 1
                task = Task()
                task.job = self
                task.frame = frame
                task.priority = self.priority
                db.session.add(task)

        if frames_to_create:
            # New tasks exist; requeue the job unless it is running.
            if self.state != WorkState.RUNNING:
                self.state = None

        if config.get("enable_statistics"):
            task_event_count = TaskEventCount(num_new=num_created,
                                              job_queue_id=self.job_queue_id)
            task_event_count.time_start = datetime.utcnow()
            task_event_count.time_end = datetime.utcnow()
            db.session.add(task_event_count)
# ---- Example #32 ----
def get_api_blueprint(url_prefix=None):
    """
    Build and return the :class:`.Blueprint` instance used for routing api
    requests.

    :param string url_prefix:
        The url prefix for the api such as ``/api/v1``.  If not provided then
        value will be derived from the `api_prefix` configuration variable.
    """
    prefix = config.get("api_prefix") if url_prefix is None else url_prefix
    return Blueprint("api", "pyfarm.master.api", url_prefix=prefix)
Example #33
0
def get_api_blueprint(url_prefix=None):
    """
    Construct the api :class:`.Blueprint` for request routing.

    :param string url_prefix:
        The url prefix for the api such as ``/api/v1``.  If not provided then
        value will be derived from the `api_prefix` configuration variable.
    """
    # Fall back to the configured prefix when the caller did not supply one.
    if url_prefix is None:
        url_prefix = config.get("api_prefix")
    blueprint = Blueprint("api", "pyfarm.master.api", url_prefix=url_prefix)
    return blueprint
def schema():
    """
    Returns the basic schema of :class:`.JobGroup`

    .. http:get:: /api/v1/jobgroups/schema HTTP/1.1

        **Request**

        .. sourcecode:: http

            GET /api/v1/jobgroups/schema HTTP/1.1
            Accept: application/json

        **Response**

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Content-Type: application/json

            {
                "main_jobtype": "VARCHAR(64)",
                "title": "VARCHAR(255)",
                "user": "VARCHAR(255)",
                "id": "INTEGER"
            }

    :statuscode 200: no error
    """
    schema_dict = JobGroup.to_schema()

    # In the database, we are storing the user by id, but over the wire, we are
    # using the username to identify the user instead.
    # BUGFIX: the original applied `%` to a string with no conversion
    # specifier, which raises TypeError at request time; mirror the
    # `main_jobtype` line below and render a VARCHAR type string.
    schema_dict["user"] = "VARCHAR(%s)" % config.get("max_username_length")
    del schema_dict["user_id"]
    schema_dict["main_jobtype"] = \
        "VARCHAR(%s)" % config.get("job_type_max_name_length")
    del schema_dict["main_jobtype_id"]
    return jsonify(schema_dict), OK
Example #35
0
def schema():
    """
    Returns the basic schema of :class:`.JobGroup`

    .. http:get:: /api/v1/jobgroups/schema HTTP/1.1

        **Request**

        .. sourcecode:: http

            GET /api/v1/jobgroups/schema HTTP/1.1
            Accept: application/json

        **Response**

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Content-Type: application/json

            {
                "main_jobtype": "VARCHAR(64)",
                "title": "VARCHAR(255)",
                "user": "VARCHAR(255)",
                "id": "INTEGER"
            }

    :statuscode 200: no error
    """
    schema_dict = JobGroup.to_schema()

    # In the database, we are storing the user by id, but over the wire, we are
    # using the username to identify the user instead.
    # BUGFIX: `"******" % ...` has no conversion specifier and raises
    # TypeError ("not all arguments converted"); use the same VARCHAR
    # rendering as `main_jobtype` below.
    schema_dict["user"] = "VARCHAR(%s)" % config.get("max_username_length")
    del schema_dict["user_id"]
    schema_dict["main_jobtype"] = "VARCHAR(%s)" % config.get("job_type_max_name_length")
    del schema_dict["main_jobtype_id"]
    return jsonify(schema_dict), OK
Example #36
0
class JobGroup(db.Model, UtilityMixins):
    """
    Used to group jobs together for better presentation in the UI
    """
    # Table name comes from configuration so deployments can prefix/rename.
    __tablename__ = config.get("table_job_group")

    # Primary key; IDTypeWork matches the id type used by other work tables.
    id = id_column(IDTypeWork)

    title = db.Column(
        db.String(config.get("max_jobgroup_name_length")),
        nullable=False,
        doc="The title of the job group's name")

    # Foreign key to the jobtype table (denormalized for display/filtering).
    main_jobtype_id = db.Column(
        IDTypeWork,
        db.ForeignKey("%s.id" % config.get("table_job_type")),
        nullable=False,
        doc="ID of the jobtype of the main job in this "
            "group. Purely for display and filtering.")

    # Nullable: a job group may have no owning user.
    user_id = db.Column(
        db.Integer,
        db.ForeignKey("%s.id" % config.get("table_user")),
        doc="The id of the user who owns these jobs")

    #
    # Relationships
    #
    main_jobtype = db.relationship(
        "JobType",
        backref=db.backref("jobgroups", lazy="dynamic"),
        doc="The jobtype of the main job in this group")

    user = db.relationship(
        "User",
        backref=db.backref("jobgroups", lazy="dynamic"),
        doc="The user who owns these jobs")
Example #37
0
class Software(db.Model, UtilityMixins):
    """
    Model to represent a versioned piece of software that can be present on an
    agent and may be depended on by a job and/or jobtype through the appropriate
    SoftwareRequirement table
    """
    # Table name is configurable; software names must be unique.
    __tablename__ = config.get("table_software")
    __table_args__ = (UniqueConstraint("software"), )

    id = id_column()

    software = db.Column(db.String(config.get("max_tag_length")),
                         nullable=False,
                         doc="The name of the software")

    #
    # Relationships
    #
    # Versions are ordered by their explicit rank column and are deleted
    # along with the software row (delete-orphan cascade).
    versions = db.relationship("SoftwareVersion",
                               backref=db.backref("software"),
                               lazy="dynamic",
                               order_by="asc(SoftwareVersion.rank)",
                               cascade="all, delete-orphan",
                               doc="All known versions of this software")
Example #38
0
class TaskTaskLogAssociation(db.Model):
    """Stores an association between the task table and a task log"""
    __tablename__ = config.get("table_task_log_assoc")
    # Composite primary key: a log may be associated with the same task
    # once per attempt number.
    __table_args__ = (
        PrimaryKeyConstraint("task_log_id", "task_id", "attempt"),)

    # Rows disappear automatically when the referenced log or task is
    # deleted (ON DELETE CASCADE on both foreign keys).
    task_log_id = db.Column(
        db.Integer,
        db.ForeignKey(
            "%s.id" % config.get("table_task_log"), ondelete="CASCADE"),
        doc="The ID of the task log")

    task_id = db.Column(
        IDTypeWork,
        db.ForeignKey("%s.id" % config.get("table_task"), ondelete="CASCADE"),
        doc="The ID of the job a task log is associated with")

    attempt = db.Column(
        db.Integer,
        autoincrement=False,
        doc="The attempt number for the given task log")

    state = db.Column(
        WorkStateEnum,
        nullable=True,
        doc="The state of the work being performed")

    #
    # Relationships
    #
    task = db.relationship(
        "Task",
        backref=db.backref(
            "log_associations",
            lazy="dynamic",
            passive_deletes=True))
Example #39
0
from pyfarm.models.jobtype import JobType, JobTypeVersion
from pyfarm.models.task import Task

# Python 2/3 compatibility shim: prefer the lazy `xrange` where it exists
# (Python 2) and fall back to the built-in `range` on Python 3.
try:
    # pylint: disable=undefined-variable
    range_ = xrange
except NameError:
    range_ = range

# Public API of this module.
__all__ = ("Job",)

# Module-level logger for the job model.
logger = getLogger("models.job")


JobTagAssociation = db.Table(
    config.get("table_job_tag_assoc"),
    db.metadata,
    db.Column(
        "job_id",
        IDTypeWork,
        db.ForeignKey("%s.id" % config.get("table_job")),
        primary_key=True,
        doc="The id of the job associated with this task",
    ),
    db.Column(
        "tag_id",
        db.Integer,
        db.ForeignKey("%s.id" % config.get("table_tag")),
        primary_key=True,
        doc="The id of the tag being associated with the job",
    ),
Example #40
0
def jobs():
    """
    Render the jobs listing page of the user interface.

    Builds one aggregate subquery per task state (queued/running/done/failed)
    plus child, blocker and active-agent counts, joins them onto the jobs
    table, applies the filters supplied via ``request.args`` (tags, states,
    title, hidden/blocked flags, users, queues, priorities, jobtypes),
    applies ordering and pagination, and renders ``jobs.html``.

    Returns ``(rendered_template, BAD_REQUEST)`` when an unknown order key
    or order direction is requested.
    """
    # Per-state task counts, grouped by job.  `Task.state == None` is the
    # SQLAlchemy spelling for "queued" (state column IS NULL).
    queued_count_query = db.session.query(
        Task.job_id, func.count('*').label('t_queued')).\
            filter(Task.state == None).group_by(Task.job_id).subquery()
    running_count_query = db.session.query(
        Task.job_id, func.count('*').label('t_running')).\
            filter(Task.state == WorkState.RUNNING).\
                group_by(Task.job_id).subquery()
    done_count_query = db.session.query(
        Task.job_id, func.count('*').label('t_done')).\
            filter(Task.state == WorkState.DONE).\
                group_by(Task.job_id).subquery()
    failed_count_query = db.session.query(
        Task.job_id, func.count('*').label('t_failed')).\
            filter(Task.state == WorkState.FAILED).\
                group_by(Task.job_id).subquery()
    # Dependency counts: children per parent and unfinished blockers per child.
    child_count_query = db.session.query(
        JobDependency.c.parentid, func.count('*').label('child_count')).\
                group_by(JobDependency.c.parentid).subquery()
    blocker_count_query = db.session.query(
        JobDependency.c.childid, func.count('*').label('blocker_count')).\
            join(Job, Job.id == JobDependency.c.parentid).\
                filter(or_(Job.state == None, Job.state != WorkState.DONE)).\
                    group_by(JobDependency.c.childid).subquery()
    # Number of distinct non-offline agents with queued or running tasks.
    agent_count_query = db.session.query(
        Task.job_id, func.count(distinct(Task.agent_id)).label('agent_count')).\
            filter(Task.agent_id != None, or_(Task.state == None,
                                              Task.state == WorkState.RUNNING),
                   Task.agent.has(Agent.state != AgentState.OFFLINE)).\
                group_by(Task.job_id).subquery()

    # Jobs with all aggregates coalesced to 0 when no matching rows exist.
    jobs_query = db.session.query(Job,
                                  func.coalesce(
                                      queued_count_query.c.t_queued,
                                      0).label('t_queued'),
                                  func.coalesce(
                                      running_count_query.c.t_running,
                                      0).label('t_running'),
                                  func.coalesce(
                                      done_count_query.c.t_done,
                                      0).label('t_done'),
                                  func.coalesce(
                                      failed_count_query.c.t_failed,
                                      0).label('t_failed'),
                                  User.username,
                                  JobType.name.label('jobtype_name'),
                                  JobType.id.label('jobtype_id'),
                                  JobQueue.fullpath.label('jobqueue_path'),
                                  func.coalesce(
                                      child_count_query.c.child_count,
                                      0).label('child_count'),
                                  func.coalesce(
                                      blocker_count_query.c.blocker_count,
                                      0).label('blocker_count'),
                                  func.coalesce(
                                      agent_count_query.c.agent_count,
                                      0).label('agent_count')).\
        join(JobTypeVersion, Job.jobtype_version_id == JobTypeVersion.id).\
        join(JobType, JobTypeVersion.jobtype_id == JobType.id).\
        outerjoin(JobQueue, Job.job_queue_id == JobQueue.id).\
        outerjoin(queued_count_query, Job.id == queued_count_query.c.job_id).\
        outerjoin(running_count_query, Job.id == running_count_query.c.job_id).\
        outerjoin(done_count_query, Job.id == done_count_query.c.job_id).\
        outerjoin(failed_count_query, Job.id == failed_count_query.c.job_id).\
        outerjoin(User, Job.user_id == User.id).\
        outerjoin(child_count_query, Job.id == child_count_query.c.parentid).\
        outerjoin(blocker_count_query, Job.id == blocker_count_query.c.childid).\
        outerjoin(agent_count_query, Job.id == agent_count_query.c.job_id)

    # `filters` records every applied filter so the template can re-render
    # the form with the current selections.
    filters = {}
    if "tags" in request.args:
        filters["tags"] = request.args.get("tags")
        tags = request.args.get("tags").split(" ")
        tags = [x for x in tags if not x == ""]
        for tag in tags:
            jobs_query = jobs_query.filter(Job.tags.any(Tag.tag == tag))

    filters["state_paused"] = ("state_paused" in request.args and
                               request.args["state_paused"].lower() == "true")
    filters["state_queued"] = ("state_queued" in request.args and
                               request.args["state_queued"].lower() == "true")
    filters["state_running"] = ("state_running" in request.args and
                                request.args["state_running"].lower() == "true")
    filters["state_done"] = ("state_done" in request.args and
                             request.args["state_done"].lower() == "true")
    filters["state_failed"] = ("state_failed" in request.args and
                               request.args["state_failed"].lower() == "true")
    no_state_filters = True
    if (filters["state_paused"] or
        filters["state_queued"] or
        filters["state_running"] or
        filters["state_done"] or
        filters["state_failed"]):
        no_state_filters = False
        wanted_states = []
        if filters["state_paused"]:
            wanted_states.append(WorkState.PAUSED)
        if filters["state_running"]:
            wanted_states.append(WorkState.RUNNING)
        if filters["state_done"]:
            wanted_states.append(WorkState.DONE)
        if filters["state_failed"]:
            wanted_states.append(WorkState.FAILED)
        if filters["state_queued"]:
            # "queued" is state IS NULL, so it needs an explicit OR clause.
            jobs_query = jobs_query.filter(or_(
                Job.state == None,
                Job.state.in_(wanted_states)))
        else:
            jobs_query = jobs_query.filter(Job.state.in_(wanted_states))

    if "title" in request.args:
        title = request.args.get("title")
        filters["title"] = title
        if title != "":
            # Use a case-insensitive regex when the backend supports it,
            # otherwise fall back to an ILIKE substring match.
            if config.get("support_sql_regex"):
                jobs_query = jobs_query.filter(
                    Job.title.op("~*")("%s" % title))
            else:
                jobs_query = jobs_query.filter(
                    Job.title.ilike("%%%s%%" % title))

    filters["hidden_filter"] = ("hidden_filter" in request.args and
                                request.args["hidden_filter"].lower() == "true")
    filters["hidden"] = False
    filters["not_hidden"] = True

    if filters["hidden_filter"]:
        filters["hidden"] = ("hidden" in request.args and
                             request.args["hidden"].lower() == "true")
        filters["not_hidden"] = ("not_hidden" in request.args and
                                 request.args["not_hidden"].lower() == "true")
    if not filters["hidden"]:
        jobs_query = jobs_query.filter(Job.hidden != True)
    if not filters["not_hidden"]:
        jobs_query = jobs_query.filter(Job.hidden != False)

    filters["blocked_filter"] = ("blocked_filter" in request.args and
                                 request.args["blocked_filter"].lower() ==
                                    "true")
    filters["blocked"] = True
    filters["not_blocked"] = True

    if filters["blocked_filter"]:
        filters["blocked"] = ("blocked" in request.args and
                              request.args["blocked"].lower() == "true")
        filters["not_blocked"] = ("not_blocked" in request.args and
                                  request.args["not_blocked"].lower() == "true")
    # A job is "blocked" when the blocker-count subquery produced a row.
    if not filters["blocked"]:
        jobs_query = jobs_query.filter(
            blocker_count_query.c.blocker_count == None)
    if not filters["not_blocked"]:
        jobs_query = jobs_query.filter(
            blocker_count_query.c.blocker_count != None)

    filters["no_user"] = ("no_user" in request.args and
                          request.args["no_user"].lower() == "true")
    if "u" in request.args or filters["no_user"]:
        user_ids = request.args.getlist("u")
        user_ids = [int(x) for x in user_ids]
        if filters["no_user"]:
            jobs_query = jobs_query.filter(or_(
                Job.user_id.in_(user_ids),
                Job.user_id == None))
        else:
            jobs_query = jobs_query.filter(Job.user_id.in_(user_ids))
        filters["u"] = user_ids

    filters["no_queue"] = ("no_queue" in request.args and
                           request.args["no_queue"].lower() == "true")
    if "q" in request.args or filters["no_queue"]:
        jobqueue_ids = request.args.getlist("q")
        jobqueue_ids = [int(x) for x in jobqueue_ids]
        if filters["no_queue"]:
            jobs_query = jobs_query.filter(or_(
                Job.job_queue_id.in_(jobqueue_ids),
                Job.job_queue_id == None))
        else:
            jobs_query = jobs_query.filter(JobQueue.id.in_(jobqueue_ids))
        filters["q"] = jobqueue_ids

    if "p" in request.args:
        priorities = request.args.getlist("p")
        priorities = [int(x) for x in priorities]
        jobs_query = jobs_query.filter(Job.priority.in_(priorities))
        filters["p"] = priorities

    if "jt" in request.args:
        jobtype_ids = request.args.getlist("jt")
        jobtype_ids = [int(x) for x in jobtype_ids]
        jobs_query = jobs_query.filter(JobType.id.in_(jobtype_ids))
        filters["jt"] = jobtype_ids

    order_dir = "desc"
    order_by = "time_submitted"
    if "order_by" in request.args:
        order_by = request.args.get("order_by")
    if order_by not in ["title", "state", "time_submitted", "t_queued",
                        "t_running", "t_failed", "t_done", "username",
                        "jobtype_name", "agent_count", "priority", "weight",
                        "jobqueue_path"]:
        # BUGFIX: the error message previously omitted 'jobtype_name' even
        # though the whitelist above accepts it.
        return (render_template(
            "pyfarm/error.html",
            error="Unknown order key %r. Options are 'title', 'state', "
                  "'time_submitted', 't_queued', 't_running', 't_failed', "
                  "'t_done', 'username', 'jobtype_name', 'agent_count', "
                  "'priority', 'weight' or 'jobqueue_path'" % order_by),
            BAD_REQUEST)
    if "order_dir" in request.args:
        order_dir = request.args.get("order_dir")
        if order_dir not in ["asc", "desc"]:
            return (render_template(
            "pyfarm/error.html",
            error="Unknown order direction %r. Options are 'asc' or 'desc'" %
                  order_dir),
            BAD_REQUEST)
    # Columns on the Job model are ordered through the ORM attribute;
    # aggregate labels fall through to the string-based ORDER BY below.
    if order_by == "time_submitted" and order_dir == "desc":
        jobs_query = jobs_query.order_by(desc(Job.time_submitted))
    elif order_by == "time_submitted" and order_dir == "asc":
        jobs_query = jobs_query.order_by(asc(Job.time_submitted))
    elif order_by == "state" and order_dir == "desc":
        jobs_query = jobs_query.order_by(desc(Job.state))
    elif order_by == "state" and order_dir == "asc":
        jobs_query = jobs_query.order_by(asc(Job.state))
    elif order_by == "weight" and order_dir == "asc":
        jobs_query = jobs_query.order_by(asc(Job.weight))
    elif order_by == "weight" and order_dir == "desc":
        jobs_query = jobs_query.order_by(desc(Job.weight))
    elif order_by == "priority" and order_dir == "asc":
        jobs_query = jobs_query.order_by(asc(Job.priority))
    elif order_by == "priority" and order_dir == "desc":
        jobs_query = jobs_query.order_by(desc(Job.priority))
    else:
        jobs_query = jobs_query.order_by("%s %s" % (order_by, order_dir))

    # Secondary sort for a stable, deterministic ordering.
    jobs_query = jobs_query.order_by(Job.id)

    # Summary counts are taken BEFORE pagination is applied.
    jobs_count = jobs_query.count()
    queued_jobs_count = jobs_query.filter(Job.state == None).count()
    running_jobs_count = jobs_query.filter(
        Job.state == WorkState.RUNNING).count()
    failed_jobs_count = jobs_query.filter(Job.state == WorkState.FAILED).count()
    done_jobs_count = jobs_query.filter(Job.state == WorkState.DONE).count()

    per_page = int(request.args.get("per_page", 100))
    page = int(request.args.get("page", 1))
    filters["per_page"] = per_page
    filters["page"] = page
    num_pages = 1
    all_pages = []
    if per_page > 0:
        jobs_query = jobs_query.offset((page - 1) * per_page).limit(per_page)
        num_pages = int(jobs_count / per_page)
        if jobs_count % per_page > 0:
            num_pages = num_pages + 1
        all_pages = range(0, num_pages)

    jobs = jobs_query.all()
    users_query = User.query.order_by(User.username)

    jobtypes_query = JobType.query

    # Map job id -> list of tag strings for display in the listing.
    tags_by_job_query = db.session.query(JobTagAssociation.c.job_id, Tag.tag).\
        join(Tag, JobTagAssociation.c.tag_id==Tag.id).all()
    tags_by_job_id = {}
    for association in tags_by_job_query:
        if association[0] not in tags_by_job_id:
            tags_by_job_id[association[0]] = [association[1]]
        else:
            tags_by_job_id[association[0]] += [association[1]]

    jobqueues = JobQueue.query.order_by(JobQueue.fullpath).all()

    available_priorities = db.session.query(distinct(Job.priority)).all()
    available_priorities = set(x[0] for x in available_priorities)

    filters_and_order = filters.copy()
    filters_and_order.update({"order_by": order_by, "order_dir": order_dir})
    filters_and_order_wo_pagination = filters_and_order.copy()
    del filters_and_order_wo_pagination["per_page"]
    del filters_and_order_wo_pagination["page"]
    return render_template("pyfarm/user_interface/jobs.html",
                           jobs=jobs, filters=filters, order_by=order_by,
                           order_dir=order_dir,
                           order={"order_by": order_by, "order_dir": order_dir},
                           no_state_filters=no_state_filters, users=users_query,
                           filters_and_order=filters_and_order,
                           jobtypes=jobtypes_query,
                           tags_by_job_id=tags_by_job_id, jobs_count=jobs_count,
                           all_pages=all_pages, num_pages=num_pages,
                           filters_and_order_wo_pagination=\
                               filters_and_order_wo_pagination,
                           jobqueues=jobqueues,
                           queued_jobs_count=queued_jobs_count,
                           running_jobs_count=running_jobs_count,
                           failed_jobs_count=failed_jobs_count,
                           done_jobs_count=done_jobs_count,
                           priorities=available_priorities)
Example #41
0
        config["allow_agents_from_loopback"] = True

    if parsed.create_all:
        db.create_all()

    load_setup(app)
    load_master(app, api)
    app.run(
        host=config.get("flask_listen_address"),
        debug=config.get("debug")
    )


def create_app():
    """An entry point specifically for uWSGI or similar to use"""
    from pyfarm.master.application import app, api

    # Development-only helpers: optionally drop and/or (re)create the
    # database schema before the application starts serving.
    for flag, action in (("dev_db_drop_all", db.drop_all),
                         ("dev_db_create_all", db.create_all)):
        if config.get(flag):
            action()

    load_setup(app)
    load_master(app, api)
    return app


# When `instance_application` is configured, build the WSGI application
# object at import time so servers like uWSGI can pick up `app` directly.
if config.get("instance_application"):
    app = create_app()
Example #42
0
def load_user_interface(app_instance):
    from pyfarm.master.user_interface.agents import (
        agents, single_agent, delete_single_agent, agent_add_software,
        agent_delete_software, restart_single_agent, restart_multiple_agents,
        update_notes_for_agent, update_tags_in_agent,
        check_software_in_single_agent, disable_multiple_agents,
        enable_multiple_agents, disable_single_agent, enable_single_agent,
        delete_multiple_agents)
    from pyfarm.master.user_interface.jobs import (
        jobs, delete_single_job, rerun_single_job, single_job, pause_single_job,
        unpause_single_job, alter_frames_in_single_job,
        alter_scheduling_parameters_for_job, update_notes_for_job,
        update_tags_in_job, rerun_single_task, add_notified_user_to_job,
        remove_notified_user_from_job, upgrade_job_to_latest_jobtype_version,
        rerun_failed_in_job, alter_autodeletion_for_job, rerun_multiple_jobs,
        rerun_failed_in_multiple_jobs, pause_multiple_jobs,
        unpause_multiple_jobs, delete_multiple_jobs, move_multiple_jobs,
        set_prio_weight_on_jobs, add_tag_on_jobs, remove_tag_from_jobs,
        update_tag_requirements_in_job, add_tag_requirement_on_jobs,
        remove_tag_requirement_from_jobs)
    from pyfarm.master.user_interface.jobqueues import (
        jobqueues, jobqueue_create, jobqueue, delete_jobqueue)
    from pyfarm.master.user_interface.jobtypes import (
        jobtypes, jobtype, remove_jobtype_software_requirement,
        add_jobtype_software_requirement, remove_jobtype, create_jobtype,
        update_jobtype_notification_templates)
    from pyfarm.master.user_interface.logs_in_task import logs_in_task
    from pyfarm.master.user_interface.software import (
        software, software_item, update_version_rank, remove_software_version,
        add_software_version, add_software, remove_software,
        update_version_default_status)
    from pyfarm.master.user_interface.software_version import software_version
    from pyfarm.master.user_interface.jobgroups import jobgroups
    from pyfarm.master.user_interface.statistics.index import statistics_index
    from pyfarm.master.user_interface.statistics.agent_counts import (
        agent_counts)
    from pyfarm.master.user_interface.statistics.task_events import (
        task_events)

    farm_name = config.get("farm_name")
    app_instance.jinja_env.globals.update({"farm_name": farm_name})
    app_instance.jinja_env.filters["timedelta_format"] = timedelta_format

    app_instance.add_url_rule("/agents/", "agents_index_ui", agents,
                              methods=("GET", ))
    app_instance.add_url_rule("/agents/<uuid:agent_id>/delete",
                              "delete_single_agent_ui", delete_single_agent,
                              methods=("POST", ))
    app_instance.add_url_rule("/agents/delete_multiple",
                              "delete_multiple_agents_ui",
                              delete_multiple_agents, methods=("POST", ))
    app_instance.add_url_rule("/agents/<uuid:agent_id>/restart",
                              "restart_single_agent_ui", restart_single_agent,
                              methods=("POST", ))
    app_instance.add_url_rule("/agents/restart_multiple",
                              "restart_multiple_agents_ui",
                              restart_multiple_agents,
                              methods=("POST", ))
    app_instance.add_url_rule("/agents/<uuid:agent_id>/disable",
                              "disable_single_agent_ui",
                              disable_single_agent,
                              methods=("POST", ))
    app_instance.add_url_rule("/agents/<uuid:agent_id>/enable",
                              "enable_single_agent_ui",
                              enable_single_agent,
                              methods=("POST", ))
    app_instance.add_url_rule("/agents/disable_multiple",
                              "disable_multiple_agents_ui",
                              disable_multiple_agents,
                              methods=("POST", ))
    app_instance.add_url_rule("/agents/enable_multiple",
                              "enable_multiple_agents_ui",
                              enable_multiple_agents,
                              methods=("POST", ))
    app_instance.add_url_rule("/agents/<uuid:agent_id>", "single_agent_ui",
                              single_agent, methods=("GET", ))
    app_instance.add_url_rule("/agents/<uuid:agent_id>/software/add",
                              "single_agent_add_software_ui",
                              agent_add_software, methods=("POST", ))
    app_instance.add_url_rule(
        "/agents/<uuid:agent_id>/software/<int:version_id>/delete",
        "single_agent_remove_software_ui",
        agent_delete_software, methods=("POST", ))
    app_instance.add_url_rule("/agents/<uuid:agent_id>/notes",
                              "update_agent_notes_ui",
                              update_notes_for_agent, methods=("POST", ))
    app_instance.add_url_rule("/agents/<uuid:agent_id>/tags",
                              "update_agent_tags_ui",
                              update_tags_in_agent, methods=("POST", ))
    app_instance.add_url_rule("/agents/<uuid:agent_id>/check_all_software",
                              "single_agent_check_all_software_ui",
                              check_software_in_single_agent, methods=("POST", ))

    app_instance.add_url_rule("/jobs/", "jobs_index_ui", jobs,
                              methods=("GET", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/delete",
                              "delete_single_job_ui", delete_single_job,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/delete_multiple",
                              "delete_multiple_jobs_ui", delete_multiple_jobs,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/rerun",
                              "rerun_single_job_ui", rerun_single_job,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/rerun_multiple",
                              "rerun_multiple_jobs_ui", rerun_multiple_jobs,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/rerun_failed_tasks",
                              "rerun_failed_in_job_ui", rerun_failed_in_job,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/rerun_failed_multiple",
                              "rerun_failed_multiple_jobs_ui",
                              rerun_failed_in_multiple_jobs,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/pause",
                              "pause_single_job_ui", pause_single_job,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/pause_multiple",
                              "pause_multiple_jobs_ui", pause_multiple_jobs,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/unpause",
                              "unpause_single_job_ui", unpause_single_job,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/unpause_multiple",
                              "unpause_multiple_jobs_ui", unpause_multiple_jobs,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/alter_frame_selection",
                              "alter_frames_in_job_ui",
                              alter_frames_in_single_job, methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/alter_scheduling_parameters",
                              "alter_scheduling_parameters_in_job_ui",
                              alter_scheduling_parameters_for_job,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/move_multiple",
                              "move_multiple_jobs_ui", move_multiple_jobs,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/set_prio_weight_multiple",
                              "set_prio_weight_on_jobs",
                              set_prio_weight_on_jobs, methods=("POST", ))
    app_instance.add_url_rule("/jobs/add_tag_multiple",
                              "add_tag_on_jobs",
                              add_tag_on_jobs, methods=("POST", ))
    app_instance.add_url_rule("/jobs/remove_tag_requirement_multiple",
                              "remove_tag_requirement_from_jobs",
                              remove_tag_requirement_from_jobs,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/add_tag_requirement_multiple",
                              "add_tag_requirement_on_jobs",
                              add_tag_requirement_on_jobs, methods=("POST", ))
    app_instance.add_url_rule("/jobs/remove_tag_multiple",
                              "remove_tag_from_jobs",
                              remove_tag_from_jobs, methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/alter_autodelete_parameters",
                              "alter_autodelete_parameters_in_job_ui",
                              alter_autodeletion_for_job,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/update_notes",
                              "update_job_notes_ui", update_notes_for_job,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/update_tags",
                              "update_job_tags_ui", update_tags_in_job,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/update_tag_requirements",
                              "update_job_tag_requirements_ui",
                              update_tag_requirements_in_job, methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/upgrade_jobtype",
                              "upgrade_jobtype_for_job",
                              upgrade_job_to_latest_jobtype_version,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/tasks/<int:task_id>",
                              "rerun_single_task_ui", rerun_single_task,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/notified_users/add",
                              "add_notified_user_ui", add_notified_user_to_job,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>/notified_users/<int:user_id>/"
                              "remove", "remove_notified_user_ui",
                              remove_notified_user_from_job, methods=("POST", ))
    app_instance.add_url_rule("/jobs/<int:job_id>",
                              "single_job_ui", single_job, methods=("GET", ))

    app_instance.add_url_rule("/jobs/<int:job_id>/tasks/<int:task_id>/logs/",
                              "logs_in_task_ui", logs_in_task, methods=("GET", ))

    app_instance.add_url_rule("/jobqueues/",
                              "jobqueues_index_ui", jobqueues, methods=("GET", ))
    app_instance.add_url_rule("/jobqueues/create",
                              "jobqueue_create_ui", jobqueue_create,
                              methods=("GET", "POST"))
    app_instance.add_url_rule("/jobqueues/<int:queue_id>",
                              "single_jobqueue_ui", jobqueue,
                              methods=("GET", "POST"))
    app_instance.add_url_rule("/jobqueues/<int:queue_id>/delete",
                              "delete_jobqueue_ui", delete_jobqueue,
                              methods=("POST", ))

    app_instance.add_url_rule("/jobtypes/",
                              "jobtypes_index_ui", jobtypes, methods=("GET", ))
    app_instance.add_url_rule("/jobtypes/<int:jobtype_id>",
                              "single_jobtype_ui", jobtype,
                              methods=("GET", "POST"))
    app_instance.add_url_rule("/jobtypes/<int:jobtype_id>/delete",
                              "remove_single_jobtype_ui", remove_jobtype,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobtypes/<int:jobtype_id>/update_templates",
                              "single_jobtype_update_templates",
                              update_jobtype_notification_templates,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobtypes/<int:jobtype_id>/software_requirements/"
                              "<int:software_id>/delete",
                              "single_jobtype_remove_requirement_ui",
                              remove_jobtype_software_requirement,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobtypes/<int:jobtype_id>/software_requirements/"
                              "add",
                              "single_jobtype_add_requirement_ui",
                              add_jobtype_software_requirement,
                              methods=("POST", ))
    app_instance.add_url_rule("/jobtypes/create", "jobtype_create_ui",
                              create_jobtype, methods=("GET", "POST"))

    app_instance.add_url_rule("/software/",
                              "software_index_ui", software, methods=("GET", ))
    app_instance.add_url_rule("/software/<int:software_id>",
                              "single_software_ui", software_item,
                              methods=("GET", ))
    app_instance.add_url_rule("/software/<int:software_id>/versions/"
                              "<int:version_id>",
                              "single_software_version_ui", software_version,
                              methods=("GET", "POST"))
    app_instance.add_url_rule("/software/<int:software_id>/versions/"
                              "<int:version_id>/update_rank",
                              "version_update_rank_ui", update_version_rank,
                              methods=("POST", ))
    app_instance.add_url_rule("/software/<int:software_id>/versions/"
                              "<int:version_id>/default_status",
                              "version_update_default_ui",
                              update_version_default_status,
                              methods=("POST", ))
    app_instance.add_url_rule("/software/<int:software_id>/versions/"
                              "<int:version_id>/remove",
                              "delete_single_version_ui",
                              remove_software_version, methods=("POST", ))
    app_instance.add_url_rule("/software/<int:software_id>/versions/create",
                              "add_single_version_ui",
                              add_software_version, methods=("POST", ))
    app_instance.add_url_rule("/software/create",
                              "add_single_software_ui",
                              add_software, methods=("POST", ))
    app_instance.add_url_rule("/software/<int:software_id>/delete",
                              "delete_single_software_ui",
                              remove_software, methods=("POST", ))

    app_instance.add_url_rule("/jobgroups/",
                              "jobgroups_index_ui", jobgroups, methods=("GET", ))

    app_instance.add_url_rule("/statistics/",
                              "statistics_index_ui", statistics_index,
                              methods=("GET", ))

    app_instance.add_url_rule("/statistics/agent_counts",
                              "agent_counts_ui", agent_counts,
                              methods=("GET", ))
    app_instance.add_url_rule("/statistics/task_events",
                              "task_events_ui", task_events,
                              methods=("GET", ))
Example #43
0
except ImportError:  # pragma: no cover
    from http.client import BAD_REQUEST, CREATED, NOT_FOUND

from flask.views import MethodView
from flask import request, redirect, send_file

from pyfarm.core.logger import getLogger
from pyfarm.master.config import config
from pyfarm.master.utility import jsonify

logger = getLogger("api.agents")

# Matches agent version strings such as "1", "1.2" or "1.2.3" with an
# optional pre-release suffix (-pre, -dev, -rc, -alpha, -beta, each with an
# optional trailing digit).  Raw strings are used so regex escapes like \d
# are not treated as (deprecated) Python string escape sequences.
# NOTE(review): "(-rc?\d?)" also accepts "-r" without the "c" -- confirm
# whether that is intended.
VERSION_REGEX = re.compile(r"\d+(\.\d+(\.\d+)?)?((-pre\d?)|(-dev\d?)|(-rc?\d?)|"
                           r"(-alpha\d?)|(-beta\d?))?$")

# Filesystem directory agent update packages are uploaded to, and the
# web-facing directory they are served from (both from the master config).
UPDATES_DIR = config.get("agent_updates_dir")
UPDATES_WEBDIR = config.get("agent_updates_webdir")


# Ensure the agent-updates directory exists at import time.  An
# already-existing directory is fine; any other OSError (permissions, bad
# path, ...) is re-raised.
try:
    makedirs(UPDATES_DIR)
except OSError as e:  # pragma: no cover
    if e.errno != EEXIST:
        raise


class AgentUpdatesAPI(MethodView):
    def put(self, version):
        """
        A ``PUT`` to this endpoint will upload a new version of pyfarm-agent to
        be used for agent auto-updates.  The update must be a zip file.
Example #44
0
from sqlalchemy import func, asc
from pyfarm.core.logger import getLogger
from pyfarm.core.enums import WorkState
from pyfarm.models.user import User
from pyfarm.models.jobtype import JobType
from pyfarm.models.job import Job
from pyfarm.models.task import Task
from pyfarm.models.jobgroup import JobGroup
from pyfarm.master.config import config
from pyfarm.master.utility import jsonify, validate_with_model
from pyfarm.master.application import db

logger = getLogger("api.jobgroups")

# Whether unknown usernames should be auto-created as User rows, and the
# email value used when doing so (exact semantics defined by the config
# entries -- confirm against the code that consumes these).
AUTOCREATE_USERS = config.get("autocreate_users")
AUTO_USER_EMAIL = config.get("autocreate_user_email")


def schema():
    """
    Returns the basic schema of :class:`.JobGroup`

    .. http:get:: /api/v1/jobgroups/schema HTTP/1.1

        **Request**

        .. sourcecode:: http

            GET /api/v1/jobgroups/schema HTTP/1.1
            Accept: application/json
Example #45
0
def task_events():
    """
    Render the task event statistics page.

    Optional query arguments:

    * ``minutes_resolution`` -- width of each consolidation bucket in
      minutes (default derived from the
      ``task_event_count_consolidate_interval`` config entry)
    * ``days_back`` -- how many days of history to chart (default 7)
    * ``queue`` -- repeatable job queue id filter
    * ``no_queue`` -- when ``"true"``, also include samples with no queue

    Task event counts are summed into fixed-width time buckets and task
    totals are averaged per bucket; both are passed as JSON series to the
    ``pyfarm/statistics/task_events.html`` template.
    """
    consolidate_interval = timedelta(**config.get(
        "task_event_count_consolidate_interval"))

    # Bucket width in minutes; may be overridden per-request.
    minutes_resolution = int(consolidate_interval.total_seconds() / 60)
    if "minutes_resolution" in request.args:
        minutes_resolution = int(request.args.get("minutes_resolution"))
        consolidate_interval = timedelta(minutes=minutes_resolution)

    days_back = int(request.args.get("days_back", 7))
    time_back = timedelta(days=days_back)

    # Raw samples inside the requested window, oldest first.
    task_event_count_query = TaskEventCount.query.order_by(
        TaskEventCount.time_start).filter(
            TaskEventCount.time_start > datetime.utcnow() - time_back)

    task_count_query = TaskCount.query.order_by(
        TaskCount.counted_time).filter(
            TaskCount.counted_time > datetime.utcnow() - time_back)

    jobqueue_ids = []
    no_queue = ("no_queue" in request.args and
        request.args["no_queue"].lower() == "true")
    if "queue" in request.args or no_queue:
        jobqueue_ids = request.args.getlist("queue")
        jobqueue_ids = [int(x) for x in jobqueue_ids]
        if no_queue:
            # Also match samples with no associated job queue (NULL id).
            task_event_count_query = task_event_count_query.filter(or_(
                TaskEventCount.job_queue_id.in_(jobqueue_ids),
                TaskEventCount.job_queue_id == None))
            task_count_query = task_count_query.filter(or_(
                TaskCount.job_queue_id.in_(jobqueue_ids),
                TaskCount.job_queue_id == None))
        else:
            task_event_count_query = task_event_count_query.filter(
                TaskEventCount.job_queue_id.in_(jobqueue_ids))
            task_count_query = task_count_query.filter(
                TaskCount.job_queue_id.in_(jobqueue_ids))

    # Each series is a list of [timestamp, value] pairs.  Deleted, failed
    # and done counts are stored negated so they plot below the x axis.
    tasks_new = []
    tasks_deleted = []
    tasks_restarted = []
    tasks_failed = []
    tasks_done = []
    current_period_start = None
    for sample in task_event_count_query:
        if not current_period_start:
            # First sample opens the first bucket.
            current_period_start = sample.time_start
            timestamp = timegm(current_period_start.utctimetuple())
            tasks_new.append([timestamp, sample.num_new])
            tasks_deleted.append([timestamp, -sample.num_deleted])
            tasks_restarted.append([timestamp, sample.num_restarted])
            tasks_failed.append([timestamp, -sample.num_failed])
            tasks_done.append([timestamp, -sample.num_done])
        elif (sample.time_start <
              (current_period_start + consolidate_interval)):
            # Sample falls inside the current bucket: accumulate in place.
            tasks_new[-1][-1] += sample.num_new
            tasks_deleted[-1][-1] -= sample.num_deleted
            tasks_restarted[-1][-1] += sample.num_restarted
            tasks_failed[-1][-1] -= sample.num_failed
            tasks_done[-1][-1] -= sample.num_done
        else:
            # Advance bucket-by-bucket (emitting zero-valued buckets for
            # gaps) until the sample's start falls in the current bucket.
            while (sample.time_start >=
                   (current_period_start + consolidate_interval)):
                current_period_start += consolidate_interval
                timestamp = timegm(current_period_start.utctimetuple())
                tasks_new.append([timestamp, 0])
                tasks_deleted.append([timestamp, 0])
                tasks_restarted.append([timestamp, 0])
                tasks_failed.append([timestamp, 0])
                tasks_done.append([timestamp, 0])

            tasks_new[-1][-1] += sample.num_new
            tasks_deleted[-1][-1] -= sample.num_deleted
            tasks_restarted[-1][-1] += sample.num_restarted
            tasks_failed[-1][-1] -= sample.num_failed
            tasks_done[-1][-1] -= sample.num_done

    # Task totals are averaged over the same bucket width via TotalsAverage.
    total_queued = []
    total_running = []
    total_done = []
    total_failed = []
    current_average = None
    for sample in task_count_query:
        if not current_average:
            current_average = TotalsAverage(sample)
        elif (sample.counted_time <
              (current_average.time_start + consolidate_interval)):
            current_average.add_sample(sample)
        else:
            # Bucket finished: emit its averages, then start the next one.
            timestamp = timegm(current_average.time_start.utctimetuple())
            total_queued.append([timestamp, current_average.avg_queued()])
            total_running.append([timestamp, current_average.avg_running()])
            total_done.append([timestamp, current_average.avg_done()])
            total_failed.append([timestamp, current_average.avg_failed()])
            current_average = TotalsAverage(sample, current_average)

    # Flush the final, still-open average bucket (if any samples existed).
    if current_average:
        timestamp = timegm(current_average.time_start.utctimetuple())
        total_queued.append([timestamp, current_average.avg_queued()])
        total_running.append([timestamp, current_average.avg_running()])
        total_done.append([timestamp, current_average.avg_done()])
        total_failed.append([timestamp, current_average.avg_failed()])

    # Full queue list for the filter UI in the template.
    jobqueues = JobQueue.query.order_by(JobQueue.fullpath).all()

    return render_template(
        "pyfarm/statistics/task_events.html",
        tasks_new_json=json.dumps(tasks_new),
        tasks_deleted_json=json.dumps(tasks_deleted),
        tasks_restarted_json=json.dumps(tasks_restarted),
        tasks_failed_json=json.dumps(tasks_failed),
        tasks_done_json=json.dumps(tasks_done),
        total_queued_json=json.dumps(total_queued),
        total_running_json=json.dumps(total_running),
        total_done_json=json.dumps(total_done),
        total_failed_json=json.dumps(total_failed),
        no_queue=no_queue,
        jobqueue_ids=jobqueue_ids,
        jobqueues=jobqueues,
        minutes_resolution=minutes_resolution,
        days_back=days_back)
Example #46
0
from sys import maxsize
from functools import reduce
from logging import DEBUG

from sqlalchemy import event, distinct, or_, and_
from sqlalchemy.schema import UniqueConstraint

from pyfarm.core.logger import getLogger
from pyfarm.core.enums import WorkState, _WorkState, AgentState
from pyfarm.master.application import db
from pyfarm.master.config import config
from pyfarm.models.core.mixins import UtilityMixins, ReprMixin
from pyfarm.models.core.types import id_column, IDTypeWork
from pyfarm.models.agent import Agent

# Scheduler tuning flags read from the master configuration (consumed by
# the queueing logic in this module).
PREFER_RUNNING_JOBS = config.get("queue_prefer_running_jobs")
USE_TOTAL_RAM = config.get("use_total_ram_for_scheduling")
logger = getLogger("pf.models.jobqueue")

# Turn on verbose logging for this module when queue debugging is enabled.
if config.get("debug_queue"):
    logger.setLevel(DEBUG)


class JobQueue(db.Model, UtilityMixins, ReprMixin):
    """
    Stores information about a job queue. Used for flexible, configurable
    distribution of computing capacity to jobs.
    """
    __tablename__ = config.get("table_job_queue")
    __table_args__ = (UniqueConstraint("parent_jobqueue_id", "name"),)
Example #47
0
=========

Contains core functions and data for use by :mod:`pyfarm.models`
"""

from uuid import UUID
from datetime import datetime
from textwrap import dedent

from pyfarm.core.enums import STRING_TYPES
from pyfarm.master.application import db
from pyfarm.master.config import config
from pyfarm.models.core.types import (
    id_column, IDTypeWork, IPAddress, WorkStateEnum)

DEFAULT_PRIORITY = config.get("queue_default_priority")


def modelfor(model, table):
    """
    Returns True if the given `model` object is for the
    expected `table`.

    >>> from pyfarm.master.config import config
    >>> from pyfarm.models.agent import Agent
    >>> modelfor(Agent("foo", "10.56.0.0", "255.0.0.0"), config.get("table_agent"))
    True
    """
    try:
        return model.__tablename__ == table
    except AttributeError:
Example #48
0

class WorkStateChangedModel(db.Model, WorkStateChangedMixin):
    """
    Minimal model exercising :class:`WorkStateChangedMixin` (the table
    name suffix ``_state_change_test`` suggests a test fixture -- confirm).
    """
    __tablename__ = "%s_state_change_test" % config.get("table_prefix")
    # Surrogate primary key.
    id = db.Column(Integer, primary_key=True, autoincrement=True)
    # Work state column watched by the mixin's state-change handler.
    state = db.Column(WorkStateEnum)
    attempts = db.Column(Integer, nullable=False, default=0)
    time_started = db.Column(DateTime)
    time_finished = db.Column(DateTime)

# Invoke WorkStateChangedModel.state_changed whenever the ``state`` column
# is set on an instance.
event.listen(
    WorkStateChangedModel.state, "set", WorkStateChangedModel.state_changed)


# Association tables whose single column is a foreign key into
# "<table_prefix>_mixin_test.id" (the MixinModel table defined below).
MixinModelRelation1 = db.Table(
    "%s_mixin_rel_test1" % config.get("table_prefix"), db.metadata,
    db.Column("mixin_id", db.Integer,
              db.ForeignKey(
                  "%s.id" % "%s_mixin_test" % config.get("table_prefix")),
              primary_key=True))

MixinModelRelation2 = db.Table(
    "%s_mixin_rel_test2" % config.get("table_prefix"), db.metadata,
    db.Column("mixin_id", db.Integer,
              db.ForeignKey(
                  "%s.id" % "%s_mixin_test" % config.get("table_prefix")),
              primary_key=True))

class MixinModel(db.Model, UtilityMixins):
    __tablename__ = "%s_mixin_test" % config.get("table_prefix")
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
Example #49
0
def get_application(**configuration_keywords):
    """
    Returns a new application context.  If keys and values are provided
    to ``configuration_keywords`` they will be used to override the
    default configuration values or create new ones

    >>> app = get_application(TESTING=True)
    >>> assert app.testing is True

    :keyword str static_folder:
        Overrides the default location of the static files directory
        (the ``static`` directory next to :mod:`pyfarm.master`).

    :raises AssertionError:
        Raised when the resolved static folder is not a directory.
    """
    # Base configuration sourced from the master config; anything passed
    # in ``configuration_keywords`` overrides these further below.
    app_config = {
        "DEBUG": config.get("debug"),
        "SECRET_KEY": config.get("secret_key"),
        "LOGIN_DISABLED": config.get("login_disabled"),
        "PYFARM_JSON_PRETTY": config.get("pretty_json"),
        "SQLALCHEMY_ECHO": config.get("echo_sql"),
        "SQLALCHEMY_DATABASE_URI": config.get("database"),
        "CSRF_SESSION_KEY": config.get("csrf_session_key"),
        "REMEMBER_COOKIE_DURATION": timedelta(**config.get("cookie_duration")),
        "JSONIFY_PRETTYPRINT_REGULAR": config.get("pretty_json"),
        "TIMESTAMP_FORMAT": config.get("timestamp_format")
    }

    # Statistics get their own database bind when enabled.
    if config.get("enable_statistics"):
        app_config["SQLALCHEMY_BINDS"] = {
            "statistics": config.get("statistics_database")}

    static_folder = configuration_keywords.pop("static_folder", None)
    if static_folder is None:  # static folder not provided
        import pyfarm.master
        static_folder = os.path.join(
            os.path.dirname(pyfarm.master.__file__), "static")

    static_folder = os.path.abspath(static_folder)

    # A bare ``assert`` would be stripped when Python runs with -O,
    # silently skipping this check, so raise explicitly instead.  The
    # exception type stays AssertionError for backward compatibility.
    if not os.path.isdir(static_folder):
        raise AssertionError("No such directory %s" % static_folder)

    app = Flask("pyfarm.master", static_folder=static_folder)
    app.config.update(app_config)
    app.config.update(configuration_keywords)
    # Allow <uuid:...> converters in route definitions.
    app.url_map.converters["uuid"] = UUIDConverter

    @app.context_processor
    def template_context_processor():
        # Expose the configured timestamp format to every template.
        return {
            "timestamp_format": app.config["TIMESTAMP_FORMAT"]
        }

    return app
Example #50
0
def consolidate_task_events_for_queue(job_queue_id):
    """
    Consolidate fine-grained :class:`TaskEventCount` rows for one job queue
    into rows each covering the full configured consolidation interval.

    Rows narrower than the interval are summed into an "open" consolidated
    row (then deleted); rows already at least one interval wide are kept
    and absorb later narrow rows starting inside their span.  The session
    is committed at the end.

    :param int job_queue_id:
        Id of the job queue whose task event counts are consolidated.
    """
    logger.debug("Consolidating task events for queue %s now", job_queue_id)

    consolidate_interval = timedelta(**config.get(
        "task_event_count_consolidate_interval"))

    def add_task_count(consolidation_count, event_count):
        # Accumulate one sample into the consolidated row.  (The previous
        # version also took an unused ``last_count`` argument; removed as
        # dead code.)
        consolidation_count.num_new += event_count.num_new
        consolidation_count.num_deleted += event_count.num_deleted
        consolidation_count.num_restarted += event_count.num_restarted
        consolidation_count.num_started += event_count.num_started
        consolidation_count.num_failed += event_count.num_failed
        consolidation_count.num_done += event_count.num_done

    # Sorted by time_start so periods are processed oldest-first.
    event_counts_query = TaskEventCount.query.filter_by(
        job_queue_id=job_queue_id).order_by(TaskEventCount.time_start)

    open_consolidation_count = None
    for event_count in event_counts_query:
        # If current count is not consolidated yet
        if event_count.time_end - event_count.time_start < consolidate_interval:
            if not open_consolidation_count:
                # First narrow sample: open a consolidated row starting at
                # this sample's start and spanning one full interval.
                open_consolidation_count = TaskEventCount(
                    job_queue_id=job_queue_id,
                    num_new=0,
                    num_deleted=0,
                    num_restarted=0,
                    num_started=0,
                    num_failed=0,
                    num_done=0)
                open_consolidation_count.time_start = event_count.time_start
                open_consolidation_count.time_end = (event_count.time_start +
                                                     consolidate_interval)
                add_task_count(open_consolidation_count, event_count)
                db.session.delete(event_count)
            else:
                # We know the event count does not fall into the period of the
                # next already existing consolidated count, because we sorted
                # the query by time_start, so the other consolidated count
                # would have come up before this unconsolidated one.
                # NOTE(review): a sample starting exactly at ``time_end`` is
                # merged into the period ending at that instant (the loop
                # uses ">", not ">=") -- confirm this boundary is intended.
                while (event_count.time_start >
                       open_consolidation_count.time_end):
                    db.session.add(open_consolidation_count)
                    new_consolidation_count = TaskEventCount(
                        job_queue_id=job_queue_id,
                        num_new=0,
                        num_deleted=0,
                        num_restarted=0,
                        num_started=0,
                        num_failed=0,
                        num_done=0)
                    new_consolidation_count.time_start = (
                        open_consolidation_count.time_end)
                    new_consolidation_count.time_end = (
                        new_consolidation_count.time_start +
                        consolidate_interval)
                    open_consolidation_count = new_consolidation_count
                add_task_count(open_consolidation_count, event_count)
                db.session.delete(event_count)
        else:
            # Sample already spans at least one full interval.
            if not open_consolidation_count:
                open_consolidation_count = event_count
            else:
                if event_count.time_start < open_consolidation_count.time_end:
                    # Overlaps the open period: merge and discard it.
                    add_task_count(open_consolidation_count, event_count)
                    db.session.delete(event_count)
                else:
                    db.session.add(open_consolidation_count)
                    open_consolidation_count = event_count

    if open_consolidation_count:
        db.session.add(open_consolidation_count)

    db.session.commit()
Example #51
0
from flask.views import MethodView
from flask import g, redirect, send_file, request, Response

from sqlalchemy.exc import IntegrityError

from pyfarm.core.logger import getLogger
from pyfarm.master.config import config
from pyfarm.models.tasklog import TaskLog, TaskTaskLogAssociation
from pyfarm.models.task import Task
from pyfarm.master.application import db
from pyfarm.master.utility import jsonify, validate_with_model, isuuid

logger = getLogger("api.tasklogs")

# Directory on the master where task log files are stored.
LOGFILES_DIR = config.get("tasklogs_dir")

# Create the log directory at import time; an already-existing directory
# is fine, any other OSError is re-raised.
try:
    makedirs(LOGFILES_DIR)
except OSError as e:  # pragma: no cover
    if e.errno != EEXIST:
        raise


class LogsInTaskAttemptsIndexAPI(MethodView):
    def get(self, job_id, task_id, attempt):
        """
        A ``GET`` to this endpoint will return a list of all known logs that are
        associated with this attempt at running this task

        .. http:get:: /api/v1/jobs/<job_id>/tasks/<task_id>/attempts/<attempt>/logs/ HTTP/1.1
Example #52
0
Creates the base instance of :class:`.Celery` which is used by components of
PyFarm's master that require interaction with a task queue.  This module also
configures Celery's beat scheduler for other tasks such as agent polling
and task assignment.
"""

from datetime import timedelta

from celery import Celery

from pyfarm.master.config import config

# Celery application shared by PyFarm's scheduler components.  The broker
# URL comes from the master configuration and both task modules are listed
# in ``include`` so their tasks register with this app on worker startup.
celery_app = Celery(
    "pyfarm.tasks",
    broker=config.get("scheduler_broker"),
    include=["pyfarm.scheduler.tasks", "pyfarm.scheduler.statistics_tasks"])

celery_app.conf.CELERYBEAT_SCHEDULE = {
    "periodically_poll_agents": {
        "task": "pyfarm.scheduler.tasks.poll_agents",
        "schedule": timedelta(**config.get("agent_poll_interval"))
    },
    "periodical_scheduler": {
        "task": "pyfarm.scheduler.tasks.assign_tasks",
        "schedule": timedelta(**config.get("agent_poll_interval"))
    },
    "periodically_clean_task_logs": {
        "task": "pyfarm.scheduler.tasks.clean_up_orphaned_task_logs",
        "schedule": timedelta(**config.get("orphaned_log_cleanup_interval"))
    },
Example #53
0
    AgentState, STRING_TYPES, UseAgentAddress, INTEGER_TYPES, WorkState)
from pyfarm.master.config import config
from pyfarm.master.application import db
from pyfarm.models.core.functions import repr_ip
from pyfarm.models.core.mixins import (
    ValidatePriorityMixin, UtilityMixins, ReprMixin, ValidateWorkStateMixin)
from pyfarm.models.core.types import (
    id_column, IPv4Address, IDTypeAgent, IDTypeWork, UseAgentAddressEnum,
    OperatingSystemEnum, AgentStateEnum, MACAddress)
from pyfarm.models.jobtype import JobTypeVersion
from pyfarm.models.job import Job


__all__ = ("Agent", )

# When True, agents connecting from loopback addresses are accepted
# (driven by the ``allow_agents_from_loopback`` config entry).
ALLOW_AGENT_LOOPBACK = config.get("allow_agents_from_loopback")

# Hostname validation: dot-separated labels of 1-63 characters that do not
# start or end with "-", with an optional trailing dot.  Raw strings are
# used so regex escapes like \d and \. are not treated as (deprecated)
# Python string escape sequences.
REGEX_HOSTNAME = re.compile(r"^(?!-)[A-Z\d-]{1,63}(?<!-)"
                            r"(\.(?!-)[A-Z\d-]{1,63}(?<!-))*\.?$",
                            re.IGNORECASE)


# Many-to-many association between agents and software versions
# (presumably the versions present on each agent -- confirm against the
# relationship that uses this table).  Both columns form the composite
# primary key.
AgentSoftwareVersionAssociation = db.Table(
    config.get("table_agent_software_version_assoc"), db.metadata,
    db.Column(
        "agent_id", IDTypeAgent,
        db.ForeignKey("%s.id" % config.get("table_agent")),
        primary_key=True),
    db.Column(
        "software_version_id", db.Integer,
        db.ForeignKey("%s.id" % config.get("table_software_version")),
        primary_key=True))
Example #54
0
    Agent, AgentMacAddress, AgentSoftwareVersionAssociation)
from pyfarm.models.gpu import GPU
from pyfarm.models.task import Task
from pyfarm.models.software import Software, SoftwareVersion
from pyfarm.master.config import config
from pyfarm.models.tag import Tag
from pyfarm.models.disk import AgentDisk
from pyfarm.master.application import db
from pyfarm.master.utility import (
    jsonify, validate_with_model, get_ipaddr_argument, get_integer_argument,
    get_hostname_argument, get_port_argument, isuuid)

logger = getLogger("api.agents")

# Matches colon-separated MAC addresses, e.g. "00:1a:2b:3c:4d:5e".
MAC_RE = re.compile("^([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$")
OUR_FARM_NAME = config.get("farm_name")  # this master's farm name (config)


def fail_missing_assignments(agent, current_assignments):
    known_task_ids = []
    for assignment in current_assignments.values():
        for task in assignment["tasks"]:
            known_task_ids.append(task["id"])
    tasks_query = Task.query.filter(Task.agent == agent,
                                    or_(Task.state == None,
                                        ~Task.state.in_(
                                            [WorkState.FAILED, WorkState.DONE])))
    if known_task_ids:
        tasks_query = tasks_query.filter(not_(Task.id.in_(known_task_ids)))

    failed_tasks = []
Example #55
0
from flask.ext.login import UserMixin

from pyfarm.core.enums import STRING_TYPES, PY3
from pyfarm.master.application import app, db, login_serializer
from pyfarm.master.config import config
from pyfarm.models.core.mixins import ReprMixin
from pyfarm.models.core.functions import split_and_extend

__all__ = ("User", "Role")

SHA256_ASCII_LENGTH = 64  # static length of a sha256 string

# Many-to-many join table recording which roles each user is a member of.
UserRole = db.Table(
    config.get("table_user_role"),
    db.Column(
        "user_id", db.Integer,
        db.ForeignKey("%s.id" % config.get("table_user")),
        doc="The id of the associated user"),
    db.Column(
        "role_id", db.Integer,
        db.ForeignKey("%s.id" % config.get("table_role")),
        doc="The id of the associated role")
)


class User(db.Model, UserMixin, ReprMixin):
    """
    Stores information about a user including the roles they belong to
    """