Code Example #1
File: data.py Project: MaslovaEV/resolwe
class Data(BaseModel):
    """Postgres model for storing data."""
    class Meta(BaseModel.Meta):
        """Data Meta options."""

        permissions = (
            ("view_data", "Can view data"),
            ("edit_data", "Can edit data"),
            ("share_data", "Can share data"),
            ("download_data", "Can download files from data"),
            ("owner_data", "Is owner of the data"),
        )

    #: data object is uploading
    STATUS_UPLOADING = 'UP'
    #: data object is being resolved
    STATUS_RESOLVING = 'RE'
    #: data object is waiting
    STATUS_WAITING = 'WT'
    #: data object is processing
    STATUS_PROCESSING = 'PR'
    #: data object is done
    STATUS_DONE = 'OK'
    #: data object is in error state
    STATUS_ERROR = 'ER'
    #: data object is in dirty state
    STATUS_DIRTY = 'DR'
    STATUS_CHOICES = (
        (STATUS_UPLOADING, 'Uploading'),
        (STATUS_RESOLVING, 'Resolving'),
        (STATUS_WAITING, 'Waiting'),
        (STATUS_PROCESSING, 'Processing'),
        (STATUS_DONE, 'Done'),
        (STATUS_ERROR, 'Error'),
        (STATUS_DIRTY, 'Dirty'),
    )

    #: process started date and time (set by
    #: :meth:`resolwe.flow.executors.BaseFlowExecutor.run` or its derivatives)
    started = models.DateTimeField(blank=True, null=True, db_index=True)

    #: process finished date and time (set by
    #: :meth:`resolwe.flow.executors.BaseFlowExecutor.run` or its derivatives)
    finished = models.DateTimeField(blank=True, null=True, db_index=True)

    #: checksum field calculated on inputs
    checksum = models.CharField(
        max_length=64,
        validators=[
            RegexValidator(regex=r'^[0-9a-f]{64}$',
                           message='Checksum is exactly 64 hexadecimal characters',
                           code='invalid_checksum')
        ])

    status = models.CharField(max_length=2,
                              choices=STATUS_CHOICES,
                              default=STATUS_RESOLVING)
    """
    :class:`Data` status

    It can be one of the following:

    - :attr:`STATUS_UPLOADING`
    - :attr:`STATUS_RESOLVING`
    - :attr:`STATUS_WAITING`
    - :attr:`STATUS_PROCESSING`
    - :attr:`STATUS_DONE`
    - :attr:`STATUS_ERROR`
    - :attr:`STATUS_DIRTY`
    """

    #: process used to compute the data object
    process = models.ForeignKey('Process', on_delete=models.PROTECT)

    #: process id
    process_pid = models.PositiveIntegerField(blank=True, null=True)

    #: progress
    process_progress = models.PositiveSmallIntegerField(default=0)

    #: return code
    process_rc = models.PositiveSmallIntegerField(blank=True, null=True)

    #: info log message
    process_info = ArrayField(models.CharField(max_length=255), default=list)

    #: warning log message
    process_warning = ArrayField(models.CharField(max_length=255), default=list)

    #: error log message
    process_error = ArrayField(models.CharField(max_length=255), default=list)

    #: actual inputs used by the process
    input = JSONField(default=dict)

    #: actual outputs of the process
    output = JSONField(default=dict)

    #: data descriptor schema
    descriptor_schema = models.ForeignKey('DescriptorSchema',
                                          blank=True,
                                          null=True,
                                          on_delete=models.PROTECT)

    #: actual descriptor
    descriptor = JSONField(default=dict)

    #: indicate whether `descriptor` doesn't match `descriptor_schema` (is dirty)
    descriptor_dirty = models.BooleanField(default=False)

    #: track if user set the data name explicitly
    named_by_user = models.BooleanField(default=False)

    #: dependencies between data objects
    parents = models.ManyToManyField('self',
                                     symmetrical=False,
                                     related_name='children')

    #: tags for categorizing objects
    tags = ArrayField(models.CharField(max_length=255), default=list)

    def __init__(self, *args, **kwargs):
        """Initialize attributes."""
        super(Data, self).__init__(*args, **kwargs)
        self._original_name = self.name

    def save_storage(self, instance, schema):
        """Save basic:json values to a Storage collection."""
        for field_schema, fields in iterate_fields(instance, schema):
            name = field_schema['name']
            value = fields[name]
            if field_schema.get('type', '').startswith('basic:json:'):
                if value and not self.pk:
                    raise ValidationError(
                        'Data object must be `created` before creating `basic:json:` fields'
                    )

                if isinstance(value, int):
                    # already in Storage
                    continue

                if isinstance(value, six.string_types):
                    file_path = os.path.join(
                        settings.FLOW_EXECUTOR['DATA_DIR'], str(self.pk),
                        value)
                    if os.path.isfile(file_path):
                        with open(file_path) as file_handler:
                            value = json.load(file_handler)

                storage = Storage.objects.create(
                    name='Storage for data id {}'.format(self.pk),
                    contributor=self.contributor,
                    data_id=self.pk,
                    json=value,
                )

                # `value` is copied by value, so `fields[name]` must be changed
                fields[name] = storage.pk

    def save_dependencies(self, instance, schema):
        """Save data: and list:data: references as parents."""
        def add_dependency(value):
            """Add parent Data dependency."""
            try:
                self.parents.add(Data.objects.get(pk=value))  # pylint: disable=no-member
            except Data.DoesNotExist:
                pass

        for field_schema, fields in iterate_fields(instance, schema):
            name = field_schema['name']
            value = fields[name]

            if field_schema.get('type', '').startswith('data:'):
                add_dependency(value)
            elif field_schema.get('type', '').startswith('list:data:'):
                for data in value:
                    add_dependency(data)

    def create_entity(self):
        """Create entity if `flow_collection` is defined in process.

        The following rules apply when adding a `Data` object to an
        `Entity`:
        * Only add a `Data` object to an `Entity` if the process has the
        `flow_collection` field defined
        * Add the object to an existing `Entity` if all parents that are
        part of any `Entity` (though not necessarily all parents) belong
        to the same `Entity`
        * If the parents belong to different `Entities` or do not belong
        to any `Entity`, create a new `Entity`

        """
        ds_slug = self.process.flow_collection  # pylint: disable=no-member
        if ds_slug:
            entity_query = Entity.objects.filter(
                data__in=self.parents.all()).distinct()  # pylint: disable=no-member

            if entity_query.count() == 1:
                entity = entity_query.first()
            else:

                descriptor_schema = DescriptorSchema.objects.filter(
                    slug=ds_slug).latest()
                entity = Entity.objects.create(
                    contributor=self.contributor,
                    descriptor_schema=descriptor_schema,
                    name=self.name,
                    tags=self.tags,
                )

                for permission in list(zip(*entity._meta.permissions))[0]:  # pylint: disable=protected-access
                    assign_perm(permission, entity.contributor, entity)

                for collection in Collection.objects.filter(data=self):
                    entity.collections.add(collection)

            entity.data.add(self)

    def save(self, render_name=False, *args, **kwargs):
        """Save the data model."""
        # Mark the name as explicitly set if the user changed it after init.
        if self.name != self._original_name:
            self.named_by_user = True

        create = self.pk is None
        if create:
            # Default values for INPUT
            input_schema = self.process.input_schema  # pylint: disable=no-member
            for field_schema, fields, path in iterate_schema(
                    self.input, input_schema):
                if ('default' in field_schema
                        and field_schema['name'] not in fields):
                    dict_dot(self.input, path, field_schema['default'])

            if not self.name:
                self._render_name()
            else:
                self.named_by_user = True

            self.checksum = get_data_checksum(self.input, self.process.slug,
                                              self.process.version)  # pylint: disable=no-member

        elif render_name:
            self._render_name()

        self.save_storage(self.output, self.process.output_schema)  # pylint: disable=no-member

        if self.status != Data.STATUS_ERROR:
            hydrate_size(self)

        if create:
            validate_schema(self.input, self.process.input_schema)  # pylint: disable=no-member

        render_descriptor(self)

        if self.descriptor_schema:
            try:
                validate_schema(self.descriptor, self.descriptor_schema.schema)  # pylint: disable=no-member
                self.descriptor_dirty = False
            except DirtyError:
                self.descriptor_dirty = True
        elif self.descriptor and self.descriptor != {}:
            raise ValueError(
                "`descriptor_schema` must be defined if `descriptor` is given")

        if self.status != Data.STATUS_ERROR:
            path_prefix = os.path.join(settings.FLOW_EXECUTOR['DATA_DIR'],
                                       str(self.pk))
            output_schema = self.process.output_schema  # pylint: disable=no-member
            if self.status == Data.STATUS_DONE:
                validate_schema(self.output,
                                output_schema,
                                path_prefix=path_prefix)
            else:
                validate_schema(self.output,
                                output_schema,
                                path_prefix=path_prefix,
                                test_required=False)

        with transaction.atomic():
            super(Data, self).save(*args, **kwargs)

            # We can only save dependencies after the data object has been saved. This
            # is why a transaction block is needed and the save method must be called first.
            if create:
                self.save_dependencies(self.input, self.process.input_schema)  # pylint: disable=no-member

        if create:
            self.create_entity()

    def _render_name(self):
        """Render data name.

        The rendering is based on name template (`process.data_name`) and
        input context.

        """
        if not self.process.data_name or self.named_by_user:  # pylint: disable=no-member
            return

        inputs = copy.deepcopy(self.input)
        hydrate_input_references(inputs,
                                 self.process.input_schema,
                                 hydrate_values=False)  # pylint: disable=no-member
        template_context = inputs

        try:
            name = render_template(
                self.process,
                self.process.data_name,  # pylint: disable=no-member
                template_context)
        except EvaluationError:
            name = '?'

        name_max_len = self._meta.get_field('name').max_length
        if len(name) > name_max_len:
            name = name[:(name_max_len - 3)] + '...'

        self.name = name
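
The checksum field above validates a 64-character lowercase hex digest, which is the length of a SHA-256 hash. A minimal sketch of how such an input checksum could be computed follows; get_data_checksum itself is not shown in this example, so the exact serialization is an assumption:

import hashlib
import json

def data_checksum_sketch(inputs, process_slug, process_version):
    # Hypothetical stand-in for get_data_checksum: hash the canonical JSON
    # of the inputs together with the process slug and version.
    payload = json.dumps(inputs, sort_keys=True) + process_slug + str(process_version)
    return hashlib.sha256(payload.encode('utf-8')).hexdigest()

# The result satisfies the model's RegexValidator(regex=r'^[0-9a-f]{64}$').
print(data_checksum_sketch({'reads': 42}, 'alignment-bwa', '1.0.2'))
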
Code Example #2
File: export.py Project: bibbox/app-azizi-onadata
class Export(models.Model):
    """
    Class representing a data export from an XForm
    """
    class ExportTypeError(Exception):
        def __unicode__(self):
            return _(u"Invalid export type specified")

        def __str__(self):
            return unicode(self).encode('utf-8')

    XLS_EXPORT = 'xls'
    CSV_EXPORT = 'csv'
    KML_EXPORT = 'kml'
    ZIP_EXPORT = 'zip'
    CSV_ZIP_EXPORT = 'csv_zip'
    SAV_ZIP_EXPORT = 'sav_zip'
    SAV_EXPORT = 'sav'
    EXTERNAL_EXPORT = 'external'
    OSM_EXPORT = OSM
    GOOGLE_SHEETS_EXPORT = 'gsheets'

    EXPORT_MIMES = {
        'xls': 'vnd.ms-excel',
        'xlsx': 'vnd.openxmlformats',
        'csv': 'csv',
        'zip': 'zip',
        'csv_zip': 'zip',
        'sav_zip': 'zip',
        'sav': 'sav',
        'kml': 'vnd.google-earth.kml+xml',
        OSM: OSM
    }

    EXPORT_TYPES = [
        (XLS_EXPORT, 'Excel'),
        (CSV_EXPORT, 'CSV'),
        (ZIP_EXPORT, 'ZIP'),
        (KML_EXPORT, 'kml'),
        (CSV_ZIP_EXPORT, 'CSV ZIP'),
        (SAV_ZIP_EXPORT, 'SAV ZIP'),
        (SAV_EXPORT, 'SAV'),
        (EXTERNAL_EXPORT, 'Excel'),
        (OSM, OSM),
        (GOOGLE_SHEETS_EXPORT, 'Google Sheets'),
    ]

    EXPORT_OPTION_FIELDS = [
        "binary_select_multiples", "dataview_pk", "group_delimiter",
        "include_images", "include_labels", "include_labels_only",
        "include_hxl", "query", "remove_group_name", "split_select_multiples",
        "value_select_multiples", "win_excel_utf8"
    ]

    EXPORT_TYPE_DICT = dict(EXPORT_TYPES)

    PENDING = async_status.PENDING
    SUCCESSFUL = async_status.SUCCESSFUL
    FAILED = async_status.FAILED

    # max no. of export files a user can keep
    MAX_EXPORTS = 10

    # Required fields
    xform = models.ForeignKey(XForm)
    export_type = models.CharField(max_length=10,
                                   choices=EXPORT_TYPES,
                                   default=XLS_EXPORT)

    # optional fields
    created_on = models.DateTimeField(auto_now_add=True)
    filename = models.CharField(max_length=255, null=True, blank=True)

    # need to save the filedir since when an xform is deleted, it cascades
    # its exports which then try to delete their files and try to access the
    # deleted xform - bad things happen
    filedir = models.CharField(max_length=255, null=True, blank=True)
    task_id = models.CharField(max_length=255, null=True, blank=True)
    # time of last submission when this export was created
    time_of_last_submission = models.DateTimeField(null=True, default=None)
    # status
    internal_status = models.SmallIntegerField(default=PENDING)
    export_url = models.URLField(null=True, default=None)

    options = JSONField(default=dict, null=False)
    error_message = models.CharField(max_length=255, null=True, blank=True)

    class Meta:
        app_label = "viewer"
        unique_together = (("xform", "filename"), )

    def save(self, *args, **kwargs):
        if not self.pk and self.xform:
            # if new, check if we've hit our limit for exports for this form,
            # if so, delete oldest
            # TODO: let user know that last export will be deleted
            num_existing_exports = Export.objects.filter(
                xform=self.xform, export_type=self.export_type).count()

            if num_existing_exports >= self.MAX_EXPORTS:
                Export._delete_oldest_export(self.xform, self.export_type)

            # update time_of_last_submission with
            # xform.time_of_last_submission_update
            self.time_of_last_submission = (
                self.xform.time_of_last_submission_update())
        if self.filename:
            self.internal_status = Export.SUCCESSFUL
        super(Export, self).save(*args, **kwargs)

    @classmethod
    def _delete_oldest_export(cls, xform, export_type):
        oldest_export = Export.objects.filter(
            xform=xform, export_type=export_type).order_by('created_on')[0]
        oldest_export.delete()

    @property
    def is_pending(self):
        return self.status == Export.PENDING

    @property
    def is_successful(self):
        return self.status == Export.SUCCESSFUL

    @property
    def status(self):
        if self.filename:
            # need to have this since existing models will have their
            # internal_status set to PENDING - the default
            return Export.SUCCESSFUL
        elif self.internal_status == Export.FAILED:
            return Export.FAILED
        else:
            return Export.PENDING

    def set_filename(self, filename):
        self.filename = filename
        self.internal_status = Export.SUCCESSFUL
        self._update_filedir()

    def _update_filedir(self):
        assert self.filename
        self.filedir = os.path.join(self.xform.user.username, 'exports',
                                    self.xform.id_string, self.export_type)

    @property
    def filepath(self):
        if self.filedir and self.filename:
            return os.path.join(self.filedir, self.filename)
        return None

    @property
    def full_filepath(self):
        if self.filepath:
            default_storage = get_storage_class()()
            try:
                return default_storage.path(self.filepath)
            except NotImplementedError:
                # read file from s3
                name, ext = os.path.splitext(self.filepath)
                tmp = NamedTemporaryFile(suffix=ext, delete=False)
                f = default_storage.open(self.filepath)
                tmp.write(f.read())
                tmp.close()
                return tmp.name
        return None

    @classmethod
    def exports_outdated(cls, xform, export_type, options=None):
        # get newest export for xform
        try:
            export_options = get_export_options_query_kwargs(options or {})
            latest_export = Export.objects.filter(
                xform=xform,
                export_type=export_type,
                internal_status__in=[Export.SUCCESSFUL, Export.PENDING],
                **export_options).latest('created_on')
        except cls.DoesNotExist:
            return True
        else:
            if (latest_export.time_of_last_submission is not None
                    and xform.time_of_last_submission_update() is not None):
                return (latest_export.time_of_last_submission <
                        xform.time_of_last_submission_update())
            else:
                # return true if we can't determine the status, to force
                # auto-generation
                return True

    @classmethod
    def is_filename_unique(cls, xform, filename):
        return Export.objects.filter(xform=xform,
                                     filename=filename).count() == 0
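
The status property above reconciles filename and internal_status in a fixed order of precedence. A plain-Python sketch of that resolution, with string constants standing in for the async_status values defined elsewhere (an assumption):

PENDING, SUCCESSFUL, FAILED = 'PENDING', 'SUCCESSFUL', 'FAILED'

def resolve_status(filename, internal_status):
    # A filename implies success, even when internal_status was left at
    # the PENDING default (as older rows may be).
    if filename:
        return SUCCESSFUL
    if internal_status == FAILED:
        return FAILED
    return PENDING

assert resolve_status('report.xlsx', PENDING) == SUCCESSFUL
assert resolve_status(None, FAILED) == FAILED
assert resolve_status(None, PENDING) == PENDING
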
Code Example #3
class Node(
        core_models.UuidMixin,
        core_models.NameMixin,
        structure_models.StructureModel,
        core_models.StateMixin,
        BackendMixin,
        structure_models.TimeStampedModel,
):
    class RuntimeStates:
        ACTIVE = 'active'
        REGISTERING = 'registering'
        UNAVAILABLE = 'unavailable'

    content_type = models.ForeignKey(on_delete=models.CASCADE,
                                     to=ContentType,
                                     null=True,
                                     related_name='+')
    object_id = models.PositiveIntegerField(null=True)
    instance = GenericForeignKey(
        'content_type',
        'object_id')  # the virtual machine where the k8s node will be deployed.
    cluster = models.ForeignKey(Cluster, on_delete=models.CASCADE)
    controlplane_role = models.BooleanField(default=False)
    etcd_role = models.BooleanField(default=False)
    worker_role = models.BooleanField(default=False)
    initial_data = JSONField(
        blank=True,
        default=dict,
        help_text=_('Initial data for instance creation.'))
    runtime_state = models.CharField(max_length=255, blank=True)
    k8s_version = models.CharField(max_length=255, blank=True)
    docker_version = models.CharField(max_length=255, blank=True)
    cpu_allocated = models.FloatField(blank=True, null=True)
    cpu_total = models.IntegerField(blank=True, null=True)
    ram_allocated = models.IntegerField(blank=True,
                                        null=True,
                                        help_text='Allocated RAM in Mi.')
    ram_total = models.IntegerField(blank=True,
                                    null=True,
                                    help_text='Total RAM in Mi.')
    pods_allocated = models.IntegerField(blank=True, null=True)
    pods_total = models.IntegerField(blank=True, null=True)
    labels = JSONField(blank=True, default=dict)
    annotations = JSONField(blank=True, default=dict)

    tracker = FieldTracker()

    def get_node_command(self):
        roles_command = []
        if self.controlplane_role:
            roles_command.append('--controlplane')

        if self.etcd_role:
            roles_command.append('--etcd')

        if self.worker_role:
            roles_command.append('--worker')

        return self.cluster.node_command + ' ' + ' '.join(roles_command)

    class Meta:
        unique_together = (('content_type', 'object_id'), ('cluster', 'name'))

    class Permissions:
        customer_path = 'cluster__service_project_link__project__customer'
        project_path = 'cluster__service_project_link__project'
        service_path = 'cluster__service_project_link__service'

    @classmethod
    def get_url_name(cls):
        return 'rancher-node'

    @property
    def service_project_link(self):
        return self.cluster.service_project_link

    def __str__(self):
        return self.name
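
get_node_command above appends one flag per enabled role to the cluster's registration command. A standalone sketch of the same assembly; the base command string here is invented for illustration:

def node_command(base, controlplane=False, etcd=False, worker=False):
    # Mirrors Node.get_node_command: one flag per enabled role.
    flags = []
    if controlplane:
        flags.append('--controlplane')
    if etcd:
        flags.append('--etcd')
    if worker:
        flags.append('--worker')
    return base + ' ' + ' '.join(flags)

print(node_command('rancher agent --token abc123', etcd=True, worker=True))
# rancher agent --token abc123 --etcd --worker
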
Code Example #4
class OCPAWSCostLineItemDailySummary(models.Model):
    """A summarized view of OCP on AWS cost."""
    class Meta:
        """Meta for OCPAWSCostLineItemDailySummary."""

        db_table = "reporting_ocpawscostlineitem_daily_summary"

        indexes = [
            models.Index(fields=["usage_start"],
                         name="cost_summary_ocp_usage_idx"),
            models.Index(fields=["namespace"],
                         name="cost_summary_namespace_idx"),
            models.Index(fields=["node"],
                         name="cost_summary_node_idx",
                         opclasses=["varchar_pattern_ops"]),
            models.Index(fields=["resource_id"],
                         name="cost_summary_resource_idx"),
            GinIndex(fields=["tags"], name="cost_tags_idx"),
            models.Index(fields=["product_family"],
                         name="ocp_aws_product_family_idx"),
            models.Index(fields=["instance_type"],
                         name="ocp_aws_instance_type_idx"),
            # A GIN functional index named "ix_ocp_aws_product_family_ilike" was created manually
            # via RunSQL migration operation
            # Function: (upper(product_family) gin_trgm_ops)
            # A GIN functional index named "ix_ocp_aws_product_code_ilike" was created manually
            # via RunSQL migration operation
            # Function: (upper(product_code) gin_trgm_ops)
        ]

    # OCP Fields
    report_period = models.ForeignKey("OCPUsageReportPeriod",
                                      on_delete=models.CASCADE,
                                      null=True)

    cluster_id = models.CharField(max_length=50, null=True)

    cluster_alias = models.CharField(max_length=256, null=True)

    # Kubernetes objects by convention have a max name length of 253 chars
    namespace = ArrayField(models.CharField(max_length=253, null=False))

    pod = ArrayField(models.CharField(max_length=253, null=False))

    node = models.CharField(max_length=253, null=False)

    resource_id = models.CharField(max_length=253, null=True)

    usage_start = models.DateField(null=False)

    usage_end = models.DateField(null=False)

    # AWS Fields
    cost_entry_bill = models.ForeignKey("AWSCostEntryBill",
                                        on_delete=models.CASCADE,
                                        null=True)

    product_code = models.CharField(max_length=50, null=False)

    product_family = models.CharField(max_length=150, null=True)

    instance_type = models.CharField(max_length=50, null=True)

    usage_account_id = models.CharField(max_length=50, null=False)

    account_alias = models.ForeignKey("AWSAccountAlias",
                                      on_delete=models.SET_NULL,
                                      null=True)

    availability_zone = models.CharField(max_length=50, null=True)

    region = models.CharField(max_length=50, null=True)

    unit = models.CharField(max_length=63, null=True)

    tags = JSONField(null=True)

    usage_amount = models.DecimalField(max_digits=24,
                                       decimal_places=9,
                                       null=True)

    normalized_usage_amount = models.FloatField(null=True)

    currency_code = models.CharField(max_length=10, null=True)

    # Cost breakdown can be done by cluster, node, project, and pod.
    # Cluster and node cost can be determined by summing the AWS unblended_cost
    # with a GROUP BY cluster/node.
    # Project cost is a summation of pod costs with a GROUP BY project
    # The cost of un-utilized resources = sum(unblended_cost) - sum(project_cost)
    unblended_cost = models.DecimalField(max_digits=30,
                                         decimal_places=15,
                                         null=True)

    markup_cost = models.DecimalField(max_digits=30,
                                      decimal_places=15,
                                      null=True)

    # This is a count of the number of projects that share an AWS resource
    # It is used to divide cost evenly among projects
    shared_projects = models.IntegerField(null=False, default=1)

    # A JSON dictionary of the project cost, keyed by project/namespace name
    # See comment on unblended_cost for project cost explanation
    project_costs = JSONField(null=True)
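
The comment on unblended_cost describes the cost-breakdown arithmetic. A short sketch of that math, with invented rows standing in for the summary table:

# Hypothetical daily-summary rows for a single node.
rows = [
    {'unblended_cost': 10.0, 'project_costs': {'proj-a': 4.0, 'proj-b': 3.0}},
    {'unblended_cost': 6.0, 'project_costs': {'proj-a': 6.0}},
]

node_cost = sum(r['unblended_cost'] for r in rows)                   # GROUP BY node
project_cost = sum(sum(r['project_costs'].values()) for r in rows)  # sum of pod costs
unutilized = node_cost - project_cost                                # un-utilized resources
print(node_cost, project_cost, unutilized)  # 16.0 13.0 3.0
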
Code Example #5
File: task_model.py Project: bkopanichuk/sev_sed_2
class TaskExecutor(CoreBase):
    task = models.ForeignKey(Task,
                             related_name='task_executors',
                             on_delete=models.CASCADE,
                             null=True)
    executor = models.ForeignKey(CoreUser,
                                 related_name='task_executor_users',
                                 verbose_name="Виконавець",
                                 on_delete=models.PROTECT,
                                 null=True)
    executor_role = models.CharField(verbose_name="Роль",
                                     choices=EXECUTOR_ROLE,
                                     null=True,
                                     max_length=50)
    detail = models.TextField(
        verbose_name="Уточення завдання",
        max_length=500,
        null=True,
    )
    result = models.TextField(
        verbose_name="Деталі виконаного підзавдання",
        max_length=500,
        null=True,
    )
    result_file = models.FileField(upload_to=get_result_file_path,
                                   null=True,
                                   verbose_name='Результат (файл)')
    result_document = models.ForeignKey(
        BaseDocument,
        on_delete=models.PROTECT,
        related_name='task_executors',
        null=True,
        help_text=
        "Результатом виконання завдання іноді може бути документ, але не завжти."
        + " Зазвичай достатньо описати результат")
    end_date = models.DateField(verbose_name="Уточнення кінцевого терміну",
                                null=True,
                                default=None,
                                blank=True)
    status = models.CharField(verbose_name="Статус завдання",
                              choices=TASK_EXECUTOR_STATUS,
                              max_length=20,
                              default=PENDING)
    sign = models.TextField(null=True, verbose_name='Цифровий підпис')
    approve_method = models.CharField(max_length=20,
                                      verbose_name="Метод підтвердження",
                                      choices=APPROVE_METHODS,
                                      null=True)
    sign_info = JSONField(
        null=True,
        verbose_name='Детальна інформація про накладений цифровий підпис')
    history = HistoricalRecords()

    class Meta:
        verbose_name = 'Виконання завдання'
        unique_together = (('task', 'executor'), ('task', 'executor_role'))

    def __str__(self):
        return str(self.executor)

    def save(self, *args, **kwargs):
        self.author = self.task.author
        # Note: super(CoreBase, self) skips CoreBase.save and dispatches
        # to the next class in the MRO.
        super(CoreBase, self).save(*args, **kwargs)
Code Example #6
class Question(TimeStampedModel):
    """
    Survey Question model class

    Defines extra question(s) to collect relevant information from
    respondent(s) in a survey context.

    Once added, the survey will prompt Respondent(s) with the added
    questions.
    """

    INTEGER = 'integer'
    DECIMAL = 'decimal'
    TEXT = 'text'
    SELECT_ONE = 'select_one'
    SELECT_MULTIPLE = 'select_multiple'
    DATE = 'date'
    TIME = 'time'
    DATETIME = 'dateTime'
    IMAGE = 'image'
    FILE = 'file'

    TYPE_CHOICES = (
        (INTEGER, _('integer')),
        (DECIMAL, _('decimal')),
        (TEXT, _('text')),  # TODO: rename to Open ended
    )

    #: Global unique identifier for a question.
    uuid = models.UUIDField(_('UUID'),
                            default=uuid.uuid4,
                            editable=False,
                            unique=True)

    #: Survey to which a question belongs.
    survey = models.ForeignKey('surveys.Survey',
                               related_name='questions',
                               related_query_name='question',
                               verbose_name=_('survey'),
                               on_delete=models.CASCADE)

    #: System processable variable name of a question.
    name = models.SlugField(_('field name'), blank=True)

    #: Human readable label of a question.
    label = models.CharField(
        _('label'),
        max_length=255,
        help_text=_('this will be displayed to user'),
    )

    #: Prompt (input type) of a question.
    type = models.CharField(_('type'), max_length=50, choices=TYPE_CHOICES)

    #: User who created (or owns) a question.
    creator = models.ForeignKey(settings.AUTH_USER_MODEL,
                                verbose_name=_('creator'),
                                blank=True,
                                related_name='created_questions',
                                related_query_name='created_question',
                                on_delete=models.CASCADE)

    #: Possible choices (select options) of a question.
    options = JSONField(_('options'), blank=True, default=dict)

    class Meta:
        verbose_name = _('Question')
        verbose_name_plural = _('Questions')
        unique_together = ['survey', 'name']

    def __str__(self):
        """Returns string representation of a question"""
        return self.label
Code Example #7
File: experiment_groups.py Project: jonike/polyaxon
class ExperimentGroup(DiffModel, RunTimeModel, NameableModel, PersistenceModel,
                      DescribableModel, TagModel, LastStatusMixin,
                      TensorboardJobMixin):
    """A model that saves Specification/Polyaxonfiles."""
    STATUSES = ExperimentGroupLifeCycle

    uuid = models.UUIDField(default=uuid.uuid4,
                            editable=False,
                            unique=True,
                            null=False)
    user = models.ForeignKey(settings.AUTH_USER_MODEL,
                             on_delete=models.CASCADE,
                             related_name='+')
    project = models.ForeignKey(
        'db.Project',
        on_delete=models.CASCADE,
        related_name='experiment_groups',
        help_text='The project this polyaxonfile belongs to.')
    content = models.TextField(
        null=True,
        blank=True,
        help_text='The yaml content of the polyaxonfile/specification.',
        validators=[validate_group_spec_content])
    hptuning = JSONField(
        help_text='The experiment group hptuning params config.',
        null=True,
        blank=True,
        validators=[validate_group_hptuning_config])
    code_reference = models.ForeignKey('db.CodeReference',
                                       on_delete=models.SET_NULL,
                                       blank=True,
                                       null=True,
                                       related_name='+')
    status = models.OneToOneField('db.ExperimentGroupStatus',
                                  related_name='+',
                                  blank=True,
                                  null=True,
                                  editable=True,
                                  on_delete=models.SET_NULL)

    class Meta:
        app_label = 'db'
        unique_together = (('project', 'name'), )

    def __str__(self):
        return self.unique_name

    @property
    def unique_name(self):
        return GROUP_UNIQUE_NAME_FORMAT.format(
            project_name=self.project.unique_name, id=self.id)

    def can_transition(self, status):
        """Update the status of the current instance.

        Returns:
            boolean: if the instance is updated.
        """
        if not self.STATUSES.can_transition(status_from=self.last_status,
                                            status_to=status):
            _logger.info(
                '`%s` tried to transition from status `%s` to non permitted status `%s`',
                str(self), self.last_status, status)
            return False

        return True

    def set_status(self, status, message=None, **kwargs):
        if not self.can_transition(status):
            return

        ExperimentGroupStatus.objects.create(experiment_group=self,
                                             status=status,
                                             message=message)

    @cached_property
    def hptuning_config(self):
        return HPTuningConfig.from_dict(
            self.hptuning) if self.hptuning else None

    @cached_property
    def specification(self):
        return GroupSpecification.read(self.content) if self.content else None

    @cached_property
    def concurrency(self):
        if not self.hptuning_config:
            return None
        return self.hptuning_config.concurrency

    @cached_property
    def search_algorithm(self):
        if not self.hptuning_config:
            return None
        return self.hptuning_config.search_algorithm

    @cached_property
    def has_early_stopping(self):
        return bool(self.early_stopping)

    @cached_property
    def early_stopping(self):
        if not self.hptuning_config:
            return None
        return self.hptuning_config.early_stopping or []

    @property
    def scheduled_experiments(self):
        return self.experiments.filter(
            status__status=ExperimentLifeCycle.SCHEDULED).distinct()

    @property
    def succeeded_experiments(self):
        return self.experiments.filter(
            status__status=ExperimentLifeCycle.SUCCEEDED).distinct()

    @property
    def failed_experiments(self):
        return self.experiments.filter(
            status__status=ExperimentLifeCycle.FAILED).distinct()

    @property
    def stopped_experiments(self):
        return self.experiments.filter(
            status__status=ExperimentLifeCycle.STOPPED).distinct()

    @property
    def pending_experiments(self):
        return self.experiments.filter(
            status__status__in=ExperimentLifeCycle.PENDING_STATUS).distinct()

    @property
    def running_experiments(self):
        return self.experiments.filter(
            status__status__in=ExperimentLifeCycle.RUNNING_STATUS).distinct()

    @property
    def done_experiments(self):
        return self.experiments.filter(
            status__status__in=ExperimentLifeCycle.DONE_STATUS).distinct()

    @property
    def non_done_experiments(self):
        return self.experiments.exclude(
            status__status__in=ExperimentLifeCycle.DONE_STATUS).distinct()

    @property
    def n_experiments_to_start(self):
        """We need to check if we are allowed to start the experiment
        If the polyaxonfile has concurrency we need to check how many experiments are running.
        """
        return self.concurrency - self.running_experiments.count()

    @property
    def iteration(self):
        return self.iterations.last()

    @property
    def iteration_data(self):
        return self.iteration.data if self.iteration else None

    @property
    def current_iteration(self):
        return self.iterations.count()

    def should_stop_early(self):
        filters = []
        for early_stopping_metric in self.early_stopping:
            comparison = ('gte' if Optimization.maximize(
                early_stopping_metric.optimization) else 'lte')
            metric_filter = 'metric__values__{}__{}'.format(
                early_stopping_metric.metric, comparison)
            filters.append({metric_filter: early_stopping_metric.value})
        if filters:
            return self.experiments.filter(
                functools.reduce(OR, [Q(**f) for f in filters])).exists()
        return False

    def get_annotated_experiments_with_metric(self,
                                              metric,
                                              experiment_ids=None):
        query = self.experiments
        if experiment_ids:
            query = query.filter(id__in=experiment_ids)
        annotation = {metric: KeyTransform(metric, 'metric__values')}
        return query.annotate(**annotation)

    def get_ordered_experiments_by_metric(self, experiment_ids, metric,
                                          optimization):
        query = self.get_annotated_experiments_with_metric(
            metric=metric, experiment_ids=experiment_ids)

        metric_order_by = '{}{}'.format(
            '-' if Optimization.maximize(optimization) else '', metric)
        return query.order_by(metric_order_by)

    def get_experiments_metrics(self, metric, experiment_ids=None):
        query = self.get_annotated_experiments_with_metric(
            metric=metric, experiment_ids=experiment_ids)
        return query.values_list('id', metric)

    @cached_property
    def search_manager(self):
        from hpsearch.search_managers import get_search_algorithm_manager

        return get_search_algorithm_manager(
            hptuning_config=self.hptuning_config)

    @cached_property
    def iteration_manager(self):
        from hpsearch.iteration_managers import get_search_iteration_manager

        return get_search_iteration_manager(experiment_group=self)

    @property
    def iteration_config(self):
        from hpsearch.schemas import get_iteration_config

        if self.iteration_data and self.search_algorithm:
            return get_iteration_config(search_algorithm=self.search_algorithm,
                                        iteration=self.iteration_data)
        return None

    def get_suggestions(self):
        iteration_config = self.iteration_config
        if iteration_config:
            return self.search_manager.get_suggestions(
                iteration_config=iteration_config)
        return self.search_manager.get_suggestions()
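
should_stop_early above turns each early-stopping metric into a JSON-key lookup filter and ORs them together. A sketch of the filter construction with plain dicts standing in for Django Q objects; the metric names are hypothetical:

def build_early_stopping_filters(early_stopping, maximize):
    # Mirrors the loop in should_stop_early: gte for maximized metrics,
    # lte for minimized ones.
    filters = []
    for metric, optimization, value in early_stopping:
        comparison = 'gte' if maximize(optimization) else 'lte'
        filters.append({'metric__values__{}__{}'.format(metric, comparison): value})
    return filters

maximize = lambda opt: opt == 'maximize'
print(build_early_stopping_filters(
    [('accuracy', 'maximize', 0.99), ('loss', 'minimize', 0.01)], maximize))
# [{'metric__values__accuracy__gte': 0.99}, {'metric__values__loss__lte': 0.01}]
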
Code Example #8
File: models.py Project: andersonfrailey/comp
class Simulation(models.Model):
    # TODO: dimension needs to go
    dimension_name = "Dimension--needs to go"

    inputs = models.OneToOneField(
        Inputs, on_delete=models.CASCADE, related_name="outputs"
    )
    meta_data = JSONField(default=None, blank=True, null=True)
    outputs = JSONField(default=None, blank=True, null=True)
    aggr_outputs = JSONField(default=None, blank=True, null=True)
    error_text = models.CharField(null=True, blank=True, default=None, max_length=4000)
    owner = models.ForeignKey(
        "users.Profile", on_delete=models.PROTECT, null=True, related_name="sims"
    )
    sponsor = models.ForeignKey(
        "users.Profile",
        on_delete=models.SET_NULL,
        null=True,
        related_name="sponsored_sims",
    )
    project = models.ForeignKey(
        "users.Project", on_delete=models.PROTECT, related_name="sims"
    )
    # run-time in seconds
    run_time = models.IntegerField(default=0)
    # run cost can be very small. ex: 4 sec * ($0.09/hr)/3600
    run_cost = models.DecimalField(max_digits=9, decimal_places=4, default=0.0)
    creation_date = models.DateTimeField(
        default=make_aware(datetime.datetime(2015, 1, 1))
    )
    exp_comp_datetime = models.DateTimeField(
        default=make_aware(datetime.datetime(2015, 1, 1))
    )
    job_id = models.UUIDField(blank=True, default=None, null=True)
    model_vers = models.CharField(blank=True, default=None, null=True, max_length=50)
    webapp_vers = models.CharField(blank=True, default=None, null=True, max_length=50)

    def get_absolute_url(self):
        kwargs = {
            "pk": self.pk,
            "title": self.project.title,
            "username": self.project.owner.user.username,
        }
        return reverse("outputs", kwargs=kwargs)

    def get_absolute_edit_url(self):
        kwargs = {
            "pk": self.pk,
            "title": self.project.title,
            "username": self.project.owner.user.username,
        }
        return reverse("edit", kwargs=kwargs)

    def get_absolute_download_url(self):
        kwargs = {
            "pk": self.pk,
            "title": self.project.title,
            "username": self.project.owner.user.username,
        }
        return reverse("download", kwargs=kwargs)

    def zip_filename(self):
        return f"{self.project.title}_{self.pk}.zip"

    def json_filename(self):
        return f"{self.project.title}_{self.pk}.json"

    @cached_property
    def dimension(self):
        # return unique values set at the dimension level.
        return list({item["dimension"] for item in self.outputs if item["dimension"]})

    @property
    def effective_cost(self):
        return self.project.run_cost(self.run_time, adjust=True)
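
The run_cost comment above gives a worked example of why four decimal places are needed. The arithmetic, spelled out:

run_time_seconds = 4
hourly_rate = 0.09  # dollars per hour, from the comment above
run_cost = run_time_seconds * hourly_rate / 3600
print(round(run_cost, 4))  # 0.0001 -- small enough to need decimal_places=4
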
Code Example #9
class AbstractBaseModelImportAttempt(TrackedModel, ImportStatusModel):
    PROPAGATED_FIELDS = ("status", )

    importee_field_data = JSONField(
        encoder=DjangoErrorJSONEncoder,
        default=dict,
        help_text="The original data used to create this Audit",
    )
    errors = JSONField(
        encoder=DjangoErrorJSONEncoder,
        default=dict,
        help_text=
        "Stores any errors encountered during the creation of the auditee",
    )
    error_summary = JSONField(
        encoder=DjangoErrorJSONEncoder,
        default=dict,
        help_text=
        "Stores any 'summary' information that might need to be associated",
    )
    imported_by = SensibleCharField(max_length=128, default=None)
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)

    # file_import_attempt = models.ForeignKey(
    #     "django_import_data.FileImportAttempt",
    #     related_name="model_import_attempts",
    #     on_delete=models.CASCADE,
    #     null=True,
    #     blank=True,
    #     help_text="Reference to the FileImportAttempt this was created from",
    # )

    model_importer = models.ForeignKey(
        "django_import_data.ModelImporter",
        related_name="model_import_attempts",
        on_delete=models.CASCADE,
        help_text="Reference to the ModelImporter that made this attempt",
    )

    @property
    def row_data(self):
        return self.model_importer.row_data

    @property
    def file_import_attempt(self):
        return self.model_importer.row_data.file_import_attempt

    class Meta:
        abstract = True
        ordering = ["-created_on"]

    def __str__(self):
        if self.file_import_attempt:
            return (
                f"{self.content_type} imported from "
                f"{os.path.basename(self.imported_from)} ({self.STATUSES[self.status].value})"
            )

        return f"{self.content_type}: {self.STATUSES[self.status].value}"

    def save(self,
             *args,
             propagate_derived_values=True,
             derive_cached_values=False,
             **kwargs):
        if self.status == ImportStatusModel.STATUSES.pending.db_value:
            if self.errors:
                self.status = ImportStatusModel.STATUSES.rejected.db_value
            else:
                self.status = ImportStatusModel.STATUSES.created_clean.db_value

        super().save(*args, **kwargs)
        if propagate_derived_values:
            self.model_importer.save(
                propagate_derived_values=propagate_derived_values)

    def summary(self):
        return (
            "I {created_str} a(n) {importee_class} instance {importee_str}from "
            "data {importee_field_data} I converted from row {row_data}, which "
            "I found in file {file_path}").format(
                created_str="created"
                if self.importee else "attempted to create",
                importee_class=self.importee_class.__name__,
                importee_str=f"({self.importee}) " if self.importee else "",
                importee_field_data=pformat(self.importee_field_data),
                row_data=pformat(self.row_data.data),
                file_path=self.file_import_attempt.imported_from
                if self.file_import_attempt else None,
            )

    @property
    def importee_class(self):
        return self.content_type.model_class()

    # IF NO GFK
    @property
    def importee(self):
        """Get the object we are auditing, if it exists, otherwise return None"""
        return getattr(self, self.content_type.model, None)

    # IF NO GFK
    @importee.setter
    def importee(self, instance):
        """Set auditee to given object"""
        return setattr(self, self.content_type.model, instance)

    @cached_property
    def imported_from(self):
        return (self.file_import_attempt.imported_from
                if self.file_import_attempt else None)

    # TODO: Unit tests!
    @transaction.atomic
    def delete_imported_models(self):
        """Delete all models imported by this MIA"""

        num_deletions = 0
        deletions = Counter()
        # This MIA targets exactly one ContentType...
        ct = self.content_type
        # ...get a queryset of all model instances that were imported...
        to_delete = ct.model_class().objects.filter(
            model_import_attempt=self)
        # ...and delete them:
        num_deletions_for_model_class, deletions_for_model_class = (
            to_delete.delete())
        num_deletions += num_deletions_for_model_class
        deletions += deletions_for_model_class

        self.current_status = self.CURRENT_STATUSES.deleted.db_value
        self.save()
        self.refresh_from_db()
        print(
            f"MIA {self.id} deleted its imported model and set its current status to {self.current_status}"
        )
        return (num_deletions, deletions)

    def gen_error_summary(self):
        summary_items = set()
        for conversion_error in self.errors.get("conversion_errors", []):
            summary_items.update(conversion_error["to_fields"])

        for form_error in self.errors.get("form_errors", []):
            summary_items.add(form_error["field"])

        return summary_items
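
gen_error_summary above flattens the errors JSON into a set of offending field names. A standalone sketch with a hypothetical errors payload:

errors = {
    'conversion_errors': [{'to_fields': ['latitude', 'longitude']}],
    'form_errors': [{'field': 'call_sign'}],
}

summary_items = set()
for conversion_error in errors.get('conversion_errors', []):
    summary_items.update(conversion_error['to_fields'])
for form_error in errors.get('form_errors', []):
    summary_items.add(form_error['field'])
print(sorted(summary_items))  # ['call_sign', 'latitude', 'longitude']
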
Code Example #10
File: tax_lots.py Project: nW-fr/seed
class TaxLotState(models.Model):
    # The state field names should match pretty closely to the PDF, just
    # because these are the most 'public' fields in terms of
    # communicating with the cities.

    confidence = models.FloatField(default=0, null=True, blank=True)

    # Support finding the property by the import_file
    import_file = models.ForeignKey(ImportFile, null=True, blank=True)

    # Add organization to the tax lot states
    organization = models.ForeignKey(Organization)
    data_state = models.IntegerField(choices=DATA_STATE, default=DATA_STATE_UNKNOWN)
    merge_state = models.IntegerField(choices=MERGE_STATE, default=MERGE_STATE_UNKNOWN, null=True)

    custom_id_1 = models.CharField(max_length=255, null=True, blank=True)

    jurisdiction_tax_lot_id = models.CharField(max_length=2047, null=True, blank=True)
    block_number = models.CharField(max_length=255, null=True, blank=True)
    district = models.CharField(max_length=255, null=True, blank=True)
    address_line_1 = models.CharField(max_length=255, null=True, blank=True)
    address_line_2 = models.CharField(max_length=255, null=True, blank=True)
    normalized_address = models.CharField(max_length=255, null=True, blank=True, editable=False)

    city = models.CharField(max_length=255, null=True, blank=True)
    state = models.CharField(max_length=255, null=True, blank=True)
    postal_code = models.CharField(max_length=255, null=True, blank=True)
    number_properties = models.IntegerField(null=True, blank=True)

    extra_data = JSONField(default=dict, blank=True)

    class Meta:
        index_together = [
            ['import_file', 'data_state'],
            ['import_file', 'data_state', 'merge_state']
        ]

    def __unicode__(self):
        return u'TaxLot State - %s' % self.pk

    def promote(self, cycle):
        """
            Promote the TaxLotState to the view table for the given cycle

            Args:
                cycle: Cycle to assign the view

            Returns:
                The resulting TaxLotView (note that it is not returning the
                TaxLotState)

        """
        # First check if the cycle and the TaxLotState already have a view
        tlvs = TaxLotView.objects.filter(cycle=cycle, state=self)

        if len(tlvs) == 0:
            # _log.debug("Found 0 TaxLotViews, adding TaxLot, promoting")
            # There are no TaxLotViews for this tax lot state and cycle.
            # Most likely there is nothing to match right now, so just
            # promote it to the view

            # Need to create a taxlot for this state
            if self.organization is None:
                _log.error("organization is None")

            taxlot = TaxLot.objects.create(
                organization=self.organization
            )

            tlv = TaxLotView.objects.create(taxlot=taxlot, cycle=cycle, state=self)

            # This is legacy but still needed here to have the tests pass.
            self.data_state = DATA_STATE_MATCHING

            self.save()

            return tlv
        elif len(tlvs) == 1:
            # _log.debug("Found 1 PropertyView... Nothing to do")
            # PropertyView already exists for cycle and state. Nothing to do.

            return tlvs[0]
        else:
            _log.error("Found %s PropertyView" % len(tlvs))
            _log.error("This should never occur, famous last words?")

            return None

    def to_dict(self, fields=None, include_related_data=True):
        """
        Returns a dict version of the TaxLotState, either with all fields
        or masked to just those requested.
        """

        # TODO: make this a serializer and/or merge with PropertyState.to_dict
        if fields:
            model_fields, ed_fields = split_model_fields(self, fields)
            extra_data = self.extra_data
            ed_fields = filter(lambda f: f in extra_data, ed_fields)

            result = {
                field: getattr(self, field) for field in model_fields
            }
            result['extra_data'] = {
                field: extra_data[field] for field in ed_fields
            }

            # always return id's and canonical_building id's
            result['id'] = result['pk'] = self.pk

            # should probably also return children, parents, and coparent
            # result['children'] = map(lambda c: c.id, self.children.all())
            # result['parents'] = map(lambda p: p.id, self.parents.all())
            # result['co_parent'] = (self.co_parent and self.co_parent.pk)
            # result['coparent'] = (self.co_parent and {
            #     field: self.co_parent.pk for field in ['pk', 'id']
            #     })

            return result

        d = obj_to_dict(self, include_m2m=include_related_data)

        # if include_related_data:
        # d['parents'] = list(self.parents.values_list('id', flat=True))
        # d['co_parent'] = self.co_parent.pk if self.co_parent else None

        return d

    def save(self, *args, **kwargs):
        # Calculate and save the normalized address
        if self.address_line_1 is not None:
            self.normalized_address = normalize_address_str(self.address_line_1)
        else:
            self.normalized_address = None

        return super(TaxLotState, self).save(*args, **kwargs)

    def history(self):
        """
        Return the history of the taxlot state by parsing through the auditlog. Returns only the ids
        of the parent states and some descriptions.

              master
              /    \
             /      \
          parent1  parent2

        In the records, parent2 is the most recent, so make sure to navigate
        parent2 first, since we return the data in reverse order (that is,
        most recent changes first).

        :return: list, history as a list, and the master record
        """

        """Return history in reverse order."""
        history = []
        master = {
            'state_id': self.id,
            'state_data': self,
            'date_edited': None,
        }

        def record_dict(log):
            filename = None if not log.import_filename else path.basename(log.import_filename)
            if filename:
                # Attempt to remove NamedTemporaryFile suffix
                name, ext = path.splitext(filename)
                pattern = re.compile('(.*?)(_[a-zA-Z0-9]{7})$')
                match = pattern.match(name)
                if match:
                    filename = match.groups()[0] + ext
            return {
                'state_id': log.state.id,
                'state_data': log.state,
                'date_edited': convert_to_js_timestamp(log.created),
                'source': log.get_record_type_display(),
                'filename': filename,
                # 'changed_fields': json.loads(log.description) if log.record_type == AUDIT_USER_EDIT else None
            }

        log = TaxLotAuditLog.objects.select_related('state', 'parent1', 'parent2').filter(
            state_id=self.id
        ).order_by('-id').first()

        if log:
            master = {
                'state_id': log.state.id,
                'state_data': log.state,
                'date_edited': convert_to_js_timestamp(log.created),
            }

            # Traverse parents and add to history
            if log.name in ['Manual Match', 'System Match', 'Merge current state in migration']:
                done_searching = False

                while not done_searching:
                    # if there are no parents, then break out immediately
                    if (log.parent1_id is None and log.parent2_id is None) or log.name == 'Manual Edit':
                        break

                    # initialize the tree to None every time. If no new tree is found, then we will not iterate
                    tree = None

                    # Check if parent2 has any other parents or is the original import creation. Start with parent2
                    # because parent2 will be the most recent import file.
                    if log.parent2:
                        if log.parent2.name in ['Import Creation', 'Manual Edit']:
                            record = record_dict(log.parent2)
                            history.append(record)
                        elif log.parent2.name == 'System Match' and log.parent2.parent1.name == 'Import Creation' and \
                                log.parent2.parent2.name == 'Import Creation':
                            # Handle case where an import file matches within itself, and proceeds to match with
                            # existing records
                            record = record_dict(log.parent2.parent2)
                            history.append(record)
                            record = record_dict(log.parent2.parent1)
                            history.append(record)
                        else:
                            tree = log.parent2

                    if log.parent1:
                        if log.parent1.name in ['Import Creation', 'Manual Edit']:
                            record = record_dict(log.parent1)
                            history.append(record)
                        elif log.parent1.name == 'System Match' and log.parent1.parent1.name == 'Import Creation' and \
                                log.parent1.parent2.name == 'Import Creation':
                            # Handle case where an import file matches within itself, and proceeds to match with
                            # existing records
                            record = record_dict(log.parent1.parent2)
                            history.append(record)
                            record = record_dict(log.parent1.parent1)
                            history.append(record)
                        else:
                            tree = log.parent1

                    if not tree:
                        done_searching = True
                    else:
                        log = tree
            elif log.name == 'Manual Edit':
                record = record_dict(log.parent1)
                history.append(record)
            elif log.name == 'Import Creation':
                record = record_dict(log)
                history.append(record)

        return history, master

    @classmethod
    def coparent(cls, state_id):
        """
        Return the coparent of the TaxLotState. This will query the TaxLotAuditLog table to
        determine if there is a coparent and return it if found. The state_id needs to be the
        ID of the base state from when the original record was imported.

        :param state_id: integer, state id for which to find the coparent
        :return: tuple of (list of coparent dicts, count of coparents)
        """

        coparents = list(
            TaxLotState.objects.raw("""
                    WITH creation_id AS (
                        SELECT
                          pal.id,
                          pal.state_id AS original_state_id
                        FROM seed_taxlotauditlog pal
                        WHERE pal.state_id = %s AND
                              pal.name = 'Import Creation' AND
                              pal.import_filename IS NOT NULL
                    ), audit_id AS (
                        SELECT
                          audit_log.id,
                          audit_log.state_id,
                          audit_log.parent1_id,
                          audit_log.parent2_id,
                          audit_log.parent_state1_id,
                          audit_log.parent_state2_id,
                          cid.original_state_id
                        FROM creation_id cid, seed_taxlotauditlog audit_log
                        WHERE audit_log.parent1_id = cid.id OR audit_log.parent2_id = cid.id
                    )
                    SELECT
                      ps.id,
                      ps.custom_id_1,
                      ps.block_number,
                      ps.district,
                      ps.address_line_1,
                      ps.address_line_2,
                      ps.city,
                      ps.state,
                      ps.postal_code,
                      ps.extra_data,
                      ps.number_properties,
                      ps.jurisdiction_tax_lot_id,
                      NULL
                    FROM seed_taxlotstate ps, audit_id aid
                    WHERE (ps.id = aid.parent_state1_id AND
                           aid.parent_state1_id <> aid.original_state_id) OR
                          (ps.id = aid.parent_state2_id AND
                           aid.parent_state2_id <> aid.original_state_id);""", [int(state_id)])
        )

        # reduce this down to just the fields that were returned and convert to dict. This is
        # important because the fields that were not queried will be deferred and require a new
        # query to retrieve.
        keep_fields = ['id', 'custom_id_1', 'jurisdiction_tax_lot_id', 'block_number', 'district',
                       'address_line_1', 'address_line_2', 'city', 'state', 'postal_code',
                       'number_properties', 'extra_data']
        coparents = [{key: getattr(c, key) for key in keep_fields} for c in coparents]

        return coparents, len(coparents)

    @classmethod
    def merge_relationships(cls, merged_state, state1, state2):
        """Stub to implement if merging TaxLotState relationships is needed"""
        return None
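
# A minimal, self-contained sketch of the parent traversal described in the
# history() docstring above. AuditNode is a hypothetical stand-in for a
# TaxLotAuditLog row; only the reverse-order walk (parent2 before parent1,
# most recent first) is modeled, not the full matching rules.
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class AuditNode:
    name: str
    parent1: Optional["AuditNode"] = None
    parent2: Optional["AuditNode"] = None


def walk_history(log: AuditNode) -> List[str]:
    history = []
    while log.parent1 or log.parent2:
        tree = None
        # parent2 holds the most recent import, so visit it first
        for parent in (log.parent2, log.parent1):
            if parent is None:
                continue
            if parent.name == 'Import Creation':
                history.append(parent.name)
            else:
                tree = parent  # keep descending on the unresolved branch
        if tree is None:
            break
        log = tree
    return history


# Two imported records matched into one: both creations appear,
# the more recent one (parent2) first.
merged = AuditNode('System Match',
                   parent1=AuditNode('Import Creation'),
                   parent2=AuditNode('Import Creation'))
assert walk_history(merged) == ['Import Creation', 'Import Creation']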
コード例 #11
0
class RowData(ImportStatusModel, models.Model):
    file_import_attempt = models.ForeignKey(
        "django_import_data.FileImportAttempt",
        on_delete=models.CASCADE,
        related_name="row_datas",
    )
    data = JSONField(
        help_text="Stores a 'row' (or similar construct) of data as it "
        "was originally encountered",
        encoder=DjangoJSONEncoder,
    )
    row_num = models.PositiveIntegerField()
    headers = JSONField(null=True)
    errors = JSONField(null=True, default=dict)

    objects = RowDataManager()

    def __str__(self):
        return (
            f"Row {self.row_num} from '{self.file_import_attempt.name}' via "
            f"'{self.file_import_attempt.importer_name}'")

    class Meta:
        verbose_name = "Row Data"
        verbose_name_plural = "Row Data"

    def get_absolute_url(self):
        return reverse("rowdata_detail", args=[str(self.id)])

    def derive_status(self):
        """RD status is the most severe status of its MIs"""
        if self.model_importers.exists():
            status = (self.model_importers.order_by("-status").values_list(
                "status", flat=True).first())
        else:
            status = self.STATUSES.empty.db_value

        return status

    def derive_cached_values(self):
        self.status = self.derive_status()

    def save(self,
             *args,
             derive_cached_values=True,
             propagate_derived_values=True,
             **kwargs):
        if derive_cached_values:
            self.derive_cached_values()
        super().save(*args, **kwargs)

        if propagate_derived_values:
            self.file_import_attempt.save(
                propagate_derived_values=propagate_derived_values)

    def errors_by_alias(self):
        _errors_by_alias = defaultdict(list)
        all_errors = ModelImportAttempt.objects.filter(
            model_importer__row_data=self).values_list("errors", flat=True)
        for error in all_errors:
            if "form_errors" in error:
                for form_error in error["form_errors"]:
                    _errors_by_alias[form_error.pop("alias")[0]].append({
                        **form_error,
                        "error_type": "form",
                    })
            if "conversion_errors" in error:
                for conversion_error in error["conversion_errors"]:
                    for alias in [
                            alias for aliases in
                            conversion_error["aliases"].values()
                            for alias in aliases
                    ]:
                        conversion_error.pop("aliases")
                        _errors_by_alias[alias].append({
                            **conversion_error, "error_type":
                            "conversion"
                        })

        return _errors_by_alias
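
# derive_status() above implements a "most severe child wins" roll-up: a
# RowData takes the highest status of its model importers, and save() then
# pushes the derived value up to the FileImportAttempt. A standalone sketch,
# assuming (as order_by("-status") suggests) that statuses are ordered
# integers where higher means more severe; the names below are illustrative,
# not the library's actual STATUSES.
EMPTY, CREATED, REJECTED = 0, 1, 2


def derive_status(child_statuses):
    # most severe child wins; fall back to "empty" when there are no children
    return max(child_statuses, default=EMPTY)


assert derive_status([CREATED, REJECTED, CREATED]) == REJECTED
assert derive_status([]) == EMPTY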
コード例 #12
0
class AbstractBaseFileImportAttempt(ImportStatusModel, CurrentStatusModel,
                                    TrackedModel):
    """Represents an individual attempt at an import of a "batch" of Importers"""

    PROPAGATED_FIELDS = ("status", )

    file_importer = NotImplemented

    # TODO: Make this a FileField?
    imported_from = SensibleCharField(
        max_length=512, help_text="Path to file that this was imported from")
    imported_by = SensibleCharField(max_length=128, blank=True)
    creations = JSONField(encoder=DjangoErrorJSONEncoder,
                          default=dict,
                          null=True)
    info = JSONField(default=dict,
                     null=True,
                     help_text="Stores any file-level info about the import")
    errors = JSONField(
        encoder=DjangoErrorJSONEncoder,
        default=dict,
        null=True,
        help_text="Stores any file-level errors encountered during import",
    )
    ignored_headers = ArrayField(
        SensibleCharField(max_length=128),
        null=True,
        blank=True,
        help_text="Headers that were ignored during import",
    )
    hash_when_imported = SensibleCharField(max_length=40, blank=True)

    class Meta:
        abstract = True

    def __str__(self):
        return f"{self.name} <{self.importer_name}>"

    @property
    def importer_name(self):
        return self.imported_by.split(".")[-1]

    def derive_status(self):
        """FIA status is the most severe status of its RDs"""
        if self.row_datas.exists():
            status = (self.row_datas.order_by("-status").values_list(
                "status", flat=True).first())
        else:
            status = self.STATUSES.empty.db_value

        return status

    def derive_cached_values(self):
        self.status = self.derive_status()

    def get_creations(self):
        creations = {}
        for ct in ContentType.objects.filter(id__in=self.row_datas.values(
                "model_importers__model_import_attempts__content_type")
                                             ).distinct():
            model_class = ct.model_class()
            creations[model_class] = (model_class.objects.filter(
                model_import_attempt__model_importer__row_data__file_import_attempt
                =self).select_related(
                    "model_import_attempt__model_importer__row_data"
                ).order_by(
                    "model_import_attempt__model_importer__row_data__row_num"))
        return creations

    def save(self,
             *args,
             derive_cached_values=True,
             propagate_derived_values=True,
             **kwargs):
        if derive_cached_values:
            self.derive_cached_values()
        super().save(*args, **kwargs)

        if propagate_derived_values:
            self.file_importer.save(
                propagate_derived_values=propagate_derived_values)
            # ModelImportAttempt.objects.filter(
            #     model_importer__row_data__file_import_attempt__id=self.id
            # ).update(current_status=self.current_status)

    @cached_property
    def name(self):
        return os.path.basename(self.imported_from)

    @cached_property
    def models_to_reimport(self):
        try:
            return self.importer.Command.MODELS_TO_REIMPORT
        except AttributeError:
            return []

    # TODO: Unit tests!
    @transaction.atomic
    def delete_imported_models(self, propagate=True):
        """Delete all models imported by this FIA"""

        num_deletions = 0
        deletions = Counter()
        if self.models_to_reimport:
            model_classes_to_delete = self.models_to_reimport
            print(f"Using given MODELS_TO_REIMPORT: {model_classes_to_delete}")
        else:
            model_classes_to_delete = [
                ct.model_class() for ct in ContentType.objects.filter(
                    id__in=self.row_datas.values(
                        "model_importers__model_import_attempts__content_type")
                ).prefetch_related(
                    "model_importers__model_import_attempts__content_type").
                distinct()
            ]
            print(
                f"Derived model_classes_to_delete: {model_classes_to_delete}")
        # For every ContentType imported by this FIA...
        for model_class in model_classes_to_delete:
            to_delete = model_class.objects.filter(
                model_import_attempt__model_importer__row_data__file_import_attempt
                =self)
            num_deletions_for_model_class, deletions_for_model_class = (
                to_delete.delete())
            num_deletions += num_deletions_for_model_class
            deletions += deletions_for_model_class

        self.current_status = self.CURRENT_STATUSES.deleted.db_value
        self.save()
        return (num_deletions, deletions)

    @cached_property
    def importer(self):
        return import_module(self.imported_by)

    def get_form_maps_used_during_import(self):
        return self.importer.Command.FORM_MAPS

    def get_field_maps_used_during_import(self):
        # call get_form_maps_used_during_import() here; calling
        # get_field_maps_used_during_import() again would recurse forever
        form_maps = self.get_form_maps_used_during_import()
        return {
            form_map.get_name(): form_map.field_maps
            for form_map in form_maps
        }

    def condensed_errors_by_row_as_dicts(self):
        error_report = self.errors_by_row_as_dict()
        print(error_report)
        error_to_row_nums = defaultdict(list)

        condensed_errors = []
        for row_num, error in error_report.items():
            if "form_errors" in error:
                for form_error in error["form_errors"]:
                    condensed_errors.append({
                        "aliases": form_error["alias"],
                        "error":
                        f"{form_error['errors']}; got value '{form_error['value']}'",
                        "error_type": "form",
                        "row_num": row_num,
                        # "value": form_error["value"],
                    })
            if "conversion_errors" in error:
                for conversion_error in error["conversion_errors"]:
                    condensed_errors.append({
                        "aliases":
                        tuple(alias for aliases in
                              conversion_error["aliases"].values()
                              for alias in aliases),
                        "error":
                        conversion_error["error"],
                        "error_type":
                        "conversion",
                        "row_num":
                        row_num,
                        # "value": conversion_error["value"],
                    })

        for condensed_error in condensed_errors:
            row_num = condensed_error.pop("row_num")
            error_to_row_nums[json.dumps(condensed_error)].append(row_num)

        condensed = [{
            "row_nums": get_str_from_nums(row_nums),
            **json.loads(error)
        } for error, row_nums in error_to_row_nums.items()]
        return condensed

    def errors_by_row_as_dict(self):
        all_errors = ModelImportAttempt.objects.filter(
            model_importer__row_data__file_import_attempt=self).values_list(
                "model_importer__row_data__row_num", "errors")
        error_report = {
            row_num: errors
            for row_num, errors in all_errors if errors
        }
        return error_report

    def errors_by_row(self):
        return json.dumps(self.errors_by_row_as_dict(), indent=4)

    def acknowledge(self):
        if self.current_status == self.CURRENT_STATUSES.active.db_value:
            self.current_status = self.CURRENT_STATUSES.acknowledged.db_value
            self.save()
            return True
        return False

    def unacknowledge(self):
        if self.current_status == self.CURRENT_STATUSES.acknowledged.db_value:
            self.current_status = self.CURRENT_STATUSES.active.db_value
            self.save()
            return True
        return False

    @property
    def is_acknowledged(self):
        return self.current_status == self.CURRENT_STATUSES.acknowledged.db_value

    @property
    def is_active(self):
        return self.current_status == self.CURRENT_STATUSES.active.db_value

    @property
    def is_deleted(self):
        return self.current_status == self.CURRENT_STATUSES.deleted.db_value
コード例 #13
0
class AbstractBaseFileImporterBatch(ImportStatusModel, TrackedModel):

    command = SensibleCharField(max_length=64, default=None)
    args = ArrayField(SensibleCharField(max_length=256))
    kwargs = JSONField()
    errors = JSONField(null=True, default=dict)

    @cached_property
    def cli(self):
        clean_options = {}
        if self.kwargs:
            kwargs = self.kwargs
        else:
            kwargs = {}
        for key, value in kwargs.items():
            if key not in (
                    "settings",
                    "start_index",
                    "traceback",
                    "verbosity",
                    "skip_checks",
                    "no_color",
                    "pythonpath",
            ):
                if isinstance(value, list):
                    value = " ".join(value)

                if isinstance(key, str):
                    key = key.replace("_", "-")

                clean_options[key] = value

        importer_name = self.command
        args_str = " ".join(self.args)
        options_str = ""
        for key, value in clean_options.items():
            if isinstance(value, bool):
                if value:
                    options_str += f" --{key}"
            elif value is not None:
                options_str += f" --{key} {value}"
        return f"python manage.py {importer_name} {args_str}{options_str}"

    class Meta:
        abstract = True

    def __str__(self):
        return self.cli

    @transaction.atomic
    def delete_imported_models(self, propagate=True):
        """Delete all models imported by this FIB"""

        total_num_fi_deletions = 0
        total_num_fia_deletions = 0
        total_num_mia_deletions = 0
        all_mia_deletions = Counter()
        # For every FileImporter in this FIB...
        for fi in self.file_importers.all():
            # ...delete the models it imported. Use a distinct name for the
            # per-FI deletions counter: reusing all_mia_deletions here
            # clobbered the accumulator and then doubled it via
            # "all_mia_deletions += all_mia_deletions"
            num_fia_deletions, num_mia_deletions, mia_deletions = fi.delete_imported_models(
                propagate=False)
            total_num_fi_deletions += 1
            total_num_fia_deletions += num_fia_deletions
            total_num_mia_deletions += num_mia_deletions
            all_mia_deletions += mia_deletions

        return (total_num_fia_deletions, total_num_mia_deletions,
                all_mia_deletions)

    @transaction.atomic
    def reimport(self, paths=None):
        """Delete then reimport all models imported by this FIB"""
        if paths is not None:
            args = paths
        else:
            args = self.args
        num_fias_deleted, num_models_deleted, deletions = self.delete_imported_models(
            propagate=True)
        call_command(self.command, *args, **{**self.kwargs, "overwrite": True})
        return FileImporterBatch.objects.order_by("created_on").last()

    def derive_status(self):
        """FIB status is the most severe status of its most recent FIs"""
        if self.file_importers.exists():
            status = (self.file_importers.order_by("-status").values_list(
                "status", flat=True).first())
        else:
            status = self.STATUSES.empty.db_value
        return status

    def derive_cached_values(self):
        self.status = self.derive_status()

    def save(self,
             *args,
             derive_cached_values=True,
             propagate_derived_values=True,
             **kwargs):
        if derive_cached_values:
            self.derive_cached_values()

        super().save(*args, **kwargs)
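
# The cli property above reconstructs the original manage.py invocation from
# the stored command/args/kwargs. A rough standalone version of the same
# string assembly, under the same skip-list of Django-internal options;
# rebuild_cli and its sample arguments are illustrative, not part of the
# original model.
SKIPPED_OPTIONS = {"settings", "start_index", "traceback", "verbosity",
                   "skip_checks", "no_color", "pythonpath"}


def rebuild_cli(command, args, kwargs):
    parts = ["python manage.py {}".format(command), *args]
    for key, value in kwargs.items():
        if key in SKIPPED_OPTIONS or value is None:
            continue
        key = key.replace("_", "-")
        if isinstance(value, bool):
            if value:  # booleans render as bare flags
                parts.append("--{}".format(key))
        elif isinstance(value, list):
            parts.append("--{} {}".format(key, " ".join(value)))
        else:
            parts.append("--{} {}".format(key, value))
    return " ".join(parts)


assert (rebuild_cli("import_files", ["data.csv"], {"dry_run": True, "limit": 10})
        == "python manage.py import_files data.csv --dry-run --limit 10")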
コード例 #14
0
ファイル: test_json.py プロジェクト: 7924102/django
def test_not_serializable(self):
    field = JSONField()
    with self.assertRaises(exceptions.ValidationError) as cm:
        field.clean(datetime.timedelta(days=1), None)
    self.assertEqual(cm.exception.code, 'invalid')
    self.assertEqual(cm.exception.message % cm.exception.params, "Value must be valid JSON.")
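
# The test above hinges on timedelta not being JSON-serializable; the same
# failure can be reproduced with the standard library alone.
import datetime
import json

try:
    json.dumps(datetime.timedelta(days=1))
except TypeError as exc:
    print(exc)  # Object of type timedelta is not JSON serializable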
コード例 #15
0
class Task(models.Model):
    """Task object

    Task object designed to record the metadata around distributed celery tasks."""

    id = models.CharField(max_length=255,
                          unique=True,
                          db_index=True,
                          null=False,
                          blank=False,
                          primary_key=True,
                          default=fast_uuid)

    main_task = models.ForeignKey('self',
                                  blank=True,
                                  null=True,
                                  on_delete=CASCADE)

    name = models.CharField(max_length=100,
                            db_index=True,
                            null=True,
                            blank=True)
    description = models.CharField(max_length=1024,
                                   db_index=False,
                                   null=True,
                                   blank=True)
    date_start = models.DateTimeField(default=now, db_index=True)
    date_work_start = models.DateTimeField(blank=True,
                                           null=True,
                                           db_index=True)
    user = models.ForeignKey(User,
                             db_index=True,
                             blank=True,
                             null=True,
                             on_delete=CASCADE)
    celery_metadata = JSONField(blank=True, null=True)
    metadata = JSONField(blank=True, null=True)
    args = JSONField(blank=True, null=True, db_index=True)
    kwargs = JSONField(blank=True, null=True, db_index=True)
    sequential_tasks = JSONField(blank=True, null=True, db_index=True)
    sequential_tasks_started = models.BooleanField(default=False,
                                                   db_index=True)
    group_id = StringUUIDField(blank=True, null=True, db_index=True)
    source_data = models.TextField(blank=True, null=True)
    visible = models.BooleanField(default=True)
    run_count = models.IntegerField(blank=False, null=False, default=0)
    worker = models.CharField(max_length=1024,
                              db_index=True,
                              null=True,
                              blank=True)
    priority = models.IntegerField(blank=False, null=False, default=0)

    own_date_done = models.DateTimeField(blank=True, null=True)
    own_status = models.CharField(_('state'),
                                  max_length=50,
                                  default=states.PENDING,
                                  choices=TASK_STATE_CHOICES,
                                  db_index=True,
                                  null=True,
                                  blank=True)
    own_progress = models.PositiveSmallIntegerField(default=0,
                                                    blank=True,
                                                    null=True)

    result = JSONField(blank=True, null=True, encoder=DjangoJSONEncoder)
    traceback = models.TextField(_('traceback'), blank=True, null=True)
    hidden = models.BooleanField(editable=False, default=False, db_index=True)
    propagate_exceptions = models.BooleanField(editable=False,
                                               default=False,
                                               db_index=True)

    status = models.CharField(max_length=50,
                              default=states.PENDING,
                              choices=TASK_STATE_CHOICES,
                              db_index=True,
                              null=True,
                              blank=True)
    progress = models.PositiveIntegerField(default=0, null=True, blank=True)
    completed = models.BooleanField(null=False, blank=False, default=False)

    date_done = models.DateTimeField(blank=True, null=True)

    title = models.CharField(max_length=1024,
                             db_index=False,
                             null=True,
                             blank=True)

    log_extra = JSONField(blank=True, null=True, encoder=DjangoJSONEncoder)
    push_steps = models.IntegerField(null=True, blank=True, default=1)
    failure_processed = models.BooleanField(null=False,
                                            blank=False,
                                            default=False)

    project = models.ForeignKey('project.Project',
                                blank=True,
                                null=True,
                                db_index=True,
                                on_delete=CASCADE)
    upload_session = models.ForeignKey('project.UploadSession',
                                       blank=True,
                                       null=True,
                                       db_index=True,
                                       on_delete=CASCADE)
    run_after_sub_tasks_finished = models.BooleanField(editable=False,
                                                       default=False)

    objects = TaskManager()

    def __str__(self):
        return "Task (name={}, celery_id={})" \
            .format(self.name, self.id)

    def is_sub_task(self):
        return self.main_task_id and self.main_task_id != self.id

    @property
    def has_error(self) -> bool:
        return self.status == states.FAILURE

    @property
    def duration(self):
        if not self.date_start:
            return None
        date_done = self.date_done or now()
        duration = date_done - self.date_start
        return duration

    @property
    def subtasks(self):
        return Task.objects.filter(main_task=self)

    @classmethod
    def disallow_start(cls, name):
        return Task.objects.filter(name=name, status='PENDING').exists()

    @classmethod
    def special_tasks(cls, filter_opts):
        """
        Get tasks related with key/value
        """
        opts = {'metadata__%s' % k: v for k, v in filter_opts.items()}
        return cls.objects.main_tasks().filter(**opts)

    def write_log(self, message, level='info', **kwargs):
        message = str(message)
        extra = {
            'log_task_id': self.id,
            'log_main_task_id': self.main_task_id or self.id,
            'log_task_name': self.name,
            'log_user_id': self.user.id if self.user else None,
            'log_user_login': self.user.username if self.user else None
        }

        if self.log_extra:
            extra.update(dict(self.log_extra))

        if kwargs:
            extra.update(kwargs)

        try:
            getattr(logger, level)(message, extra=extra)

            return True
        except Exception as exception:
            trace = format_exc()
            exc_class, exception, _ = sys.exc_info()
            exception_str = '%s: %s' % (exc_class.__name__, str(exception))

            logger.error(
                'Exception caught while trying to log a message:\n{0}\n{1}'.
                format(exception_str, trace),
                extra=extra)

    def get_task_log_from_elasticsearch(self):
        try:
            es_query = {
                'sort': ['@timestamp'],
                'query': {
                    'bool': {
                        'must': [
                            {
                                'match': {
                                    'log_main_task_id': self.id
                                }
                            },
                        ]
                    }
                },
                '_source': [
                    '@timestamp', 'level', 'message', 'log_task_id',
                    'log_main_task_id'
                ]
            }
            es_res = es.search(
                size=1000,
                index=settings.LOGGING_ELASTICSEARCH_INDEX_TEMPLATE,
                body=es_query)
            logs = []
            for hit in es_res['hits']['hits']:
                doc = hit['_source']
                timestamp = date_parser.parse(doc['@timestamp'])
                timestamp = timestamp.strftime('%Y-%m-%d %H:%M:%S')

                level = doc['level']
                if not level:
                    level = 'INFO'

                message = doc['message']

                # log_add = '{2: <9} {0} | {1}'.format(timestamp, message, level.upper())
                log_add = 'Main task: {3} | Sub-task: {4}\n{2: <9} {0} | {1}'.format(
                    timestamp, message, level.upper(), doc['log_main_task_id'],
                    doc['log_task_id'])
                logs.append(log_add)
            return str('\n'.join(logs))
        except:
            return 'Unable to fetch logs from ElasticSearch:\n{0}'.format(
                format_exc())

    @property
    def log(self):
        return self.get_task_log_from_elasticsearch()
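
# special_tasks() above expands a plain key/value dict into JSONField lookups
# of the form metadata__<key>. The expansion itself is ordinary dict
# manipulation; the sample keys below are hypothetical.
filter_opts = {'session_id': 42, 'kind': 'reindex'}
opts = {'metadata__%s' % k: v for k, v in filter_opts.items()}
assert opts == {'metadata__session_id': 42, 'metadata__kind': 'reindex'}
# cls.objects.main_tasks().filter(**opts) then matches tasks whose JSON
# metadata contains those key/value pairs.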
コード例 #16
0
ファイル: models.py プロジェクト: jwnasambu/oclapi2
class ClientConfig(models.Model):
    class Meta:
        db_table = 'client_configurations'

    name = models.TextField(default='View Configuration')
    type = models.CharField(choices=CONFIG_TYPES,
                            default=HOME_TYPE,
                            max_length=255)
    is_default = models.BooleanField(default=False)
    config = JSONField()
    resource_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    resource_id = models.PositiveIntegerField()
    resource = GenericForeignKey('resource_type', 'resource_id')

    created_by = models.ForeignKey(
        'users.UserProfile',
        default=SUPER_ADMIN_USER_ID,
        on_delete=models.SET_DEFAULT,
        related_name='%(app_label)s_%(class)s_related_created_by',
        related_query_name='%(app_label)s_%(class)ss_created_by',
    )
    updated_by = models.ForeignKey(
        'users.UserProfile',
        default=SUPER_ADMIN_USER_ID,
        on_delete=models.SET_DEFAULT,
        related_name='%(app_label)s_%(class)s_related_updated_by',
        related_query_name='%(app_label)s_%(class)ss_updated_by',
    )
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    is_active = models.BooleanField(default=True)

    errors = None

    @property
    def is_home(self):
        return self.type == HOME_TYPE

    @property
    def uri(self):
        return "/client-configs/{}/".format(self.id)

    def clean(self):
        self.errors = None
        super().clean()

        if self.is_home:
            self.validate_home_config()
            if not self.errors:
                self.format_home_config_tabs()

        if self.errors:
            raise ValidationError(self.errors)

    @property
    def siblings(self):
        return self.__class__.objects.filter(
            resource_type_id=self.resource_type_id,
            resource_id=self.resource_id,
            type=self.type).exclude(id=self.id)

    def format_home_config_tabs(self):
        for tab in self.config.get('tabs', []):
            fields = get(tab, 'fields')
            if isinstance(fields, dict):
                tab['fields'] = [{k: v} for k, v in fields.items()]

    def validate_home_config(self):
        self.validate_home_tabs_config()

    def validate_home_tabs_config(self):
        tabs = get(self.config, 'tabs')
        if not tabs:
            self.errors = dict(tabs=[EMPTY_TABS_CONFIG])
        elif not isinstance(tabs, list):
            self.errors = dict(tabs=[NOT_LIST_TABS_CONFIG])
        elif any([not isinstance(tab, dict) for tab in tabs]):
            self.errors = dict(tabs=[INVALID_TABS_CONFIG])
        else:
            default_tabs = [tab for tab in tabs if tab.get('default', False)]
            if len(default_tabs) != 1:
                self.errors = dict(tabs=[ONE_DEFAULT_TAB])
コード例 #17
0
class ChangeParametersCommand(Delta, ChangesWfModuleOutputs):
    wf_module = models.ForeignKey(WfModule, on_delete=models.PROTECT)
    old_values = JSONField('old_values')  # _all_ params
    new_values = JSONField('new_values')  # only _changed_ params
    wf_module_delta_ids = ChangesWfModuleOutputs.wf_module_delta_ids

    def forward_impl(self):
        self.wf_module.params = self.new_values
        self.wf_module.save(update_fields=['params'])
        self.forward_affected_delta_ids()

    def backward_impl(self):
        self.wf_module.params = self.old_values
        self.wf_module.save(update_fields=['params'])
        self.backward_affected_delta_ids()

    @classmethod
    def wf_module_is_deleted(cls, wf_module):
        """Return True iff we cannot add commands to `wf_module`."""
        try:
            wf_module.refresh_from_db()
        except WfModule.DoesNotExist:
            return True

        if wf_module.is_deleted:
            return True

        wf_module.tab.refresh_from_db()
        if wf_module.tab.is_deleted:
            return True

        return False

    @classmethod
    def amend_create_kwargs(cls, *, wf_module, new_values, **kwargs):
        """
        Prepare `old_values` and `new_values`.

        Raise ValueError if `new_values` won't be valid according to the module
        spec.
        """
        if cls.wf_module_is_deleted(wf_module):  # refreshes from DB
            return None

        module_version = wf_module.module_version
        if module_version is None:
            raise ValueError('Module %s does not exist' %
                             wf_module.module_id_name)

        # Old values: store exactly what we had
        old_values = wf_module.params

        # New values: store _migrated_ old_values, with new_values applied on
        # top
        lm = loaded_module.LoadedModule.for_module_version_sync(module_version)
        migrated_old_values = lm.migrate_params(module_version.param_schema,
                                                old_values)
        new_values = {
            **migrated_old_values,
            **new_values,
        }

        module_version.param_schema.validate(new_values)  # raises ValueError

        return {
            **kwargs,
            'wf_module': wf_module,
            'new_values': new_values,
            'old_values': old_values,
            'wf_module_delta_ids': cls.affected_wf_module_delta_ids(wf_module),
        }

    @property
    def command_description(self):
        return 'Change params'
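
# amend_create_kwargs() above layers the changed params over the migrated old
# ones. In a {**a, **b} merge the right-hand dict wins on conflicts, so every
# old key survives and new_values overrides where both are present; the
# sample params below are hypothetical.
migrated_old_values = {'url': 'https://example.com', 'limit': 100}
new_values = {'limit': 250}
merged_params = {**migrated_old_values, **new_values}
assert merged_params == {'url': 'https://example.com', 'limit': 250}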
コード例 #18
0
ファイル: models.py プロジェクト: zhangkai0217/djangotest
class User(models.Model):
    name = models.CharField(max_length=255)
    sex = models.IntegerField()
    hobby = JSONField()
    money = models.DecimalField(max_digits=16, decimal_places=4)
    start_time = models.DateField()
コード例 #19
0
ファイル: models.py プロジェクト: samjaninf/tacticalrmm
class Agent(models.Model):
    version = models.CharField(default="0.1.0", max_length=255)
    operating_system = models.CharField(null=True, max_length=255)
    plat = models.CharField(max_length=255, null=True)
    plat_release = models.CharField(max_length=255, null=True)
    hostname = models.CharField(max_length=255)
    salt_id = models.CharField(null=True, blank=True, max_length=255)
    local_ip = models.TextField(null=True)
    agent_id = models.CharField(max_length=200)
    last_seen = models.DateTimeField(null=True, blank=True)
    services = JSONField(null=True)
    public_ip = models.CharField(null=True, max_length=255)
    total_ram = models.IntegerField(null=True)
    used_ram = models.IntegerField(null=True)
    disks = JSONField(null=True)
    boot_time = models.FloatField(null=True)
    logged_in_username = models.CharField(null=True, max_length=200)
    client = models.CharField(max_length=200)
    antivirus = models.CharField(default="n/a", max_length=255)
    site = models.CharField(max_length=150)
    monitoring_type = models.CharField(max_length=30)
    description = models.CharField(null=True, max_length=255)
    mesh_node_id = models.CharField(null=True, max_length=255)
    overdue_email_alert = models.BooleanField(default=False)
    overdue_text_alert = models.BooleanField(default=False)
    overdue_time = models.PositiveIntegerField(default=30)
    check_interval = models.PositiveIntegerField(default=120)
    needs_reboot = models.BooleanField(default=False)
    managed_by_wsus = models.BooleanField(default=False)
    update_pending = models.BooleanField(default=False)
    choco_installed = models.BooleanField(default=False)
    wmi_detail = JSONField(null=True)
    time_zone = models.CharField(
        max_length=255, choices=TZ_CHOICES, null=True, blank=True
    )
    policy = models.ForeignKey(
        "automation.Policy",
        related_name="agents",
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
    )

    def __str__(self):
        return self.hostname

    @property
    def timezone(self):
        # return the default timezone unless the timezone is explicitly set per agent
        if self.time_zone is not None:
            return self.time_zone
        else:
            from core.models import CoreSettings

            return CoreSettings.objects.first().default_time_zone

    @property
    def status(self):
        offline = dt.datetime.now(dt.timezone.utc) - dt.timedelta(minutes=6)
        overdue = dt.datetime.now(dt.timezone.utc) - dt.timedelta(
            minutes=self.overdue_time
        )

        if self.last_seen is not None:
            if (self.last_seen < offline) and (self.last_seen > overdue):
                return "offline"
            elif (self.last_seen < offline) and (self.last_seen < overdue):
                return "overdue"
            else:
                return "online"
        else:
            return "offline"

    @property
    def has_patches_pending(self):

        return self.winupdates.filter(action="approve").filter(installed=False).exists()

    @property
    def checks(self):
        total, passing, failing = 0, 0, 0

        if self.agentchecks.exists():
            for i in self.agentchecks.all():
                total += 1
                if i.status == "passing":
                    passing += 1
                elif i.status == "failing":
                    failing += 1

        has_failing_checks = failing > 0

        ret = {
            "total": total,
            "passing": passing,
            "failing": failing,
            "has_failing_checks": has_failing_checks,
        }
        return ret

    @property
    def cpu_model(self):
        ret = []
        try:
            cpus = self.wmi_detail["cpu"]
            for cpu in cpus:
                ret.append([x["Name"] for x in cpu if "Name" in x][0])
            return ret
        except:
            return ["unknown cpu model"]

    @property
    def local_ips(self):
        try:
            ips = self.wmi_detail["network_config"]
            ret = []
            for _ in ips:
                try:
                    addr = [x["IPAddress"] for x in _ if "IPAddress" in x][0]
                except:
                    continue
                else:
                    for ip in addr:
                        if validators.ipv4(ip):
                            ret.append(ip)

            # this block must sit outside the adapter loop, otherwise only
            # the first adapter would ever be inspected
            if len(ret) == 1:
                return ret[0]
            else:
                return ", ".join(ret)
        except:
            return "error getting local ips"

    @property
    def make_model(self):
        try:
            comp_sys = self.wmi_detail["comp_sys"][0]
            comp_sys_prod = self.wmi_detail["comp_sys_prod"][0]
            make = [x["Vendor"] for x in comp_sys_prod if "Vendor" in x][0]
            model = [x["Model"] for x in comp_sys if "Model" in x][0]

            if "to be filled" in model.lower():
                mobo = self.wmi_detail["base_board"][0]
                make = [x["Manufacturer"] for x in mobo if "Manufacturer" in x][0]
                model = [x["Product"] for x in mobo if "Product" in x][0]

            return f"{make} {model}"
        except:
            pass

        try:
            return [x["Version"] for x in comp_sys_prod if "Version" in x][0]
        except:
            pass

        return "unknown make/model"

    @property
    def physical_disks(self):
        try:
            disks = self.wmi_detail["disk"]
            ret = []
            for disk in disks:
                interface_type = [
                    x["InterfaceType"] for x in disk if "InterfaceType" in x
                ][0]

                if interface_type == "USB":
                    continue

                model = [x["Caption"] for x in disk if "Caption" in x][0]
                size = [x["Size"] for x in disk if "Size" in x][0]

                size_in_gb = round(int(size) / 1_073_741_824)
                ret.append(f"{model} {size_in_gb:,}GB {interface_type}")

            return ret
        except:
            return ["unknown disk"]

    # clear is used to delete managed policy checks from agent
    # parent_checks specifies a list of checks to delete from agent with matching parent_check field
    def generate_checks_from_policies(self, clear=False, parent_checks=None):
        # Clear agent checks managed by policy
        if clear:
            if parent_checks:
                self.agentchecks.filter(managed_by_policy=True).filter(
                    parent_checks__in=parent_checks
                ).delete()
            else:
                self.agentchecks.filter(managed_by_policy=True).delete()

        # Clear agent checks that have overriden_by_policy set
        self.agentchecks.update(overriden_by_policy=False)

        # Generate checks based on policies
        automation.models.Policy.generate_policy_checks(self)

    # clear is used to delete managed policy tasks from agent
    # parent_tasks specifies a list of tasks to delete from agent with matching parent_task field
    def generate_tasks_from_policies(self, clear=False, parent_tasks=None):
        # Clear agent tasks managed by policy
        if clear:
            if parent_tasks:
                tasks = self.autotasks.filter(managed_by_policy=True).filter(
                    parent_tasks__in=parent_tasks
                )
                for task in tasks:
                    autotasks.tasks.delete_win_task_schedule.delay(task.pk)
            else:
                for task in self.autotasks.filter(managed_by_policy=True):
                    autotasks.tasks.delete_win_task_schedule.delay(task.pk)

        # Generate tasks based on policies
        automation.models.Policy.generate_policy_tasks(self)

    # https://github.com/Ylianst/MeshCentral/issues/59#issuecomment-521965347
    def get_login_token(self, key, user, action=3):
        try:
            key = bytes.fromhex(key)
            key1 = key[0:48]
            key2 = key[48:]
            msg = '{{"a":{}, "u":"{}","time":{}}}'.format(
                action, user, int(time.time())
            )
            iv = get_random_bytes(16)

            # sha
            h = SHA3_384.new()
            h.update(key1)
            hashed_msg = h.digest() + msg.encode()

            # aes
            cipher = AES.new(key2, AES.MODE_CBC, iv)
            msg = cipher.encrypt(pad(hashed_msg, 16))

            return base64.b64encode(iv + msg, altchars=b"@$").decode("utf-8")
        except Exception:
            return "err"

    def salt_api_cmd(self, **kwargs):

        # salt should always time out before the requests call does
        try:
            timeout = kwargs["timeout"]
        except KeyError:
            # default timeout
            timeout = 15
            salt_timeout = 12
        else:
            if timeout < 8:
                timeout = 8
                salt_timeout = 5
            else:
                salt_timeout = timeout - 3

        json = {
            "client": "local",
            "tgt": self.salt_id,
            "fun": kwargs["func"],
            "timeout": salt_timeout,
            "username": settings.SALT_USERNAME,
            "password": settings.SALT_PASSWORD,
            "eauth": "pam",
        }

        if "arg" in kwargs:
            json.update({"arg": kwargs["arg"]})
        if "kwargs" in kwargs:
            json.update({"kwarg": kwargs["kwargs"]})

        try:
            resp = requests.post(
                f"http://{settings.SALT_HOST}:8123/run", json=[json], timeout=timeout,
            )
        except Exception:
            return "timeout"

        try:
            ret = resp.json()["return"][0][self.salt_id]
        except Exception as e:
            logger.error(f"{self.salt_id}: {e}")
            return "error"
        else:
            return ret

    def salt_api_async(self, **kwargs):

        json = {
            "client": "local_async",
            "tgt": self.salt_id,
            "fun": kwargs["func"],
            "username": settings.SALT_USERNAME,
            "password": settings.SALT_PASSWORD,
            "eauth": "pam",
        }

        if "arg" in kwargs:
            json.update({"arg": kwargs["arg"]})
        if "kwargs" in kwargs:
            json.update({"kwarg": kwargs["kwargs"]})

        try:
            resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
        except Exception:
            return "timeout"

        return resp

    @staticmethod
    def salt_batch_async(**kwargs):
        assert isinstance(kwargs["minions"], list)

        json = {
            "client": "local_async",
            "tgt_type": "list",
            "tgt": kwargs["minions"],
            "fun": kwargs["func"],
            "username": settings.SALT_USERNAME,
            "password": settings.SALT_PASSWORD,
            "eauth": "pam",
        }

        if "arg" in kwargs:
            json.update({"arg": kwargs["arg"]})
        if "kwargs" in kwargs:
            json.update({"kwarg": kwargs["kwargs"]})

        try:
            resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
        except Exception:
            return "timeout"

        return resp

    @staticmethod
    def get_github_versions():
        r = requests.get("https://api.github.com/repos/wh1te909/winagent/releases")
        versions = {}
        for i, release in enumerate(r.json()):
            versions[i] = release["name"]

        return {"versions": versions, "data": r.json()}

    def schedule_reboot(self, obj):

        start_date = dt.datetime.strftime(obj, "%Y-%m-%d")
        start_time = dt.datetime.strftime(obj, "%H:%M")

        # let windows task scheduler automatically delete the task after it runs
        end_obj = obj + dt.timedelta(minutes=15)
        end_date = dt.datetime.strftime(end_obj, "%Y-%m-%d")
        end_time = dt.datetime.strftime(end_obj, "%H:%M")

        task_name = "TacticalRMM_SchedReboot_" + "".join(
            random.choice(string.ascii_letters) for _ in range(10)
        )

        r = self.salt_api_cmd(
            timeout=15,
            func="task.create_task",
            arg=[
                f"name={task_name}",
                "force=True",
                "action_type=Execute",
                'cmd="C:\\Windows\\System32\\shutdown.exe"',
                'arguments="/r /t 5 /f"',
                "trigger_type=Once",
                f'start_date="{start_date}"',
                f'start_time="{start_time}"',
                f'end_date="{end_date}"',
                f'end_time="{end_time}"',
                "ac_only=False",
                "stop_if_on_batteries=False",
                "delete_after=Immediately",
            ],
        )

        if r == "error" or (isinstance(r, bool) and not r):
            return "failed"
        elif r == "timeout":
            return "timeout"
        elif isinstance(r, bool) and r:
            from logs.models import PendingAction

            details = {
                "taskname": task_name,
                "time": str(obj),
            }
            PendingAction(agent=self, action_type="schedreboot", details=details).save()

            nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
            return {"msg": {"time": nice_time, "agent": self.hostname}}
        else:
            return "failed"
コード例 #20
0
class EmailUser(AbstractBaseUser, PermissionsMixin):
    """Custom authentication model for the ledger project.
    Password and email are required. Other fields are optional.
    """
    email = models.EmailField(unique=True, blank=False)
    first_name = models.CharField(max_length=128,
                                  blank=False,
                                  verbose_name='Given name(s)')
    last_name = models.CharField(max_length=128, blank=False)
    is_staff = models.BooleanField(
        default=False,
        help_text='Designates whether the user can log into the admin site.',
    )
    is_active = models.BooleanField(
        default=True,
        help_text='Designates whether this user should be treated as active.'
        'Unselect this instead of deleting ledger.accounts.',
    )
    date_joined = models.DateTimeField(default=timezone.now)

    TITLE_CHOICES = (('Mr', 'Mr'), ('Miss', 'Miss'), ('Mrs', 'Mrs'),
                     ('Ms', 'Ms'), ('Dr', 'Dr'))
    title = models.CharField(max_length=100,
                             choices=TITLE_CHOICES,
                             null=True,
                             blank=True,
                             verbose_name='title',
                             help_text='')
    dob = models.DateField(auto_now=False,
                           auto_now_add=False,
                           null=True,
                           blank=False,
                           verbose_name="date of birth",
                           help_text='')
    phone_number = models.CharField(max_length=50,
                                    null=True,
                                    blank=True,
                                    verbose_name="phone number",
                                    help_text='')
    position_title = models.CharField(max_length=100,
                                      null=True,
                                      blank=True,
                                      verbose_name="position title",
                                      help_text='')
    mobile_number = models.CharField(max_length=50,
                                     null=True,
                                     blank=True,
                                     verbose_name="mobile number",
                                     help_text='')
    fax_number = models.CharField(max_length=50,
                                  null=True,
                                  blank=True,
                                  verbose_name="fax number",
                                  help_text='')
    organisation = models.CharField(
        max_length=300,
        null=True,
        blank=True,
        verbose_name="organisation",
        help_text='organisation, institution or company')

    residential_address = models.ForeignKey(Address,
                                            null=True,
                                            blank=False,
                                            related_name='+')
    postal_address = models.ForeignKey(Address,
                                       null=True,
                                       blank=True,
                                       related_name='+')
    billing_address = models.ForeignKey(Address,
                                        null=True,
                                        blank=True,
                                        related_name='+')

    identification = models.ForeignKey(Document,
                                       null=True,
                                       blank=True,
                                       on_delete=models.SET_NULL,
                                       related_name='identification_document')

    senior_card = models.ForeignKey(Document,
                                    null=True,
                                    blank=True,
                                    on_delete=models.SET_NULL,
                                    related_name='senior_card')

    character_flagged = models.BooleanField(default=False)

    character_comments = models.TextField(blank=True)

    documents = models.ManyToManyField(Document)

    extra_data = JSONField(default=dict)

    objects = EmailUserManager()
    USERNAME_FIELD = 'email'

    def __str__(self):
        if self.is_dummy_user:
            if self.organisation:
                return '{} {} ({})'.format(self.first_name, self.last_name,
                                           self.organisation)
            return '{} {}'.format(self.first_name, self.last_name)
        else:
            if self.organisation:
                return '{} ({})'.format(self.email, self.organisation)
            return '{}'.format(self.email)

    def clean(self):
        super(EmailUser, self).clean()
        self.email = self.email.lower() if self.email else self.email
        post_clean.send(sender=self.__class__, instance=self)

    def save(self, *args, **kwargs):
        if not self.email:
            self.email = self.get_dummy_email()
        elif in_dbca_domain(self):
            # checks and updates department user details from address book after every login
            user_details = get_department_user_compact(self.email)
            if user_details:
                # check if keys can be found in ITAssets api - the response JSON sent by the API may have changed
                if 'telephone' not in user_details or 'mobile_phone' not in user_details or 'title' not in user_details or 'location' not in user_details:
                    logger.warning(
                        'Cannot find user details in ITAssets api call for user {}'
                        .format(self.email))

                # Only set the below fields if there is a value from address book (in ITAssets API).
                # This will allow fields in EmailUser object to be:
                #   a. overridden whenever newer/updated fields (e.g. telephone number) are available in address book
                #   b. if value for the field in address book empty/null, a previous value entered by user will not be overwritten with null
                if user_details.get('telephone'):
                    self.phone_number = user_details.get('telephone')
                if user_details.get('mobile_phone'):
                    self.mobile_number = user_details.get('mobile_phone')
                if user_details.get('title'):
                    self.position_title = user_details.get('title')
                if user_details.get('location', {}).get('fax'):
                    self.fax_number = user_details.get('location',
                                                       {}).get('fax')

                self.is_staff = True

        self.email = self.email.lower()
        super(EmailUser, self).save(*args, **kwargs)

    def get_full_name(self):
        full_name = '{} {}'.format(self.first_name, self.last_name)
        #.encode('utf-8').strip()
        return full_name

    def get_full_name_dob(self):
        full_name_dob = '{} {} ({})'.format(self.first_name, self.last_name,
                                            self.dob.strftime('%d/%m/%Y'))
        return full_name_dob.strip()

    def get_short_name(self):
        if self.first_name:
            return self.first_name.split(' ')[0]
        return self.email

    def upload_identification(self, request):
        with transaction.atomic():
            document = Document(file=request.data.dict()['identification'])
            document.save()
            self.identification = document
            self.save()

    dummy_email_suffix = "*****@*****.**"
    dummy_email_suffix_len = len(dummy_email_suffix)

    @property
    def is_dummy_user(self):
        return not self.email or self.email[
            -1 * self.dummy_email_suffix_len:] == self.dummy_email_suffix

    @property
    def dummy_email(self):
        if self.is_dummy_user:
            return self.email
        else:
            return None

    def get_dummy_email(self):
        # use timestamp plus first name, last name to generate a unique id.
        uid = datetime.now().strftime("%Y%m%d%H%M%S%f")
        return "{}.{}.{}{}".format(self.first_name, self.last_name, uid,
                                   self.dummy_email_suffix)

    @property
    def username(self):
        return self.email

    @property
    def is_senior(self):
        """
        Test if the user is a senior according to the rules of WA senior
        dob is before 1 July 1955; or
        dob is between 1 July 1955 and 30 June 1956 and age is 61 or older; or
        dob is between 1 July 1956 and 30 June 1957 and age is 62 or older; or
        dob is between 1 July 1957 and 30 June 1958 and age is 63 or older; or
        dob is between 1 July 1958 and 30 June 1959 and age is 64 or older; or
        dob is after 30 June 1959 and age is 65 or older

        :return:
        """
        return \
            self.dob < date(1955, 7, 1) or \
            ((date(1955, 7, 1) <= self.dob <= date(1956, 6, 30)) and self.age() >= 61) or \
            ((date(1956, 7, 1) <= self.dob <= date(1957, 6, 30)) and self.age() >= 62) or \
            ((date(1957, 7, 1) <= self.dob <= date(1958, 6, 30)) and self.age() >= 63) or \
            ((date(1958, 7, 1) <= self.dob <= date(1959, 6, 30)) and self.age() >= 64) or \
            (self.dob > date(1959, 6, 1) and self.age() >= 65)

    def age(self):
        if self.dob:
            today = date.today()
            # calculate age with the help of trick int(True) = 1 and int(False) = 0
            return today.year - self.dob.year - (
                (today.month, today.day) < (self.dob.month, self.dob.day))
        else:
            return -1

    def log_user_action(self, action, request=None):
        if request:
            return EmailUserAction.log_action(self, action, request.user)
        return None
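
A minimal standalone sketch of the age calculation and the WA seniors rule above, outside the model (the function names here are illustrative, not part of the class):

from datetime import date

def age_on(dob, today):
    # (month, day) tuple comparison yields 1 before the birthday this year
    # and 0 after, exploiting int(True) == 1 and int(False) == 0.
    return today.year - dob.year - ((today.month, today.day) < (dob.month, dob.day))

def is_wa_senior(dob, today):
    return (
        dob < date(1955, 7, 1)
        or (date(1955, 7, 1) <= dob <= date(1956, 6, 30)) and age_on(dob, today) >= 61
        or (date(1956, 7, 1) <= dob <= date(1957, 6, 30)) and age_on(dob, today) >= 62
        or (date(1957, 7, 1) <= dob <= date(1958, 6, 30)) and age_on(dob, today) >= 63
        or (date(1958, 7, 1) <= dob <= date(1959, 6, 30)) and age_on(dob, today) >= 64
        or dob > date(1959, 6, 30) and age_on(dob, today) >= 65
    )

assert is_wa_senior(date(1950, 1, 1), date(2020, 1, 1))      # born before 1 July 1955
assert not is_wa_senior(date(1960, 1, 1), date(2020, 1, 1))  # 60 years old, needs 65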
Code example #21
class UserProfile(models.Model):
    user = models.OneToOneField(User,
                                related_name='profile',
                                on_delete=models.CASCADE)
    roles = ArrayField(models.CharField(max_length=10, null=True))
    attrs = JSONField(null=True)
Code example #22
class History(models.Model):
    table_name = models.CharField(max_length=255, verbose_name=_('table name'))
    row_id = models.IntegerField(verbose_name=_('row id'))
    action = models.CharField(max_length=50, verbose_name=_('action'))
    old_value = JSONField(verbose_name=_('old value'), null=True)
    new_value = JSONField(verbose_name=_('new value'), null=True)
    change_user_id = models.IntegerField(verbose_name=_('User'))
    change_timestamp = models.DateTimeField(verbose_name=_('Change timestamp'),
                                            default=datetime.now)
    message = models.TextField(verbose_name=_('message'), null=True)

    class Meta:
        db_table = 'history'
        verbose_name = _("History")
        verbose_name_plural = _("Histories")

    @property
    def difference(self):
        if self.action == 'UPDATE':
            ddiff = DeepDiff(
                self.old_value,
                self.new_value,
                ignore_order=True,
                verbose_level=0,
                exclude_paths={
                    "root['modified']", "root['metadata_modified']"
                },
                # exclude_types=['dictionary_item_added']
            )

            if 'type_changes' in ddiff:
                del ddiff['type_changes']
            if 'dictionary_item_added' in ddiff:
                del ddiff['dictionary_item_added']
            if 'values_changed' in ddiff:
                for el in ddiff['values_changed']:
                    if 'diff' in ddiff['values_changed'][el]:
                        del ddiff['values_changed'][el]['diff']
            ddiff = ddiff.json
        else:
            ddiff = json.dumps(self.new_value, ensure_ascii=False)
        return ddiff

    @property
    def user(self):
        users = get_user_model()
        user = users.objects.get(id=self.change_user_id)
        return user

    def __str__(self):
        return f"{self.id} | {self.action} > {self.table_name} > {self.row_id}"

    def diff_prettified(self):
        response = json.loads(self.difference)
        response = json.dumps(response,
                              sort_keys=True,
                              indent=1,
                              ensure_ascii=False).replace('&oacute;', "ó")
        response = response[:10000]
        formatter = HtmlFormatter(style='colorful', lineseparator="<br>")
        response = highlight(response, JsonLexer(), formatter)
        style = "<style>" + formatter.get_style_defs() + "</style>"
        return mark_safe(style + response)

    diff_prettified.short_description = _("Differences")

    @classmethod
    def get_object_history(cls, obj):
        return cls.objects.filter(table_name=obj._meta.model_name,
                                  row_id=obj.id)

    @classmethod
    def accusative_case(cls):
        return _("acc: History")

    def indexing(self):
        Document = get_document_for_model(History)

        obj = Document(id=self.id,
                       table_name=self.table_name,
                       row_id=self.row_id,
                       action=self.action,
                       change_user_id=self.change_user_id,
                       change_timestamp=self.change_timestamp,
                       message=self.message)
        obj.save()
        return obj.to_dict(include_meta=True)
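
A small illustration of the DeepDiff call used by the `difference` property above (requires the deepdiff package; the sample dicts are invented):

from deepdiff import DeepDiff

old = {"title": "Dataset A", "modified": "2020-01-01", "rows": 10}
new = {"title": "Dataset B", "modified": "2020-02-02", "rows": 10}

ddiff = DeepDiff(
    old, new,
    ignore_order=True,
    verbose_level=0,
    exclude_paths={"root['modified']", "root['metadata_modified']"},
)
# Only the title change is reported; 'modified' is excluded from the diff.
print(ddiff)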
Code example #23
class Job(AbstractJob, OutputsModel, PersistenceModel, NameableModel,
          DescribableModel, TagModel, JobMixin):
    """A model that represents the configuration for run job."""
    user = models.ForeignKey(settings.AUTH_USER_MODEL,
                             on_delete=models.CASCADE,
                             related_name='+')
    project = models.ForeignKey('db.Project',
                                on_delete=models.CASCADE,
                                related_name='jobs')
    config = JSONField(help_text='The compiled polyaxonfile for the run job.',
                       validators=[validate_job_spec_config])
    code_reference = models.ForeignKey('db.CodeReference',
                                       on_delete=models.SET_NULL,
                                       blank=True,
                                       null=True,
                                       related_name='+')
    build_job = models.ForeignKey('db.BuildJob',
                                  on_delete=models.SET_NULL,
                                  blank=True,
                                  null=True,
                                  related_name='+')
    original_job = models.ForeignKey(
        'self',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name='clones',
        help_text='The original job this one was cloned from.')
    cloning_strategy = models.CharField(max_length=16,
                                        blank=True,
                                        null=True,
                                        default=CloningStrategy.RESTART,
                                        choices=CloningStrategy.CHOICES)
    status = models.OneToOneField('db.JobStatus',
                                  related_name='+',
                                  blank=True,
                                  null=True,
                                  editable=True,
                                  on_delete=models.SET_NULL)

    class Meta:
        app_label = 'db'
        unique_together = (('project', 'name'), )

    @cached_property
    def unique_name(self):
        return JOB_UNIQUE_NAME_FORMAT.format(
            project_name=self.project.unique_name, id=self.id)

    @cached_property
    def specification(self):
        return JobSpecification(values=self.config)

    @property
    def is_clone(self):
        return self.original_job is not None

    @property
    def original_unique_name(self):
        return self.original_job.unique_name if self.original_job else None

    @property
    def is_restart(self):
        return self.is_clone and self.cloning_strategy == CloningStrategy.RESTART

    @property
    def is_resume(self):
        return self.is_clone and self.cloning_strategy == CloningStrategy.RESUME

    @property
    def is_copy(self):
        return self.is_clone and self.cloning_strategy == CloningStrategy.COPY

    def set_status(self, status, message=None, details=None):  # pylint:disable=arguments-differ
        return self._set_status(status_model=JobStatus,
                                status=status,
                                message=message,
                                details=details)

    def _clone(self,
               cloning_strategy,
               event_type,
               user=None,
               description=None,
               config=None,
               code_reference=None,
               update_code_reference=False):
        if not code_reference and not update_code_reference:
            code_reference = self.code_reference
        instance = Job.objects.create(project=self.project,
                                      user=user or self.user,
                                      description=description
                                      or self.description,
                                      config=config or self.config,
                                      original_job=self,
                                      cloning_strategy=cloning_strategy,
                                      code_reference=code_reference)
        auditor.record(event_type=event_type, instance=instance)
        return instance

    def restart(self,
                user=None,
                description=None,
                config=None,
                code_reference=None,
                update_code_reference=False):
        return self._clone(cloning_strategy=CloningStrategy.RESTART,
                           event_type=JOB_RESTARTED,
                           user=user,
                           description=description,
                           config=config,
                           code_reference=code_reference,
                           update_code_reference=update_code_reference)
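
The `is_restart` / `is_resume` / `is_copy` flags above reduce to comparing `cloning_strategy` against the strategy constants on a clone; a toy, framework-free sketch of that relationship (`FakeJob` and the string constants are stand-ins, not part of the model):

RESTART, RESUME, COPY = "restart", "resume", "copy"

class FakeJob:
    def __init__(self, original_job=None, cloning_strategy=RESTART):
        self.original_job = original_job
        self.cloning_strategy = cloning_strategy

    @property
    def is_clone(self):
        return self.original_job is not None

    @property
    def is_restart(self):
        return self.is_clone and self.cloning_strategy == RESTART

parent = FakeJob()
child = FakeJob(original_job=parent, cloning_strategy=RESTART)
assert not parent.is_restart  # no original_job, so not a clone
assert child.is_restart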
Code example #24
class OCPAWSCostLineItemProjectDailySummary(models.Model):
    """A summarized view of OCP on AWS cost by OpenShift project."""
    class Meta:
        """Meta for OCPAWSCostLineItemProjectDailySummary."""

        db_table = "reporting_ocpawscostlineitem_project_daily_summary"

        indexes = [
            models.Index(fields=["usage_start"],
                         name="cost_proj_sum_ocp_usage_idx"),
            models.Index(fields=["namespace"],
                         name="cost__proj_sum_namespace_idx",
                         opclasses=["varchar_pattern_ops"]),
            models.Index(fields=["node"],
                         name="cost_proj_sum_node_idx",
                         opclasses=["varchar_pattern_ops"]),
            models.Index(fields=["resource_id"],
                         name="cost_proj_sum_resource_idx"),
            GinIndex(fields=["pod_labels"], name="cost_proj_pod_labels_idx"),
            models.Index(fields=["product_family"],
                         name="ocp_aws_proj_prod_fam_idx"),
            models.Index(fields=["instance_type"],
                         name="ocp_aws_proj_inst_type_idx"),
        ]

    # OCP Fields
    report_period = models.ForeignKey("OCPUsageReportPeriod",
                                      on_delete=models.CASCADE,
                                      null=True)

    cluster_id = models.CharField(max_length=50, null=True)

    cluster_alias = models.CharField(max_length=256, null=True)

    # Whether the data comes from a pod or volume report
    data_source = models.CharField(max_length=64, null=True)

    # Kubernetes objects by convention have a max name length of 253 chars
    namespace = models.CharField(max_length=253, null=False)

    pod = models.CharField(max_length=253, null=True)

    node = models.CharField(max_length=253, null=False)

    pod_labels = JSONField(null=True)

    resource_id = models.CharField(max_length=253, null=True)

    usage_start = models.DateField(null=False)

    usage_end = models.DateField(null=False)

    # AWS Fields
    cost_entry_bill = models.ForeignKey("AWSCostEntryBill",
                                        on_delete=models.CASCADE,
                                        null=True)

    product_code = models.CharField(max_length=50, null=False)

    product_family = models.CharField(max_length=150, null=True)

    instance_type = models.CharField(max_length=50, null=True)

    usage_account_id = models.CharField(max_length=50, null=False)

    account_alias = models.ForeignKey("AWSAccountAlias",
                                      on_delete=models.SET_NULL,
                                      null=True)

    availability_zone = models.CharField(max_length=50, null=True)

    region = models.CharField(max_length=50, null=True)

    unit = models.CharField(max_length=63, null=True)

    # Need more precision on calculated fields, otherwise there will be
    # rounding errors.
    usage_amount = models.DecimalField(max_digits=30,
                                       decimal_places=15,
                                       null=True)

    normalized_usage_amount = models.FloatField(null=True)

    currency_code = models.CharField(max_length=10, null=True)

    unblended_cost = models.DecimalField(max_digits=30,
                                         decimal_places=15,
                                         null=True)

    project_markup_cost = models.DecimalField(max_digits=30,
                                              decimal_places=15,
                                              null=True)

    pod_cost = models.DecimalField(max_digits=30, decimal_places=15, null=True)
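
The wide DecimalField precision above (30 digits, 15 decimal places) matters when summing many small usage amounts; a quick illustration of float drift versus Decimal (the values are made up):

from decimal import Decimal

float_total = sum(0.000001013 for _ in range(1000))            # accumulates binary rounding error
decimal_total = sum(Decimal("0.000001013") for _ in range(1000))

print(float_total)    # e.g. 0.0010129999999999997 (binary rounding drift)
print(decimal_total)  # 0.001013000 exactly — no drift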
Code example #25
File: repository.py  Project: dannysauer/pulpcore
class Remote(MasterModel):
    """
    A remote source for content.

    This is meant to be subclassed by plugin authors as an opportunity to provide plugin-specific
    persistent data attributes for a plugin remote subclass.

    This object is a Django model that inherits from :class:`pulpcore.app.models.Remote` which
    provides the platform persistent attributes for a remote object. Plugin authors can add
    additional persistent remote data by subclassing this object and adding Django fields. We
    defer to the Django docs on extending this model definition with additional fields.

    Validation of the remote is done at the API level by a plugin defined subclass of
    :class:`pulpcore.plugin.serializers.repository.RemoteSerializer`.

    Fields:

        name (models.TextField): The remote name.
        url (models.TextField): The URL of an external content source.
        ca_cert (models.FileField): A PEM encoded CA certificate used to validate the
            server certificate presented by the external source.
        client_cert (models.FileField): A PEM encoded client certificate used
            for authentication.
        client_key (models.FileField): A PEM encoded private key used for authentication.
        tls_validation (models.BooleanField): If True, TLS peer validation must be performed.
        proxy_url (models.TextField): The optional proxy URL.
            Format: scheme://host:port
        proxy_username (models.TextField): The optional username to authenticate with the proxy.
        proxy_password (models.TextField): The optional password to authenticate with the proxy.
        username (models.TextField): The username to be used for authentication when syncing.
        password (models.TextField): The password to be used for authentication when syncing.
        download_concurrency (models.PositiveIntegerField): Total number of
            simultaneous connections.
        policy (models.TextField): The policy to use when downloading content.
        total_timeout (models.FloatField): Value for aiohttp.ClientTimeout.total on connections
        connect_timeout (models.FloatField): Value for aiohttp.ClientTimeout.connect
        sock_connect_timeout (models.FloatField): Value for aiohttp.ClientTimeout.sock_connect
        sock_read_timeout (models.FloatField): Value for aiohttp.ClientTimeout.sock_read
        rate_limit (models.IntegerField): Limits total download rate in requests per second.
    """

    TYPE = "remote"

    # Constants for the ChoiceField 'policy'
    IMMEDIATE = "immediate"
    ON_DEMAND = "on_demand"
    STREAMED = "streamed"

    POLICY_CHOICES = (
        (IMMEDIATE, "When syncing, download all metadata and content now."),
        (
            ON_DEMAND,
            "When syncing, download metadata, but do not download content now. Instead, "
            "download content as clients request it, and save it in Pulp to be served for "
            "future client requests.",
        ),
        (
            STREAMED,
            "When syncing, download metadata, but do not download content now. Instead,"
            "download content as clients request it, but never save it in Pulp. This causes "
            "future requests for that same content to have to be downloaded again.",
        ),
    )

    name = models.TextField(db_index=True, unique=True)

    url = models.TextField()

    ca_cert = models.TextField(null=True)
    client_cert = models.TextField(null=True)
    client_key = models.TextField(null=True)
    tls_validation = models.BooleanField(default=True)

    username = models.TextField(null=True)
    password = models.TextField(null=True)

    proxy_url = models.TextField(null=True)
    proxy_username = models.TextField(null=True)
    proxy_password = models.TextField(null=True)

    download_concurrency = models.PositiveIntegerField(default=10)
    policy = models.TextField(choices=POLICY_CHOICES, default=IMMEDIATE)

    total_timeout = models.FloatField(
        null=True, validators=[MinValueValidator(0.0, "Timeout must be >= 0")]
    )
    connect_timeout = models.FloatField(
        null=True, validators=[MinValueValidator(0.0, "Timeout must be >= 0")]
    )
    sock_connect_timeout = models.FloatField(
        null=True, validators=[MinValueValidator(0.0, "Timeout must be >= 0")]
    )
    sock_read_timeout = models.FloatField(
        null=True, validators=[MinValueValidator(0.0, "Timeout must be >= 0")]
    )
    headers = JSONField(blank=True, null=True)
    rate_limit = models.IntegerField(null=True)

    @property
    def download_factory(self):
        """
        Return the DownloaderFactory which can be used to generate asyncio capable downloaders.

        Upon first access, the DownloaderFactory is instantiated and saved internally.

        Plugin writers are expected to override when additional configuration of the
        DownloaderFactory is needed.

        Returns:
            DownloadFactory: The instantiated DownloaderFactory to be used by
                get_downloader().
        """
        try:
            return self._download_factory
        except AttributeError:
            self._download_factory = DownloaderFactory(self)
            return self._download_factory

    @property
    def download_throttler(self):
        """
        Return the Throttler which can be used to rate limit downloaders.

        Upon first access, the Throttler is instantiated and saved internally.
        Plugin writers are expected to override when additional configuration of the
        Throttler is needed.

        Returns:
            Throttler: The instantiated Throttler to be used by get_downloader()

        """
        try:
            return self._download_throttler
        except AttributeError:
            if self.rate_limit:
                self._download_throttler = Throttler(rate_limit=self.rate_limit)
                return self._download_throttler

    def get_downloader(self, remote_artifact=None, url=None, **kwargs):
        """
        Get a downloader from either a RemoteArtifact or URL that is configured with this Remote.

        This method accepts either `remote_artifact` or `url` but not both. At least one is
        required. If neither or both are passed a ValueError is raised.

        Plugin writers are expected to override when additional configuration is needed or when
        another class of download is required.

        Args:
            remote_artifact (:class:`~pulpcore.app.models.RemoteArtifact`): The RemoteArtifact to
                download.
            url (str): The URL to download.
            kwargs (dict): This accepts the parameters of
                :class:`~pulpcore.plugin.download.BaseDownloader`.

        Raises:
            ValueError: If neither remote_artifact nor url is passed, or if both are passed.

        Returns:
            subclass of :class:`~pulpcore.plugin.download.BaseDownloader`: A downloader that
            is configured with the remote settings.
        """
        if remote_artifact and url:
            raise ValueError(_("get_downloader() cannot accept both 'remote_artifact' and 'url'."))
        if remote_artifact is None and url is None:
            raise ValueError(_("get_downloader() requires either 'remote_artifact' and 'url'."))
        if remote_artifact:
            url = remote_artifact.url
            expected_digests = {}
            for digest_name in Artifact.DIGEST_FIELDS:
                digest_value = getattr(remote_artifact, digest_name)
                if digest_value:
                    expected_digests[digest_name] = digest_value
            if expected_digests:
                kwargs["expected_digests"] = expected_digests
            if remote_artifact.size:
                kwargs["expected_size"] = remote_artifact.size
        return self.download_factory.build(url, **kwargs)

    def get_remote_artifact_url(self, relative_path=None):
        """
        Get the full URL for a RemoteArtifact from a relative path.

        This method returns the URL for a RemoteArtifact by concatenating the Remote's url and the
        relative path located in the Remote. Plugin writers are expected to override this method
        when a more complex algorithm is needed to determine the full URL.

        Args:
            relative_path (str): The relative path of a RemoteArtifact

        Raises:
            ValueError: If relative_path starts with a '/'.

        Returns:
            str: A URL for a RemoteArtifact available at the Remote.
        """
        if path.isabs(relative_path):
            raise ValueError(_("Relative path can't start with '/'. {0}").format(relative_path))
        return path.join(self.url, relative_path)

    def get_remote_artifact_content_type(self, relative_path=None):
        """
        Get the type of content that should be available at the relative path.

        Plugin writers are expected to implement this method.

        Args:
            relative_path (str): The relative path of a RemoteArtifact

        Returns:
            Class: The Class of the content type that should be available at the relative path.
        """
        raise NotImplementedError()

    class Meta:
        default_related_name = "remotes"
Code example #26
File: models.py  Project: xavierfigueroav/EvalAI
class ChallengePhase(TimeStampedModel):

    """Model representing a Challenge Phase"""

    def __init__(self, *args, **kwargs):
        super(ChallengePhase, self).__init__(*args, **kwargs)
        self._original_test_annotation = self.test_annotation

    name = models.CharField(max_length=100, db_index=True)
    description = models.TextField()
    leaderboard_public = models.BooleanField(default=False)
    start_date = models.DateTimeField(
        null=True, blank=True, verbose_name="Start Date (UTC)", db_index=True
    )
    end_date = models.DateTimeField(
        null=True, blank=True, verbose_name="End Date (UTC)", db_index=True
    )
    challenge = models.ForeignKey("Challenge", on_delete=models.CASCADE)
    is_public = models.BooleanField(default=False)
    is_submission_public = models.BooleanField(default=False)
    test_annotation = models.FileField(
        upload_to=RandomFileName("test_annotations"), null=True, blank=True
    )
    max_submissions_per_day = models.PositiveIntegerField(
        default=100000, db_index=True
    )
    max_submissions_per_month = models.PositiveIntegerField(
        default=100000, db_index=True
    )
    max_submissions = models.PositiveIntegerField(
        default=100000, db_index=True
    )
    max_concurrent_submissions_allowed = models.PositiveIntegerField(default=3)
    codename = models.CharField(max_length=100, default="Phase Code Name")
    dataset_split = models.ManyToManyField(
        DatasetSplit, blank=True, through="ChallengePhaseSplit"
    )
    allowed_email_ids = ArrayField(
        models.TextField(null=True, blank=True),
        default=list,
        blank=True,
        null=True,
    )
    slug = models.SlugField(max_length=200, null=True, unique=True)
    environment_image = models.CharField(
        max_length=2128, null=True, blank=True
    )  # Max length of repository name and tag is 2000 and 128 respectively
    allowed_submission_file_types = models.CharField(
        max_length=200, default=".json, .zip, .txt, .tsv, .gz, .csv, .h5, .npy"
    )
    # Flag to restrict user to select only one submission for leaderboard
    is_restricted_to_select_one_submission = models.BooleanField(default=False)
    # Store the schema for the submission meta attributes of this challenge phase.
    submission_meta_attributes = JSONField(default=None, blank=True, null=True)
    # Flag to allow reporting partial metrics for submission evaluation
    is_partial_submission_evaluation_enabled = models.BooleanField(
        default=False
    )
    # Id in the challenge config file. Needed to map the object to the value in the config file while updating through Github
    config_id = models.IntegerField(default=None, blank=True, null=True)
    # Store the default metadata for a submission meta attributes of a challenge phase.
    default_submission_meta_attributes = JSONField(
        default=None, blank=True, null=True
    )

    class Meta:
        app_label = "challenges"
        db_table = "challenge_phase"
        unique_together = (("codename", "challenge"),)

    def __str__(self):
        """Returns the name of Phase"""
        return self.name

    def get_start_date(self):
        """Returns the start date of Phase"""
        return self.start_date

    def get_end_date(self):
        """Returns the end date of Challenge"""
        return self.end_date

    @property
    def is_active(self):
        """Returns if the challenge is active or not"""
        if self.start_date < timezone.now() and self.end_date > timezone.now():
            return True
        return False

    def save(self, *args, **kwargs):

        # If max_submissions_per_day is less than max_concurrent_submissions_allowed,
        # clamp the concurrency limit down to the daily limit.
        if (
            self.max_submissions_per_day
            < self.max_concurrent_submissions_allowed
        ):
            self.max_concurrent_submissions_allowed = (
                self.max_submissions_per_day
            )

        challenge_phase_instance = super(ChallengePhase, self).save(
            *args, **kwargs
        )
        return challenge_phase_instance
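
The clamp in `save` keeps concurrency from exceeding the daily quota; a framework-free restatement of that rule (the function name is illustrative):

def clamp_concurrency(max_per_day, max_concurrent):
    # A phase can never allow more concurrent submissions than it allows per day.
    return min(max_concurrent, max_per_day)

assert clamp_concurrency(100000, 3) == 3
assert clamp_concurrency(2, 3) == 2  # a daily cap of 2 pulls concurrency down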
Code example #27
class DataType(models.Model):
    """
    Describes the types of data a DataFile can contain.
    """

    name = models.CharField(
        max_length=40, blank=False, unique=True, validators=[charvalidator]
    )
    parent = models.ForeignKey(
        "self", blank=True, null=True, related_name="children", on_delete=models.PROTECT
    )
    last_editor = models.ForeignKey(Member, on_delete=models.SET_NULL, null=True)
    description = models.CharField(max_length=100, blank=False)
    details = models.TextField(blank=True)
    uploadable = models.BooleanField(default=False)
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    history = JSONField(default=dict, editable=False)

    def __str__(self):
        parents = self.all_parents
        if parents:
            parents.reverse()
            parents = [parent.name for parent in parents if parent]
            parents = ":".join(parents)
            return str("{0}:{1}").format(parents, self.name)
        return self.name

    def save(self, *args, **kwargs):
        """
        Override save to record edit history and require an associated "editor".

        "editor" is an instance-specific parameter; this avoids accepting an update
        that is merely retaining the existing value for the "last_editor" field.
        """
        if not self.editor:
            raise ValueError("'self.editor' must be set when saving DataType.")
        else:
            self.last_editor = self.editor
        self.history[arrow.get(timezone.now()).isoformat()] = {
            "name": self.name,
            "parent": self.parent.id if self.parent else None,
            "description": self.description,
            "details": self.details,
            "uploadable": self.uploadable,
            "editor": self.last_editor.id,
        }
        return super().save(*args, **kwargs)

    @property
    def history_sorted(self):
        history_sorted = OrderedDict()
        items_sorted = sorted(
            self.history.items(), key=lambda item: arrow.get(item[0]), reverse=True
        )
        for item in items_sorted:
            parent = (
                DataType.objects.get(id=item[1]["parent"])
                if item[1]["parent"]
                else None
            )
            try:
                editor = Member.objects.get(id=item[1]["editor"])
            except Member.DoesNotExist:
                editor = None
            history_sorted[arrow.get(item[0]).datetime] = {
                "parent": parent,
                "editor": editor,
                "hash": hash(item[0]),
            }
            history_sorted[arrow.get(item[0]).datetime].update(
                {
                    field: item[1][field]
                    for field in ["name", "description", "details", "uploadable"]
                    if field in item[1]
                }
            )

        return history_sorted

    @property
    def editable(self):
        """
        Return True if no approved projects are registered as using this.
        """
        # Always true for a new instance that hasn't yet been saved:
        if not self.id:
            return True

        approved_registered = self.source_projects.filter(approved=True)
        if approved_registered:
            return False
        else:
            return True

    @property
    def all_parents(self):
        """
        Return list of parents, from immediate to most ancestral.
        """
        parent = self.parent
        parents = []
        if parent:
            while True:
                if not parent:
                    break
                parents.append(parent)
                parent = parent.parent

        return parents

    @classmethod
    def all_as_tree(cls):
        """
        Dict tree of all datatypes. Key = parent & value = array of child dicts.

        This method is intended to make all ancestry relationships available without
        having to hit the database more than necessary.
        """

        def _children(parent, all_datatypes):
            children = {}
            for dt in [dt for dt in all_datatypes if dt.parent == parent]:
                children[dt] = _children(dt, all_datatypes)
            return children

        all_datatypes = list(DataType.objects.all())
        roots = DataType.objects.filter(parent=None)
        tree = {dt: _children(dt, all_datatypes) for dt in roots}
        return tree

    @classmethod
    def sorted_by_ancestors(cls, queryset=None):
        """
        Sort DataTypes by ancestors array of dicts containing 'datatype' and 'depth'.
        """

        def _flatten(node, depth=0):
            flattened = []
            for child in sorted(node.keys(), key=lambda obj: obj.name):
                flattened.append({"datatype": child, "depth": depth})
                flattened = flattened + _flatten(node[child], depth=depth + 1)
            return flattened

        datatypes_tree = cls.all_as_tree()
        return _flatten(datatypes_tree)
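
The tree/flatten pair above can be exercised without a database; a toy sketch with a minimal stand-in object (the `Node` class and sample names are hypothetical):

class Node:
    def __init__(self, name, parent=None):
        self.name, self.parent = name, parent

root = Node("genomic")
child = Node("vcf", parent=root)
all_nodes = [root, child]

def children_of(parent):
    # Same recursion as _children: map each child node to its own subtree.
    return {n: children_of(n) for n in all_nodes if n.parent is parent}

tree = {n: children_of(n) for n in all_nodes if n.parent is None}

def flatten(node, depth=0):
    # Same shape as _flatten: depth-first, siblings sorted by name.
    out = []
    for c in sorted(node, key=lambda o: o.name):
        out.append({"datatype": c.name, "depth": depth})
        out += flatten(node[c], depth + 1)
    return out

assert flatten(tree) == [{"datatype": "genomic", "depth": 0},
                         {"datatype": "vcf", "depth": 1}]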
Code example #28
class Tag(models.Model):
    # categories
    KEYWORD = 'keyword'  # i.e, no special category at all
    BLOG = 'blog'  # items tagged as blog are "news"
    HIGHLIGHTS = 'highlights'
    WRITING = 'writing'
    COLLECTION = 'collection'
    PUBLISHING = 'publishing'  # things related to publishing activity, e.g. an issue number that items can be filtered by

    CATEGORY_CHOICES = (
        (KEYWORD, 'keyword'), (BLOG, 'blog'), (HIGHLIGHTS, 'highlights'),
        (WRITING, 'writing'), (COLLECTION, 'collection'),
        (PUBLISHING, 'publishing')) + settings.MILLER_TAG_CATEGORY_CHOICES

    HIDDEN = 'hidden'
    PUBLIC = 'public'  # everyone can access that.

    STATUS_CHOICES = (
        (HIDDEN, 'keep this hidden'),
        (PUBLIC, 'published tag'),
    )

    name = models.CharField(max_length=100)  # e.g. 'Mr. E. Smith'
    slug = models.SlugField(max_length=100, unique=True,
                            blank=True)  # e.g. 'mr-e-smith'
    category = models.CharField(
        max_length=32, choices=CATEGORY_CHOICES,
        default=KEYWORD)  # e.g. 'actor' or 'institution'
    status = models.CharField(max_length=10,
                              choices=STATUS_CHOICES,
                              default=PUBLIC)

    data = JSONField(default=dict)

    # search     = models.TextField(null=True, blank=True)# SearchVectorField(null=True, blank=True) # index search

    class Meta:
        unique_together = ('name', 'category')

    def __str__(self):
        return '%s (%s)' % (self.name, self.category)

    def asXMLSubjects(self):
        """ return a subject collection """
        subjects = []
        for key, value in self.data.get('name', dict()).items():
            lang = key.lower().replace('_', '-')
            subjects.append(u'<subject xml:lang="{0}">{1}</subject>'.format(
                lang, value))
        if not subjects:
            subjects.append(u'<subject>{0}</subject>'.format(self.name))
        return subjects

    @staticmethod
    def get_search_Q(query):
        """
    Return search queryset for this model. Generate Q fields for each language This follows: https://stackoverflow.com/questions/852414/how-to-dynamically-compose-an-or-query-filter-in-django
    """
        queries = [
            models.Q(**{'data__name__%s__icontains' % lang[2]: query})
            for lang in settings.LANGUAGES
        ] + [models.Q(slug__icontains=query),
             models.Q(name__icontains=query)]
        q = queries.pop()
        for item in queries:
            q |= item
        return q

    def save(self, *args, **kwargs):
        if not self.slug:
            self.slug = helpers.get_unique_slug(self, self.name, 100)

        # for a in settings.LANGUAGES:
        #   config_language = a[3]
        # self.search = SearchVector('name', weight='A', config='simple')

        super(Tag, self).save(*args, **kwargs)
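
The Q-composition in `get_search_Q` is the standard reduce-over-OR pattern; an equivalent sketch (assumes a configured Django project, and that settings.LANGUAGES entries carry the JSON field suffix at index 2, as above — the suffixes passed below are examples):

from functools import reduce
from operator import or_
from django.db import models

def search_q(query, lang_suffixes):
    queries = [models.Q(**{'data__name__%s__icontains' % lang: query})
               for lang in lang_suffixes]
    queries += [models.Q(slug__icontains=query), models.Q(name__icontains=query)]
    # Q objects support |, so reduce folds the list into one OR expression.
    return reduce(or_, queries)

# Tag.objects.filter(search_q('war', ['en_US', 'fr_FR']))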
Code example #29
class DataFile(models.Model):
    """
    Represents a data file from a study or activity.
    """

    objects = DataFileManager()

    file = models.FileField(upload_to=get_upload_path, max_length=1024, unique=True)
    metadata = JSONField(default=dict)
    created = models.DateTimeField(auto_now_add=True)

    source = models.CharField(max_length=32)

    user = models.ForeignKey(
        settings.AUTH_USER_MODEL, related_name="datafiles", on_delete=models.CASCADE
    )

    def __str__(self):
        return str("{0}:{1}:{2}").format(self.user, self.source, self.file)

    def download_url(self, request):
        key = self.generate_key(request)
        url = full_url(reverse("data-management:datafile-download", args=(self.id,)))
        return "{0}?key={1}".format(url, key)

    @property
    def file_url_as_attachment(self):
        """
        Get an S3 pre-signed URL specifying content disposition as attachment.
        """
        return self.file.storage.url(self.file.name)

    def generate_key(self, request):
        """
        Generate new link expiration key
        """
        new_key = DataFileKey(datafile_id=self.id)

        if request:
            # Log the entity that is requesting the key be generated
            new_key.ip_address = get_ip(request)

            try:
                new_key.access_token = request.query_params.get("access_token", None)
            except (AttributeError, KeyError):
                new_key.access_token = None
            try:
                new_key.project_id = request.auth.id
            except AttributeError:
                # We do not have an accessing project
                new_key.project_id = None

        new_key.save()
        return new_key.key

    @property
    def is_public(self):
        return self.parent_project_data_file.is_public

    def has_access(self, user=None):
        return self.is_public or self.user == user

    @property
    def basename(self):
        return os.path.basename(self.file.name)

    @property
    def description(self):
        """
        Filled in by the data-processing server.
        """
        return self.metadata.get("description", "")

    @property
    def tags(self):
        """
        Filled in by the data-processing server.
        """
        return self.metadata.get("tags", [])

    @property
    def size(self):
        """
        Return file size, or empty string if the file key can't be loaded.

        Keys should always load, but this is a more graceful failure mode.
        """
        try:
            return self.file.size
        except (AttributeError, ClientError):
            return ""
Code example #30
File: test_json.py  Project: thomas-scrace/django
    def test_custom_encoder(self):
        with self.assertRaisesMessage(ValueError, "The encoder parameter must be a callable object."):
            field = JSONField(encoder=DjangoJSONEncoder())
        field = JSONField(encoder=DjangoJSONEncoder)
        self.assertEqual(field.clean(datetime.timedelta(days=1), None), datetime.timedelta(days=1))
Code example #31
File: neo.py  Project: dappcenter/rubic_backend
class ContractDetailsNeo(CommonDetails):

    temp_directory = models.CharField(max_length=36, default='')
    parameter_list = JSONField(default=dict)
    neo_contract = models.ForeignKey(NeoContract, null=True, default=None)
    storage_area = models.BooleanField(default=False)
    token_name = models.CharField(max_length=50)
    token_short_name = models.CharField(max_length=10)
    decimals = models.IntegerField()
    admin_address = models.CharField(max_length=70)
    future_minting = models.BooleanField(default=False)

    @classmethod
    def min_cost(cls):
        network = Network.objects.get(name='NEO_MAINNET')
        cost = cls.calc_cost({}, network)
        return cost

    @staticmethod
    def calc_cost(details, network):
        if NETWORKS[network.name]['is_free']:
            return 0
        if details.get('storage_area', False):
            return 600
        return 200

    def predeploy_validate(self):
        pass

    def compile(self):
        print('standalone token contract compile')
        if self.temp_directory:
            print('already compiled')
            return
        dest, preproc_config = create_directory(
            self, 'lastwill/neo-ico-contracts/*', 'token-config.json')
        token_holders = self.contract.tokenholder_set.all()
        preproc_params = {
            "constants": {
                "D_NAME": self.token_name,
                "D_SYMBOL": self.token_short_name,
                "D_DECIMALS": self.decimals,
                "D_PREMINT_COUNT": len(token_holders),
                "D_OWNER": self.admin_address,
                "D_CONTINUE_MINTING": self.future_minting,
                "D_PREMINT_SCRIPT_HASHES": [],
                "D_PREMINT_AMOUNTS": []
            }
        }
        for th in token_holders:
            preproc_params["constants"]["D_PREMINT_SCRIPT_HASHES"].append(
                list(binascii.unhexlify(address_to_scripthash(
                    th.address)))[::-1])
            amount = [
                int(x) for x in int(th.amount).to_bytes(
                    math.floor(math.log(int(th.amount) or 1, 256)) +
                    1, 'little')
            ]
            while len(amount) < 33:
                amount.append(0)
            preproc_params["constants"]["D_PREMINT_AMOUNTS"].append(amount)
        with open(preproc_config, 'w') as f:
            f.write(json.dumps(preproc_params))
        if os.system("/bin/bash -c 'cd {dest} && ./2_compile_token.sh'".format(
                dest=dest)):
            raise Exception('compiler error while deploying')
        print('dest', dest, flush=True)
        test_neo_token_params(preproc_config, preproc_params, dest)
        with open(preproc_config, 'w') as f:
            f.write(json.dumps(preproc_params))

        with open(
                path.join(
                    dest,
                    'NEP5.Contract/bin/Release/netcoreapp2.0/publish/NEP5.Contract.abi.json'
                ), 'rb') as f:
            token_json = json.loads(f.read().decode('utf-8-sig'))
        with open(path.join(
                dest,
                'NEP5.Contract/bin/Release/netcoreapp2.0/publish/NEP5.Contract.avm'
        ),
                  mode='rb') as f:
            bytecode = f.read()
        with open(path.join(dest, 'NEP5.Contract/Nep5Token.cs'), 'rb') as f:
            source_code = f.read().decode('utf-8-sig')
        neo_contract = NeoContract()
        neo_contract.abi = token_json
        neo_contract.bytecode = binascii.hexlify(bytecode).decode()
        neo_contract.source_code = source_code
        neo_contract.contract = self.contract
        neo_contract.original_contract = self.contract
        neo_contract.save()
        self.neo_contract = neo_contract
        self.save()

    @blocking
    @postponable
    def deploy(self, contract_params='0710', return_type='05'):
        self.compile()
        from_addr = NETWORKS[self.contract.network.name]['address']
        bytecode = self.neo_contract.bytecode
        neo_int = NeoInt(self.contract.network.name)
        print('from address', from_addr)
        details = {
            'name': 'WISH',
            'description': 'NEO smart contract',
            'email': '*****@*****.**',
            'version': '1',
            'author': 'MyWish'
        }
        param_list = {
            'from_addr': from_addr,
            'bin': bytecode,
            'needs_storage': True,
            'needs_dynamic_invoke': False,
            'contract_params': contract_params,
            'return_type': return_type,
            'details': details,
        }
        response = neo_int.mw_construct_deploy_tx(param_list)
        print('construct response', response, flush=True)
        binary_tx = response['tx']
        contract_hash = response['hash']

        tx = ContractTransaction.DeserializeFromBufer(
            binascii.unhexlify(binary_tx))
        tx = sign_neo_transaction(tx, binary_tx, from_addr)
        print('after sign', tx.ToJson()['txid'], flush=True)
        ms = StreamManager.GetStream()
        writer = BinaryWriter(ms)
        tx.Serialize(writer)
        ms.flush()
        signed_tx = ms.ToArray()
        print('full tx:', flush=True)
        print(signed_tx, flush=True)
        result = neo_int.sendrawtransaction(signed_tx.decode())
        print(result, flush=True)
        if not result:
            raise TxFail()
        print('contract hash:', contract_hash)
        print('result of send raw transaction: ', result)
        self.neo_contract.address = contract_hash
        self.neo_contract.tx_hash = tx.ToJson()['txid']
        self.neo_contract.save()

    @blocking
    @postponable
    @check_transaction
    def msg_deployed(self, message):
        neo_int = NeoInt(self.contract.network.name)
        from_addr = NETWORKS[self.contract.network.name]['address']
        param_list = {
            'from_addr':
            from_addr,
            'contract_params': [{
                'type': str(ContractParameterType.String),
                'value': 'init'
            }, {
                'type': str(ContractParameterType.Array),
                'value': []
            }],
            'addr':
            self.neo_contract.address,
        }

        response = neo_int.mw_construct_invoke_tx(param_list)

        binary_tx = response['tx']

        tx = ContractTransaction.DeserializeFromBufer(
            binascii.unhexlify(binary_tx))
        tx = sign_neo_transaction(tx, binary_tx, from_addr)
        print('after sign', tx.ToJson()['txid'])
        ms = StreamManager.GetStream()
        writer = BinaryWriter(ms)
        tx.Serialize(writer)
        ms.flush()
        signed_tx = ms.ToArray()
        print('signed_tx', signed_tx)
        result = neo_int.sendrawtransaction(signed_tx.decode())
        print(result, flush=True)
        if not result:
            raise TxFail()
        print('result of send raw transaction: ', result)
        self.contract.save()
        self.neo_contract.tx_hash = tx.ToJson()['txid']
        self.neo_contract.save()
        return

    @postponable
    @check_transaction
    def initialized(self, message):
        if self.contract.state not in ('WAITING_FOR_DEPLOYMENT', 'ENDED'):
            return

        take_off_blocking(self.contract.network.name)

        self.contract.state = 'ACTIVE' if self.future_minting else 'ENDED'
        self.contract.save()

        if self.contract.user.email:
            send_mail(
                common_subject,
                neo_token_text.format(addr=Crypto.ToAddress(
                    UInt160.ParseString(self.neo_contract.address)), ),
                DEFAULT_FROM_EMAIL, [self.contract.user.email])

    def finalized(self, message):
        self.contract.state = 'ENDED'
        self.contract.save()
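
The premint amount encoding in `compile` builds a little-endian byte list padded to 33 entries; a compact equivalent for positive amounts (the function name is illustrative):

def encode_amount(amount, width=33):
    # int.to_bytes with an explicit width performs the minimal-length-plus-padding
    # steps from compile() in one call, for amounts < 256**width.
    return list(int(amount).to_bytes(width, 'little'))

assert encode_amount(1) == [1] + [0] * 32
assert encode_amount(256) == [0, 1] + [0] * 31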
Code example #32
File: test_json.py  Project: 7924102/django
    def test_formfield(self):
        model_field = JSONField()
        form_field = model_field.formfield()
        self.assertIsInstance(form_field, forms.JSONField)
Code example #33
class FXTable(models.Model):
    created = models.DateTimeField("created", auto_now=True)
    kind = models.TextField()
    payload = JSONField()
    completed = models.DateTimeField(null=True)
    completion_payload = JSONField(null=True)