Example #1
class RawMetadata(Document):
    srcid = StringField(required=True)
    building = StringField(required=True)
    metadata = DictField()
    meta = {'allow_inheritance': True}
class MongoengineUserMixin(UserMixin):
    """Social Auth association model"""
    user = None
    provider = StringField(max_length=32)
    uid = StringField(max_length=255, unique_with='provider')
    extra_data = DictField()

    def str_id(self):
        return str(self.id)

    @classmethod
    def get_social_auth_for_user(cls, user, provider=None, id=None):
        qs = cls.objects
        if provider:
            qs = qs.filter(provider=provider)
        if id:
            qs = qs.filter(id=id)
        return qs.filter(user=user.id)

    @classmethod
    def create_social_auth(cls, user, uid, provider):
        if not isinstance(uid, six.string_types):
            uid = str(uid)
        return cls.objects.create(user=user.id, uid=uid, provider=provider)

    @classmethod
    def username_max_length(cls):
        username_field = cls.username_field()
        field = getattr(cls.user_model(), username_field)
        return field.max_length

    @classmethod
    def username_field(cls):
        return getattr(cls.user_model(), 'USERNAME_FIELD', 'username')

    @classmethod
    def create_user(cls, *args, **kwargs):
        kwargs['password'] = UNUSABLE_PASSWORD
        if 'email' in kwargs:
            # Empty string makes email regex validation fail
            kwargs['email'] = kwargs['email'] or None
        return cls.user_model().objects.create(*args, **kwargs)

    @classmethod
    def allowed_to_disconnect(cls, user, backend_name, association_id=None):
        if association_id is not None:
            qs = cls.objects.filter(id__ne=association_id)
        else:
            qs = cls.objects.filter(provider__ne=backend_name)
        qs = qs.filter(user=user)

        if hasattr(user, 'has_usable_password'):
            valid_password = user.has_usable_password()
        else:
            valid_password = True

        return valid_password or qs.count() > 0

    @classmethod
    def changed(cls, user):
        user.save()

    def set_extra_data(self, extra_data=None):
        if super(MongoengineUserMixin, self).set_extra_data(extra_data):
            self.save()

    @classmethod
    def disconnect(cls, entry):
        entry.delete()

    @classmethod
    def user_exists(cls, *args, **kwargs):
        """
        Return True/False if a User instance exists with the given arguments.
        Arguments are directly passed to filter() manager method.
        """
        if 'username' in kwargs:
            kwargs[cls.username_field()] = kwargs.pop('username')
        return cls.user_model().objects.filter(*args, **kwargs).count() > 0

    @classmethod
    def get_username(cls, user):
        return getattr(user, cls.username_field(), None)

    @classmethod
    def get_user(cls, pk):
        try:
            return cls.user_model().objects.get(id=pk)
        except cls.user_model().DoesNotExist:
            return None

    @classmethod
    def get_users_by_email(cls, email):
        return cls.user_model().objects.filter(email__iexact=email)

    @classmethod
    def get_social_auth(cls, provider, uid):
        if not isinstance(uid, six.string_types):
            uid = str(uid)
        try:
            return cls.objects.get(provider=provider, uid=uid)
        except cls.DoesNotExist:
            return None
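
A minimal usage sketch for the mixin above, assuming a hypothetical concrete storage document (the UserSocialAuth class name, the User reference, and the uid/provider values are illustrative, not part of the original snippet):

class UserSocialAuth(Document, MongoengineUserMixin):
    """Hypothetical concrete association document built on the mixin."""
    user = ReferenceField('User')

# 'user' is assumed to be an already-saved application user document.
assoc = UserSocialAuth.get_social_auth(provider='google-oauth2', uid='1234567890')
if assoc is None:
    assoc = UserSocialAuth.create_social_auth(user, '1234567890', 'google-oauth2')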
Example #3
class TaskStatus(Document, ReaperMixin):
    """
    Represents a task.
    This inherits from mongoengine.Document and defines the schema for the documents
    in the task_status collection. The documents in this collection may be reaped,
    so it inherits from ReaperMixin.

    :ivar task_id:     identity of the task this status corresponds to
    :type task_id:     basestring
    :ivar worker_name: The name of the worker that the Task is in
    :type worker_name: basestring
    :ivar tags:        custom tags on the task
    :type tags:        list
    :ivar state:       state of callable in its lifecycle
    :type state:       basestring
    :ivar error: Any errors or collections of errors that occurred while this task was running
    :type error: dict (created from a PulpException)
    :ivar spawned_tasks: List of tasks that were spawned during the running of this task
    :type spawned_tasks: list of str
    :ivar progress_report: A report containing information about task's progress
    :type progress_report: dict
    :ivar task_type:   the fully qualified (package/method) type of the task
    :type task_type:   basestring
    :ivar start_time:  ISO8601 representation of the time the task started executing
    :type start_time:  basestring
    :ivar finish_time: ISO8601 representation of the time the task completed
    :type finish_time: basestring
    :ivar result:      return value of the callable, if any
    :type result:      any
    :ivar exception:   Deprecated. This is always None.
    :type exception:   None
    :ivar traceback:   Deprecated. This is always None.
    :type traceback:   None
    """

    task_id = StringField(unique=True, required=True)
    worker_name = StringField()
    tags = ListField(StringField())
    state = StringField(choices=constants.CALL_STATES,
                        default=constants.CALL_WAITING_STATE)
    error = DictField(default=None)
    spawned_tasks = ListField(StringField())
    progress_report = DictField()
    task_type = StringField()
    start_time = ISO8601StringField()
    finish_time = ISO8601StringField()
    result = DynamicField()

    # These are deprecated, and will always be None
    exception = StringField()
    traceback = StringField()

    # For backward compatibility
    _ns = StringField(default='task_status')

    meta = {
        'collection': 'task_status',
        'indexes': ['-task_id', '-tags', '-state'],
        'allow_inheritance': False,
        'queryset_class': CriteriaQuerySet
    }

    def save_with_set_on_insert(self, fields_to_set_on_insert):
        """
        Save the current state of the TaskStatus to the database, using an upsert operation.
        The upsert operation will only set those fields if this becomes an insert operation,
        otherwise those fields will be ignored. This also validates the fields according to the
        schema above.

        This is required because the current mongoengine version we are using does not support
        upsert with set_on_insert through mongoengine queries. Once we update to the version
        which supports this, this method can be deleted and its usages can be replaced
        with mongoengine upsert queries.

        :param fields_to_set_on_insert: A list of field names that should be updated with Mongo's
                                        $setOnInsert operator.
        :type  fields_to_set_on_insert: list
        """

        # If fields_to_set_on_insert is None or empty, just save
        if not fields_to_set_on_insert:
            self.save()
            return

        # This will be used in place of superclass' save method, so we need to call validate()
        # explicitly.
        self.validate()

        stuff_to_update = dict(copy.deepcopy(self._data))

        # Let's pop the $setOnInsert attributes out of the copy of self so that we can pass the
        # remaining attributes to the $set operator in the query below.
        set_on_insert = {}
        for field in fields_to_set_on_insert:
            set_on_insert[field] = stuff_to_update.pop(field)
        task_id = stuff_to_update.pop('task_id')

        update = {'$set': stuff_to_update, '$setOnInsert': set_on_insert}
        TaskStatus._get_collection().update({'task_id': task_id},
                                            update,
                                            upsert=True)

    @classmethod
    def post_save(cls, sender, document, **kwargs):
        """
        Send a taskstatus message on save.

        :param sender: class of sender (unused)
        :type  sender: class
        :param document: mongoengine document
        :type  document: mongoengine.Document

        """
        send_taskstatus_message(document,
                                routing_key="tasks.%s" % document['task_id'])
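
A brief sketch of how save_with_set_on_insert is meant to be used: fields such as start_time should only be written when the document is first inserted, while later saves of the same task_id must leave them untouched (CALL_FINISHED_STATE is assumed to come from the same constants module as CALL_WAITING_STATE above):

# First call inserts the document, so '$setOnInsert' writes start_time.
status = TaskStatus(task_id='abc123', state=constants.CALL_WAITING_STATE)
status.start_time = '2015-01-01T00:00:00Z'
status.save_with_set_on_insert(fields_to_set_on_insert=['start_time'])

# A later call with the same task_id matches the existing document, so the
# '$setOnInsert' part is ignored and only the '$set' fields are updated.
status.state = constants.CALL_FINISHED_STATE
status.save_with_set_on_insert(fields_to_set_on_insert=['start_time'])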
Example #4
class ResponseCustom(EmbeddedDocument):
    custom = DictField(required=True)
Example #5
class User(Document, BaseEventumDocument):
    """A user model.

    The :class:`User` object is only created once the user logs in for the
    first time and confirms the details of their account.
    :ivar date_created: :class:`mongoengine.fields.DateTimeField` - The date
        that this user was created.
    :ivar date_modified: :class:`mongoengine.fields.DateTimeField` - The date
        that this user was last modified.
    :ivar gplus_id: :class:`mongoengine.fields.StringField` - The Google+ ID
        for this user.  It's what we use in the Google+ authentication.
    :ivar name: :class:`mongoengine.fields.StringField` - The user's name.
    :ivar slug: :class:`mongoengine.fields.StringField` - A URL slug for their
        internal profile page.
    :ivar email: :class:`mongoengine.fields.EmailField` - The user's email
        address.
    :ivar roles: :class:`mongoengine.fields.ListField` of
        :class:`mongoengine.fields.StringField` - A list of roles that the user
        has.
    :ivar privileges: :class:`mongoengine.fields.DictField` - A dictionary of
        privileges that the user has.  Often determined solely by their
        ``user_type``.
    :ivar image_url: :class:`mongoengine.fields.URLField` - The URL of the
        user's profile picture.
    :ivar image: :class:`mongoengine.fields.ReferenceField` - The local image
        for the user's profile picture.
    :ivar user_type: :class:`mongoengine.fields.StringField` - The type of the
        user. Can either be ``"fake_user"``, ``"editor"``, ``"publisher"``, or
        ``"admin"``.  The selection of user type determines their
        ``privileges``.
    :ivar last_logon: :class:`mongoengine.fields.DateTimeField` - The date of
        this user's last logon.
    """

    date_created = DateTimeField(required=True, default=now)
    date_modified = DateTimeField(required=True, default=now)
    gplus_id = StringField(required=True, unique=True)
    name = StringField(required=True, max_length=510)
    slug = StringField(required=True,
                       max_length=510,
                       unique=True,
                       regex=Regex.SLUG_REGEX)
    email = EmailField(required=True, unique=True)
    roles = ListField(StringField(db_field="role"), default=list)
    privileges = DictField(required=True, default={})
    image_url = URLField()
    image = ReferenceField('Image')
    user_type = StringField(default='editor', regex=USER_TYPE_REGEX)
    last_logon = DateTimeField()

    # MongoEngine ORM metadata
    meta = {'allow_inheritance': True, 'indexes': ['email', 'gplus_id']}

    def can(self, privilege):
        """Returns True if the user has ``privilege``.

        :returns: True if the user has ``privilege``
        :rtype: bool
        """
        return self.privileges.get(privilege)

    def get_profile_picture(self, size=50):
        """Returns the url to the profile picture for the user.

        TODO: This method needs major fixing.  What's going on with that URL?

        :param int size: The size of the image to pass, if the size can be
            changed.

        :returns: The URL of the image.
        :rtype: str
        """
        if self.image:
            return self.image.url()
        if not self.image_url:
            # Fall back to the default profile picture from the app config.
            return url_for(
                'eventum.static',
                filename=current_app.config['EVENTUM_DEFAULT_PROFILE_PICTURE'])
        if "googleusercontent.com" in self.image_url:
            return self.image_url + str(size)
        return self.image_url

    def register_login(self):
        """Update the model as having logged in."""
        self.last_logon = now()

    def clean(self):
        """Called by Mongoengine on every ``.save()`` to the object.

        Update date_modified and apply privileges shorthand notation.

        :raises: :class:`wtforms.validators.ValidationError`
        """
        self.date_modified = now()

        # Update self.privileges with one of the USER_TYPES dictionaries
        self.privileges.update(USER_TYPES[self.user_type])

        # Update the slug for the user (used in URLs)
        new_slug = self.name.lower().replace(' ', '-')
        new_slug = re.sub(r"['._]", "", new_slug)
        if User.objects(slug=new_slug).count() > 0:
            i = 2
            new_slug = new_slug + "-{}".format(i)
            while User.objects(slug=new_slug).count() > 0:
                i += 1
                new_slug = re.sub(r"-([0-9])*$", "-{}".format(i), new_slug)
        self.slug = new_slug

        if self.image_url and "googleusercontent.com" in self.image_url:
            self.image_url = re.sub(r"sz=([0-9]*)$", "sz=", self.image_url)

    def id_str(self):
        """The id of this object, as a string.

        :returns: The id
        :rtype: str
        """
        return str(self.id)

    def role(self):
        """Returns the role of the user, in plain English.  It is either
        ``"Admin"``, ``"Publisher"``, ``"Editor"``, or ``"Fake User"``.

        :returns: The role.
        :rtype: str
        """
        if self.can('admin'):
            return "Admin"
        if self.can('publish'):
            return "Publisher"
        if self.can('edit'):
            return "Editor"
        return "Fake User"

    def __repr__(self):
        """The representation of this user.

        :returns: The user's details.
        :rtype: str
        """
        return ('User(id=%r, name=%r, email=%r, roles=%r, privileges=%r, '
                'gplus_id=%r, date_created=%r)' %
                (self.id, self.name, self.email, self.roles, self.privileges,
                 self.gplus_id, self.date_created))

    def __unicode__(self):
        """This user, as a unicode string.

        :returns: The user encoded as a string.
        :rtype: str
        """
        if self.can('admin'):
            return '%r <%r> (Admin)' % (self.name, self.email)
        if self.can('publish'):
            return '%r <%r> (Publisher)' % (self.name, self.email)
        if self.can('edit'):
            return '%r <%r> (Editor)' % (self.name, self.email)
        else:
            return '%r <%r> (Fake User)' % (self.name, self.email)
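
The privileges shorthand above works as follows: setting user_type and saving lets clean() merge in the matching USER_TYPES entry, which can() and role() then read. A small sketch, assuming the USER_TYPES mapping shown in the comment:

# Assumes USER_TYPES maps e.g. 'publisher' -> {'edit': True, 'publish': True, 'admin': False}.
u = User(name='Ada Lovelace', email='ada@example.com', gplus_id='1080',
         slug='ada-lovelace', user_type='publisher')
u.save()                  # clean() stamps date_modified, fills privileges, dedupes the slug
print(u.can('publish'))   # -> True
print(u.role())           # -> 'Publisher'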
Example #6
class Offer(BaseDocument):
    TYPE_CREDIT = 'credit'
    TYPE_FUTURES = 'futures'
    TYPE_FACTORING = 'factoring'

    salesman = ReferenceField('User',
                              reverse_delete_rule=mongoengine.NULLIFY,
                              default=None)
    type = StringField(choices=(TYPE_CREDIT, TYPE_FUTURES, TYPE_FACTORING),
                       default=None)

    description = StringField(default=None)
    zip_code = StringField(default=None)
    reg_service = StringField(default=None)
    options = DictField(default={})
    price = IntField(default=None)

    bc_hash = StringField(default=None)

    ru_card = DictField(default=None)
    en_card = DictField(default=None)

    creation_date = DateTimeField()
    update_date = DateTimeField()

    @property
    def fprice(self):
        try:
            return '{:,}'.format(self.price).replace(',', ' ')
        except Exception:
            return ''

    def process(self, bot):
        offer_type = bot.match_command(
            bot.t(['SELL_CREDIT', 'SELL_FUTURES', 'SELL_FACTORING']))
        if offer_type and self.type is None:
            self.set_type(bot, offer_type.get('command'))
            if self.type == self.TYPE_CREDIT:
                return bot.send(bot.t('CREDIT_ENTER_DESC'),
                                reply_markup=ReplyKeyboardHide())
            elif self.type == self.TYPE_FUTURES:
                return bot.send(bot.t('FUTURES_ENTER_DESC'),
                                reply_markup=ReplyKeyboardHide())
            elif self.type == self.TYPE_FACTORING:
                return bot.send(bot.t('FACTORING_ENTER_DESC'),
                                reply_markup=ReplyKeyboardHide())

        if self.type == self.TYPE_CREDIT:
            self._process_credit(bot)
        elif self.type == self.TYPE_FUTURES:
            self._process_futures(bot)
        elif self.type == self.TYPE_FACTORING:
            self._process_factoring(bot)

    def _process_credit(self, bot):
        if self.description is None:
            self.description = bot.text
            bot.send(bot.t('CREDIT_ENTER_ZIP'))
        elif self.zip_code is None:
            self.zip_code = bot.text
            bot.send(bot.t('CREDIT_ENTER_REG_SERVICE'))
        elif self.reg_service is None:
            self.reg_service = bot.text
            bot.send(bot.t('CREDIT_ENTER_LOAN_ID'))
        elif self.options.get('loan_id') is None:
            self.options['loan_id'] = bot.text
            bot.send(bot.t('CREDIT_ENTER_LOAN_AMOUNT'))
        elif self.options.get('loan_amount') is None:
            self.options['loan_amount'] = utils.to_int(bot.text, None)
            if self.options['loan_amount'] is None:
                return bot.send(bot.t('ENTER_NUMBER'))
            bot.send(bot.t('CREDIT_ENTER_INTEREST_RATE'))
        elif self.options.get('interest_rate') is None:
            self.options['interest_rate'] = bot.text
            bot.send(bot.t('CREDIT_ENTER_LOAN_LENGTH'))
        elif self.options.get('loan_length') is None:
            self.options['loan_length'] = bot.text
            bot.send(bot.t('CREDIT_ENTER_LOAN_STATUS'),
                     reply_markup=ReplyKeyboard([[bot.t('LOAN_STATUS_EARLY')],
                                                 [bot.t('LOAN_STATUS_NORMAL')],
                                                 [bot.t('LOAN_STATUS_LATE')]]))
        elif self.options.get('loan_status') is None:
            self.options['loan_status'] = bot.text
            bot.send(bot.t('CREDIT_ENTER_SELLERS_WARRANTY'),
                     reply_markup=ReplyKeyboard([[
                         bot.t('WARRANTY_FULL'),
                         bot.t('WARRANTY_PARTLY'),
                         bot.t('WARRANTY_NONE')
                     ]],
                                                one_time_keyboard=True))
        elif self.options.get('sellers_warranty') is None:
            self.options['sellers_warranty'] = bot.text
            bot.send(bot.t('CREDIT_ENTER_PRICE'),
                     reply_markup=ReplyKeyboardHide())
        elif self.price is None:
            self.price = utils.to_int(bot.text, None)
            if self.price is None:
                return bot.send(bot.t('ENTER_NUMBER'))
            self._create_contract_send(bot)
        else:
            self._create_contract_process(bot)

    def _process_futures(self, bot):
        if self.description is None:
            self.description = bot.text
            bot.send(bot.t('FUTURES_ENTER_ZIP'))
        elif self.zip_code is None:
            self.zip_code = bot.text
            bot.send(bot.t('FUTURES_ENTER_REG_SERVICE'))
        elif self.reg_service is None:
            self.reg_service = bot.text
            bot.send(bot.t('FUTURES_ENTER_LOAN_ID'))
        elif self.options.get('loan_id') is None:
            self.options['loan_id'] = bot.text
            bot.send(bot.t('FUTURES_CHOOSE_CONTRACT_TYPE'),
                     reply_markup=ReplyKeyboard([
                         [bot.t('FUTURES_TYPE_SETTLEMENT')],
                         [bot.t('FUTURES_TYPE_DELIVERABLE')],
                     ],
                                                one_time_keyboard=True))
        elif self.options.get('contract_type') is None:
            self.options['contract_type'] = bot.text
            bot.send(bot.t('FUTURES_ENTER_CONTRACT_SIZE'),
                     reply_markup=ReplyKeyboardHide())
        elif self.options.get('contract_size') is None:
            self.options['contract_size'] = bot.text
            bot.send(bot.t('FUTURES_CONTRACT_MATURITY'))
        elif self.options.get('contract_maturity') is None:
            self.options['contract_maturity'] = bot.text
            bot.send(bot.t('FUTURES_ENTER_DELIVERY_DATE'))
        elif self.options.get('delivery_date') is None:
            self.options['delivery_date'] = bot.text
            bot.send(bot.t('FUTURES_PRICE'))
        elif self.price is None:
            self.price = utils.to_int(bot.text, None)
            if self.price is None:
                return bot.send(bot.t('ENTER_NUMBER'))
            self._create_contract_send(bot)
        else:
            self._create_contract_process(bot)

    def _process_factoring(self, bot):
        if self.description is None:
            self.description = bot.text
            bot.send(bot.t('FACTORING_ENTER_ZIP'))
        elif self.zip_code is None:
            self.zip_code = bot.text
            bot.send(bot.t('FACTORING_ENTER_REG_SERVICE'))
        elif self.reg_service is None:
            self.reg_service = bot.text
            bot.send(bot.t('FACTORING_ENTER_LOAN_ID'))
        elif self.options.get('loan_id') is None:
            self.options['loan_id'] = bot.text
            bot.send(bot.t('FACTORING_PAY_REQS'),
                     reply_markup=ReplyKeyboard([
                         [bot.t('FACTORING_REGRESS')],
                         [bot.t('FACTORING_NO_REGRESS')],
                     ],
                                                one_time_keyboard=True))
        elif self.options.get('pay_reqs') is None:
            self.options['pay_reqs'] = bot.text
            bot.send(bot.t('FACTORING_TITLE_SUPPLIER'),
                     reply_markup=ReplyKeyboardHide())
        elif self.options.get('title_supplier') is None:
            self.options['title_supplier'] = bot.text
            bot.send(bot.t('FACTORING_SUM_REQS'))
        elif self.options.get('sum_reqs') is None:
            self.options['sum_reqs'] = bot.text
            bot.send(bot.t('FACTORING_DATE_REQS_PAY'))
        elif self.options.get('date_reqs_pay') is None:
            self.options['date_reqs_pay'] = bot.text
            bot.send(bot.t('FACTORING_PRICE'))
        elif self.price is None:
            self.price = utils.to_int(bot.text, None)
            if self.price is None:
                return bot.send(bot.t('ENTER_NUMBER'))
            self._create_contract_send(bot)
        else:
            self._create_contract_process(bot)

    @gen.coroutine
    def _create_contract_send(self, bot):
        try:
            bot.send(bot.t('GENERATE_PREVIEW_START'))

            yield self.generate_img(lang=bot.user.lang)

            path = self.get_image_path(bot.user.lang)
            if not path:
                raise Exception

            with open(path, 'rb') as f:
                yield bot.send_photo(files=(('photo', path, f.read()), ),
                                     caption=bot.t('GENERATE_PREVIEW_END'))
        except Exception as e:
            bot.send(bot.t('GENERATE_PREVIEW_FAIL'))
            traceback.print_exc()

        bot.send(bot.t('SAIL_YOU_CREATE_CONTRACT'),
                 reply_markup=ReplyKeyboard([[
                     bot.t('YES_APPROVE'),
                     bot.t('NO_FEAR'),
                     bot.t('WHAT_IS_BLOCKCHAIN')
                 ]],
                                            one_time_keyboard=False))

    @gen.coroutine
    def _create_contract_process(self, bot):
        if bot.match_command(bot.t('YES_APPROVE')):
            bot.send(bot.t('REGISTER_BC_BEGIN'),
                     reply_markup=ReplyKeyboardHide())

            # registration in the registry
            yield gen.sleep(1)
            self.bc_hash = PasswordHelper.get_hash(datetime.now().isoformat())

            # generate the card images
            yield self.generate_img()

            self.salesman = self.salesman
            self.save()

            bot._on_start(welcome_text=bot.t('REGISTER_BC_END'),
                          keyboard=ReplyKeyboard([[bot.t('THNX_UNDERSTAND')]]))
        elif bot.match_command(bot.t('NO_FEAR')):
            bot._on_start(welcome_text=bot.t('FEAR_BLOCKCHAIN_WIKI'))
        elif bot.match_command(bot.t('WHAT_IS_BLOCKCHAIN')):
            bot.send(bot.t('WHAT_IS_BLOCKCHAIN_WIKI'))

    @gen.coroutine
    def generate_img(self, sync=False, *args, **kwargs):
        if sync:
            client = HTTPClient()
        else:
            client = AsyncHTTPClient()

        if kwargs.get('lang') in [None, Text.LANG_RU]:
            # generate the Russian card
            req = HTTPRequest(
                get_screenshot_img_url(self.get_id(), Text.LANG_RU))
            if sync:
                res = client.fetch(req)
            else:
                res = yield client.fetch(req)

            ru_path = gen_path()
            mkdir(ru_path.get('folder'))
            with open(ru_path.get('fullname'), "wb") as f:
                f.write(res.body)

            self.ru_card = ru_path

        if kwargs.get('lang') in [None, Text.LANG_EN]:
            # generate the English card
            req = HTTPRequest(
                get_screenshot_img_url(self.get_id(), Text.LANG_EN))
            if sync:
                res = client.fetch(req)
            else:
                res = yield client.fetch(req)

            en_path = gen_path()
            mkdir(en_path.get('folder'))
            with open(en_path.get('fullname'), "wb") as f:
                f.write(res.body)

            self.en_card = en_path

    def get_image_path(self, lang):
        path = None

        if lang == Text.LANG_RU:
            if self.ru_card is None:
                return False
            path = self.ru_card.get('relname')
        elif lang == Text.LANG_EN:
            if self.en_card is None:
                return False
            path = self.en_card.get('relname')

        if path is None:
            return False

        return '%s/%s' % (options.upload_path, path)

    def set_type(self, bot, offer_type):
        if offer_type == bot.t('SELL_CREDIT'):
            self.type = Offer.TYPE_CREDIT
        elif offer_type == bot.t('SELL_FUTURES'):
            self.type = Offer.TYPE_FUTURES
        elif offer_type == bot.t('SELL_FACTORING'):
            self.type = Offer.TYPE_FACTORING

    def save(self, *args, **kwargs):
        if not self.creation_date:
            self.creation_date = datetime.now(tz=timezone('UTC'))
        self.update_date = datetime.now(tz=timezone('UTC'))
        return super(Offer, self).save(*args, **kwargs)

    def to_dict_impl(self, **kwargs):
        return {
            'id': self.get_id(),
            'type': self.type,
            'description': self.description,
            'creation_date': self.creation_date,
            'zip_code': self.zip_code,
            'reg_service': self.reg_service,
            'options': self.options,
            'price': self.price,
            'salesman': self.salesman.to_dict() if self.salesman else None,
            'bc_hash': self.bc_hash
        }
Example #7
class QueueInfoListContent(EmbeddedDocument):
    items = ListField(DictField())
Example #8
class GitRepoDetailsModel(Document):
    _id = StringField(required=True, max_length=200)
    repo_details = DictField(required=True)
Example #9
class Log(ExtrovirtsDocument):
    version = StringField()
    active_operation = DictField()
    active_step = DictField()
    event_stream = ListField(EncryptedDictField())
Example #10
class Item(Document):
    name = StringField(required=True)
    attrs = DictField(required=True)
    price = IntField(required=True)
    meta = {"collection": "items"}
Example #11
class Structure(Document):
    client = ReferenceField(Client)
    date = DateTimeField(default=datetime.now)
    structure = DictField()
Example #12
class CveItem(Document):
    cve = DictField(required=True)
    configurations = DictField(required=False)
    impact = DictField(required=False)
    publishedDate = DateTimeField(required=False)
    lastModifiedDate = DateTimeField(required=False)
    cveid = StringField(required=True, primary_key=True)

    def __init__(self, **cve_item_definition):
        super(CveItem, self).__init__()
        self.set_data(**cve_item_definition)

    def set_data(self, **cve_item_definition):
        self.cve = cve_item_definition['cve']

        if not self.cveid:
            self.cveid = self.cve['CVE_data_meta']['ID']

        try:
            self.configurations = cve_item_definition['configurations']
        except KeyError:
            pass

        try:
            self.impact = cve_item_definition['impact']
        except KeyError:
            pass

        try:
            if isinstance(cve_item_definition['publishedDate'], datetime):
                self.publishedDate = cve_item_definition['publishedDate']
            else:
                self.publishedDate = datetime.strptime(
                    cve_item_definition['publishedDate'], '%Y-%m-%dT%H:%MZ')

        except KeyError:
            pass

        try:
            if isinstance(cve_item_definition['lastModifiedDate'], datetime):
                self.lastModifiedDate = cve_item_definition['lastModifiedDate']
            else:
                self.lastModifiedDate = datetime.strptime(
                    cve_item_definition['lastModifiedDate'], '%Y-%m-%dT%H:%MZ')
        except KeyError:
            pass

    @property
    def cvss3vector(self):
        try:
            return self.impact['baseMetricV3']['cvssV3']['vectorString']
        except KeyError:
            pass
        return ""

    @property
    def cvss3score(self):
        try:
            return self.impact['baseMetricV3']['cvssV3']['baseScore']
        except KeyError:
            pass
        return ""

    @property
    def cvss3severity(self):
        try:
            return self.impact['baseMetricV3']['cvssV3']['baseSeverity']
        except KeyError:
            pass
        return ""

    @property
    def cwe(self):
        try:
            return self.cve['problemtype']['problemtype_data'][0][
                'description'][0]['value']
        except (KeyError, IndexError):
            pass
        return ""

    @property
    def description(self):
        try:
            return self.cve['description']['description_data'][0]['value']
        except (KeyError, IndexError):
            pass
        return ""

    @property
    def references(self):
        return self.cve['references']['reference_data']
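
A minimal sketch of loading one NVD-style record into CveItem; the nested keys follow the NVD 1.1 feed layout that the properties above expect, and the record itself is made up:

raw_item = {
    'cve': {
        'CVE_data_meta': {'ID': 'CVE-2099-0001'},
        'description': {'description_data': [{'lang': 'en', 'value': 'Example flaw.'}]},
        'references': {'reference_data': []},
    },
    'impact': {'baseMetricV3': {'cvssV3': {'vectorString': 'CVSS:3.1/AV:N/...',
                                           'baseScore': 9.8,
                                           'baseSeverity': 'CRITICAL'}}},
    'publishedDate': '2099-01-01T00:00Z',
}

item = CveItem(**raw_item)
print(item.cveid, item.cvss3score)   # -> CVE-2099-0001 9.8
item.save()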
Example #13
class JupyterNotebookTransformer(WorkflowTool):
    """Uses Jupyter notebook to perform a certain optimization step"""

    notebook = BinaryField(required=True,
                           default=open(
                               os.path.join(os.path.dirname(__file__),
                                            'jupyter_templates',
                                            'python2_template.ipynb')).read())
    """Stores the actual notebook"""

    calc_settings = DictField()
    """Any settings for the calculation"""
    def __init__(self, *args, **kwargs):
        super(JupyterNotebookTransformer, self).__init__(*args, **kwargs)

    @classmethod
    def load_notebook(cls, name, description, path):
        """Load a notebook from disk, turn into transformer

        :param name: string, name of transformer
        :param description: string, description of transformer
        :param path: string, path to the notebook. None for default notebook
        :return: JupyterNotebookTransformer
        """
        x = JupyterNotebookTransformer(name=name, description=description)

        if path is None:
            x.notebook = open(
                os.path.join(os.path.dirname(inspect.getfile(x.__class__)),
                             'jupyter_templates',
                             'python2_template.ipynb')).read()
        else:
            x.notebook = open(path).read()
        return x

    def get_settings(self):
        settings = super(JupyterNotebookTransformer, self).get_settings()

        # Don't print the whole notebook
        del settings['notebook']

        # Make the settings a little prettier
        del settings['calc_settings']
        settings.update(self.calc_settings)

        return settings

    def get_form(self):
        super_form = super(JupyterNotebookTransformer, self).get_form()

        def add_settings(cls):
            """Add calculation settings to form"""
            for name, value in self.calc_settings.iteritems():
                setattr(cls, name, wtfields.StringField(name, default=value))
            return cls

        @add_settings
        class MyForm(super_form):
            nbfile = wtfields.FileField(
                'Notebook file',
                description=
                'Jupyter notebook to be executed. Must be formatted in the Pinyon '
                'file style (see examples). Leave this blank to keep current notebook',
                render_kw={'class': 'form-control-file'})

        # Add other fields
        return MyForm

    def process_form(self, form, request):
        super(JupyterNotebookTransformer, self).process_form(form, request)

        # Read the notebook
        if not isinstance(request.POST['nbfile'], unicode):
            nbfile = request.POST['nbfile'].file
            self.notebook = str(nbfile.read())

        # Read in the calculation settings
        for name in self.calc_settings.keys():
            self.calc_settings[name] = form[name].data

    def get_file_information(self):
        info = super(JupyterNotebookTransformer, self).get_file_information()

        info['notebook'] = dict(
            description='Jupyter notebook being run by this calculation',
            extension='ipynb')

        return info

    def _run(self, data, other_inputs):
        # Combine data into a form to send to the notebook
        inputs = dict(other_inputs)
        inputs['data'] = data

        # Run the notebook
        nb, outputs = run_notebook(self.notebook, inputs, self.calc_settings)
        data = outputs['data']
        del outputs['data']

        # Save the notebook
        self.notebook = str(nbformat.writes(nb))
        return data, outputs

    def load_workbook(self, f):
        """Load a workbook

        :param f: string or file-like object, path or object to be read from"""

        # Prepare to read
        if isinstance(f, str):
            fp = open(f, 'r')
        else:
            fp = f

        # Read it
        nb_str = fp.read()

        # Set the notebook
        self.set_notebook(nb_str)

    def set_notebook(self, nb_str):
        """Set the notebook from string, and check the format

        :param nb_str: string, notebook to be set
        """
        # Make sure it is in the correct format
        nb = nbformat.reads(nb_str, nbformat.NO_CONVERT)
        check_notebook(nb)

        # Set and leave
        self.notebook = nb_str

    def write_notebook(self, f):
        """Write the notebook to disk with the current data

        :param f: string or file-like object, path or object to write to. None to return the notebook as a string"""

        # Prepare to write
        if isinstance(f, str):
            fp = open(f, 'w')
        else:
            fp = f

        # Add data to the notebook
        nb = nbformat.reads(self.notebook, nbformat.NO_CONVERT)
        inputs = self.get_inputs()
        inputs['data'] = inputs['data'].get_object()

        add_data(nb, inputs, self.calc_settings)

        # Write the notebook
        notebook = nbformat.writes(nb, nbformat.NO_CONVERT)
        if f is None:
            return notebook
        fp.write(notebook)

        if isinstance(f, str):
            fp.close()
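
A short round-trip sketch for the notebook helpers above (the name, description, paths, and setting are illustrative, and write_notebook assumes the tool's inputs are available from its workflow):

tool = JupyterNotebookTransformer.load_notebook('fit-model',
                                                'Runs a fitting notebook',
                                                '/tmp/analysis_template.ipynb')
tool.calc_settings['n_iterations'] = '100'
tool.write_notebook('/tmp/analysis_with_data.ipynb')   # embeds inputs and settings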
Example #14
class Session(Document):
    id = StringField(primary_key=True)
    user = StringField()
    last_modified = DateTimeField()
    created_at = DateTimeField()
    data = DictField()
    selection = EmbeddedDocumentField(RVDSelection)
    contragent = ReferenceField(Contragent)
    cart = EmbeddedDocumentField(Cart)
    sale = IntField()
    comment = StringField()

    def get_safe(self) -> dict:
        session = document_to_dict(self)
        if session.get('contragent'):
            session['contragent'] = self.contragent.get_safe()
        if session.get('cart'):
            session['cart'] = self.cart.get_safe()
        if self.selection:
            session['selection'] = self.selection.get_safe()
        return session

    @property
    def dict(self):
        return self.to_mongo().to_dict()

    def __init__(self, *args, **values):
        super().__init__(*args, **values)

    def add_data(self, data):
        self.data.update(data)

    def set_user(self, user):
        self.user = user

    def set_data(self, key, val):
        self.data[key] = val

    def remove_data(self, key):
        if key in self.data:
            del self.data[key]

    def to_dict(self):
        return {
            "_id": self.id,
            "user": self.user,
            "data": self.data,
            "last_modified": self.last_modified,
        }

    def get_id(self):
        return self.id

    def set_id(self, sid):
        self.id = sid

    def create_from_struct(self, struct):
        self.set_id(struct["_id"])
        self.data = struct["data"]
        self.last_modified = struct["last_modified"]
        self.user = struct["user"]
Example #15
class Repository(AutoRetryDocument):
    """
    Defines schema for a pulp repository in the `repos` collection.

    :ivar repo_id: unique across all repos
    :type repo_id: mongoengine.StringField
    :ivar display_name: user-readable name of the repository
    :type display_name: mongoengine.StringField
    :ivar description: free form text provided by the user to describe the repo
    :type description: mongoengine.StringField
    :ivar notes: arbitrary key-value pairs programmatically describing the repo;
                 these are intended as a way to describe the repo usage or
                 organizational purposes and should not vary depending on the
                 actual content of the repo
    :type notes: mongoengine.DictField
    :ivar content_unit_counts: key-value pairs of number of units associated with this repo.
                               This is different than the number of associations, since a
                               unit may be associated multiple times.
    :type content_unit_counts: mongoengine.DictField
    :ivar scratchpad: Field used to persistently store arbitrary information from the plugins
                      across multiple operations.
    :type scratchpad: mongoengine.DictField
    :ivar last_unit_added: Datetime of the most recent occurrence of adding a unit to the repo
    :type last_unit_added: UTCDateTimeField
    :ivar last_unit_removed: Datetime of the most recent occurrence of removing a unit from the repo
    :type last_unit_removed: UTCDateTimeField
    :ivar _ns: (Deprecated) Namespace of repo, included for backwards compatibility.
    :type _ns: mongoengine.StringField
    """

    # Previously, this field was 'id'. This field is required to be unique, but the previous index
    # was '-id'. Setting unique=True here would generate a new 'repo_id' index. Instead, we set the
    # index in meta and enforce uniqueness there.
    repo_id = StringField(required=True, regex=r'^[.\-_A-Za-z0-9]+$')
    display_name = StringField()
    description = StringField()
    notes = DictField()
    scratchpad = DictField(default={})
    content_unit_counts = DictField(default={})
    last_unit_added = UTCDateTimeField()
    last_unit_removed = UTCDateTimeField()

    # For backward compatibility
    _ns = StringField(default='repos')

    meta = {
        'collection': 'repos',
        'allow_inheritance': False,
        'indexes': [{
            'fields': ['-repo_id'],
            'unique': True
        }],
        'queryset_class': RepoQuerySet
    }
    SERIALIZER = serializers.Repository

    def to_transfer_repo(self):
        """
        Converts the given database representation of a repository into a plugin repository transfer
        object, along with any other fields that need to be included.

        Note: In the transfer unit, the repo_id is accessed with obj.id for backwards compatibility.

        :return: transfer object used in many plugin API calls
        :rtype:  pulp.plugins.model.Repository
        """
        r = plugin_repo(self.repo_id,
                        self.display_name,
                        self.description,
                        self.notes,
                        content_unit_counts=self.content_unit_counts,
                        last_unit_added=self.last_unit_added,
                        last_unit_removed=self.last_unit_removed,
                        repo_obj=self)
        return r

    def update_from_delta(self, repo_delta):
        """
        Update the repository's fields from a delta. Keys that are not fields will be ignored.

        :param repo_delta: key-value pairs that represent the new values
        :type  repo_delta: dict
        """

        # Notes is done separately to only change notes fields that are specified. If a notes
        # field is set to None, remove it.
        if 'notes' in repo_delta:
            for key, value in repo_delta.pop('notes').items():
                if value is None:
                    self.notes.pop(key)
                else:
                    self.notes[key] = value

        # These keys may not be changed.
        prohibited = [
            'content_unit_counts', 'repo_id', 'last_unit_added',
            'last_unit_removed'
        ]
        for key, value in repo_delta.items():
            if key not in prohibited:
                setattr(self, key, value)
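
A brief sketch of update_from_delta semantics: notes keys are merged one at a time (a None value removes an existing key), while protected fields such as repo_id are silently ignored (the repo and values are illustrative):

repo = Repository.objects.get(repo_id='zoo')
repo.update_from_delta({
    'display_name': 'Zoo Packages',
    'notes': {'team': 'animals', 'obsolete': None},   # assumes an existing 'obsolete' note; None removes it
    'repo_id': 'renamed',                             # prohibited key, ignored
})
repo.save()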
Example #16
class Operation(ExtrovirtsDocument):
    start_time = DateTimeField()
    end_time = DateTimeField()
    network = ReferenceField(Network, required=True)
    adversary = ReferenceField("Adversary", required=True)
    performed_actions = EmbeddedDocumentListField(PerformedAction, default=list)
    failed_actions = EmbeddedDocumentListField(PerformedAction, default=list)
    log = ReferenceField(Log, required=True)
    status = StringField(required=True)
    status_state = StringField(required=True)
    name = StringField(required=True)
    parent_process = StringField()
    user_type = StringField()
    start_type = StringField(required=True)
    start_host = ReferenceField(Host, required=True)
    start_user = StringField()
    start_rat = ReferenceField(Rat)
    start_password = StringField()
    start_path = StringField()
    reason = StringField()
    stop_requested = StringField()
    rat_iv_map = EmbeddedDocumentListField(IVOB, default=list)
    known_credentials = ListField(ReferenceField(ObservedCredential), default=list)
    known_devices = ListField(ReferenceField(ObservedDevice), default=list)
    known_domains = ListField(ReferenceField(ObservedDomain), default=list)
    known_files = ListField(ReferenceField(ObservedFile), default=list)
    known_hosts = ListField(ReferenceField(ObservedHost), default=list)
    known_rats = ListField(ReferenceField(ObservedRat), default=list)
    known_schtasks = ListField(ReferenceField(ObservedSchtask), default=list)
    known_shares = ListField(ReferenceField(ObservedShare), default=list)
    known_timedeltas = ListField(ReferenceField(ObservedTimeDelta), default=list)
    known_users = ListField(ReferenceField(ObservedUser), default=list)
    known_persistence = ListField(ReferenceField(ObservedPersistence), default=list)
    known_registry_keys = ListField(ReferenceField(ObservedRegKey), default=list)
    known_services = ListField(ReferenceField(ObservedService), default=list)
    known_processes = ListField(ReferenceField(ObservedProcess), default=list)
    known_trashed = ListField(ReferenceField(Trashed), default=list)
    known_os_versions = ListField(ReferenceField(ObservedOSVersion), default=list)
    known_software = ListField(ReferenceField(ObservedSoftware), default=list)
    clean_log = EmbeddedDocumentListField(ErrorLog, default=list)
    steps = ListField(StringField(), default=list)
    planner_facts = StringField()
    jobs = ListField(ReferenceField(Job), default=list)
    performed_steps = EmbeddedDocumentListField(PerformedStep, default=list)
    nonexistent_rats = ListField(ReferenceField(ObservedRat), default=list)
    ignored_rats = ListField(ReferenceField(Rat), default=list)
    object_references = DictField()
    cleanup_index = IntField(default=0)
    perform_cleanup = BooleanField(required=True)
    skip_cleanup = BooleanField()
    delay = IntField(required=True)
    jitter = IntField(required=True)
    precons = StringField()
    script = StringField()

    def delete(self, *args, **kwargs):
        try:
            self.log.delete()
        except (mongoengine.errors.DoesNotExist, mongoengine.errors.FieldDoesNotExist):
            pass

        delete_fields = ('known_credentials', 'known_domains', 'known_files', 'known_hosts', 'known_rats',
                         'known_schtasks', 'known_shares', 'known_timedeltas', 'known_users', 'known_persistence',
                         'known_registry_keys', 'known_services', 'known_processes',
                         'nonexistent_rats')
        for field in delete_fields:
            for x in getattr(self, field):
                try:
                    x.delete()
                except (mongoengine.errors.DoesNotExist, mongoengine.errors.FieldDoesNotExist):
                    pass

        for job in self.jobs:
            try:
                if job.status in ("success", "failed"):
                    job.delete()
            except (mongoengine.errors.DoesNotExist, mongoengine.errors.FieldDoesNotExist):
                pass

        super().delete(*args, **kwargs)
Example #17
class StockHoldingsDoc(Document):
    """Document which tracks which stocks a user has"""
    user = StringField()
    stocks = DictField()
Example #18
class Proof(EmbeddedDocument):
    hardware = DictField(required=True)
    hardware_hash = StringField(required=True)
    disks = DictField(required=True)
    disks_hash = StringField(required=True)
    created = DateTimeField(default=datetime.datetime.utcnow)
Example #19
class Playlist(Document):
    meta = {'collection': 'playlists'}

    collaborative = BooleanField()
    description = StringField()
    external_urls = DictField()
    href = StringField()
    id = ObjectIdField(primary_key=True)
    image = DictField()
    images = ListField(DictField())
    name = StringField()
    owner = DictField()
    playlist_id = StringField()
    primary_color = StringField()
    public = BooleanField()
    snapshot_id = StringField()
    tracks = DictField()
    type = StringField()
    uri = StringField()

    def __init__(self, *args, **kwargs):
        super(Playlist, self).__init__(**kwargs)

        try:
            self.href = kwargs['href']
            self.id = kwargs['id'] if 'playlist_id' in kwargs else ObjectId()
            self.images = kwargs['images']
            self.image = None
            self.name = kwargs['name']
            self.playlist_id = kwargs[
                'playlist_id'] if 'playlist_id' in kwargs else kwargs['id']

            for img in self.images:
                if img['height'] == 60:
                    self.image = img
        except Exception as e:
            print("No item attribute, ", e)

    def set_image(self, image_obj):
        self.image = image_obj

    def pre_save(self):
        if not self.image:
            self.set_image(settings.DEFAULT_IMAGE)

    def exists(self):
        if Playlist.objects(playlist_id=self.playlist_id):
            return True
        return False

    def save(self):
        self.pre_save()
        super(Playlist, self).save()

    def new_save(self):
        if not self.exists():
            self.save()

    def to_log(self):
        dict = {
            'href': self.href,
            'id': self.playlist_id,
            'image': self.image,
            'name': self.name,
            'tracks': self.tracks
        }
        return dict

    def __str__(self):
        return str(self.to_log())
Example #20
class MispFeed(Feed):
    last_runs = DictField()

    default_values = {
        "frequency": timedelta(hours=1),
        "name": "MispFeed",
        "description": "Parses events from a given MISP instance",
        "source": "MISP",
    }

    TYPES_TO_IMPORT = {
        "domain": Hostname,
        "ip-dst": Ip,
        "ip-src": Ip,
        "url": Url,
        "hostname": Hostname,
        "md5": Hash,
        "sha1": Hash,
        "sha256": Hash,
        "btc": Bitcoin,
        "email-src": Email,
        "email-dst": Email,
    }

    def __init__(self, *args, **kwargs):
        super(MispFeed, self).__init__(*args, **kwargs)
        self.get_instances()

    def get_instances(self):
        self.instances = {}

        for instance in yeti_config.get("misp", "instances", "").split(","):
            config = {
                "url": yeti_config.get(instance, "url"),
                "key": yeti_config.get(instance, "key"),
                "name": yeti_config.get(instance, "name") or instance,
                "galaxy_filter": yeti_config.get(instance, "galaxy_filter"),
                "days": yeti_config.get(instance, "days"),
                "organisations": {},
            }

            if config["url"] and config["key"]:
                self.instances[instance] = config

    def last_run_for(self, instance):
        last_run = [int(part) for part in self.last_runs[instance].split("-")]

        return date(*last_run)

    def get_organisations(self, instance):
        url = urljoin(self.instances[instance]["url"],
                      "/organisations/index/scope:all")
        headers = {
            "Authorization": self.instances[instance]["key"],
            "Content-type": "application/json",
            "Accept": "application/json",
        }

        r = requests.get(url, headers=headers, proxies=yeti_config.proxy)

        if r.status_code == 200:

            orgs = r.json()

            for org in orgs:
                org_id = org["Organisation"]["id"]
                org_name = org["Organisation"]["name"]
                self.instances[instance]["organisations"][org_id] = org_name
        else:
            logging.error("error http %s to get instances" % r.status_code)

    def week_events(self, instance):
        one_week = timedelta(days=7)
        if not self.instances:
            logging.error("not instances in MISP")
            return
        elif instance not in self.instances:
            logging.error("error in instances of Misp")
            return

        url = urljoin(self.instances[instance]["url"], "/events/restSearch")
        headers = {"Authorization": self.instances[instance]["key"]}
        to = date.today()
        fromdate = to - timedelta(days=6)
        body = {}

        while True:
            imported = 0

            body["to"] = to.isoformat()
            body["from"] = fromdate.isoformat()
            body["returnFormat"] = "json"
            body["published"] = True
            body["enforceWarninglist"] = True
            r = requests.post(url,
                              headers=headers,
                              json=body,
                              proxies=yeti_config.proxy)

            if r.status_code == 200:
                results = r.json()

                for event in results["response"]:
                    self.analyze(event["Event"], instance)
                    imported += 1

                yield fromdate, to, imported
                to = to - one_week
                fromdate = fromdate - one_week
            else:
                logging.debug(r.content)

    def get_last_events(self, instance):
        logging.debug("Getting last events for {}".format(instance))
        last_run = self.last_run_for(instance)
        seen_last_run = False

        for date_from, date_to, imported in self.week_events(instance):

            logging.debug("Imported {} attributes from {} to {}".format(
                imported, date_from, date_to))

            if seen_last_run:
                break

            if date_from <= last_run <= date_to:
                seen_last_run = True

    def get_all_events(self, instance):
        logging.debug("Getting all events for {}".format(instance))
        had_results = True

        for date_from, date_to, imported in self.week_events(instance):
            if "days" in self.instances[instance]:
                days_to_sync = self.instances[instance]["days"]
            else:
                days_to_sync = 60
            if date.today() - date_to > timedelta(days=days_to_sync):

                break
            logging.debug("Imported {} attributes from {} to {}".format(
                imported, date_from, date_to))

            if imported == 0:
                if had_results:
                    had_results = False
                else:
                    break
            else:
                had_results = True

    def update(self):
        for instance in self.instances:
            logging.debug("Processing instance {}".format(instance))
            self.get_organisations(instance)
            if instance in self.last_runs:
                self.get_last_events(instance)
            else:
                self.get_all_events(instance)

            self.modify(**{
                "set__last_runs__{}".format(instance):
                date.today().isoformat()
            })

    def analyze(self, event, instance):
        tags = []
        galaxies_to_context = []

        context = {}

        context["source"] = self.instances[instance]["name"]
        external_analysis = [
            attr["value"] for attr in event["Attribute"]
            if attr["category"] == "External analysis"
            and attr["type"] == "url" and attr["to_ids"]
        ]
        if external_analysis:
            context["external sources"] = "\r\n".join(external_analysis)
        if "Tag" in event:
            if not self.instances[instance].get("galaxy_filter"):
                tags = [tag["name"] for tag in event["Tag"]]
            else:
                galaxies = self.instances[instance]["galaxy_filter"].split(",")

                for tag in event["Tag"]:
                    found = False
                    if "misp-galaxy" in tag["name"]:
                        galaxies_to_context.append(tag["name"])
                    for g in galaxies:
                        if g in tag["name"]:
                            found = True
                            break
                    if not found:
                        tags.append(tag["name"])

        for attribute in event["Attribute"]:
            if attribute["category"] == "External analysis":
                continue

            if attribute.get("type") in self.TYPES_TO_IMPORT:

                context["id"] = attribute["event_id"]
                context["link"] = urljoin(
                    self.instances[instance]["url"],
                    "/events/{}".format(attribute["event_id"]),
                )

                context["comment"] = attribute["comment"]

                try:

                    klass = self.TYPES_TO_IMPORT[attribute["type"]]
                    obs = klass.get_or_create(value=attribute["value"])

                    if attribute["category"]:
                        obs.tag(attribute["category"].replace(" ", "_"))

                    if tags:
                        obs.tag(tags)

                    if galaxies_to_context:
                        context["galaxies"] = "\r\n".join(galaxies_to_context)
                    obs.add_context(context)

                except Exception:

                    try:
                        logging.error("{}: error adding {}".format(
                            "MispFeed", attribute["value"]))
                    except UnicodeError:
                        logging.error("{}: error adding {}".format(
                            "MispFeed", attribute["id"]))
Example #21
class JobListContent(EmbeddedDocument):
    items = ListField(DictField())
Example #22
class OtxFeed(Feed):

    last_runs = DictField()

    default_values = {
        "frequency": timedelta(hours=1),
        "name": "OtxFeed",
        "description": "Parses events from a given OTX pulse",
        "source": "OTX",
    }

    def __init__(self, *args, **kwargs):
        super(OtxFeed, self).__init__(*args, **kwargs)
        self.otx = OTXv2(yeti_config.get("otx", "key"))
        self.get_pulses()

    def get_pulses(self):
        self.pulses = {}

        for pulse in yeti_config.get("otx", "pulses", "").split(","):
            config = {
                "pulse_id": yeti_config.get(pulse, "pulse_id"),
                "use_otx_tags": yeti_config.get(pulse, "use_otx_tags") == "Y",
            }

            if config["pulse_id"]:
                self.pulses[pulse] = config

    def update(self):
        for pulse in self.pulses.values():
            pulse_details = self.otx.get_pulse_details(pulse["pulse_id"])

            pulse_context = {
                "source": "OTX Pulse - {}".format(pulse_details["name"]),
                "pulse_id": pulse["pulse_id"],
                "name": pulse_details["name"],
                "pulse_description": pulse_details["description"],
                "tags": pulse_details["tags"],
                "author_name": pulse_details["author_name"],
                "references": pulse_details["references"],
                "industries": pulse_details["industries"],
                "tlp": pulse_details["TLP"],
                "targeted_countries": pulse_details["targeted_countries"],
                "adversary": pulse_details["adversary"],
                "public": pulse_details["public"],
                "created": dateutil.parser.parse(pulse_details["created"]),
            }

            for indicator in pulse_details["indicators"]:
                self.analyze(indicator,
                             pulse_context,
                             use_otx_tags=pulse["use_otx_tags"])

    def analyze(self, indicator_context, pulse_context, use_otx_tags=False):
        context = pulse_context.copy()
        value = indicator_context.pop("indicator")
        context["date_dadded"] = dateutil.parser.parse(
            indicator_context.pop("created"))
        context.update(indicator_context)

        observable = OBSERVABLE_TYPES[indicator_context["type"]].get_or_create(
            value=value)
        observable.add_context(context)
        if use_otx_tags:
            observable.tag(pulse_context["tags"])
Example #23
class paytype_det(Document):
    code = LongField(reference=False)
    typid = LongField(required=True)
    denomination = DictField(required=False)
    reference = BooleanField(required=True, default=False)
    percent_extra = FloatField(required=True, default=0.00)
Example #24
class LazyCatalogEntry(AutoRetryDocument):
    """
    A catalog of content that can be downloaded by the specified plugin.

    :ivar path: The content unit storage path.
    :type path: str
    :ivar importer_id: The ID of the plugin that contributed the catalog entry.
        This plugin participates in the downloading of content when requested by the streamer.
    :type importer_id: str
    :ivar unit_id: The associated content unit ID.
    :type unit_id: str
    :ivar unit_type_id: The associated content unit type.
    :type unit_type_id: str
    :ivar url: The *real* download URL.
    :type url: str
    :ivar checksum: The checksum of the file associated with the
        content unit. Used for validation.
    :type checksum: str
    :ivar checksum_algorithm: The algorithm used to generate the checksum.
    :type checksum_algorithm: str
    :ivar revision: The revision is used to group collections of entries.
    :type revision: int
    :ivar data: Arbitrary information stored with the entry.
        Managed by the plugin.
    :type data: dict
    """

    ALG_REGEX = r'(md5|sha1|sha224|sha256|sha384|sha512)'

    meta = {
        'collection': 'lazy_content_catalog',
        'allow_inheritance': False,
        'indexes': [
            'importer_id',
            {
                'fields': [
                    '-path',
                    '-importer_id',
                    '-revision',
                ],
                'unique': True
            },
        ],
    }

    # For backward compatibility
    _ns = StringField(default=meta['collection'])

    path = StringField(required=True)
    importer_id = StringField(required=True)
    unit_id = StringField(required=True)
    unit_type_id = StringField(required=True)
    url = StringField(required=True)
    checksum = StringField()
    checksum_algorithm = StringField(regex=ALG_REGEX)
    revision = IntField(default=0)
    data = DictField()

    def save_revision(self):
        """
        Add the entry using the next revision number.
        Previous revisions are deleted.
        """
        revisions = set([0])
        query = dict(importer_id=self.importer_id, path=self.path)
        # Find revisions
        qs = LazyCatalogEntry.objects.filter(**query)
        for revision in qs.distinct('revision'):
            revisions.add(revision)
        # Add new revision
        last_revision = max(revisions)
        self.revision = last_revision + 1
        self.save()
        # Delete previous revisions
        qs = LazyCatalogEntry.objects.filter(revision__in=revisions, **query)
        qs.delete()
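A brief usage sketch of save_revision(): a plugin builds an entry and lets the method pick the next revision and prune older ones. The values below are placeholders; only the field names come from the model above.

# Hypothetical values, for illustration only.
entry = LazyCatalogEntry(
    path='/var/lib/pulp/content/units/demo/a/bc/abc123',
    importer_id='demo-importer-id',
    unit_id='demo-unit-id',
    unit_type_id='demo',
    url='https://example.com/content/abc123',
    checksum='e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
    checksum_algorithm='sha256',
)
entry.save_revision()  # sets revision = previous max + 1 and deletes older revisions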
Example #25
class Module(Project):
    category = StringField(choices=('model', 'toolkit'))
    module_path = StringField()
    input = DictField()
    output = DictField()
    repo_path = StringField()
Example #26
class Importer(AutoRetryDocument):
    """
    Defines schema for an Importer in the `repo_importers` collection.
    """
    repo_id = StringField(required=True)
    importer_type_id = StringField(required=True)
    config = DictField()
    scratchpad = DictField(default=None)
    last_sync = ISO8601StringField()
    last_updated = UTCDateTimeField()
    last_override_config = DictField()

    # For backward compatibility
    _ns = StringField(default='repo_importers')
    SERIALIZER = serializers.ImporterSerializer

    meta = {
        'collection': 'repo_importers',
        'allow_inheritance': False,
        'indexes': [{
            'fields': ['-repo_id', '-importer_type_id'],
            'unique': True
        }],
        'queryset_class': CriteriaQuerySet
    }

    @classmethod
    def pre_delete(cls, sender, document, **kwargs):
        """
        Purge the lazy catalog of all entries for the importer being deleted.

        :param sender:   class of sender (unused)
        :type  sender:   object
        :param document: mongoengine document being deleted.
        :type  document: pulp.server.db.model.Importer
        """
        query_set = LazyCatalogEntry.objects(importer_id=str(document.id))
        _logger.debug(
            _('Deleting lazy catalog entries for the {repo} repository.').
            format(repo=document.repo_id))
        query_set.delete()

    @classmethod
    def pre_save(cls, sender, document, **kwargs):
        """
        The signal that is triggered before importer is saved.

        :param sender:   class of sender (unused)
        :type sender:    object
        :param document: mongoengine document being saved
        :type document:  pulp.server.db.model.Importer
        """
        document.last_updated = dateutils.now_utc_datetime_with_tzinfo()

    def delete(self):
        """
        Delete the Importer. Remove any documents it has stored.
        """
        if os.path.exists(self._local_storage_path):
            shutil.rmtree(self._local_storage_path)
        super(Importer, self).delete()

    def save(self):
        """
        Save the Importer. Additionally, write any pki documents from its config into disk storage
        for use by requests.
        """
        super(Importer, self).save()
        # A map of Importer config key names to file paths for the TLS PEM settings.
        pem_keys_paths = (
            (importer_constants.KEY_SSL_CA_CERT, self.tls_ca_cert_path),
            (importer_constants.KEY_SSL_CLIENT_CERT, self.tls_client_cert_path),
            (importer_constants.KEY_SSL_CLIENT_KEY, self.tls_client_key_path),
        )
        for key, path in pem_keys_paths:
            self._write_pem_file(key, path)

    @property
    def tls_ca_cert_path(self):
        """
        Return the path where the TLS CA certificate should be stored for this Importer.

        :rtype: basestring
        """
        return os.path.join(self._pki_path, 'ca.crt')

    @property
    def tls_client_cert_path(self):
        """
        Return the path where the TLS client certificate should be stored for this Importer.

        :rtype: basestring
        """
        return os.path.join(self._pki_path, 'client.crt')

    @property
    def tls_client_key_path(self):
        """
        Return the path where the TLS client key should be stored for this Importer.

        :rtype: basestring
        """
        return os.path.join(self._pki_path, 'client.key')

    @property
    def _local_storage_path(self):
        """
        Return the path that the Importer should use for local storage.

        :rtype: basestring
        """
        return os.path.join(
            LOCAL_STORAGE, 'importers', '{repo}-{importer_type}'.format(
                repo=self.repo_id, importer_type=self.importer_type_id))

    @property
    def _pki_path(self):
        """
        Return the path that all pki files should be stored within for this Importer.

        :rtype: basestring
        """
        return os.path.join(self._local_storage_path, 'pki')

    def _write_pem_file(self, config_key, path):
        """
        Write the PEM data from self.config[config_key] to the given path, if the key is defined and
        is "truthy".

        :param config_key: The key corresponding to a value in self.config to write to path.
        :type  config_key: basestring
        :param path:       The path to write the PEM data to.
        :type  path:       basestring
        """
        if config_key in self.config and self.config[config_key]:
            if not os.path.exists(self._pki_path):
                misc.mkdir(os.path.dirname(self._pki_path))
                os.mkdir(self._pki_path, 0700)
            with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, 0600),
                           'w') as pem_file:
                if type(self.config[config_key]) is unicode:
                    pem_file.write(self.config[config_key].encode('utf-8'))
                else:
                    pem_file.write(self.config[config_key])
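As a usage note for save() above: any PEM material present in config is written into the importer's pki directory. A minimal sketch, where the repo_id, importer_type_id and PEM content are placeholders and only importer_constants.KEY_SSL_CA_CERT and the path layout come from the class:

# Placeholder values; only the constant name and path layout come from Importer.
importer = Importer(
    repo_id='demo-repo',
    importer_type_id='demo_importer',
    config={importer_constants.KEY_SSL_CA_CERT: '-----BEGIN CERTIFICATE-----\n...'},
)
importer.save()
# The CA certificate is now stored at:
#   <LOCAL_STORAGE>/importers/demo-repo-demo_importer/pki/ca.crt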
Example #27
class Schedule(Document):
    opcode = StringField(required=True, max_length=50)
    timestamp = LongField(required=True)
    data = DictField(required=True)
Example #28
class ContentUnit(AutoRetryDocument):
    """
    The base class for all content units.

    All classes inheriting from this class must define a _content_type_id and unit_key_fields.

    _content_type_id must be of type mongoengine.StringField and have a default value of the string
    name of the content type.

    unit_key_fields must be a tuple of strings, each of which is a valid field name of the subclass.

    :ivar id: content unit id
    :type id: mongoengine.StringField
    :ivar pulp_user_metadata: Bag of User supplied data to go along with this unit
    :type pulp_user_metadata: mongoengine.DictField
    :ivar _last_updated: last time this unit was updated (since epoch, zulu time)
    :type _last_updated: mongoengine.IntField
    :ivar _storage_path: The absolute path to associated content files.
    :type _storage_path: mongoengine.StringField
    """

    id = StringField(primary_key=True, default=lambda: str(uuid.uuid4()))
    pulp_user_metadata = DictField()
    _last_updated = IntField(required=True)
    _storage_path = StringField()

    meta = {
        'abstract': True,
    }

    NAMED_TUPLE = _ContentUnitNamedTupleDescriptor()

    @classmethod
    def attach_signals(cls):
        """
        Attach the signals to this class.

        This is provided as a class method so it can be called on subclasses
        and all the correct signals will be applied.
        """
        signals.pre_save.connect(cls.pre_save_signal, sender=cls)

    @classmethod
    def validate_model_definition(cls):
        """
        Validate that all subclasses of ContentType define required fields correctly.

        Ensure a field named `_content_type_id` is defined and raise an AttributeError if not. Each
        subclass of ContentUnit must have the content type id stored in the `_content_type_id`
        field as a StringField. The field must be marked as required and have a default set. For
        example:

           _content_type_id = StringField(required=True, default='rpm')

        Ensure an attribute named `unit_key_fields` is defined and raise an AttributeError if not.
        Each subclass of ContentUnit must define `unit_key_fields` as a non-empty tuple of the
        field names that make up its unit key. For example:

           unit_key_fields = ('author', 'name', 'version')

        :raises: AttributeError if a field or attribute is not defined
        :raises: ValueError if a field or attribute has an incorrect value
        :raises: TypeError if a field or attribute has an invalid type
        """
        # Validate the 'unit_key_fields' attribute

        if not hasattr(cls, 'unit_key_fields'):
            msg = _("The class %(class_name)s must define a 'unit_key_fields' attribute")\
                % {'class_name': cls.__name__}
            _logger.error(msg)
            raise AttributeError(msg)
        if not isinstance(cls.unit_key_fields, tuple):
            msg = _("The class %(class_name)s must define 'unit_key_fields' to be a tuple")\
                % {'class_name': cls.__name__}
            _logger.error(msg)
            raise TypeError(msg)
        if len(cls.unit_key_fields) == 0:
            msg = _("The field 'unit_key_fields' on class %(class_name)s must have length > 0")\
                % {'class_name': cls.__name__}
            _logger.error(msg)
            raise ValueError(msg)

        # Validate the '_content_type_id' field
        if not hasattr(cls, '_content_type_id'):
            msg = _("The class %(class_name)s must define a '_content_type_id' attribute")\
                % {'class_name': cls.__name__}
            _logger.error(msg)
            raise AttributeError(msg)

        if not isinstance(cls._content_type_id, StringField):
            msg = _("The class %(class_name)s must define '_content_type_id' to be a StringField")\
                % {'class_name': cls.__name__}
            _logger.error(msg)
            raise TypeError(msg)
        if cls._content_type_id.default is None:
            msg = _("The class %(class_name)s must define a default value "
                    "for the '_content_type_id' field") % {
                        'class_name': cls.__name__
                    }
            _logger.error(msg)
            raise ValueError(msg)
        if cls._content_type_id.required is False:
            msg = _("The class %(class_name)s must require the '_content_type_id' field")\
                % {'class_name': cls.__name__}
            _logger.error(msg)
            raise ValueError(msg)

    @classmethod
    def pre_save_signal(cls, sender, document, **kwargs):
        """
        The signal that is triggered before a unit is saved; it is used to
        set the _last_updated timestamp.

        :param sender: sender class
        :type sender: object
        :param document: Document that sent the signal
        :type document: ContentUnit
        """
        document._last_updated = dateutils.now_utc_timestamp()

    def get_repositories(self):
        """
        Get an iterable of Repository models for all the repositories that contain this unit

        :return: Repositories that contain this content unit
        :rtype: iterable of Repository
        """
        content_list = RepositoryContentUnit.objects(unit_id=self.id)
        id_list = [item.repo_id for item in content_list]
        return Repository.objects(repo_id__in=id_list)

    @property
    def storage_path(self):
        """
        The content storage path.

        :return: The absolute path to stored content.
        :rtype: str
        """
        return self._storage_path

    @property
    def unit_key(self):
        """
        Dictionary representation of the unit key
        """
        return dict((key, getattr(self, key)) for key in self.unit_key_fields)

    @property
    def unit_key_str(self):
        """
        The unit key represented as a string ordered by unit key fields alphabetically
        """
        return self.unit_key_as_digest()

    @property
    def unit_key_as_named_tuple(self):
        """
        The unit key represented as a named_tuple by field name
        """
        return self.NAMED_TUPLE(**self.unit_key)

    def to_id_dict(self):
        """
        Returns identity info as a dict.

        Returns a dict with the identity information (type ID and unit key) for this unit. The
        primary intention of this method is as a means to convert these units into a JSON
        serializable format.

        :return: Identity information (type ID and unit key)
        :rtype: dict
        """

        return {'type_id': self._content_type_id, 'unit_key': self.unit_key}

    @property
    def type_id(self):
        """
        Backwards compatible interface for _content_type_id

        The pre-mongoengine units used type_id to track what is stored in _content_type_id. This
        provides internal backwards compatibility allowing code to not be updated until all models
        are converted to mongoengine and able to use the new name exclusively.

        This should be removed once the old, non-mongoengine code paths are removed.
        """
        return self._content_type_id

    def unit_key_as_digest(self, algorithm=None):
        """
        The digest (hash) of the unit key.

        :param algorithm: A hashing algorithm object. Uses SHA256 when not specified.
        :type algorithm: hashlib.algorithm
        :return: The hex digest of the unit key.
        :rtype: str
        """
        _hash = algorithm or sha256()
        for key, value in sorted(self.unit_key.items()):
            _hash.update(key)
            if not isinstance(value, basestring):
                _hash.update(str(value))
            else:
                _hash.update(value)
        return _hash.hexdigest()

    def list_files(self):
        """
        List absolute paths to files associated with this unit.

        This *must* be overridden by multi-file unit subclasses. Units without files can use the
        default implementation.

        :return: A list of absolute file paths.
        :rtype: list
        """
        if self._storage_path and not os.path.isdir(self._storage_path):
            return [self._storage_path]
        else:
            return []

    def __hash__(self):
        """
        This should provide a consistent and unique hash where units of the same
        type and the same unit key will get the same hash value.
        """
        return hash(self.type_id + self.unit_key_as_digest())
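To make the contract described in validate_model_definition() concrete, here is a minimal sketch of a conforming subclass; the type name 'demo' and its fields are invented for illustration.

# Hypothetical subclass that satisfies validate_model_definition().
class DemoUnit(ContentUnit):
    name = StringField(required=True)
    version = StringField(required=True)

    # Required by the contract above: a non-empty tuple of unit key field
    # names and a required StringField with a default content type id.
    unit_key_fields = ('name', 'version')
    _content_type_id = StringField(required=True, default='demo')

    meta = {'collection': 'units_demo'}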
Example #29
class ContentUnit(Document):
    """
    The base class for all content units.

    All classes inheriting from this class must override the unit_type_id and _ns to ensure
    they are populated properly.

    :ivar id: content unit id
    :type id: mongoengine.StringField
    :ivar last_updated: last time this unit was updated (since epoch, zulu time)
    :type last_updated: mongoengine.IntField
    :ivar user_metadata: Bag of User supplied data to go along with this unit
    :type user_metadata: mongoengine.DictField
    :ivar storage_path: Location on disk where the content associated with this unit lives
    :type storage_path: mongoengine.StringField

    :ivar _ns: (Deprecated), Contains the name of the collection this model represents
    :type _ns: mongoengine.StringField
    :ivar unit_type_id: content unit type
    :type unit_type_id: mongoengine.StringField
    """

    id = StringField(primary_key=True)
    last_updated = IntField(db_field='_last_updated', required=True)
    user_metadata = DictField(db_field='pulp_user_metadata')
    storage_path = StringField(db_field='_storage_path')

    # For backward compatibility
    _ns = StringField(required=True)
    unit_type_id = StringField(db_field='_content_type_id', required=True)

    meta = {
        'abstract': True,
    }

    NAMED_TUPLE = None

    def __init__(self, *args, **kwargs):
        super(ContentUnit, self).__init__(*args, **kwargs)
        self._source_location = None
        self._relative_path = None

    @classmethod
    def attach_signals(cls):
        """
        Attach the signals to this class.

        This is provided as a class method so it can be called on subclasses
        and all the correct signals will be applied.
        """
        signals.post_init.connect(cls.post_init_signal, sender=cls)
        signals.pre_save.connect(cls.pre_save_signal, sender=cls)
        # Create the named tuple here so it happens during server startup
        cls.NAMED_TUPLE = namedtuple(cls.unit_type_id.default,
                                     cls.unit_key_fields)

    @classmethod
    def post_init_signal(cls, sender, document):
        """
        The signal that is triggered after a unit is initialized.

        This is used to validate that the unit_key_fields attribute is set properly

        :param sender: sender class
        :type sender: object
        :param document: Document that sent the signal
        :type document: ContentUnit
        :raises: PLP0035 if the unit_key_fields attribute has not been defined
        """
        if not hasattr(document, 'unit_key_fields'):
            class_name = type(document).__name__
            raise exceptions.PulpCodedException(error_codes.PLP0035,
                                                class_name=class_name)

    @classmethod
    def pre_save_signal(cls, sender, document, **kwargs):
        """
        The signal that is triggered before a unit is saved, this is used to
        support the legacy behavior of generating the unit id and setting
        the last_updated timestamp

        :param sender: sender class
        :type sender: object
        :param document: Document that sent the signal
        :type document: ContentUnit
        """
        if not document.id:
            document.id = str(uuid.uuid4())
        document.last_updated = dateutils.now_utc_timestamp()

        # If content was set on this unit, copy the content into place
        if document._source_location:
            server_storage_dir = config.config.get('server', 'storage_dir')
            platform_storage_location = os.path.join(server_storage_dir,
                                                     'units',
                                                     document.unit_type_id,
                                                     str(document.id)[0],
                                                     str(document.id)[1:3],
                                                     str(document.id))
            # If the source is a directory, copy it recursively; otherwise copy the file
            if os.path.isdir(document._source_location):
                shutil.copytree(document._source_location,
                                platform_storage_location)
            else:
                target_file_name = os.path.basename(document._source_location)
                # Make sure the base directory exists
                try:
                    os.makedirs(platform_storage_location)
                except OSError as e:
                    if e.errno != errno.EEXIST:
                        raise
                # Copy the file
                document_full_storage_location = os.path.join(
                    platform_storage_location, target_file_name)
                shutil.copy(document._source_location,
                            document_full_storage_location)
                platform_storage_location = document_full_storage_location
            document.storage_path = platform_storage_location

    def set_content(self, source_location):
        """
        Store the source of the content for the unit and the relative path
        where it should be stored within the plugin content directory.

        :param source_location: The absolute path to the content in the plugin working directory.
        :type source_location: str

        :raises PulpCodedException: PLP0036 if the source_location doesn't exist.
        """
        if not os.path.exists(source_location):
            raise exceptions.PulpCodedException(
                error_code=error_codes.PLP0036,
                source_location=source_location)
        self._source_location = source_location

    def get_repositories(self):
        """
        Get an iterable of Repository models for all the repositories that contain this unit

        :return: Repositories that contain this content unit
        :rtype: iterable of Repository
        """
        content_list = RepositoryContentUnit.objects(unit_id=self.id)
        id_list = [item.repo_id for item in content_list]
        return Repository.objects(repo_id__in=id_list)

    @property
    def unit_key(self):
        """
        Dictionary representation of the unit key
        """
        return dict((key, getattr(self, key)) for key in self.unit_key_fields)

    @property
    def unit_key_str(self):
        """
        The unit key represented as a string ordered by unit key fields alphabetically
        """
        return str(sorted([getattr(self, key)
                           for key in self.unit_key_fields]))

    @property
    def unit_key_as_named_tuple(self):
        """
        The unit key represented as a named_tuple by field name
        """
        return self.NAMED_TUPLE(**self.unit_key)

    def __hash__(self):
        """
        This should provide a consistent and unique hash where units of the same
        type and the same unit key will get the same hash value.
        """
        return hash(self.unit_type_id + self.unit_key_str)
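A short usage sketch of set_content() with this older model; the subclass, field defaults and paths below are invented for illustration, and the copy into platform storage happens in pre_save_signal() as shown above.

# Hypothetical subclass and paths, for illustration only.
class DemoUnit(ContentUnit):
    name = StringField(required=True)
    unit_key_fields = ('name',)

    # The class docstring above says subclasses must override these two fields.
    _ns = StringField(required=True, default='units_demo')
    unit_type_id = StringField(db_field='_content_type_id', required=True,
                               default='demo')

    meta = {'collection': 'units_demo'}

unit = DemoUnit(name='example')
unit.set_content('/tmp/working/example.bin')  # path must exist or PLP0036 is raised
unit.save()  # pre_save_signal() assigns an id and copies the file under
             # <storage_dir>/units/demo/<id[0]>/<id[1:3]>/<id>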
Example #30
class Users(Document):
    username = StringField(required=True, max_length=32, unique=True)
    password = StringField(required=True)
    name = StringField(required=True, max_length=32)
    zodiac = StringField(required=True)
    predictions = DictField()