Example #1
def act(action, batch_db, event_data, plugin_config, logger, config):
    assert batch_db
    descriptors = filter(lambda x: x['name'] == action, get_actions())
    action_descriptor = descriptors[0] if descriptors else None
    if not action_descriptor:
        raise ValueError(u'Invalid action: %s for %s plugin' % (action, PLUGIN_NAME))

    if action == 'get_policy_info_async':
        policy_series_field_name = plugin_config['policy_series_field_name']
        policy_number_field_name = plugin_config['policy_number_field_name']

        policy_series = batch_db.data.get(policy_series_field_name, None)
        policy_number = batch_db.data.get(policy_number_field_name, None)

        if not policy_number or not policy_series:
            return False
        get_policy_info_async.delay(policy_series, policy_number, event_data, batch_db.id)
    elif action == 'get_policy_info_first_try':
        policy_series_field_name = plugin_config['policy_series_field_name']
        policy_number_field_name = plugin_config['policy_number_field_name']

        policy_series = batch_db.data.get(policy_series_field_name, None)
        policy_number = batch_db.data.get(policy_number_field_name, None)

        if not policy_number or not policy_series:
            return False
        try:
            get_policy_info_async(policy_series, policy_number, event_data, batch_db.id, async=False, logger=logger)
        except Exception:
            BatchManager.handle_event(batch_db.id, "on_policy_info_receive_fail", event_data, logger, config=config)
            return False
    else:
        raise Exception(u"Invalid action %s for plugin %s" % (action, PLUGIN_NAME))

    return True
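
For context, act() resolves the action name against get_actions(), which is not part of this excerpt. A minimal sketch of the shape that lookup implies, assuming only the 'name' key that the filter above actually reads; any other metadata is hypothetical:

def get_actions():
    # Hypothetical descriptor list implied by the filter in act() above; the
    # real plugin almost certainly attaches more metadata to each action.
    return [
        {'name': 'get_policy_info_async'},
        {'name': 'get_policy_info_first_try'},
    ]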
Example #2
def render_batch_documents(batch_id=None, document_type=None):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id,
                                                  _owner=current_user,
                                                  deleted=False).first()
    if not batch:
        raise errors.BatchNotFound()
    batch_manager = BatchManager.init(batch)

    document_types = json.loads(document_type)
    if not isinstance(document_types, list) and not isinstance(
            document_types, tuple):
        raise errors.InvalidParameterValue('document_type')

    doc_type_set = set()
    for doc_type in document_types:
        if not batch_manager.check_doc_type_support(batch.batch_type,
                                                    doc_type):
            raise errors.InvalidParameterValue('document_type')
        doc_type_set.add(doc_type)

    action_descriptor = {'plugin': 'doc_builder', 'action': 'render_group'}

    event_data = {'doc_types': list(doc_type_set), 'batch_id': batch.id}

    BatchManager.perform_action(action_descriptor, batch, event_data,
                                current_app.logger, current_app.config)
    return {"result": True}
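
A hedged usage note: judging from the test example later in this collection, this view appears to back the '/batch/render_document/' endpoint, with document_type passed as a JSON-encoded list of supported document type names. The route and the fixtures (test_client, batch) are assumptions borrowed from those tests, not something this snippet defines:

import json

# Illustrative call only; '/batch/render_document/' and the test fixtures are
# assumed from the test examples further down.
result = test_client.post('/batch/render_document/', data={
    'batch_id': batch.id,
    'document_type': json.dumps(['test_doc_1', 'test_doc_2']),
})
assert result.status_code == 200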
Example #3
def register(app, jinja_env, class_loader, **kwargs):
    search_path = os.path.normpath(
        os.path.join(os.path.abspath(os.path.dirname(__file__)), u"templates"))
    jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))

    jinja_env.globals.update({
        'DocMultiDeclensionField':
        DocMultiDeclensionField,
        'InitialCapitalDepositTypeEnum':
        InitialCapitalDepositTypeEnum
    })

    class_loader.POSSIBLE_LOCATIONS.append('services.llc_reg.documents')
    class_loader.POSSIBLE_LOCATIONS.append('services.llc_reg.documents.enums')
    class_loader.POSSIBLE_LOCATIONS.append(
        'services.llc_reg.documents.general_doc_fields')
    class_loader.POSSIBLE_LOCATIONS.append(
        'services.llc_reg.documents.llc_gov_forms_adapters')
    class_loader.POSSIBLE_LOCATIONS.append(
        'services.llc_reg.documents.llc_validators')

    BatchManager.register_manager(DocumentBatchTypeEnum.DBT_NEW_LLC,
                                  LlcRegBatchManager)

    _init_doc_requisities(app.config)
Example #4
def render_batch_documents(batch_id=None, document_type=None):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()
    if not batch:
        raise errors.BatchNotFound()
    batch_manager = BatchManager.init(batch)

    document_types = json.loads(document_type)
    if not isinstance(document_types, list) and not isinstance(document_types, tuple):
        raise errors.InvalidParameterValue('document_type')

    doc_type_set = set()
    for doc_type in document_types:
        if not batch_manager.check_doc_type_support(batch.batch_type, doc_type):
            raise errors.InvalidParameterValue('document_type')
        doc_type_set.add(doc_type)

    action_descriptor = {
        'plugin': 'doc_builder',
        'action': 'render_group'
    }

    event_data = {
        'doc_types': list(doc_type_set),
        'batch_id': batch.id
    }

    BatchManager.perform_action(action_descriptor, batch, event_data, current_app.logger, current_app.config)
    return {"result": True}
Example #5
def run_scheduled_task(descriptor_name, action_name, batch_id):
    app = celery.conf['flask_app']()
    with app.app_context():
        batch_db = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
        descriptor = DocRequisitiesStorage.get_batch_descriptor(descriptor_name)
        action = descriptor['actions'][action_name]
        BatchManager.perform_action(action, batch_db, {}, app.logger, app.config)
Example #6
def check_frozen_batch_finalisation():
    config = celery.conf.get('config')

    with celery.conf['flask_app']().app_context():
        logger = current_app.logger
        logger.debug(u"Starting dead batches being finalised")

        cur = DocumentBatchDbObject.query.filter(
            DocumentBatchDbObject.current_task_id != None,
            DocumentBatchDbObject.batch_rendering_start < datetime.utcnow() -
            timedelta(seconds=BATCH_FINALISATION_MAX_DURATION),
            DocumentBatchDbObject.status == BatchStatusEnum.BS_BEING_FINALISED)

        inspect_tasks = []
        inspector = celery.control.inspect()
        actives = inspector.active()
        for item in (actives.values() if actives else []):
            inspect_tasks.extend(item)
        schedules = inspector.scheduled()
        for item in (schedules.values() if schedules else []):
            inspect_tasks.extend(item)

        for batch in cur:
            logger.debug(u"checking %s" % batch.id)
            task_id = batch.current_task_id
            if not celery_utils.found_same_task(
                    'fw.async_tasks.rendering.render_batch',
                    task_id=task_id,
                    args=(batch.id, ),
                    inspect_tasks=inspect_tasks):
                logger.warn(
                    u"Batch %s is being finalised but corresponding celery task was not found. "
                    u"Cancelling batch finalisation!" % batch.id)

                try:
                    BatchManager.cancel_batch_finalization(
                        batch, config, logger)
                except Exception:
                    current_app.logger.exception(
                        u"Failed to cancel batch finalization.")
                    continue

        cur = DocumentBatchDbObject.query.filter(
            DocumentBatchDbObject.current_task_id == None,
            DocumentBatchDbObject.status == BatchStatusEnum.BS_BEING_FINALISED)
        for batch in cur:
            logger.warn(
                u"Batch %s is being finalised but corresponding celery task was not found [2]. "
                u"Cancelling batch finalisation!" % batch.id)

            try:
                BatchManager.cancel_batch_finalization(batch, config, logger)
            except Exception:
                current_app.logger.exception(
                    u"Failed to cancel batch finalization.")
                continue

        return True
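
The duplicate-task lookup above goes through fw.async_tasks.celery_utils.found_same_task, which is not included in this collection. A rough sketch of such a check, assuming the task dicts returned by celery's inspect() API carry 'name', 'id' and 'args' keys; the real helper may differ:

def found_same_task(task_name, task_id=None, args=None, inspect_tasks=None):
    # Assumed sketch, not the real fw.async_tasks.celery_utils implementation:
    # look for an inspected task with the same name and either the same id or
    # the same positional args.
    for task in (inspect_tasks or []):
        if task.get('name') != task_name:
            continue
        if task_id and task.get('id') == task_id:
            return True
        if args is not None and tuple(task.get('args') or ()) == tuple(args):
            return True
    return False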
Example #7
def run_scheduled_task(descriptor_name, action_name, batch_id):
    app = celery.conf['flask_app']()
    with app.app_context():
        batch_db = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
        descriptor = DocRequisitiesStorage.get_batch_descriptor(
            descriptor_name)
        action = descriptor['actions'][action_name]
        BatchManager.perform_action(action, batch_db, {}, app.logger,
                                    app.config)
Example #8
def go_back(batch_id=None):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()
    if not batch:
        raise errors.BatchNotFound()
    if batch.batch_type != DocumentBatchTypeEnum.DBT_NEW_LLC:
        BatchManager.handle_event(batch_id, 'go_back', {
            'batch_id': batch_id
        }, logger=current_app.logger, config=current_app.config)
        return {"result": True}
    raise NotImplementedError()
Example #9
def register(app, jinja_env, class_loader, **kwargs):
    search_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), u"templates"))
    jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))

    class_loader.POSSIBLE_LOCATIONS.append('services.test_svc.documents.enums')
    class_loader.POSSIBLE_LOCATIONS.append('services.test_svc.documents.ext_methods')
    class_loader.POSSIBLE_LOCATIONS.append('services.test_svc.documents.ext_validators')

    BatchManager.register_manager(DocumentBatchTypeEnum.DBT_TEST_TYPE, TestSvcManager)

    _init_doc_requisities(app.config)
Example #10
    def test_keep_document_instance_on_batch_render(self):
        batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)
        new_data = {
            'short_name': u'Тест нейм'
        }
        new_batch_data = {
            'data': new_data,
            'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,
            'metadata': {}
        }

        new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)
        manager = BatchManager.init(batch)
        result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)
        self.assertEqual(BatchDocumentDbObject.query.count(), 1)
        doc = BatchDocumentDbObject.query.scalar()
        del result['result']['creation_date']
        self.assertEqual(result, {
            'result': {
                'status': u'new',
                'all_docs': [{u'caption': u'Тестовый документ 1', u'document_type': u'test_doc_1', u'file_link': None, u'document_id': doc.id}],
                'name': u'Тестовый батч',
                'paid': 'false',
                'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,
                'result_fields': {u'name': u'Тест нейм'},
                'data': {
                    'short_name': u'Тест нейм'
                },
                'id': batch.id,
                'metadata': {},
                'status_data': {'finalisation_count': u'0'}
            }
        })

        new_data['short_name'] = u'создай второй документ'  # Russian: "create a second document"

        new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)
        manager = BatchManager.init(batch)
        result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)

        doc_ids = set()
        for d in BatchDocumentDbObject.query.filter_by():
            doc_ids.add(d.id)

        result = self.test_client.post('/batch/finalise/', data={'batch_id': batch.id})
        self.assertEqual(result.status_code, 200)
        self.assertEqual(json.loads(result.data), {'result': True})

        self.assertEqual(BatchDocumentDbObject.query.count(), 2)
        new_doc_ids = set()
        for d in BatchDocumentDbObject.query.filter_by():
            new_doc_ids.add(d.id)

        self.assertEqual(doc_ids, new_doc_ids)
Example #11
def get_user_api_structure(auth_user):
    result = {
        'temporal': auth_user.temporal or False,

        'id': unicode(auth_user.uuid),
        'email': auth_user.email or u"",
        'email_confirmed': auth_user.email_confirmed,
        'mobile': auth_user.mobile or u"",
        'mobile_confirmed': auth_user.mobile_confirmed,

        'password_set': bool(auth_user.password and auth_user.password != u'!notset!'),    # bool

        'registration_date': auth_user.signup_date.strftime("%Y-%m-%dT%H:%M:%S"),    # registration date
        'facebook': None,                   # the user's facebook identifier (if any)
        'vk': None,                         # the user's VK identifier (if any)

        'person': {                         # natural person (individual)
            'name': auth_user.name,
            'surname': auth_user.surname,
            'patronymic': auth_user.patronymic
        },
        'role': ['user']                    # list of the user's roles, as an array,
                                            # e.g. ["user", "support", "moderator", "admin"]
    }

    from fw.documents.batch_manager import BatchManager
    batch = BatchManager.get_last_modified_batch(auth_user.id)

    if batch:
        batch_manager = BatchManager.init(batch)

        batch_caption = batch_manager.get_last_modified_batch_caption(batch)
        batch_type = batch.batch_type
        batch_stage = batch_manager.get_stage(batch)

        last_service_data = {
            'id': batch.id,
            'caption': batch_caption,
            'type': batch_type,
            'stage': batch_stage
        }

        result['last_service'] = last_service_data

    from services.pay.subs_manager import SubscriptionManager

    user_subs = SubscriptionManager.get_user_active_subscription(auth_user.id)

    result['subscription'] = {
        'type': user_subs.type,
        'last_day': user_subs.end_dt.strftime("%Y-%m-%dT%H:%M:%S")
    } if user_subs else None

    return result
Example #12
def touch_batch_plugin(batch_id, event_data):
    from fw.documents.batch_manager import BatchManager
    config = celery.conf.get('config')
    app = celery.conf['flask_app']()
    logger = celery.log.get_default_logger()
    with app.app_context():
        batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
        if not batch:
            raise Exception("Batch not found: %s" % batch_id)
        BatchManager.handle_event(batch_id, "batch_manager.touch", event_data, logger, config=config)
    return True
Example #13
def register(app, jinja_env, class_loader, **kwargs):
    search_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), u"templates"))
    jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))

    class_loader.POSSIBLE_LOCATIONS.append('services.osago.documents')
    class_loader.POSSIBLE_LOCATIONS.append('services.osago.documents.enums')
    class_loader.POSSIBLE_LOCATIONS.append('services.osago.documents.general_doc_fields')

    BatchManager.register_manager(DocumentBatchTypeEnum.DBT_OSAGO, OsagoBatchManager)

    _init_doc_requisities(app.config)
Example #14
def go_back(batch_id=None):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id,
                                                  _owner=current_user,
                                                  deleted=False).first()
    if not batch:
        raise errors.BatchNotFound()
    if batch.batch_type != DocumentBatchTypeEnum.DBT_NEW_LLC:
        BatchManager.handle_event(batch_id,
                                  'go_back', {'batch_id': batch_id},
                                  logger=current_app.logger,
                                  config=current_app.config)
        return {"result": True}
    raise NotImplementedError()
Example #15
def cancel_batch_finalization(batch_id=None):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()
    if not batch:
        raise errors.BatchNotFound()
    if batch.status != BatchStatusEnum.BS_BEING_FINALISED:
        return {"result": True}

    try:
        BatchManager.cancel_batch_finalization(batch, current_app.config, current_app.logger)
    except Exception:
        current_app.logger.exception(u"Failed to cancel batch finalization.")
        return {"result": False}

    return {"result": True}
Example #16
def register(app, jinja_env, class_loader, **kwargs):
    search_path = os.path.normpath(
        os.path.join(os.path.abspath(os.path.dirname(__file__)), u"templates"))
    jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))

    class_loader.POSSIBLE_LOCATIONS.append('services.osago.documents')
    class_loader.POSSIBLE_LOCATIONS.append('services.osago.documents.enums')
    class_loader.POSSIBLE_LOCATIONS.append(
        'services.osago.documents.general_doc_fields')

    BatchManager.register_manager(DocumentBatchTypeEnum.DBT_OSAGO,
                                  OsagoBatchManager)

    _init_doc_requisities(app.config)
Example #17
def register(app, jinja_env, class_loader, **kwargs):
    search_path = os.path.normpath(
        os.path.join(os.path.abspath(os.path.dirname(__file__)), u"templates"))
    jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))

    class_loader.POSSIBLE_LOCATIONS.append('services.test_svc.documents.enums')
    class_loader.POSSIBLE_LOCATIONS.append(
        'services.test_svc.documents.ext_methods')
    class_loader.POSSIBLE_LOCATIONS.append(
        'services.test_svc.documents.ext_validators')

    BatchManager.register_manager(DocumentBatchTypeEnum.DBT_TEST_TYPE,
                                  TestSvcManager)

    _init_doc_requisities(app.config)
Example #18
def touch_batch_plugin(batch_id, event_data):
    from fw.documents.batch_manager import BatchManager
    config = celery.conf.get('config')
    app = celery.conf['flask_app']()
    logger = celery.log.get_default_logger()
    with app.app_context():
        batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
        if not batch:
            raise Exception("Batch not found: %s" % batch_id)
        BatchManager.handle_event(batch_id,
                                  "batch_manager.touch",
                                  event_data,
                                  logger,
                                  config=config)
    return True
Example #19
    def run(self):
        self.logger.info(u"Отправка пакета документов на email")  # "Send the document batch to an email address"
        self.logger.info(u'=' * 50)

        batch_id = get_single(u'batch id: ')
        email = get_single(u'email: ')
        try:
            ObjectId(batch_id)
        except Exception:
            self.logger.error(u"Invalid batch id")
            return False

        batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
        if not batch:
            self.logger.error(u"Batch not found")
            return False

        if batch.status == BatchStatusEnum.BS_FINALISED:
            total_attachments = BatchManager.get_batch_rendered_docs(
                batch, current_app.config, current_app.logger)
            send_email.send_email.delay(email,
                                        'email_batch_docs',
                                        attach=total_attachments)
            return True
        self.logger.error(u"Invalid current batch status")
        return True
Example #20
def schedule_notification_email(batch_id):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, deleted=False, finalisation_count=0).scalar()
    if not batch or batch.status not in (BatchStatusEnum.BS_NEW, BatchStatusEnum.BS_EDITED):
        return

    mail_type = 'please_finalise'
    if mail_type in (batch.sent_mails or []):
        return False

    user = batch._owner
    if not user or not user.email:
        return

    manager = BatchManager.init(batch)
    timezone_name = manager.get_batch_timezone(batch_id) or "Europe/Moscow"

    desired_time = current_app.config['NOT_PAID_BATCH_NOTIFY_DESIRED_TIME']
    timeout_td = timedelta(seconds=current_app.config['NOT_PAID_BATCH_NOTIFY_TIMEOUT_SECONDS'])
    dt = calc_fixed_time_not_earlier(datetime.utcnow(), desired_time, timeout_td, timezone_name)

    CeleryScheduler.post(
        "fw.async_tasks.not_paid_check_send.not_finalised_check_and_send",
        task_id="not_finalised_check_and_send%s" % str(batch_id),
        force_replace_task=True,
        kwargs={
            'batch_id': str(batch_id),
            'last_change_dt_str': batch.last_change_dt.strftime("%Y-%m-%dT%H:%M:%S")
        },
        eta=dt
    )
Example #21
    def run(self):
        self.logger.info(u"Перегенерация пакета документов")  # "Re-generate the document batch"
        self.logger.info(u'=' * 50)

        batch_id = get_single(u'batch id: ')
        try:
            ObjectId(batch_id)
        except Exception:
            self.logger.error(u"Invalid batch id")
            return False

        batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
        if not batch:
            self.logger.error(u"Batch not found")
            return False

        if batch.status == BatchStatusEnum.BS_FINALISED:

            batch.status = BatchStatusEnum.BS_BEING_FINALISED
            sqldb.session.commit()
            async_result = rendering.render_batch.delay(batch_id)

            if not async_result.ready():
                batch.current_task_id = async_result.id
                sqldb.session.commit()

            return True
        elif batch.status == BatchStatusEnum.BS_EDITED:
            manager = BatchManager.init(batch)
            manager.finalize_batch(self.config, self.logger, batch)
            return True
        self.logger.error(u"Invalid current batch status")
        return False
Example #22
def get_bank_partners(batch_id=None):
    bank_partner_list = []

    try:
        address = BatchManager.get_batch_address(batch_id)
        city = address['region'] if address[
            'region'] in SPECIAL_CITY_REGIONS else address.get(
                'city', address.get('village', u""))
    except Exception:
        raise errors.BatchNotFound()

    banks = BankPartnersObject.query.filter_by(enabled=True)
    if city:
        banks = banks.filter(
            or_(BankPartnersObject.city.contains([city]),
                BankPartnersObject.city == None))

    for item in banks.order_by(BankPartnersObject.sort_index.asc()):
        bank_partner_list.append({
            "id": item.id,
            "link": item.link,
            "banner": item.banner,
            "title": item.title,
            "conditions": item.conditions or []
        })

    return {"result": {"banks_partners": bank_partner_list}}
Example #23
def render_document_preview(document_object_id):
    document_object_id = document_object_id
    config = celery.conf.get('config')
    request = current_task.request
    sys.path.append(
        os.path.normpath(os.path.abspath(os.path.dirname(__name__))))

    logger = celery.log.get_default_logger()

    with celery.conf['flask_app']().app_context():
        db_doc = BatchDocumentDbObject.query.filter_by(
            id=document_object_id).scalar()
        if not db_doc or db_doc.status not in (UserDocumentStatus.DS_NEW,
                                               UserDocumentStatus.DS_RENDERING_FAILED,
                                               UserDocumentStatus.DS_RENDERING) \
           or db_doc._celery_task_id not in (None, request.id):
            return False

        from fw.documents.batch_manager import BatchManager

        batch_manager = BatchManager.init(db_doc.batch)
        doc_caption = batch_manager.get_batch_caption(db_doc.batch)
        return render_single_document(db_doc, doc_caption,
                                      'preview_watermark.png', config, logger,
                                      request.id)
Example #24
    def check_and_send_not_paid_user_notification(batch_id, config, logger):
        last_change_dt = datetime.utcnow()
        eta = datetime.utcnow()
        eta = eta.replace(tzinfo=pytz.utc)
        timezone_name = BatchManager.get_batch_timezone(batch_id) or "Europe/Moscow"
        eta = datetime.astimezone(eta, pytz.timezone(timezone_name))
        eta += timedelta(seconds=config['NOT_PAID_BATCH_NOTIFY_TIMEOUT_SECONDS'])

        try:
            not_paid_batch_notify_desired_time = config['NOT_PAID_BATCH_NOTIFY_DESIRED_TIME']
            if not_paid_batch_notify_desired_time:
                desired_time = datetime.strptime(not_paid_batch_notify_desired_time, "%H:%M")
                dt = eta.replace(hour=desired_time.hour, minute=desired_time.minute)
                if dt < eta:
                    dt += timedelta(days=1)
                eta = dt
            eta = eta.astimezone(pytz.utc).replace(tzinfo=None)
        except Exception:
            logger.exception(u"Failed to calculate correct send time")

        from fw.async_tasks import not_paid_check_send

        not_paid_check_send.not_paid_check_and_send.apply_async(kwargs={
            'batch_id': str(batch_id),
            'last_change_dt_str': last_change_dt.strftime("%Y-%m-%dT%H:%M:%S")
        }, eta=eta)
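
For comparison with the inline calculation above, here is a rough reconstruction of calc_fixed_time_not_earlier(), the helper called by the schedule_notification_email examples: pick the desired local wall-clock time, but never earlier than now plus the timeout, and return a naive UTC datetime usable as a celery eta. This is an assumption derived from the code above, not the real fw helper:

import pytz
from datetime import datetime, timedelta

def calc_fixed_time_not_earlier(utc_now, desired_time, timeout_td, timezone_name):
    # Assumed reconstruction based on check_and_send_not_paid_user_notification
    # above; the real implementation may differ.
    tz = pytz.timezone(timezone_name)
    earliest = (utc_now.replace(tzinfo=pytz.utc) + timeout_td).astimezone(tz)
    eta = earliest
    if desired_time:  # e.g. "18:00", local wall-clock time
        desired = datetime.strptime(desired_time, "%H:%M")
        eta = earliest.replace(hour=desired.hour, minute=desired.minute)
        if eta < earliest:
            eta += timedelta(days=1)
    return eta.astimezone(pytz.utc).replace(tzinfo=None)

schedule_notification_email then passes the returned datetime as the eta of the scheduled celery task.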
Example #25
    def run(self):
        self.logger.info(u"Перегенерация пакета документов")  # "Re-generate the document batch"
        self.logger.info(u'=' * 50)

        batch_id = get_single(u'batch id: ')
        try:
            ObjectId(batch_id)
        except Exception:
            self.logger.error(u"Invalid batch id")
            return False

        batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
        if not batch:
            self.logger.error(u"Batch not found")
            return False

        if batch.status == BatchStatusEnum.BS_FINALISED:

            batch.status = BatchStatusEnum.BS_BEING_FINALISED
            sqldb.session.commit()
            async_result = rendering.render_batch.delay(batch_id)

            if not async_result.ready():
                batch.current_task_id = async_result.id
                sqldb.session.commit()

            return True
        elif batch.status == BatchStatusEnum.BS_EDITED:
            manager = BatchManager.init(batch)
            manager.finalize_batch(self.config, self.logger, batch)
            return True
        self.logger.error(u"Invalid current batch status")
        return False
Example #26
def unfinalize_batch(batch_id=None, force=False):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id,
                                                  _owner=current_user,
                                                  deleted=False).scalar()
    if not batch:
        raise errors.BatchNotFound()
    if batch.status not in (BatchStatusEnum.BS_FINALISED, ):
        raise errors.DocumentBatchDefinalizationError()

    if batch.current_task_id:
        from celery import app as celery
        current_app.logger.debug(u"There are task id: %s" %
                                 unicode(batch.current_task_id))
        celery.default_app.control.revoke(batch.current_task_id)
        remove_task_id_run_file(current_app.config, batch.current_task_id)
        batch.current_task_id = None
        batch.batch_rendering_start = None
        sqldb.session.commit()

    batch_manager = BatchManager.init(batch)
    try:
        if not batch_manager.definalize_batch(
                current_app.config, current_app.logger, batch, force):
            raise errors.DocumentBatchDefinalizationError()
    except Exception:
        batch.status = BatchStatusEnum.BS_FINALISED
        sqldb.session.commit()
        raise errors.DocumentBatchDefinalizationError()

    return {"result": True}
Example #27
def unfinalize_batch(batch_id=None, force=False):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).scalar()
    if not batch:
        raise errors.BatchNotFound()
    if batch.status not in (BatchStatusEnum.BS_FINALISED,):
        raise errors.DocumentBatchDefinalizationError()

    if batch.current_task_id:
        from celery import app as celery
        current_app.logger.debug(u"There are task id: %s" % unicode(batch.current_task_id))
        celery.default_app.control.revoke(batch.current_task_id)
        remove_task_id_run_file(current_app.config, batch.current_task_id)
        batch.current_task_id = None
        batch.batch_rendering_start = None
        sqldb.session.commit()

    batch_manager = BatchManager.init(batch)
    try:
        if not batch_manager.definalize_batch(current_app.config, current_app.logger, batch, force):
            raise errors.DocumentBatchDefinalizationError()
    except Exception:
        batch.status = BatchStatusEnum.BS_FINALISED
        sqldb.session.commit()
        raise errors.DocumentBatchDefinalizationError()

    return {"result": True}
Example #28
def schedule_notification_email(batch_id):
    batch = DocumentBatchDbObject.query.filter_by(
        id=batch_id, deleted=False, finalisation_count=0).scalar()
    if not batch or batch.status not in (BatchStatusEnum.BS_NEW,
                                         BatchStatusEnum.BS_EDITED):
        return

    mail_type = 'please_finalise'
    if mail_type in (batch.sent_mails or []):
        return False

    user = batch._owner
    if not user or not user.email:
        return

    manager = BatchManager.init(batch)
    timezone_name = manager.get_batch_timezone(batch_id) or "Europe/Moscow"

    desired_time = current_app.config['NOT_PAID_BATCH_NOTIFY_DESIRED_TIME']
    timeout_td = timedelta(
        seconds=current_app.config['NOT_PAID_BATCH_NOTIFY_TIMEOUT_SECONDS'])
    dt = calc_fixed_time_not_earlier(datetime.utcnow(), desired_time,
                                     timeout_td, timezone_name)

    CeleryScheduler.post(
        "fw.async_tasks.not_paid_check_send.not_finalised_check_and_send",
        task_id="not_finalised_check_and_send%s" % str(batch_id),
        force_replace_task=True,
        kwargs={
            'batch_id':
            str(batch_id),
            'last_change_dt_str':
            batch.last_change_dt.strftime("%Y-%m-%dT%H:%M:%S")
        },
        eta=dt)
Example #29
def act(action, batch_db, event_data, plugin_config, logger, config):
    assert batch_db
    descriptors = filter(lambda x: x['name'] == action, get_actions())
    action_descriptor = descriptors[0] if descriptors else None
    if not action_descriptor:
        raise ValueError(u'Invalid action: %s for %s plugin' %
                         (action, PLUGIN_NAME))

    if action == 'get_policy_info_async':
        policy_series_field_name = plugin_config['policy_series_field_name']
        policy_number_field_name = plugin_config['policy_number_field_name']

        policy_series = batch_db.data.get(policy_series_field_name, None)
        policy_number = batch_db.data.get(policy_number_field_name, None)

        if not policy_number or not policy_series:
            return False
        get_policy_info_async.delay(policy_series, policy_number, event_data,
                                    batch_db.id)
    elif action == 'get_policy_info_first_try':
        policy_series_field_name = plugin_config['policy_series_field_name']
        policy_number_field_name = plugin_config['policy_number_field_name']

        policy_series = batch_db.data.get(policy_series_field_name, None)
        policy_number = batch_db.data.get(policy_number_field_name, None)

        if not policy_number or not policy_series:
            return False
        try:
            get_policy_info_async(policy_series,
                                  policy_number,
                                  event_data,
                                  batch_db.id,
                                  async=False,
                                  logger=logger)
        except Exception:
            BatchManager.handle_event(batch_db.id,
                                      "on_policy_info_receive_fail",
                                      event_data,
                                      logger,
                                      config=config)
            return False
    else:
        raise Exception(u"Invalid action %s for plugin %s" %
                        (action, PLUGIN_NAME))

    return True
Example #30
def get_accountant_partners(batch_id=None):
    accountant_partner_list = []

    try:
        region = BatchManager.get_batch_region(batch_id)
    except Exception, ex:
        current_app.logger.exception(u"Failed to get batch region")
        raise errors.BatchNotFound()
Example #31
def get_accountant_partners(batch_id=None):
    accountant_partner_list = []

    try:
        region = BatchManager.get_batch_region(batch_id)
    except Exception, ex:
        current_app.logger.exception(u"Failed to get batch region")
        raise errors.BatchNotFound()
Example #32
def cancel_batch_finalization(batch_id=None):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id,
                                                  _owner=current_user,
                                                  deleted=False).first()
    if not batch:
        raise errors.BatchNotFound()
    if batch.status != BatchStatusEnum.BS_BEING_FINALISED:
        return {"result": True}

    try:
        BatchManager.cancel_batch_finalization(batch, current_app.config,
                                               current_app.logger)
    except Exception:
        current_app.logger.exception(u"Failed to cancel batch finalization.")
        return {"result": False}

    return {"result": True}
Example #33
def batch_update(batch_id=None, batch=None):
    with current_app.model_cache_context:
        current_batch_db_object = DocumentBatchDbObject.query.filter_by(
            id=batch_id, _owner=current_user, deleted=False).first()
        if not current_batch_db_object:
            raise errors.BatchNotFound()

        if current_batch_db_object.status == BatchStatusEnum.BS_BEING_FINALISED:
            current_app.logger.debug(
                u"Updating batch during finalization - cancelling finalization"
            )

            try:
                BatchManager.cancel_batch_finalization(current_batch_db_object,
                                                       current_app.config,
                                                       current_app.logger)
            except Exception:
                current_app.logger.exception(
                    u"Failed to cancel batch finalisation")
                DocumentBatchDbObject.query.filter_by(
                    id=batch_id,
                    status=BatchStatusEnum.BS_BEING_FINALISED).update(
                        {'status': BatchStatusEnum.BS_EDITED})
                sqldb.session.commit()
                raise errors.DocumentBatchUpdateError()

        manager = BatchManager.init(current_batch_db_object)

        batch_type = current_batch_db_object.batch_type
        batch['batch_type'] = batch_type
        if 'metadata' in batch:
            batch['_metadata'] = batch['metadata']

        new_batch = DocumentBatch.parse_raw_value(batch, api_data=True)

        new_batch_api_data = manager.update_batch(batch_id, new_batch,
                                                  current_user.id,
                                                  current_app.config,
                                                  current_app.logger)

        DocumentBatchDbObject.query.filter_by(id=batch_id).update(
            {'last_change_dt': datetime.utcnow()})
        sqldb.session.commit()
        if batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:
            schedule_notification_email(batch_id)
        return new_batch_api_data
Example #34
def get_render_batch_documents_state(batch_id=None, document_types=None):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id,
                                                  _owner=current_user,
                                                  deleted=False).first()
    if not batch:
        raise errors.BatchNotFound()

    batch_manager = BatchManager.init(batch)
    try:
        document_types = json.loads(document_types)
        if not isinstance(document_types, list) and not isinstance(
                document_types, tuple):
            raise Exception()
    except Exception:
        raise errors.InvalidParameterValue('document_type')

    doc_type_set = set()
    for doc_type in document_types:
        if not batch_manager.check_doc_type_support(batch.batch_type,
                                                    doc_type):
            raise errors.InvalidParameterValue('document_type')
        doc_type_set.add(doc_type)

    result = []

    for doc_type in doc_type_set:
        doc_obj = BatchDocumentDbObject.query.filter_by(
            batch_id=batch_id, document_type=doc_type).first()
        if not doc_obj:
            result.append({
                'state': UserDocumentStatus.DS_NEW,
                'document_type': doc_type
            })
            continue

        doc_info = {'state': doc_obj.status, 'document_type': doc_type}

        if doc_obj.status == UserDocumentStatus.DS_RENDERED:
            if doc_obj.file:
                doc_info['links'] = {
                    'pdf': FileStorage.get_url(doc_obj.file,
                                               current_app.config),
                    'jpeg': []
                }
                result.append(doc_info)
            else:
                current_app.logger.debug(
                    u"Not found rendered documents for rendered document %s. "
                    u"Returning as rendering_failed" % doc_type)
                result.append({
                    'state': UserDocumentStatus.DS_RENDERING_FAILED,
                    'document_type': doc_type
                })
        else:
            result.append(doc_info)

    return result
Example #35
    def test_transit_on_event(self):
        batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)
        new_data = {
            'short_name': u'Тест нейм'
        }
        new_batch_data = {
            'data': new_data,
            'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,
            'metadata': {}
        }

        new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)
        result = self.test_client.post('/batch/update/', data={
            'batch_id': batch.id,
            'batch': json.dumps(new_batch.get_api_structure())
        })
        self.assertEqual(result.status_code, 200)
        doc = BatchDocumentDbObject.query.scalar()
        d = json.loads(result.data)
        del d['result']['creation_date']
        self.assertEqual(d, {u'result': {
            u'all_docs': [{
                u'caption': u'Тестовый документ 1',
                u'document_id': doc.id,
                u'document_type': u'test_doc_1',
                u'file_link': None
            }],
            u'batch_type': u'_test',
            u'data': {u'short_name': u'Тест нейм'},
            u'id': batch.id,
            u'metadata': {},
            u'name': u'Тестовый батч',
            u'paid': u'false',
            u'result_fields': {u'name': u'Тест нейм'},
            u'status': u'new',
            u'status_data': {'finalisation_count': u'0'}
        }
        })

        BatchManager.handle_event(batch.id, 'simple_event', {}, current_app.logger, config=self.config)

        doc = DocumentBatchDbObject.query.scalar()
        self.assertEqual(doc.status, 'after_simple_event')
Example #36
def send(batch_id, event, event_data=None):
    event_data = event_data or {}
    from fw.documents.batch_manager import BatchManager
    app = celery.conf['flask_app']()
    logger = app.logger
    logger.info(u"PROCESSING event %s for batch %s" % (event, batch_id))
    with app.app_context():
        result = BatchManager.handle_event(batch_id, event, event_data, logger=logger, config=app.config)
        logger.info(u"FINISH PROCESSING event %s for batch %s" % (event, batch_id))
        return result
Example #37
    def test_transit_on_docs_group_generated(self):
        batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)
        new_data = {
            'short_name': u'создай второй документ',  # Russian: "create a second document"
            'text_field': u'текстфилд'
        }
        new_batch_data = {
            'data': new_data,
            'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,
            'metadata': {}
        }

        new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)
        manager = BatchManager.init(batch)
        result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)

        result = self.test_client.post('/batch/render_document/', data={
            'batch_id': batch.id,
            'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_1, DocumentTypeEnum.DT_TEST_DOC_2])
        })
        self.assertEqual(result.status_code, 200)
        self.assertEqual(len(self.events), 6)
        sqldb.session.commit()
        batch_db = DocumentBatchDbObject.query.filter_by(id=batch.id).scalar()
        doc1 = BatchDocumentDbObject.query.filter_by(batch_id=batch.id).order_by(BatchDocumentDbObject.creation_date.asc()).first()
        doc2 = BatchDocumentDbObject.query.filter(BatchDocumentDbObject.batch_id==batch.id, BatchDocumentDbObject.id != doc1.id).first()
        self.assertEqual(batch_db.status, 'finalised')
        self.assertEqual(self.events[0]['batch'].id, batch_db.id)
        self.assertEqual(self.events[1]['batch'].id, batch_db.id)
        self.assertEqual(self.events[2]['batch'].id, batch_db.id)
        del self.events[0]['batch']
        del self.events[1]['batch']
        del self.events[2]['batch']
        del self.events[3]['batch']
        del self.events[4]['batch']
        del self.events[5]['batch']
        self.assertEqual(self.events[0], {'event': 'batch_manager.on_field_changed',
                                          'event_data': {'field_name': 'short_name',
                                          'new_value': u'создай второй документ',
                                          'old_value': None}})
        self.assertEqual(self.events[1], {'event': 'batch_manager.on_field_changed',
                                          'event_data': {'field_name': 'text_field',
                                          'new_value': u'текстфилд',
                                          'old_value': None}})
        self.assertEqual(self.events[2], {'event': 'batch_manager.on_fieldset_changed',
                                          'event_data': {'fields': [{'field_name': 'short_name',
                                          'new_value': u'создай второй документ',
                                          'old_value': None}, {'field_name': 'text_field',
                                          'new_value': u'текстфилд',
                                          'old_value': None}]}})
        self.assertEqual(self.events[3], {'event': 'doc_render_success', 'event_data': {'doc_id': doc1.id}})
        self.assertEqual(self.events[4], {'event': 'doc_render_success', 'event_data': {'doc_id': doc2.id}})
        self.assertEqual(self.events[5], {'event': 'doc_group_render_success',
                                          'event_data': {'batch_id': batch_db.id,
                                                         'doc_types': ['test_doc_1', 'test_doc_2']}})
Example #38
def batch_create(batch_type=None):
    try:
        DocRequisitiesStorage.get_batch_descriptor(batch_type)
    except Exception:
        raise errors.InvalidParameterValue('batch_type')

    batch_manager = BatchManager.init(batch_type=batch_type)
    new_batch = batch_manager.create_batch(current_user)
    sqldb.session.add(new_batch)
    sqldb.session.commit()
    doc_batch = DocumentBatch.db_obj_to_field(new_batch)
    return {'result': doc_batch.get_api_structure()}
Example #39
def get_render_batch_documents_state(batch_id=None, document_types=None):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()
    if not batch:
        raise errors.BatchNotFound()

    batch_manager = BatchManager.init(batch)
    try:
        document_types = json.loads(document_types)
        if not isinstance(document_types, list) and not isinstance(document_types, tuple):
            raise Exception()
    except Exception:
        raise errors.InvalidParameterValue('document_type')

    doc_type_set = set()
    for doc_type in document_types:
        if not batch_manager.check_doc_type_support(batch.batch_type, doc_type):
            raise errors.InvalidParameterValue('document_type')
        doc_type_set.add(doc_type)

    result = []

    for doc_type in doc_type_set:
        doc_obj = BatchDocumentDbObject.query.filter_by(batch_id=batch_id, document_type=doc_type).first()
        if not doc_obj:
            result.append({
                'state': UserDocumentStatus.DS_NEW,
                'document_type': doc_type
            })
            continue

        doc_info = {
            'state': doc_obj.status,
            'document_type': doc_type
        }

        if doc_obj.status == UserDocumentStatus.DS_RENDERED:
            if doc_obj.file:
                doc_info['links'] = {
                    'pdf': FileStorage.get_url(doc_obj.file, current_app.config),
                    'jpeg': []
                }
                result.append(doc_info)
            else:
                current_app.logger.debug(u"Not found rendered documents for rendered document %s. "
                                         u"Returning as rendering_failed" % doc_type)
                result.append({
                    'state': UserDocumentStatus.DS_RENDERING_FAILED,
                    'document_type': doc_type
                })
        else:
            result.append(doc_info)

    return result
Example #40
def batch_create(batch_type=None):
    try:
        DocRequisitiesStorage.get_batch_descriptor(batch_type)
    except Exception:
        raise errors.InvalidParameterValue('batch_type')

    batch_manager = BatchManager.init(batch_type=batch_type)
    new_batch = batch_manager.create_batch(current_user)
    sqldb.session.add(new_batch)
    sqldb.session.commit()
    doc_batch = DocumentBatch.db_obj_to_field(new_batch)
    return {'result': doc_batch.get_api_structure()}
Example #41
def render_document_plugin(batch_id, event_data):
    doc_id = event_data['doc_id']
    from fw.documents.batch_manager import BatchManager
    config = celery.conf.get('config')
    app = celery.conf['flask_app']()
    logger = celery.log.get_default_logger()
    with app.app_context():
        batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
        if not batch:
            BatchManager.handle_event(batch_id, "doc_render_fail", event_data, logger, config=config)
            batch_group_gen_check_task.delay()
            raise Exception("Batch not found: %s" % batch_id)
        try:
            render_batch_document_raw(batch_id, doc_id, config)
            doc = BatchDocumentDbObject.query.filter_by(id=doc_id).scalar()
            assert(doc)
            event = "doc_render_success" if doc.status == UserDocumentStatus.DS_RENDERED else "doc_render_fail"
            logger.info(u"render_document_plugin event %s for document %s" % (event, doc.id))
            BatchManager.handle_event(batch_id, event, event_data, logger, config=config)
            batch_group_gen_check_task.delay()
        except Exception:
            zabbix_sender.send("celery_failures", 1)
            BatchManager.handle_event(batch_id, "doc_render_fail", event_data, logger, config=config)
            batch_group_gen_check_task.delay()
            raise
Example #42
def batch_group_gen_check_task():
    from fw.documents.batch_manager import BatchManager
    app = celery.conf['flask_app']()
    logger = celery.log.get_default_logger()
    with app.app_context():
        for batch_check in DocGroupRenderTaskCheck.query.filter_by(
                check_completed=False):
            logger.info(u"Checking check %s" % str(batch_check.id))
            batch = DocumentBatchDbObject.query.filter_by(
                id=batch_check.batch_id).scalar()
            if not batch:
                BatchManager.handle_event(batch_check.batch_id,
                                          "doc_group_render_fail", {},
                                          logger,
                                          config=app.config)
                raise Exception("Batch not found: %s" % batch_check.batch_id)
            doc_id_list = batch_check.doc_id_list
            all_rendered = True
            logger.info(u"Checking. Doc id list length: %s" %
                        str(len(doc_id_list)))
            for doc_id in doc_id_list:
                doc = BatchDocumentDbObject.query.filter_by(id=doc_id).scalar()
                logger.info(u"Checking doc %s." % str(doc_id))
                if not doc or doc.status == UserDocumentStatus.DS_RENDERING_FAILED or \
                   doc._celery_task_id and abs((datetime.utcnow() - doc._celery_task_started).total_seconds()) > 60:
                    res = DocGroupRenderTaskCheck.query.filter_by(
                        id=batch_check.id, check_completed=False).update(
                            {'check_completed': True})
                    sqldb.session.commit()
                    logger.info(u"Checking -> checked %s." % str(res))
                    if res:
                        all_rendered = False
                        logger.info(
                            u"Checking: handling doc_group_render_fail for %s"
                            % str(batch_check.batch_id))
                        BatchManager.handle_event(batch_check.batch_id,
                                                  "doc_group_render_fail",
                                                  batch_check.event_data,
                                                  logger,
                                                  config=app.config)
                    break
                if doc.status != UserDocumentStatus.DS_RENDERED:
                    logger.info(u"Checking: doc %s is not rendered" %
                                str(doc.id))
                    all_rendered = False
            if all_rendered:
                logger.info(u"Checking: All rendered")
                res = DocGroupRenderTaskCheck.query.filter_by(
                    id=batch_check.id,
                    check_completed=False).update({'check_completed': True})
                sqldb.session.commit()
                if res:
                    BatchManager.handle_event(batch_check.batch_id,
                                              "doc_group_render_success",
                                              batch_check.event_data,
                                              logger,
                                              config=app.config)

    return True
Example #43
    def create_batch(self,
                     batch_type,
                     owner,
                     do_not_save_to_db=False,
                     status=None):
        manager = BatchManager.init(batch_type=batch_type)
        batch = manager.create_batch(owner)
        if status is not None:
            batch.status = status
        if not do_not_save_to_db:
            sqldb.session.add(batch)
            sqldb.session.commit()
        return batch
Example #44
    def test_send_email_on_transition(self):
        batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)
        new_data = {
            'short_name': u'Тест нейм'
        }
        new_batch_data = {
            'data': new_data,
            'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,
            'metadata': {}
        }

        new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)
        result = self.test_client.post('/batch/update/', data={
            'batch_id': batch.id,
            'batch': json.dumps(new_batch.get_api_structure())
        })
        self.assertEqual(result.status_code, 200)

        self.assertEqual(len(self.mailer.mails), 0)
        BatchManager.handle_event(batch.id, 'simple_event', None, logger=current_app.logger, config=self.config)

        self.assertEqual(len(self.mailer.mails), 1)
Example #45
def batch_update(batch_id=None, batch=None):
    with current_app.model_cache_context:
        current_batch_db_object = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user,
                                                                        deleted=False).first()
        if not current_batch_db_object:
            raise errors.BatchNotFound()

        if current_batch_db_object.status == BatchStatusEnum.BS_BEING_FINALISED:
            current_app.logger.debug(u"Updating batch during finalization - cancelling finalization")

            try:
                BatchManager.cancel_batch_finalization(current_batch_db_object,
                                                       current_app.config, current_app.logger)
            except Exception:
                current_app.logger.exception(u"Failed to cancel batch finalisation")
                DocumentBatchDbObject.query.filter_by(id=batch_id, status=BatchStatusEnum.BS_BEING_FINALISED).update(
                    {'status': BatchStatusEnum.BS_EDITED})
                sqldb.session.commit()
                raise errors.DocumentBatchUpdateError()

        manager = BatchManager.init(current_batch_db_object)

        batch_type = current_batch_db_object.batch_type
        batch['batch_type'] = batch_type
        if 'metadata' in batch:
            batch['_metadata'] = batch['metadata']

        new_batch = DocumentBatch.parse_raw_value(batch, api_data=True)

        new_batch_api_data = manager.update_batch(batch_id, new_batch, current_user.id,
                                                  current_app.config, current_app.logger)

        DocumentBatchDbObject.query.filter_by(id=batch_id).update({'last_change_dt': datetime.utcnow()})
        sqldb.session.commit()
        if batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:
            schedule_notification_email(batch_id)
        return new_batch_api_data
Example #46
def notarius_list(batch_id=None):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).scalar()
    if not batch:
        raise errors.InvalidParameterValue('batch_id')

    batch_manager = BatchManager.init(batch)
    assert batch_manager

    region = batch_manager.get_batch_region(batch_id)

    if not region:
        return {'result': []}
    query = NotariusObject.query.filter_by(region=region)
    result = [item.get_api_structure() for item in query]
    return {'result': result}
Example #47
def send(batch_id, event, event_data=None):
    event_data = event_data or {}
    from fw.documents.batch_manager import BatchManager
    app = celery.conf['flask_app']()
    logger = app.logger
    logger.info(u"PROCESSING event %s for batch %s" % (event, batch_id))
    with app.app_context():
        result = BatchManager.handle_event(batch_id,
                                           event,
                                           event_data,
                                           logger=logger,
                                           config=app.config)
        logger.info(u"FINISH PROCESSING event %s for batch %s" %
                    (event, batch_id))
        return result
Example #48
def yurist_check(config, batch_db, file_obj_list, logger):
    # get batch id and check if it is still in active state
    batch_check = YuristBatchCheckObject.query.filter(
        YuristBatchCheckObject.batch_id == batch_db.id,
        YuristBatchCheckObject.status.notin_(YuristBatchCheckStatus.FINAL_STATUSES)
    ).order_by(YuristBatchCheckObject.create_date.desc()).first()
    # this check should be performed later
    if not batch_check:
        return False
    user = batch_db._owner
    if not user:
        raise Exception("Failed to find batch owner")

    from fw.documents.batch_manager import BatchManager
    attaches = BatchManager.get_shared_links_to_rendered_docs(batch_db, config, logger)

    schema = config['WEB_SCHEMA']
    domain = config['DOMAIN']
    for file_obj in file_obj_list:
        path = FileStorage.get_path(file_obj, config)
        if os.path.exists(path):
            if file_obj._owner:
                url = u"%s://%s%s" % (schema, domain, FileStorage.get_shared_link(file_obj.id, config))
            else:
                url = u"%s://%s%s" % (schema, domain, FileStorage.get_url(file_obj, config))

            attaches.append({
                'url': url,
                'title': file_obj.file_name or url
            })

    rec_list = config['YURIST_EMAIL_LIST']
    from services.yurist.async_tasks import yurist_check_send
    batch_check_id = batch_check.id if batch_check else "not-found"
    # countdown 2 hours before execution
    yurist_check_send.check_and_send.apply_async(
        args=[],
        kwargs=dict(
            email=user.email,
            batch_check_id=batch_check_id,
            server_url_schema=config['WEB_SCHEMA'],
            api_url=config['api_url'],
            attaches=attaches,
            mail_type='yurist_batch_check',
            rec_list=rec_list
        ),
        countdown=config['SEND_DOCS_TO_YURIST_DELAY_SECONDS']
    )
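
The loop above picks a shared link for user-owned files and a plain URL otherwise, then collects url/title pairs. A self-contained approximation with the FileStorage helpers replaced by plain callables (all names below are illustrative):

def build_attachments(file_objs, schema, domain, get_shared_link, get_url):
    # get_shared_link/get_url stand in for the FileStorage helpers; they are passed in
    # so the sketch stays runnable without the framework.
    attaches = []
    for f in file_objs:
        link = get_shared_link(f) if f.get("owner") else get_url(f)
        url = u"%s://%s%s" % (schema, domain, link)
        attaches.append({"url": url, "title": f.get("file_name") or url})
    return attaches

# build_attachments([{"id": 1, "owner": True, "file_name": "charter.pdf"}],
#                   "https", "example.com",
#                   lambda f: "/shared/%s" % f["id"],
#                   lambda f: "/files/%s" % f["id"])
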
Beispiel #49
0
    def update_batch(self, batch_id, new_batch, current_user_id, config,
                     logger):
        current_batch_db_object = DocumentBatchDbObject.query.filter_by(
            id=batch_id, _owner_id=current_user_id, deleted=False).first()
        if not current_batch_db_object:
            raise errors.BatchNotFound()

        batch_status = current_batch_db_object.status
        if batch_status not in (BatchStatusEnum.BS_NEW,
                                BatchStatusEnum.BS_EDITED):
            logger.warn(u"Can't update batch %s in status %s" %
                        (unicode(batch_id), unicode(batch_status)))
            raise errors.DocumentBatchUpdateError()

        try:
            current_batch = DocumentBatch.db_obj_to_field(
                current_batch_db_object)
        except Exception:
            logger.fatal(u"Failed to validate batch from DB!")
            raise

        current_fields = current_batch.data.value
        assert isinstance(current_fields, dict)

        # STEP 1: make new data and metadata
        #         and collect changed fields names
        new_batch_db = new_batch.get_db_object()
        merged_fields, changed_field_names = self._merge_raw_fields(
            current_batch_db_object.data, new_batch_db.data)

        current_batch_db_object._metadata = new_batch_db._metadata
        current_batch_db_object.data = merged_fields
        sqldb.session.commit()

        # STEP 2: make document set from data and schema
        try:
            new_field_set, new_docs, _ = self.make_docs_for_new_data(
                current_batch.data.value, new_batch.data.value,
                current_batch_db_object,
                BatchManager.get_batch_document_fields(
                    current_batch_db_object))
        except errors.DocumentBatchUpdateError as ex:
            logger.exception(u"Failed to update batch with new values")
            current_batch_db_object.error_info = {
                "error": u"unknown error (%s)" % str(ex)
            }
            sqldb.session.commit()
            raise
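
_merge_raw_fields itself is not part of the listing; the following is a plausible minimal version, assuming it overlays non-empty new values onto the current batch data and reports which field names changed:

def merge_raw_fields(current_data, new_data):
    # Assumed behaviour: non-None values from the new data overwrite the current ones;
    # everything else is kept, and the keys whose values actually changed are reported.
    merged = dict(current_data or {})
    changed = set()
    for key, value in (new_data or {}).items():
        if value is None:
            continue
        if merged.get(key) != value:
            merged[key] = value
            changed.add(key)
    return merged, changed

# merge_raw_fields({"full_name": u"OOO Romashka"}, {"full_name": u"OOO Lutik", "address": None})
# -> ({"full_name": u"OOO Lutik"}, {"full_name"})
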
Beispiel #50
0
def yurist_check(config, batch_db, file_obj_list, logger):
    # get batch id and check if it is still in active state
    batch_check = YuristBatchCheckObject.query.filter(
        YuristBatchCheckObject.batch_id == batch_db.id,
        YuristBatchCheckObject.status.notin_(
            YuristBatchCheckStatus.FINAL_STATUSES)).order_by(
                YuristBatchCheckObject.create_date.desc()).first()
    # this check should be performed later
    if not batch_check:
        return False
    user = batch_db._owner
    if not user:
        raise Exception("Failed to find batch owner")

    from fw.documents.batch_manager import BatchManager
    attaches = BatchManager.get_shared_links_to_rendered_docs(
        batch_db, config, logger)

    schema = config['WEB_SCHEMA']
    domain = config['DOMAIN']
    for file_obj in file_obj_list:
        path = FileStorage.get_path(file_obj, config)
        if os.path.exists(path):
            if file_obj._owner:
                url = u"%s://%s%s" % (schema, domain,
                                      FileStorage.get_shared_link(
                                          file_obj.id, config))
            else:
                url = u"%s://%s%s" % (schema, domain,
                                      FileStorage.get_url(file_obj, config))

            attaches.append({'url': url, 'title': file_obj.file_name or url})

    rec_list = config['YURIST_EMAIL_LIST']
    from services.yurist.async_tasks import yurist_check_send
    batch_check_id = batch_check.id if batch_check else "not-found"
    # delayed execution: the countdown comes from SEND_DOCS_TO_YURIST_DELAY_SECONDS (2 hours)
    yurist_check_send.check_and_send.apply_async(
        args=[],
        kwargs=dict(email=user.email,
                    batch_check_id=batch_check_id,
                    server_url_schema=config['WEB_SCHEMA'],
                    api_url=config['api_url'],
                    attaches=attaches,
                    mail_type='yurist_batch_check',
                    rec_list=rec_list),
        countdown=config['SEND_DOCS_TO_YURIST_DELAY_SECONDS'])
Beispiel #51
0
def notarius_list(batch_id=None):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id,
                                                  _owner=current_user,
                                                  deleted=False).scalar()
    if not batch:
        raise errors.InvalidParameterValue('batch_id')

    batch_manager = BatchManager.init(batch)
    assert batch_manager

    region = batch_manager.get_batch_region(batch_id)

    if not region:
        return {'result': []}
    query = NotariusObject.query.filter_by(region=region)
    result = [item.get_api_structure() for item in query]
    return {'result': result}
Beispiel #52
0
    def update_batch(self, batch_id, new_batch, current_user_id, config, logger):
        current_batch_db_object = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner_id=current_user_id,
                                                                        deleted=False).first()
        if not current_batch_db_object:
            raise errors.BatchNotFound()

        batch_status = current_batch_db_object.status
        if batch_status not in (BatchStatusEnum.BS_NEW, BatchStatusEnum.BS_EDITED):
            logger.warn(u"Can't update batch %s in status %s" % (unicode(batch_id), unicode(batch_status)))
            raise errors.DocumentBatchUpdateError()

        try:
            current_batch = DocumentBatch.db_obj_to_field(current_batch_db_object)
        except Exception:
            logger.fatal(u"Failed to validate batch from DB!")
            raise

        current_fields = current_batch.data.value
        assert isinstance(current_fields, dict)

        # STEP 1: make new data and metadata
        #         and collect changed fields names
        new_batch_db = new_batch.get_db_object()
        merged_fields, changed_field_names = self._merge_raw_fields(current_batch_db_object.data, new_batch_db.data)

        current_batch_db_object._metadata = new_batch_db._metadata
        current_batch_db_object.data = merged_fields
        sqldb.session.commit()

        # STEP 2: make document set from data and schema
        try:
            new_field_set, new_docs, _ = self.make_docs_for_new_data(
                current_batch.data.value,
                new_batch.data.value,
                current_batch_db_object,
                BatchManager.get_batch_document_fields(current_batch_db_object)
            )
        except errors.DocumentBatchUpdateError as ex:
            logger.exception(u"Failed to update batch with new values")
            current_batch_db_object.error_info = {"error": u"unknown error (%s)" % str(ex)}
            sqldb.session.commit()
            raise
Beispiel #53
0
def get_stamp_partners(batch_id=None):
    stamp_partner_list = []

    try:
        region = BatchManager.get_batch_region(batch_id)
    except Exception:
        raise errors.BatchNotFound()

    stamps = StampPartnersObject.query.filter_by(enabled=True)
    if region:
        stamps = stamps.filter(or_(StampPartnersObject.region.contains([region]), StampPartnersObject.region == None))
    for item in stamps.order_by(StampPartnersObject.sort_index.asc()):
        stamp_partner_list.append({
            "id": item.id,
            "link": item.link,
            "banner": item.banner,
            "title": item.title
        })

    return {"result": {"stamp_partners": stamp_partner_list}}
Beispiel #54
0
def render_document_preview(document_object_id):
    config = celery.conf.get('config')
    request = current_task.request
    sys.path.append(os.path.normpath(os.path.abspath(os.path.dirname(__name__))))

    logger = celery.log.get_default_logger()

    with celery.conf['flask_app']().app_context():
        db_doc = BatchDocumentDbObject.query.filter_by(id=document_object_id).scalar()
        if not db_doc or db_doc.status not in (UserDocumentStatus.DS_NEW,
                                               UserDocumentStatus.DS_RENDERING_FAILED,
                                               UserDocumentStatus.DS_RENDERING) \
           or db_doc._celery_task_id not in (None, request.id):
            return False

        from fw.documents.batch_manager import BatchManager

        batch_manager = BatchManager.init(db_doc.batch)
        doc_caption = batch_manager.get_batch_caption(db_doc.batch)
        return render_single_document(db_doc, doc_caption, 'preview_watermark.png', config, logger, request.id)
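
The guard at the top of render_document_preview makes the task idempotent: it bails out unless the document is in a renderable state and not claimed by a different Celery task. A condensed sketch of just that check (the status strings are assumptions; the listing only shows the enum member names):

RENDERABLE_STATUSES = ("new", "rendering_failed", "rendering")  # assumed enum values

def may_render(doc_status, doc_task_id, current_task_id):
    # Render only if the document is in a renderable state and is either unclaimed
    # or already claimed by this very task (so a Celery retry is allowed to proceed).
    return doc_status in RENDERABLE_STATUSES and doc_task_id in (None, current_task_id)
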
Beispiel #55
0
def get_stamp_partners(batch_id=None):
    stamp_partner_list = []

    try:
        region = BatchManager.get_batch_region(batch_id)
    except Exception:
        raise errors.BatchNotFound()

    stamps = StampPartnersObject.query.filter_by(enabled=True)
    if region:
        stamps = stamps.filter(
            or_(StampPartnersObject.region.contains([region]),
                StampPartnersObject.region == None))
    for item in stamps.order_by(StampPartnersObject.sort_index.asc()):
        stamp_partner_list.append({
            "id": item.id,
            "link": item.link,
            "banner": item.banner,
            "title": item.title
        })

    return {"result": {"stamp_partners": stamp_partner_list}}
Beispiel #56
0
def _finalize_batch(batch):
    batch_status = batch.status
    if batch_status == BatchStatusEnum.BS_FINALISED:
        return {"result": True}

    if batch_status not in (BatchStatusEnum.BS_NEW, BatchStatusEnum.BS_EDITED):
        raise errors.DocumentBatchFinalizationError()

    docs = batch._documents
    if not docs:
        return {"result": False}

    batch_manager = BatchManager.init(batch)

    try:
        if not batch_manager.finalize_batch(current_app.config, current_app.logger, batch):
            raise errors.DocumentBatchFinalizationError()
    except Exception:
        current_app.logger.exception(u"Finalisation error")
        return {"result": False}

    return {"result": True}
Beispiel #57
0
def _finalize_batch(batch):
    batch_status = batch.status
    if batch_status == BatchStatusEnum.BS_FINALISED:
        return {"result": True}

    if batch_status not in (BatchStatusEnum.BS_NEW, BatchStatusEnum.BS_EDITED):
        raise errors.DocumentBatchFinalizationError()

    docs = batch._documents
    if not docs:
        return {"result": False}

    batch_manager = BatchManager.init(batch)

    try:
        if not batch_manager.finalize_batch(current_app.config,
                                            current_app.logger, batch):
            raise errors.DocumentBatchFinalizationError()
    except Exception:
        current_app.logger.exception(u"Finalisation error")
        return {"result": False}

    return {"result": True}
Beispiel #58
0
    def run(self):
        self.logger.info(u"Отправка пакета документов на email")  # "Sending the document package by email"
        self.logger.info(u'=' * 50)

        batch_id = get_single(u'batch id: ')
        email = get_single(u'email: ')
        try:
            ObjectId(batch_id)
        except Exception:
            self.logger.error(u"Invalid batch id")
            return False

        batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
        if not batch:
            self.logger.error(u"Batch not found")
            return False

        if batch.status == BatchStatusEnum.BS_FINALISED:
            total_attachments = BatchManager.get_batch_rendered_docs(batch, current_app.config, current_app.logger)
            send_email.send_email.delay(email, 'email_batch_docs', attach=total_attachments)
            return True
        self.logger.error(u"Invalid current batch status")
        return True
Beispiel #59
0
def get_bank_partners(batch_id=None):
    bank_partner_list = []

    try:
        address = BatchManager.get_batch_address(batch_id)
        city = address['region'] if address['region'] in SPECIAL_CITY_REGIONS else address.get('city', address.get('village', u""))
    except Exception:
        raise errors.BatchNotFound()

    banks = BankPartnersObject.query.filter_by(enabled=True)
    if city:
        banks = banks.filter(or_(BankPartnersObject.city.contains([city]),
                                 BankPartnersObject.city == None))

    for item in banks.order_by(BankPartnersObject.sort_index.asc()):
        bank_partner_list.append({
            "id": item.id,
            "link": item.link,
            "banner": item.banner,
            "title": item.title,
            "conditions": item.conditions or []
        })

    return {"result": {"banks_partners": bank_partner_list}}