def run(self):
        self.logger.info(u"Отправка пакета документов на email")
        self.logger.info(u'=' * 50)

        batch_id = get_single(u'batch id: ')
        email = get_single(u'email: ')
        try:
            ObjectId(batch_id)
        except Exception:
            self.logger.error(u"Invalid batch id")
            return False

        batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
        if not batch:
            self.logger.error(u"Batch not found")
            return False

        if batch.status == BatchStatusEnum.BS_FINALISED:
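            # Only a finalised batch has rendered documents that can be attached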
            total_attachments = BatchManager.get_batch_rendered_docs(
                batch, current_app.config, current_app.logger)
            send_email.send_email.delay(email,
                                        'email_batch_docs',
                                        attach=total_attachments)
            return True
        self.logger.error(u"Invalid current batch status")
        return False
 def run(self):
     self.logger.info(u"Добавление нового пользователя:")
     self.logger.info(u'=' * 50)
     # NOTE: partially masked in the source; the prompts are recovered from the
     # visible strings, create_user is a hypothetical helper
     username = get_single(u'username: ')
     password = get_single(u'password: ')
     if not UserManager.create_user(username, password):
         self.logger.error(u'не удалось создать пользователя')
         exit(-1)
    def run(self):
        self.logger.info(u"Добавление нотариуса")
        self.logger.info(u'=' * 50)

        filename_str = get_single(u'File name: ', validator=FilenameSimpleValidator(), error_hint=u"File not found")

        with codecs.open(filename_str, encoding='utf8') as f:
            content = f.read()
            try:
                data = json.loads(content)
                notarius = NotariusObject(
                    id=data['id'],
                    surname=data.get('surname', u''),
                    name=data.get('name', u''),
                    patronymic=data.get('patronymic', None),

                    schedule=data['schedule'],
                    schedule_caption=data['schedule_caption'],
                    title=data['title'],
                    address=data['address'],
                    region=data['region'],
                    metro_station=data.get('metro_station', u'')
                )
                sqldb.session.add(notarius)
                sqldb.session.commit()
            except Exception, ex:
                self.logger.exception(u"Не удалось прочитать файл. Проверьте формат.")
    def run(self):
        self.logger.info(u"Перегенерация пакета документов")
        self.logger.info(u'=' * 50)

        batch_id = get_single(u'batch id: ')
        try:
            ObjectId(batch_id)
        except Exception:
            self.logger.error(u"Invalid batch id")
            return False

        batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
        if not batch:
            self.logger.error(u"Batch not found")
            return False

        if batch.status == BatchStatusEnum.BS_FINALISED:

            batch.status = BatchStatusEnum.BS_BEING_FINALISED
            sqldb.session.commit()
            async_result = rendering.render_batch.delay(batch_id)

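            # Remember the Celery task id on the batch while the render is still in progress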
            if not async_result.ready():
                batch.current_task_id = async_result.id
                sqldb.session.commit()

            return True
        elif batch.status == BatchStatusEnum.BS_EDITED:
            manager = BatchManager.init(batch)
            manager.finalize_batch(self.config, self.logger, batch)
            return True
        self.logger.error(u"Invalid current batch status")
        return False
 def run(self):
     self.logger.info(u"Set date")
     batch_id = get_single(u'batch id: ')
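     # Force doc_date on every non-deleted document of the batch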
     for doc in BatchDocumentDbObject.query.filter_by(batch_id=batch_id, deleted=False):
         if doc.data and 'doc_date' in doc.data:
             d = doc.data
             d['doc_date'] = datetime(2015, 6, 13)
             BatchDocumentDbObject.query.filter_by(id=doc.id).update({'data':d})
             self.logger.info('updated %s' % doc.document_type)
     sqldb.session.commit()
 def run(self):
     self.logger.info(u"Set date")
     batch_id = get_single(u'batch id: ')
     for doc in BatchDocumentDbObject.query.filter_by(batch_id=batch_id,
                                                      deleted=False):
         if doc.data and 'doc_date' in doc.data:
             d = doc.data
             d['doc_date'] = datetime(2015, 6, 13)
             BatchDocumentDbObject.query.filter_by(id=doc.id).update(
                 {'data': d})
             self.logger.info('updated %s' % doc.document_type)
     sqldb.session.commit()
    def run(self):
        self.logger.info(u"Перегенерация документа")
        self.logger.info(u'=' * 50)

        doc_id = get_single(u'document id: ')
        doc = BatchDocumentDbObject.query.filter_by(id=doc_id).scalar()
        if not doc:
            self.logger.error(u"Failed to find document")
            return False
        batch = doc.batch
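        # Queue the Celery task that re-renders this single document of the batch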
        rendering.render_batch_document.delay(batch.id, doc_id)

        self.logger.error(u"Started document render")
        return False
    def run(self):
        self.logger.info(u"Отправка пакета документов на email")
        self.logger.info(u'=' * 50)

        batch_id = get_single(u'batch id: ')
        email = get_single(u'email: ')
        try:
            ObjectId(batch_id)
        except Exception:
            self.logger.error(u"Invalid batch id")
            return False

        batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
        if not batch:
            self.logger.error(u"Batch not found")
            return False

        if batch.status == BatchStatusEnum.BS_FINALISED:
            total_attachments = BatchManager.get_batch_rendered_docs(batch, current_app.config, current_app.logger)
            send_email.send_email.delay(email, 'email_batch_docs', attach=total_attachments)
            return True
        self.logger.error(u"Invalid current batch status")
        return False
    def run(self):

        file_name = get_single(u'File name: ', validator=UpdateGeoDbFromFileCommand.FilenameSimpleValidator(),
                               error_hint=u"File not found")

        tmp_dir = '/tmp/geo_files'
        if not os.path.exists(tmp_dir):
            os.makedirs(tmp_dir)

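        # Unpack the archive; it is expected to contain cities.txt and cidr_optim.txt
        # (an ipgeobase-style dump), which are checked for below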
        subprocess.call(shlex.split('tar -xzvf %s -C %s' % (file_name, tmp_dir)))

        cities_file_name = os.path.join(tmp_dir, "cities.txt")
        data_file_name = os.path.join(tmp_dir, "cidr_optim.txt")

        if not os.path.exists(cities_file_name) or not os.path.exists(data_file_name):
            self.logger.error("missing required file(s)")
            return

        cities_file = codecs.open(cities_file_name, 'r', 'cp1251')
        cities = cities_file.read()
        cities_file.close()

        data_file = codecs.open(data_file_name, 'r', 'cp1251')
        data = data_file.read()
        data_file.close()

        os.unlink(cities_file_name)
        os.unlink(data_file_name)

        self.logger.info('processing')

        cities_objects = []

        for line in cities.split('\n'):
            line = line.strip()
            if not line:
                continue
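            # cities.txt line: <city_id> TAB <city> TAB <region> TAB <district> TAB <lat> TAB <lng>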
            match = re.search(ur'(\d+)\t(.+)\t(.+)\t(.+)\t(.+)\t(.+)', line)
            if not match:
                self.logger.warn(u"Failed to process: %s" % line)
                continue

            cid = int(match.groups(0)[0])
            city_name = match.groups(0)[1]
            region = match.groups(0)[2]
            # district = match.groups(0)[3]
            lat = match.groups(0)[4]
            lng = match.groups(0)[5]
            cities_objects.append({
                'cid': cid,
                'name': city_name,
                'region': region,
                'lat': lat,
                'lng': lng
            })

        geo_ranges = []
        for line in data.split('\n'):
            line = line.strip()
            if not line:
                continue
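            # cidr_optim.txt line: <range_start> TAB <range_end> TAB <ip range> TAB <country, unused> TAB <city_id>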
            match = re.search(
                ur'(\d+)\t(\d+)\t(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3} - \d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\t(.+)\t(.+)',
                line)
            if not match:
                self.logger.warn(u"Failed to process: %s" % line)
                continue
            cid = match.groups(0)[4]
            if not cid.isdigit():
                continue
            geo_ranges.append({
                'start': int(match.groups(0)[0]),
                'end': int(match.groups(0)[1]),
                'cid': int(cid)
            })

        # Drop the previously imported geo data before loading the new dump
        GeoCities.query.delete()
        GeoRanges.query.delete()
        sqldb.session.commit()

        for city in cities_objects:
            new_gc = GeoCities(
                name=city['name'],
                cid=city['cid'],
                region=city['region'],
                lat=city['lat'],
                lng=city['lng']
            )
            sqldb.session.add(new_gc)
        sqldb.session.commit()

        for geo in geo_ranges:
            new_gr = GeoRanges(
                cid=geo['cid'],
                start=geo['start'],
                end=geo['end']
            )
            sqldb.session.add(new_gr)
        sqldb.session.commit()
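A minimal usage sketch (not part of the original command) showing how the GeoRanges and GeoCities tables populated above could be queried to resolve an IP address to a city; the find_city_by_ip name and the integer conversion are assumptions, only the start/end/cid columns used above are relied on.

import socket
import struct


def find_city_by_ip(ip_address):
    # Dotted-quad IP -> the integer form stored in GeoRanges.start / GeoRanges.end
    ip_int = struct.unpack('!I', socket.inet_aton(ip_address))[0]
    geo_range = GeoRanges.query.filter(GeoRanges.start <= ip_int,
                                       GeoRanges.end >= ip_int).first()
    if not geo_range:
        return None
    return GeoCities.query.filter_by(cid=geo_range.cid).first()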
Beispiel #13
    def run(self):
        self.logger.info(u"Отправка письма о регистрации компании")
        self.logger.info(u'=' * 50)

        batch_id = get_single(u'Batch id: ')

        batch = DocumentBatchDbObject.query.filter_by(
            id=batch_id, batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC).first()
        if not batch:
            self.logger.info(u'Batch not found')
            return

        if not batch.result_fields or 'ifns_reg_info' not in batch.result_fields:
            self.logger.info(u'Company not registered')
            return

        reg_info = batch.result_fields['ifns_reg_info']
        if (reg_info.get('status') != 'registered'
                or not reg_info.get('reg_date') or not reg_info.get('ogrn')):
            self.logger.info(u'Company not registered')
            return

        ogrn = reg_info['ogrn']
        # reg_date is assumed to be stored as a 'DD.MM.YYYY' string
        reg_date = datetime.strptime(reg_info['reg_date'], "%d.%m.%Y")

        recipient = batch._owner.email
        if not recipient:
            self.logger.info(u'Company owner has no email')
            return

        short_name = batch.data.get('short_name', u"")
        doc_rec_type = batch.data.get('obtain_way', None)

        ifns_book_doc_receive_url = "%s://%s/ip/?id=%s" % (
            self.config['WEB_SCHEMA'], self.config['DOMAIN'], batch_id)
        ifns_book_doc_receive_url = utm_args(
            ifns_book_doc_receive_url, 'ifns_ip_reg_success',
            batch._owner_id) + u"#page=obtaining"
        ifns_book_doc_receive_url = UserManager.make_auth_url(
            ifns_book_doc_receive_url, batch._owner).get_url(self.config)

        docs_recipient_fio = u""
        if doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT:
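            # Walk the founders listed in the P11001 application to find who is
            # designated to receive the registration documents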
            doc = BatchDocumentDbObject.query.filter_by(
                batch=batch, document_type=DocumentTypeEnum.DT_P11001).first()
            if doc:
                founders = doc.data['founders']
                for founder in founders:
                    if founder.get('documents_recipient_type', '') != '':
                        person = founder.get('person', None)
                        if person and '_id' in person:
                            person_obj = PrivatePersonDbObject.query.filter_by(
                                id=person['_id']).scalar()
                            if person_obj:
                                pp = PrivatePerson.db_obj_to_field(person_obj)
                                if pp:
                                    docs_recipient_fio = pp.full_name
                        else:
                            company = founder.get('company', None)
                            if company:
                                company_db_object = CompanyDbObject.query.filter_by(
                                    id=company['_id']).scalar()
                                if company_db_object:
                                    cc = CompanyObject.db_obj_to_field(
                                        company_db_object)
                                    if cc and cc.general_manager and cc.general_manager.initialized:
                                        docs_recipient_fio = cc.general_manager.full_name
        elif doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT:
            doc = BatchDocumentDbObject.query.filter_by(
                batch=batch,
                document_type=DocumentTypeEnum.DT_DOVERENNOST_OBTAIN).first()

            if doc:
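                # The power of attorney for obtaining documents names the authorized person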
                doc_obtain_person = doc.data.get('doc_obtain_person', None)
                if doc_obtain_person and '_id' in doc_obtain_person:
                    person_obj = PrivatePersonDbObject.query.filter_by(
                        id=doc_obtain_person['_id']).scalar()
                    if person_obj:
                        pp = PrivatePerson.db_obj_to_field(person_obj)
                        if pp:
                            docs_recipient_fio = pp.full_name

        send_email.send_email(
            recipient,
            'ifns_llc_reg_success',
            short_name=short_name,
            doc_rec_by_email=(
                doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_SEND_BY_MAIL),
            doc_rec_by_responsible=(doc_rec_type == DocumentDeliveryTypeStrEnum
                                    .DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT),
            ifns_book_doc_receive_url=ifns_book_doc_receive_url,
            schema=self.config['WEB_SCHEMA'],
            domain=self.config['DOMAIN'],
            ogrn=ogrn,
            docs_ready_date=u"%d %s %s года" %
            (reg_date.day, get_russian_month_skl(
                reg_date.month), reg_date.year),
            docs_recipient_fio=docs_recipient_fio,
            obtain_person_fio=u"",
            service_startup=datetime.utcnow() < datetime(2015, 6, 1),
            user_id=str(batch._owner_id))
        self.logger.info(u'Sent %s to %s' %
                         ('ifns_llc_reg_success', recipient))
    def run(self):

        file_name = get_single(
            u'File name: ',
            validator=UpdateGeoDbFromFileCommand.FilenameSimpleValidator(),
            error_hint=u"File not found")

        tmp_dir = '/tmp/geo_files'
        if not os.path.exists(tmp_dir):
            os.makedirs(tmp_dir)

        subprocess.call(
            shlex.split('tar -xzvf %s -C %s' % (file_name, tmp_dir)))

        cities_file_name = os.path.join(tmp_dir, "cities.txt")
        data_file_name = os.path.join(tmp_dir, "cidr_optim.txt")

        if not os.path.exists(cities_file_name) or not os.path.exists(
                data_file_name):
            self.logger.error("missing required file(s)")
            return

        cities_file = codecs.open(cities_file_name, 'r', 'cp1251')
        cities = cities_file.read()
        cities_file.close()

        data_file = codecs.open(data_file_name, 'r', 'cp1251')
        data = data_file.read()
        data_file.close()

        os.unlink(cities_file_name)
        os.unlink(data_file_name)

        self.logger.info('processing')

        cities_objects = []

        for line in cities.split('\n'):
            line = line.strip()
            if not line:
                continue
            match = re.search(ur'(\d+)\t(.+)\t(.+)\t(.+)\t(.+)\t(.+)', line)
            if not match:
                self.logger.warn(u"Failed to process: %s" % line)
                continue

            cid = int(match.groups(0)[0])
            city_name = match.groups(0)[1]
            region = match.groups(0)[2]
            # district = match.groups(0)[3]
            lat = match.groups(0)[4]
            lng = match.groups(0)[5]
            cities_objects.append({
                'cid': cid,
                'name': city_name,
                'region': region,
                'lat': lat,
                'lng': lng
            })

        geo_ranges = []
        for line in data.split('\n'):
            line = line.strip()
            if not line:
                continue
            match = re.search(
                ur'(\d+)\t(\d+)\t(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3} - \d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\t(.+)\t(.+)',
                line)
            if not match:
                self.logger.warn(u"Failed to process: %s" % line)
                continue
            cid = match.groups(0)[4]
            if not cid.isdigit():
                continue
            geo_ranges.append({
                'start': int(match.groups(0)[0]),
                'end': int(match.groups(0)[1]),
                'cid': int(cid)
            })

        # Drop the previously imported geo data before loading the new dump
        GeoCities.query.delete()
        GeoRanges.query.delete()
        sqldb.session.commit()

        for city in cities_objects:
            new_gc = GeoCities(name=city['name'],
                               cid=city['cid'],
                               region=city['region'],
                               lat=city['lat'],
                               lng=city['lng'])
            sqldb.session.add(new_gc)
        sqldb.session.commit()

        for geo in geo_ranges:
            new_gr = GeoRanges(cid=geo['cid'],
                               start=geo['start'],
                               end=geo['end'])
            sqldb.session.add(new_gr)
        sqldb.session.commit()
Beispiel #15
    def run(self):
        self.logger.info(u"Отправка письма о регистрации компании")
        self.logger.info(u'=' * 50)

        batch_id = get_single(u'Batch id: ')

        batch = DocumentBatchDbObject.query.filter_by(id=batch_id, batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC).first()
        if not batch:
            self.logger.info(u'Batch not found')
            return

        if not batch.result_fields or 'ifns_reg_info' not in batch.result_fields:
            self.logger.info(u'Company not registered')
            return

        reg_info = batch.result_fields['ifns_reg_info']
        if reg_info.get('status') != 'registered' or not reg_info.get('reg_date') or not reg_info.get('ogrn'):
            self.logger.info(u'Company not registered')
            return

        ogrn = reg_info['ogrn']
        reg_date = datetime.strptime(reg_info['reg_date'], "%d.%m.%Y")  # assumes 'DD.MM.YYYY' string

        recipient = batch._owner.email
        if not recipient:
            self.logger.info(u'Company owner has no email')
            return

        short_name = batch.data.get('short_name', u"")
        doc_rec_type = batch.data.get('obtain_way', None)

        ifns_book_doc_receive_url = "%s://%s/ip/?id=%s" % (self.config['WEB_SCHEMA'], self.config['DOMAIN'], batch_id)
        ifns_book_doc_receive_url = utm_args(ifns_book_doc_receive_url, 'ifns_ip_reg_success', batch._owner_id) + u"#page=obtaining"
        ifns_book_doc_receive_url = UserManager.make_auth_url(ifns_book_doc_receive_url, batch._owner).get_url(self.config)

        docs_recipient_fio = u""
        if doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT:
            doc = BatchDocumentDbObject.query.filter_by(batch=batch,
                                                        document_type=DocumentTypeEnum.DT_P11001).first()
            if doc:
                founders = doc.data['founders']
                for founder in founders:
                    if founder.get('documents_recipient_type', '') != '':
                        person = founder.get('person', None)
                        if person and '_id' in person:
                            person_obj = PrivatePersonDbObject.query.filter_by(
                                id=person['_id']).scalar()
                            if person_obj:
                                pp = PrivatePerson.db_obj_to_field(person_obj)
                                if pp:
                                    docs_recipient_fio = pp.full_name
                        else:
                            company = founder.get('company', None)
                            if company:
                                company_db_object = CompanyDbObject.query.filter_by(
                                    id=company['_id']).scalar()
                                if company_db_object:
                                    cc = CompanyObject.db_obj_to_field(company_db_object)
                                    if cc and cc.general_manager and cc.general_manager.initialized:
                                        docs_recipient_fio = cc.general_manager.full_name
        elif doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT:
            doc = BatchDocumentDbObject.query.filter_by(batch=batch,
                                                        document_type=DocumentTypeEnum.DT_DOVERENNOST_OBTAIN).first()

            if doc:
                doc_obtain_person = doc.data.get('doc_obtain_person', None)
                if doc_obtain_person and '_id' in doc_obtain_person:
                    person_obj = PrivatePersonDbObject.query.filter_by(
                        id=doc_obtain_person['_id']).scalar()
                    if person_obj:
                        pp = PrivatePerson.db_obj_to_field(person_obj)
                        if pp:
                            docs_recipient_fio = pp.full_name

        send_email.send_email(recipient,
            'ifns_llc_reg_success',
            short_name=short_name,
            doc_rec_by_email=(doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_SEND_BY_MAIL),
            doc_rec_by_responsible=(
                doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT),
            ifns_book_doc_receive_url=ifns_book_doc_receive_url,
            schema=self.config['WEB_SCHEMA'],
            domain=self.config['DOMAIN'],
            ogrn=ogrn,
            docs_ready_date=u"%d %s %s года" % (
                reg_date.day, get_russian_month_skl(reg_date.month), reg_date.year),
            docs_recipient_fio=docs_recipient_fio,
            obtain_person_fio=u"",
            service_startup=datetime.utcnow() < datetime(2015, 6, 1),
            user_id=str(batch._owner_id))
        self.logger.info(u'Sent %s to %s' % ('ifns_llc_reg_success', recipient))