Example #1
File: ir_module.py  Project: 3iData-nc/odoo
    def import_zipfile(self, module_file, force=False):
        if not module_file:
            raise Exception(_("No file sent."))
        if not zipfile.is_zipfile(module_file):
            raise UserError(_('File is not a zip file!'))

        success = []
        errors = dict()
        module_names = []
        with zipfile.ZipFile(module_file, "r") as z:
            for zf in z.filelist:
                if zf.file_size > MAX_FILE_SIZE:
                    raise UserError(_("File '%s' exceed maximum allowed file size") % zf.filename)

            with tempdir() as module_dir:
                z.extractall(module_dir)
                dirs = [d for d in os.listdir(module_dir) if os.path.isdir(opj(module_dir, d))]
                for mod_name in dirs:
                    module_names.append(mod_name)
                    try:
                        # assert mod_name.startswith('theme_')
                        path = opj(module_dir, mod_name)
                        self.import_module(mod_name, path, force=force)
                        success.append(mod_name)
                    except Exception as e:
                        _logger.exception('Error while importing module')
                        errors[mod_name] = exception_to_unicode(e)
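The pattern above (validate the upload, extract into a throwaway directory, then treat every top-level folder as a module) can be reproduced with the standard library alone. A minimal sketch, using tempfile.TemporaryDirectory in place of Odoo's tempdir helper and a caller-supplied handle_module callback standing in for import_module:

import os
import tempfile
import zipfile

def import_zip_sketch(zip_path, handle_module):
    # Refuse anything that is not a real zip archive.
    if not zipfile.is_zipfile(zip_path):
        raise ValueError("Not a zip file: %s" % zip_path)

    imported, errors = [], {}
    with zipfile.ZipFile(zip_path, "r") as archive:
        # The temporary directory is cleaned up automatically on exit.
        with tempfile.TemporaryDirectory() as workdir:
            archive.extractall(workdir)
            for name in os.listdir(workdir):
                path = os.path.join(workdir, name)
                if not os.path.isdir(path):
                    continue
                try:
                    handle_module(name, path)  # e.g. Odoo's import_module
                    imported.append(name)
                except Exception as exc:
                    errors[name] = str(exc)
    return imported, errors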
Example #2
def dump_db(db_name, stream, backup_format="zip"):
    if backup_format == "zip":
        res = dump_db.super(db_name, False, backup_format)
        with osutil.tempdir() as dump_dir:
            with zipfile.ZipFile(res, "r") as zip:
                zip.extractall(dump_dir)
                files = file.get_store_path(db_name)
                if os.path.exists(files):
                    shutil.copytree(files, os.path.join(dump_dir, "files"))
            if stream:
                osutil.zip_dir(
                    dump_dir,
                    stream,
                    include_dir=False,
                    fnct_sort=lambda file_name: file_name != "dump.sql",
                )
            else:
                t = tempfile.TemporaryFile()
                osutil.zip_dir(
                    dump_dir,
                    t,
                    include_dir=False,
                    fnct_sort=lambda file_name: file_name != "dump.sql",
                )
                t.seek(0)
                return t
    else:
        return dump_db.super(db_name, stream, backup_format)
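The fnct_sort=lambda file_name: file_name != "dump.sql" argument passed to osutil.zip_dir makes dump.sql the first entry written into the archive. A comparable stdlib-only sketch (rezip_directory and its sort_first parameter are illustrative names, not Odoo API):

import os
import zipfile

def rezip_directory(src_dir, stream, sort_first="dump.sql"):
    # Collect every file below src_dir as an archive-relative path.
    entries = []
    for root, _dirs, files in os.walk(src_dir):
        for name in files:
            full = os.path.join(root, name)
            entries.append(os.path.relpath(full, src_dir))
    # False sorts before True, so sort_first ends up at the front.
    entries.sort(key=lambda rel: os.path.basename(rel) != sort_first)
    with zipfile.ZipFile(stream, "w", zipfile.ZIP_DEFLATED) as archive:
        for rel in entries:
            archive.write(os.path.join(src_dir, rel), arcname=rel)

Called with a tempfile.TemporaryFile() as stream and seeking back to 0 afterwards, this yields the same kind of return value as the else branch above.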
Example #3
    def import_zipfile(self, module_file, force=False):
        if not module_file:
            raise Exception(_("No file sent."))
        if not zipfile.is_zipfile(module_file):
            raise UserError(_('File is not a zip file!'))

        success = []
        errors = dict()
        module_names = []
        with zipfile.ZipFile(module_file, "r") as z:
            for zf in z.filelist:
                if zf.file_size > MAX_FILE_SIZE:
                    raise UserError(
                        _("File '%s' exceed maximum allowed file size") %
                        zf.filename)

            with tempdir() as module_dir:
                z.extractall(module_dir)
                dirs = [
                    d for d in os.listdir(module_dir)
                    if os.path.isdir(opj(module_dir, d))
                ]
                for mod_name in dirs:
                    module_names.append(mod_name)
                    try:
                        # assert mod_name.startswith('theme_')
                        path = opj(module_dir, mod_name)
                        self.import_module(mod_name, path, force=force)
                        success.append(mod_name)
                    except Exception as e:
                        _logger.exception('Error while importing module')
                        errors[mod_name] = exception_to_unicode(e)
Example #4
def restore_db(db, dump_file, copy=False):
    res = restore_db.super(db, dump_file, copy)
    with osutil.tempdir() as dump_dir:
        if zipfile.is_zipfile(dump_file):
            with zipfile.ZipFile(dump_file, "r") as zip:
                files = [m for m in zip.namelist() if m.startswith("files/")]
                if files:
                    zip.extractall(dump_dir, files)
                    files_path = os.path.join(dump_dir, "files")
                    shutil.move(files_path, file.get_store_path(db))
    return res
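Example #4 only pulls the files/ members out of the dump before moving them into the filestore; restricting extractall to a member list is standard zipfile behaviour. A hedged sketch of that step on its own (store_path is an illustrative argument, not the real file.get_store_path helper):

import os
import shutil
import tempfile
import zipfile

def restore_filestore_sketch(dump_path, store_path):
    # Plain SQL dumps carry no filestore, so only act on zip dumps.
    if not zipfile.is_zipfile(dump_path):
        return
    with tempfile.TemporaryDirectory() as workdir:
        with zipfile.ZipFile(dump_path, "r") as archive:
            members = [m for m in archive.namelist() if m.startswith("files/")]
            if members:
                # extractall accepts a subset of member names to unpack.
                archive.extractall(workdir, members)
                shutil.move(os.path.join(workdir, "files"), store_path)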
Example #5
    def create_zip_report(self, docids, data):
        context = dict(self.env.context)
        model = self._context.get("active_model", False)
        with tempdir() as dump_dir:
            for action in self._context.get('active_report_ids'):
                #_logger.info("REPORT %s::%s::%s" % (action, action.report_id, action.sub_model))
                report_id = action.report_id
                sub_model = action.sub_model
                if sub_model:
                    report = getattr(
                        self.with_context(dict(
                            context, active_model=model))._get_objs_for_report(
                                docids, data), sub_model)
                    data_report = report_id.with_context(
                        context,
                        active_model=report._name).report_action(report)
                    data_file_to_save, ext = report_id.render(
                        data_report['context'].get('active_ids'),
                        data=data_report['data'])
                else:
                    report = self.with_context(
                        dict(context,
                             active_model=model))._get_objs_for_report(
                                 docids, data)
                    data_report = report_id.with_context(
                        context, active_model=model).report_action(report)
                    data_file_to_save, ext = report_id.render(
                        data_report['context'].get('active_ids'),
                        data=data_report['data'] or {})

                #_logger.info("REPORTS %s::%s::%s" % (report, self._get_report_charset_from_name(report_id.print_report_name), data_file))
                report_name = safe_eval(report_id.print_report_name, {
                    'objects': report,
                    'time': time,
                    'multi': True
                })
                if len(data_file_to_save) > 0:
                    if not isinstance(data_file_to_save, (bytes, bytearray)):
                        data_file_to_save = data_file_to_save.encode(
                            self._get_report_charset_from_name(
                                report_id.print_report_name))
                    with open(
                            os.path.join(dump_dir, report_name) + "." + ext,
                            'wb') as fh:
                        fh.write(data_file_to_save)

            zip_file = tempfile.TemporaryFile()
            zip_dir(dump_dir, zip_file, include_dir=False)
            zip_file.seek(0)
        return zip_file.read(), 'zip'
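The temp-file round trip at the end (write the archive, seek to 0, read the bytes) can also be done entirely in memory with io.BytesIO; a small sketch of that variant, where report_files is an assumed mapping of archive names to already-rendered bytes:

import io
import zipfile

def zip_reports_in_memory(report_files):
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, "w", zipfile.ZIP_DEFLATED) as archive:
        for name, payload in report_files.items():
            # writestr adds a member directly from an in-memory payload.
            archive.writestr(name, payload)
    return buffer.getvalue(), 'zip'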
Example #6
    def import_master_bank_statement(self):
        ResPartnerBank = self.env['res.partner.bank']
        stage = self.env['helpdesk.stage'].search([('name', '=', 'Solved')],
                                                  limit=1)
        for ticket in self:
            attachments = self.env['ir.attachment'].search([
                ('res_id', '=', ticket.id),
                ('res_model', '=', 'helpdesk.ticket')
            ])
            if not attachments:
                continue

            acc_number = ticket.name.split("[")[1][:-1]
            bank_account_id = ResPartnerBank.search(
                [('acc_number', 'in', (acc_number, '00000' + acc_number))],
                limit=1).id
            journal_id = self.env['account.journal'].search(
                [('bank_account_id', '=', bank_account_id)], limit=1).id
            for attachment in attachments:
                fp = BytesIO()
                fp.write(base64.b64decode(attachment.datas))
                if not zipfile.is_zipfile(fp):
                    continue
                if zipfile.is_zipfile(fp):
                    with zipfile.ZipFile(fp, "r") as z:
                        with tempdir() as module_dir:
                            import odoo.modules.module as module
                            try:
                                module.ad_paths.append(module_dir)
                                z.extractall(module_dir)
                                for d in os.listdir(module_dir):
                                    extract_file = z.open(d)
                                    new_rec = self.env[
                                        'master.account.bank.statement.import'].with_context(
                                            journal_id=journal_id).create({
                                                'data_file':
                                                base64.b64encode(
                                                    extract_file.read()),
                                                'filename':
                                                'test'
                                            })
                                    new_rec.with_context(
                                        ticket=ticket).import_file()
                                    ticket.write({'stage_id': stage.id})
                            except Exception as e:
                                ticket.message_post(body=str(e))
                            finally:
                                module.ad_paths.remove(module_dir)
Example #7
def dump_db(db_name, stream, backup_format='zip'):
    if backup_format == 'zip':
        filestore_paths = []
        connection = sql_db.db_connect(db_name)
        with closing(connection.cursor()) as cr:
            env = api.Environment(cr, SUPERUSER_ID, {})
            settings = env['muk_dms.settings'].search([('save_type', '=',
                                                        'file')])
            for setting in settings:
                filestore_paths.append({
                    'complete_base_path': setting.complete_base_path,
                    'base_path': setting.base_path,
                    'db_name': db_name
                })
        res = dump_db.super(db_name, False, backup_format)
        with osutil.tempdir() as dump_dir:
            with zipfile.ZipFile(res, 'r') as zip:
                zip.extractall(dump_dir)
            with open(os.path.join(dump_dir, 'dms_system_files.json'),
                      'w') as fh:
                dms_system_files = []
                for path in filestore_paths:
                    filestore_id = uuid.uuid4().hex
                    dms_system_files.append({'id': filestore_id, 'data': path})
                    if os.path.exists(path['complete_base_path']):
                        shutil.copytree(
                            path['complete_base_path'],
                            os.path.join(dump_dir, 'dms_system_files',
                                         filestore_id))
                json.dump(dms_system_files, fh, indent=4)
            if stream:
                osutil.zip_dir(
                    dump_dir,
                    stream,
                    include_dir=False,
                    fnct_sort=lambda file_name: file_name != 'dump.sql')
            else:
                t = tempfile.TemporaryFile()
                osutil.zip_dir(
                    dump_dir,
                    t,
                    include_dir=False,
                    fnct_sort=lambda file_name: file_name != 'dump.sql')
                t.seek(0)
                return t
    else:
        return dump_db.super(db_name, stream, backup_format)
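Example #7 writes a dms_system_files.json manifest that maps a generated id to every copied filestore, which is what the matching restore in Example #9 reads back. The manifest-writing step in isolation, as a stdlib sketch over the same dictionary layout:

import json
import os
import shutil
import uuid

def add_filestores_to_dump(dump_dir, filestore_paths):
    # filestore_paths: list of dicts holding at least 'complete_base_path'.
    manifest = []
    for path in filestore_paths:
        filestore_id = uuid.uuid4().hex
        manifest.append({'id': filestore_id, 'data': path})
        if os.path.exists(path['complete_base_path']):
            # Copy the whole filestore tree under its generated id.
            shutil.copytree(
                path['complete_base_path'],
                os.path.join(dump_dir, 'dms_system_files', filestore_id))
    with open(os.path.join(dump_dir, 'dms_system_files.json'), 'w') as fh:
        json.dump(manifest, fh, indent=4)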
Example #8
    def import_zipfile(self, module_file, force=False):
        if not module_file:
            raise Exception(_("No file sent."))
        if not zipfile.is_zipfile(module_file):
            raise UserError(_('Only zip files are supported.'))

        success = []
        errors = dict()
        module_names = []
        with zipfile.ZipFile(module_file, "r") as z:
            for zf in z.filelist:
                if zf.file_size > MAX_FILE_SIZE:
                    raise UserError(
                        _("File '%s' exceed maximum allowed file size") %
                        zf.filename)

            with tempdir() as module_dir:
                import odoo.modules.module as module
                try:
                    module.ad_paths.append(module_dir)
                    z.extractall(module_dir)
                    dirs = [
                        d for d in os.listdir(module_dir)
                        if os.path.isdir(opj(module_dir, d))
                    ]
                    for mod_name in dirs:
                        module_names.append(mod_name)
                        try:
                            # assert mod_name.startswith('theme_')
                            path = opj(module_dir, mod_name)
                            if self._import_module(mod_name, path,
                                                   force=force):
                                success.append(mod_name)
                        except Exception as e:
                            _logger.exception('Error while importing module')
                            errors[mod_name] = exception_to_unicode(e)
                finally:
                    module.ad_paths.remove(module_dir)
        r = ["Successfully imported module '%s'" % mod for mod in success]
        for mod, error in errors.items():
            r.append(
                "Error while importing module '%s'.\n\n %s \n Make sure those modules are installed and try again."
                % (mod, error))
        return '\n'.join(r), module_names
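The per-member MAX_FILE_SIZE check that several of these examples run before extracting is a basic zip-bomb guard: the uncompressed size declared for every member is inspected before anything is written to disk. A standalone sketch (the 100 MiB limit is an arbitrary illustration, not the value Odoo uses):

import zipfile

MAX_FILE_SIZE = 100 * 1024 * 1024  # 100 MiB, illustrative only

def check_zip_member_sizes(zip_path, limit=MAX_FILE_SIZE):
    with zipfile.ZipFile(zip_path, "r") as archive:
        for info in archive.infolist():
            # file_size is the uncompressed size recorded in the archive.
            if info.file_size > limit:
                raise ValueError(
                    "File '%s' exceeds maximum allowed file size" % info.filename)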
Example #9
def restore_db(db, dump_file, copy=False):
    res = restore_db.super(db, dump_file, copy)
    with osutil.tempdir() as dump_dir:
        if zipfile.is_zipfile(dump_file):
            with zipfile.ZipFile(dump_file, 'r') as zip:
                dms_system_files = [
                    m for m in zip.namelist()
                    if m.startswith('dms_system_files/')
                ]
                zip.extractall(dump_dir,
                               ['dms_system_files.json'] + dms_system_files)
                if dms_system_files:
                    system_file_path = os.path.join(dump_dir,
                                                    'dms_system_files')
                    with open(os.path.join(dump_dir,
                                           'dms_system_files.json')) as file:
                        data = json.load(file)
                        for info in data:
                            shutil.move(
                                os.path.join(system_file_path, info['id']),
                                os.path.join(info['data']['base_path'], db))
    return res
Example #10
    def cashfront_import_wizard(self):
        if not self.fdata:
            raise UserError(_("Select zip file to import"))

        journal_file_list = []
        sale_file_list = []
        with tempdir() as dump_dir:
            archive = zipfile.ZipFile(io.BytesIO(base64.b64decode(self.fdata)))
            files = archive.namelist()
            archive.extractall(dump_dir, files)
            for file in files:
                if '/Jornal/' in file:
                    if '.txt' in file:
                        journal_file_list.append(os.path.join(dump_dir, file))
                if '/Sales/' in file:
                    if '.txt' in file:
                        sale_file_list.append(os.path.join(dump_dir, file))

            fdata_sale = sorted(sale_file_list, key=lambda x: self.sort_by_date(x))
            name_sale_path = fdata_sale[0].split('/')
            path = '/'.join(name_sale_path[:len(name_sale_path) - 1])
            fdata_journal = sorted([x for x in journal_file_list if path + '/' + x.split('/')[len(name_sale_path) - 1] in fdata_sale],
                        key=lambda x: self.sort_by_date(x))

            for fdata_sale, fdata_journal in zip(fdata_sale, fdata_journal):
                _logger.info('{} | {}'.format(fdata_sale, fdata_journal))
                start_at = str(datetime.strptime(self.get_date(fdata_sale), '%d.%m.%Y'))
                active_id = self.env.context.get('active_id')
                posconfig = self.env['pos.config'].browse(active_id)
                session = self.env['pos.session'].create({
                    'config_id': posconfig.id,
                    'start_at': start_at,
                })

                fdata_sale = open(fdata_sale, encoding='cp1251').readlines()
                fdata_journal = open(fdata_journal, encoding='cp1251').readlines()

                self.cashfront_import(fdata_sale, fdata_journal, session)
Example #11
File: ir_module.py  Project: a8992030/odoo
    def import_zipfile(self, module_file, force=False):
        if not module_file:
            raise Exception(_("No file sent."))
        if not zipfile.is_zipfile(module_file):
            raise UserError(_('File is not a zip file!'))

        success = []
        errors = dict()
        module_names = []
        with zipfile.ZipFile(module_file, "r") as z:
            for zf in z.filelist:
                if zf.file_size > MAX_FILE_SIZE:
                    raise UserError(_("File '%s' exceed maximum allowed file size") % zf.filename)

            with tempdir() as module_dir:
                import odoo.modules as addons
                try:
                    addons.module.ad_paths.append(module_dir)
                    z.extractall(module_dir)
                    dirs = [d for d in os.listdir(module_dir) if os.path.isdir(opj(module_dir, d))]
                    for mod_name in dirs:
                        module_names.append(mod_name)
                        try:
                            # assert mod_name.startswith('theme_')
                            path = opj(module_dir, mod_name)
                            self._import_module(mod_name, path, force=force)
                            success.append(mod_name)
                        except Exception as e:
                            _logger.exception('Error while importing module')
                            errors[mod_name] = exception_to_unicode(e)
                finally:
                    addons.module.ad_paths.remove(module_dir)
        r = ["Successfully imported module '%s'" % mod for mod in success]
        for mod, error in pycompat.items(errors):
            r.append("Error while importing module '%s': %r" % (mod, error))
        return '\n'.join(r), module_names
Example #12
    def create_merged_report(self, docids, data):
        context = dict(self.env.context)
        model = self._context.get("active_model", False)
        type = 'qweb-html'
        ext = set([])
        filenames = {}
        with tempdir() as dump_dir:
            for action in self._context.get('active_report_ids').sorted(
                    lambda r: r.sequence):
                #_logger.info("REPORT %s::%s::%s::%s::%s::%s" % (action, self.with_context(dict(context, active_model=model))._get_objs_for_report(docids, data), docids, data, action.report_id and action.report_id.name or action.report_id, action.sub_model))
                report_id = action.report_id
                sub_model = action.sub_model
                if sub_model:
                    report = getattr(
                        self.with_context(dict(
                            context, active_model=model))._get_objs_for_report(
                                docids, data), sub_model)
                    data_report = report_id.with_context(
                        context,
                        active_model=report._name).report_action(report)
                    data_file = report_id.render(
                        data_report['context'].get('active_ids'),
                        data=data_report['data'])
                else:
                    report = self.with_context(
                        dict(context,
                             active_model=model))._get_objs_for_report(
                                 docids, data)
                    data_report = action.with_context(
                        context, active_model=model).report_action(report)
                    data_file = report_id.render(
                        data_report['context'].get('active_ids'),
                        data=data_report['data'] or {})

                if len(data_file[0]) == 0:
                    return '', type
                #_logger.info("REPORTS %s::%s::%s::%s" % (report, data_file, self._get_report_charset_from_name(report_id.print_report_name), data_report))
                ext.update([data_file[1]])
                report_name = safe_eval(report_id.print_report_name, {
                    'objects': report,
                    'time': time,
                    'multi': True
                })
                with open(
                        os.path.join(dump_dir, report_name) + "." +
                        data_file[1], 'wb') as fh:
                    filenames[action.sequence] = os.path.join(
                        dump_dir, report_name) + "." + data_file[1]
                    if data_file[1] in ('csv', 'txt'):
                        fh.write(data_file[0].encode(
                            self._get_report_charset_from_name(
                                report_id.print_report_name)))
                    else:
                        fh.write(data_file[0])

            report_name_pdf = False
            merged_file = tempfile.NamedTemporaryFile(delete=False)
            filenames = dict(sorted(filenames.items(),
                                    key=lambda kv: kv[0])).values()
            #_logger.info("MERGED %s" % list(filenames))
            if filenames and any([x for x in ext if x == "pdf"]):
                # start for pdf merging
                type = "qweb-pdf"
                merger = PdfFileMerger()
                for report_name in filenames:
                    if not report_name.lower().endswith(".pdf"):
                        old_file = report_name
                        report_name_pdf = tempfile.NamedTemporaryFile()
                        pdfkit.from_file(
                            old_file,
                            report_name_pdf.name,
                            options={
                                'encoding':
                                self._get_report_charset_from_name(
                                    report_id.print_report_name)
                            })
                        # merge the converted PDF, not the original source file
                        report_name = report_name_pdf.name
                    merger.append(open(report_name, 'rb'))
                with merged_file as outfile:
                    merger.write(outfile)
                    outfile.seek(0)
                    data = outfile.read()
            elif filenames and len([x for x in ext if x in ('csv', 'txt')
                                    ]) == len(ext):
                type = list(ext)[0]
                with merged_file as outfile:
                    for filename in filenames:
                        if filename == merged_file.name:
                            # don't want to copy the output into the output
                            continue
                        with open(filename, 'rb') as report_name:
                            #_logger.info("FILES %s:%s" % (filename, filenames))
                            shutil.copyfileobj(report_name, outfile)
                    outfile.seek(0)
                    data = outfile.read()
            os.unlink(merged_file.name)
            if report_name_pdf and os.path.isfile(report_name_pdf.name):
                os.unlink(report_name_pdf.name)
        return data, type
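Example #12 leans on PyPDF2's PdfFileMerger (the pre-3.0 API; the class is named PdfMerger from PyPDF2 3.0 on) to concatenate the individual reports. Stripped of the Odoo context, the merge step looks roughly like this sketch:

import tempfile
from PyPDF2 import PdfFileMerger

def merge_pdfs(pdf_paths):
    # append() accepts file paths as well as open binary file objects.
    merger = PdfFileMerger()
    for path in pdf_paths:
        merger.append(path)
    with tempfile.TemporaryFile() as out:
        merger.write(out)
        merger.close()
        out.seek(0)
        return out.read()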