def check_tables_exist(self, cr):
    """
    Verify that all tables are present and try to initialize those that
    are missing.
    """
    # This monkey patch prevents the _logger from writing warning and error
    # messages during an "update all" when the model is a bi-view-generated
    # model.
    env = api.Environment(cr, SUPERUSER_ID, {})
    table2model = {
        model._table: name
        for name, model in env.items()
        if not model._abstract and not _bi_view(name)  # here is the patch
    }
    missing_tables = set(table2model).difference(
        existing_tables(cr, table2model))

    if missing_tables:
        missing = {table2model[table] for table in missing_tables}
        _logger.warning("Models have no table: %s.", ", ".join(missing))
        # recreate missing tables following model dependencies
        deps = {name: model._depends for name, model in env.items()}
        for name in topological_sort(deps):
            if name in missing:
                _logger.info("Recreate table of model %s.", name)
                env[name].init()
        # check again, and log errors if tables are still missing
        missing_tables = set(table2model).difference(
            existing_tables(cr, table2model))
        for table in missing_tables:
            _logger.error("Model %s has no table.", table2model[table])
def check_tables_exist(self, cr):
    """
    Verify that all tables are present and try to initialize those that
    are missing.
    """
    env = odoo.api.Environment(cr, SUPERUSER_ID, {})
    table2model = {
        model._table: name
        for name, model in env.items()
        if not model._abstract
    }
    missing_tables = set(table2model).difference(
        existing_tables(cr, table2model))

    if missing_tables:
        missing = {table2model[table] for table in missing_tables}
        _logger.warning("Models have no table: %s.", ", ".join(missing))
        # recreate missing tables following model dependencies
        deps = {name: model._depends for name, model in env.items()}
        for name in topological_sort(deps):
            if name in missing:
                _logger.info("Recreate table of model %s.", name)
                env[name].init()
        # check again, and log errors if tables are still missing
        missing_tables = set(table2model).difference(
            existing_tables(cr, table2model))
        for table in missing_tables:
            _logger.error("Model %s has no table.", table2model[table])
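# All of the snippets in this section rely on a ``topological_sort`` helper that
# takes a mapping from each node to its direct dependencies and returns the
# nodes in an order where every dependency precedes its dependents. The real
# helper lives in odoo.tools; the standalone sketch below (a minimal depth-first
# version, not the upstream implementation) only illustrates the assumed
# contract.
def topological_sort_sketch(deps):
    """Order the keys of ``deps`` so that each key appears after all of its
    dependencies (sketch only; tie-breaking and cycle handling may differ
    from the actual odoo.tools.topological_sort).
    """
    result = []
    visited = set()

    def visit(node):
        if node in visited:
            return
        visited.add(node)
        for dep in deps.get(node, ()):
            if dep in deps:  # dependencies outside the mapping are ignored
                visit(dep)
        result.append(node)

    for node in deps:
        visit(node)
    return result

# e.g. topological_sort_sketch({'web': ['base'], 'base': [], 'sale': ['web']})
# returns ['base', 'web', 'sale'] -- dependencies first.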
def test_full(args):
    """ Test full install/uninstall/reinstall cycle for all modules """
    with odoo.api.Environment.manage():
        with odoo.registry(args.database).cursor() as cr:
            env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {})

            def valid(module):
                return not (
                    module.name in BLACKLIST
                    or module.name.startswith(IGNORE)
                    or module.state in ('installed', 'uninstallable')
                )

            modules = env['ir.module.module'].search([]).filtered(valid)

            # order modules in topological order
            modules = modules.browse(topological_sort({
                module.id: module.dependencies_id.depend_id.ids
                for module in modules
            }))
            modules_todo = [(module.id, module.name) for module in modules]

    resume = args.resume_at
    skip = set(args.skip.split(',')) if args.skip else set()
    for module_id, module_name in modules_todo:
        if module_name == resume:
            resume = None

        if resume or module_name in skip:
            install(args.database, module_id, module_name)
        else:
            cycle(args.database, module_id, module_name)
def init_models(self, cr, model_names, context):
    """ Initialize a list of models (given by their name). Call methods
        ``_auto_init``, ``init``, and ``_auto_end`` on each model to create
        or update the database tables supporting the models.

        The ``context`` may contain the following items:
         - ``module``: the name of the module being installed/updated, if any;
         - ``update_custom_fields``: whether custom fields should be updated.
    """
    if 'module' in context:
        _logger.info('module %s: creating or updating database tables', context['module'])

    context = dict(context, todo=[])
    env = odoo.api.Environment(cr, SUPERUSER_ID, context)
    models = [env[model_name] for model_name in model_names]

    for model in models:
        model._auto_init()
        model.init()

    cr.commit()
    for model in models:
        model._auto_end()
    cr.commit()

    for _, func, args in sorted(context['todo']):
        func(*args)

    if models:
        # OpenUpgrade: Don't trigger workflows on recomputation
        set_workflow_org = odoo.models.BaseModel.step_workflow
        odoo.models.BaseModel.step_workflow = lambda *args, **kwargs: None
        # end OpenUpgrade
        models[0].recompute()
        # OpenUpgrade: reenable workflow triggers
        odoo.models.BaseModel.step_workflow = set_workflow_org
        # end OpenUpgrade
    cr.commit()

    # make sure all tables are present
    missing = [
        name
        for name, model in env.items()
        if not model._abstract and not model._table_exist()
    ]
    if missing:
        _logger.warning("Models have no table: %s.", ", ".join(missing))
        # recreate missing tables following model dependencies
        deps = {name: model._depends for name, model in env.items()}
        for name in topological_sort(deps):
            if name in missing:
                _logger.info("Recreate table of model %s.", name)
                env[name].init()
        cr.commit()
        # check again, and log errors if tables are still missing
        for name, model in env.items():
            if not model._abstract and not model._table_exist():
                _logger.error("Model %s has no table.", name)
def module_installed(environment):
    # Candidate modules: the current heuristic is the /static dir
    loadable = list(http.addons_manifest)

    # Retrieve database installed modules
    # TODO The following code should move to ir.module.module.list_installed_modules()
    Modules = environment['ir.module.module']
    domain = [('state', '=', 'installed'), ('name', 'in', loadable)]
    modules = OrderedDict(
        (module.name, module.dependencies_id.mapped('name'))
        for module in Modules.search(domain)
    )

    sorted_modules = topological_sort(modules)
    return sorted_modules
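# Purely illustrative usage of the pattern above, with made-up module names:
# the OrderedDict maps each installed module to the names of its dependencies,
# so the sort yields an order in which every module comes after its
# dependencies (assuming odoo.tools.topological_sort behaves as sketched
# earlier).
from collections import OrderedDict

example_modules = OrderedDict([
    ('web', ['base']),
    ('base', []),
    ('sale', ['web']),
])
# expected: ['base', 'web', 'sale'] -- 'base' before 'web', 'web' before 'sale'
print(topological_sort(example_modules))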
def commits(self, pr):
    """ Returns a PR's commits oldest first (that's what GH does &
    is what we want)
    """
    commits = list(self.commits_lazy(pr))
    # map shas to the position the commit *should* have
    idx = {
        c: i
        for i, c in enumerate(topological_sort({
            c['sha']: [p['sha'] for p in c['parents']]
            for c in commits
        }))
    }
    return sorted(commits, key=lambda c: idx[c['sha']])
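# A quick worked example (fabricated shas) of the ordering trick above: commits
# arrive in arbitrary order from the API, and sorting by the topological index
# of their shas restores parent-before-child order.
example_commits = [
    {'sha': 'c3', 'parents': [{'sha': 'c2'}]},
    {'sha': 'c1', 'parents': []},
    {'sha': 'c2', 'parents': [{'sha': 'c1'}]},
]
idx = {
    sha: i
    for i, sha in enumerate(topological_sort({
        c['sha']: [p['sha'] for p in c['parents']]
        for c in example_commits
    }))
}
ordered = sorted(example_commits, key=lambda c: idx[c['sha']])
# ordered shas should now be ['c1', 'c2', 'c3'], oldest first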
def init_models(self, cr, model_names, context):
    """ Initialize a list of models (given by their name). Call methods
        ``_auto_init``, ``init``, and ``_auto_end`` on each model to create
        or update the database tables supporting the models.

        The ``context`` may contain the following items:
         - ``module``: the name of the module being installed/updated, if any;
         - ``update_custom_fields``: whether custom fields should be updated.
    """
    if 'module' in context:
        _logger.info('module %s: creating or updating database tables', context['module'])

    context = dict(context, todo=[])
    env = odoo.api.Environment(cr, SUPERUSER_ID, context)
    models = [env[model_name] for model_name in model_names]

    for model in models:
        model._auto_init()
        model.init()

    cr.commit()
    for model in models:
        model._auto_end()
    cr.commit()

    for _, func, args in sorted(context['todo']):
        func(*args)

    if models:
        models[0].recompute()
    cr.commit()

    # make sure all tables are present
    missing = [
        name
        for name, model in env.items()
        if not model._abstract and not model._table_exist()
    ]
    if missing:
        _logger.warning("Models have no table: %s.", ", ".join(missing))
        # recreate missing tables following model dependencies
        deps = {name: model._depends for name, model in env.items()}
        for name in topological_sort(deps):
            if name in missing:
                _logger.info("Recreate table of model %s.", name)
                env[name].init()
        cr.commit()
        # check again, and log errors if tables are still missing
        for name, model in env.items():
            if not model._abstract and not model._table_exist():
                _logger.error("Model %s has no table.", name)
def modules(self):
    loadable = list(http.addons_manifest)
    env = api.Environment(http.request.cr, SUPERUSER_ID, {})
    records = env['ir.module.module'].search([
        ('state', '=', 'installed'),
        ('name', 'in', loadable),
    ])
    result = OrderedDict(
        (record.name, record.dependencies_id.mapped('name'))
        for record in records)
    content = json.dumps(
        topological_sort(result), sort_keys=True, indent=4, cls=ResponseEncoder)
    return Response(content, content_type='application/json;charset=utf-8', status=200)
def init_models(self, cr, model_names, context):
    """ Initialize a list of models (given by their name). Call methods
        ``_auto_init`` and ``init`` on each model to create or update the
        database tables supporting the models.

        The ``context`` may contain the following items:
         - ``module``: the name of the module being installed/updated, if any;
         - ``update_custom_fields``: whether custom fields should be updated.
    """
    if 'module' in context:
        _logger.info('module %s: creating or updating database tables', context['module'])

    env = odoo.api.Environment(cr, SUPERUSER_ID, context)
    models = [env[model_name] for model_name in model_names]

    for model in models:
        model._auto_init()
        model.init()

    while self._post_init_queue:
        func = self._post_init_queue.popleft()
        func()

    if models:
        models[0].recompute()

    # make sure all tables are present
    table2model = {
        model._table: name
        for name, model in env.items()
        if not model._abstract
    }
    missing_tables = set(table2model).difference(
        existing_tables(cr, table2model))

    if missing_tables:
        missing = {table2model[table] for table in missing_tables}
        _logger.warning("Models have no table: %s.", ", ".join(missing))
        # recreate missing tables following model dependencies
        deps = {name: model._depends for name, model in env.items()}
        for name in topological_sort(deps):
            if name in missing:
                _logger.info("Recreate table of model %s.", name)
                env[name].init()
        # check again, and log errors if tables are still missing
        missing_tables = set(table2model).difference(
            existing_tables(cr, table2model))
        for table in missing_tables:
            _logger.error("Model %s has no table.", table2model[table])
def field_sequence(self):
    """ Return a function mapping a field to an integer. The value of a
        field is guaranteed to be strictly greater than the value of the
        field's dependencies.
    """
    # map fields on their dependents
    dependents = {
        field: set(dep for dep, _ in model._field_triggers[field] if dep != field)
        for model in self.values()
        for field in model._fields.values()
    }
    # sort them topologically, and associate a sequence number to each field
    mapping = {
        field: num
        for num, field in enumerate(reversed(topological_sort(dependents)))
    }
    return mapping.get
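# Why the reversed order works: ``dependents`` maps each field to the fields
# that depend on it, so a topological sort lists a field's dependents before
# the field itself; reversing that list and numbering it therefore gives every
# field a strictly greater number than its dependencies. Tiny illustration with
# plain strings standing in for field objects (not actual Odoo fields):
example_dependents = {
    'amount': {'total'},   # 'total' depends on 'amount'
    'total': set(),
}
example_mapping = {
    field: num
    for num, field in enumerate(reversed(topological_sort(example_dependents)))
}
# expected: example_mapping['total'] > example_mapping['amount'],
# e.g. {'amount': 0, 'total': 1}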
def test_topological_sort(self):
    random.shuffle(self.mods)
    modules = [(k, sample(self.mods[:i])) for i, k in enumerate(self.mods)]
    random.shuffle(modules)
    ms = dict(modules)

    seen = set()
    sorted_modules = topological_sort(ms)
    for module in sorted_modules:
        deps = ms[module]
        self.assertGreaterEqual(
            seen, set(deps),
            'Module %s (index %d), '
            'missing dependencies %s from loaded modules %s' % (
                module, sorted_modules.index(module), deps, seen
            ))
        seen.add(module)
def test_topological_sort(self):
    random.shuffle(self.mods)
    modules = [(k, sample(self.mods[:i])) for i, k in enumerate(self.mods)]
    random.shuffle(modules)
    ms = dict(modules)

    seen = set()
    sorted_modules = topological_sort(ms)
    for module in sorted_modules:
        deps = ms[module]
        self.assertGreaterEqual(
            seen, set(deps),
            'Module %s (index %d), '
            'missing dependencies %s from loaded modules %s' % (
                module, sorted_modules.index(module), deps, seen
            ))
        seen.add(module)
def init_models(self, cr, model_names, context):
    """ Initialize a list of models (given by their name). Call methods
        ``_auto_init`` and ``init`` on each model to create or update the
        database tables supporting the models.

        The ``context`` may contain the following items:
         - ``module``: the name of the module being installed/updated, if any;
         - ``update_custom_fields``: whether custom fields should be updated.
    """
    if 'module' in context:
        _logger.info('module %s: creating or updating database tables', context['module'])

    env = odoo.api.Environment(cr, SUPERUSER_ID, context)
    models = [env[model_name] for model_name in model_names]

    for model in models:
        model._auto_init()
        model.init()

    while self._post_init_queue:
        func = self._post_init_queue.popleft()
        func()

    if models:
        models[0].recompute()

    # make sure all tables are present
    table2model = {model._table: name for name, model in env.items() if not model._abstract}
    missing_tables = set(table2model).difference(existing_tables(cr, table2model))

    if missing_tables:
        missing = {table2model[table] for table in missing_tables}
        _logger.warning("Models have no table: %s.", ", ".join(missing))
        # recreate missing tables following model dependencies
        deps = {name: model._depends for name, model in env.items()}
        for name in topological_sort(deps):
            if name in missing:
                _logger.info("Recreate table of model %s.", name)
                env[name].init()
        # check again, and log errors if tables are still missing
        missing_tables = set(table2model).difference(existing_tables(cr, table2model))
        for table in missing_tables:
            _logger.error("Model %s has no table.", table2model[table])
def check_tables_exist(self, cr):
    """
    Verify that all tables are present and try to initialize those that
    are missing.
    """
    env = odoo.api.Environment(cr, SUPERUSER_ID, {})
    table2model = {model._table: name for name, model in env.items() if not model._abstract}
    missing_tables = set(table2model).difference(existing_tables(cr, table2model))

    if missing_tables:
        missing = {table2model[table] for table in missing_tables}
        _logger.warning("Models have no table: %s.", ", ".join(missing))
        # recreate missing tables following model dependencies
        deps = {name: model._depends for name, model in env.items()}
        for name in topological_sort(deps):
            if name in missing:
                _logger.info("Recreate table of model %s.", name)
                env[name].init()
        # check again, and log errors if tables are still missing
        missing_tables = set(table2model).difference(existing_tables(cr, table2model))
        for table in missing_tables:
            _logger.error("Model %s has no table.", table2model[table])
def generate_module(module, data):
    """ Return an iterator of pairs (filename, content) to put in the exported
        module. Returned filenames are local to the module directory.
        Only exports models in MODELS_TO_EXPORT.
        Groups exported data by model in separated files.
    """
    get_xmlid = xmlid_getter()

    # Generate xml files and yield them
    filenames = []      # filenames to include in the module to export
    depends = set()     # module dependencies of the module to export
    skipped = []        # non-exported field values
    for model in MODELS_TO_EXPORT:
        # determine records to export for model
        model_data = data.filtered(lambda r: r.model == model)
        records = data.env[model].browse(model_data.mapped('res_id')).exists()
        if not records:
            continue

        # retrieve module and inter-record dependencies
        fields = [records._fields[name] for name in FIELDS_TO_EXPORT[model]]
        record_deps = OrderedDict.fromkeys(records, records.browse())
        for record in records:
            xmlid = get_xmlid(record)
            if xmlid.split('.')[0] != module.name:
                # data depends on a record from another module
                depends.add(xmlid.split('.')[0])
            for field in fields:
                rel_records = get_relations(record, field)
                if not rel_records:
                    continue
                for rel_record in rel_records:
                    rel_xmlid = get_xmlid(rel_record, check=False)
                    if rel_xmlid and rel_xmlid.split('.')[0] != module.name:
                        # data depends on a record from another module
                        depends.add(rel_xmlid.split('.')[0])
                if rel_records._name == model:
                    # fill in inter-record dependencies
                    record_deps[record] |= rel_records

        # sort records to satisfy inter-record dependencies
        records = topological_sort(record_deps)

        # create the XML containing the generated record nodes
        nodes = []
        for record in records:
            record_node, record_skipped = generate_record(record, get_xmlid)
            nodes.append(record_node)
            skipped.extend(record_skipped)
        root = E.odoo(*nodes)
        xml = etree.tostring(root, pretty_print=True, encoding='UTF-8', xml_declaration=True)

        # add the XML file to the archive
        filename = os.path.join('data', '%s.xml' % model.replace('.', '_'))
        yield (filename, xml)
        filenames.append(filename)

    # yield a warning file to notify that some data haven't been exported
    if skipped:
        content = [
            "The following relational data haven't been exported because they either refer",
            "to a model that Studio doesn't export, or have no XML id:",
            "",
        ]
        for xmlid, field, value in skipped:
            content.append("Record: %s" % xmlid)
            content.append("Model: %s" % field.model_name)
            content.append("Field: %s" % field.name)
            content.append("Type: %s" % field.type)
            content.append("Value: %s (%s)" % (value, ', '.join("%r" % v.display_name for v in value)))
            content.append("")
        yield ('warning.txt', "\n".join(content))

    # add 'web_studio' to the list of dependencies of the exported module
    # because the 'mail_thread' field used to identify models inheriting from
    # 'mail_thread' is defined in web_studio.
    # DO NOT FORWARDPORT PAST SAAS-14
    depends.add('web_studio')

    # yield files '__manifest__.py' and '__init__.py'
    yield ('__manifest__.py', """# -*- coding: utf-8 -*-
{
    'name': %r,
    'version': '1.0',
    'category': 'Studio',
    'description': %s,
    'author': %r,
    'depends': [%s
    ],
    'data': [%s
    ],
    'application': %s,
    'license': %r,
}
""" % (
        module.display_name,
        'u"""\n%s\n"""' % module.description,
        module.author,
        ''.join("\n %r," % d for d in sorted(depends)),
        ''.join("\n %r," % f for f in filenames),
        module.application,
        module.license,
    ))
    yield ('__init__.py', '')
def generate_module(module, data):
    """ Return an iterator of pairs (filename, content) to put in the exported
        module. Returned filenames are local to the module directory.
        Only exports models in MODELS_TO_EXPORT.
        Groups exported data by model in separated files.
        The content of the files is yielded as an encoded bytestring (utf-8).
    """
    get_xmlid = xmlid_getter()

    # Generate xml files and yield them
    filenames = []  # filenames to include in the module to export
    # depends contains module dependencies of the module to export; as a result
    # we add web_studio by default to deter importing studio customizations
    # in community databases
    depends = set([u'web_studio'])
    skipped = []    # non-exported field values
    for model in MODELS_TO_EXPORT:
        # determine records to export for model
        model_data = data.filtered(lambda r: r.model == model)
        records = data.env[model].browse(model_data.mapped('res_id')).exists()
        if not records:
            continue

        # retrieve module and inter-record dependencies
        fields = [records._fields[name] for name in get_fields_to_export(records)]
        record_deps = OrderedDict.fromkeys(records, records.browse())
        for record in records:
            xmlid = get_xmlid(record)
            module_name = xmlid.split('.', 1)[0]
            if module_name != module.name:
                # data depends on a record from another module
                depends.add(module_name)
            for field in fields:
                rel_records = get_relations(record, field)
                if not rel_records:
                    continue
                for rel_record in rel_records:
                    rel_xmlid = get_xmlid(rel_record, check=False)
                    if rel_xmlid and rel_xmlid.split('.')[0] != module.name:
                        # data depends on a record from another module
                        depends.add(rel_xmlid.split('.')[0])
                if rel_records._name == model:
                    # fill in inter-record dependencies
                    record_deps[record] |= rel_records
            if record._name == 'ir.model.fields' and record.ttype == 'monetary':
                # add a dependency on the currency field
                rel_record = (record._get(record.model, 'currency_id')
                              or record._get(record.model, 'x_currency_id'))
                rel_xmlid = get_xmlid(rel_record, check=False)
                if rel_xmlid and rel_xmlid.split('.')[0] != module.name:
                    # data depends on a record from another module
                    depends.add(rel_xmlid.split('.')[0])
                record_deps[record] |= rel_record

        # sort records to satisfy inter-record dependencies
        records = topological_sort(record_deps)

        # create the XML containing the generated record nodes
        nodes = []
        for record in records:
            xmlid = get_xmlid(record)
            if xmlid.split('.', 1)[0] != '__export__':
                record_node, record_skipped = generate_record(record, get_xmlid)
                nodes.append(record_node)
                skipped.extend(record_skipped)
        root = E.odoo(*nodes)
        xml = etree.tostring(root, pretty_print=True, encoding='UTF-8', xml_declaration=True)

        # add the XML file to the archive
        filename = '/'.join(['data', '%s.xml' % model.replace('.', '_')])
        yield (filename, xml)
        filenames.append(filename)

    # yield a warning file to notify that some data haven't been exported
    if skipped:
        content = [
            "The following relational data haven't been exported because they either refer",
            "to a model that Studio doesn't export, or have no XML id:",
            "",
        ]
        for xmlid, field, value in skipped:
            content.append("Record: %s" % xmlid)
            content.append("Model: %s" % field.model_name)
            content.append("Field: %s" % field.name)
            content.append("Type: %s" % field.type)
            content.append("Value: %s (%s)" % (value, ', '.join("%r" % v.display_name for v in value)))
            content.append("")
        yield ('warning.txt', "\n".join(content))

    # yield files '__manifest__.py' and '__init__.py'
    manifest = """# -*- coding: utf-8 -*-
{
    'name': %r,
    'version': %r,
    'category': 'Studio',
    'description': %s,
    'author': %r,
    'depends': [%s
    ],
    'data': [%s
    ],
    'application': %s,
    'license': %r,
}
""" % (
        module.display_name,
        module.installed_version,
        'u"""\n%s\n"""' % module.description,
        module.author,
        ''.join("\n %r," % d for d in sorted(depends - {'__export__'})),
        ''.join("\n %r," % f for f in filenames),
        module.application,
        module.license,
    )
    manifest = manifest.encode('utf-8')

    yield ('__manifest__.py', manifest)
    yield ('__init__.py', b'')
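# The same pattern drives the inter-record ordering above: ``record_deps`` maps
# each exported record to the records of the same model it refers to, so the
# sort emits referenced records before the records that point at them. Plain
# strings stand in for records here (purely illustrative):
from collections import OrderedDict

example_record_deps = OrderedDict([
    ('child_view', {'parent_view'}),   # child_view inherits from parent_view
    ('parent_view', set()),
])
# expected: ['parent_view', 'child_view'] -- the generated XML then defines a
# record before any record that refers to it
print(topological_sort(example_record_deps))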