    def run_reconcile_411_pos(self, nb_lines_per_job=100):
        # Prepare a session shared by all jobs
        session = ConnectorSession(self._cr,
                                   self._uid,
                                   context=self.env.context)
        account_411100 = self.env['account.account'].search([('code', '=',
                                                              '411100')])

        debit_moves_domain = [('reconciled', '=', False),
                              ('account_id', '=', account_411100.id),
                              ('partner_id', '!=', False), ('debit', '>', 0),
                              ('credit', '=', 0)]
        # Split the matching move lines into chunks and enqueue one job
        # per chunk
        line_ids = self.search(debit_moves_domain, order='id').ids
        for start in range(0, len(line_ids), nb_lines_per_job):
            chunk_ids = line_ids[start:start + nb_lines_per_job]
            job_reconcile_411_pos.delay(session, 'account.move.line',
                                        chunk_ids, account_411100.id)
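
The job function targeted by delay() above is not part of this snippet. A minimal sketch of what it could look like with the connector's @job decorator, assuming session.env is available; the import path and the reconciliation body are assumptions, not the module's real logic:

from openerp.addons.connector.queue.job import job

@job
def job_reconcile_411_pos(session, model_name, line_ids, account_id):
    # Hypothetical job body: reconcile the received debit lines per partner.
    # account_id identifies account 411100; the lines are already restricted
    # to it by the domain used when the job was enqueued.
    lines = session.env[model_name].browse(line_ids)
    for partner in lines.mapped('partner_id'):
        partner_lines = lines.filtered(lambda l: l.partner_id == partner)
        partner_lines.reconcile()  # placeholder for the real matching logic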
Example #2
    def write(self, vals):
        res = super(StockMove, self).write(vals)
        picking_done = []
        # One session is enough for all the events fired below
        session = ConnectorSession(self.env.cr,
                                   SUPERUSER_ID,
                                   context=self.env.context)
        for move in self:
            if vals.get('picking_id') or (vals.get('state')
                                          and move.picking_id):
                if vals.get('state'):
                    vals_picking = {'state': vals['state']}
                else:
                    vals_picking = {'state': move.picking_id.state}
                if move.picking_id.id not in picking_done:
                    on_record_write.fire(session, 'stock.picking',
                                         move.picking_id.id, vals_picking)
                    picking_done.append(move.picking_id.id)

            if vals.get('state') and vals['state'] != 'draft':
                on_stock_move_change.fire(session, 'stock.move', move.id)
        return res
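
on_record_write and on_stock_move_change are connector events; consumers subscribe with a decorator and usually just enqueue a job so that write() stays fast. A minimal sketch of such a consumer (the handler and the job it delays are assumptions, only the event signature follows from the fire() calls above):

from openerp.addons.connector.event import on_record_write
from openerp.addons.connector.queue.job import job

@job
def export_picking_state(session, model_name, record_id, vals=None):
    # Hypothetical job: push the new picking state to the external system.
    pass

@on_record_write(model_names=['stock.picking'])
def delay_picking_export(session, model_name, record_id, vals=None):
    # Consumer fired by on_record_write.fire(...) above; it only enqueues.
    export_picking_state.delay(session, model_name, record_id, vals=vals)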
Example #3
 def import_partners(self):
     session = ConnectorSession(self.env.cr, self.env.uid,
                                context=self.env.context)
     import_start_time = datetime.now()
     for website in self:
         backend_id = website.backend_id.id
         if website.import_partners_from_date:
             from_string = fields.Datetime.from_string
             from_date = from_string(website.import_partners_from_date)
         else:
             from_date = None
         partner_import_batch.delay(
             session, 'magento.res.partner', backend_id,
             {'magento_website_id': website.magento_id,
              'from_date': from_date,
              'to_date': import_start_time})
     # Records from Magento are imported based on their `created_at`
     # date.  This date is set on Magento at the beginning of a
     # transaction, so if the import is run between the beginning and
     # the end of a transaction, the import of a record may be
     # missed.  That's why we add a small buffer back in time so that
     # any records that may have been missed are retrieved.  This also
     # means that some jobs will import the same records twice, but
     # this is not a big deal because they will be skipped when the
     # last `sync_date` is the same.
     next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER)
     next_time = fields.Datetime.to_string(next_time)
     self.write({'import_partners_from_date': next_time})
     return True
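
partner_import_batch is a @job that hands over to a batch importer, which in turn enqueues one import job per Magento partner. A rough sketch under the usual connector layout, assuming get_environment and a PartnerBatchImporter unit exist in the module (both names are assumptions here):

from openerp.addons.connector.queue.job import job

@job
def partner_import_batch(session, model_name, backend_id, filters=None):
    # Hypothetical body: resolve the backend environment, then let the batch
    # importer enqueue one import job per partner matching the filters.
    env = get_environment(session, model_name, backend_id)
    importer = env.get_connector_unit(PartnerBatchImporter)
    importer.run(filters=filters)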
Example #4
 def purchase_cutoff(self, cr, uid, ids, context=None):
     """  Process purchase order cut-offs from all warehouses """
     if not hasattr(ids, '__iter__'):
         ids = [ids]
     warehouse_obj = self.pool.get('bots.warehouse')
     purchase_obj = self.pool.get('purchase.order')
     warehouse_ids = warehouse_obj.search(cr,
                                          uid, [('backend_id', 'in', ids)],
                                          context=context)
     warehouses = warehouse_obj.browse(cr,
                                       uid,
                                       warehouse_ids,
                                       context=context)
     for warehouse in warehouses:
         if warehouse.backend_id.feat_picking_out_crossdock:
             # Find all POs passed their cut off
             cutoff = self._get_cutoff_date(cr,
                                            uid, [warehouse.backend_id.id],
                                            context=context)
             purchase_ids = purchase_obj.search(
                 cr,
                 uid, [('warehouse_id', '=', warehouse.warehouse_id.id),
                       ('bots_cross_dock', '=', True),
                       ('minimum_planned_date', '<=', cutoff),
                       ('state', '=', 'approved'),
                       ('bots_cut_off', '=', False)],
                 context=context)
             if purchase_ids:
                 session = ConnectorSession(cr, uid, context=context)
                 for purchase_id in purchase_ids:
                     purchase_cutoff.delay(session, 'bots.warehouse',
                                           warehouse.id, [purchase_id])
     return True
Example #5
 def setUp(self):
     super(TestBaseImportConnector, self).setUp()
     self.import_obj = self.registry['base_import.import']
     self.move_obj = self.registry['account.move']
     self.job_obj = self.registry['queue.job']
     self.session = ConnectorSession(self.cr, self.uid)
     self.storage = OpenERPJobStorage(self.session)
Example #6
    def create_payments(self, ps_orders):
        _logger.debug("CREATE PAYMENTS")
        _logger.debug(ps_orders)

        session = ConnectorSession(self.env.cr, self.env.uid,
                                   context=self.env.context)
        for order in self.browse(ps_orders):
            _logger.debug("CHECK for order %s with id %s"
                          % (order.name, order.openerp_id.id))

            backend_id = order.backend_id
            env = get_environment(session, 'prestashop.sale.order',
                                  backend_id.id)
            _logger.debug(env)

            adapter = env.get_connector_unit(SaleOrderAdapter)
            ps_order = adapter.read(order.prestashop_id)
            # Force the import rules check
            rules = env.get_connector_unit(SaleImportRule)
            rules.check(ps_order)

            paid_amount = rules._get_paid_amount(ps_order)
            if paid_amount and paid_amount >= 0.0:
                order.openerp_id.automatic_payment(float(paid_amount))
Example #7
    def _delay_post_marked(self, cr, uid, eta=None, context=None):
        """Create a job for every move marked for posting.

        If some moves already have a job, they are skipped.

        """

        if context is None:
            context = {}

        session = ConnectorSession(cr, uid, context=context)

        move_ids = self.search(cr,
                               uid, [
                                   ('to_post', '=', True),
                                   ('post_job_uuid', '=', False),
                                   ('state', '=', 'draft'),
                               ],
                               context=context)
        name = self._name

        # maybe not creating too many dictionaries will make us a bit faster
        values = {'post_job_uuid': None}
        _logger.info(u'{0} jobs for posting moves have been created.'.format(
            len(move_ids)))

        for move_id in move_ids:
            job_uuid = validate_one_move.delay(session, name, move_id, eta=eta)
            values['post_job_uuid'] = job_uuid
            self.write(cr, uid, [move_id], values)
            cr.commit()  # pylint:disable=invalid-commit
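
The validate_one_move job posts a single marked move; only its signature follows from the delay() call above, the body below is a guess at a minimal implementation using the same old-API registry access as the method:

from openerp.addons.connector.queue.job import job

@job
def validate_one_move(session, model_name, move_id):
    # Hypothetical job body: post the journal entry if it is still marked.
    move_obj = session.pool.get(model_name)
    move = move_obj.browse(session.cr, session.uid, move_id,
                           context=session.context)
    if move.to_post and move.state == 'draft':
        move_obj.post(session.cr, session.uid, [move_id],
                      context=session.context)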
Example #8
    def pass_former_member(self, cr, uid, ids, context=None):
        """
        Pass to former Member for all partner
        If concerned partner number is > to the ir.parameter value then
        call connector to delay this work
        **Note**
        ir.parameter or `WORKER_PIVOT`
        """

        try:
            parameter_obj = self.pool['ir.config_parameter']
            worker_pivot = int(
                parameter_obj.get_param(cr, uid, 'worker_pivot', WORKER_PIVOT))
        except (ValueError, TypeError):
            worker_pivot = WORKER_PIVOT
        for wiz in self.browse(cr, uid, ids, context=context):
            partner_ids = eval(wiz.concerned_partner_ids)
            if len(partner_ids) > worker_pivot:
                session = ConnectorSession(cr, uid, context=context)
                pass_former_member_action.delay(session,
                                                self._name,
                                                partner_ids,
                                                context=context)
            else:
                do_pass_former_member_action(self.pool['res.partner'],
                                             cr,
                                             uid,
                                             partner_ids,
                                             context=context)
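
The delayed branch can simply mirror the synchronous one, since a @job-decorated function remains an ordinary callable. A sketch of pass_former_member_action under that assumption:

from openerp.addons.connector.queue.job import job

@job
def pass_former_member_action(session, model_name, partner_ids, context=None):
    # Hypothetical job body: run the same helper as the synchronous branch,
    # but from a queue worker instead of the request thread.
    do_pass_former_member_action(session.pool['res.partner'],
                                 session.cr, session.uid,
                                 partner_ids, context=context)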
 def _refresh_model(self, ids):
     uid = self.env.context.get('delegate_user', self.env.user.id)
     session = ConnectorSession(self._cr, uid, self._context)
     refresh_materialized_view_job.delay(session,
                                         'materialized.sql.view',
                                         ids,
                                         description="")
Example #10
    def send_mail(self, cr, uid, ids, context=None):
        """
        Send mail by asynchronous way depending parameters
        """
        if context is None:
            context = {}
        if context.get('active_ids'):
            if not context.get('not_async'):
                try:
                    parameter_obj = self.pool['ir.config_parameter']
                    worker_pivot = int(
                        parameter_obj.get_param(cr, uid, 'mail_worker_pivot',
                                                WORKER_PIVOT))
                except (ValueError, TypeError):
                    worker_pivot = WORKER_PIVOT
                if len(context['active_ids']) > worker_pivot:
                    res_ids = context['active_ids']
                    vals = self.read(cr, uid, ids, [], context=context)[0]
                    self._prepare_vals(vals)

                    session = ConnectorSession(cr, uid, context=context)
                    description = _('Send Mail "%s" (Chunk Process)') %\
                        (vals['subject'])
                    prepare_mailings.delay(session,
                                           self._name,
                                           vals,
                                           res_ids,
                                           description=description,
                                           context=context)
                    return

        super(mail_compose_message, self).send_mail(cr,
                                                    uid,
                                                    ids,
                                                    context=context)
 def export_getresponse_contact_delay(self):
     """ Export Contact Binding (personemailgruppe) for enabled Campaigns (zgruppedetail) to GetResponse """
     session = ConnectorSession(self.env.cr,
                                self.env.uid,
                                context=self.env.context)
     for binding in self:
         export_record.delay(session, binding._name, binding.id)
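
export_record is the connector's generic export job: it resolves the backend environment of the binding and runs its exporter unit. A sketch of that pattern; get_environment and the GetResponseExporter unit name are assumptions:

from openerp.addons.connector.queue.job import job

@job
def export_record(session, model_name, binding_id, fields=None):
    # Look up the binding, build the connector environment for its backend,
    # and run the exporter unit (sketch, not the library's exact code).
    binding = session.env[model_name].browse(binding_id)
    env = get_environment(session, model_name, binding.backend_id.id)
    exporter = env.get_connector_unit(GetResponseExporter)
    return exporter.run(binding_id, fields=fields)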
Example #12
    def generate_reference(self, cr, uid, ids, context=None):
        """
        Generate reference for all partners
        If concerned partner number is > to the ir.parameter value then
        call connector to delay this work
        **Note**
        ir.parameter or `WORKER_PIVOT`
        """

        try:
            parameter_obj = self.pool['ir.config_parameter']
            worker_pivot = int(
                parameter_obj.get_param(cr, uid, 'worker_pivot', WORKER_PIVOT))
        except (ValueError, TypeError):
            worker_pivot = WORKER_PIVOT
        for wiz in self.browse(cr, uid, ids, context=context):
            partner_ids = eval(wiz.partner_ids)
            session = ConnectorSession(cr, uid, context=context)
            if len(partner_ids) > worker_pivot:
                generate_reference_action.delay(session,
                                                self._name,
                                                partner_ids,
                                                wiz.reference_date,
                                                context=context)
            else:
                generate_reference_action(session,
                                          self._name,
                                          partner_ids,
                                          wiz.reference_date,
                                          context=context)
Example #13
 def invoice_validate(self):
     res = super(AccountInvoice, self).invoice_validate()
     session = ConnectorSession(self.env.cr, self.env.uid,
                                context=self.env.context)
     for record_id in self.ids:
         on_invoice_validated.fire(session, self._name, record_id)
     return res
Example #14
 def pabi_action(self, process_xml_id, job_desc, func_name, **kwargs):
     self.ensure_one()
     # Enqueue
     if self.async_process:
         session = ConnectorSession(self._cr, self._uid, self._context)
         return_action = self._context.get('return_action', False)
         uuid = pabi_action_job.delay(session,
                                      self._name,
                                      func_name,
                                      kwargs,
                                      return_action,
                                      description=job_desc)
         job = self.env['queue.job'].search([('uuid', '=', uuid)], limit=1)
         # Process Name
         job.process_id = self.env.ref(process_xml_id)
         self.write({'state': 'get', 'uuid': uuid})
     else:
          # Call the prepared function from the extended class
         (records, result_msg) = getattr(self, func_name)(**kwargs)
         self.write({
             'state': 'get',
             'res_model': records._name,
             'res_ids': str(records.ids),
             'result_msg': result_msg
         })
     return {
         'type': 'ir.actions.act_window',
         'res_model': self._name,
         'view_mode': 'form',
         'view_type': 'form',
         'res_id': self.id,
         'views': [(False, 'form')],
         'target': 'new',
     }
Example #15
    def post_entries(self):

        # Do not allow more than one batch in draft state
        draft_batches = self.env['pabi.asset.depre.batch'].search(
            [('state', '=', 'draft')])
        if len(draft_batches) >= 2:
            raise UserError(
                _('Please check the Asset Depre. Batch menu: there must '
                  'not be more than one batch in Draft state.'))

        self.ensure_one()
        if self._context.get('job_uuid', False):  # Called from @job
            return self.action_post_entries()
        # Enqueue
        if self.async_process:
            if self.job_id:
                message = _('Post Asset Depre. Batch')
                action = self.env.ref('pabi_utils.action_my_queue_job')
                raise RedirectWarning(message, action.id, _('Go to My Jobs'))
            session = ConnectorSession(self._cr, self._uid, self._context)
            description = '%s - Post Asset Depre. Batch' % self.name
            uuid = action_post_asset_depre_batch.delay(
                session, self._name, self.id, description=description)
            job = self.env['queue.job'].search([('uuid', '=', uuid)], limit=1)
            # Process Name
            job.process_id = self.env.ref('pabi_async_process.'
                                          'post_asset_depre_batch')
        else:
            return self.action_post_entries()
Example #16
 def action_generate_budget_plan(self):
     if self._context.get('job_uuid', False):  # Called from @job
         return \
             super(GenerateBudgetPlan, self).action_generate_budget_plan()
     # Enqueue
     if self.async_process:
         session = ConnectorSession(self._cr, self._uid, self._context)
         chart_view = dict(CHART_VIEW_LIST).get(self.chart_view, False)
         fiscalyear = self.fiscalyear_id.name
         description = 'Generate Budget Plans - %s - %s' \
             % (chart_view, fiscalyear)
         uuid = action_generate_budget_plan.delay(session,
                                                  self._name,
                                                  self.id,
                                                  description=description)
         # Checking for running task, use the same signature as delay()
         task_name = "%s('%s', %s)" % ('action_generate_budget_plan',
                                       self._name, self.id)
         self._check_queue(task_name,
                           desc=description,
                           type='always',
                           uuid=uuid)
     else:
         return \
             super(GenerateBudgetPlan, self).action_generate_budget_plan()
    def export_product_categories(self, cr, uid, ids, context=None):
        """
        Export product categories to Drupal
        """
        context = context or {}
        session = ConnectorSession(cr, uid, context=context)

        categ_obj = self.pool.get('product.category')
        bind_obj = self.pool.get('drupal.product.category')

        backend = self.browse(cr, uid, ids, context=context)[0]
        vocabulary = backend.main_product_category_id

        record_ids = categ_obj.search(cr, uid,
                                      [('id', 'child_of', [vocabulary.id])])

        # Exclude the category mapped to the Drupal vocabulary
        record_ids = [x for x in record_ids if x != vocabulary.id]

        # Create missing binding records and send them to Drupal.
        # Usually this is a one-time operation, so we can afford to wait
        # and block the user interface while the export is running.
        for record in categ_obj.browse(cr, uid, record_ids, context=context):
            # Refresh the record cache because the binding may already
            # have been created during export dependency resolution.
            record.refresh()
            if not record.drupal_bind_ids:
                _ = bind_obj.create(cr,
                                    uid, {
                                        'openerp_id': record.id,
                                        'backend_id': backend.id
                                    },
                                    context=context)
        return True
 def setUp(self):
     super(test_backend_register, self).setUp()
     self.service = 'calamitorium'
     self.version = '1.14'
     self.parent = Backend(self.service)
     self.backend = Backend(parent=self.parent, version=self.version)
     self.session = ConnectorSession(self.cr, self.uid)
Example #19
    def _price_changed(self, cr, uid, ids, vals, context=None):
        """ Fire the ``on_product_price_changed`` on all the variants of
        the template if the price if the product could have changed.

        If one of the field used in a sale pricelist item has been
        modified, we consider that the price could have changed.

        There is no guarantee that's the price actually changed,
        because it depends on the pricelists.
        """
        if context is None:
            context = {}
        type_obj = self.pool['product.price.type']
        price_fields = type_obj.sale_price_fields(cr, uid, context=context)
        # restrict the fields to the template ones only, so if
        # the write has been done on product.product, we won't
        # update all the variants when a price field of a single
        # variant has been changed
        tmpl_fields = [field for field in vals if field in self._columns]
        if any(field in price_fields for field in tmpl_fields):
            product_obj = self.pool['product.product']
            session = ConnectorSession(cr, uid, context=context)
            product_ids = product_obj.search(
                cr, uid, [('product_tmpl_id', 'in', ids)], context=context,
            )
            # when the write is done on product.product, avoid
            # firing the event twice
            if context.get('from_product_ids'):
                product_ids = list(
                    set(product_ids) - set(context['from_product_ids'])
                )
            for prod_id in product_ids:
                on_product_price_changed.fire(
                    session, product_obj._name, prod_id
                )
 def test_change_context_uninitialized(self):
     """ Change the context on a session not initialized with a context """
     session = ConnectorSession(self.cr, self.uid)
     test_key = 'test_key'
     with session.change_context({test_key: 'value'}):
         self.assertEqual(session.context.get('test_key'), 'value')
     self.assertNotIn(test_key, session.context)
Example #21
    def _cancel_jobs(self, cr, uid, context=None):
        """Find moves where the mark has been removed and cancel the jobs.

        For the moves that are posted already it's too late: we skip them.

        """

        if context is None:
            context = {}

        session = ConnectorSession(cr, uid, context=context)
        storage = OpenERPJobStorage(session)

        move_ids = self.search(cr,
                               uid, [
                                   ('to_post', '=', False),
                                   ('post_job_uuid', '!=', False),
                                   ('state', '=', 'draft'),
                               ],
                               context=context)

        for move in self.browse(cr, uid, move_ids, context=context):
            job_rec = storage.load(move.post_job_uuid)
            if job_rec.state in (u'pending', u'enqueued'):
                job_rec.set_done(result=_(
                    u'Task set to Done because the user unmarked the move'))
                storage.store(job_rec)
Example #22
 def import_carriers(self, cr, uid, ids, context=None):
     if not hasattr(ids, '__iter__'):
         ids = [ids]
     session = ConnectorSession(cr, uid, context=context)
     for backend_id in ids:
         import_carriers.delay(session, backend_id, priority=10)
     return True
Example #23
 def setUp(self):
     super(TestJobChannels, self).setUp()
     self.function_model = self.env['queue.job.function']
     self.channel_model = self.env['queue.job.channel']
     self.job_model = self.env['queue.job']
     self.root_channel = self.env.ref('connector.channel_root')
     self.session = ConnectorSession(self.cr, self.uid, context={})
Example #24
 def import_payment_methods(self, cr, uid, ids, context=None):
     if not hasattr(ids, '__iter__'):
         ids = [ids]
     session = ConnectorSession(cr, uid, context=context)
     for backend_record in self.browse(cr, uid, ids, context=context):
         import_batch.delay(session, 'account.payment.mode', backend_record.id)
     return True
 def setUp(self):
     super(test_import_address_book, self).setUp()
     self.backend_model = self.registry('magento.backend')
     self.session = ConnectorSession(self.cr, self.uid)
     self.session.context['__test_no_commit'] = True
     self.model = self.registry('magento.res.partner')
     self.address_model = self.registry('magento.address')
     backend_ids = self.backend_model.search(
         self.cr, self.uid,
         [('name', '=', 'Test Magento Address book')])
     if backend_ids:
         self.backend_id = backend_ids[0]
     else:
         data_obj = self.registry('ir.model.data')
         warehouse_id = data_obj.get_object_reference(
             self.cr, self.uid, 'stock', 'warehouse0')[1]
         self.backend_id = self.backend_model.create(
             self.cr,
             self.uid,
             {'name': 'Test Magento Address book',
              'version': '1.7',
              'location': 'http://anyurl',
              'username': '******',
              'warehouse_id': warehouse_id,
              'password': '******'})
 def _scheduler_export_catalog(self, cr, uid, ids=None, context=None):
     if ids and not hasattr(ids, '__iter__'):
         ids = [ids]
     models = [
         'magento.product.category',
         'magento.product.product',
         'magento.product.image',
     ]
     session = ConnectorSession(cr, uid, context=context)
     for model_name in models:
         _logger.info('Create Job for exporting model %s', model_name)
         obj = self.pool[model_name]
         domain = self._get_domain_to_export(cr,
                                             uid,
                                             ids,
                                             model_name,
                                             context=context)
         binding_ids = obj.search(cr, uid, domain, context=context)
         for binding_id in binding_ids:
             delay_export(session,
                          model_name,
                          binding_id,
                          vals={'sync_state': 'complete'})
         domain = self._get_domain_to_partial_export(cr,
                                                     uid,
                                                     ids,
                                                     model_name,
                                                     context=context)
         binding_ids = obj.search(cr, uid, domain, context=context)
         for binding_id in binding_ids:
             partial_export.delay(session,
                                  model_name,
                                  binding_id,
                                  vals={'sync_state': 'partial'})
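
Note that delay_export is called directly here: it is a plain helper that enqueues the job itself, whereas partial_export.delay enqueues explicitly. A sketch of such a helper, assuming it forwards to a generic export_record job and honours a connector_no_export context flag (both assumptions):

def delay_export(session, model_name, record_id, vals=None):
    # Hypothetical helper: skip exports triggered while an import is running,
    # then enqueue the real export job with the list of changed fields.
    if session.context.get('connector_no_export'):
        return
    fields = list(vals) if vals else None
    export_record.delay(session, model_name, record_id, fields=fields)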
Example #27
 def _import_from_date(self, model, from_date_field):
     session = ConnectorSession(self.env.cr, self.env.uid,
                                context=self.env.context)
     import_start_time = datetime.now()
     for backend in self:
         backend.check_magento_structure()
         from_date = getattr(backend, from_date_field)
         if from_date:
             from_date = fields.Datetime.from_string(from_date)
         else:
             from_date = None
         import_batch.delay(session, model,
                            backend.id,
                            filters={'from_date': from_date,
                                     'to_date': import_start_time})
     # Records from Magento are imported based on their `created_at`
     # date.  This date is set on Magento at the beginning of a
     # transaction, so if the import is run between the beginning and
     # the end of a transaction, the import of a record may be
     # missed.  That's why we add a small buffer back in time so that
     # any records that may have been missed are retrieved.  This also
     # means that some jobs will import the same records twice, but
     # this is not a big deal because they will be skipped when the
     # last `sync_date` is the same.
     next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER)
     next_time = fields.Datetime.to_string(next_time)
     self.write({from_date_field: next_time})
Example #28
 def copy_quotation(self, cr, uid, ids, context=None):
     if isinstance(ids, (tuple, list)):
         assert len(ids) == 1, ("1 ID expected, "
                                "got the following list %s" % (ids,))
     if context is None:
         context = {}
     else:
         context = context.copy()
     context['__copy_from_quotation'] = True
     result = super(sale_order, self).copy_quotation(cr, uid, ids,
                                                     context=context)
     # link binding of the canceled order to the new order, so the
     # operations done on the new order will be sync'ed with Magento
     new_id = result['res_id']
     binding_obj = self.pool['magento.sale.order']
     binding_ids = binding_obj.search(cr, uid,
                                      [('openerp_id', '=', ids[0])],
                                      context=context)
     binding_obj.write(cr, uid, binding_ids,
                       {'openerp_id': new_id},
                       context=context)
     session = ConnectorSession(cr, uid, context=context)
     for binding in binding_obj.browse(cr, uid, binding_ids,
                                       context=context):
         # the sales' status on Magento is likely 'canceled'
         # so we will export the new status (pending, processing, ...)
         export_state_change.delay(
             session,
             'magento.sale.order',
             binding.id,
             description="Reopen sales order %s" % binding.magento_id)
     return result
Example #29
 def action_get_report(self):
     self.ensure_one()
     # Enqueue
     if self.async_process:
         Job = self.env['queue.job']
         session = ConnectorSession(self._cr, self._uid, self._context)
         description = 'Excel Report - %s' % (self._name, )
         uuid = get_report_job.delay(session,
                                     self._name,
                                     self.id,
                                     description=description,
                                     lang=session.context.get(
                                         'lang', False))
         job = Job.search([('uuid', '=', uuid)], limit=1)
         # Process Name
         job.process_id = self.env.ref('pabi_utils.xlsx_report')
         self.write({'state': 'get', 'uuid': uuid})
     else:
         out_file, out_name = self.get_report()
         self.write({'state': 'get', 'data': out_file, 'name': out_name})
     return {
         'type': 'ir.actions.act_window',
         'res_model': self._name,
         'view_mode': 'form',
         'view_type': 'form',
         'res_id': self.id,
         'views': [(False, 'form')],
         'target': 'new',
     }
Example #30
 def setUp(self):
     super(SetUpMagentoBase, self).setUp()
     self.backend_model = self.registry('magento.backend')
     self.session = ConnectorSession(self.cr, self.uid)
     data_model = self.registry('ir.model.data')
     self.get_ref = partial(data_model.get_object_reference, self.cr,
                            self.uid)
     backend_ids = self.backend_model.search(
         self.cr, self.uid, [('name', '=', 'Test Magento')])
     if backend_ids:
         self.backend_id = backend_ids[0]
     else:
         __, warehouse_id = self.get_ref('stock', 'warehouse0')
         self.backend_id = self.backend_model.create(
             self.cr, self.uid, {
                 'name': 'Test Magento',
                 'version': '1.7',
                 'location': 'http://anyurl',
                 'username': '******',
                 'warehouse_id': warehouse_id,
                 'password': '******'
             })
         # payment method needed to import a sale order
         __, workflow_id = self.get_ref('sale_automatic_workflow',
                                        'manual_validation')
         __, journal_id = self.get_ref('account', 'check_journal')
         self.registry('payment.method').create(
             self.cr, self.uid, {
                 'name': 'checkmo',
                 'workflow_process_id': workflow_id,
                 'import_rule': 'always',
                 'days_before_cancel': 0,
                 'journal_id': journal_id
             })