# Beispiel #1 (snippet separator from the scraped example collection)
# 0
class TestDocumentParseState(common.TransactionCase):

    """Integration tests for the parse state of an attachment binding."""

    def setUp(self):
        super(TestDocumentParseState, self).setUp()
        self.backend_record = Mock()
        self.session = ConnectorSession(self.cr, self.uid)
        self.model_name = 'ir.attachment.binding'
        self.backend_id = self.session.create(
            'file_import.backend', {
                'name': 'Test File Import',
                'ftp_host': 'localhost',
                'ftp_user': '******',
                'ftp_password': '******',
                'ftp_input_folder': 'to_openerp',
                'ftp_failed_folder': 'from_openerp',
            })

        self.env = Environment(self.backend_record, self.session,
                               self.model_name)

        self.policy = CSVParsePolicy(self.env)

    def _create_document(self):
        """Create an attachment binding from the two_chunks.csv fixture.

        Both tests need the same document; factoring the creation out
        removes the duplicated fixture code.
        """
        with open(expand_path('two_chunks.csv')) as input_file:
            file_content = input_file.read()

        return self.session.create(
            'ir.attachment.binding', {
                'datas': base64.b64encode(file_content),
                'datas_fname': 'two_chunks.csv',
                'name': 'two_chunks.csv',
                'backend_id': self.backend_id,
            })

    def test_new_attachment_binding_state_pending(self):
        """A new file should have state pending."""
        document_id = self._create_document()

        document = self.session.browse('ir.attachment.binding', document_id)

        # assertEqual: assertEquals is a deprecated alias in unittest
        self.assertEqual(document.parse_state, 'pending')

    def test_parse_one_state_done(self):
        """If a file is parsed, the state of the file should be 'done'."""
        document_id = self._create_document()

        document = self.session.browse('ir.attachment.binding', document_id)
        self.policy.parse_one(document_id)

        self.assertEqual(document.parse_state, 'done')
class TestFTPGetPolicyWithOE(common.TransactionCase):
    """Integrated Tests for the FTP Get Policy that do use OpenERP."""
    def setUp(self):
        super(TestFTPGetPolicyWithOE, self).setUp()
        self.backend_record = Mock()
        self.session = ConnectorSession(self.cr, self.uid)
        self.model_name = 'ir.attachment.binding'
        self.backend_model = self.registry('file_import.backend')
        self.backend_id = self.backend_model.create(
            self.cr, self.uid, {
                'name': 'Test File Import',
                'ftp_host': 'localhost',
                'ftp_user': '******',
                'ftp_password': '******',
                'ftp_input_folder': 'to_openerp',
                'ftp_failed_folder': 'from_openerp',
            }, self.session.context)
        self.env = Environment(self.backend_record, self.session,
                               self.model_name)
        self.policy = FTPFileGetterPolicy(self.env)

    def test_create_one_file(self):
        """Fetching a file should create a binding holding its content."""
        actual_attachment_b_id = create_one_file(self.session, self.model_name,
                                                 self.backend_id,
                                                 'to_openerp/s1.csv',
                                                 'to_openerp/s1.md5')

        actual_attachment_b_browse = self.session.browse(
            self.model_name, actual_attachment_b_id)
        # assertEqual: assertEquals is a deprecated alias in unittest
        self.assertEqual(actual_attachment_b_browse.datas_fname, 's1.csv')

        with open(expand_path('two_chunks.csv')) as expected_file_like:
            self.assertEqual(
                base64.b64decode(actual_attachment_b_browse.datas),
                expected_file_like.read())

    def test_create_file_uniq(self):
        """Test idempotency of file creation.

        We check that if the job to create a file is executed many times,
        just one file is created, without raising exceptions.

        """

        actual_attachment_b_id = create_one_file(self.session, self.model_name,
                                                 self.backend_id,
                                                 'to_openerp/s1.csv',
                                                 'to_openerp/s1.md5')

        actual_attachment_b_id_second_time = create_one_file(
            self.session, self.model_name, self.backend_id,
            'to_openerp/s1.csv', 'to_openerp/s1.md5')

        # First call returns a fresh id; the duplicate call must return None.
        self.assertIsInstance(actual_attachment_b_id, (int, long))
        self.assertIs(actual_attachment_b_id_second_time, None)
class TestIntCSVParse(common.TransactionCase):
    """Test that parsing a file creates unique chunks in the database."""
    def setUp(self):
        super(TestIntCSVParse, self).setUp()
        self.backend_record = Mock()
        self.session = ConnectorSession(self.cr, self.uid)
        self.model_name = 'ir.attachment.binding'
        self.backend_id = self.session.create(
            'file_import.backend', {
                'name': 'Test File Import',
                'ftp_host': 'localhost',
                'ftp_user': '******',
                'ftp_password': '******',
                'ftp_input_folder': 'to_openerp',
                'ftp_failed_folder': 'from_openerp',
            })

        self.env = Environment(self.backend_record, self.session,
                               self.model_name)

        self.policy = CSVParsePolicy(self.env)

        # One attachment binding built from the fixture file, shared by
        # both tests.
        with open(expand_path('two_chunks.csv')) as input_file:
            file_content = input_file.read()

        self.document_id = self.session.create(
            'ir.attachment.binding', {
                'datas': base64.b64encode(file_content),
                'datas_fname': 'two_chunks.csv',
                'name': 'two_chunks.csv',
                'backend_id': self.backend_id,
            })

        self.document = self.session.browse('ir.attachment.binding',
                                            self.document_id)

    def test_parse_document_create_chunks(self):
        """Parsing a file should create 2 chunks in the database."""

        self.policy.parse_one(self.document_id)
        chunk_ids = self.session.search(
            'file.chunk.binding',
            [('attachment_binding_id', '=', self.document_id)])
        # assertEqual: assertEquals is a deprecated alias in unittest
        self.assertEqual(len(chunk_ids), 2)

    def test_parse_document_again_do_nothing(self):
        """Parsing a file twice should not create new chunks."""

        self.policy.parse_one(self.document_id)
        self.policy.parse_one(self.document_id)

        chunk_ids = self.session.search(
            'file.chunk.binding',
            [('attachment_binding_id', '=', self.document_id)])
        self.assertEqual(len(chunk_ids), 2)
    def write(self, cr, uid, ids, vals, context=None):
        """Queue a Magento inventory export when stock data changes.

        If ``magento_qty`` is written, one ``export_product_inventory``
        job is delayed per record, except when the context carries
        ``connector_no_export`` or the binding has ``no_stock_sync`` set.
        """
        if not hasattr(ids, '__iter__'):
            ids = [ids]

        # magento_qty maybe 0, also need to be updated
        if "magento_qty" in vals:
            # The session and the touched inventory fields do not depend
            # on the record being written, so compute them once instead
            # of once per id (the original rebuilt both in the loop).
            session = ConnectorSession(cr, uid, context=context)
            inventory_fields = list(set(vals).intersection(INVENTORY_FIELDS))
            if inventory_fields and \
                    not session.context.get('connector_no_export'):
                for record_id in ids:
                    # Per-record opt-out of stock synchronisation.
                    if session.browse('magento.product.product',
                                      record_id).no_stock_sync:
                        continue
                    export_product_inventory.delay(
                        session, 'magento.product.product',
                        record_id, fields=inventory_fields,
                        priority=20)

        return super(magento_product_product, self).write(cr, uid, ids, vals,
                                                          context=context)
# Beispiel #5 (snippet separator from the scraped example collection)
# 0
    def write(self, cr, uid, ids, vals, context=None):
        """On cancellation, export the state change to Magento.

        Only the ``cancel`` transition is exported; Magento manages the
        other state changes on its own.
        """
        record_ids = ids if hasattr(ids, "__iter__") else [ids]

        if vals.get("state") == "cancel":
            session = ConnectorSession(cr, uid, context=context)
            for sale in session.browse("sale.order", record_ids):
                if sale.state == "cancel":
                    # already canceled, nothing to propagate
                    continue
                for magento_binding in sale.magento_bind_ids:
                    export_state_change.delay(
                        session,
                        "magento.sale.order",
                        magento_binding.id,
                        # restrict to "cancel" so a later state change
                        # does not get exported by this job
                        allowed_states=["cancel"],
                        description="Cancel sales order %s"
                        % magento_binding.magento_id,
                    )
        return super(sale_order, self).write(
            cr, uid, record_ids, vals, context=context)
# Beispiel #6 (snippet separator from the scraped example collection)
# 0
    def write(self, cr, uid, ids, vals, context=None):
        """On cancellation, export the state change to Magento bindings.

        Only the transition to ``cancel`` is exported; other state
        changes are left to Magento.
        """
        # Normalize a single id to a list so browse/super get a sequence.
        if not hasattr(ids, '__iter__'):
            ids = [ids]

        # cancel sales order on Magento (do not export the other
        # state changes, Magento handles them itself)
        if vals.get('state') == 'cancel':
            session = ConnectorSession(cr, uid, context=context)
            for order in session.browse('sale.order', ids):
                old_state = order.state
                if old_state == 'cancel':
                    continue  # skip if already canceled
                # One export job per Magento binding of the order.
                for binding in order.magento_bind_ids:
                    export_state_change.delay(
                        session,
                        'magento.sale.order',
                        binding.id,
                        # so if the state changes afterwards,
                        # it won't be exported
                        allowed_states=['cancel'],
                        description="Cancel sales order %s" %
                                    binding.magento_id)
        return super(sale_order, self).write(cr, uid, ids, vals,
                                             context=context)
class TestFTPGetPolicyWithOE(common.TransactionCase):

    """Integrated Tests for the FTP Get Policy that do use OpenERP."""

    def setUp(self):
        """Create a file-import backend and an FTP getter policy on it."""
        super(TestFTPGetPolicyWithOE, self).setUp()
        self.backend_record = Mock()
        self.session = ConnectorSession(self.cr, self.uid)
        self.model_name = 'ir.attachment.binding'
        self.backend_model = self.registry('file_import.backend')
        backend_values = {
            'name': 'Test File Import',
            'ftp_host': 'localhost',
            'ftp_user': '******',
            'ftp_password': '******',
            'ftp_input_folder': 'to_openerp',
            'ftp_failed_folder': 'from_openerp',
        }
        self.backend_id = self.backend_model.create(
            self.cr, self.uid, backend_values, self.session.context)
        self.env = Environment(
            self.backend_record, self.session, self.model_name)
        self.policy = FTPFileGetterPolicy(self.env)

    def test_create_one_file(self):
        """Fetching a file creates a binding carrying the file content."""
        binding_id = create_one_file(
            self.session, self.model_name, self.backend_id,
            'to_openerp/s1.csv', 'to_openerp/s1.md5')

        binding = self.session.browse(self.model_name, binding_id)
        self.assertEquals(binding.datas_fname, 's1.csv')

        with open(expand_path('two_chunks.csv')) as expected_file_like:
            self.assertEquals(
                base64.b64decode(binding.datas),
                expected_file_like.read())

    def test_create_file_uniq(self):
        """Test idempotency of file creation.

        We check that if the job to create a file is executed many times,
        just one file is created, without raising exceptions.

        """

        first_id = create_one_file(
            self.session, self.model_name, self.backend_id,
            'to_openerp/s1.csv', 'to_openerp/s1.md5')

        second_id = create_one_file(
            self.session, self.model_name, self.backend_id,
            'to_openerp/s1.csv', 'to_openerp/s1.md5')

        self.assertIsInstance(first_id, (int, long))
        self.assertIs(second_id, None)
class TestIntLoad(common.TransactionCase):

    """Integrated tests of the Load chunk. We hit the DB here."""

    def setUp(self):
        """Create a backend, a load policy and parsed chunk fixtures."""
        super(TestIntLoad, self).setUp()
        self.backend_record = Mock()
        self.session = ConnectorSession(self.cr, self.uid)
        self.model_name = 'ir.attachment.binding'
        self.backend_id = self.session.create(
            'file_import.backend',
            {
                'name': 'Test File Import',
                'ftp_host': 'localhost',
                'ftp_user': '******',
                'ftp_password': '******',
                'ftp_input_folder': 'to_openerp',
                'ftp_failed_folder': 'from_openerp',
            })

        self.env = Environment(
            self.backend_record,
            self.session,
            self.model_name
        )

        self.policy = MoveLoadPolicy(self.env)

        # JSON-encoded header/chunk fixtures as the parse step would
        # produce them; kept as single-line literals so they match the
        # stored prepared data byte for byte.
        self.parsed_header = '["ref", "date", "period_id", "journal_id", "line_id/account_id", "line_id/partner_id", "line_id/name", "line_id/analytic_account_id", "line_id/debit", "line_id/credit", "line_id/tax_code_id"]'  # noqa

        self.parsed_good_chunk = '[["1728274", "2014-02-02", "02/2014", "Sales Journal - (test)", "X11001", "Bank Wealthy and sons", "Camptocamp", "", "37.8", "", ""], ["", "", "", "", "X1111", "Bank Wealthy and sons", "Camptocamp", "AA009", "", "31.5", ""], ["", "", "", "", "X2001", "Bank Wealthy and sons", "Camptocamp", "AA001", "", "3.83", ""], ["", "", "", "", "X2110", "Bank Wealthy and sons", "Camptocamp", "AA001", "3.83", "", ""], ["", "", "", "", "X1000", "Bank Wealthy and sons", "Camptocamp", "", "", "6.3", ""], ["", "", "", "", "X1000", "Bank Wealthy and sons", "Camptocamp", "", "", "-0", ""]]'  # noqa

        # Same chunk but with a negative credit that should make the
        # load fail (used by the disabled failure test below).
        self.parsed_chunk_missing_journal = '[["1728274", "2014-02-02", "02/2014", "Sales Journal - (test)", "X11001", "Bank Wealthy and sons", "Camptocamp", "", "37.8", "", ""], ["", "", "", "", "X1111", "Bank Wealthy and sons", "Camptocamp", "AA009", "", "31.5", ""], ["", "", "", "", "X2001", "Bank Wealthy and sons", "Camptocamp", "AA001", "", "3.83", ""], ["", "", "", "", "X2110", "Bank Wealthy and sons", "Camptocamp", "AA001", "3.83", "", ""], ["", "", "", "", "X1000", "Bank Wealthy and sons", "Camptocamp", "", "", "-6.3", ""], ["", "", "", "", "X1000", "Bank Wealthy and sons", "Camptocamp", "", "", "-0", ""]]'  # noqa

        with open(expand_path('two_chunks.csv')) as input_file:
            file_content = input_file.read()

        self.document_id = self.session.create(
            'ir.attachment.binding', {
                'datas': base64.b64encode(file_content),
                'datas_fname': 'two_chunks.csv',
                'name': 'two_chunks.csv',
                'backend_id': self.backend_id,
                'prepared_header': self.parsed_header,
            })

    def test_new_chunk_binding_state_pending(self):
        """A new chunk should have state pending."""
        chunk_id = self.session.create(
            'file.chunk.binding', {
                'prepared_data': self.parsed_good_chunk,
                'backend_id': self.backend_id,
                'attachment_binding_id': self.document_id,
            })

        chunk = self.session.browse(
            'file.chunk.binding',
            chunk_id)

        self.assertEquals(chunk.load_state, 'pending')

    def test_chunk_load_state_done(self):
        """Once loaded, a chunk should have state done."""
        chunk_id = self.session.create(
            'file.chunk.binding', {
                'prepared_data': self.parsed_good_chunk,
                'backend_id': self.backend_id,
                'attachment_binding_id': self.document_id,
            })

        chunk = self.session.browse(
            'file.chunk.binding',
            chunk_id)

        self.policy.load_one_chunk(chunk_id)

        # exc_info is surfaced in the failure message to ease debugging.
        self.assertEquals(chunk.load_state, 'done', msg=chunk.exc_info)

    def no_test_broken_chunk_state_failed(self):
        """If load fails, we should have load_state failed.

        Implicitly, the exception should pass (the job will be done).

        FIXME: I disabled this test because it causes a false positive on
        travis. Tests pass on my machine. See:

        https://github.com/camptocamp/connector-file/issues/2
        https://github.com/OCA/maintainer-quality-tools/issues/43

        """

        chunk_id = self.session.create(
            'file.chunk.binding', {
                'prepared_data': self.parsed_chunk_missing_journal,
                'backend_id': self.backend_id,
                'attachment_binding_id': self.document_id,
            })

        chunk = self.session.browse(
            'file.chunk.binding',
            chunk_id)

        self.policy.load_one_chunk(chunk_id)

        self.assertEquals(chunk.load_state, 'failed')

        self.assertIn(u'Error during load', chunk.exc_info)
        self.assertIn(u'violates check constraint', chunk.exc_info)

    def test_one_chunk_creates_one_move(self):
        """Loading a chunk should create exactly one account move."""
        chunk_id = self.session.create(
            'file.chunk.binding', {
                'prepared_data': self.parsed_good_chunk,
                'backend_id': self.backend_id,
                'attachment_binding_id': self.document_id,
            })

        self.policy.load_one_chunk(chunk_id)

        move_ids = self.session.search('account.move', [
            ('ref', '=', '1728274')
        ])
        self.assertEquals(len(move_ids), 1)

    def test_load_one_chunk_twice_creates_one_move(self):
        """Loading the same chunk twice must stay idempotent."""
        chunk_id = self.session.create(
            'file.chunk.binding', {
                'prepared_data': self.parsed_good_chunk,
                'backend_id': self.backend_id,
                'attachment_binding_id': self.document_id,
            })

        self.policy.load_one_chunk(chunk_id)
        self.policy.load_one_chunk(chunk_id)

        move_ids = self.session.search('account.move', [
            ('ref', '=', '1728274')
        ])
        self.assertEquals(len(move_ids), 1)
class TestIntCSVParse(common.TransactionCase):

    """Test that parsing a file creates unique chunks in the database."""

    def setUp(self):
        """Create a backend, a parse policy and one attachment binding."""
        super(TestIntCSVParse, self).setUp()
        self.backend_record = Mock()
        self.session = ConnectorSession(self.cr, self.uid)
        self.model_name = 'ir.attachment.binding'
        backend_values = {
            'name': 'Test File Import',
            'ftp_host': 'localhost',
            'ftp_user': '******',
            'ftp_password': '******',
            'ftp_input_folder': 'to_openerp',
            'ftp_failed_folder': 'from_openerp',
        }
        self.backend_id = self.session.create(
            'file_import.backend', backend_values)

        self.env = Environment(
            self.backend_record, self.session, self.model_name)

        self.policy = CSVParsePolicy(self.env)

        with open(expand_path('two_chunks.csv')) as fixture:
            fixture_content = fixture.read()

        self.document_id = self.session.create(
            'ir.attachment.binding', {
                'datas': base64.b64encode(fixture_content),
                'datas_fname': 'two_chunks.csv',
                'name': 'two_chunks.csv',
                'backend_id': self.backend_id,
            })

        self.document = self.session.browse(
            'ir.attachment.binding', self.document_id)

    def _find_chunks(self):
        """Return the ids of the chunks linked to the test document."""
        return self.session.search('file.chunk.binding', [
            ('attachment_binding_id', '=', self.document_id)
        ])

    def test_parse_document_create_chunks(self):
        """Parsing a file should create 2 chunks in the database."""

        self.policy.parse_one(self.document_id)
        self.assertEquals(len(self._find_chunks()), 2)

    def test_parse_document_again_do_nothing(self):
        """Parsing a file twice should not create new chunks."""

        self.policy.parse_one(self.document_id)
        self.policy.parse_one(self.document_id)

        self.assertEquals(len(self._find_chunks()), 2)