Example #1
class TestDocumentParseState(common.TransactionCase):
    def setUp(self):
        super(TestDocumentParseState, self).setUp()
        self.backend_record = Mock()
        self.session = ConnectorSession(self.cr, self.uid)
        self.model_name = 'ir.attachment.binding'
        self.backend_id = self.session.create(
            'file_import.backend', {
                'name': 'Test File Import',
                'ftp_host': 'localhost',
                'ftp_user': '******',
                'ftp_password': '******',
                'ftp_input_folder': 'to_openerp',
                'ftp_failed_folder': 'from_openerp',
            })

        self.env = Environment(self.backend_record, self.session,
                               self.model_name)

        self.policy = CSVParsePolicy(self.env)

    def test_new_attachment_binding_state_pending(self):
        """A new file should have state pending."""
        with open(expand_path('two_chunks.csv')) as input_file:
            file_content = input_file.read()

        document_id = self.session.create(
            'ir.attachment.binding', {
                'datas': base64.b64encode(file_content),
                'datas_fname': 'two_chunks.csv',
                'name': 'two_chunks.csv',
                'backend_id': self.backend_id,
            })

        document = self.session.browse('ir.attachment.binding', document_id)

        self.assertEquals(document.parse_state, 'pending')

    def test_parse_one_state_done(self):
        """If a file is parsed, the state of the file should be 'done'."""

        with open(expand_path('two_chunks.csv')) as input_file:
            file_content = input_file.read()

        document_id = self.session.create(
            'ir.attachment.binding', {
                'datas': base64.b64encode(file_content),
                'datas_fname': 'two_chunks.csv',
                'name': 'two_chunks.csv',
                'backend_id': self.backend_id,
            })

        document = self.session.browse('ir.attachment.binding', document_id)
        self.policy.parse_one(document_id)

        self.assertEquals(document.parse_state, 'done')
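
These tests depend on an expand_path helper that the snippet does not define. A minimal sketch of such a helper, assuming the CSV fixtures sit in a 'data' directory next to the test module (the actual location in the project may differ):

import os


def expand_path(filename):
    """Return the absolute path of a test fixture file.

    Assumption: fixtures such as two_chunks.csv live in a 'data'
    directory alongside this test module.
    """
    return os.path.join(os.path.dirname(__file__), 'data', filename)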
class TestIntCSVParse(common.TransactionCase):
    """Test that parsing a file creates unique chunks in the database."""
    def setUp(self):
        super(TestIntCSVParse, self).setUp()
        self.backend_record = Mock()
        self.session = ConnectorSession(self.cr, self.uid)
        self.model_name = 'ir.attachment.binding'
        self.backend_id = self.session.create(
            'file_import.backend', {
                'name': 'Test File Import',
                'ftp_host': 'localhost',
                'ftp_user': '******',
                'ftp_password': '******',
                'ftp_input_folder': 'to_openerp',
                'ftp_failed_folder': 'from_openerp',
            })

        self.env = Environment(self.backend_record, self.session,
                               self.model_name)

        self.policy = CSVParsePolicy(self.env)

        with open(expand_path('two_chunks.csv')) as input_file:
            file_content = input_file.read()

        self.document_id = self.session.create(
            'ir.attachment.binding', {
                'datas': base64.b64encode(file_content),
                'datas_fname': 'two_chunks.csv',
                'name': 'two_chunks.csv',
                'backend_id': self.backend_id,
            })

        self.document = self.session.browse('ir.attachment.binding',
                                            self.document_id)

    def test_parse_document_create_chunks(self):
        """Parsing a file should create 2 chunks in the database."""

        self.policy.parse_one(self.document_id)
        chunk_ids = self.session.search(
            'file.chunk.binding',
            [('attachment_binding_id', '=', self.document_id)])
        self.assertEquals(len(chunk_ids), 2)

    def test_parse_document_again_do_nothing(self):
        """Parsing a file twice should not create new chunks."""

        self.policy.parse_one(self.document_id)
        self.policy.parse_one(self.document_id)

        chunk_ids = self.session.search(
            'file.chunk.binding',
            [('attachment_binding_id', '=', self.document_id)])
        self.assertEquals(len(chunk_ids), 2)
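
The tests above treat CSVParsePolicy.parse_one as a black box. As a rough illustration of the behaviour being tested, and not the project's actual implementation, a parser could group the rows of the CSV into one chunk per journal entry, starting a new chunk whenever the reference column is non-empty; this matches the shape of the prepared chunks shown in TestIntLoad further below:

def split_into_chunks(rows):
    """Group parsed CSV rows into chunks, one per journal entry.

    Sketch only: a row with a non-empty first column (the move
    reference) starts a new chunk; continuation lines of the same
    move carry an empty reference.
    """
    chunk = []
    for row in rows:
        if row[0] and chunk:
            yield chunk
            chunk = []
        chunk.append(row)
    if chunk:
        yield chunk

Storing a hash of each chunk's prepared data would then make the parse idempotent, which is what test_parse_document_again_do_nothing verifies.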
    def import_product_product(self):
        try:
            print ":::::::::::"
            new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
            uid, context = self.env.uid, self.env.context
            with api.Environment.manage():
                self.env = api.Environment(new_cr, uid, context)
                self.test_connection()
                session = ConnectorSession(self.env.cr, self.env.uid,
                                           context=self.env.context)
                self.import_products_from_date = datetime.now()
                products = shopify.Product.find()
                for product in products:
                    vals_product_tmpl = {}
                    dict_attr = product.__dict__['attributes']
                    if not session.search('product.template',
                                          [('shopify_product_id',
                                            '=', dict_attr['id'])]):
                        image_urls = [getattr(i, 'src') for i in product.images]
                        if image_urls:
                            photo = base64.encodestring(
                                urllib2.urlopen(image_urls[0]).read())
                            vals_product_tmpl.update({'image_medium': photo})

                        custom_collection = shopify.CustomCollection.find(product_id=dict_attr['id'])
                        if custom_collection:
                            for categ in custom_collection:
                                product_cate_obj = session.search(
                                    'product.category',
                                    [('shopify_product_cate_id', '=',
                                      categ.__dict__['attributes']['id'])])
                                if product_cate_obj:
                                    vals_product_tmpl.update(
                                        {'categ_id': product_cate_obj[0]})
                        vals_product_tmpl.update({
                            'name': dict_attr['title'],
                            'type': 'consu',
                            'shopify_product_id': dict_attr['id'],
                            'description': dict_attr['body_html'],
                            'state': 'add'})
                        product_tid = session.create('product.template',
                                                     vals_product_tmpl)
                        new_cr.commit()
                        variants = dict_attr['variants']
                        for variant in variants:
                            dict_variant = variant.__dict__['attributes']
                            session.create(
                                'product.product',
                                {'product_tmpl_id': product_tid,
                                 'product_sfy_variant_id': dict_variant['id']})
                            new_cr.commit()
        except Exception:
            raise Warning(_('A problem occurred while importing products!'))
        finally:
            self.env.cr.close()
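
Note that shopify.Product.find() returns a single page of results (the Shopify REST API caps a page at 250 records), so the import above can miss products on larger shops. A hedged sketch of since_id pagination with the same library (this applies to older REST API versions; recent versions use cursor-based page_info pagination instead):

def iter_all_products(page_size=250):
    """Yield every product, fetching one API page at a time."""
    since_id = 0
    while True:
        page = shopify.Product.find(limit=page_size, since_id=since_id)
        if not page:
            return
        for product in page:
            yield product
        since_id = page[-1].id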
    def import_product_categories(self):
        try:
            new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
            uid, context = self.env.uid, self.env.context
            with api.Environment.manage():
                self.env = api.Environment(new_cr, uid, context)
                self.import_categories_from_date = datetime.now()
                self.test_connection()
                session = ConnectorSession(self.env.cr, self.env.uid,
                                           context=self.env.context)
                product_category_ids = session.search(
                    'product.category',
                    [('name', '=', 'Shopify Products')])
                if not product_category_ids:
                    category_id = session.create('product.category',
                                                 {'name': 'Shopify Products'})
                    new_cr.commit()
                shopify_collection = shopify.CustomCollection.find()
                if shopify_collection:
                    for category in shopify_collection:
                        vals = {}
                        dict_category = category.__dict__['attributes']
                        if product_category_ids:
                            vals.update({'parent_id': product_category_ids[0]})
                        else:
                            vals.update({'parent_id': category_id})
                        vals.update({
                            'name': dict_category['title'],
                            'description': dict_category['body_html'],
                            'write_uid': self.env.uid,
                            'shopify_product_cate_id': dict_category['id']})
                        product_cate_id = session.search(
                            'product.category',
                            [('shopify_product_cate_id', '=',
                              dict_category['id'])])
                        if not product_cate_id:
                            session.create('product.category', vals)
                            new_cr.commit()
                        else:
                            session.write('product.category',
                                          product_cate_id[0], vals)
                            new_cr.commit()
        except Exception:
            raise Warning(_('A problem occurred while importing product '
                            'categories!'))
        finally:
            self.env.cr.close()
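
Both import methods call a test_connection helper that is not shown. With the shopify Python library, establishing the connection typically means pointing ShopifyResource at an authenticated shop URL; a minimal sketch, in which the credential fields on self (shopify_api_key and friends) are assumptions:

    def test_connection(self):
        """Activate the Shopify session used by the resource calls above.

        The credential field names are hypothetical; adapt them to how
        the backend actually stores its API key, password and shop name.
        """
        shop_url = "https://%s:%s@%s.myshopify.com/admin" % (
            self.shopify_api_key, self.shopify_password, self.shopify_shop)
        shopify.ShopifyResource.set_site(shop_url)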
Example #5
    def import_datas(self, cr, uid, file_doc, backend, context=None):
        if file_doc.file_type not in FLOW_PARAMS:
            flow_types = FLOW_PARAMS.keys()
            logistic_type_error = _(
                "The File Document type (file_type) '%s' does not match "
                "any of these types: %s\n"
                "You probably have not filled in the tasks in the "
                "'logistics backend' with the right type.\n"
                "Set the right type on the Task and on the File Document, "
                "then click on run again.") % (file_doc.file_type,
                                               flow_types)
            raise orm.except_orm(_("Error in file document type"),
                                 logistic_type_error)
        flow = FLOW_PARAMS[file_doc.file_type]
        session = ConnectorSession(cr, uid, context)
        # derive the import method name by stripping the 9-character flow
        # prefix from the file type
        method = 'import_' + file_doc.file_type[9:]
        struct = getattr(self, method)(cr,
                                       uid,
                                       file_doc,
                                       flow,
                                       session,
                                       context=context)
        if struct:
            priority = 100
            fields = filter_field_names(flow['fields'])
            for picking_id in struct:
                buffer_id = session.create('connector.buffer', {
                    'data': {
                        picking_id: struct[picking_id]
                    },
                })
                session.context['buffer_id'] = buffer_id
                import_one_line_from_file.delay(session,
                                                flow['model_table'],
                                                fields,
                                                buffer_id,
                                                file_doc.id,
                                                priority=priority)
                priority += 1
                file_doc._set_state('running', context=context)
        return True
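
import_one_line_from_file is a connector job queued with .delay(...) above; its definition is not part of the snippet. In the old OpenERP connector framework the matching declaration would look roughly like this; the body is only a guess at how the buffered row is read back and loaded:

from openerp.addons.connector.queue.job import job


@job
def import_one_line_from_file(session, model_name, fields, buffer_id,
                              file_doc_id):
    """Sketch: load one buffered row into model_name with the ORM.

    Assumption: connector.buffer stores the {picking_id: row} dict
    created in import_datas in a form that can be read back as a dict.
    """
    buffer_record = session.browse('connector.buffer', buffer_id)
    rows = list(buffer_record.data.values())
    session.pool[model_name].load(
        session.cr, session.uid, fields, rows, context=session.context)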
class TestIntLoad(common.TransactionCase):

    """Integrated tests of the Load chunk. We hit the DB here."""

    def setUp(self):
        super(TestIntLoad, self).setUp()
        self.backend_record = Mock()
        self.session = ConnectorSession(self.cr, self.uid)
        self.model_name = 'ir.attachment.binding'
        self.backend_id = self.session.create(
            'file_import.backend',
            {
                'name': 'Test File Import',
                'ftp_host': 'localhost',
                'ftp_user': '******',
                'ftp_password': '******',
                'ftp_input_folder': 'to_openerp',
                'ftp_failed_folder': 'from_openerp',
            })

        self.env = Environment(
            self.backend_record,
            self.session,
            self.model_name
        )

        self.policy = MoveLoadPolicy(self.env)

        self.parsed_header = '["ref", "date", "period_id", "journal_id", "line_id/account_id", "line_id/partner_id", "line_id/name", "line_id/analytic_account_id", "line_id/debit", "line_id/credit", "line_id/tax_code_id"]'  # noqa

        self.parsed_good_chunk = '[["1728274", "2014-02-02", "02/2014", "Sales Journal - (test)", "X11001", "Bank Wealthy and sons", "Camptocamp", "", "37.8", "", ""], ["", "", "", "", "X1111", "Bank Wealthy and sons", "Camptocamp", "AA009", "", "31.5", ""], ["", "", "", "", "X2001", "Bank Wealthy and sons", "Camptocamp", "AA001", "", "3.83", ""], ["", "", "", "", "X2110", "Bank Wealthy and sons", "Camptocamp", "AA001", "3.83", "", ""], ["", "", "", "", "X1000", "Bank Wealthy and sons", "Camptocamp", "", "", "6.3", ""], ["", "", "", "", "X1000", "Bank Wealthy and sons", "Camptocamp", "", "", "-0", ""]]'  # noqa

        self.parsed_chunk_missing_journal = '[["1728274", "2014-02-02", "02/2014", "Sales Journal - (test)", "X11001", "Bank Wealthy and sons", "Camptocamp", "", "37.8", "", ""], ["", "", "", "", "X1111", "Bank Wealthy and sons", "Camptocamp", "AA009", "", "31.5", ""], ["", "", "", "", "X2001", "Bank Wealthy and sons", "Camptocamp", "AA001", "", "3.83", ""], ["", "", "", "", "X2110", "Bank Wealthy and sons", "Camptocamp", "AA001", "3.83", "", ""], ["", "", "", "", "X1000", "Bank Wealthy and sons", "Camptocamp", "", "", "-6.3", ""], ["", "", "", "", "X1000", "Bank Wealthy and sons", "Camptocamp", "", "", "-0", ""]]'  # noqa

        with open(expand_path('two_chunks.csv')) as input_file:
            file_content = input_file.read()

        self.document_id = self.session.create(
            'ir.attachment.binding', {
                'datas': base64.b64encode(file_content),
                'datas_fname': 'two_chunks.csv',
                'name': 'two_chunks.csv',
                'backend_id': self.backend_id,
                'prepared_header': self.parsed_header,
            })

    def test_new_chunk_binding_state_pending(self):
        """A new chunk should have state pending."""
        chunk_id = self.session.create(
            'file.chunk.binding', {
                'prepared_data': self.parsed_good_chunk,
                'backend_id': self.backend_id,
                'attachment_binding_id': self.document_id,
            })

        chunk = self.session.browse(
            'file.chunk.binding',
            chunk_id)

        self.assertEquals(chunk.load_state, 'pending')

    def test_chunk_load_state_done(self):
        """Once loaded, a chunk should have state done."""
        chunk_id = self.session.create(
            'file.chunk.binding', {
                'prepared_data': self.parsed_good_chunk,
                'backend_id': self.backend_id,
                'attachment_binding_id': self.document_id,
            })

        chunk = self.session.browse(
            'file.chunk.binding',
            chunk_id)

        self.policy.load_one_chunk(chunk_id)

        self.assertEquals(chunk.load_state, 'done', msg=chunk.exc_info)

    def no_test_broken_chunk_state_failed(self):
        """If load fails, we should have load_state failed.

        Implicitly, the exception should pass (the job will be done).

        FIXME: I disabled this test because it causes a false positive on
        travis. Tests pass on my machine. See:

        https://github.com/camptocamp/connector-file/issues/2
        https://github.com/OCA/maintainer-quality-tools/issues/43

        """

        chunk_id = self.session.create(
            'file.chunk.binding', {
                'prepared_data': self.parsed_chunk_missing_journal,
                'backend_id': self.backend_id,
                'attachment_binding_id': self.document_id,
            })

        chunk = self.session.browse(
            'file.chunk.binding',
            chunk_id)

        self.policy.load_one_chunk(chunk_id)

        self.assertEquals(chunk.load_state, 'failed')

        self.assertIn(u'Error during load', chunk.exc_info)
        self.assertIn(u'violates check constraint', chunk.exc_info)

    def test_one_chunk_creates_one_move(self):
        chunk_id = self.session.create(
            'file.chunk.binding', {
                'prepared_data': self.parsed_good_chunk,
                'backend_id': self.backend_id,
                'attachment_binding_id': self.document_id,
            })

        self.policy.load_one_chunk(chunk_id)

        move_ids = self.session.search('account.move', [
            ('ref', '=', '1728274')
        ])
        self.assertEquals(len(move_ids), 1)

    def test_load_one_chunk_twice_creates_one_move(self):
        chunk_id = self.session.create(
            'file.chunk.binding', {
                'prepared_data': self.parsed_good_chunk,
                'backend_id': self.backend_id,
                'attachment_binding_id': self.document_id,
            })

        self.policy.load_one_chunk(chunk_id)
        self.policy.load_one_chunk(chunk_id)

        move_ids = self.session.search('account.move', [
            ('ref', '=', '1728274')
        ])
        self.assertEquals(len(move_ids), 1)