Example #1
    def delete(self):
        # you cannot delete the default program
        if self.default:
            raise Exception(_('You cannot delete the default program'))

        default = Program.default_for_domain(self.domain)

        products = Product.by_program_id(
            self.domain,
            self._id,
            wrap=False
        )
        to_save = []

        for product in products:
            product['program_id'] = default._id
            to_save.append(product)

            # break up saving in case there are many products
            if len(to_save) > 500:
                Product.get_db().bulk_save(to_save)
                to_save = []

        Product.get_db().bulk_save(to_save)

        # bulk update sqlproducts
        SQLProduct.objects.filter(program_id=self._id).update(program_id=default._id)

        return super(Program, self).delete()
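
The loop above flushes reassigned products to the database in batches of 500 so that a large product list never has to sit in memory all at once. A minimal sketch of that chunking pattern as a reusable helper (the helper name and the bulk_save callable argument are assumptions for illustration, not part of the original module):

def save_in_chunks(docs, bulk_save, chunk_size=500):
    # accumulate documents and flush them in batches, then flush the remainder
    to_save = []
    for doc in docs:
        to_save.append(doc)
        if len(to_save) >= chunk_size:
            bulk_save(to_save)
            to_save = []
    if to_save:
        bulk_save(to_save)

With a helper like this, the reassignment above reduces to building the updated product dicts and calling save_in_chunks(products, Product.get_db().bulk_save).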
Example #2
def import_products(domain, importer):
    from corehq.apps.products.views import ProductFieldsView
    results = {'errors': [], 'messages': []}
    to_save = []
    product_count = 0
    seen_product_ids = set()

    custom_data_validator = ProductFieldsView.get_validator(domain)

    for row in importer.worksheet:
        try:
            p = Product.from_excel(row, custom_data_validator)
        except Exception as e:
            results['errors'].append(
                _(u'Failed to import product {name}: {ex}').format(
                    name=row['name'] or '',
                    ex=e,
                )
            )
            continue

        importer.add_progress()
        if not p:
            # skip if no product is found (or the row is blank)
            continue
        if not p.domain:
            # if product doesn't have domain, use from context
            p.domain = domain
        elif p.domain != domain:
            # don't let user import against another domains products
            results['errors'].append(
                _(u"Product {product_name} belongs to another domain and was not updated").format(
                    product_name=p.name
                )
            )
            continue

        if p.code and p.code in seen_product_ids:
            results['errors'].append(_(
                u"Product {product_name} could not be imported \
                due to duplicated product ids in the excel \
                file"
            ).format(
                product_name=p.name
            ))
            continue
        elif p.code:
            seen_product_ids.add(p.code)

        product_count += 1
        to_save.append(p)

        if len(to_save) > 500:
            Product.get_db().bulk_save(to_save)
            for couch_product in to_save:
                couch_product.sync_to_sql()
            to_save = []
Example #3
    def handle(self, *args, **options):
        for domain in Domain.get_all():
            if domain['commtrack_enabled']:
                fields_definition = cdm.CustomDataFieldsDefinition.get_or_create(
                    domain['name'], 'ProductFields')
                had_fields = bool(fields_definition.fields)

                product_ids = Product.ids_by_domain(domain['name'])

                existing_field_slugs = set(
                    [field.slug for field in fields_definition.fields])
                for product in iter_docs(Product.get_db(), product_ids):
                    product_data = product.get('product_data', {})
                    for key in product_data.keys():
                        if key and key not in existing_field_slugs:
                            existing_field_slugs.add(key)
                            fields_definition.fields.append(
                                cdm.CustomDataField(
                                    slug=key,
                                    label=key,
                                    is_required=False,
                                ))

                # iterate over a copy since fields are removed during the loop
                for field in list(fields_definition.fields):
                    if cdm.is_system_key(field.slug):
                        fields_definition.fields.remove(field)
                # Only save a definition for domains which use custom product data
                if fields_definition.fields or had_fields:
                    fields_definition.save()
            print 'finished domain "{}"'.format(domain['name'])
Example #4
    def handle(self, *args, **options):
        for domain in Domain.get_all():
            if domain['commtrack_enabled']:
                fields_definition = CustomDataFieldsDefinition.get_or_create(
                    domain['name'],
                    'ProductFields'
                )

                product_ids = Product.ids_by_domain(domain['name'])

                existing_field_slugs = set(
                    [field.slug for field in fields_definition.fields]
                )
                for product in iter_docs(Product.get_db(), product_ids):
                    product_data = product.get('product_data', {})
                    for key in product_data.keys():
                        if key and key not in existing_field_slugs:
                            existing_field_slugs.add(key)
                            fields_definition.fields.append(CustomDataField(
                                slug=key,
                                label=key,
                                is_required=False
                            ))

                # Only save a definition for domains which use custom product data
                if fields_definition.fields:
                    fields_definition.save()
Example #5
    def handle(self, *args, **options):
        self.stdout.write("Processing products...\n")

        relevant_ids = set([
            r['id'] for r in Product.get_db().view(
                'commtrack/products',
                reduce=False,
            ).all()
        ])

        to_save = []

        for product in iter_docs(Product.get_db(), relevant_ids):
            if 'last_modified' not in product or not product['last_modified']:
                product['last_modified'] = datetime.utcnow().isoformat()
                to_save.append(product)

                if len(to_save) > 500:
                    Product.bulk_save(to_save)
                    to_save = []

        if to_save:
            Product.bulk_save(to_save)

        self.stdout.write("Processing programs...\n")

        relevant_ids = set([
            r['id'] for r in Program.get_db().view(
                'commtrack/programs',
                reduce=False,
            ).all()
        ])

        to_save = []

        for program in iter_docs(Program.get_db(), relevant_ids):
            if 'last_modified' not in program or not program['last_modified']:
                program['last_modified'] = datetime.utcnow().isoformat()
                to_save.append(program)

                if len(to_save) > 500:
                    Program.get_db().bulk_save(to_save)
                    to_save = []

        if to_save:
            Program.get_db().bulk_save(to_save)
Example #6
    def forwards(self, orm):
        product_ids = [r['id'] for r in Product.get_db().view(
            'commtrack/products',
            reduce=False,
        ).all()]

        for product in iter_docs(Product.get_db(), product_ids):
            try:
                sql_product = orm.SQLProduct.objects.get(product_id=product['_id'])
            except orm.SQLProduct.DoesNotExist:
                # weird - something failed syncing products. force creation now by resaving it.
                Product.wrap(product).save()
                sql_product = orm.SQLProduct.objects.get(product_id=product['_id'])

            if 'last_modified' in product.keys() and product['last_modified']:
                sql_product.created_at = product['last_modified']
                sql_product.last_modified = product['last_modified']
                sql_product.save()
Example #7
    def forwards(self, orm):
        # hack to force sync docs before this runs
        sync_docs.sync(commtrack_models, verbosity=2)

        # sync products first

        properties_to_sync = [
            ('product_id', '_id'),
            'domain',
            'name',
            'is_archived',
            ('code', 'code_'),
            'description',
            'category',
            'program_id',
            'cost',
            ('units', 'unit'),
            'product_data',
        ]

        product_ids = [r['id'] for r in Product.get_db().view(
            'commtrack/products',
            reduce=False,
        ).all()]

        for product in iter_docs(Product.get_db(), product_ids):
            sql_product = orm.SQLProduct()

            for prop in properties_to_sync:
                if isinstance(prop, tuple):
                    sql_prop, couch_prop = prop
                else:
                    sql_prop = couch_prop = prop

                if couch_prop in product:
                    setattr(sql_product, sql_prop, product[couch_prop])

            sql_product.save()

        # now update stock states

        for ss in orm.StockState.objects.all():
            ss.sql_product = orm.SQLProduct.objects.get(product_id=ss.product_id)
            ss.save()
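
The properties_to_sync list above mixes bare property names with (sql_prop, couch_prop) tuples for fields whose names differ between the SQL model and the couch document. That mapping step, factored out as a standalone sketch (the helper name is an assumption; the logic mirrors the loop above):

def copy_couch_properties(sql_obj, couch_doc, properties):
    # each entry is either a shared name, or a (sql_prop, couch_prop) tuple
    # when the two models use different names for the same field
    for prop in properties:
        if isinstance(prop, tuple):
            sql_prop, couch_prop = prop
        else:
            sql_prop = couch_prop = prop
        if couch_prop in couch_doc:
            setattr(sql_obj, sql_prop, couch_doc[couch_prop])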
Example #8
    def handle(self, *args, **options):
        self.stdout.write("Processing products...\n")

        relevant_ids = set([r["id"] for r in Product.get_db().view("commtrack/products", reduce=False).all()])

        to_save = []

        for product in iter_docs(Product.get_db(), relevant_ids):
            if "last_modified" not in product or not product["last_modified"]:
                product["last_modified"] = datetime.utcnow().isoformat()
                to_save.append(product)

                if len(to_save) > 500:
                    Product.bulk_save(to_save)
                    to_save = []

        if to_save:
            Product.bulk_save(to_save)

        self.stdout.write("Processing programs...\n")

        relevant_ids = set([r["id"] for r in Program.get_db().view("commtrack/programs", reduce=False).all()])

        to_save = []

        for program in iter_docs(Program.get_db(), relevant_ids):
            if "last_modified" not in program or not program["last_modified"]:
                program["last_modified"] = datetime.utcnow().isoformat()
                to_save.append(program)

                if len(to_save) > 500:
                    Program.get_db().bulk_save(to_save)
                    to_save = []

        if to_save:
            Program.get_db().bulk_save(to_save)
Example #9
    def handle(self, *args, **options):
        for domain in Domain.get_all():
            if domain["commtrack_enabled"]:
                fields_definition = cdm.CustomDataFieldsDefinition.get_or_create(domain["name"], "ProductFields")
                had_fields = bool(fields_definition.fields)

                product_ids = Product.ids_by_domain(domain["name"])

                existing_field_slugs = set([field.slug for field in fields_definition.fields])
                for product in iter_docs(Product.get_db(), product_ids):
                    product_data = product.get("product_data", {})
                    for key in product_data.keys():
                        if key and key not in existing_field_slugs:
                            existing_field_slugs.add(key)
                            fields_definition.fields.append(cdm.CustomDataField(slug=key, label=key, is_required=False))

                # iterate over a copy since fields are removed during the loop
                for field in list(fields_definition.fields):
                    if cdm.is_system_key(field.slug):
                        fields_definition.fields.remove(field)
                # Only save a definition for domains which use custom product data
                if fields_definition.fields or had_fields:
                    fields_definition.save()
            print 'finished domain "{}"'.format(domain["name"])
Example #10
def _get_products(domain):
    for p_doc in iter_docs(Product.get_db(),
                           Product.ids_by_domain(domain)):
        # filter out archived products from export
        if not ('is_archived' in p_doc and p_doc['is_archived']):
            yield Product.wrap(p_doc)
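
Because _get_products is a generator, callers can stream unarchived products one document at a time instead of loading the whole domain's product list up front. A hedged usage sketch (the export helper below is hypothetical, not part of the original code):

def export_product_rows(domain):
    # collect (code, name) pairs for every unarchived product in the domain
    return [(product.code, product.name) for product in _get_products(domain)]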
Example #11
                product_name=p.name
            ))
            continue
        elif p.code:
            seen_product_ids.add(p.code)

        product_count += 1
        to_save.append(p)

        if len(to_save) > 500:
            Product.get_db().bulk_save(to_save)
            for couch_product in to_save:
                couch_product.sync_to_sql()
            to_save = []

    if to_save:
        Product.get_db().bulk_save(to_save)
        for couch_product in to_save:
            couch_product.sync_to_sql()

    if product_count:
        results['messages'].insert(
            0,
            _('Successfully updated {number_of_products} products with {errors} '
              'errors.').format(
                number_of_products=product_count, errors=len(results['errors'])
            )
        )

    return results
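
The returned dict carries human-readable strings only: results['messages'] holds the success summary and results['errors'] the per-row failures collected during the import loop. A hedged sketch of how a caller might surface them (the reporting function below is hypothetical; the print statements match the Python 2 style of the surrounding examples):

def report_import_results(results):
    # show the success summary first, then each row-level error
    for message in results['messages']:
        print message
    for error in results['errors']:
        print u'ERROR: {}'.format(error)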
Example #12
def _get_products(domain):
    product_ids = SQLProduct.objects.filter(domain=domain).product_ids()
    for p_doc in iter_docs(Product.get_db(), product_ids):
        # filter out archived products from export
        if not ('is_archived' in p_doc and p_doc['is_archived']):
            yield Product.wrap(p_doc)
Example #13
def _get_products(domain):
    for p_doc in iter_docs(Product.get_db(), Product.ids_by_domain(domain)):
        # filter out archived products from export
        if not ('is_archived' in p_doc and p_doc['is_archived']):
            yield Product.wrap(p_doc)