def set_default_engine_ids(apps, schema_editor):
    """Backfill ``engine_id`` on every DataSourceConfiguration doc.

    Docs that already carry a truthy ``engine_id`` are left untouched;
    all others get DEFAULT_ENGINE_ID and are written back in bulk
    via IterDB.
    """
    # Design docs must exist before we can enumerate the configs;
    # skipped under unit tests where sync is handled elsewhere.
    if not settings.UNIT_TESTING:
        sync_docs.sync(ucr_models, verbosity=2)

    config_db = DataSourceConfiguration.get_db()
    with IterDB(config_db) as bulk_writer:
        for config_doc in iter_docs(config_db, DataSourceConfiguration.all_ids()):
            if not config_doc.get("engine_id"):
                config_doc["engine_id"] = DEFAULT_ENGINE_ID
                bulk_writer.save(config_doc)
def do_sync(app_index): """ Get the app for the given index. For multiprocessing can't pass a complex object hence the call here...again """ #sanity check: app = get_apps()[app_index] sync_docs.sync(app, verbosity=2, temp='tmp') print "preindex %s complete" % app_index return app_index
def forwards(self, orm):
    """Bootstrap SMS gateways and fees after ensuring base currencies exist."""
    # hack: manually force sync SMS design docs before
    # we try to load from them. the bootstrap commands are dependent on these.
    sync_docs.sync(sms_models, verbosity=2)

    # ensure default currency (plus EUR / INR)
    currency_model = orm["accounting.Currency"]
    for currency_code in (settings.DEFAULT_CURRENCY, "EUR", "INR"):
        currency_model.objects.get_or_create(code=currency_code)

    # run each gateway/fee bootstrap command, in this exact order
    for command_name in (
        "bootstrap_grapevine_gateway",
        "bootstrap_mach_gateway",
        "bootstrap_tropo_gateway",
        "bootstrap_twilio_gateway",
        "bootstrap_unicel_gateway",
        "bootstrap_usage_fees",
    ):
        call_command(command_name)
def forwards(self, orm):
    """Bootstrap SMS gateways (passing the migration ORM) and usage fees."""
    # hack: manually force sync SMS design docs before
    # we try to load from them. the bootstrap commands are dependent on these.
    sync_docs.sync(sms_models, verbosity=2)

    # ensure default currency (plus EUR / INR)
    currency_model = orm['accounting.Currency']
    for currency_code in (settings.DEFAULT_CURRENCY, 'EUR', 'INR'):
        currency_model.objects.get_or_create(code=currency_code)

    # each bootstrap helper takes the frozen ORM; run in this exact order
    for bootstrap in (
        bootstrap_grapevine_gateway,
        bootstrap_mach_gateway,
        bootstrap_tropo_gateway,
        bootstrap_twilio_gateway,
        bootstrap_unicel_gateway,
    ):
        bootstrap(orm)
    call_command('bootstrap_usage_fees')
def bootstrap_sms(apps, schema_editor):
    """Run every SMS gateway / fee bootstrap step, in order."""
    # design docs must be synced before the bootstrap helpers query them
    sync_docs.sync(sms_models, verbosity=2)

    # each step is a callable taking the migration app registry; the
    # lambdas adapt the two steps whose signatures differ
    steps = [
        bootstrap_grapevine_gateway,
        bootstrap_mach_gateway,
        bootstrap_tropo_gateway,
        lambda registry: bootstrap_twilio_gateway(
            registry,
            'corehq/apps/smsbillables/management/commands/pricing_data/twilio-rates-2015_10_06.csv'
        ),
        bootstrap_unicel_gateway,
        lambda registry: call_command('bootstrap_usage_fees'),
        bootstrap_moz_gateway,
        bootstrap_test_gateway,
        bootstrap_telerivet_gateway,
        bootstrap_twilio_gateway_incoming,
        bootstrap_yo_gateway,
        add_moz_zero_charge,
        bootstrap_grapevine_gateway_update,
    ]
    for step in steps:
        step(apps)
def forwards(self, orm):
    """Copy couch Product docs into SQLProduct rows, then link StockStates."""
    # hack to force sync docs before this runs
    sync_docs.sync(commtrack_models, verbosity=2)

    # (sql_field, couch_field) pairs; most names match on both sides
    field_map = [
        ('product_id', '_id'),
        ('domain', 'domain'),
        ('name', 'name'),
        ('is_archived', 'is_archived'),
        ('code', 'code_'),
        ('description', 'description'),
        ('category', 'category'),
        ('program_id', 'program_id'),
        ('cost', 'cost'),
        ('units', 'unit'),
        ('product_data', 'product_data'),
    ]

    # sync products first
    product_db = Product.get_db()
    doc_ids = [
        row['id']
        for row in product_db.view('commtrack/products', reduce=False).all()
    ]
    for product_doc in iter_docs(product_db, doc_ids):
        sql_product = orm.SQLProduct()
        for sql_field, couch_field in field_map:
            # only copy fields actually present on the couch doc
            if couch_field in product_doc:
                setattr(sql_product, sql_field, product_doc[couch_field])
        sql_product.save()

    # now update stock states
    for stock_state in orm.StockState.objects.all():
        stock_state.sql_product = orm.SQLProduct.objects.get(
            product_id=stock_state.product_id)
        stock_state.save()
def forwards(self, orm):
    """Migrate every Application doc via self.migrate_app, saving in batches.

    Per-app failures are collected into ``errors`` and logged at the end
    rather than aborting the migration.
    """
    # if the view doesn't exist manually create it.
    # typically for initial load or tests.
    try:
        Application.get_db().view(
            'app_manager/applications',
            limit=1,
        ).all()
    except ResourceNotFound:
        sync_docs.sync(app_models, verbosity=2)
    errors = []

    def _migrate_app_ids(app_ids):
        # migrate the given ids, flushing saves in batches of >25 to
        # bound memory use
        to_save = []
        count = len(app_ids)
        logger.info('migrating {} apps'.format(count))
        for i, app_doc in enumerate(
                iter_docs(Application.get_db(), app_ids)):
            try:
                # only real (or soft-deleted) applications are migrated;
                # other doc_types returned by the iterator are skipped
                if app_doc["doc_type"] in [
                    "Application", "Application-Deleted"
                ]:
                    application = Application.wrap(app_doc)
                    should_save = self.migrate_app(application)
                    if should_save:
                        to_save.append(application)
                        if len(to_save) > 25:
                            self.bulk_save(to_save)
                            logger.info('completed {}/{} apps'.format(
                                i, count))
                            to_save = []
            except Exception:
                # record the failure and keep going; note this captures
                # only the exception *type* (sys.exc_info()[0])
                errors.append(
                    "App {id} not properly migrated because {error}".format(
                        id=app_doc['_id'], error=sys.exc_info()[0]))
        # flush the final partial batch
        if to_save:
            self.bulk_save(to_save)

    logger.info('migrating applications')
    _migrate_app_ids(self.get_app_ids())
    if errors:
        logger.info('\n'.join(errors))
def forwards(self, orm):
    """Migrate every Application doc via self.migrate_app, saving in batches.

    Per-app failures are collected and logged at the end rather than
    aborting the migration.
    """
    # if the view doesn't exist manually create it.
    # typically for initial load or tests.
    try:
        Application.get_db().view(
            'app_manager/applications',
            limit=1,
        ).all()
    except ResourceNotFound:
        sync_docs.sync(app_models, verbosity=2)
    errors = []

    def _migrate_app_ids(app_ids):
        # migrate the given ids, flushing saves in batches of >25 to
        # bound memory use
        to_save = []
        count = len(app_ids)
        logger.info('migrating {} apps'.format(count))
        for i, app_doc in enumerate(iter_docs(Application.get_db(), app_ids)):
            try:
                if app_doc["doc_type"] in ["Application", "Application-Deleted"]:
                    application = Application.wrap(app_doc)
                    should_save = self.migrate_app(application)
                    if should_save:
                        to_save.append(application)
                        if len(to_save) > 25:
                            self.bulk_save(to_save)
                            # i + 1 apps have been processed at this point
                            # (enumerate is 0-based)
                            logger.info('completed {}/{} apps'.format(i + 1, count))
                            to_save = []
            except Exception as e:
                # record the full exception; the previous code logged only
                # sys.exc_info()[0] (the type), dropping the message
                errors.append(
                    "App {id} not properly migrated because {error}".format(
                        id=app_doc['_id'], error=repr(e)))
        # flush the final partial batch
        if to_save:
            self.bulk_save(to_save)

    logger.info('migrating applications')
    _migrate_app_ids(self.get_app_ids())
    if errors:
        logger.info('\n'.join(errors))
def forwards(self, orm):
    """Copy couch Location docs into SQLLocation rows, wiring up parents,
    then rebuild the mptt tree structure at the end."""
    # hack: manually force sync Location design docs before
    # we try to load from them
    sync_docs.sync(location_models, verbosity=2)
    # mix of plain names (same on both sides) and
    # (sql_field, couch_field) tuples
    properties_to_sync = [
        ('location_id', '_id'),
        'domain',
        'name',
        'location_type',
        'site_code',
        'external_id',
        'latitude',
        'longitude',
        'is_archived',
    ]
    location_ids = set([r['id'] for r in Location.get_db().view(
        'locations/by_name',
        reduce=False,
    ).all()])
    for location, sp in iter_location_join_supply_point(location_ids):
        try:
            sql_location = orm.SQLLocation.objects.get(location_id=location['_id'])
        except orm.SQLLocation.DoesNotExist:
            # this populates bogus mptt data because otherwise
            # null constraints will blow up but do not worry, we
            # rebuild this at the end
            sql_location = orm.SQLLocation.objects.create(
                location_id=location['_id'],
                lft=0,
                rght=0,
                tree_id=0,
                level=0
            )
        for prop in properties_to_sync:
            if isinstance(prop, tuple):
                sql_prop, couch_prop = prop
            else:
                sql_prop = couch_prop = prop
            # only copy fields actually present on the couch doc
            if couch_prop in location:
                setattr(sql_location, sql_prop, location[couch_prop])
        if sp:
            sql_location.supply_point_id = sp._id
        # sync parent connection
        lineage = location.get('lineage', None)
        if lineage:
            try:
                sql_location.parent = orm.SQLLocation.objects.get(location_id=lineage[0])
            except orm.SQLLocation.DoesNotExist:
                # create a placeholder for the parent if it does
                # not yet exist, assuming that it will be properly
                # populated with data when its turn comes up in the
                # loop
                sql_location.parent = orm.SQLLocation.objects.create(
                    location_id=lineage[0],
                    lft=0,
                    rght=0,
                    tree_id=0,
                    level=0
                )
                sql_location.parent.save()
        sql_location.save()
    # this is the important bit that rebuilds mptt tree structures
    SQLLocation.objects.rebuild()
def _sync_couch():
    """Force-sync the userreports couch design docs.

    Must run before the sql migration, which reads data out of couch.
    """
    sync_docs.sync(userreports_models, verbosity=2)