# --- Ejemplo n.º 1 ---
    def handle(self, *args, **options):
        """Sync design docs into the configured target database and define
        the change handler that copies transformed documents into it.

        Python 2 management-command entry point. Relies on module-level
        names defined elsewhere in this file/project: get_db, Consumer,
        global_config, settings, CouchdbkitHandler, models, Change,
        Checkpoint, CHECKPOINT_FREQUENCY, CHECKPOINT_ID and the global
        counter ``domainsync_counter``.
        """
        db = get_db()
        # NOTE(review): presumably a couchdbkit changes-feed consumer that
        # later gets wired to sync_if_necessary -- not visible here, confirm.
        c = Consumer(db)

        # sync design docs to the target db
        # lots of source diving to figure out this magic
        # (every configured couch app is pointed at the target db's uri)
        new_dbs = [(app, global_config.database.uri)
                   for app, _ in settings.COUCHDB_DATABASES]
        couchdbkit_handler = CouchdbkitHandler(new_dbs)
        for app, _ in new_dbs:
            try:
                couchdbkit_handler.sync(models.get_app(app))
            except ImproperlyConfigured:
                # if django doesn't think this is an app it throws this error
                # this is probably fine
                pass

        # also sync couchapps
        sync_design_docs(global_config.database)

        def sync_if_necessary(line):
            # Change-feed callback: run one changed document through any
            # configured transforms and save the results, checkpointing
            # every CHECKPOINT_FREQUENCY processed changes.
            try:
                change = Change(line)
                # don't bother with deleted or old documents
                if change.deleted or not change.is_current(db):
                    return

                # get doc
                doc = get_db().get(change.id)

                # check if transforms, and if so, save to new domain/db
                transforms = global_config.get_transforms(doc)
                for transform in transforms:
                    global_config.save(transform)

                # update the checkpoint, somewhat arbitrarily
                global domainsync_counter
                domainsync_counter = domainsync_counter + 1
                if domainsync_counter % CHECKPOINT_FREQUENCY == 0:
                    Checkpoint.set_checkpoint(CHECKPOINT_ID, change.seq)

            except Exception, e:
                # broad catch on purpose: one bad feed line must not kill
                # the whole sync loop; log with traceback and move on
                logging.exception("problem in domain sync for line: %s\n%s" %
                                  (line, e))
# --- Ejemplo n.º 2 ---
 def handle(self, *args, **options):
     db = get_db()
     c = Consumer(db)
     
     # sync design docs to the target db
     # lots of source diving to figure out this magic
     new_dbs = [(app, global_config.database.uri) for app, _ in settings.COUCHDB_DATABASES]
     couchdbkit_handler = CouchdbkitHandler(new_dbs)
     for app, _ in new_dbs:
         try:
             couchdbkit_handler.sync(models.get_app(app))
         except ImproperlyConfigured:
             # if django doesn't think this is an app it throws this error
             # this is probably fine
             pass
     
     # also sync couchapps
     sync_design_docs(global_config.database)
     
     def sync_if_necessary(line):
         try:
             change = Change(line)
             # don't bother with deleted or old documents
             if change.deleted or not change.is_current(db):
                 return 
             
             # get doc
             doc = get_db().get(change.id)
             
             # check if transforms, and if so, save to new domain/db
             transforms = global_config.get_transforms(doc)
             for transform in transforms:
                 global_config.save(transform)
             
             # update the checkpoint, somewhat arbitrarily
             global domainsync_counter
             domainsync_counter = domainsync_counter + 1
             if domainsync_counter % CHECKPOINT_FREQUENCY == 0:
                 Checkpoint.set_checkpoint(CHECKPOINT_ID, change.seq)
         
         except Exception, e:
             logging.exception("problem in domain sync for line: %s\n%s" % (line, e))
    def handle(self, *args, **options):


        start = datetime.utcnow()
        if len(args) == 0:
            num_pool = POOL_SIZE
        else:
            num_pool = int(args[0])

        if len(args) > 1:
            username = args[1]
        else:
            username = '******'

        pool = Pool(num_pool)

        apps = get_apps()

        completed = set()
        app_ids = set(range(len(apps)))
        for app_id in sorted(app_ids.difference(completed)):
            #keep trying all the preindexes until they all complete satisfactorily.
            print "Trying to preindex view (%d/%d) %s" % (app_id, len(apps), apps[app_id])
            pool.spawn(do_sync, app_id)

        # sshhhhhh: if we're using HQ also preindex the couch apps
        # this could probably be multithreaded too, but leaving for now
        try:
            from corehq.couchapps import sync_design_docs
        except ImportError:
            pass
        else:
            sync_design_docs(get_db(), temp="tmp")

        # same hack above for MVP
        try:
            from mvp_apps import sync_design_docs as mvp_sync
        except ImportError:
            pass
        else:
            mvp_sync(get_db(), temp="tmp")

        # same hack above for MVP
        try:
            from fluff.sync_couchdb import sync_design_docs as fluff_sync
        except ImportError:
            pass
        else:
            fluff_sync(temp="tmp")

        print "All apps loaded into jobs, waiting..."
        pool.join()
        print "All apps reported complete."

        message = "Preindex results:\n"
        message += "\tInitiated by: %s\n" % username

        delta = datetime.utcnow() - start
        message += "Total time: %d seconds" % delta.seconds
        print message

        send_mail('%s Preindex Complete' % settings.EMAIL_SUBJECT_PREFIX,
                  message,
                  settings.SERVER_EMAIL,
                  [x[1] for x in settings.ADMINS],
                  fail_silently=True)
# --- Ejemplo n.º 4 ---
    def handle(self, *args, **options):

        start = datetime.utcnow()
        if len(args) == 0:
            num_pool = POOL_SIZE
        else:
            num_pool = int(args[0])

        if len(args) > 1:
            username = args[1]
        else:
            username = '******'

        pool = Pool(num_pool)

        apps = get_apps()

        completed = set()
        app_ids = set(range(len(apps)))
        for app_id in sorted(app_ids.difference(completed)):
            #keep trying all the preindexes until they all complete satisfactorily.
            print "Trying to preindex view (%d/%d) %s" % (app_id, len(apps),
                                                          apps[app_id])
            pool.spawn(do_sync, app_id)

        # sshhhhhh: if we're using HQ also preindex the couch apps
        # this could probably be multithreaded too, but leaving for now
        try:
            from corehq.couchapps import sync_design_docs
        except ImportError:
            pass
        else:
            sync_design_docs(get_db(), temp="tmp")

        # same hack above for MVP
        try:
            from mvp_apps import sync_design_docs as mvp_sync
        except ImportError:
            pass
        else:
            mvp_sync(get_db(), temp="tmp")

        # same hack above for MVP
        try:
            from fluff.sync_couchdb import sync_design_docs as fluff_sync
        except ImportError:
            pass
        else:
            fluff_sync(temp="tmp")

        print "All apps loaded into jobs, waiting..."
        pool.join()
        print "All apps reported complete."

        message = "Preindex results:\n"
        message += "\tInitiated by: %s\n" % username

        delta = datetime.utcnow() - start
        message += "Total time: %d seconds" % delta.seconds
        print message

        send_mail('%s Preindex Complete' % settings.EMAIL_SUBJECT_PREFIX,
                  message,
                  settings.SERVER_EMAIL, [x[1] for x in settings.ADMINS],
                  fail_silently=True)
    def handle(self, *args, **options):

        start = datetime.utcnow()
        if len(args) == 0:
            num_pool = POOL_SIZE
        else:
            num_pool = int(args[0])

        if len(args) > 1:
            username = args[1]
        else:
            username = "******"

        pool = Pool(num_pool)

        apps = get_apps()

        completed = set()
        app_ids = set(range(len(apps)))
        for app_id in sorted(app_ids.difference(completed)):
            # keep trying all the preindexes until they all complete satisfactorily.
            print "Trying to preindex view (%d/%d) %s" % (app_id, len(apps), apps[app_id])
            pool.spawn(do_sync, app_id)

        # sshhhhhh: if we're using HQ also preindex the couch apps
        # this could probably be multithreaded too, but leaving for now
        try:
            from corehq.couchapps import sync_design_docs
        except ImportError:
            pass
        else:
            sync_design_docs(get_db(), temp="tmp")

        # same hack above for MVP
        try:
            from mvp_apps import sync_design_docs as mvp_sync
        except ImportError:
            pass
        else:
            mvp_sync(get_db(), temp="tmp")

        print "All apps loaded into jobs, waiting..."
        pool.join()
        print "All apps reported complete."

        # Git info
        message = "Preindex results:\n"
        message += "\tInitiated by: %s\n" % username

        delta = datetime.utcnow() - start
        message += "Total time: %d seconds" % delta.seconds
        print message

        # todo: customize this more for other users
        send_mail(
            "[commcare-hq] Preindex Complete",
            message,
            "*****@*****.**",
            ["*****@*****.**"],
            fail_silently=True,
        )