Example #1
    def handle(self, *args, **options):
        if len(args) < 2:
            raise CommandError('Usage is copy_case %s' % self.args)

        sourcedb = Database(args[0])
        case_id = args[1]
        domain = args[2] if len(args) > 2 else None

        print('getting case')
        case = CommCareCase.wrap(sourcedb.get(case_id))
        if domain is not None:
            case.domain = domain
        case.save(force_update=True)

        print('copying %s xforms' % len(case.xform_ids))

        def form_wrapper(row):
            doc = row['doc']
            doc.pop('_attachments', None)
            return XFormInstance.wrap(doc)

        xforms = sourcedb.all_docs(
            keys=case.xform_ids,
            include_docs=True,
            wrapper=form_wrapper,
        ).all()
        for form in xforms:
            if domain is not None:
                form.domain = domain
            form.save(force_update=True)
            print('saved %s' % form._id)
Example #2
    def testDbFromUri(self):
        db = self.Server.create_db("couchdbkit_test")

        db1 = Database("http://127.0.0.1:5984/couchdbkit_test")
        self.assertTrue(hasattr(db1, "dbname"))
        self.assertEqual(db1.dbname, "couchdbkit_test")
        info = db1.info()
        self.assertEqual(info["db_name"], "couchdbkit_test")
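The test above exercises the basic couchdbkit round trip: construct a Database from a URI, then read and write documents. A minimal standalone sketch, assuming a local CouchDB and a throwaway database (the document contents are placeholders):

from couchdbkit import Database

db = Database("http://127.0.0.1:5984/couchdbkit_test", create=True)

# store a document under an explicit _id, then fetch it back
db.save_doc({"_id": "doc-1", "doc_type": "Example", "value": 42})
doc = db.get("doc-1")
assert doc["value"] == 42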
Example #3
    @classmethod
    def setUpClass(cls):
        super(OverrideDBTest, cls).setUpClass()
        cls.other_db_1 = Database(settings.COUCH_DATABASE + '_foo-test',
                                  create=True)
        cls.other_db_2 = Database(settings.COUCH_DATABASE + '_foo-boo-test',
                                  create=True)
        cls.normal_db = CommCareCase.get_db()
        cls.normal_get_db = CommCareCase.get_db
Example #4
    def handle(self, *args, **options):
        if len(args) < 2:
            raise CommandError('Usage is copy_case %s' % self.args)

        sourcedb = Database(args[0])
        case_id = args[1]
        doc_ids = [case_id]

        domain = args[2] if len(args) > 2 else None

        def _migrate_case(case_id):
            print('getting case %s' % case_id)
            case = CommCareCase.wrap(sourcedb.get(case_id))
            original_domain = case.domain
            if domain is not None:
                case.domain = domain
            case.save(force_update=True)
            return case, original_domain

        case, orig_domain = _migrate_case(case_id)
        print('copying %s parent cases' % len(case.indices))
        for index in case.indices:
            _migrate_case(index.referenced_id)
            doc_ids.append(index.referenced_id)

        # hack, set the domain back to make sure we get the reverse indices correctly
        case.domain = orig_domain
        with OverrideDB(CommCareCase, sourcedb):
            child_indices = get_reverse_indices(case)
        print('copying %s child cases' % len(child_indices))
        for index in child_indices:
            _migrate_case(index.referenced_id)
            doc_ids.append(index.referenced_id)

        print('copying %s xforms' % len(case.xform_ids))

        def form_wrapper(row):
            doc = row['doc']
            doc.pop('_attachments', None)
            return XFormInstance.wrap(doc)

        xforms = sourcedb.all_docs(
            keys=case.xform_ids,
            include_docs=True,
            wrapper=form_wrapper,
        ).all()
        for form in xforms:
            if domain is not None:
                form.domain = domain
            form.save(force_update=True)
            print('saved %s' % form._id)
            doc_ids.append(form._id)

        if options['postgres_db']:
            copy_postgres_data_for_docs(options['postgres_db'], doc_ids)
Example #5
    def handle(self, *args, **options):
        raise CommandError(
            'copy_group_data is currently broken. '
            'Ask Danny or Ethan to fix it along the lines of '
            'https://github.com/dimagi/commcare-hq/pull/9180/files#diff-9d976dc051a36a028c6604581dfbce5dR95'
        )

        if len(args) != 2:
            raise CommandError('Usage is copy_group_data %s' % self.args)

        sourcedb = Database(args[0])
        group_id = args[1]
        exclude_user_owned = options["exclude_user_owned"]

        print('getting group')
        group = Group.wrap(sourcedb.get(group_id))
        group.save(force_update=True)

        print('getting domain')
        domain = Domain.wrap(
            sourcedb.view('domain/domains',
                          key=group.domain,
                          include_docs=True,
                          reduce=False,
                          limit=1).one()['doc'])
        dt = DocumentTransform(domain._obj, sourcedb)
        save(dt, Domain.get_db())

        owners = [group_id]
        if not exclude_user_owned:
            owners.extend(group.users)

        print('getting case ids')

        with OverrideDB(CommCareCase, sourcedb):
            case_ids = get_case_ids_in_domain_by_owner(domain.name,
                                                       owner_id__in=owners)

        xform_ids = set()

        print('copying %s cases' % len(case_ids))

        for i, subset in enumerate(chunked(case_ids, CHUNK_SIZE)):
            print(i * CHUNK_SIZE)
            cases = [
                CommCareCase.wrap(case['doc']) for case in sourcedb.all_docs(
                    keys=list(subset),
                    include_docs=True,
                )
            ]

            for case in cases:
                xform_ids.update(case.xform_ids)

            self.lenient_bulk_save(CommCareCase, cases)
Example #6
    def handle(self, *args, **options):
        if len(args) < 2 or len(args) > 3:
            raise CommandError('Usage is copy_doc %s' % self.args)

        sourcedb = Database(args[0])
        app_id = args[1]
        domain = args[2] if len(args) == 3 else None

        app_json = sourcedb.get(app_id)
        if domain:
            app_json['domain'] = domain
        dt = DocumentTransform(app_json, sourcedb)
        save(dt, get_db())
Example #7
def replicate(db_url, username, friend_name, friend_db_url):
    auth_filters = get_credentials()
    db = Database(db_url, filters=auth_filters)
    replicator_db = db.server['_replicator']

    # this describes the replication task
    replication_doc = {
        "source": friend_db_url,
        "target": db_url,
        "continuous": True,
        "filter": "things/from_friend",
        "query_params": {
            "friend": friend_name,
        }
    }

    # we try to delete an existing replication with the same Id
    # this would stop the replication
    try:
        del replicator_db[friend_name]
    except ResourceNotFound:
        pass

    # we store the replication task, which will automatically start it
    replication_id = '{src}-{target}'.format(src=friend_name, target=username)
    replicator_db[replication_id] = replication_doc
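A hypothetical invocation of replicate; the URLs and names below are placeholders, not values from the source:

replicate(
    db_url="http://127.0.0.1:5984/things",           # local target database
    username="alice",                                # used to build the replication id
    friend_name="bob",                               # passed to the filter as a query param
    friend_db_url="http://example.org:5984/things",  # remote source database
)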
Example #8
def get_node_dbs(nodes, dbname, username="******"):
    def node_url(proxy_url, node):
        return urlunparse(proxy_url._replace(netloc=f'{auth}@{node}'))

    proxy_url = urlparse(settings.COUCH_DATABASE)._replace(path=f"/{dbname}")
    auth = username + ":" + proxy_url.netloc.split('@')[0].split(":", 1)[1]
    return [Database(node_url(proxy_url, node)) for node in nodes]
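A sketch of how get_node_dbs might be called, assuming the cluster node addresses are already known; the hostnames below are placeholders:

node_dbs = get_node_dbs(
    nodes=["couch1.internal:5984", "couch2.internal:5984"],
    dbname="commcarehq",
)
for node_db in node_dbs:
    # talk to each node directly, bypassing the load-balancing proxy
    print(node_db.info()["db_name"])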
Example #9
    def handle(self, sourcedb, doc_ids_or_file, domain, **options):
        sourcedb = Database(sourcedb)

        if os.path.isfile(doc_ids_or_file):
            with open(doc_ids_or_file) as f:
                doc_ids = f.read().splitlines()
        else:
            doc_ids = doc_ids_or_file.split(',')

        print("Starting copy of {} docs".format(len(doc_ids)))
        for doc_id in doc_ids:
            print('Copying doc: {}'.format(doc_id))
            doc_json = sourcedb.get(doc_id)
            if domain:
                doc_json['domain'] = domain
            dt = DocumentTransform(doc_json, sourcedb)
            save(dt, get_db())
Example #10
    def handle(self, **options):
        # build a data structure indexing databases to relevant design docs
        db_label_map = defaultdict(set)

        # pull design docs from preindex plugins
        plugins = get_preindex_plugins()
        for plugin in plugins:
            for design in plugin.get_designs():
                if design.design_path:
                    db_label_map[design.db.uri].add(design.app_label)

        designs_to_delete = {}
        for db_uri in db_label_map:
            db = Database(db_uri)
            expected_designs = db_label_map[db_uri]
            design_docs = get_design_docs(db)
            found_designs = set(dd.name for dd in design_docs)
            to_delete = found_designs - expected_designs
            if to_delete:
                designs_to_delete[db] = [
                    ddoc._doc for ddoc in design_docs if ddoc.name in to_delete
                ]
                print('\ndeleting from {}:\n---------------------'.format(
                    db.dbname))
                print('\n'.join(sorted(to_delete)))

        if designs_to_delete:
            if options['noinput'] or input('\n'.join([
                    '\n\nReally delete all the above design docs?',
                    'If any of these views are actually live, bad things will happen. '
                    '(Type "delete designs" to continue):',
                    '',
            ])).lower() == 'delete designs':
                for db, design_docs in designs_to_delete.items():
                    for design_doc in design_docs:
                        # If we don't delete conflicts, then they take the place of the
                        # document when it's deleted. (That's how couch works.)
                        # This results in a huge reindex for an old conflicted version
                        # of a design doc we don't even want anymore.
                        delete_conflicts(db, design_doc['_id'])
                    db.delete_docs(design_docs)
            else:
                print('aborted!')
        else:
            print('database already completely pruned!')
Example #11
def _lookup_id_in_couch(doc_id):
    db_urls = [settings.COUCH_DATABASE] + list(settings.EXTRA_COUCHDB_DATABASES.values())
    for url in db_urls:
        db = Database(url)
        try:
            doc = db.get(doc_id)
        except ResourceNotFound:
            pass
        else:
            return {
                "doc": json.dumps(doc, indent=4, sort_keys=True),
                "doc_id": doc_id,
                "doc_type": doc.get('doc_type', 'Unknown'),
                "dbname": db.dbname,
            }
    return {
        "doc": "NOT FOUND",
        "doc_id": doc_id,
    }
Example #12
    def handle(self, *args, **options):
        # build a data structure indexing databases to relevant design docs
        db_label_map = defaultdict(set)

        # pull design docs from normal couchdbkit apps
        app_infos = [get_app_sync_info(app) for app in get_apps()]
        for info in app_infos:
            for design in info.designs:
                if design.design_path:
                    db_label_map[design.db.uri].add(design.app_label)

        # pull design docs from preindex plugins
        plugins = get_preindex_plugins()
        for plugin in plugins:
            for design in plugin.get_designs():
                if design.design_path:
                    db_label_map[design.db.uri].add(design.app_label)

        designs_to_delete = {}
        for db_uri in db_label_map:
            db = Database(db_uri)
            expected_designs = db_label_map[db_uri]
            design_docs = get_design_docs(db)
            found_designs = set(dd.name for dd in design_docs)
            to_delete = found_designs - expected_designs
            if to_delete:
                designs_to_delete[db] = [ddoc._doc for ddoc in design_docs if ddoc.name in to_delete]
                print('\ndeleting from {}:\n---------------------'.format(db.dbname))
                print('\n'.join(sorted(to_delete)))

        if designs_to_delete:
            if options['noinput'] or input('\n'.join([
                    '\n\nReally delete all the above design docs?',
                    'If any of these views are actually live, bad things will happen. '
                    '(Type "delete designs" to continue):',
                    '',
            ])).lower() == 'delete designs':
                for db, design_docs in designs_to_delete.items():
                    db.delete_docs(design_docs)
            else:
                print('aborted!')
        else:
            print('database already completely pruned!')
Example #13
    def handle(self, *args, **options):
        # build a data structure indexing databases to relevant design docs
        db_label_map = defaultdict(set)

        # pull design docs from preindex plugins
        plugins = get_preindex_plugins()
        for plugin in plugins:
            for design in plugin.get_designs():
                if design.design_path:
                    db_label_map[design.db.uri].add(design.app_label)

        designs_to_delete = {}
        for db_uri in db_label_map:
            db = Database(db_uri)
            expected_designs = db_label_map[db_uri]
            design_docs = get_design_docs(db)
            found_designs = set(dd.name for dd in design_docs)
            to_delete = found_designs - expected_designs
            if to_delete:
                designs_to_delete[db] = [ddoc._doc for ddoc in design_docs if ddoc.name in to_delete]
                print('\ndeleting from {}:\n---------------------'.format(db.dbname))
                print('\n'.join(sorted(to_delete)))

        if designs_to_delete:
            if options['noinput'] or input('\n'.join([
                    '\n\nReally delete all the above design docs?',
                    'If any of these views are actually live, bad things will happen. '
                    '(Type "delete designs" to continue):',
                    '',
            ])).lower() == 'delete designs':
                for db, design_docs in designs_to_delete.items():
                    for design_doc in design_docs:
                        # If we don't delete conflicts, then they take the place of the
                        # document when it's deleted. (That's how couch works.)
                        # This results in a huge reindex for an old conflicted version
                        # of a design doc we don't even want anymore.
                        delete_conflicts(db, design_doc['_id'])
                    db.delete_docs(design_docs)
            else:
                print('aborted!')
        else:
            print('database already completely pruned!')
Example #14
    def handle(self, *args, **options):
        if len(args) != 2:
            raise CommandError('Usage is copy_group_data %s' % self.args)

        sourcedb = Database(args[0])
        group_id = args[1]
        exclude_user_owned = options["exclude_user_owned"]

        print('getting group')
        group = Group.wrap(sourcedb.get(group_id))
        group.save(force_update=True)

        print('getting domain')
        domain = Domain.wrap(
            sourcedb.view('domain/domains', key=group.domain, include_docs=True,
                          reduce=False, limit=1).one()['doc']
        )
        dt = DocumentTransform(domain._obj, sourcedb)
        save(dt, Domain.get_db())

        owners = [group_id]
        if not exclude_user_owned:
            owners.extend(group.users)

        print('getting case ids')

        with OverrideDB(CommCareCase, sourcedb):
            case_ids = get_case_ids_in_domain_by_owner(
                domain.name, owner_id__in=owners)

        xform_ids = set()

        print('copying %s cases' % len(case_ids))

        for i, subset in enumerate(chunked(case_ids, CHUNK_SIZE)):
            print(i * CHUNK_SIZE)
            cases = [CommCareCase.wrap(case['doc']) for case in sourcedb.all_docs(
                keys=list(subset),
                include_docs=True,
            )]

            for case in cases:
                xform_ids.update(case.xform_ids)

            self.lenient_bulk_save(CommCareCase, cases)
Example #15
    def _get_designs(self):
        # Instantiate here to make sure that it's instantiated after the dbs settings
        # are patched for tests
        couch_config = CouchConfig()
        db = Database(
            couch_config.get_db_uri_for_app_label(self.app_config.label), create=True)
        return [
            DesignInfo(app_label=self.app_config.label,
                       db=db,
                       design_path=self.dir)
        ]
Example #16
def _get_couch_node_databases(db, node_port=COUCH_NODE_PORT):
    def node_url(proxy_url, node):
        return urlunparse(proxy_url._replace(netloc=f'{auth}@{node}:{node_port}'))

    resp = db.server._request_session.get(urljoin(db.server.uri, '/_membership'))
    resp.raise_for_status()
    membership = resp.json()
    nodes = [node.split("@")[1] for node in membership["cluster_nodes"]]
    proxy_url = urlparse(settings.COUCH_DATABASE)._replace(path=f"/{db.dbname}")
    auth = proxy_url.netloc.split('@')[0]
    return [Database(node_url(proxy_url, node)) for node in nodes]
Example #17
def doc_in_es(request):
    doc_id = request.GET.get("id")
    if not doc_id:
        return render(request, "hqadmin/doc_in_es.html", {})

    couch_doc = {}
    db_urls = [settings.COUCH_DATABASE] + list(settings.EXTRA_COUCHDB_DATABASES.values())
    for url in db_urls:
        try:
            couch_doc = Database(url).get(doc_id)
            break
        except ResourceNotFound:
            pass
    query = {"filter":
                {"ids": {
                    "values": [doc_id]}}}

    def to_json(doc):
        return json.dumps(doc, indent=4, sort_keys=True) if doc else "NOT FOUND!"

    found_indices = {}
    doc_type = couch_doc.get('doc_type')
    es_doc_type = None
    for index, url in ES_URLS.items():
        res = run_query(url, query)
        if 'hits' in res and res['hits']['total'] == 1:
            es_doc = res['hits']['hits'][0]['_source']
            found_indices[index] = to_json(es_doc)
            es_doc_type = es_doc_type or es_doc.get('doc_type')

    doc_type = doc_type or es_doc_type or 'Unknown'

    context = {
        "doc_id": doc_id,
        "status": "found" if found_indices else "NOT FOUND!",
        "doc_type": doc_type,
        "couch_doc": to_json(couch_doc),
        "found_indices": found_indices,
    }
    return render(request, "hqadmin/doc_in_es.html", context)
Example #18
    def handle(self, *args, **options):
        if len(args) < 2 or len(args) > 3:
            raise CommandError('Usage is copy_doc %s' % self.args)

        sourcedb = Database(args[0])
        doc_ids_or_file = args[1]
        domain = args[2] if len(args) == 3 else None

        if os.path.isfile(doc_ids_or_file):
            with open(doc_ids_or_file) as f:
                doc_ids = f.read().splitlines()
        else:
            doc_ids = doc_ids_or_file.split(',')

        print "Starting copy of {} docs".format(len(doc_ids))
        for doc_id in doc_ids:
            print('Copying doc: {}'.format(doc_id))
            doc_json = sourcedb.get(doc_id)
            if domain:
                doc_json['domain'] = domain
            dt = DocumentTransform(doc_json, sourcedb)
            save(dt, get_db())
Example #19
    def handle(self, *args, **options):
        if len(args) < 2 or len(args) > 3:
            raise CommandError('Usage is copy_doc %s' % self.args)

        sourcedb = Database(args[0])
        doc_ids_or_file = args[1]
        domain = args[2] if len(args) == 3 else None

        if os.path.isfile(doc_ids_or_file):
            with open(doc_ids_or_file) as f:
                doc_ids = f.read().splitlines()
        else:
            doc_ids = doc_ids_or_file.split(',')

        print("Starting copy of {} docs".format(len(doc_ids)))
        for doc_id in doc_ids:
            print('Copying doc: {}'.format(doc_id))
            doc_json = sourcedb.get(doc_id)
            if domain:
                doc_json['domain'] = domain
            dt = DocumentTransform(doc_json, sourcedb)
            save(dt, get_db())
Example #20
def replication_status(db_url):
    auth_filters = get_credentials()
    db = Database(db_url, filters=auth_filters)
    server = db.server

    # print a nice header
    header = '{:>10s} {:35s} => {:35s} {:>5s} {:>6s}'.format('Id', 'Source',
            'Target', 'Docs', 'Prog.')
    print(header)
    print('=' * len(header))

    # /_active_tasks has information about all running tasks (indexers,
    # replication, etc). We use it to get progress info for active
    # replication tasks
    for task in server.active_tasks():
        if task.get('type', None) != 'replication':
            continue

        print('{:>10s} {:35s} => {:35s} {:5d} {:5d}%'.format(
            task.get('doc_id', ''),
            task.get('source', ''),
            task.get('target', ''),
            task.get('docs_written', 0),
            task.get('progress', 0),
        ))

    # For information about failed replications (eg filter does not exist
    # at the source, or the source does not exist at all), we have to look
    # into the documents in the /_replicator database
    replicator_db = server['_replicator']

    for result in replicator_db.view('_all_docs', include_docs=True):

        # we're not interested in design documents
        if result['id'].startswith('_design/'):
            continue

        doc = result['doc']

        # all active (non-error) replication tasks have already been printed
        # above; we're only interested in those that failed
        if doc.get('_replication_state', None) != 'error':
            continue

        print('{:>10s} {:35s} => {:35s} {:>12s}'.format(
            result['id'],
            doc.get('source', ''),
            doc.get('target', ''),
            doc.get('_replication_state', ''),
        ))
Example #21
    def handle(self, *args, **options):
        if len(args) < 2:
            raise CommandError('Usage is copy_case %s' % self.args)

        sourcedb = Database(args[0])
        case_id = args[1]
        domain = args[2] if len(args) > 2 else None

        def _migrate_case(case_id):
            print('getting case %s' % case_id)
            case = CommCareCase.wrap(sourcedb.get(case_id))
            original_domain = case.domain
            if domain is not None:
                case.domain = domain
            case.save(force_update=True)
            return case, original_domain
Example #22
def main():
    parser = argparse.ArgumentParser(description='send some docs')
    parser.add_argument('path', help='folder containing docs to send')
    parser.add_argument('dburl',
                        nargs='?',
                        help='db url',
                        default='http://127.0.0.1:5984/posts')

    parser.add_argument('--log-level',
                        dest='loglevel',
                        default='info',
                        help="log level")
    parser.add_argument('--log-output',
                        dest='logoutput',
                        default='-',
                        help="log output")

    args = parser.parse_args()

    # configure the logger
    loglevel = LOG_LEVELS.get(args.loglevel.lower(), logging.INFO)
    logger.setLevel(loglevel)
    if args.logoutput == "-":
        h = logging.StreamHandler()
    else:
        h = logging.FileHandler(args.logoutput)
    fmt = logging.Formatter(LOG_FMT, LOG_DATE_FMT)
    h.setFormatter(fmt)
    logger.addHandler(h)

    path = os.path.normpath(os.path.join(os.getcwd(), args.path))
    if not os.path.exists(path):
        sys.stderr.write("Error: %r doesn't exists" % args.path)
        sys.stderr.flush()
        sys.exit(1)

    try:
        # create db
        db = Database(args.dburl, create=True, wait_tries=1.)

        # send posts
        pushposts(db, path)
    except Exception as e:
        sys.stderr.write("Error: %r" % e)
        sys.stderr.flush()
        sys.exit(1)
Example #23
def init_db(dburl):
    print('Initializing', dburl)

    print('Authenticating')
    filters = get_credentials()

    db = Database(dburl, filters=filters)
    server = db.server

    try:
        server.delete_db(db.dbname)
        print('Deleting', db.dbname)

    except ResourceNotFound:
        pass

    db = server.get_or_create_db(db.dbname)
    print('Created', db.dbname)
Example #24
def main(dburi, cables_path):
    db = Database(dburi, create=True)

    log.info("Start processing")

    extractor = Extractor(db, cables_path)

    docs = []
    for doc in extractor:
        if len(docs) == 100:
            log.info("Sending to CouchDB")
            save_docs(db, docs)
            docs = []
        docs.append(doc)

    if docs:
        log.info("Sending to CouchDB")
        save_docs(db, docs)
    log.info("%s cables processed." % extractor.processed)
    log.info("%s new cables." % extractor.new)
Example #25
    def handle(self, *args, **options):
        if len(args) != 2:
            raise CommandError("Usage is copy_group_data %s" % self.args)

        sourcedb = Database(args[0])
        group_id = args[1]

        print "getting group"
        group = Group.wrap(sourcedb.get(group_id))
        group.save(force_update=True)

        print "getting domain"
        domain = Domain.wrap(
            sourcedb.view("domain/domains", key=group.domain, include_docs=True, reduce=False, limit=1).one()["doc"]
        )
        domain.save(force_update=True)

        print "getting cases"
        cases = sourcedb.view(
            "hqcase/by_owner",
            keys=[[group.domain, group_id, False], [group.domain, group_id, True]],
            wrapper=lambda row: CommCareCase.wrap(row["doc"]),
            reduce=False,
            include_docs=True,
        ).all()
        self.lenient_bulk_save(CommCareCase, cases)

        print "compiling xform_ids"
        xform_ids = set()
        for case in cases:
            xform_ids.update(case.xform_ids)

        print "getting xforms"
        user_ids = set(group.users)
        CHUNK_SIZE = 100

        def form_wrapper(row):
            doc = row["doc"]
            doc.pop("_attachments", None)
            return XFormInstance.wrap(doc)

        for i, subset in enumerate(chunked(xform_ids, CHUNK_SIZE)):
            print(i * CHUNK_SIZE)
            xforms = sourcedb.all_docs(keys=list(subset), include_docs=True, wrapper=form_wrapper).all()
            self.lenient_bulk_save(XFormInstance, xforms)

            for xform in xforms:
                user_id = xform.metadata.userID
                user_ids.add(user_id)

        print "getting users"

        def wrap_user(row):
            doc = row["doc"]
            try:
                return CouchUser.wrap_correctly(doc)
            except Exception as e:
                logging.exception("trouble with user %s" % doc["_id"])
            return None

        users = sourcedb.all_docs(keys=list(user_ids), include_docs=True, wrapper=wrap_user).all()
        for user in users:
            # if we use bulk save, django user doesn't get sync'd
            user.save(force_update=True)
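Several of these commands batch document ids through a chunked helper imported from the surrounding project's utilities. A minimal sketch of such a helper, assuming all it needs to do is yield tuples of at most n items:

from itertools import islice

def chunked(iterable, n):
    """Yield successive tuples of at most n items from iterable."""
    it = iter(iterable)
    while True:
        chunk = tuple(islice(it, n))
        if not chunk:
            return
        yield chunk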
Example #26
    def __init__(self):
        self.db = Database(settings['COUCHDB_URL'])
Example #27
    def handle(self, *args, **options):
        if len(args) != 2:
            raise CommandError('Usage is copy_group_data %s' % self.args)

        sourcedb = Database(args[0])
        group_id = args[1]
        include_user_owned = options["include_user_owned"]

        print('getting group')
        group = Group.wrap(sourcedb.get(group_id))
        group.save(force_update=True)

        print('getting domain')
        domain = Domain.wrap(
            sourcedb.view('domain/domains',
                          key=group.domain,
                          include_docs=True,
                          reduce=False,
                          limit=1).one()['doc'])
        domain.save(force_update=True)

        owners = [group_id]
        if include_user_owned:
            owners.extend(group.users)

        def keys_for_owner(domain, owner_id):
            return [
                [domain, owner_id, False],
                [domain, owner_id, True],
            ]

        def get_case_ids(owners):
            keys = list(
                itertools.chain(*[
                    keys_for_owner(domain.name, owner_id)
                    for owner_id in owners
                ]))
            results = sourcedb.view(
                'hqcase/by_owner',
                keys=keys,
                reduce=False,
                include_docs=False,
            )
            return [res['id'] for res in results]

        CHUNK_SIZE = 100
        print('getting case ids')

        case_ids = get_case_ids(owners)
        xform_ids = set()

        print('copying %s cases' % len(case_ids))

        for i, subset in enumerate(chunked(case_ids, CHUNK_SIZE)):
            print(i * CHUNK_SIZE)
            cases = [
                CommCareCase.wrap(case['doc']) for case in sourcedb.all_docs(
                    keys=list(subset),
                    include_docs=True,
                )
            ]

            for case in cases:
                xform_ids.update(case.xform_ids)

            self.lenient_bulk_save(CommCareCase, cases)
Example #28
def sync_ddocs(dburl):
    auth_filters = get_credentials()
    db = Database(dburl, filters=auth_filters)

    loader = FileSystemDocsLoader('_design')
    loader.sync(db, verbose=True)
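A hypothetical call, with a placeholder URL; this pushes every design document found under the local _design directory into the target database:

sync_ddocs("http://127.0.0.1:5984/things")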
Example #29
from datetime import datetime

from couchdbkit import Document, StringProperty, DateTimeProperty, Database
from settings import DB_URL
from utils import doc_repr

db = Database(DB_URL)


class Thing(Document):
    owner = StringProperty(required=True)
    name = StringProperty(required=True)

    __repr__ = doc_repr


Thing.set_db(db)


class Lending(Document):
    thing = StringProperty(required=True)
    owner = StringProperty(required=True)
    to_user = StringProperty(required=True)
    lent = DateTimeProperty(default=datetime.now)
    returned = DateTimeProperty()

    __repr__ = doc_repr


Lending.set_db(db)
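A minimal usage sketch for these models, assuming the database at DB_URL exists; the field values are placeholders:

thing = Thing(owner="alice", name="bicycle")
thing.save()

lending = Lending(thing=thing._id, owner="alice", to_user="bob")
lending.save()  # `lent` is filled in by its datetime.now default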
Example #30
    @classmethod
    def setUpClass(cls):
        super(CachedCouchDbTest, cls).setUpClass()
        # this will ensure the database is created
        Database(settings.COUCH_DATABASE, create=True)
Example #31
    def handle(self, *args, **options):
        if len(args) != 2:
            raise CommandError('Usage is copy_group_data %s' % self.args)

        sourcedb = Database(args[0])
        group_id = args[1]

        print('getting group')
        group = Group.wrap(sourcedb.get(group_id))
        group.save(force_update=True)

        print('getting domain')
        domain = Domain.wrap(
            sourcedb.view('domain/domains',
                          key=group.domain,
                          include_docs=True,
                          reduce=False,
                          limit=1).one()['doc'])
        domain.save(force_update=True)

        print('getting cases')
        cases = sourcedb.view(
            'hqcase/by_owner',
            keys=[
                [group.domain, group_id, False],
                [group.domain, group_id, True],
            ],
            wrapper=lambda row: CommCareCase.wrap(row['doc']),
            reduce=False,
            include_docs=True).all()
        self.lenient_bulk_save(CommCareCase, cases)

        print('compiling xform_ids')
        xform_ids = set()
        for case in cases:
            xform_ids.update(case.xform_ids)

        print('getting xforms')
        user_ids = set(group.users)
        CHUNK_SIZE = 100

        def form_wrapper(row):
            doc = row['doc']
            doc.pop('_attachments', None)
            return XFormInstance.wrap(doc)

        for i, subset in enumerate(chunked(xform_ids, CHUNK_SIZE)):
            print(i * CHUNK_SIZE)
            xforms = sourcedb.all_docs(
                keys=list(subset),
                include_docs=True,
                wrapper=form_wrapper,
            ).all()
            self.lenient_bulk_save(XFormInstance, xforms)

            for xform in xforms:
                user_id = xform.metadata.userID
                user_ids.add(user_id)

        print('getting users')

        def wrap_user(row):
            doc = row['doc']
            try:
                return CouchUser.wrap_correctly(doc)
            except Exception as e:
                logging.exception('trouble with user %s' % doc['_id'])
            return None

        users = sourcedb.all_docs(keys=list(user_ids),
                                  include_docs=True,
                                  wrapper=wrap_user).all()
        for user in users:
            # if we use bulk save, django user doesn't get sync'd
            user.save(force_update=True)
Example #32
    def handle(self, *args, **options):
        if len(args) != 2:
            raise CommandError('Usage is copy_group_data %s' % self.args)

        sourcedb = Database(args[0])
        group_id = args[1]
        exclude_user_owned = options["exclude_user_owned"]

        print('getting group')
        group = Group.wrap(sourcedb.get(group_id))
        group.save(force_update=True)

        print('getting domain')
        domain = Domain.wrap(
            sourcedb.view('domain/domains', key=group.domain, include_docs=True,
                          reduce=False, limit=1).one()['doc']
        )
        dt = DocumentTransform(domain._obj, sourcedb)
        save(dt, Domain.get_db())

        owners = [group_id]
        if not exclude_user_owned:
            owners.extend(group.users)

        def keys_for_owner(domain, owner_id):
            return [
                [domain, owner_id, False],
                [domain, owner_id, True],
            ]

        def get_case_ids(owners):
            keys = list(itertools.chain(*[keys_for_owner(domain.name, owner_id) for owner_id in owners]))
            results = sourcedb.view(
                'hqcase/by_owner',
                keys=keys,
                reduce=False,
                include_docs=False,
            )
            return [res['id'] for res in results]

        print('getting case ids')

        case_ids = get_case_ids(owners)
        xform_ids = set()

        print('copying %s cases' % len(case_ids))

        for i, subset in enumerate(chunked(case_ids, CHUNK_SIZE)):
            print(i * CHUNK_SIZE)
            cases = [CommCareCase.wrap(case['doc']) for case in sourcedb.all_docs(
                keys=list(subset),
                include_docs=True,
            )]

            for case in cases:
                xform_ids.update(case.xform_ids)

            self.lenient_bulk_save(CommCareCase, cases)

        if not exclude_user_owned:
            # also grab submissions that may not have included any case data
            for user_id in group.users:
                xform_ids.update(res['id'] for res in sourcedb.view(
                    'reports_forms/all_forms',
                    startkey=['submission user', domain.name, user_id],
                    endkey=['submission user', domain.name, user_id, {}],
                    reduce=False
                ))

        print('copying %s xforms' % len(xform_ids))
        user_ids = set(group.users)

        def form_wrapper(row):
            doc = row['doc']
            doc.pop('_attachments', None)
            return XFormInstance.wrap(doc)

        for i, subset in enumerate(chunked(xform_ids, CHUNK_SIZE)):
            print(i * CHUNK_SIZE)
            xforms = sourcedb.all_docs(
                keys=list(subset),
                include_docs=True,
                wrapper=form_wrapper,
            ).all()
            self.lenient_bulk_save(XFormInstance, xforms)

            for xform in xforms:
                user_id = xform.metadata.userID
                user_ids.add(user_id)

        print('copying %s users' % len(user_ids))

        def wrap_user(row):
            try:
                doc = row['doc']
            except KeyError:
                logging.exception('trouble with user result %r' % row)
                return None

            try:
                return CouchUser.wrap_correctly(doc)
            except Exception:
                logging.exception('trouble with user %s' % doc['_id'])
                return None

        users = sourcedb.all_docs(
            keys=list(user_ids),
            include_docs=True,
            wrapper=wrap_user,
        ).all()

        role_ids = set()
        for user in filter(lambda u: u is not None, users):
            # if we use bulk save, django user doesn't get sync'd
            if user.get_domain_membership(domain.name).role_id:
                role_ids.add(user.domain_membership.role_id)
            user.save(force_update=True)

        print('copying %s roles' % len(role_ids))
        for i, subset in enumerate(chunked(role_ids, CHUNK_SIZE)):
            roles = [UserRole.wrap(role['doc']) for role in sourcedb.all_docs(
                keys=list(subset),
                include_docs=True,
            )]
            self.lenient_bulk_save(UserRole, roles)

        if options['include_sync_logs']:
            print('copying sync logs')
            for user_id in user_ids:
                log_ids = [res['id'] for res in sourcedb.view("phone/sync_logs_by_user",
                    startkey=[user_id, {}],
                    endkey=[user_id],
                    descending=True,
                    reduce=False,
                    include_docs=True
                )]
                print('user: %s, logs: %s' % (user_id, len(log_ids)))
                for i, subset in enumerate(chunked(log_ids, CHUNK_SIZE)):
                    print(i * CHUNK_SIZE)
                    logs = [SyncLog.wrap(log['doc']) for log in sourcedb.all_docs(
                        keys=list(subset),
                        include_docs=True,
                    )]
                    self.lenient_bulk_save(SyncLog, logs)
Example #33
    def handle(self, *args, **options):
        raise CommandError(
            'copy_group_data is currently broken. '
            'Ask Danny or Ethan to fix it along the lines of '
            'https://github.com/dimagi/commcare-hq/pull/9180/files#diff-9d976dc051a36a028c6604581dfbce5dR95'
        )

        if len(args) != 2:
            raise CommandError('Usage is copy_group_data %s' % self.args)

        sourcedb = Database(args[0])
        group_id = args[1]
        exclude_user_owned = options["exclude_user_owned"]

        print('getting group')
        group = Group.wrap(sourcedb.get(group_id))
        group.save(force_update=True)

        print('getting domain')
        domain = Domain.wrap(
            sourcedb.view('domain/domains', key=group.domain, include_docs=True,
                          reduce=False, limit=1).one()['doc']
        )
        dt = DocumentTransform(domain._obj, sourcedb)
        save(dt, Domain.get_db())

        owners = [group_id]
        if not exclude_user_owned:
            owners.extend(group.users)

        print('getting case ids')

        with OverrideDB(CommCareCase, sourcedb):
            case_ids = get_case_ids_in_domain_by_owner(
                domain.name, owner_id__in=owners)

        xform_ids = set()

        print('copying %s cases' % len(case_ids))

        for i, subset in enumerate(chunked(case_ids, CHUNK_SIZE)):
            print(i * CHUNK_SIZE)
            cases = [CommCareCase.wrap(case['doc']) for case in sourcedb.all_docs(
                keys=list(subset),
                include_docs=True,
            )]

            for case in cases:
                xform_ids.update(case.xform_ids)

            self.lenient_bulk_save(CommCareCase, cases)

        if not exclude_user_owned:
            # also grab submissions that may not have included any case data
            for user_id in group.users:
                xform_ids.update(res['id'] for res in sourcedb.view(
                    'all_forms/view',
                    startkey=['submission user', domain.name, user_id],
                    endkey=['submission user', domain.name, user_id, {}],
                    reduce=False
                ))

        print('copying %s xforms' % len(xform_ids))
        user_ids = set(group.users)

        def form_wrapper(row):
            doc = row['doc']
            doc.pop('_attachments', None)
            doc.pop('external_blobs', None)
            return XFormInstance.wrap(doc)

        for i, subset in enumerate(chunked(xform_ids, CHUNK_SIZE)):
            print(i * CHUNK_SIZE)
            xforms = sourcedb.all_docs(
                keys=list(subset),
                include_docs=True,
                wrapper=form_wrapper,
            ).all()
            self.lenient_bulk_save(XFormInstance, xforms)

            for xform in xforms:
                user_id = xform.metadata.userID
                user_ids.add(user_id)

        print('copying %s users' % len(user_ids))

        def wrap_user(row):
            try:
                doc = row['doc']
            except KeyError:
                logging.exception('trouble with user result %r' % row)
                return None

            try:
                return CouchUser.wrap_correctly(doc)
            except Exception:
                logging.exception('trouble with user %s' % doc['_id'])
                return None

        users = sourcedb.all_docs(
            keys=list(user_ids),
            include_docs=True,
            wrapper=wrap_user,
        ).all()

        role_ids = set()
        for user in filter(lambda u: u is not None, users):
            # if we use bulk save, django user doesn't get sync'd
            domain_membership = user.get_domain_membership(domain.name)
            if domain_membership and domain_membership.role_id:
                role_ids.add(user.domain_membership.role_id)
            user.save(force_update=True)

        print('copying %s roles' % len(role_ids))
        for i, subset in enumerate(chunked(role_ids, CHUNK_SIZE)):
            roles = [UserRole.wrap(role['doc']) for role in sourcedb.all_docs(
                keys=list(subset),
                include_docs=True,
            )]
            self.lenient_bulk_save(UserRole, roles)

        if options['include_sync_logs']:
            print('copying sync logs')
            for user_id in user_ids:
                log_ids = [res['id'] for res in sourcedb.view("phone/sync_logs_by_user",
                    startkey=[user_id, {}],
                    endkey=[user_id],
                    descending=True,
                    reduce=False,
                    include_docs=True
                )]
                print('user: %s, logs: %s' % (user_id, len(log_ids)))
                for i, subset in enumerate(chunked(log_ids, CHUNK_SIZE)):
                    print(i * CHUNK_SIZE)
                    logs = [SyncLog.wrap(log['doc']) for log in sourcedb.all_docs(
                        keys=list(subset),
                        include_docs=True,
                    )]
                    self.lenient_bulk_save(SyncLog, logs)