Example #1
    def get_export_files(self, format='', previous_export_id=None, filter=None,
                         use_cache=True, max_column_size=2000, separator='|', process=None, **kwargs):
        # the APIs of how these methods are broken down suck, but at least
        # it's DRY
        from couchexport.export import get_writer, get_export_components, get_headers, get_formatted_rows
        from django.core.cache import cache
        import hashlib

        export_tag = self.index

        CACHE_TIME = 1 * 60 * 60 # cache for 1 hour, in seconds

        def _build_cache_key(tag, prev_export_id, format, max_column_size):
            def _human_readable_key(tag, prev_export_id, format, max_column_size):
                return "couchexport_:%s:%s:%s:%s" % (tag, prev_export_id, format, max_column_size)
            return hashlib.md5(_human_readable_key(
                tag, prev_export_id, format, max_column_size
            ).encode('utf-8')).hexdigest()  # md5 requires bytes on Python 3

        # check the cache; caching is currently only supported for filterless queries
        cache_key = _build_cache_key(export_tag, previous_export_id, format, max_column_size)
        if use_cache and filter is None:
            cached_data = cache.get(cache_key)
            if cached_data:
                (tmp, checkpoint) = cached_data
                return ExportFiles(tmp, checkpoint)

        fd, path = tempfile.mkstemp()
        with os.fdopen(fd, 'wb') as tmp:
            schema_index = export_tag
            config, updated_schema, export_schema_checkpoint = get_export_components(schema_index,
                                                                                     previous_export_id, filter)
            if config:
                writer = get_writer(format)

                # get cleaned up headers
                formatted_headers = self.remap_tables(get_headers(updated_schema, separator=separator))
                writer.open(formatted_headers, tmp, max_column_size=max_column_size)

                total_docs = len(config.potentially_relevant_ids)
                if process:
                    DownloadBase.set_progress(process, 0, total_docs)
                for i, doc in config.enum_docs():
                    if self.transform:
                        doc = self.transform(doc)

                    writer.write(self.remap_tables(get_formatted_rows(
                        doc, updated_schema, include_headers=False,
                        separator=separator)))
                    if process:
                        DownloadBase.set_progress(process, i + 1, total_docs)
                writer.close()

            checkpoint = export_schema_checkpoint

        if checkpoint:
            if use_cache:
                cache.set(cache_key, (path, checkpoint), CACHE_TIME)
            return ExportFiles(path, checkpoint)

        return None
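A note on the shared pattern: every snippet in this listing reports progress through DownloadBase.set_progress(handle, current, total), where the handle is a celery task object (often the task function itself) or None. A minimal stand-in for that contract, useful for exercising such code in tests, might look like the sketch below; FakeDownloadBase is an assumption for illustration, not soil's real implementation.

class FakeDownloadBase(object):
    # Sketch of the observable set_progress contract, assuming only what
    # the examples show: set_progress(handle, current, total), with a
    # falsy handle meaning "no progress reporting".
    progress = {}

    @classmethod
    def set_progress(cls, task, current, total):
        if task:  # the examples guard on a falsy handle the same way
            cls.progress[id(task)] = (current, total)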
Example #2
def _write_export_instance(writer,
                           export_instance,
                           documents,
                           progress_tracker=None):
    """
    Write rows to the given open _Writer.
    Rows will be written to each table in the export instance for each of
    the given documents.
    :param writer: An open _Writer
    :param export_instance: An ExportInstance
    :param documents: A ScanResult, or if progress_tracker is None, any iterable yielding documents
    :param progress_tracker: A task for soil to track progress against
    :return: None
    """
    if progress_tracker:
        DownloadBase.set_progress(progress_tracker, 0, documents.count)

    for row_number, doc in enumerate(documents):
        for table in export_instance.selected_tables:
            rows = table.get_rows(
                doc,
                row_number,
                split_columns=export_instance.split_multiselects,
                transform_dates=export_instance.transform_dates,
            )
            for row in rows:
                # It might be bad to write one row at a time when you can do more (from a performance perspective)
                # Regardless, we should handle the batching of rows in the _Writer class, not here.
                writer.write(table, row)
        if progress_tracker:
            DownloadBase.set_progress(progress_tracker, row_number + 1,
                                      documents.count)
Example #3
def prime_restore(user_ids, version=V1, cache_timeout=None, overwrite_cache=False):
    from corehq.apps.ota.views import get_restore_response
    total = len(user_ids)
    DownloadBase.set_progress(prime_restore, 0, total)

    ret = {'messages': []}
    for i, user_id in enumerate(user_ids):
        try:
            couch_user = CommCareUser.get(user_id)
        except ResourceNotFound:
            ret['messages'].append('User not found: {}'.format(user_id))
            continue

        try:
            get_restore_response(
                couch_user.domain,
                couch_user,
                since=None,
                version=version,
                force_cache=True,
                cache_timeout=cache_timeout,
                overwrite_cache=overwrite_cache
            )
        except Exception as e:
            ret['messages'].append('Error processing user: {}'.format(str(e)))

        DownloadBase.set_progress(prime_restore, i + 1, total)

    return ret
Example #4
 def _increment_progress(self):
     if self._location_count is None:
         self._location_count = self.base_query.count()
         self._progress_update_chunksize = max(10, self._location_count // 100)
     self._locations_exported += 1
     if self._locations_exported % self._progress_update_chunksize == 0:
         DownloadBase.set_progress(self.async_task, self._locations_exported, self._location_count)
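The chunking arithmetic above caps progress writes at roughly one per percent of the total, with a floor of every tenth item. Restated as a standalone helper (names are illustrative):

def progress_chunksize(total, target_updates=100, minimum=10):
    # At most ~target_updates progress writes per run, but never more
    # often than every `minimum` items -- the same max(10, n // 100).
    return max(minimum, total // target_updates)

assert progress_chunksize(25000) == 250  # one update per 250 locations
assert progress_chunksize(300) == 10     # small runs still throttle to 10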
Example #5
def fixture_upload_async(domain, download_id, replace):
    task = fixture_upload_async
    DownloadBase.set_progress(task, 0, 100)
    download_ref = DownloadBase.get(download_id)
    result = upload_fixture_file(domain, download_ref.get_filename(), replace, task)
    DownloadBase.set_progress(task, 100, 100)
    return {"messages": result}
Example #6
def download_locations_async(domain, download_id, include_consumption=False):
    DownloadBase.set_progress(download_locations_async, 0, 100)
    dump_locations(domain,
                   download_id,
                   include_consumption=include_consumption,
                   task=download_locations_async)
    DownloadBase.set_progress(download_locations_async, 100, 100)
Example #7
def dump_users_and_groups(domain, download_id, user_filters, task, owner_id):

    domains_list = user_filters['domains']

    users_groups_count = 0
    groups = set()
    for current_domain in domains_list:
        group_memoizer = load_memoizer(current_domain)
        users_groups_count += count_users_and_groups(current_domain, user_filters, group_memoizer)
        groups.update(group_memoizer.groups)

    DownloadBase.set_progress(task, 0, users_groups_count)

    user_headers, user_rows = parse_mobile_users(
        domain,
        user_filters,
        task,
        users_groups_count,
    )

    group_headers, group_rows = parse_groups(groups)
    headers = [
        ('users', [user_headers]),
        ('groups', [group_headers]),
    ]
    rows = [
        ('users', user_rows),
        ('groups', group_rows),
    ]

    filename = "{}_users_{}.xlsx".format(domain, uuid.uuid4().hex)
    _dump_xlsx_and_expose_download(filename, headers, rows, download_id, task, users_groups_count, owner_id)
Example #8
 def add_progress(self, count=1):
     self.progress += count
     if self.task:
         DownloadBase.set_progress(self.task, self.progress, self.total_rows)
     if datetime.now() > self.last_update + timedelta(seconds=5):
         self.log("processed %s / %s", self.progress, self.total_rows)
         self.last_update = datetime.now()
Example #9
def bulk_download_users_async(domain, download_id, user_filters):
    from corehq.apps.users.bulkupload import dump_users_and_groups, GroupNameError
    DownloadBase.set_progress(bulk_download_users_async, 0, 100)
    errors = []
    try:
        dump_users_and_groups(domain, download_id, user_filters)
    except GroupNameError as e:
        group_urls = [
            reverse('group_members', args=[domain, group.get_id])
            for group in e.blank_groups
        ]

        def make_link(url, i):
            return format_html('<a href="{}" target="_blank">{}</a>', url,
                               _('Blank Group %s') % i)

        group_links = [
            make_link(url, i + 1) for i, url in enumerate(group_urls)
        ]
        errors.append(
            format_html(
                _('The following groups have no name. '
                  'Please name them before continuing: {}'),
                mark_safe(', '.join(group_links))))
    except BulkFetchException:
        errors.append(_('Error exporting data. Please try again later.'))

    DownloadBase.set_progress(bulk_download_users_async, 100, 100)
    return {'errors': errors}
Example #10
def explode_cases(user_id, domain, factor, task=None):
    user = CommCareUser.get_by_user_id(user_id, domain)
    messages = list()
    if task:
        DownloadBase.set_progress(explode_case_task, 0, 0)
    count = 0

    old_to_new = dict()
    child_cases = list()
    accessor = CaseAccessors(domain)

    case_ids = accessor.get_case_ids_by_owners(user.get_owner_ids(),
                                               closed=False)
    cases = accessor.iter_cases(case_ids)

    # copy parents
    for case in cases:
        # skip over user as a case
        if case.type == USERCASE_TYPE:
            continue
        # save children for later
        if case.indices:
            child_cases.append(case)
            continue
        old_to_new[case.case_id] = list()
        for i in range(factor - 1):
            new_case_id = uuid.uuid4().hex
            # add new parent ids to the old to new id mapping
            old_to_new[case.case_id].append(new_case_id)
            submit_case(case, new_case_id, domain)
            count += 1
            if task:
                DownloadBase.set_progress(explode_case_task, count, 0)
Example #11
def import_locations(domain, worksheet, update_existing=False, task=None):
    fields = worksheet.headers

    data = list(worksheet)

    hierarchy_fields = []
    loc_types = defined_location_types(domain)
    for field in fields:
        if field in loc_types:
            hierarchy_fields.append(field)
        else:
            break
    property_fields = fields[len(hierarchy_fields):]

    if not hierarchy_fields:
        yield 'missing location hierarchy-related fields in left columns. aborting import'
        return

    loc_cache = LocationCache(domain)
    for index, loc in enumerate(data):
        if task:
            DownloadBase.set_progress(task, index, len(data))

        for m in import_location(domain, loc, hierarchy_fields, property_fields, update_existing, loc_cache):
            yield m
Example #12
def import_products_async(domain, file_ref_id):
    task = import_products_async
    DownloadBase.set_progress(task, 0, 100)
    download_ref = DownloadBase.get(file_ref_id)
    results = import_products(domain, download_ref, task)
    DownloadBase.set_progress(task, 100, 100)
    return {'messages': results}
Example #13
def import_products(domain, download, task):
    messages = []
    products = []
    data = download.get_content().split('\n')
    processed = 0
    total_rows = len(data) - 1
    reader = csv.DictReader(data)
    for row in reader:
        try:
            p = Product.from_csv(row)
            if p:
                if p.domain:
                    if p.domain != domain:
                        messages.append(
                            _("Product {product_name} belongs to another domain and was not updated").format(
                                product_name=p.name
                            )
                        )
                        continue
                else:
                    p.domain = domain
                products.append(p)
            if task:
                processed += 1
                DownloadBase.set_progress(task, processed, total_rows)
        except Exception as e:
            messages.append(str(e))
Example #14
 def _increment_progress(self):
     if self._location_count is None:
         self._location_count = SQLLocation.active_objects.filter(domain=self.domain).count()
         self._progress_update_chunksize = max(10, self._location_count // 100)
     self._locations_exported += 1
     if self._locations_exported % self._progress_update_chunksize == 0:
         DownloadBase.set_progress(self.async_task, self._locations_exported, self._location_count)
Example #15
def parse_web_users(domain, task=None, total_count=None):
    user_dicts = []
    max_location_length = 0
    location_cache = LocationIdToSiteCodeCache(domain)
    for n, user in enumerate(get_all_user_rows(domain, include_web_users=True, include_mobile_users=False,
                                               include_inactive=False, include_docs=True)):
        user_dict = make_web_user_dict(user, location_cache, domain)
        user_dicts.append(user_dict)
        max_location_length = max(max_location_length, len(user_dict["location_code"]))
        if task:
            DownloadBase.set_progress(task, n, total_count)
    for m, invite in enumerate(Invitation.by_domain(domain)):
        user_dict = make_invited_web_user_dict(invite, location_cache)
        user_dicts.append(user_dict)
        if task:
            DownloadBase.set_progress(task, n + m, total_count)

    user_headers = [
        'username', 'first_name', 'last_name', 'email', 'role', 'last_access_date (read only)',
        'last_login (read only)', 'status', 'remove'
    ]
    if domain_has_privilege(domain, privileges.LOCATIONS):
        user_headers.extend(json_to_headers(
            {'location_code': list(range(1, max_location_length + 1))}
        ))
    return user_headers, get_user_rows(user_dicts, user_headers)
Example #16
def dump_usernames(domain, download_id, user_filters, task, owner_id):
    domains_list = [domain]
    if 'domains' in user_filters:
        domains_list = user_filters['domains']  # for instances of multi-domain download
    users_count = 0
    for download_domain in domains_list:
        users_count += get_commcare_users_by_filters(download_domain,
                                                     user_filters,
                                                     count_only=True)
    DownloadBase.set_progress(task, 0, users_count)

    usernames = []
    for download_domain in domains_list:
        usernames += get_mobile_usernames_by_filters(download_domain,
                                                     user_filters)

    headers = [('users', [['username']])]
    rows = [('users', [[username] for username in usernames])]
    location_id = user_filters.get('location_id')
    location_name = ""
    if location_id:
        location = SQLLocation.active_objects.get_or_None(
            location_id=location_id)
        location_name = location.name if location else ""
    filename_prefix = "_".join([a for a in [domain, location_name] if bool(a)])
    filename = "{}_users.xlsx".format(filename_prefix)
    _dump_xlsx_and_expose_download(filename, headers, rows, download_id, task,
                                   users_count, owner_id)
Example #17
def fixture_upload_async(domain,
                         download_id,
                         replace,
                         skip_orm,
                         user_email=None):
    task = fixture_upload_async
    DownloadBase.set_progress(task, 0, 100)
    download_ref = DownloadBase.get(download_id)
    time_start = datetime.datetime.now()
    result = upload_fixture_file(domain, download_ref.get_filename(), replace,
                                 task, skip_orm)
    time_end = datetime.datetime.now()
    DownloadBase.set_progress(task, 100, 100)
    messages = {
        'success': result.success,
        'messages': result.messages,
        'errors': result.errors,
        'number_of_fixtures': result.number_of_fixtures
    }
    if user_email:
        send_upload_fixture_complete_email(user_email, domain, time_start,
                                           time_end, messages)
    return {
        'messages': messages,
    }
Example #18
def write_export_instance(writer, export_instance, documents, progress_tracker=None):
    """
    Write rows to the given open _Writer.
    Rows will be written to each table in the export instance for each of
    the given documents.
    :param writer: An open _Writer
    :param export_instance: An ExportInstance
    :param documents: An iterable yielding documents
    :param progress_tracker: A task for soil to track progress against
    :return: None
    """
    if progress_tracker:
        DownloadBase.set_progress(progress_tracker, 0, documents.count)

    start = _time_in_milliseconds()
    total_bytes = 0
    total_rows = 0
    compute_total = 0
    write_total = 0

    for row_number, doc in enumerate(documents):
        total_bytes += sys.getsizeof(doc)
        for table in export_instance.selected_tables:
            compute_start = _time_in_milliseconds()
            try:
                rows = table.get_rows(
                    doc,
                    row_number,
                    split_columns=export_instance.split_multiselects,
                    transform_dates=export_instance.transform_dates,
                )
            except Exception as e:
                notify_exception(None, "Error exporting doc", details={
                    'domain': export_instance.domain,
                    'export_instance_id': export_instance.get_id,
                    'export_table': table.label,
                    'doc_id': doc.get('_id'),
                })
                e.sentry_capture = False
                raise
            compute_total += _time_in_milliseconds() - compute_start

            write_start = _time_in_milliseconds()
            for row in rows:
                # It might be bad to write one row at a time when you can do more (from a performance perspective)
                # Regardless, we should handle the batching of rows in the _Writer class, not here.
                writer.write(table, row)
            write_total += _time_in_milliseconds() - write_start

            total_rows += len(rows)

        if progress_tracker:
            DownloadBase.set_progress(progress_tracker, row_number + 1, documents.count)

    end = _time_in_milliseconds()
    tags = ['format:{}'.format(writer.format)]
    _record_datadog_export_write_rows(write_total, total_bytes, total_rows, tags)
    _record_datadog_export_compute_rows(compute_total, total_bytes, total_rows, tags)
    _record_datadog_export_duration(end - start, total_bytes, total_rows, tags)
    _record_export_duration(end - start, export_instance)
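_time_in_milliseconds is used above but never shown. A plausible definition, offered as an assumption, is simply wall-clock time scaled to integer milliseconds:

import time

def _time_in_milliseconds():
    # Coarse wall-clock timing is enough for the write/compute totals
    # recorded above; a monotonic clock would also work.
    return int(time.time() * 1000)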
Example #19
def build_form_multimedia_zip(
        domain,
        export_id,
        datespan,
        user_types,
        download_id,
        owner_id,
):
    from corehq.apps.export.models import FormExportInstance
    export = FormExportInstance.get(export_id)
    form_ids = get_form_ids_having_multimedia(
        domain, export.app_id, export.xmlns, datespan, user_types
    )
    forms_info = _get_form_attachment_info(domain, form_ids, export)

    num_forms = len(forms_info)
    DownloadBase.set_progress(build_form_multimedia_zip, 0, num_forms)

    all_case_ids = set.union(*(info['case_ids'] for info in forms_info)) if forms_info else set()
    case_id_to_name = _get_case_names(domain, all_case_ids)

    with TransientTempfile() as temp_path:
        with open(temp_path, 'wb') as f:
            _write_attachments_to_file(temp_path, num_forms, forms_info, case_id_to_name)
        with open(temp_path, 'rb') as f:
            zip_name = 'multimedia-{}'.format(unidecode(export.name))
            _save_and_expose_zip(f, zip_name, domain, download_id, owner_id)

    DownloadBase.set_progress(build_form_multimedia_zip, num_forms, num_forms)
Example #20
def export(schema_index, file, format=Format.XLS_2007,
           previous_export_id=None, filter=None,
           max_column_size=2000, separator='|', export_object=None, process=None):
    """
    Exports data from couch documents matching a given tag to a file.
    Returns the export schema checkpoint if data was found, otherwise None.
    """
    config, updated_schema, export_schema_checkpoint = get_export_components(schema_index,
                                                                    previous_export_id, filter)
    # transform docs onto output and save
    if config:
        writer = get_writer(format)

        # open the doc and the headers
        formatted_headers = get_headers(updated_schema, separator=separator)
        writer.open(formatted_headers, file, max_column_size=max_column_size)

        total_docs = len(config.potentially_relevant_ids)
        if process:
            DownloadBase.set_progress(process, 0, total_docs)
        for i, doc in config.enum_docs():
            if export_object and export_object.transform:
                doc = export_object.transform(doc)
            writer.write(format_tables(create_intermediate_tables(doc, updated_schema),
                                       include_headers=False, separator=separator))
            if process:
                DownloadBase.set_progress(process, i + 1, total_docs)
        writer.close()
    return export_schema_checkpoint
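A hypothetical invocation of export, writing the default XLSX format to a local file. The schema index value and filename are illustrative assumptions:

# Stream everything matching the tag into a file; the returned checkpoint
# can be passed back as previous_export_id for an incremental re-export.
with open('export.xlsx', 'wb') as f:
    checkpoint = export(['my-domain', 'my-tag'], f)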
Example #21
def build_form_multimedia_zip(domain, xmlns, startdate, enddate, app_id,
                              export_id, zip_name, download_id,
                              export_is_legacy):

    form_ids = _get_form_ids_having_multimedia(domain, app_id, xmlns,
                                               startdate, enddate,
                                               export_is_legacy)
    properties = _get_export_properties(export_id, export_is_legacy)

    if not app_id:
        zip_name = 'Unrelated Form'
    forms_info = list()
    for form in FormAccessors(domain).iter_forms(form_ids):
        if not zip_name:
            zip_name = unidecode(form.name or 'unknown form')
        forms_info.append(_extract_form_attachment_info(form, properties))

    num_forms = len(forms_info)
    DownloadBase.set_progress(build_form_multimedia_zip, 0, num_forms)

    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        fpath = _get_download_file_path(xmlns, startdate, enddate, export_id,
                                        app_id, num_forms)
    else:
        _, fpath = tempfile.mkstemp()

    _write_attachments_to_file(fpath, use_transfer, num_forms, forms_info)
    _expose_download(fpath, use_transfer, zip_name, download_id, num_forms)
Example #22
def build_form_multimedia_zip(domain, xmlns, startdate, enddate, app_id,
                              export_id, zip_name, download_id, export_is_legacy):

    form_ids = _get_form_ids(domain, app_id, xmlns, startdate, enddate, export_is_legacy)
    properties = _get_export_properties(export_id, export_is_legacy)

    if not app_id:
        zip_name = 'Unrelated Form'
    forms_info = list()
    for form in FormAccessors(domain).iter_forms(form_ids):
        if not zip_name:
            zip_name = unidecode(form.name or 'unknown form')
        forms_info.append(_extract_form_attachment_info(form, properties))

    num_forms = len(forms_info)
    DownloadBase.set_progress(build_form_multimedia_zip, 0, num_forms)

    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        fpath = _get_download_file_path(xmlns, startdate, enddate, export_id, app_id, num_forms)
    else:
        _, fpath = tempfile.mkstemp()

    _write_attachments_to_file(fpath, use_transfer, num_forms, forms_info)
    _expose_download(fpath, use_transfer, zip_name, download_id, num_forms)
Example #23
def operate_on_payloads(repeat_record_ids,
                        domain,
                        action,
                        task=None,
                        from_excel=False):
    if not repeat_record_ids:
        return {'messages': {'errors': [_('No payloads specified')]}}
    if not action:
        return {'messages': {'errors': [_('No action specified')]}}

    response = {
        'errors': [],
        'success': [],
    }

    success_count = 0

    if task:
        DownloadBase.set_progress(task, 0, len(repeat_record_ids))

    for record_id in repeat_record_ids:
        valid_record = _validate_record(record_id, domain)

        if valid_record:
            try:
                message = ''
                if action == 'resend':
                    valid_record.fire(force_send=True)
                    message = _("Successfully resent repeat record (id={})"
                                ).format(record_id)
                elif action == 'cancel':
                    valid_record.cancel()
                    valid_record.save()
                    message = _("Successfully cancelled repeat record (id={})"
                                ).format(record_id)
                elif action == 'requeue':
                    valid_record.requeue()
                    valid_record.save()
                    message = _("Successfully requeued repeat record (id={})"
                                ).format(record_id)
                response['success'].append(message)
                success_count = success_count + 1
            except Exception as e:
                message = _(
                    "Could not perform action for repeat record (id={}): {}"
                ).format(record_id, e)
                response['errors'].append(message)

            if task:
                DownloadBase.set_progress(task, success_count,
                                          len(repeat_record_ids))

    if from_excel:
        return response

    response["success_count_msg"] = \
        _("Successfully {action} {count} form(s)".format(action=action, count=success_count))

    return {"messages": response}
Example #24
def write_export_instance(writer, export_instance, documents, progress_tracker=None):
    """
    Write rows to the given open _Writer.
    Rows will be written to each table in the export instance for each of
    the given documents.
    :param writer: An open _Writer
    :param export_instance: An ExportInstance
    :param documents: A ScanResult, or if progress_tracker is None, any iterable yielding documents
    :param progress_tracker: A task for soil to track progress against
    :return: None
    """
    if progress_tracker:
        DownloadBase.set_progress(progress_tracker, 0, documents.count)

    start = _time_in_milliseconds()
    total_bytes = 0
    total_rows = 0
    compute_total = 0
    write_total = 0

    for row_number, doc in enumerate(documents):
        total_bytes += sys.getsizeof(doc)
        for table in export_instance.selected_tables:
            compute_start = _time_in_milliseconds()
            try:
                rows = table.get_rows(
                    doc,
                    row_number,
                    split_columns=export_instance.split_multiselects,
                    transform_dates=export_instance.transform_dates,
                )
            except Exception as e:
                notify_exception(None, "Error exporting doc", details={
                    'domain': export_instance.domain,
                    'export_instance_id': export_instance.get_id,
                    'export_table': table.label,
                    'doc_id': doc.get('_id'),
                })
                e.sentry_capture = False
                raise
            compute_total += _time_in_milliseconds() - compute_start

            write_start = _time_in_milliseconds()
            for row in rows:
                # It might be bad to write one row at a time when you can do more (from a performance perspective)
                # Regardless, we should handle the batching of rows in the _Writer class, not here.
                writer.write(table, row)
            write_total += _time_in_milliseconds() - write_start

            total_rows += len(rows)

        if progress_tracker:
            DownloadBase.set_progress(progress_tracker, row_number + 1, documents.count)

    end = _time_in_milliseconds()
    tags = ['format:{}'.format(writer.format)]
    _record_datadog_export_write_rows(write_total, total_bytes, total_rows, tags)
    _record_datadog_export_compute_rows(compute_total, total_bytes, total_rows, tags)
    _record_datadog_export_duration(end - start, total_bytes, total_rows, tags)
Example #25
def parse_mobile_users(domain, user_filters, task=None, total_count=None):
    from corehq.apps.users.views.mobile.custom_data_fields import UserFieldsView
    fields_definition = CustomDataFieldsDefinition.get_or_create(
        domain,
        UserFieldsView.field_type
    )

    unrecognized_user_data_keys = set()
    user_groups_length = 0
    max_location_length = 0
    user_dicts = []
    domains_list = [domain]
    is_multi_domain_download = False
    if 'domains' in user_filters:
        domains_list = user_filters['domains']
    if domains_list != [domain]:
        is_multi_domain_download = True

    current_user_downloaded_count = 0
    for current_domain in domains_list:
        location_cache = LocationIdToSiteCodeCache(current_domain)
        for n, user in enumerate(get_commcare_users_by_filters(current_domain, user_filters)):
            group_memoizer = load_memoizer(current_domain)
            group_names = sorted([
                group_memoizer.get(id).name for id in Group.by_user_id(user.user_id, wrap=False)
            ], key=alphanumeric_sort_key)
            user_dict = make_mobile_user_dict(user, group_names, location_cache, current_domain, fields_definition)
            user_dicts.append(user_dict)
            unrecognized_user_data_keys.update(user_dict['uncategorized_data'])
            user_groups_length = max(user_groups_length, len(group_names))
            max_location_length = max(max_location_length, len(user_dict["location_code"]))
            if task:
                DownloadBase.set_progress(task, n + current_user_downloaded_count, total_count)
        current_user_downloaded_count += n + 1

    user_headers = [
        'username', 'password', 'name', 'phone-number', 'email',
        'language', 'role', 'user_id', 'is_active', 'User IMEIs (read only)',
        'registered_on (read only)', 'last_submission (read only)', 'last_sync (read only)'
    ]

    if domain_has_privilege(domain, privileges.APP_USER_PROFILES):
        user_headers += ['user_profile']
    user_data_fields = [f.slug for f in fields_definition.get_fields(include_system=False)]
    user_headers.extend(build_data_headers(user_data_fields))
    user_headers.extend(build_data_headers(
        unrecognized_user_data_keys,
        header_prefix='uncategorized_data'
    ))
    user_headers.extend(json_to_headers(
        {'group': list(range(1, user_groups_length + 1))}
    ))
    if domain_has_privilege(domain, privileges.LOCATIONS):
        user_headers.extend(json_to_headers(
            {'location_code': list(range(1, max_location_length + 1))}
        ))
    if is_multi_domain_download:
        user_headers += ['domain']
    return user_headers, get_user_rows(user_dicts, user_headers)
Example #26
def bulk_upload_async(domain, user_specs, group_specs):
    from corehq.apps.users.bulkupload import create_or_update_users_and_groups

    task = bulk_upload_async
    DownloadBase.set_progress(task, 0, 100)
    results = create_or_update_users_and_groups(domain, user_specs, group_specs, task=task)
    DownloadBase.set_progress(task, 100, 100)
    return {"messages": results}
Example #27
def turn_on_demo_mode_task(couch_user, domain):
    from corehq.apps.ota.utils import turn_on_demo_mode

    DownloadBase.set_progress(turn_on_demo_mode_task, 0, 100)
    results = turn_on_demo_mode(couch_user, domain)
    DownloadBase.set_progress(turn_on_demo_mode_task, 100, 100)

    return {'messages': results}
Example #28
def turn_on_demo_mode_task(couch_user, domain):
    from corehq.apps.ota.utils import turn_on_demo_mode

    DownloadBase.set_progress(turn_on_demo_mode_task, 0, 100)
    results = turn_on_demo_mode(couch_user, domain)
    DownloadBase.set_progress(turn_on_demo_mode_task, 100, 100)

    return {"messages": results}
Example #29
    def __init__(self, task, file_ref_id):
        self.task = task
        self.progress = 0

        if self.task:
            DownloadBase.set_progress(self.task, 0, 100)

        download_ref = DownloadBase.get(file_ref_id)
        self.workbook = WorkbookJSONReader(download_ref.get_filename())
Example #30
def build_application_zip(include_multimedia_files,
                          include_index_files,
                          app,
                          download_id,
                          build_profile_id=None,
                          compress_zip=False,
                          filename="commcare.zip"):
    from corehq.apps.hqmedia.views import iter_app_files

    DownloadBase.set_progress(build_application_zip, 0, 100)

    errors = []
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED

    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        fpath = os.path.join(
            settings.SHARED_DRIVE_CONF.transfer_dir,
            "{}{}{}{}{}".format(app._id,
                                'mm' if include_multimedia_files else '',
                                'ccz' if include_index_files else '',
                                app.version, build_profile_id))
    else:
        _, fpath = tempfile.mkstemp()

    if not (os.path.isfile(fpath)
            and use_transfer):  # Don't rebuild the file if it is already there
        files, errors = iter_app_files(app, include_multimedia_files,
                                       include_index_files, build_profile_id)
        with open(fpath, 'wb') as tmp:
            with zipfile.ZipFile(tmp, "w") as z:
                for path, data in files:
                    # don't compress multimedia files
                    extension = os.path.splitext(path)[1]
                    file_compression = zipfile.ZIP_STORED if extension in MULTIMEDIA_EXTENSIONS else compression
                    z.writestr(path, data, file_compression)

    common_kwargs = dict(
        mimetype='application/zip'
        if compress_zip else 'application/x-zip-compressed',
        content_disposition='attachment; filename="{fname}"'.format(
            fname=filename),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(fpath, use_transfer=use_transfer, **common_kwargs)
    else:
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(filename),
            **common_kwargs)

    DownloadBase.set_progress(build_application_zip, 100, 100)
    return {
        "errors": errors,
    }
Example #31
def fixture_upload_async(domain, download_id, replace):
    task = fixture_upload_async
    DownloadBase.set_progress(task, 0, 100)
    download_ref = DownloadBase.get(download_id)
    result = safe_fixture_upload(domain, download_ref, replace, task)
    DownloadBase.set_progress(task, 100, 100)
    return {
        'messages': result,
    }
Example #32
def import_locations_async(domain, file_ref_id):
    importer = MultiExcelImporter(import_locations_async, file_ref_id)
    task = import_locations_async
    DownloadBase.set_progress(task, 0, 100)
    results = new_locations_import(domain, importer)
    DownloadBase.set_progress(task, 100, 100)
    importer.mark_complete()

    return {'messages': results}
Example #33
 def queue_case(new_case, queue, progress):
     queue.append(new_case)
     if len(queue) >= 500:   # submit 500 cases at a time
         submit_case_blocks(queue, domain, user_id=user_id, device_id="explode_cases")
         progress += len(queue)
         if task:
             DownloadBase.set_progress(explode_case_task, progress, total_cases)
         del queue[:]
     return progress
Example #34
def import_products_async(domain, file_ref_id):
    task = import_products_async
    DownloadBase.set_progress(task, 0, 100)
    download_ref = DownloadBase.get(file_ref_id)
    results = import_products(domain, download_ref, task)
    DownloadBase.set_progress(task, 100, 100)
    return {
        'messages': results
    }
Example #35
def download_locations_async(domain, download_id, include_consumption,
                             headers_only):
    DownloadBase.set_progress(download_locations_async, 0, 100)
    dump_locations(domain,
                   download_id,
                   include_consumption=include_consumption,
                   headers_only=headers_only,
                   task=download_locations_async)
    DownloadBase.set_progress(download_locations_async, 100, 100)
Example #36
def turn_on_demo_mode_task(commcare_user_id, domain):
    from corehq.apps.ota.utils import turn_on_demo_mode
    from corehq.apps.users.models import CommCareUser

    user = CommCareUser.get(commcare_user_id)
    DownloadBase.set_progress(turn_on_demo_mode_task, 0, 100)
    results = turn_on_demo_mode(user, domain)
    DownloadBase.set_progress(turn_on_demo_mode_task, 100, 100)

    return {'messages': results}
Example #37
def build_application_zip(include_multimedia_files, include_index_files, domain, app_id,
                          download_id, build_profile_id=None, compress_zip=False, filename="commcare.zip",
                          download_targeted_version=False):
    DownloadBase.set_progress(build_application_zip, 0, 100)
    app = get_app(domain, app_id)
    fpath = create_files_for_ccz(app, build_profile_id, include_multimedia_files, include_index_files,
                                 download_id, compress_zip, filename, download_targeted_version,
                                 task=build_application_zip, expose_link=True)
    _expose_download_link(fpath, filename, compress_zip, download_id)
    DownloadBase.set_progress(build_application_zip, 100, 100)
Example #38
    def get_export_files(self,
                         format=None,
                         previous_export=None,
                         filter=None,
                         process=None,
                         max_column_size=None,
                         apply_transforms=True,
                         limit=0,
                         **kwargs):
        from couchexport.export import get_writer, get_formatted_rows
        if not format:
            format = self.default_format or Format.XLS_2007

        config, updated_schema, export_schema_checkpoint = self.get_export_components(
            previous_export, filter)

        # transform docs onto output and save
        writer = get_writer(format)

        # open the doc and the headers
        formatted_headers = list(self.get_table_headers())
        fd, path = tempfile.mkstemp()
        if six.PY2:
            path = path.decode('utf-8')
        with os.fdopen(fd, 'wb') as tmp:
            writer.open(formatted_headers,
                        tmp,
                        max_column_size=max_column_size,
                        table_titles=dict([(table.index, table.display)
                                           for table in self.tables
                                           if table.display]))

            total_docs = len(config.potentially_relevant_ids)
            if process:
                DownloadBase.set_progress(process, 0, total_docs)
            for i, doc in config.enum_docs():
                if limit and i > limit:
                    break
                if self.transform and apply_transforms:
                    doc = self.transform(doc)
                formatted_tables = self.trim(get_formatted_rows(doc,
                                                                updated_schema,
                                                                separator="."),
                                             doc,
                                             apply_transforms=apply_transforms)
                writer.write(formatted_tables)
                if process:
                    DownloadBase.set_progress(process, i + 1, total_docs)

            writer.close()

        if format == Format.PYTHON_DICT:
            return writer.get_preview()

        return ExportFiles(path, export_schema_checkpoint, format)
Example #39
def build_application_zip(include_multimedia_files, include_index_files, app,
                          download_id, build_profile_id=None, compress_zip=False, filename="commcare.zip"):
    from corehq.apps.hqmedia.views import iter_app_files

    DownloadBase.set_progress(build_application_zip, 0, 100)

    errors = []
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED

    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        fpath = os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, "{}{}{}{}{}".format(
            app._id,
            'mm' if include_multimedia_files else '',
            'ccz' if include_index_files else '',
            app.version,
            build_profile_id
        ))
    else:
        _, fpath = tempfile.mkstemp()

    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        files, errors = iter_app_files(app, include_multimedia_files, include_index_files, build_profile_id)
        with open(fpath, 'wb') as tmp:
            with zipfile.ZipFile(tmp, "w") as z:
                for path, data in files:
                    # don't compress multimedia files
                    extension = os.path.splitext(path)[1]
                    file_compression = zipfile.ZIP_STORED if extension in MULTIMEDIA_EXTENSIONS else compression
                    z.writestr(path, data, file_compression)

    common_kwargs = dict(
        mimetype='application/zip' if compress_zip else 'application/x-zip-compressed',
        content_disposition='attachment; filename="{fname}"'.format(fname=filename),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(
            fpath,
            use_transfer=use_transfer,
            **common_kwargs
        )
    else:
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(filename),
            **common_kwargs
        )

    DownloadBase.set_progress(build_application_zip, 100, 100)
    return {
        "errors": errors,
    }
Example #40
    def __init__(self, task, file_ref_id):
        self.task = task
        self.progress = 0

        if self.task:
            DownloadBase.set_progress(self.task, 0, 100)

        download_ref = DownloadBase.get(file_ref_id)
        if download_ref is None:
            raise UnknownFileRefException("Could not find file wih ref %s. It may have expired" % file_ref_id)
        self.workbook = WorkbookJSONReader(download_ref.get_filename())
Example #41
def import_locations_async(domain, file_ref_id):
    importer = MultiExcelImporter(import_locations_async, file_ref_id)
    task = import_locations_async
    DownloadBase.set_progress(task, 0, 100)
    results = new_locations_import(domain, importer)
    DownloadBase.set_progress(task, 100, 100)
    importer.mark_complete()

    return {
        'messages': results
    }
Example #42
def dump_web_users(domain, download_id, task, owner_id):
    users_count = get_web_user_count(domain, include_inactive=False)
    DownloadBase.set_progress(task, 0, users_count)

    user_headers, user_rows = parse_web_users(domain, task, users_count)

    headers = [('users', [user_headers])]
    rows = [('users', user_rows)]

    filename = "{}_users_{}.xlsx".format(domain, uuid.uuid4().hex)
    _dump_xlsx_and_expose_download(filename, headers, rows, download_id, task, users_count, owner_id)
Example #43
def archive_or_restore_forms(domain, user, form_ids, archive_or_restore, task=None, from_excel=False):
    response = {
        'errors': [],
        'success': [],
    }

    missing_forms = set(form_ids)
    success_count = 0

    if task:
        DownloadBase.set_progress(task, 0, len(form_ids))

    for xform_doc in iter_docs(XFormInstance.get_db(), form_ids):
        xform = XFormInstance.wrap(xform_doc)
        missing_forms.discard(xform['_id'])

        if xform['domain'] != domain:
            response['errors'].append(_(u"XFORM {form_id} does not belong to domain {domain}").format(
                form_id=xform['_id'], domain=xform['domain']))
            continue

        xform_string = _(u"XFORM {form_id} for domain {domain} by user '{username}'").format(
            form_id=xform['_id'],
            domain=xform['domain'],
            username=user.username)

        try:
            if archive_or_restore.is_archive_mode():
                xform.archive(user_id=user.username)
                message = _(u"Successfully archived {form}").format(form=xform_string)
            else:
                xform.unarchive(user_id=user.username)
                message = _(u"Successfully unarchived {form}").format(form=xform_string)
            response['success'].append(message)
            success_count = success_count + 1
        except Exception as e:
            response['errors'].append(_(u"Could not archive {form}: {error}").format(
                form=xform_string, error=e))

        if task:
            DownloadBase.set_progress(task, success_count, len(form_ids))

    for missing_form_id in missing_forms:
        response['errors'].append(
            _(u"Could not find XForm {form_id}").format(form_id=missing_form_id))

    if from_excel:
        return response

    response["success_count_msg"] = _("{success_msg} {count} form(s)".format(
        success_msg=archive_or_restore.success_text,
        count=success_count))
    return {"messages": response}
Example #44
def turn_on_demo_mode_task(commcare_user_id, domain):
    from corehq.apps.ota.utils import turn_on_demo_mode
    from corehq.apps.users.models import CommCareUser

    user = CommCareUser.get(commcare_user_id)
    DownloadBase.set_progress(turn_on_demo_mode_task, 0, 100)
    results = turn_on_demo_mode(user, domain)
    DownloadBase.set_progress(turn_on_demo_mode_task, 100, 100)

    return {
        'messages': results
    }
Example #45
def build_form_multimedia_zip(
        domain,
        xmlns,
        startdate,
        enddate,
        app_id,
        export_id,
        zip_name,
        download_id,
        export_is_legacy,
        user_types=None,
        group=None):

    form_ids = get_form_ids_having_multimedia(
        domain,
        app_id,
        xmlns,
        parse(startdate),
        parse(enddate),
        group=group,
        user_types=user_types,
    )
    properties = _get_export_properties(export_id, export_is_legacy)

    if not app_id:
        zip_name = 'Unrelated Form'
    forms_info = list()
    for form in FormAccessors(domain).iter_forms(form_ids):
        if not zip_name:
            zip_name = unidecode(form.name or 'unknown form')
        forms_info.append(_extract_form_attachment_info(form, properties))

    num_forms = len(forms_info)
    DownloadBase.set_progress(build_form_multimedia_zip, 0, num_forms)

    case_id_to_name = _get_case_names(
        domain,
        set.union(*[form_info['case_ids'] for form_info in forms_info]) if forms_info else set(),
    )

    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        fpath = _get_download_file_path(xmlns, startdate, enddate, export_id, app_id, num_forms)
    else:
        _, fpath = tempfile.mkstemp()

    _write_attachments_to_file(fpath, use_transfer, num_forms, forms_info, case_id_to_name)
    filename = "{}.zip".format(zip_name)
    expose_download(use_transfer, fpath, filename, download_id, 'zip')
    DownloadBase.set_progress(build_form_multimedia_zip, num_forms, num_forms)
Example #46
def import_locations_async(domain, file_ref_id):
    task = import_locations_async

    DownloadBase.set_progress(task, 0, 100)
    download_ref = DownloadBase.get(file_ref_id)
    workbook = WorkbookJSONReader(download_ref.get_filename())
    worksheets = workbook.worksheets

    results = list(import_locations(domain, worksheets, task))

    DownloadBase.set_progress(task, 100, 100)

    return {
        'messages': results
    }
Example #47
    def get_export_files(self, format=None, previous_export=None, filter=None, process=None, max_column_size=None,
                         apply_transforms=True, limit=0, **kwargs):
        from couchexport.export import get_writer, format_tables, create_intermediate_tables
        if not format:
            format = self.default_format or Format.XLS_2007

        config, updated_schema, export_schema_checkpoint = self.get_export_components(previous_export, filter)

        # transform docs onto output and save
        writer = get_writer(format)

        # open the doc and the headers
        formatted_headers = list(self.get_table_headers())
        fd, path = tempfile.mkstemp()
        with os.fdopen(fd, 'wb') as tmp:
            writer.open(
                formatted_headers,
                tmp,
                max_column_size=max_column_size,
                table_titles=dict([
                    (table.index, table.display)
                    for table in self.tables if table.display
                ])
            )

            total_docs = len(config.potentially_relevant_ids)
            if process:
                DownloadBase.set_progress(process, 0, total_docs)
            for i, doc in config.enum_docs():
                if limit and i > limit:
                    break
                if self.transform and apply_transforms:
                    doc = self.transform(doc)
                formatted_tables = self.trim(
                    format_tables(
                        create_intermediate_tables(doc, updated_schema),
                        separator="."
                    ),
                    doc,
                    apply_transforms=apply_transforms
                )
                writer.write(formatted_tables)
                if process:
                    DownloadBase.set_progress(process, i + 1, total_docs)

            writer.close()

        return ExportFiles(path, export_schema_checkpoint, format)
Example #48
def reset_demo_user_restore_task(couch_user, domain):
    from corehq.apps.ota.utils import reset_demo_user_restore

    DownloadBase.set_progress(reset_demo_user_restore_task, 0, 100)

    try:
        reset_demo_user_restore(couch_user, domain)
        results = {"errors": []}
    except Exception as e:
        notify_exception(None, message=str(e))
        results = {
            "errors": [_("Something went wrong in creating restore for the user. Please try again or report an issue")]
        }

    DownloadBase.set_progress(reset_demo_user_restore_task, 100, 100)
    return {"messages": results}
Example #49
def import_locations(domain, worksheets, task=None):
    processed = 0
    total_rows = sum(ws.worksheet.get_highest_row() for ws in worksheets)

    for worksheet in worksheets:
        location_type = worksheet.worksheet.title
        if location_type not in defined_location_types(domain):
            yield "location with type %s not found, this worksheet will not be imported" % location_type
        else:
            data = list(worksheet)

            for loc in data:
                yield import_location(domain, location_type, loc)['message']
                if task:
                    processed += 1
                    DownloadBase.set_progress(task, processed, total_rows)
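Because import_locations is a generator, nothing is imported until the caller iterates; each yielded string is a status message and each consumed row advances the task. A minimal consumption sketch (domain, worksheets, and my_task are assumed to exist):

# Draining the generator performs the import and the per-row
# set_progress calls as a side effect.
status_messages = list(import_locations(domain, worksheets, task=my_task))
for message in status_messages:
    print(message)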
Example #50
def reset_demo_user_restore_task(commcare_user_id, domain):
    from corehq.apps.ota.utils import reset_demo_user_restore
    from corehq.apps.users.models import CommCareUser

    user = CommCareUser.get(commcare_user_id)

    DownloadBase.set_progress(reset_demo_user_restore_task, 0, 100)

    try:
        reset_demo_user_restore(user, domain)
        results = {'errors': []}
    except Exception as e:
        notify_exception(None, message=six.text_type(e))
        results = {'errors': [
            _("Something went wrong in creating restore for the user. Please try again or report an issue")
        ]}

    DownloadBase.set_progress(reset_demo_user_restore_task, 100, 100)
    return {'messages': results}
Example #51
    def __init__(self, task, file_ref_id):
        self.start = self.last_update = datetime.now()
        self.task = task
        self.progress = 0
        self.total_rows = 100
        if getattr(settings, 'CELERY_ALWAYS_EAGER', False):
            # Log progress since tasks are executed synchronously when
            # CELERY_ALWAYS_EAGER is true
            self.log = logging.getLogger(__name__).info
        else:
            self.log = lambda *a, **k: None

        if self.task:
            DownloadBase.set_progress(self.task, 0, 100)

        download_ref = DownloadBase.get(file_ref_id)
        if download_ref is None:
            raise UnknownFileRefException("Could not find file wih ref %s. It may have expired" % file_ref_id)
        self.workbook = WorkbookJSONReader(download_ref.get_filename())
Example #52
def _write_attachments_to_file(fpath, use_transfer, num_forms, forms_info, case_id_to_name):

    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        with open(fpath, 'wb') as zfile:
            with zipfile.ZipFile(zfile, 'w') as multimedia_zipfile:
                for form_number, form_info in enumerate(forms_info):
                    form = form_info['form']
                    for attachment in form_info['attachments']:
                        filename = _format_filename(
                            form_info,
                            attachment['question_id'],
                            attachment['extension'],
                            case_id_to_name
                        )
                        zip_info = zipfile.ZipInfo(filename, attachment['timestamp'])
                        multimedia_zipfile.writestr(
                            zip_info,
                            form.get_attachment(attachment['name']),
                            zipfile.ZIP_STORED,
                        )
                    DownloadBase.set_progress(build_form_multimedia_zip, form_number + 1, num_forms)
Example #53
def _write_attachments_to_file(fpath, use_transfer, num_forms, forms_info):

    def filename(form_info, question_id, extension):
        return u"{}-{}-{}{}".format(
            unidecode(question_id),
            form_info['user'],
            form_info['id'],
            extension
        )

    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        with open(fpath, 'wb') as zfile:
            with zipfile.ZipFile(zfile, 'w') as z:
                for form_number, form_info in enumerate(forms_info):
                    f = form_info['form']
                    for a in form_info['attachments']:
                        fname = filename(form_info, a['question_id'], a['extension'])
                        zi = zipfile.ZipInfo(fname, a['timestamp'])
                        z.writestr(zi, f.get_attachment(a['name']), zipfile.ZIP_STORED)
                    DownloadBase.set_progress(build_form_multimedia_zip, form_number + 1, num_forms)