def handle(self, *args, **options): log_folder = "/var/tmp/media_usage" if not os.path.exists(log_folder): os.makedirs(log_folder) log_file_name = "%s" % datetime.datetime.now().strftime( 'media_usage_%H_%M_%d_%m_%Y.log') full_log_file_path = "%s/%s" % (log_folder, log_file_name) print "Calculate media usage per account" pool = DWThreadPool(3, 3) for db_name in all_db_names(): pool.submit(calculate_usage, db_name, full_log_file_path) pool.wait_for_completion() if os.path.exists(full_log_file_path): grand_tot_usage = _calculate_total_usage(full_log_file_path) with open(full_log_file_path, 'a') as f: f.write("\nTotal usage across accounts: %s mb\n" % grand_tot_usage) if 'send-mail' in args: print "Sending mail..." _send_email(full_log_file_path) print "Log file path: %s" % full_log_file_path else: print "No media usage across accounts." print "Done."
def get_list_of_indexes_to_reindex(full_reindex=False):
    """Find questionnaires whose search index is out of sync (or all of them).

    Returns a tuple (list_of_questionnaire_info_dicts, total_submission_count).
    Registration form models are skipped. A failure on one database is logged
    and the remaining databases are still processed.
    """
    import logging  # local import: the module import block is outside this view

    list_of_indexes_out_of_sync = []
    total_submissions = 0
    # Hoisted: one Elasticsearch client overall instead of a new instance per
    # out-of-sync questionnaire.
    es = Elasticsearch(hosts=[{
        "host": ELASTIC_SEARCH_HOST,
        "port": ELASTIC_SEARCH_PORT
    }])
    for database_name in all_db_names():
        # BUG FIX: the original wrapped the whole scan in `try/except: pass`,
        # so any error silently returned None and broke callers that
        # tuple-unpack the result. Errors are now logged per database and the
        # collected partial results are still returned.
        try:
            dbm = get_db_manager(database_name)
            questionnaires = dbm.load_all_rows_in_view('questionnaire')
            if not questionnaires:
                continue
            for row in questionnaires:
                if row['value']['is_registration_model']:
                    continue
                form_model_doc = FormModelDocument.wrap(row["value"])
                if full_reindex or is_mapping_out_of_sync(form_model_doc, dbm):
                    search = Search(using=es, index=dbm.database_name,
                                    doc_type=form_model_doc.id)
                    no_of_submissions = search.count()
                    questionnaire_info = dict(
                        db_name=database_name,
                        questionnaire_id=form_model_doc.id,
                        name=form_model_doc.name,
                        no_of_submissions=no_of_submissions)
                    total_submissions += no_of_submissions
                    list_of_indexes_out_of_sync.append(questionnaire_info)
        except Exception:
            logging.exception("reindex scan failed for database %s" % database_name)
    return list_of_indexes_out_of_sync, total_submissions
def trigger(self, doc_count_threshold, commit_seq_threshold):
    """Fetch every database name from the configured server and attempt an
    update on each one, passing both thresholds through."""
    self.db_names = all_db_names(self.db_server)
    logging.debug("get %s dbs" % len(self.db_names))
    for name in self.db_names:
        self._try_to_update(name, doc_count_threshold, commit_seq_threshold)
    # Visual separator between runs in the log.
    logging.info("*" * 80)
def async_list_of_indexes_to_reindex(full_reindex=False):
    """Kick off one celery chain (fetch questionnaires, then their details)
    per database and return the list of AsyncResult handles."""
    pending_results = []
    for name in all_db_names():
        workflow = chain(
            async_fetch_questionnaires.s(name, full_reindex)
            | async_fetch_questionnaire_details.s(name, full_reindex))
        pending_results.append(workflow.apply_async())
    return pending_results
def handle(self, *args, **options): es = get_elasticsearch_handle() if len(args) > 0: databases_to_index = args[0:] else: databases_to_index = all_db_names() for database_name in databases_to_index: logger = logging.getLogger(database_name) recreate_index_for_db(database_name, es, logger) logger.info('Done') print 'Completed!'
def handle(self, *args, **options): if len(args) > 0: databases_to_index = args[0:] else: databases_to_index = all_db_names() pool = DWThreadPool(threads, threads) for db_name in databases_to_index: pool.submit(recreate_index_for_db, db_name) pool.wait_for_completion() print 'Completed!'
def handle(self, *args, **options): get_cache_manager().flush_all() for database_name in all_db_names(): print("Database %s") % (database_name, ) print 'Deleting...' manager = get_db_manager(database_name) _delete_db_and_remove_db_manager(manager) for database_name in document_stores_to_process(args): recreated_manager = get_db_manager(database_name) print "Syncing Views....." SyncOnlyChangedViews().sync_view(recreated_manager) print "Loading data....." load_data() print "Done."
def check_survey_responses():
    """Scan every database and log the ones that still contain invalid
    survey responses; a failure on one database does not stop the scan."""
    databases_to_index = all_db_names()
    for database_name in databases_to_index:
        logging.info('Starting checking for database %s' % database_name)
        try:
            dbm = get_db_manager(database_name)
            rows = dbm.database.query(invalid_surveyresponses, include_docs=False)
            if len(rows) != 0:
                logging.debug("invalid survey_responses present for %s" % database_name)
        except Exception as e:
            # BUG FIX: typo in the log message ("retrival" -> "retrieval").
            logging.error("retrieval of survey responses failed for %s" % database_name)
            logging.error(e)
        # Tail of a function that begins above this view: collect the search
        # dict built for each survey response, then bulk-index the batch.
        survey_response_docs.append(search_dict)
    if survey_response_docs:
        es.bulk_index(dbm.database_name, form_model.id, survey_response_docs)


def create_index(database_name):
    # Rebuild the submission search index for one database: refresh each
    # questionnaire's mapping, then re-index its submissions.
    try:
        dbm = get_db_manager(database_name)
        logger = logging.getLogger(database_name)
        for row in dbm.load_all_rows_in_view('questionnaire'):
            form_model_doc = FormModelDocument.wrap(row["value"])
            form_model_change_handler(form_model_doc, dbm)
            try:
                create_submission_index(dbm, row)
            except Exception as e:
                # One questionnaire failing should not stop the others.
                logger.error(
                    "Index update failed for database %s and for formmodel %s"
                    % (database_name, row.id))
                logger.error(e)
    except Exception as e:
        # NOTE(review): if get_db_manager itself raises, `logger` is not yet
        # bound and this handler would raise NameError - confirm upstream.
        logger.error(
            "Mapping update failed for database %s for form model %s "
            % (database_name, form_model_doc.form_code))
        logger.error(e)
    mark_as_completed(database_name)


migrate(all_db_names(), create_index, version=(11, 0, 6), threads=2)
    # Body of remove_deleted_ds_from_project (its `def` line is above this
    # view): strip data senders from each project that are no longer active.
    logger = logging.getLogger(db_name)
    try:
        dbm = get_db_manager(db_name)
        logger.info("starting data fix for " + db_name)
        all_data_senders = set(get_all_active_data_senders(dbm))
        for project_doc in dbm.database.view("project_names/project_names", include_docs=True):
            try:
                project_data_senders = set(project_doc["doc"]["data_senders"])
                # Senders referenced by the project but no longer active.
                invalid_ds = project_data_senders.difference(all_data_senders)
                # NOTE: project_doc is rebound here from the raw view row to a
                # wrapped Project object.
                project_doc = Project._wrap_row(project_doc)
                for ds in invalid_ds:
                    logger.info("Found invalid data senders in project : " +
                                str(project_doc) + " " + str(invalid_ds))
                    project_doc.delete_datasender(dbm, ds)
            except Exception as e:
                print "Error : " + db_name + " : " + str(
                    project_doc) + e.message
                traceback.print_exc(file=sys.stdout)
        logger.info("done:" + db_name)
        mark_as_completed(db_name)
    except Exception as e:
        logger.exception("Failed Database : %s , with error :%s " % (db_name, e.message))


migrate(all_db_names(), remove_deleted_ds_from_project, version=(7, 0, 4))
        # Continuation of migrate_db's try block (its opening lines are above
        # this view): void every invalid survey response found.
        count = 0
        log_statement('Total documents to be voided : %s' % len(invalid_survey_response_docs))
        for survey_response_doc in invalid_survey_response_docs:
            survey_response = SurveyResponse.new_from_doc(
                dbm=manager,
                doc=SurveyResponse.__document_class__.wrap(survey_response_doc['value']))
            survey_response.void()
            count += 1
            log_statement("Voided survey_response %s" % survey_response.uuid)
        log_statement("Total number of documents voided are %s" % count)
        log_statement('\nCompleted database : %s\n' % database)
    except Exception as e:
        # Best-effort: record the failure and keep going with other databases.
        log_statement('error:%s\n' % database)
        traceback.print_exc(file=log_file)


def migrate_bug_2134(all_db_names):
    # Drive migrate_db over every database that is not skipped.
    print "start ...."
    for database in all_db_names:
        if should_not_skip(database):
            print "starting database : %s" % database
            log_statement('\nStart ==============================================================================\n')
            migrate_db(database)
            log_statement('\n End ================================================================================\n')
    print "Completed migration"


def log_statement(statement):
    # Append a UTC-timestamped line to the shared migration log file.
    log_file.writelines('%s : %s\n' % (datetime.utcnow(), statement))


migrate_bug_2134(all_db_names(SERVER))
import sys
from datawinners.main.couchdb.utils import all_db_names
if __name__ == "__main__" and __package__ is None:
    sys.path.insert(0, ".")
import logging
from datawinners.feeds.migrate import FeedBuilder
from migration.couch.utils import mark_as_completed, migrate


def migrate_survey_response_to_feed(db_name):
    # Build the feed representation for every survey response in one database.
    logger = logging.getLogger(db_name)
    try:
        logger.info("Starting migration")
        # NOTE(review): the database is marked completed BEFORE FeedBuilder
        # runs, so a failure still counts as migrated - confirm intentional.
        mark_as_completed(db_name)
        FeedBuilder(db_name, logger).migrate_db()
    except Exception as e:
        logger.exception("FAILED")


migrate(all_db_names(), migrate_survey_response_to_feed, version=(7, 0, 3))
    # Tail of a helper whose `def` line is above this view: returns the
    # per-"year_month" submission-count aggregate built from the database.
    return year_month_submission_count_dict


def update_counters_for_date(date, key, organization, year_month_submission_count_dict):
    # Copy the aggregated smartphone/web submission counts for one
    # year-month bucket onto the organization's message tracker.
    smart_phone_count = year_month_submission_count_dict[key].get('sp_count', 0)
    web_count = year_month_submission_count_dict[key].get('web_count', 0)
    message_tracker = organization._get_message_tracker(date)
    message_tracker.incoming_web_count = web_count
    message_tracker.incoming_sp_count = smart_phone_count
    message_tracker.save()


def update_counters_for_submissions(db_name):
    # Migration entry point: rebuild message-tracker counters for one database.
    logger = logging.getLogger(db_name)
    try:
        logger.info('Starting migration')
        dbm = get_db_manager(db_name)
        year_month_submission_count_dict = get_submission_count_aggregate(dbm)
        organization = OrganizationSetting.objects.get(document_store=dbm.database_name).organization
        for key in year_month_submission_count_dict.keys():
            # Keys look like "<year>_<month>"; counters use day 1 of the month.
            year, month = int(key.split('_')[0]), int(key.split('_')[1])
            date = datetime.date(year, month, 1)
            update_counters_for_date(date, key, organization, year_month_submission_count_dict)
        mark_as_completed(db_name)
        logger.info("Completed")
    except ResourceNotFound as e:
        logger.warn(e.message)
    except Exception as e:
        logger.exception(e.message)


migrate(all_db_names(), update_counters_for_submissions, version=(10, 0, 6), threads=1)
            # Continuation of merge_project_and_form_model_for's per-row loop
            # (opening lines above this view): copy project attributes onto
            # the form model document, then delete the project document.
            form_model_doc['data_senders'] = project_data['data_senders']
            form_model_doc['reminder_and_deadline'] = project_data['reminder_and_deadline']
            form_model_doc['sender_group'] = project_data['sender_group']
            try:
                # Older documents may not carry a 'state' key.
                del form_model_doc['state']
            except KeyError as e:
                logger.warn(e)
            dbm._save_document(form_model_doc)
            update_reminders(dbm, project_data, logger)
            logger.info("Deleting project with id: %s", row.id)
            dbm.database.delete(row.doc)
        except Exception as e:
            logger.error('Merging project and form_model failed for database : %s, project_doc with id: %s',
                         dbm.database_name, row.id)
            logger.error(e)


def migrate_to_merge_form_model_and_project(db_name):
    # Migration entry point: fold each project document into its form model.
    logger = logging.getLogger(db_name)
    try:
        logger.info('Starting migration')
        dbm = get_db_manager(db_name)
        merge_project_and_form_model_for(dbm, logger)
    except Exception as e:
        logger.exception(e.message)
    mark_as_completed(db_name)


migrate(all_db_names(), migrate_to_merge_form_model_and_project, version=(11, 0, 4), threads=3)
    # Body of add_eid_field_for_survey_response_with_missing_eid_field (its
    # `def` line is above this view). This run only REPORTS inconsistent
    # survey responses - the actual data fix is commented out below.
    logger = logging.getLogger(db_name)
    logger.info('Starting Migration')
    # dbm = get_db_manager(db_name)
    # NOTE(review): hard-coded production CouchDB host - confirm before reuse.
    dbm = get_db_manager(server='http://178.79.161.90:5984', database=db_name)
    inconsistent_survey_response_list = _get_survey_responses_with_no_eid(
        dbm, logger)
    for survey_response in inconsistent_survey_response_list:
        if survey_response.owner_uid:
            # data_sender = Entity.get(dbm, survey_response.owner_uid)
            # survey_response.values['eid'] = data_sender.short_code
            # logger.info("Migrated survey response: %s" % survey_response.uuid)
            logger.warning("survey_rsponse_id: %s" % survey_response.uuid)
            logger.warning("form_model_id: %s" % survey_response.form_code)
        else:
            logger.warning(
                "Missing owner id for survey_response: %s, form_code: %s" %
                (survey_response.uuid, survey_response.form_code))
    # logger.info("Number of survey responses migrated: %s" % len(inconsistent_survey_response_list))
    length_of_survey_response_with_no_eid = len(
        inconsistent_survey_response_list)
    # NOTE(review): org_id is computed but not used in the visible lines.
    org_id = OrganizationSetting.objects.get(
        document_store=dbm.database_name).organization_id
    if length_of_survey_response_with_no_eid:
        logger.info("Number of incorrect survey responses: %s" %
                    length_of_survey_response_with_no_eid)


migrate(all_db_names(server="http://178.79.161.90:5984"),
        add_eid_field_for_survey_response_with_missing_eid_field,
        version=(8, 0, 3))
        # Continuation of migrate_story_2099's try block (opening lines above
        # this view): renumber the fields of each subject form model and
        # migrate the matching data records.
        processed = []
        for form_model_doc in subject_form_model_docs:
            form_model = get_instance_from_doc(manager, form_model_doc['value'])
            # Several docs can share a form code; handle each code only once.
            if form_model.form_code in processed:
                continue
            processed.append(form_model.form_code)
            logger.info("Process on :form_model: %s, form code : %s" %
                        (form_model.id, form_model.form_code))
            data_to_restore, current_data_length = renumber_fields_name(
                form_model)
            # Only data records recorded at the pre-renumbering length.
            datarecord_docs = manager.database.query(
                map_datarecord_by_form_code,
                key=[form_model.form_code, current_data_length])
            for datarecord_doc in datarecord_docs:
                migrate_entity(manager, form_model, datarecord_doc,
                               data_to_restore)
            form_model.save()
            logger.info("End process on :form_model: %s , form code : %s" %
                        (form_model.id, form_model.form_code))
        logger.info('End migration on database')
    except Exception as e:
        logger.exception(e.message)


migrate(all_db_names(), migrate_story_2099, version=(7, 0, 5))
                        dbm, [entity_type]):
                    # Continuation of recreate_subject_index (its opening
                    # lines are above this view): build one search document
                    # per subject entity, then bulk-index the batch.
                    try:
                        if entity_doc.data:
                            subject = subject_dict(entity_type, entity_doc, dbm, form_model)
                            subject.update({'id': entity_doc.id})
                            entity_docs.append(subject)
                    except Exception as e:
                        # One bad subject should not stop the batch.
                        logger.error("Failed to index subject with id %s" % entity_doc.id)
                        logger.error(e)
                if entity_docs:
                    es.bulk_index(dbm.database_name, entity_type, entity_docs)
                    es.refresh(dbm.database_name)
                    logger.info('Changed index for subject with codes ' +
                                str([a.get('id') for a in entity_docs]))
            except Exception as e:
                logger.error("Failed to create subject mapping for %s" % row.id)
                logger.error(e.message)
        logger.info('Completed Indexing')
        mark_as_completed(db_name)
    except Exception as e:
        logger.exception(e.message)


# Shared ES handle for the migration; long timeout for large bulk indexing.
es = get_elasticsearch_handle(timeout=600)
migrate(all_db_names(), recreate_subject_index, version=(10, 0, 3), threads=1)
def check_for_name_mismatch(dbm):
    """Log every project in this database whose name differs from the name
    stored on its form model document."""
    for row in dbm.database.query(list_all_projects, include_docs=True):
        try:
            project_data = row.doc
            form_model = FormModel.get(dbm, project_data.get("qid"))
            form_model_doc = form_model._doc
            if form_model_doc['name'] != project_data['name']:
                # BUG FIX: message said "project with_id".
                logging.debug(
                    "name mismatch for database %s, project with id %s" %
                    (dbm.database_name, row.id))
        except Exception as e:
            # BUG FIX: message had a doubled word ("failed for for database").
            logging.error(
                'something failed for database : %s, project_doc with id: %s' %
                (dbm.database_name, row.id))
            logging.error(e)


def check_project_and_formmodel_name_mismatch(db_name):
    """Run the name-mismatch check for one database, logging any failure."""
    try:
        logging.info('Starting checking for database %s' % db_name)
        dbm = get_db_manager(db_name)
        check_for_name_mismatch(dbm)
    except Exception as e:
        logging.exception(e.message)


for db_name in all_db_names():
    check_project_and_formmodel_name_mismatch(db_name)
    # Body of migration_to_add_email_data_for_web_users_in_couch (its `def`
    # line is above this view): copy each web user's email address into the
    # corresponding reporter entity document.
    logger.info('Starting Migration')
    # NOTE(review): marked completed before any work happens - confirm.
    mark_as_completed(db_name)
    manager = get_db_manager(db_name)
    email_field_code = "email"
    try:
        form_model = get_form_model_by_code(manager, REGISTRATION_FORM_CODE)
    except FormModelDoesNotExistsException as f:
        # No registration form in this database - nothing to migrate.
        logger.warning(f.message)
        return
    email_field_label = form_model.get_field_by_code(email_field_code).name
    org_id = OrganizationSetting.objects.get(document_store=manager.database_name).organization_id
    user_profiles = NGOUserProfile.objects.filter(org_id=org_id)
    for user_profile in user_profiles:
        try:
            short_code = user_profile.reporter_id
            email_value = user_profile.user.email
            data = (email_field_label, email_value)
            if short_code:
                add_email_data_to_entity_document(manager, short_code, data, logger)
        except DataObjectAlreadyExists as e:
            # Email already present on the entity - safe to skip.
            logger.warning(e.message)
        except Exception as e:
            logger.exception("FAILED to migrate: %s" % short_code)


migrate(all_db_names(), migration_to_add_email_data_for_web_users_in_couch, version=(9, 0, 2))
db_name = dbm.database_name questionnaires = dbm.load_all_rows_in_view('all_questionnaire') for questionnaire in questionnaires: try: if not questionnaire['value']['void']: field_count = len(questionnaire.value["json_fields"]) questionnaire_id = questionnaire.value['_id'] questionnaire_name = questionnaire.value['name'] rows = dbm.load_all_rows_in_view( 'surveyresponse', reduce=True, start_key=[questionnaire_id], end_key=[questionnaire_id, {}]) if rows and len(rows) >= 1 and 'count' in rows[0]['value']: logger.info( "organization - %s, questionnaire_id - %s, questionnaire-name-%s field_count - %dSubmission count : %d" % (db_name, questionnaire_id, questionnaire_name, field_count, rows[0]['value']['count'])) except Exception: logger.exception() def get_counts_for(db_names): for db_name in db_names: dbm = get_db_manager(db_name) logger = logging.getLogger(db_name) get_count(dbm, logger) get_counts_for(all_db_names())
    # Tail of an `if __name__ == "__main__"` guard that begins above this view.
    sys.path.insert(0, ".")
from datetime import datetime
from mangrove.datastore.database import get_db_manager
from mangrove.datastore.documents import FormModelDocument
from mangrove.form_model.form_model import FormModel
from migration.couch.utils import configure_csv, should_not_skip, mark_as_completed
from datawinners import settings

SERVER = 'http://localhost:5984'
credentials = settings.COUCHDBMAIN_CREDENTIALS
# Shared append-mode log for this migration run.
log_file = open('/var/log/datawinners/migration_release_6_1_1.log', 'a')
configure_csv('/var/log/datawinners/dbs_migrated_release_6_1_1.csv')
# NOTE(review): all_db_names is not imported in the visible lines - confirm
# it is imported above this view.
db_names = all_db_names(SERVER)
# excluding the following
# doc.form_code != 'reg' - to ignore the registration form model
# doc.form_code != 'delete' - to ignore the deletion form model
map_form_model_questionnaire = """ function(doc) { if (doc.document_type == 'FormModel' && doc.form_code != 'reg' && doc.form_code != 'delete' && doc.is_registration_model != true && !doc.void) { emit(doc.form_code,doc) } }"""


def get_form_model(manager, questionnaire):
    # Wrap the raw couch row into a FormModelDocument.
    # (This function continues past the end of the visible source.)
    doc = FormModelDocument.wrap(questionnaire['value'])
import logging
from mangrove.contrib.registration import GLOBAL_REGISTRATION_FORM_CODE
from mangrove.utils.test_utils.database_utils import delete_and_create_form_model
from datawinners.main.database import get_db_manager
from datawinners.main.couchdb.utils import all_db_names
from migration.couch.utils import migrate, mark_as_completed


def migration_to_update_constraints_for_datasender_registration(db_name):
    """Drop and re-create the global datasender registration form model so it
    picks up the current field constraints."""
    logger = logging.getLogger(db_name)
    dbm = get_db_manager(db_name)
    try:
        delete_and_create_form_model(dbm, GLOBAL_REGISTRATION_FORM_CODE)
        mark_as_completed(db_name)
    except Exception as e:
        logger.exception(
            "Datasender registration form re-creation failed for: %s with message %s"
            % (db_name, e.message))
    # Logged unconditionally - mirrors the original best-effort behaviour.
    logger.info('Completed Migration')


migrate(all_db_names(),
        migration_to_update_constraints_for_datasender_registration,
        version=(10, 1, 2), threads=2)
import re import sys from mangrove.datastore.database import get_db_manager if __name__ == "__main__" and __package__ is None: sys.path.insert(0, ".") from datawinners.main.couchdb.utils import all_db_names from mangrove.datastore.entity import get_all_entities def get_DS_with_ca(dbname): dbm = get_db_manager(server="http://*****:*****@172.18.9.6:5984", database=dbname, credentials=('admin', 'admin')) all_ds = get_all_entities(dbm, ['reporter']) for ds in all_ds: if 'short_code' in ds.data.keys(): short_code = ds.data['short_code']['value'] if re.search('[A-Z]', short_code): print 'short_code is :' + short_code print 'database is :' + dbname for db in all_db_names(server="http://*****:*****@172.18.9.6:5984"): get_DS_with_ca(db)
import logging

from datawinners.main.database import get_db_manager
# BUG FIX: all_db_names is used in the migrate(...) call below but was never
# imported, which raises NameError as soon as this module runs.
from datawinners.main.couchdb.utils import all_db_names
from migration.couch.utils import migrate, mark_as_completed

# Couch temporary view: every Entity document whose aggregation-path type is
# "reporter", keyed by short code.
datasender_document = """ function(doc) { if (doc.document_type == "Entity" && doc.aggregation_paths['_type'] == "reporter") { emit(doc.short_code, doc); } }"""


def seperate_datasender_and_contact_document(db_name):
    """Re-label every reporter Entity document in the database as a Contact
    document; failures on individual documents are logged and skipped."""
    logger = logging.getLogger(db_name)
    logger.info('Starting Migration')
    manager = get_db_manager(db_name)
    for row in manager.database.query(datasender_document):
        try:
            row['value']['document_type'] = 'Contact'
            manager.database.save(row['value'], process_post_update=False)
        except Exception as e:
            logger.error("Failed to update document with id:%s" % row['value']['_id'])
    logger.info('Completed Migration')
    mark_as_completed(db_name)


migrate(all_db_names(), seperate_datasender_and_contact_document, version=(22, 0, 1), threads=3)
import logging

from mangrove.datastore.documents import FormModelDocument
from datawinners.main.couchdb.utils import all_db_names
from datawinners.search import form_model_change_handler
from datawinners.main.database import get_db_manager
from migration.couch.utils import migrate, mark_as_completed


def update_mapping(database_name):
    """Refresh the search mapping of every questionnaire in one database,
    then mark the database migration as completed."""
    # BUG FIX: `logger` (and `form_model_doc`) were first assigned inside the
    # try block, so a failure in get_db_manager - or on the first row - made
    # the except handler itself raise NameError and hid the real error.
    logger = logging.getLogger(database_name)
    form_model_doc = None
    try:
        dbm = get_db_manager(database_name)
        for row in dbm.load_all_rows_in_view('questionnaire'):
            form_model_doc = FormModelDocument.wrap(row["value"])
            form_model_change_handler(form_model_doc, dbm)
    except Exception as e:
        # form_model_doc is None when the failure happened before any row.
        form_code = form_model_doc.form_code if form_model_doc is not None else None
        logger.error(
            "Mapping update failed for database %s for form model %s "
            % (database_name, form_code))
        logger.error(e)
    mark_as_completed(database_name)


migrate(all_db_names(), update_mapping, version=(27, 2, 1), threads=2)
f.pop('event_time_field_flag') #Remove entity type from questionnaire form models. if document_data.get('entity_type'): document_data.pop('entity_type') if short_code_dict: json_data.remove(short_code_dict) form_model = FormModel.new_from_doc(dbm, (FormModelDocument.wrap(document_data))) if short_code_field: form_model._form_fields.insert(index, short_code_field) if validator: form_model.add_validator(validator) _save_form_model_doc(dbm, form_model) except Exception as e: logger.error('Failed form model for database : %s, doc with id: %s', dbm.database_name, row.id) logger.error(e) def migrate_form_model_to_add_eid_fields(db_name): logger = logging.getLogger(db_name) try: logger.info('Starting migration') dbm = get_db_manager(db_name) add_unique_id_and_short_code_field(dbm, logger) except Exception as e: logger.exception(e.message) mark_as_completed(db_name) migrate(all_db_names(), migrate_form_model_to_add_eid_fields, version=(11, 0, 3), threads=3)
from datawinners import settings
import logging
from sys import argv
from datawinners.main.initial_template_creation import create_questionnaire_templates
from datawinners.main.couchdb.utils import all_db_names

# Couch database that holds the questionnaire templates.
db_name = settings.QUESTIONNAIRE_TEMPLATE_DB_NAME


def run_migration():
    # (Re)create the questionnaire template documents and log the new ids.
    # NOTE(review): `global e` publishes the caught exception as a module
    # attribute - looks accidental; confirm nothing reads it.
    global logger, created_template_doc_ids, e
    logging.basicConfig(
        filename='/var/log/datawinners/migration_release_11_0_1.log',
        level=logging.DEBUG)
    logger = logging.getLogger(db_name)
    try:
        created_template_doc_ids = create_questionnaire_templates()
        logger.info("created template docs are :" + str(created_template_doc_ids))
    except Exception as e:
        logger.exception(e.message)


# NOTE(review): with 'force' given AND the template db missing, these two
# independent ifs run the migration twice - confirm that is intended.
if 'force' in argv:
    run_migration()
if not db_name in all_db_names():
    run_migration()
# BUG FIX: `sys` is used in the __main__ guard below but was not imported in
# the visible lines; stdlib import added (harmless if also imported above).
import sys

from datawinners.main.database import get_db_manager

if __name__ == "__main__" and __package__ is None:
    sys.path.insert(0, ".")
from datawinners.main.couchdb.utils import all_db_names
import logging
from migration.couch.utils import migrate, mark_start_of_migration

# NOTE(review): get_elasticsearch_handle is not imported in the visible
# lines - confirm it is imported above this chunk.
es = get_elasticsearch_handle()


def delete_submission_mapping(db_name):
    """Delete the per-questionnaire submission mapping from the database's
    search index; individual mapping failures are logged and skipped."""
    logger = logging.getLogger(db_name)
    try:
        mark_start_of_migration(db_name)
        logger.info('Starting migration')
        dbm = get_db_manager(db_name)
        # Hoisted: the index name is invariant across rows.
        index_name = dbm.database_name
        rows = dbm.load_all_rows_in_view("questionnaire")
        for row in rows:
            doc_type = row['value']['_id']
            try:
                es.send_request('DELETE', [index_name, doc_type, '_mapping'])
            except Exception as e:
                # CONSISTENCY FIX: use the per-database logger (was the root
                # `logging` module) so failures land in the right log.
                logger.error('Failed to delete mapping for index: %s and doctype: %s',
                             index_name, doc_type)
                logger.error("exception %s", e)
    except Exception as e:
        logger.exception(e.message)


migrate(all_db_names(), delete_submission_mapping, version=(10, 0, 8), threads=1)
def create_language_template(dbm, logger):
    """Create the customized-message templates for this database."""
    create_custom_message_templates(dbm)


def delete_existing_templates(dbm):
    """Remove all existing language templates plus the account message doc."""
    for row in dbm.load_all_rows_in_view('all_languages', include_docs=True):
        dbm.database.delete(row.doc)
    account_message = dbm.database.get(ACCOUNT_MESSAGE_DOC_ID)
    if account_message:
        dbm.database.delete(account_message)


def migrate_to_create_language_templates(db_name):
    """Migration entry point: wipe old templates and create fresh ones."""
    logger = logging.getLogger(db_name)
    try:
        logger.info('Starting migration')
        dbm = get_db_manager(db_name)
        delete_existing_templates(dbm)
        create_language_template(dbm, logger)
    except Exception as e:
        logger.exception(e.message)
    mark_as_completed(db_name)


migrate(all_db_names(), migrate_to_create_language_templates, version=(12, 0, 1), threads=3)
            # Continuation of migrate_survey_response_to_add_owner's per-row
            # loop (opening lines above this view): resolve the legacy
            # 'source' attribute to an owner uid and drop it.
            original_source = row['value']['source']
        except KeyError as e:
            # A missing 'source' key means this document was already migrated.
            logger.info("Already migrated %s" % row['value']['_id'])
            #ignore, document already migrated
            continue
        doc = SurveyResponseDocument.wrap(row['value'])
        survey_response = SurveyResponse.new_from_doc(dbm, doc)
        data_sender_id = source_to_rep_id_map.get(original_source)
        survey_response.created_by = data_sender_id
        survey_response.modified_by = data_sender_id
        owner_short_code = override_owner_with_on_behalf_user(rep_id_to_uid_map, data_sender_id, survey_response)
        owner_uid = rep_id_to_uid_map.get(owner_short_code)
        if owner_uid:
            remove_attr_source_from_survey_response(survey_response)
        else:
            # NOTE(review): `doc_id` is defined above this view - presumably
            # the current row's document id.
            logger.warn("Unable to set owner_uid for source :" + original_source + " doc: " + doc_id)
        survey_response.owner_uid = owner_uid
        survey_response.save()
        logger.info("Migrated %s" % survey_response.id)
    except Exception as e:
        logger.exception("Failed DB: %s with message %s" % (db_name, e.message))
    logger.info('Completed Migration')


migrate(all_db_names(), migrate_survey_response_to_add_owner, version=(7, 0, 2))