def main(arguments):
    """Entry point for the dump-rapidpro script.

    `arguments` is a docopt-style dict of CLI flags. Dumps the requested
    RapidPro endpoints then records the dump time for each.
    Returns 1 on usage error, None on success.
    """
    debug = arguments.get('--verbose') or False
    change_logging_level(debug)
    logger.info("Starting dump-rapidpro script...{}".format(
        " [DEBUG mode]" if debug else ""))

    # endpoints
    options = {
        'after': arguments.get('--after') or None,
        'resume': not (arguments.get('--noresume') or False)
    }

    # (CLI flag suffix, dump function) pairs, in execution order.
    # Replaces six copy-pasted do_xxx variables and six identical
    # if-blocks with one table-driven loop.
    actions = (
        ('contacts', dump_contacts),
        ('relayers', dump_relayers),
        ('messages', dump_messages),
        ('fields', dump_fields),
        ('flows', dump_flows),
        ('runs', dump_runs),
    )
    requested = [(name, dump_func) for name, dump_func in actions
                 if arguments.get('--{}'.format(name), False)]
    if not requested:
        logger.error("You need to specify at least one action")
        return 1

    if debug:
        logger.debug("Options: {}".format(options))

    now = datetime.datetime.now()
    # trim the ISO timestamp's last 3 chars (microseconds -> milliseconds)
    now_str = now.isoformat()[:-3]

    for name, dump_func in requested:
        dump_func(**options)
        update_meta(name, now_str)

    logger.info("-- All done. :)")
def post_api_data(url_or_path, payload):
    """POST `payload` as JSON to the API and return the decoded response.

    `url_or_path` may be an absolute URL or a path relative to the
    configured server URL. Logs a targeted hint for auth (401/403) and
    not-found (404) failures.

    Raises AssertionError on a non-200/201 response; re-raises any other
    request failure after logging it.
    """
    headers = {'Authorization': "Token {}".format(CONFIG.get("api_token")),
               'Content-type': 'application/json'}
    if url_or_path.startswith('http'):
        url = url_or_path
    else:
        url = "{server}{path}" \
            .format(server=CONFIG.get('server_url'), path=url_or_path)
    logger.debug("URL: {}".format(url))
    try:
        r = requests.post(url=url, headers=headers,
                          data=json.dumps(payload), timeout=TIMEOUT)
        # NOTE(review): assert is stripped under `python -O`; kept as-is
        # because callers may rely on AssertionError for the failure path.
        assert r.status_code in (200, 201)
    except AssertionError:
        if r.status_code in (403, 401):
            logger.error(
                "Received {code} HTTP status code. Most likely "
                "a wrong API TOKEN in config ({token})."
                .format(code=r.status_code,
                        token=CONFIG.get('api_token')))
        elif r.status_code == 404:
            logger.error(
                "Received {code} HTTP status code. Most likely "
                "a wrong Server URL in config ({url})."
                .format(code=r.status_code,
                        url=CONFIG.get('server_url')))
        else:
            # fixed typo: "unexcpected" -> "unexpected"
            logger.error("Received unexpected {code} HTTP status code."
                         .format(code=r.status_code))
        raise
    except Exception as e:
        logger.error("Unhandled Exception while requesting data.")
        logger.exception(e)
        raise
    else:
        return r.json()
def get_api_data(url_or_path, **params):
    """GET JSON data from the API and return the decoded response.

    `url_or_path` may be an absolute URL or a path relative to the
    configured server URL; `params` become the query string. Logs a
    targeted hint for auth (401/403) and not-found (404) failures.

    Raises AssertionError on a non-200 response; re-raises any other
    request failure after logging it.
    """
    headers = {'Authorization': "Token {}".format(CONFIG.get("api_token"))}

    if url_or_path.startswith('http'):
        url = url_or_path
    else:
        url = "{server}{path}".format(server=CONFIG.get('server_url'),
                                      path=url_or_path)

    query = "&".join("{key}={val}".format(key=key, val=val)
                     for key, val in params.items())
    logger.debug("URL: {}?{}".format(url, query))

    try:
        resp = requests.get(url=url, headers=headers,
                            params=params, timeout=TIMEOUT)
        assert resp.status_code == requests.codes.ok
    except AssertionError:
        if resp.status_code in (403, 401):
            logger.error(
                "Received {code} HTTP status code. Most likely "
                "a wrong API TOKEN in config ({token})."
                .format(code=resp.status_code,
                        token=CONFIG.get('api_token')))
        elif resp.status_code == 404:
            logger.error(
                "Received {code} HTTP status code. Most likely "
                "a wrong Server URL in config ({url})."
                .format(code=resp.status_code,
                        url=CONFIG.get('server_url')))
        raise
    except Exception as e:
        logger.error("Unhandled Exception while requesting data.")
        logger.exception(e)
        raise
    else:
        return resp.json()
def main(arguments):
    """Entry point for the fix-contacts-names script.

    Exports contact names to --file, or imports (fixes) them from it.
    Exactly one of the `export` / `import` commands must be given.
    Returns 1 on usage error, None on success.
    """
    debug = arguments.get('--verbose') or False
    change_logging_level(debug)
    mode_suffix = " [DEBUG mode]" if debug else ""
    logger.info("Starting fix-contacts-names script...{}"
                .format(mode_suffix))

    options = {
        'export': arguments.get('export') or False,
        'import': arguments.get('import') or False,
        'file': arguments.get('--file') or None,
    }

    # guard clauses: exactly one command, a file path, and (for import)
    # an existing file
    if options['export'] + options['import'] != 1:
        logger.error("You must specify whether to export or import data")
        return 1
    if not options['file']:
        logger.error("You must specify a file path")
        return 1
    if options['import'] and not os.path.exists(options['file']):
        logger.error("The filepath `{}` does not exist."
                     .format(options['file']))
        return 1

    if options['export']:
        with open(options['file'], 'w') as stream:
            export_contact_names_to(stream)
    if options['import']:
        with open(options['file'], 'r') as stream:
            fix_contact_names_from(stream)

    logger.info("-- All done. :)")
def main(arguments):
    """Entry point for the update-contacts script.

    Imports the callable named by --module (a `module.func` dotted path)
    and applies it to every contact document, counting those for which
    it returns truthy. Returns 1 on usage/load error, None on success.
    """
    debug = arguments.get('--verbose') or False
    change_logging_level(debug)
    logger.info("Starting update-contacts script...{}".format(
        " [DEBUG mode]" if debug else ""))

    options = {'module': arguments.get('--module') or None}
    if options['module'] is None:
        logger.error("You must pass in a module.func path.")
        return 1

    try:
        func = import_path(options['module'])
    except Exception as e:
        logger.error("Unable to load function path `{}`".format(
            options['module']))
        logger.exception(e)
        return 1
    else:
        if not callable(func):
            # fixed typo: "You func path" -> "Your func path"
            logger.error("Your func path is not callable `{}`".format(
                options['module']))
            return 1

    logger.debug("Options: {}".format(options))
    logger.info("Looping through {} contacts with {}".format(
        contacts.find().count(), func))

    updated = 0
    for contact in contacts.find():
        logger.debug("{}/{}".format(contact['phone'], contact['name']))
        if func(contact):
            updated += 1
    logger.info("Updated {} contacts".format(updated))
    logger.info("-- All done. :)")
def generate_periods_stats(destdir='', start_on=None, end_on=None):
    """Write JSON statistics files into `destdir`.

    Produces: `statistics.json` (all-periods stats by month, plus relayer
    details and a grand total), one `<month>.json` per month (stats by
    day), and `cumulative.json` (each day's stats accumulated from the
    overall start). When `start_on`/`end_on` are None they default to the
    created_on of the first/last message in the DB.
    """
    # when the DB is empty
    if not messages.count():
        logger.error("No messages in DB. wrong config?")
        return
    # no start_on? use first message date
    if start_on is None:
        start_on = datetime_from_iso(messages.find().sort([
            ('id', 1)
        ]).limit(1)[0].get('created_on'))
    if end_on is None:
        end_on = datetime_from_iso(messages.find().sort([
            ('id', -1)
        ]).limit(1)[0].get('created_on'))
    periods = get_periods(start_on=start_on, end_on=end_on)
    # single statistics file with entries for each month
    logger.info("Generating all-periods stats by months")
    statistics = get_months_stats(periods)
    statistics.update({
        'relayers': get_relayers_details(),
        'total': get_grand_total(start_on, end_on)
    })
    # stamp the grand total with the generation time
    statistics['total'].update({'update_time': datetime.datetime.now()})
    with open(os.path.join(destdir, 'statistics.json'),
              'w') as statistics_io:
        json.dump(statistics, statistics_io, indent=4, default=jsdthandler)
    # one stats file per month with entries for each day
    for period in sorted(periods['months'].values(), key=namesort):
        logger.info("Generating {} stats by days".format(period['name']))
        # only the days whose midpoint falls inside this month
        month_stats = OrderedDict([
            (dperiod['name'], period_stats(dperiod))
            for dperiod in sorted(periods['days'].values(), key=namesort)
            if in_period(period, dperiod['middle'])
        ])
        with open(os.path.join(destdir,
                               '{}.json'.format(period['name'])),
                  'w') as io:
            json.dump(month_stats, io, indent=4, default=jsdthandler)
    # single cumulative stats file
    logger.info("Generating cumulative stats by days")

    def cperiod_for(period):
        # stretch a day-period back to the overall start_on so its stats
        # cover everything from the beginning up to that day's end
        p = period.copy()
        p.update({
            'start_on': start_on,
            'middle': period_middle(p['start_on'], p['end_on']),
            'middle_ts': js_timestamp(p['middle'])
        })
        return p
    with open(os.path.join(destdir, 'cumulative.json'), 'w') as io:
        cumul_stats = OrderedDict([
            (period['name'], period_stats(cperiod_for(period)))
            for period in sorted(periods['days'].values(), key=namesort)
        ])
        json.dump(cumul_stats, io, indent=4, default=jsdthandler)