Example #1
def dump_messages(**options):
    logger.info("Updating Messages. Currently have {} messages in DB.".format(
        messages.count()))

    params = {}
    if options.get('after'):
        params.update({'after': options.get('after')})
    elif options.get('resume'):
        # resume from the last recorded sync timestamp for this endpoint
        messages_meta = meta.find_one({'endpoint': 'messages'})
        if messages_meta is not None:
            params.update({'after': messages_meta.get('updated_on')})

    # call messages API
    messages_list = get_api_data('/messages.json', **params)
    update_collection(collection=messages, data=messages_list, id_field='id')

    # loop through potential next pages
    while messages_list.get('next'):
        messages_list = get_api_data(messages_list.get('next'))
        update_collection(collection=messages,
                          data=messages_list,
                          id_field='id')

    logger.info(
        "Messages update completed. Now have {} messages in DB.".format(
            messages.count()))
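
All of the dump_* examples rely on two helpers that are not shown in these listings: get_api_data, which fetches one page of the RapidPro v1 JSON API, and update_collection, which upserts each returned record into a MongoDB collection keyed by id_field. The following is only a minimal sketch of what they could look like, assuming requests, pymongo 2.x (matching the .count() and .save() calls used elsewhere) and the usual results/next page layout; API_ROOT and API_TOKEN are placeholders, not names taken from the original scripts.

import requests

API_ROOT = "https://rapidpro.example.org/api/v1"  # placeholder base URL
API_TOKEN = "replace-with-your-token"             # placeholder credential


def get_api_data(path, **params):
    # Accept either a relative endpoint ('/messages.json') or the absolute
    # URL that a previous page returned in its 'next' field.
    url = path if path.startswith('http') else API_ROOT + path
    response = requests.get(
        url,
        params=params,
        headers={'Authorization': 'Token {}'.format(API_TOKEN)})
    response.raise_for_status()
    return response.json()


def update_collection(collection, data, id_field):
    # Upsert every record of the current page into the Mongo collection,
    # using id_field as the unique key so re-runs do not duplicate rows.
    for record in data.get('results', []):
        collection.update({id_field: record.get(id_field)},
                          record,
                          upsert=True)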
Example #2
def dump_runs(**options):
    logger.info("Updating Runs. Currently have {} runs in DB.".format(
        runs.count()))

    params = {}
    if options.get('after'):
        params.update({'after': options.get('after')})
    elif options.get('resume'):
        # resume from the last recorded sync timestamp for this endpoint
        runs_meta = meta.find_one({'endpoint': 'runs'})
        if runs_meta is not None:
            params.update({'after': runs_meta.get('updated_on')})

    # call runs API
    runs_list = get_api_data('/runs.json', **params)
    update_collection(collection=runs, data=runs_list, id_field='run')

    # loop through potential next pages
    while runs_list.get('next'):
        runs_list = get_api_data(runs_list.get('next'))
        update_collection(collection=runs, data=runs_list, id_field='run')

    logger.info("Updated Runs completed. Now have {} runs in DB.".format(
        runs.count()))
Example #3
def main(arguments):
    debug = arguments.get('--verbose') or False
    change_logging_level(debug)

    logger.info("Starting fix-contacts-names script...{}"
                .format(" [DEBUG mode]" if debug else ""))

    options = {
        'export': arguments.get('export') or False,
        'import': arguments.get('import') or False,
        'file': arguments.get('--file') or None,
    }

    if options['export'] + options['import'] != 1:
        logger.error("You must specify whether to export or import data")
        return 1

    if not options['file']:
        logger.error("You must specify a file path")
        return 1

    if options['import'] and not os.path.exists(options['file']):
        logger.error("The filepath `{}` does not exist."
                     .format(options['file']))
        return 1

    if options['export']:
        with open(options['file'], 'w') as fio:
            export_contact_names_to(fio)

    if options['import']:
        with open(options['file'], 'r') as fio:
            fix_contact_names_from(fio)

    logger.info("-- All done. :)")
Example #4
def main(arguments):
    debug = arguments.get('--verbose') or False
    change_logging_level(debug)

    logger.info("Generating JSON exports for message statistics")

    generate_periods_stats(destdir=arguments.get('<destdir>') or None)

    logger.info("All Done.")
Example #5
def main(arguments):
    debug = arguments.get('--verbose') or False
    change_logging_level(debug)

    logger.info("Starting dump-rapidpro script...{}".format(
        " [DEBUG mode]" if debug else ""))

    # endpoints
    options = {
        'after': arguments.get('--after') or None,
        'resume': not (arguments.get('--noresume') or False)
    }
    do_contacts = arguments.get('--contacts', False)
    do_messages = arguments.get('--messages', False)
    do_relayers = arguments.get('--relayers', False)
    do_fields = arguments.get('--fields', False)
    do_flows = arguments.get('--flows', False)
    do_runs = arguments.get('--runs', False)

    if not (do_contacts or do_messages or do_relayers or do_fields or do_flows
            or do_runs):
        logger.error("You need to specify at least one action")
        return 1

    if debug:
        logger.debug("Options: {}".format(options))

    now = datetime.datetime.now()
    now_str = now.isoformat()[:-3]

    if do_contacts:
        dump_contacts(**options)
        update_meta('contacts', now_str)

    if do_relayers:
        dump_relayers(**options)
        update_meta('relayers', now_str)

    if do_messages:
        dump_messages(**options)
        update_meta('messages', now_str)

    if do_fields:
        dump_fields(**options)
        update_meta('fields', now_str)

    if do_flows:
        dump_flows(**options)
        update_meta('flows', now_str)

    if do_runs:
        dump_runs(**options)
        update_meta('runs', now_str)

    logger.info("-- All done. :)")
Example #6
def send_invitation(relayer, number_list):
    max_num = 100
    for it in range(0, int(math.ceil(len(number_list) / float(max_num)))):
        step = int(it * max_num)
        chunk = number_list[step:step + max_num]

        logger.info(".. sending {} invitations at once.".format(len(chunk)))

        post_api_data(
            '/messages.json', {
                'phone': ["+223{}".format(num) for num in chunk],
                'text': INVIT_TEXT,
                'channel': relayer
            })
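
send_invitation posts through post_api_data, the write-side counterpart of get_api_data. A sketch under the same assumptions as the helper sketch after Example #1 (API_ROOT and API_TOKEN remain placeholders):

def post_api_data(path, payload):
    # POST a JSON payload to the API and return the decoded response.
    url = path if path.startswith('http') else API_ROOT + path
    response = requests.post(
        url,
        json=payload,
        headers={'Authorization': 'Token {}'.format(API_TOKEN)})
    response.raise_for_status()
    return response.json()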
Example #7
def main():
    pending_contacts = numbers.find({'sent': False})
    logger.info("We have {} potential numbers...".format(
        pending_contacts.count()))

    to_send = {
        ORANGE: [],
        MALITEL: [],
    }

    for number_item in pending_contacts:
        logger.info(number_item['number'])
        if not is_ureporter(number_item['number']):

            relayer = relayer_from_number(number_item['number'])
            if not relayer:
                continue

            to_send[relayer].append(number_item['number'])

            number_item.update({'sent': True})
            numbers.save(number_item)
        else:
            remove_number(number_item['number'])

    logger.info("Sending {} invitations to Orange users...".format(
        len(to_send[ORANGE])))
    send_invitation(ORANGE, to_send[ORANGE])

    logger.info("Sending {} invitations to Malitel users...".format(
        len(to_send[MALITEL])))
    send_invitation(MALITEL, to_send[MALITEL])
Example #8
def dump_fields(**options):
    logger.info("Updating Fields. Currently have {} fields in DB.".format(
        fields.count()))

    # call fields API
    fields_list = get_api_data('/fields.json')
    update_collection(collection=fields, data=fields_list, id_field='key')

    # loop through potential next pages
    while fields_list.get('next'):
        fields_list = get_api_data(fields_list.get('next'))
        update_collection(collection=fields, data=fields_list, id_field='key')

    logger.info("Updated Fields completed. Now have {} fields in DB.".format(
        fields.count()))
Example #9
def dump_contacts(**options):
    logger.info("Updating Contacts. Currently have {} contacts in DB.".format(
        contacts.count()))

    # call contacts API
    contacts_list = get_api_data('/contacts.json')
    update_collection(collection=contacts, data=contacts_list, id_field='uuid')

    # loop through potential next pages
    while contacts_list.get('next'):
        contacts_list = get_api_data(contacts_list.get('next'))
        update_collection(collection=contacts,
                          data=contacts_list,
                          id_field='uuid')

    logger.info(
        "Contacts update completed. Now have {} contacts in DB.".format(
            contacts.count()))
Example #10
def dump_relayers(**options):
    logger.info("Updating Relayers. Currently have {} relayers in DB.".format(
        relayers.count()))

    # call relayers API
    relayers_list = get_api_data('/relayers.json')
    update_collection(collection=relayers,
                      data=relayers_list,
                      id_field='relayer')

    # loop through potential next pages
    while relayers_list.get('next'):
        relayers_list = get_api_data(relayers_list.get('next'))
        update_collection(collection=relayers,
                          data=relayers_list,
                          id_field='relayer')

    logger.info(
        "Relayers update completed. Now have {} relayers in DB.".format(
            relayers.count()))
Example #11
def fix_contact_names_from(afile):
    csv_reader = csv.DictReader(afile, CSV_HEADERS)

    for entry in csv_reader:
        if csv_reader.line_num == 1:
            continue

        if not entry.get('is_ok') or not entry.get('uuid'):
            continue

        uuid = entry.get('uuid').strip()

        name = (entry.get('name') or "").strip()

        contact = contacts.find_one({'uuid': uuid})
        # skip entries whose contact is no longer in the local mirror
        if contact is None:
            continue

        # rapidpro doesn't update contact if name is empty
        if not name:
            name = "U-reporter"

        if contact['name'] != name:
            logger.info("Updating {}: {}".format(uuid, name))

            update_contact(contact=contact, name=name)
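
fix_contact_names_from ends by calling update_contact, which is not shown here. A hypothetical sketch that pushes the corrected name back through the same post_api_data helper and keeps the local mirror in sync; the exact payload RapidPro expects may require more fields (urns, groups), so treat this only as an illustration:

def update_contact(contact, name):
    # Hypothetical: send the corrected name upstream ...
    post_api_data('/contacts.json', {
        'uuid': contact.get('uuid'),
        'name': name,
    })
    # ... and mirror the change locally so later runs see the new name.
    contacts.update({'uuid': contact.get('uuid')},
                    {'$set': {'name': name}})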
Example #12
def dump_flows(**options):
    logger.info("Updating Flows. Currently have {} flows in DB.".format(
        flows.count()))

    params = {}
    if options.get('after'):
        params.update({'after': options.get('after')})
    elif options.get('resume'):
        # resume from the last recorded sync timestamp for this endpoint
        flows_meta = meta.find_one({'endpoint': 'flows'})
        if flows_meta is not None:
            params.update({'after': flows_meta.get('updated_on')})

    # call flows API
    flows_list = get_api_data('/flows.json', **params)
    update_collection(collection=flows, data=flows_list, id_field='uuid')

    # loop through potential next pages
    while flows_list.get('next'):
        flows_list = get_api_data(flows_list.get('next'))
        update_collection(collection=flows, data=flows_list, id_field='uuid')
Example #13
def main(arguments):
    debug = arguments.get('--verbose') or False
    change_logging_level(debug)

    logger.info("Starting update-contacts script...{}".format(
        " [DEBUG mode]" if debug else ""))

    options = {'module': arguments.get('--module') or None}

    if options['module'] is None:
        logger.error("You must pass in a module.func path.")
        return 1

    try:
        func = import_path(options['module'])
    except Exception as e:
        logger.error("Unable to load function path `{}`".format(
            options['module']))
        logger.exception(e)
        return 1
    else:
        if not callable(func):
            logger.error("You func path is not callable `{}`".format(
                options['module']))
            return 1

    logger.debug("Options: {}".format(options))

    logger.info("Looping through {} contacts with {}".format(
        contacts.find().count(), func))
    updated = 0
    for contact in contacts.find():

        logger.debug("{}/{}".format(contact['phone'], contact['name']))

        if func(contact):
            updated += 1

    logger.info("Updated {} contacts".format(updated))

    logger.info("-- All done. :)")
Example #14
def main():
    uuids = [c['uuid'] for c in contacts.find({})]
    logger.info(len(uuids))
    for uuid in uuids:
        logger.info(uuid)
        update_groups(contacts.find_one({'uuid': uuid}), remove_others=True)
Example #15
def main(arguments):
    debug = arguments.get('--verbose') or False
    change_logging_level(debug)

    json_folder = arguments.get('--json') or None
    html_path = arguments.get('--output') or "dashboard.html"

    logger.info("Generating Dashboard.")

    # load global statistics
    with open(os.path.join(json_folder, 'statistics.json')) as f:
        statistics = OrderedDict(sorted(json.load(f).items(), key=tssort))

    # update stats with price data
    for key in statistics.keys():
        if key == 'relayers':
            continue
        statistics[key]['stats']['estimated_price'] = \
            estimated_price_for(statistics[key], statistics['relayers'])
        statistics[key]['stats']['estimated_vat'] = \
            multiply_items(statistics[key]['stats']['estimated_price'], 0.18)
        statistics[key]['stats']['estimated_price_total'] = \
            multiply_items(statistics[key]['stats']['estimated_price'], 1.18)

    # load daily total for each month
    def loadjs(key):
        with open(os.path.join(json_folder, '{}.json'.format(key)), 'r') as f:
            return OrderedDict(sorted(json.load(f).items(), key=tssort))

    daily_data = {
        key: loadjs(key)
        for key in sorted(statistics.keys())
        if key not in ('total', 'relayers')
    }

    # cumulative values for each days
    with open(os.path.join(json_folder, 'cumulative.json')) as f:
        cumulative = OrderedDict(sorted(json.load(f).items(), key=tssort))

    # list of fields to loop on
    fields = OrderedDict([
        ('nb_sms_total', "Nombre SMS TOTAL"),
        ('nb_sms_in', "Nombre SMS Entrant"),
        ('nb_sms_out', "Nombre SMS Sortant"),
        ('nb_messages_total', "Nombre Messages TOTAL"),
        ('nb_messages_in', "Nombre Messages Entrant"),
        ('nb_messages_out', "Nombre Messages Sortant"),
        ('estimated_price', "Coût estimatif HT"),
        ('estimated_vat', "Coût estimatif TVA"),
        ('estimated_price_total', "Coût estimatif TTC"),
    ])

    # prepare context
    context = {
        'update_time':
        datetime_from_iso(statistics['total']['update_time']).strftime(
            '%d %B %Y, %Hh%M').decode('utf-8'),
        'relayers':
        OrderedDict(
            sorted(statistics['relayers'].items(),
                   key=lambda x: x[1]['relayer'])),
        'months_data':
        OrderedDict([(k, v)
                     for k, v in sorted(statistics.items(), reverse=True)
                     if k != 'relayers']),
        'daily_data':
        daily_data,
        'cumulative':
        cumulative,
        'fields':
        fields,
        'orange_id':
        ORANGE,
        'malitel_id':
        MALITEL,
        'orange_key':
        text_type(ORANGE),
        'malitel_key':
        text_type(MALITEL),
        'amount_fields':
        [k for k in fields.keys() if k.startswith('estimated_')]
    }

    # render template in file
    template = jinja_env.get_template('dashboard_tmpl.html')
    with open(html_path, 'w') as f:
        html = template.render(**context)
        if PY2:
            html = html.encode('utf-8')
        f.write(html)
Example #16
    @cherrypy.tools.json_out()
    def index(self):
        imported = 0
        try:
            all_nums = cherrypy.request.json
            for num in all_nums:
                if handle_number(num):
                    imported += 1
            logger.debug("imported: {}".format(imported))
            return {"status": "ok", "imported": imported}
        except Exception:
            return {"status": "error", "imported": 0}


def handle_number(num):
    num = clean_number(num)
    if not num:
        return

    if numbers.find({'number': num}).count():
        return

    numbers.insert({'number': num, 'sent': False})

    return True


if __name__ == '__main__':
    logger.info("Nb in DB: {}".format(numbers.find({}).count()))
    cherrypy.quickstart(UContactReceiver(), '/', 'app.conf')
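
handle_number depends on clean_number, which is also not part of these listings. A purely hypothetical normalisation sketch, assuming numbers are stored as 8-digit Malian local numbers (the invitation sender in Example #6 prepends +223):

import re


def clean_number(num):
    # Hypothetical: keep digits only, drop an optional 223 / 00223 country
    # prefix, and require an 8-digit local number; otherwise reject.
    digits = re.sub(r'[^0-9]', '', str(num or ''))
    if digits.startswith('00223'):
        digits = digits[5:]
    elif digits.startswith('223') and len(digits) == 11:
        digits = digits[3:]
    return digits if len(digits) == 8 else None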
Example #17
def generate_periods_stats(destdir='', start_on=None, end_on=None):

    # when the DB is empty
    if not messages.count():
        logger.error("No messages in DB. wrong config?")
        return

    # no start_on? use first message date
    if start_on is None:
        start_on = datetime_from_iso(messages.find().sort([
            ('id', 1)
        ]).limit(1)[0].get('created_on'))

    if end_on is None:
        end_on = datetime_from_iso(messages.find().sort([
            ('id', -1)
        ]).limit(1)[0].get('created_on'))

    periods = get_periods(start_on=start_on, end_on=end_on)

    # single statistics file with entries for each month
    logger.info("Generating all-periods stats by months")
    statistics = get_months_stats(periods)
    statistics.update({
        'relayers': get_relayers_details(),
        'total': get_grand_total(start_on, end_on)
    })
    statistics['total'].update({'update_time': datetime.datetime.now()})
    with open(os.path.join(destdir, 'statistics.json'), 'w') as statistics_io:
        json.dump(statistics, statistics_io, indent=4, default=jsdthandler)

    # one stats file per month with entries for each day
    for period in sorted(periods['months'].values(), key=namesort):
        logger.info("Generating {} stats by days".format(period['name']))
        month_stats = OrderedDict([
            (dperiod['name'], period_stats(dperiod))
            for dperiod in sorted(periods['days'].values(), key=namesort)
            if in_period(period, dperiod['middle'])
        ])
        with open(os.path.join(destdir, '{}.json'.format(period['name'])),
                  'w') as io:
            json.dump(month_stats, io, indent=4, default=jsdthandler)

    # single cumulative stats file
    logger.info("Generating cumulative stats by days")

    def cperiod_for(period):
        p = period.copy()
        p.update({
            'start_on': start_on,
            'middle': period_middle(p['start_on'], p['end_on']),
            'middle_ts': js_timestamp(p['middle'])
        })
        return p

    with open(os.path.join(destdir, 'cumulative.json'), 'w') as io:
        cumul_stats = OrderedDict([
            (period['name'], period_stats(cperiod_for(period)))
            for period in sorted(periods['days'].values(), key=namesort)
        ])
        json.dump(cumul_stats, io, indent=4, default=jsdthandler)