def __main__():
    # Parse Command Line
    parser = optparse.OptionParser()
    parser.add_option('-s', '--sqlitedb', dest='sqlitedb', default=None,
                      help='The SQLite Database')
    parser.add_option('-q', '--query', dest='query', default=None,
                      help='SQL query')
    parser.add_option('-Q', '--query_file', dest='query_file', default=None,
                      help='SQL query file')
    parser.add_option('-n', '--no_header', dest='no_header', default=False,
                      action='store_true',
                      help='Omit the column headers line')
    parser.add_option('-c', '--comment_char', dest='comment_char', default='',
                      help='comment character to prefix column header line')
    parser.add_option('-o', '--output', dest='output', default=None,
                      help='Output file for query results')
    (options, args) = parser.parse_args()

    # determine output destination
    if options.output is not None:
        try:
            outputPath = os.path.abspath(options.output)
            outputFile = open(outputPath, 'w')
        except Exception as e:
            exit('Error: %s' % (e))
    else:
        outputFile = sys.stdout

    query = None
    if options.query_file is not None:
        with open(options.query_file, 'r') as fh:
            query = fh.read()
    elif options.query is not None:
        query = options.query

    if query is None:
        # No query given: just describe the database schema
        try:
            describe_tables(get_connection(options.sqlitedb), outputFile)
        except Exception as e:
            exit('Error: %s' % (e))
        exit(0)
    else:
        try:
            run_query(get_connection(options.sqlitedb), query, outputFile,
                      no_header=options.no_header,
                      comment_char=options.comment_char)
        except Exception as e:
            exit('Error: %s' % (e))
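
# Example invocation (illustrative only; the script and file names below are
# placeholders, but the options are the ones defined by the parser above):
#
#   python query_db.py -s dataset.sqlite \
#       -q "SELECT name FROM sqlite_master WHERE type = 'table'" \
#       -o tables.tsv
#
# When neither -q nor -Q is given, the script falls through to
# describe_tables(), which writes the schema of every table in the database
# to the chosen output destination.
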
def current_active_numbers():
    """ Current number of activated, de-activated, and re-activated
    facilities.
    """
    bookings = run_query('bookings')
    bookings.created = bookings.created.apply(lambda d: d.date())
    # Get the last sunday
    today = date.today().toordinal()
    current_date = date.fromordinal(today - (today % 7))

    this_month = set(bookings[
        (bookings.created <= current_date) &
        (bookings.created >= (current_date - timedelta(days=30)))]
        .facility_id.unique())
    last_month = set(bookings[
        (bookings.created >= (current_date - timedelta(days=60))) &
        (bookings.created < (current_date - timedelta(days=30)))]
        .facility_id.unique())
    before = set(bookings[
        bookings.created <= (current_date - timedelta(days=60))]
        .facility_id.unique())

    activated = len(this_month.difference(last_month.union(before)))
    deactivated = len(last_month.difference(this_month))
    reactivated = len(before.difference(last_month).intersection(this_month))

    return ((deactivated, "De-activated"),
            (reactivated, "Re-activated"),
            (activated, "Activated"))
def most_active_free_plan():
    """ Leaderboard with the most active facilities (in the last month) on a
    free plan.
    """
    bookings = run_query('bookings_with_subscription')
    free_plans = ['flex', 'flex_legacy', '2014-11-free']
    bookings = bookings[bookings.subscription_type.isin(free_plans)]
    bookings.created = bookings.created.apply(lambda d: d.date())
    # Get the last sunday
    today = date.today().toordinal()
    current_date = date.fromordinal(today - (today % 7))

    top20_this_week = bookings[
        (bookings.created <= current_date) &
        (bookings.created >= (current_date - timedelta(days=30)))
    ].facility_id.value_counts(ascending=False).head(22)

    current_date -= timedelta(days=7)
    values_last_week = bookings[
        (bookings.created <= current_date) &
        (bookings.created >= (current_date - timedelta(days=30)))
    ].facility_id.value_counts(ascending=False)

    previous_ranks = [int(np.where(values_last_week.index == f)[0] + 1)
                      for f in top20_this_week.index]

    return top20_this_week.index, top20_this_week.values, previous_ranks
def top_pages_facilities():
    """ Leaderboard of facilities with bookings through landing pages. """
    bookings = run_query('bookings')
    bookings = bookings[bookings.source == 'pages.resmio.com']
    bookings.created = bookings.created.apply(lambda x: x.date())
    bookings = bookings[bookings.created >= (date.today() -
                                             timedelta(days=30))]
    bookings_count = bookings.facility_id.value_counts()
    return (bookings_count.index, bookings_count.values)
def number_pages_bookings():
    """ Weekly number of bookings through the landing pages. """
    bookings = run_query('bookings')
    bookings = bookings[bookings.source == 'pages.resmio.com']
    bookings = bookings.set_index('created')
    bookings = bookings.loc['20150101':]
    bookings_count = bookings.num.resample('w', how='count')
    bookings_count.index = map(lambda d: d.date(), bookings_count.index)
    dates = ['{}'.format(d) for d in bookings_count.index]
    return {'series': [{'data': bookings_count.values.tolist()[:-1],
                        'name': 'Bookings'}],
            'x_axis': {'labels': dates[:-1], 'type': 'datetime'}}
def number_covers():
    """ Weekly number of covers. """
    bookings = run_query('bookings')
    bookings = bookings[bookings.source != '']
    bookings = bookings.set_index('created')
    bookings = bookings.loc['20150101':]
    bookings_count = bookings.num.resample('w', how='sum')
    bookings_count.index = map(lambda d: d.date(), bookings_count.index)
    dates = ['{}'.format(d) for d in bookings_count.index]
    return {'series': [{'data': bookings_count.values.tolist()[:-1],
                        'name': 'Covers'}],
            'x_axis': {'labels': dates[:-1], 'type': 'datetime'}}
def least_active_paying():
    """ Leaderboard with the least active facilities on a paid plan. """
    bookings = run_query('bookings_with_subscription')
    facilities = run_query('facilities_with_subscription')
    free_plans = ['flex', 'flex_legacy', '2014-11-free', 'basic', 'custom']

    # Get the last sunday
    today = date.today().toordinal()
    current_date = date.fromordinal(today - (today % 7))

    # Get all paying facilities with plans created at least 3 months ago
    facilities = facilities[~facilities.subscription_type.isin(free_plans)]
    facilities = facilities[
        facilities.begins <= (current_date - timedelta(days=90))]
    facilities = facilities[facilities.ends.isnull()]

    # Get all bookings from paying facilities created at least 3 months ago
    bookings = bookings[~bookings.subscription_type.isin(free_plans)]
    bookings = bookings[
        bookings.s_begin <= (current_date - timedelta(days=90))]
    bookings.created = bookings.created.apply(lambda d: d.date())
    least_bookings = bookings[
        (bookings.created <= current_date) &
        (bookings.created >= (current_date - timedelta(days=30)))
    ].facility_id.value_counts(ascending=True)

    # Get facilities without bookings (not in least_bookings)
    no_bookings = set(facilities.id.values).difference(
        set(least_bookings.index))

    # Bottom 22 facilities
    bottom = pd.Series(0, index=no_bookings).append(least_bookings).head(22)
    labels = ['{} ({})'.format(
        f, facilities[facilities.id == f]['subscription_type'].values[-1])
        for f in bottom.index]

    return labels, bottom.values, np.arange(0, 22) + 1, 'ascending'
def paying_least_bookings():
    """ Leaderboard with the least active facilities on a paid plan. """
    bookings = run_query('bookings')
    facilities = run_query('facilities_with_subscription')
    free_plans = ['flex', 'flex_legacy', '2014-11-free', 'basic', 'custom']

    bookings.created = bookings.created.apply(lambda d: d.date())
    # Get the last sunday
    today = date.today().toordinal()
    current_date = date.fromordinal(today - (today % 7))

    paying_facilities = facilities[
        ~facilities.subscription_type.isin(free_plans)]
    paying_facilities = paying_facilities[
        paying_facilities.begins <= (current_date - timedelta(days=60))]

    bookings_last_month = bookings[
        bookings.created >= current_date - timedelta(days=30)]
    b_counts = bookings_last_month.facility_id.value_counts()

    labels = []
    counts = []
    for f_id in paying_facilities.id:
        labels.append(f_id)
        if f_id in b_counts.index:
            counts.append(b_counts[f_id])
        else:
            counts.append(0)

    idx = np.argsort(counts)
    counts = np.asarray(counts)[idx][:22]
    labels = np.asarray(labels)[idx][:22]
    labels = ['{} ({})'.format(
        f, facilities[facilities.id == f]['subscription_type'].values[-1])
        for f in labels]

    return labels, counts
def least_widget_views():
    """ Leaderboard of paying facilities with the fewest unique widget
    pageviews in the last month.
    """
    facilities = run_query('facilities_with_subscription')
    companies = intercom_companies()
    free_plans = ['flex', 'flex_legacy', '2014-11-free', 'basic', 'custom']

    facilities.created = facilities.created.apply(lambda d: d.date())
    last_month = date.today() - timedelta(days=30)

    paying_facilities = facilities[
        ~facilities.subscription_type.isin(free_plans) &
        facilities.ends.isnull() &
        (facilities.created <= last_month)].id.tolist()
    paying_companies = companies[companies.company_id.isin(paying_facilities)]
    sorted_df = paying_companies.sort('number_of_unique_pageviews_last_month')

    labels = ['{} ({})'.format(
        f, facilities[facilities.id == f]['subscription_type'].values[-1])
        for f in sorted_df.company_id]

    return (labels[:22],
            sorted_df.number_of_unique_pageviews_last_month[:22]
            .values.astype(int),
            np.arange(0, 22) + 1, 'ascending')
def new_lost_active_facilities():
    """ Weekly number of activated, de-activated, and re-activated
    facilities.
    """
    bookings = run_query('bookings')
    bookings.created = bookings.created.apply(lambda d: d.date())

    today = date.today().toordinal()
    # Get the last sunday
    current_date = date.fromordinal(today - (today % 7))
    end_date = date(day=1, month=1, year=2015)
    delta = timedelta(days=7)

    dates = []
    active = []
    deactive = []
    reactive = []
    while current_date >= end_date:
        dates.append('{}'.format(current_date))
        this_month = set(bookings[
            (bookings.created <= current_date) &
            (bookings.created >= (current_date - timedelta(days=30)))]
            .facility_id.unique())
        last_month = set(bookings[
            (bookings.created >= (current_date - timedelta(days=60))) &
            (bookings.created < (current_date - timedelta(days=30)))]
            .facility_id.unique())
        before = set(bookings[
            bookings.created <= (current_date - timedelta(days=60))]
            .facility_id.unique())

        active.append(len(this_month.difference(last_month.union(before))))
        deactive.append(len(last_month.difference(this_month)))
        reactive.append(len(before.difference(
            last_month).intersection(this_month)))

        current_date -= delta

    return {'series': [{'data': active[::-1], 'name': 'Activated'},
                       {'data': deactive[::-1], 'name': 'De-activated'},
                       {'data': reactive[::-1], 'name': 'Re-activated'}],
            'x_axis': {'labels': dates[::-1], 'type': 'datetime'}}
def active_verified_facilities():
    """ Line chart with the weekly number of verified and active facilities.
    """
    df = run_query('bookings_and_facilities')
    bookings = df[df.created.notnull()]
    bookings.created = bookings.created.apply(lambda d: d.date())
    df.f_created = df.f_created.apply(lambda d: d.date())

    current_date = date.today()
    end_date = date(day=1, month=1, year=2015)
    delta = timedelta(days=7)

    actives = []
    verifieds = []
    dates = []
    while current_date >= end_date:
        dates.append('{}'.format(current_date))
        actives.append(len(bookings[
            (bookings.created <= current_date) &
            (bookings.created > (current_date - timedelta(days=30)))]
            .facility_id.unique()))
        verifieds.append(
            len(df[df.f_created <= current_date].facility_id.unique()))
        current_date -= delta

    return {'series': [{'data': actives[::-1], 'name': 'Active'},
                       {'data': verifieds[::-1], 'name': 'Verified'}],
            'x_axis': {'labels': dates[::-1], 'type': 'datetime'}}
def __main__():
    # Parse Command Line
    parser = optparse.OptionParser()
    parser.add_option('-s', '--sqlitedb', dest='sqlitedb', default=None,
                      help='The SQLite Database')
    parser.add_option('-j', '--jsonfile', dest='jsonfile', default=None,
                      help='JSON dict of table specifications')
    parser.add_option('-q', '--query', dest='query', default=None,
                      help='SQL query')
    parser.add_option('-Q', '--query_file', dest='query_file', default=None,
                      help='SQL query file')
    parser.add_option('-n', '--no_header', dest='no_header', default=False,
                      action='store_true',
                      help='Omit the column headers line')
    parser.add_option('-c', '--comment_char', dest='comment_char', default='',
                      help='comment character to prefix column header line')
    parser.add_option('-o', '--output', dest='output', default=None,
                      help='Output file for query results')
    parser.add_option('-d', '--debug', dest='debug', default=False,
                      action='store_true',
                      help='Output info to stderr')
    (options, args) = parser.parse_args()

    # determine output destination
    if options.output is not None:
        try:
            outputPath = os.path.abspath(options.output)
            outputFile = open(outputPath, 'w')
        except Exception as e:
            exit('Error: %s' % (e))
    else:
        outputFile = sys.stdout

    def _create_table(ti, table):
        path = table['file_path']
        table_name = \
            table['table_name'] if 'table_name' in table else 't%d' % (ti + 1)
        comment_lines = \
            table['comment_lines'] if 'comment_lines' in table else 0
        comment_char = \
            table['comment_char'] if 'comment_char' in table else None
        column_names = \
            table['column_names'] if 'column_names' in table else None
        firstlinenames = \
            table['firstlinenames'] if 'firstlinenames' in table else False
        if column_names:
            load_named_columns = \
                table['load_named_columns'] \
                if 'load_named_columns' in table else False
        else:
            load_named_columns = False
        unique_indexes = table['unique'] if 'unique' in table else []
        indexes = table['index'] if 'index' in table else []
        filters = table['filters'] if 'filters' in table else None
        pkey_autoincr = \
            table['pkey_autoincr'] if 'pkey_autoincr' in table else None
        create_table(get_connection(options.sqlitedb), path, table_name,
                     pkey_autoincr=pkey_autoincr,
                     firstlinenames=firstlinenames,
                     column_names=column_names,
                     skip=comment_lines,
                     comment_char=comment_char,
                     load_named_columns=load_named_columns,
                     filters=filters,
                     unique_indexes=unique_indexes,
                     indexes=indexes)

    if options.jsonfile:
        try:
            with open(options.jsonfile) as fh:
                tdef = json.load(fh)
            if options.debug:
                print('JSON: %s' % tdef, file=sys.stderr)
            if 'tables' in tdef:
                for ti, table in enumerate(tdef['tables']):
                    _create_table(ti, table)
            if 'sql_stmts' in tdef:
                for si, stmt in enumerate(tdef['sql_stmts']):
                    rowcount = run_query(get_connection(options.sqlitedb),
                                         stmt, None)
                    if options.debug:
                        print('\nDB modification: %s \nrowcount: %s' %
                              (stmt, rowcount), file=sys.stderr)
            if 'queries' in tdef:
                for qi, qstmt in enumerate(tdef['queries']):
                    if 'header' in qstmt:
                        no_header = False
                        comment_char = qstmt['header']
                    else:
                        no_header = True
                        comment_char = None
                    with open(qstmt['result_file'], 'w') as fh:
                        query = qstmt['query']
                        rowcount = run_query(get_connection(options.sqlitedb),
                                             query, fh,
                                             no_header=no_header,
                                             comment_char=comment_char)
                        if options.debug:
                            print('\nSQL: %s \nrowcount: %s' %
                                  (query, rowcount), file=sys.stderr)
        except Exception as e:
            exit('Error: %s' % (e))

    query = None
    if options.query_file is not None:
        with open(options.query_file, 'r') as fh:
            query = fh.read()
    elif options.query is not None:
        query = options.query

    if query is None:
        try:
            describe_tables(get_connection(options.sqlitedb), outputFile)
        except Exception as e:
            exit('Error: %s' % (e))
    else:
        try:
            rowcount = run_query(get_connection(options.sqlitedb), query,
                                 outputFile,
                                 no_header=options.no_header,
                                 comment_char=options.comment_char)
            if options.debug:
                print('\nSQL: %s \nrowcount: %s' % (query, rowcount),
                      file=sys.stderr)
        except Exception as e:
            exit('Error: %s' % (e))
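
# Sketch of a table-specification file for -j/--jsonfile, assembled only from
# the keys this main() actually reads ('tables', 'sql_stmts', 'queries', and
# the per-table fields consumed by _create_table). File names and SQL are
# made-up placeholders; other per-table fields (column_names, unique, index,
# filters, pkey_autoincr, firstlinenames) are passed through to create_table,
# whose expected value formats are defined elsewhere.
#
# {
#   "tables": [
#     {"file_path": "input1.tsv",
#      "table_name": "t1",
#      "comment_lines": 1}
#   ],
#   "sql_stmts": [
#     "UPDATE t1 SET c2 = 0 WHERE c2 IS NULL"
#   ],
#   "queries": [
#     {"query": "SELECT c1, c2 FROM t1",
#      "result_file": "result1.tsv",
#      "header": "#"}
#   ]
# }
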
def __main__():
    # Parse Command Line
    parser = optparse.OptionParser()
    parser.add_option('-s', '--sqlitedb', dest='sqlitedb', default=None,
                      help='The SQLite Database')
    parser.add_option('-j', '--jsonfile', dest='jsonfile', default=None,
                      help='JSON dict of table specifications')
    parser.add_option('-q', '--query', dest='query', default=None,
                      help='SQL query')
    parser.add_option('-Q', '--query_file', dest='query_file', default=None,
                      help='SQL query file')
    parser.add_option('-n', '--no_header', dest='no_header', default=False,
                      action='store_true',
                      help='Omit the column headers line')
    parser.add_option('-o', '--output', dest='output', default=None,
                      help='Output file for query results')
    (options, args) = parser.parse_args()

    # determine output destination
    if options.output is not None:
        try:
            outputPath = os.path.abspath(options.output)
            outputFile = open(outputPath, 'w')
        except Exception as e:
            exit('Error: %s' % (e))
    else:
        outputFile = sys.stdout

    def _create_table(ti, table):
        path = table['file_path']
        table_name = \
            table['table_name'] if 'table_name' in table else 't%d' % (ti + 1)
        comment_lines = \
            table['comment_lines'] if 'comment_lines' in table else 0
        comment_char = \
            table['comment_char'] if 'comment_char' in table else None
        column_names = \
            table['column_names'] if 'column_names' in table else None
        if column_names:
            load_named_columns = \
                table['load_named_columns'] \
                if 'load_named_columns' in table else False
        else:
            load_named_columns = False
        unique_indexes = table['unique'] if 'unique' in table else []
        indexes = table['index'] if 'index' in table else []
        filters = table['filters'] if 'filters' in table else None
        pkey_autoincr = \
            table['pkey_autoincr'] if 'pkey_autoincr' in table else None
        create_table(get_connection(options.sqlitedb), path, table_name,
                     pkey_autoincr=pkey_autoincr,
                     column_names=column_names,
                     skip=comment_lines,
                     comment_char=comment_char,
                     load_named_columns=load_named_columns,
                     filters=filters,
                     unique_indexes=unique_indexes,
                     indexes=indexes)

    if options.jsonfile:
        try:
            fh = open(options.jsonfile)
            tdef = json.load(fh)
            if 'tables' in tdef:
                for ti, table in enumerate(tdef['tables']):
                    _create_table(ti, table)
        except Exception as e:
            exit('Error: %s' % (e))

    query = None
    if options.query_file is not None:
        with open(options.query_file, 'r') as fh:
            query = fh.read()
    elif options.query is not None:
        query = options.query

    if query is None:
        try:
            describe_tables(get_connection(options.sqlitedb), outputFile)
        except Exception as e:
            exit('Error: %s' % (e))
    else:
        try:
            run_query(get_connection(options.sqlitedb), query, outputFile,
                      no_header=options.no_header)
        except Exception as e:
            exit('Error: %s' % (e))
def __main__():
    # Parse Command Line
    parser = optparse.OptionParser()
    parser.add_option('-s', '--sqlitedb', dest='sqlitedb', default=None,
                      help='The SQLite Database')
    parser.add_option('-j', '--jsonfile', dest='jsonfile', default=None,
                      help='JSON dict of table specifications')
    parser.add_option('-q', '--query', dest='query', default=None,
                      help='SQL query')
    parser.add_option('-Q', '--query_file', dest='query_file', default=None,
                      help='SQL query file')
    parser.add_option('-n', '--no_header', dest='no_header', default=False,
                      action='store_true',
                      help='Omit the column headers line')
    parser.add_option('-o', '--output', dest='output', default=None,
                      help='Output file for query results')
    (options, args) = parser.parse_args()

    # determine output destination
    if options.output is not None:
        try:
            outputPath = os.path.abspath(options.output)
            outputFile = open(outputPath, 'w')
        except Exception as e:
            exit('Error: %s' % (e))
    else:
        outputFile = sys.stdout

    def _create_table(ti, table):
        path = table['file_path']
        table_name = \
            table['table_name'] if 'table_name' in table else 't%d' % (ti + 1)
        comment_lines = \
            table['comment_lines'] if 'comment_lines' in table else 0
        comment_char = \
            table['comment_char'] if 'comment_char' in table else None
        column_names = \
            table['column_names'] if 'column_names' in table else None
        if column_names:
            load_named_columns = \
                table['load_named_columns'] \
                if 'load_named_columns' in table else False
        else:
            load_named_columns = False
        unique_indexes = table['unique'] if 'unique' in table else []
        indexes = table['index'] if 'index' in table else []
        filters = table['filters'] if 'filters' in table else None
        pkey_autoincr = \
            table['pkey_autoincr'] if 'pkey_autoincr' in table else None
        create_table(get_connection(options.sqlitedb), path, table_name,
                     pkey_autoincr=pkey_autoincr,
                     column_names=column_names,
                     skip=comment_lines,
                     comment_char=comment_char,
                     load_named_columns=load_named_columns,
                     filters=filters,
                     unique_indexes=unique_indexes,
                     indexes=indexes)

    if options.jsonfile:
        try:
            with open(options.jsonfile) as fh:
                tdef = json.load(fh)
            if 'tables' in tdef:
                for ti, table in enumerate(tdef['tables']):
                    _create_table(ti, table)
        except Exception as e:
            exit('Error: %s' % (e))

    query = None
    if options.query_file is not None:
        with open(options.query_file, 'r') as fh:
            query = fh.read()
    elif options.query is not None:
        query = options.query

    if query is None:
        try:
            describe_tables(get_connection(options.sqlitedb), outputFile)
        except Exception as e:
            exit('Error: %s' % (e))
    else:
        try:
            run_query(get_connection(options.sqlitedb), query, outputFile,
                      no_header=options.no_header)
        except Exception as e:
            exit('Error: %s' % (e))