def test_delete_successful(self):
    """Deleting the account logs the session out and kills the credentials."""
    setup_account()
    self.client.login("super", super_password)

    response = self.client.request({"action": "delete"})
    self.assertEqual({"status": "logout"}, response)

    # After deletion the old credentials must be rejected.
    with self.assertRaises(InvalidLoginException):
        get_client().login("super", super_password)
def traffic_plot():
    """Build a dcc.Graph of pageviews and unique visitors over time."""
    client = get_client()
    pageview_frame = client.get_pageview_ts()
    uv_frame = client.get_uv_ts()

    # One line trace per time series.
    pageview_trace = go.Scatter({
        'x': pageview_frame.t,
        'y': pageview_frame.pageviews,
        'mode': 'lines',
        'name': 'Pageviews'
    })
    uv_trace = go.Scatter({
        'x': uv_frame.t,
        'y': uv_frame.uvs,
        'mode': 'lines',
        'name': 'Unique Visitors'
    })

    figure = {
        'data': [pageview_trace, uv_trace],
        'layout': {'title': 'Traffic Over Time'}
    }
    return dcc.Graph(id='Traffic', figure=figure)
def main():
    """Collect realtime trips for one day and emit their derived timings.

    Expects the target date as ``sys.argv[1]`` in ``YYYY-MM-DD`` form.
    Exits non-zero if the date is missing or malformed.
    """
    logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
    logger.info('Start')
    try:
        day = datetime.datetime.strptime(sys.argv[1], '%Y-%m-%d').date()
    except (IndexError, ValueError):
        # BUG FIX: sys.exit() exits with status 0, so callers saw success
        # even though no work was done; exit 1 instead, matching the
        # sibling pipeline script. IndexError also covers a missing argv
        # argument, which previously crashed with a traceback.
        logger.error('Failed to parse date')
        sys.exit(1)
    # Setup a coreapi client
    client = get_client()
    schema = client.get(API_SCHEMA)
    # Get the list of all the stops we are interested in
    interesting_stops = get_stops(client, schema, BOUNDING_BOX)
    # Collect realtime journeys
    trips = get_trips(client, schema, day, interesting_stops)
    # Derive departure and arrival timings
    derive_timings(trips)
    emit_trips(day, trips)
    logger.info('Stop')
def main():
    """Run the full journey/trip matching pipeline for one day.

    Expects the target date as ``sys.argv[1]`` in ``YYYY-MM-DD`` form.
    Exits 1 on a bad date, 2 when no stops can be retrieved; empty
    journey or trip sets are logged as warnings but processing continues.
    """
    logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
    logger.info('Start')
    try:
        day = datetime.datetime.strptime(sys.argv[1], '%Y-%m-%d').date()
    except (IndexError, ValueError):
        # IndexError also covers a missing argv argument.
        logger.error('Failed to parse date')
        sys.exit(1)
    # Setup a coreapi client
    client = get_client()
    schema = client.get(API_SCHEMA)
    # Get the list of all the stops we are interested in
    interesting_stops = get_stops(client, schema, BOUNDING_BOX)
    if not interesting_stops:
        logger.error('Failed to get any stops')
        sys.exit(2)
    # Retrieve timetable journeys
    journeys = get_journeys(day, interesting_stops, TNDS_REGIONS)
    if not journeys:
        # BUG FIX: logger.warn() is a deprecated alias of logger.warning().
        logger.warning('Failed to get any journeys')
    # Collect real-time journeys
    trips = get_trips(client, schema, day, interesting_stops)
    if not trips:
        logger.warning('Failed to get any trips')
    # Derive trip departure and arrival timings
    derive_timings(trips)
    # Merge journeys and trips
    merged = do_merge(trips, journeys)
    # Classify matched journeys (helper name is spelled this way upstream)
    clasify_matches(merged)
    # Lookup stops referenced in the merged data
    all_stops = lookup_stops(client, schema, merged, interesting_stops)
    # Expand merged data into one row per journey/trip match
    rows = expand(day, merged, all_stops)
    # And print the result
    emit_stops(day, BOUNDING_BOX, all_stops)
    emit_json(day, BOUNDING_BOX, rows)
    # and again, as CSV
    emit_csv(day, rows)
    logger.info('Stop')
def main():
    """Shutdown entry point: optionally uninstall the collector, then exit 0."""
    logging.basicConfig(level=logging.DEBUG)
    logging.debug('Shutting down')
    params = param.parse_params()

    # DON'T DELETE EXISTING COLLECTOR IF COLLECTOR_ID SPECIFIED
    cleanup_requested = (
        not os.path.isfile(config.COLLECTOR_FOUND)
        and 'cleanup' in params
        and params['cleanup']
    )
    if cleanup_requested:
        api = util.get_client(params)
        existing = collector.find_collector(api, params)
        if existing:
            logging.debug('Uninstalling collector.')
            # remove the collector
            collector.delete_collector(api, existing)

    logging.debug('Shutdown complete.')
    sys.exit(0)
def main():
    """Tear down the collector created at startup (when cleanup is enabled)."""
    logging.basicConfig(level=logging.DEBUG)
    logging.debug('Shutting down')
    params = param.parse_params()

    # DON'T DELETE EXISTING COLLECTOR IF COLLECTOR_ID SPECIFIED
    marker_missing = not os.path.isfile(config.COLLECTOR_FOUND)
    if marker_missing and 'cleanup' in params and params['cleanup']:
        api_client = util.get_client(params)
        found = collector.find_collector(api_client, params)
        if found:
            logging.debug('Uninstalling collector.')
            # remove the collector
            collector.delete_collector(api_client, found)

    logging.debug('Shutdown complete.')
    sys.exit(0)
def test_inventory_trade_successful(self):
    """A successful trade transfers the element to the target user intact."""
    clear_inventory()
    element_uuid = add_inventory_element("CPU Cooler Plus")
    trade_user = create_random_user()

    response = self.client.ms(
        "inventory",
        ["inventory", "trade"],
        element_uuid=element_uuid,
        target=trade_user,
    )
    self.assertEqual(response, {"ok": True})

    # Verify through a second, freshly authenticated session that the
    # element is now owned by the trade partner and otherwise unchanged.
    verifier: Client = get_client()
    verifier.login("test", super_password)
    elements = InventoryElement.list_inventory(verifier)
    self.assertEqual(1, len(elements))
    traded = elements[0]
    self.assertEqual(trade_user, traded.owner)
    self.assertEqual("", traded.related_ms)
    self.assertEqual(element_uuid, traded.uuid)
    self.assertEqual("CPU Cooler Plus", traded.name)
def main():
    """Retrieve and emit timetable journeys for one day.

    Expects the target date as ``sys.argv[1]`` in ``YYYY-MM-DD`` form.
    Exits non-zero if the date is missing or malformed.
    """
    logger.info('Start')
    try:
        day = datetime.datetime.strptime(sys.argv[1], '%Y-%m-%d').date()
    except (IndexError, ValueError):
        # BUG FIX: sys.exit() exits with status 0, signalling success on
        # failure; exit 1 instead, matching the sibling pipeline script.
        # IndexError also covers a missing argv argument.
        logger.error('Failed to parse date')
        sys.exit(1)
    # Setup a coreapi client
    client = get_client()
    schema = client.get(API_SCHEMA)
    # Get the list of all the stops we are interested in
    interesting_stops = get_stops(client, schema, BOUNDING_BOX)
    # Retrieve timetable journeys
    journeys = get_journeys(day, interesting_stops, TNDS_REGIONS)
    emit_journeys(day, journeys)
    logger.info('Stop')
def main():
    """Emit the stops referenced by one day's previously merged data.

    Expects the target date as ``sys.argv[1]`` in ``YYYY-MM-DD`` form.
    Exits non-zero if the date is missing or malformed.
    """
    logger.info('Start')
    try:
        day = datetime.datetime.strptime(sys.argv[1], '%Y-%m-%d').date()
    except (IndexError, ValueError):
        # BUG FIX: sys.exit() exits with status 0, signalling success on
        # failure; exit 1 instead, matching the sibling pipeline script.
        # IndexError also covers a missing argv argument.
        logger.error('Failed to parse date')
        sys.exit(1)
    # Setup a coreapi client
    client = get_client()
    schema = client.get(API_SCHEMA)
    # Get the list of all the stops we are interested in
    interesting_stops = get_stops(client, schema, BOUNDING_BOX)
    matched_data = load_merged(day)
    stops = lookup_stops(client, schema, matched_data['merged'], interesting_stops)
    emit_stops(day, matched_data['bounding_box'], stops)
    logger.info('Stop')
def setUpClass(cls):
    """Create one shared, authenticated client for the whole test class."""
    setup_account()
    shared_client: Client = get_client()
    shared_client.login("super", super_password)
    cls.client = shared_client
job = client.query(sql, location='US', job_config=bigquery.QueryJobConfig()) for row in list(job): result.append(row) if result: print( "{} has mis-matched columns. Mis-matching results:\n{}".format( table.table_id, DataFrame(result))) exit_code = 1 else: exit_code = 0 if not tables_with_want_and_got: print("No tables with want and got columns in this dataset") elif exit_code == 0: print( "Validation complete for all tables in this dataset. No mis-maching columns detected." ) exit(exit_code) if __name__ == "__main__": opts = docopt(__doc__, version="Is got what we want: ver 0.1") env = opts["--env"] data_set = opts["--data_set"] if not env: env = "production" if not data_set: data_set = 'tests' client = get_client(env, "bigquery") execute_tests(data_set)
def main():
    """Entry point: parse parameters, run startup, then exit successfully."""
    logging.basicConfig(level=logging.DEBUG)
    params = param.parse_params()
    api_client = util.get_client(params)
    startup(api_client, params)
    sys.exit(0)
def setUp(self):
    """Give every test its own fresh API client."""
    fresh_client: Client = get_client()
    self.client = fresh_client
def get_client_by_region(self, region):
    """Create a Google authentication client for the service account of
    the given region.

    EU regions use the EU service file and name; every other region
    falls back to the US credentials.
    """
    if region == config.cloud_regions[config.cloud_region_eu]:
        service_file, service_name = config.service_file_eu, config.service_name_eu
    else:
        service_file, service_name = config.service_file_us, config.service_name_us
    return get_client(service_file, service_name)