def submit():
    looker = LookerApi(client_id, client_secret, api_endpoint)
    c = looker.get_look(528)
    channel_list = [d['channel_dimensions.organization'] for d in c]
    market_list = get_markets_list(looker)
    subtype_list = get_channel_subtype_list(looker)
    return render_template('submit.html',
                           channel_list=channel_list,
                           market_list=market_list,
                           subtype_list=subtype_list)
def conn(looker_host, my_token, my_secret):
    from lookerapi import LookerApi
    my_host = 'https://%s.looker.com:19999/api/3.0/' % (looker_host)
    looker = LookerApi(host=my_host, token=my_token, secret=my_secret)
    if looker is None:
        notify('Alfred - Looker', 'Connection to Looker failed!')
        exit()
    return looker
def generate_rows(self, dataset_schema=None, dataset_partitioning=None,
                  partition_id=None, records_limit=-1):
    looker = LookerApi(host=self.baseurl, token=self.clientid, secret=self.clientsecret)
    data = looker.get_look(self.lookid, "json", limit=5000)
    for row in data:
        yield row
def conn(host, au):
    f = open('config.yml')
    params = yaml.load(f)
    f.close()
    if au == 1:
        my_host = 'https://' + params['hosts'][host]['host'] + '.au.looker.com:19999/api/3.0/'
    else:
        my_host = 'https://' + params['hosts'][host]['host'] + '.looker.com:19999/api/3.0/'
    my_secret = params['hosts'][host]['secret']
    my_token = params['hosts'][host]['token']
    looker = LookerApi(host=my_host, token=my_token, secret=my_secret)
    if looker is None:
        print('Connection to Looker failed')
        exit()
    return looker
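
# Usage sketch (an illustrative addition, not part of the original): 'localhost' stands in
# for a host key defined in config.yml, and au=0 picks the standard .looker.com domain
# rather than the .au.looker.com one handled above.
looker = conn('localhost', 0)
print(looker.get_all_users())  # get_all_users() as used by other scripts in this collection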
### ------- HERE ARE PARAMETERS TO CONFIGURE -------
dashboards_to_delete = sys.argv[1]
host = 'localhost'

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

### ------- HANDLE ARGUMENT FILELIST OR SINGLE DASHBOARD -------
if os.path.isfile(dashboards_to_delete):
    filelist = open(dashboards_to_delete)
    for i in filelist:
        i = i.strip()  # drop the trailing newline from each line in the file
        print("deleting dashboard id: " + i)
        data = looker.delete_dashboard(i)
        pprint(data)
else:
    data = looker.delete_dashboard(dashboards_to_delete)
    pprint(data)

### ------- Done -------
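
# Invocation sketch (an illustrative addition; the script filename is hypothetical):
#   python delete_dashboards.py 42              deletes the single dashboard id 42
#   python delete_dashboards.py dashboards.txt  deletes every dashboard id listed in the file, one per line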
"comma separated list of users that need email credentials and saml credentials deleted" ) args = parser.parse_args() host = 'localhost' ### ------- OPEN THE CONFIG FILE and INSTANTIATE API ------- f = open('config.yml') params = yaml.load(f) f.close() my_host = params['hosts'][host]['host'] my_secret = params['hosts'][host]['secret'] my_token = params['hosts'][host]['token'] looker = LookerApi(host=my_host, token=my_token, secret=my_secret) for user_id in args.users.split(','): user_email_info = looker.get_users_email_credentials(user_id=user_id, fields='email') user_saml_email_info = looker.get_users_saml_credentials( user_id=user_id, fields='email,saml_user_id') print(user_email_info) print(user_saml_email_info) if user_email_info != None: print("not none") if user_email_info['email'] != user_saml_email_info['email']: print("email misalignment for user " + user_id) else: looker.delete_users_saml_credentials(user_id=user_id) else:
### ------- HERE ARE PARAMETERS TO CONFIGURE -------
host = 'localhost'

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

### ------- GET ALL USERS -------
userlistarray = looker.get_all_users()
userlist = []
for user in userlistarray:
    userlist.append(user['id'])

### ------- GET ALL SCHEDULE ID/TITLES -------
scheduleattrlist = []
# the user attribute id that you're filtering on.
# go to Admin > User Attributes. Click 'Edit' next to your selected user attribute,
# and you'll see the user_attribute_id at the end of the URL
user_attribute_id = None  # TODO: set the user attribute id described above

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

### ------- GET RELEVANT GROUP IDs -------
user_attributes_groups = looker.get_user_attribute_group_values(user_attribute_id)
group_ids = []
for i in user_attributes_groups:
    group_ids.append(i['group_id'])

### ------- GET FIRST USER_ID IN THAT GROUP -------
user_ids = []
for i in group_ids:
    user_ids.append(looker.get_group_users(i)[0]['id'])

### ------- GET A LIST OF DASHBOARD_IDs IN A SPECIFIED SPACE -------
    type=str)
args = parser.parse_args()

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
host = ""  # the name of a Looker environment specified in a config.yml file in the same directory as this script
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)


def parse_csv_file_and_add_users_to_groups():
    """Parse a CSV file containing a User ID in the first column and a Group ID
    in the second column and update groups for users."""
    # the encoding may need to be updated depending on your file,
    # see https://stackoverflow.com/a/17912811
    f = open(args.filename, 'r', encoding='utf-8-sig')
    csv_reader = csv.reader(f, delimiter=',')
    line_number, count_of_updates = 0, 0
    for line in csv_reader:
        line_number = line_number + 1
        try:
            if int(line[0]) > 0 and int(line[1]) > 0:
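
# Example of the CSV layout the parser above expects (an illustrative assumption; the ids
# are hypothetical): a numeric User ID in the first column and a numeric Group ID in the
# second, one pair per line, with no header row.
#
#   101,7
#   102,7
#   103,12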
import yaml
from lookerapi import LookerApi
import csv

f = open('config.yml')
params = yaml.load(f)
f.close()
host = 'localhost'
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

all_users = looker.get_user()
user_attributes = looker.get_user_attributes()

writer = csv.writer(open('user-attributes.csv', 'w'),
                    delimiter=',',
                    quotechar='"',
                    quoting=csv.QUOTE_MINIMAL,
                    lineterminator='\n')
headers = ['group_ids']
for attribute in user_attributes:
    headers.append(attribute['name'])
writer.writerow(headers)

for u in all_users:
### ------- HERE ARE PARAMETERS TO CONFIGURE -------
host = 'cse'
output_csv_name = 'output.csv'

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

### ------- GET AND PRINT THE LOOK -------
space_data = looker.get_all_spaces(fields='id, parent_id, name')
data = looker.get_content_validation()
host_url = my_host[:my_host.index(":19999")]
broken_content = data['content_with_errors']
# pprint(broken_content)

output = []
for item in broken_content:
    if item['dashboard'] is None:
        type = 'look'
    volume int(11) DEFAULT NULL,
    SHARD KEY tick (tick, dt)
);
""")
cursor.execute(query)

# TODO: does not work yet
# create the memsql pipeline
query = (
    "USE stocks; CREATE PIPELINE ticks AS LOAD DATA KAFKA 'public-kafka.memcompute.com:9092/stockticker' BATCH_INTERVAL 2500 REPLACE INTO TABLE ticks FIELDS TERMINATED BY ',' ENCLOSED BY '' ESCAPED BY '\\' LINES TERMINATED BY '\n' STARTING BY '';"
)
cursor.execute(query)

# TODO: does not work yet
# start the memsql pipeline
query = ("ALTER PIPELINE ticks SET OFFSETS LATEST; START PIPELINE ticks;")
cursor.execute(query)

cursor.close()
conn.close()

# create a looker session
looker = LookerApi(host=looker_host, token=looker_token, secret=looker_secret)
if looker is None:
    print('Connection to Looker failed')
    exit()

# TODO: does not work yet
# set the connection in looker to use the new memsql host
r = looker.update_connection(looker_conn, memsql_host, memsql_user, memsql_pw)
pp.pprint(r)
source_look_id = 58              ### The look number you would like to move
destination_name = 'production'  ### The name in your config.yml file
destination_space_id = 769       ### The destination space ID to move the look to

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()

my_host = params['hosts'][source_name]['host']
my_secret = params['hosts'][source_name]['secret']
my_token = params['hosts'][source_name]['token']
source_looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

my_host = params['hosts'][destination_name]['host']
my_secret = params['hosts'][destination_name]['secret']
my_token = params['hosts'][destination_name]['token']
dest_looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

### ------- GET THE SOURCE LOOK -------
look_body = source_looker.get_look_info(source_look_id, 'query_id, query, title')
print("---- Source Look Body ----")
pp(look_body)
print("---- Source query ----")
import time

### ------- HERE ARE PARAMETERS TO CONFIGURE -------
dashboard_id = 7
host = 'cs_eng'

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

dashboard = looker.get_dashboard(dashboard_id)

# Collect dashboard filters
dashboard_filters = []
filter_params = ['id', 'title', 'default_value', 'dashboard_id']
for dashboard_filter_object in dashboard['dashboard_filters']:
    dashboard_filter = {
        param: dashboard_filter_object[param]
        for param in filter_params
    }
    dashboard_filters.append(dashboard_filter)

# Collect dashboard queries and associated filter listeners
query_params = [
import yaml
from lookerapi import LookerApi
from datetime import datetime

f = open('config.yml')
params = yaml.load(f)
f.close()
host = 'teach'
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

me = looker.get_current_user()['id']
# print(me)
all_users = looker.get_user()
ids_to_disable = []
# print(all_users)
days_to_disable = 30
no_login_count = 0
good_user_count = 0
errors = 0
### ------- HERE ARE PARAMETERS TO CONFIGURE -------
looks_to_delete = sys.argv[1]
host = 'localhost'

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

### ------- HANDLE ARGUMENT FILELIST OR SINGLE LOOK -------
if os.path.isfile(looks_to_delete):
    filelist = open(looks_to_delete)
    for i in filelist:
        i = i.strip()  # drop the trailing newline from each line in the file
        print("deleting lookid: " + i)
        data = looker.delete_look(i)
        pprint(data)
else:
    data = looker.delete_look(looks_to_delete)
    pprint(data)

### ------- Done -------
dest="dashboards", help="comma separated list of dashboards to soft delete") args = parser.parse_args() ### ------- OPEN THE CONFIG FILE and INSTANTIATE API ------- f = open('config.yml') params = yaml.load(f) f.close() host = 'localhost' my_host = params['hosts'][host]['host'] my_secret = params['hosts'][host]['secret'] my_token = params['hosts'][host]['token'] looker = LookerApi(host=my_host, token=my_token, secret=my_secret) looks_to_delete = [] soft_delete = {"deleted": True} #applies to both look and dashboard patchs for dashboard_id in args.dashboards.split(','): dashboard_looks = looker.get_dashboard( dashboard_id, fields="dashboard_elements(look_id)") if dashboard_looks: looks_to_delete = looks_to_delete + [ look['look_id'] for look in dashboard_looks['dashboard_elements'] ] dashboard_updated = looker.update_dashboard(dashboard_id, body=soft_delete, fields='id') pprint("Soft deleted dashboard id " + str(dashboard_updated['id']))
import requests
from lookerapi import LookerApi
import csv
import itertools
from operator import itemgetter

clientId = 'mZB4cy9cmpCFdb6sctdg'
clientSecret = 'dVr27XhxjqhQqNFShNNwX8rJ'
host = "https://dev.looker.turner.com:19999/api/3.1/"

looker = LookerApi(host=host, token=clientId, secret=clientSecret)

firstRes = looker.get_look_data(6199, clientId, clientSecret, host)
print(firstRes)
secondRes = looker.get_look_data(6201, clientId, clientSecret, host)
print(secondRes)

countList = []


def findCount(res):
    totalCount = 0
    groupsCount = {}
    sortedRes = sorted(res, key=itemgetter('pdt_dfp_operative_temp.modality'))
    for key, value in itertools.groupby(sortedRes, key=itemgetter('pdt_dfp_operative_temp.modality')):
        print(key)
        keyCount = 0
        for i in value:
            totalCount += i.get('pdt_dfp_operative_temp.total_capacity')
host = 'localhost'
project_name = 'hub'
branch_name = 'dev-spoke-spoke-pwrc'
user_id = 945
role_to_add_id = 136

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

# add role to get access to core model
roles = looker.get_user_role(user_id, "id")
roles_source = roles.copy()
roles.append({"id": role_to_add_id})
looker.set_user_role(user_id, roles)

# begin issuing calls as the user who needs to catch up to production
looker.login_user(user_id)
looker.update_session_workspace()
# - Scheduled Task
# - SQL Runner
# - Suggest Filter
sources_to_exclude = []

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

queries = looker.get_running_queries()
for i in queries:
    query_created_at = datetime.strptime(
        i['created_at'].split('.')[0].replace('T', ' '), '%Y-%m-%d %H:%M:%S')
    source = i['source']
    history_id = i['id']
    # Assumes system time is in UTC.
    # Small, negative query running times can be expected if system times differ across
    # the Looker MySQL database and the machine from which the script is being executed.
    running_time = (datetime.utcnow() - query_created_at).total_seconds()
    if running_time > threshold and source not in sources_to_exclude:
        print('killing query: {}'.format(i['query_task_id']))
        looker.kill_query(i['query_task_id'])
import os
import yaml  ### install the pyyaml package
import random
from lookerapi import LookerApi
from datetime import datetime
from pprint import pprint
from pptx import Presentation
from pptx.util import Cm

### ------- HERE ARE PARAMETERS TO CONFIGURE -------
host = 'hosts'

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

space = looker.get_space()
all_looks_in_space = looker.get_looks_in_space()
looks = all_looks_in_space
def index():
    """Render the client overview report."""
    looker = LookerApi(client_id, client_secret, api_endpoint)
    markets = request.form.getlist('market')
    channels = request.form.getlist('channel')
    subtypes = request.form.getlist('subtype')
    start_date = request.form.getlist('start_date')
    end_date = request.form.getlist('end_date')

    r = looker.get_look_query(500)
    i = looker.get_look_query(532)
    r = r['query']
    i = i['query']

    # Format the date query the way Looker expects it
    if start_date[0] != '' and end_date[0] != '':
        date_query = dt.datetime.strptime(start_date[0], '%Y-%m-%d').strftime('%Y/%m/%d') + ' to ' \
            + dt.datetime.strptime(end_date[0], '%Y-%m-%d').strftime('%Y/%m/%d')
    else:
        date_query = r['filters']['visit_dimensions.local_visit_date']
    print(date_query)

    # Get only necessary fields to run an inline query
    t = {k: r[k] for k in ('model', 'view', 'fields', 'limit', 'dynamic_fields', 'filters')}
    m = {n: i[n] for n in ('model', 'view', 'fields', 'limit', 'dynamic_fields', 'filters')}

    # Replace existing filters with new filters
    t['filters']['market_dimensions.market_name'] = ','.join(markets)
    t['filters']['channel_dimensions.organization'] = ','.join(channels)
    t['filters']['channel_dimensions.sub_type'] = ','.join(subtypes)
    t['filters']['visit_dimensions.local_visit_date'] = date_query
    r2 = looker.run_inline_query(t)

    m['filters']['market_dimensions.market_name'] = ','.join(markets)
    m['filters']['channel_dimensions.organization'] = ','.join(channels)
    m['filters']['channel_dimensions.sub_type'] = ','.join(subtypes)
    m['filters']['visit_dimensions.local_visit_date'] = date_query
    m2 = looker.run_inline_query(m)
    # code.interact(local=dict(globals(), **locals()))

    r2 = r2[0]
    try:
        on_route_time = int(round(r2['avg_on_route_time'], 0))
        on_scene_time = int(round(r2['avg_on_scene_time'], 0))
        bb_rate = int(round(r2['bb_rate'] * 100, 0))
        ed_escalation = int(round(r2['ed_escalation_rate'] * 100, 0))
        nps = int(round(r2['nps'], 0))
        num_requests = "{:,}".format(r2['visit_facts.visits'])
        cost_svg = "{:,}".format(int(round(r2['cost_savings'], 0)))
        avg_age = int(round(r2['patient_dimensions.average_age'], 0))
        pct_female = int(round(r2['pct_female'], 0))
        pct_male = int(round(r2['pct_male'], 0))
        pct_web = int(round(r2['pct_web'] * 100, 0))
        pct_mobile = int(round(r2['pct_mobile'] * 100, 0))
        pct_phone = int(round(r2['pct_phone'] * 100, 0))
    except Exception as e:
        on_route_time = 0
        on_scene_time = 0
        bb_rate = 0
        ed_escalation = 0
        nps = 0
        num_requests = "{:,}".format(r2['visit_facts.visits'])
        cost_svg = 0
        avg_age = 0
        pct_female = 0
        pct_male = 0
        pct_web = 0
        pct_mobile = 0
        pct_phone = 0

    icds = m2[0:5]
    icd_codes = [i['icd_code_dimensions.diagnosis_group'] for i in icds]
    icd_pcts = [int(round(p['pct_of_total'] * 100, 0)) for p in icds]

    return render_template('client_overview_report.html',
                           on_route_time=on_route_time, cost_svg=cost_svg,
                           on_scene_time=on_scene_time, bb_rate=bb_rate,
                           ed_escalation=ed_escalation, nps=nps, num_requests=num_requests,
                           avg_age=avg_age, pct_female=pct_female, pct_male=pct_male,
                           pct_web=pct_web, pct_mobile=pct_mobile, pct_phone=pct_phone,
                           icd_codes=icd_codes, icd_pcts=icd_pcts,
                           markets=markets, channels=channels, subtypes=subtypes,
                           start_date=start_date, end_date=end_date)
### ------- HERE ARE PARAMETERS TO CONFIGURE -------
look_to_get = sys.argv[1]
host = 'localhost'

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

### ------- GET AND PRINT THE LOOK -------
data = looker.get_look(look_to_get)
pprint(data)

### ------- Done -------
print("Done")
# These two lines enable debugging at httplib level (requests -> urllib3 -> http.client).
# You will see the REQUEST, including HEADERS and DATA, and the RESPONSE with HEADERS but without DATA.
# The only thing missing will be the response.body, which is not logged.
import http.client as http_client
http_client.HTTPConnection.debuglevel = 1

# You must initialize logging, otherwise you'll not see debug output.
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True

host = 'looker'

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

look_id = 1
look = looker.get_look_info(look_id)
pprint(look)
## --------- API Config -------------
f = open('config.yml')
params = yaml.load(f)
f.close()
hostname = 'sandbox'
my_host = params['hosts'][hostname]['host']
my_secret = params['hosts'][hostname]['secret']
my_token = params['hosts'][hostname]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

## --------- API Calls -------------
## -- Get all models --
models = looker.get_model("")
pp(models)

for model in models:
    model_name = model['name']

    ## -- Get single model --
    model_def = looker.get_model(model_name)
    # pp(model_def)
host = 'localhost'

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

## --------- csv writing -------
lis = {}


def write_fields(explore, value):
    model, exp = explore.split(',')
    ### compile the line - this is possible to combine above, but here to keep things simple
    rowout = []
    rowout.append(model)
    rowout.append(exp)
    rowout.append(value)
old_value = "Calvin Klein" new_value = "Allegra K" host = 'localhost' ### ------- OPEN THE CONFIG FILE and INSTANTIATE API ------- f = open('config.yml') params = yaml.load(f) f.close() my_host = params['hosts'][host]['host'] my_secret = params['hosts'][host]['secret'] my_token = params['hosts'][host]['token'] looker = LookerApi(host=my_host, token=my_token, secret = my_secret) # GET request to /looks endpoint with look id and extract the query id query_id = looker.get_look_info(look_id,"query_id") pprint(query_id) # GET request to /queries endpoint with query id from step 1 to get the query definition query = looker.get_query(query_id['query_id'],"model,view,pivots,row_total,query_timezone,limit,filters,filter_expression,fill_fields,fields,dynamic_fields,column_limit,total,sorts") pprint(query) # Modify the body of the query object to change the filter value if query['filters'][filter_field] == old_value:
args = parser.parse_args()
host = 'localhost'

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

# This JSON query retrieves all current scheduled plans from Looker
query_body = {
    "view": "scheduled_plan",
    "fields": [
        "scheduled_plan.id",
        "scheduled_plan.cron_schedule"
    ],
    "pivots": None,
    "fill_fields": None,
    "filters": {
        "scheduled_plan.run_once": "no"
    },
    "filter_expression": None,
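
# A minimal usage sketch (an addition, not part of the original script): once the truncated
# query_body above is completed, it could be executed with the same run_inline_query(body)
# call used by other scripts in this collection; the function name is hypothetical, and the
# printed field names come from the "fields" list above.
def print_scheduled_plan_crons(looker_client, body):
    plans = looker_client.run_inline_query(body)
    for plan in plans:
        print(plan['scheduled_plan.id'], plan['scheduled_plan.cron_schedule'])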
### ------- HERE ARE PARAMETERS TO CONFIGURE -------
host = ''
csv_file = ''

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)


def read_csv(file):
    f = open(file, 'r')
    reader = csv.reader(f)
    header = next(reader)
    output = [dict(zip(header, map(str, row))) for row in reader]
    return output


data = read_csv(csv_file)
for i in data:
    user_id = i['user_id']
    looker.update_user(user_id, i)
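
# Example of the CSV layout read_csv() above expects (hypothetical columns and values):
# the header row supplies the dict keys, every row must contain the user_id to patch,
# and the remaining columns are the user fields to update.
#
#   user_id,first_name,last_name
#   42,Jane,Doe
#   57,John,Smith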
### ------- HERE ARE PARAMETERS TO CONFIGURE -------
host = 'saleseng'

### ------- OPEN THE CONFIG FILE and INSTANTIATE API -------
f = open('config.yml')
params = yaml.load(f)
f.close()
my_host = params['hosts'][host]['host']
my_secret = params['hosts'][host]['secret']
my_token = params['hosts'][host]['token']
looker = LookerApi(host=my_host, token=my_token, secret=my_secret)

# Pandas has a weird bug where it incorrectly calculates the output width and cuts off columns...
pd.set_option('display.expand_frame_repr', False)

# Create a dictionary of spaces:content_metadata_ids so we can iterate through
# and check the accesses to those spaces
spaces = looker.get_all_spaces(
    fields='id,is_personal,creator_id,is_personal_descendant,content_metadata_id,name')
spaces_content_metadata_ids = {}
spacejson = json.loads(json.dumps(spaces))