def test_file_credentials_user(self):
  """Verify user credentials can be loaded directly from a file path.

  Initializes the project with the user credentials file, fetches the
  user's OAuth2 info, confirms an email is present, then exercises the
  token refresh helper.
  """
  project.initialize(_user=self.user_file)
  user_api = get_service('oauth2', 'v2', 'user')
  info = user_api.userinfo().get().execute()
  self.assertIn('email', info)
  self.helper_refresh()
def group_instances_resize(count):
  """Resize the StarThinker worker instance group to the requested size.

  Args:
    count: target number of instances in the managed group.

  Returns:
    The Compute API response for the resize operation.
  """
  project.initialize(_service=settings.UI_SERVICE, _project=settings.UI_PROJECT)
  resize_request = API_Compute('service').instanceGroupManagers().resize(
      project=settings.UI_PROJECT,
      zone=settings.UI_ZONE,
      instanceGroupManager='starthinker-worker-group',
      size=count)
  return resize_request.execute()
def test_string_credentials_service(self):
  """Verify service credentials supplied as a raw JSON string work.

  Reads the service credentials file into a string, initializes the
  project with it, and confirms a service API call succeeds.
  """
  with open(self.service_file, 'r') as handle:
    project.initialize(_service=handle.read())
  resource_api = get_service('cloudresourcemanager', 'v1', 'service')
  result = resource_api.projects().list().execute()
  self.assertIn('projects', result)
def test_dictionary_credentials_user(self):
  """Verify user credentials supplied as a parsed dictionary work.

  Loads the user credentials file as JSON, initializes the project with
  the resulting dict, confirms an OAuth2 userinfo call succeeds, then
  exercises the token refresh helper.
  """
  with open(self.user_file, 'r') as handle:
    project.initialize(_user=json.load(handle))
  user_api = get_service('oauth2', 'v2', 'user')
  info = user_api.userinfo().get().execute()
  self.assertIn('email', info)
  self.helper_refresh()
def account_create():
  """Create ( or fetch ) an Account backed by the configured UI user.

  Initializes the project with UI client / service / user credentials,
  then resolves the user's profile and credentials into an Account row.

  Returns:
    The Account model instance.
  """
  project.initialize(_client=UI_CLIENT, _service=UI_SERVICE, _user=UI_USER)
  user_credentials = get_credentials('user')
  user_profile = get_profile()
  return Account.objects.get_or_create_user(user_profile, user_credentials, 'password')
def storage_list(account):
  """Yield a Storage wrapper for every file in the account's bucket.

  Args:
    account: Account model instance whose bucket is listed.

  Yields:
    Storage: one wrapper per object in the bucket.

  Best effort: if the bucket does not exist ( maybe not set up yet ),
  yields nothing instead of raising.
  """
  path = '%s:' % account.get_bucket(full_path=False)
  project.initialize(_project=CLOUD_PROJECT, _service=CLOUD_SERVICE)
  try:
    for filename_storage in object_list('service', path, files_only=True):
      yield Storage(filename_storage)
  # Narrowed from a bare `except:` so SystemExit / KeyboardInterrupt are no
  # longer swallowed; missing-bucket errors are still skipped best-effort.
  except Exception:
    pass  # if no bucket then skip ( maybe not set up yet )
def storage_create(account):
  """Create the account's bucket and grant the account owner access.

  Args:
    account: Account model instance whose bucket is created.
  """
  bucket_name = account.get_bucket(full_path=False)
  project.initialize(_project=CLOUD_PROJECT, _service=CLOUD_SERVICE)
  bucket_create('service', CLOUD_PROJECT, bucket_name)
  bucket_access('service', CLOUD_PROJECT, bucket_name, 'OWNER', emails=[account.email])
def log_get(job_id=None, timezone='America/Los_Angeles'):
  """Returns a log for the specified job or all jobs using given timezone for timestamps.

  Uses datastore to maintain logs of execution. All times are stored in UTC
  and converted at load. Catches ALL exceptions because fetching a log can
  fail gracefully by returning no log.

  Loads a record in the format:

  [UI_PROJECT].Datastore.[UI_LOG_NAMESPACE].[UI_LOG_KIND].name[job_id] = {
    recipe_uid - computed from job id to allow list of values
    execution_id - from entity
    execution_status - from entity
    execution_timestamp - altered by applying time zone
    execution_timeago - computed from timestamp to allow relative execution age
    execution_stdout - from entity
    execution_stderr - from entity
  }

  Args:
    - job_id ( string ) - Optional, if provided returns a single record for a single job.
    - timezone ( string ) - The time zone to cast all record times into.

  Returns:
    - A dictionary of logs keyed by job uid or a single record.
  """
  try:
    project.initialize(_service=UI_SERVICE, _project=UI_PROJECT)
    if job_id:
      # Python 3: the .next() method was removed, use the next() built-in.
      ignore, log = next(datastore_read(
          "service", UI_PROJECT, UI_LOG_NAMESPACE, UI_LOG_KIND, job_id))
      log['recipe_uid'] = job_id
      log['execution_timeago'] = time_ago(log['execution_timestamp'])
      log['execution_timestamp'] = time_local(log['execution_timestamp'], timezone)
      return log
    else:
      logs = dict(datastore_list(
          "service", UI_PROJECT, UI_LOG_NAMESPACE, UI_LOG_KIND))
      for k, v in logs.items():
        v['recipe_uid'] = k
        v['execution_timeago'] = time_ago(v['execution_timestamp'])
        v['execution_timestamp'] = time_local(v['execution_timestamp'], timezone)
      return logs
  # Python 3 `except ... as` syntax and print() function, consistent with the
  # rest of this file; the old `except Exception, e` is a py2-only SyntaxError.
  except Exception as e:
    print('manager.log.log_get(...) Exception:', str(e))
    return {}
def group_instances_list(statuses=None):
  """List managed instances of the worker group, optionally filtered by status.

  Args:
    statuses: optional list of instanceStatus strings to filter on; None or
      an empty list returns all instances ( same as the previous default ).

  Returns:
    Iterator over managed instance records, newest first.
  """
  # Replaced mutable default argument ( statuses=[] ); None behaves
  # identically to the old empty-list default in the join below.
  if statuses is None:
    statuses = []
  project.initialize(_service=settings.UI_SERVICE, _project=settings.UI_PROJECT)
  return API_Compute(
      'service', iterate=True).instanceGroupManagers().listManagedInstances(
          project=settings.UI_PROJECT,
          zone=settings.UI_ZONE,
          instanceGroupManager='starthinker-worker-group',
          filter=' OR '.join(
              ['(instanceStatus = "%s")' % status for status in statuses]),
          orderBy="creationTimestamp desc").execute()
def group_instances_delete(name):
  """Proactively delete one named instance from the worker group.

  Args:
    name: instance name within the configured zone.

  Returns:
    The Compute API response for the delete operation.
  """
  project.initialize(_service=settings.UI_SERVICE, _project=settings.UI_PROJECT)
  instance_path = 'zones/%s/instances/%s' % (settings.UI_ZONE, name)
  delete_request = API_Compute('service').instanceGroupManagers().deleteInstances(
      project=settings.UI_PROJECT,
      zone=settings.UI_ZONE,
      instanceGroupManager='starthinker-worker-group',
      body={'instances': [instance_path], "type": "PROACTIVE"})
  return delete_request.execute()
def storage(request):
  """Ensure the user's bucket exists and is owned, then redirect to it.

  Args:
    request: Django request whose user supplies the bucket and email.

  Returns:
    HttpResponseRedirect to the user's full bucket path.
  """
  bucket_name = request.user.get_bucket(full_path=False)

  # create and permission bucket ( will do nothing if it exists )
  project.initialize(_project=CLOUD_PROJECT, _service=CLOUD_SERVICE)
  bucket_create('service', CLOUD_PROJECT, bucket_name)
  bucket_access('service', CLOUD_PROJECT, bucket_name, 'OWNER', emails=[request.user.email])

  return HttpResponseRedirect(request.user.get_bucket())
def account_create():
  """Return the first existing Account, creating one from UI credentials if none.

  Returns:
    The Account model instance.
  """
  accounts = Account.objects.all()
  if accounts:
    return accounts[0]
  project.initialize(_client=UI_CLIENT, _service=UI_SERVICE, _user=UI_USER)
  user_credentials = get_credentials('user')
  return Account.objects.get_or_create_user(user_credentials, 'password')
def log_put(event, job_id, execution_id, stdout='', stderr=''):
  """Generic log writer used by helper functions. Writes to datastore.

  Creates a record that can be read using the log_get function. Only the
  job_id is used as a key. Do not call this directly, use helper functions
  instead:

    - log_started
    - log_failed
    - log_completed
    - log_timedout

  Stores the following entity:

  ```
  [UI_PROJECT].Datastore.[UI_LOG_NAMESPACE].[UI_LOG_KIND].name[job_id] = {
    execution_id
    execution_status
    execution_timestamp
    execution_stdout
    execution_stderr
  }
  ```

  Args:
    - event ( string ): One of the following: JOB_STARTED, JOB_COMPLETED, JOB_FAILED, JOB_TIMEDOUT
    - job_id ( string ): The universal identifier of a job, used as key for record.
    - execution_id ( string ): The id of the specific run of the job.
    - stdout ( string ): Messaging output from the task. Present depending on task.
    - stderr ( string ): Error output from the task. Present if job failed.
  """
  project.initialize(_service=UI_SERVICE, _project=UI_PROJECT)

  # One datastore entity per job keyed by job_id; the latest event wins.
  record = {
      'execution_id': execution_id,
      'execution_status': event,
      'execution_timestamp': datetime.utcnow(),
      'execution_stdout': stdout,
      'execution_stderr': stderr
  }
  datastore_write("service", UI_PROJECT, UI_LOG_NAMESPACE, UI_LOG_KIND, job_id, record)

  # Also append the raw event to the BigQuery audit table.
  client = bigquery.Client.from_service_account_json(UI_SERVICE)
  events_table = client.dataset(UI_LOG_DATASET).table(UI_LOG_TABLE)
  events_table.reload()
  events_table.insert_data([[
      datetime.utcnow(), event, job_id, execution_id, stdout or '', stderr or ''
  ]])
def test_remote_credentials_user(self):
  """Verify credentials round-trip through an Account's stored path.

  Creates an account from file credentials, clears the cache, then
  re-initializes from the path persisted on the account and confirms an
  OAuth2 userinfo call succeeds before exercising the refresh helper.
  """
  project.initialize(_user=self.user_file)
  credentials = get_credentials('user')
  account = Account.objects.get_or_create_user(credentials, 'password')

  # Re-initialize from the credentials path persisted on the account.
  clear_credentials_cache()
  project.initialize(_user=account.get_credentials_path())
  self.assertEqual(project.recipe['setup']['auth']['user'],
                   account.get_credentials_path())

  user_api = get_service('oauth2', 'v2', 'user')
  info = user_api.userinfo().get().execute()
  self.assertIn('email', info)
  self.helper_refresh()
def log_get(recipe_id=None, timezone='America/Los_Angeles', days=1):
  """Returns last actionable job run for a specific recipe or all recipes.

  Pulls status entries from StackDriver in reverse order. A single recipe may
  be run multiple times for multiple tasks at different hours, do not assume a
  JOB_END means a recipe is complete. Only way to ensure a recipe is complete
  is to compare all tasks run against all tasks in recipe ( not done by log
  code ).

  Args:
    - recipe_id ( string or list ) - Optional, if provided returns records for those jobs only.
    - timezone ( string ) - The local timezone to cast all record times into.
    - days ( integer ) - Appears unused in this body; kept for interface compatibility.

  Returns:
    - ( iterator ) - Each log entry.
  """
  body = {
      'resourceNames': ['projects/%s' % UI_PROJECT,],
      'filter': '\
         logName="projects/%s/logs/StarThinker" \
         AND labels.version="%s" \
         AND labels.layer="JOB" \
       ' % (UI_PROJECT, LOG_VERSION),
      'orderBy': 'timestamp desc',
      'pageSize': 1000
  }

  # Replaced mutable default argument ( recipe_id=[] ); None is equally falsy
  # here, so callers passing nothing see identical behavior.
  if recipe_id:
    if isinstance(recipe_id, str):
      recipe_id = [recipe_id]
    body['filter'] += ' AND ( %s )' % ' OR '.join(
        'operation.id="%s"' % r for r in recipe_id)

  project.initialize(_service=UI_SERVICE, _project=UI_PROJECT)
  for entry in API_StackDriver(
      'service', iterate=True).entries().list(body=body).execute():
    yield entry
def handle(self, *args, **kwargs):
  """Send a templated email from one account to some or all accounts.

  Finds the sending account by kwargs['email_from'], loads the JSON email
  template from kwargs['template'], then either prints the HTML
  ( kwargs['test'] ) or sends it to each non-ignored account, optionally
  restricted to kwargs['email_to'].
  """
  accounts = Account.objects.all()

  # find user to send from
  sender_path = None
  for account in accounts:
    if account.email == kwargs['email_from']:
      sender_path = account.get_credentials_path()

  if sender_path:
    print("SEND USER FOUND")

    # initialize project
    project.initialize(_user=sender_path)

    # load template
    with open(kwargs['template'], 'r') as json_file:
      message = EmailTemplate(json.load(json_file))

    # loop through accounts
    for account in accounts:

      # if account is given only do that one
      if kwargs['email_to'] is not None and account.email != kwargs['email_to']:
        continue

      if account.email in kwargs['ignore']:
        print('IGNORING: ', account.email)
      else:
        print('EMAILING: ', account.email)

        if kwargs['test']:
          # write to STDOUT
          print(message.get_html())
        else:
          # send message via email
          send_email('user', account.email, kwargs['email_from'], None,
                     message.get_subject(), message.get_text(),
                     message.get_html())
          sleep(1)
def ready(self):
  """On app startup, ensure the shared user bucket exists."""
  print('CHECKING IF USER BUCKET EXISTS:', USER_BUCKET, USER_LOCATION)
  project.initialize(_project=settings.UI_PROJECT,
                     _service=settings.UI_SERVICE)
  bucket_create('service', settings.UI_PROJECT, USER_BUCKET, USER_LOCATION)
def run(request):
  """Entry point: execute the recipe posted as the request JSON body.

  Args:
    request: HTTP request carrying the recipe as its JSON payload.

  Returns:
    The literal string 'DONE' once execution finishes.
  """
  recipe_json = request.get_json(force=True)
  project.initialize(_recipe=recipe_json, _verbose=True)
  project.execute()
  return 'DONE'
def handle(self, *args, **kwargs):
  """Aggregate recipe deployments into the ST_Scripts dashboard table.

  Scans recipes newer than the last recorded deployment, maps each recipe
  task to the product / account it impacts, and writes rows into
  google.com:starthinker.dashboard.ST_Scripts ( truncate on first run,
  append thereafter ). Tags with no mapping are counted in `missing` and
  written with null product / account.

  Args ( via kwargs ):
    - recipes ( integer ): maximum number of new recipes to process.
    - test ( boolean ): if True, print rows instead of writing to BigQuery.
  """

  def _row(recipe, account, product, tag):
    # One impact row; duplicated dict literal from the original extracted here.
    return {
        'day': recipe.birthday,
        'deployment': recipe.id,
        'account': account,
        'script': tag,
        'product': product,
        'user': recipe.account.email.replace('@google.com', '')
    }

  impact = []  # { 'day': DATE, 'deployment':INT, 'account': INT, 'product': STRING, 'recipe': STRING, 'user': STRING }
  missing = {}
  id_max = 0

  project.initialize(_service=settings.UI_SERVICE, _verbose=True)

  # resume after the last deployment already written to the dashboard
  if table_exists('service', 'google.com:starthinker', 'dashboard', 'ST_Scripts'):
    id_max = next(
        query_to_rows('service', 'google.com:starthinker', 'dashboard',
                      'SELECT MAX(Deployment) FROM ST_Scripts', legacy=False))[0]

  for recipe in Recipe.objects.filter(
      id__gt=id_max).order_by('id')[:kwargs['recipes']]:
    project.initialize(_user=recipe.account.get_credentials_path(),
                       _service=settings.UI_SERVICE, _verbose=True)

    for v in recipe.get_values():
      tag = v['tag']

      if tag in ('dcm_to_bigquery', 'dcm_to_sheets', 'dcm_to_storage',
                 'dcm_run', 'conversion_upload_from_bigquery',
                 'conversion_upload_from_sheets'):
        impact.append(_row(recipe, v['values'].get('account'), 'dcm', tag))

      elif tag in ('dbm_to_bigquery', 'dbm_to_sheets', 'dbm_to_storage'):
        for partner in account_from_dbm_report(
            v['values'].get('dbm_report_id'), v['values'].get('dbm_report_name')):
          impact.append(_row(recipe, partner, 'dbm', tag))

      elif tag in ('dt',):
        impact.append(_row(recipe, account_from_dt(v['values']), 'dcm', tag))

      elif tag == 'barnacle':
        for account in v['values']['accounts']:
          impact.append(_row(recipe, account, 'dcm', tag))

      elif tag in ('entity',):
        for partner in v['values']['partners']:
          impact.append(_row(recipe, partner, 'dbm', tag))

      elif tag == 'itp':
        # itp touches both products, one row each
        impact.append(_row(recipe, v['values']['dcm_account'], 'dcm', tag))
        impact.append(_row(recipe, v['values']['dbm_partner'], 'dbm', tag))

      elif tag == 'itp_audit':
        impact.append(_row(recipe, v['values']['cm_account_id'], 'dcm', tag))
        for partner in account_from_dbm_report(
            None, v['values'].get('dv360_report_name')):
          impact.append(_row(recipe, partner, 'dbm', tag))

      else:
        # unmapped tag: record it with no product / account and count it
        impact.append(_row(recipe, None, None, tag))
        missing.setdefault(tag, 0)
        missing[tag] += 1

  if impact:
    if kwargs['test']:
      print(impact)
    else:
      print('WRITING TO ST_Scripts')
      rows_to_table(
          'service', 'google.com:starthinker', 'dashboard', 'ST_Scripts',
          [(i['day'], i['deployment'], i['user'], i['product'], i['script'],
            i['account']) for i in impact],
          schema=[
              {'mode': 'REQUIRED', 'name': 'Day', 'type': 'Date'},
              {'mode': 'REQUIRED', 'name': 'Deployment', 'type': 'INTEGER'},
              {'mode': 'REQUIRED', 'name': 'User', 'type': 'STRING'},
              {'mode': 'NULLABLE', 'name': 'Product', 'type': 'STRING'},
              {'mode': 'NULLABLE', 'name': 'Recipe', 'type': 'STRING'},
              {'mode': 'NULLABLE', 'name': 'Account', 'type': 'INTEGER'},
          ],
          skip_rows=0,
          disposition='WRITE_TRUNCATE' if id_max == 0 else 'WRITE_APPEND',
          wait=True)

    print('MISSING', missing)
    print('Coverage:', (len(impact) * 100) / (len(missing) + len(impact)))

  else:
    print('No recipes newer than:', id_max)
def log_put(event, severity, job=None, text=None):
  """Generic log writer used by helper functions. Writes to StackDriver.

  Creates a record that can be read using log_get function. Entire recipe is
  logged, worker data and stdout and stderr are added to the JSON under the
  worker key. Only JOB_EXCEPTION and MANAGER_EXCEPTION log to text in case
  JSON is corrupt, everything else is JSON.

  Do not call this directly, use helper functions instead:
    - log_manager_start
    - log_manager_error
    - log_manager_end
    - log_job_dispatch
    - log_job_receive
    - log_job_start
    - log_job_end
    - log_job_error
    - log_job_timeout

  WARNING: Do not corrupt recipe in log code, it is actively being used by
  workers while being logged.

  Args:
    - event ( string ): One of the JOB_* enums.
    - severity ( enum ): Stackdriver severity level.
    - job ( json ): Recipe workflow to execute.
    - text ( string ): Messaging output from the task. Usually stdout and stderr.
  """
  if VERBOSE:
    # Python 3 print() function; the old py2 print statement is a SyntaxError
    # under py3 and inconsistent with the rest of this file.
    print('LOGGING:', event, severity, text or '')

  body = {
      "entries": [{
          "logName": "projects/%s/logs/StarThinker" % UI_PROJECT,
          "severity": severity,
          "resource": {
              "type": "project",
              "labels": {
                  "key": UI_PROJECT
              },
          },
          "labels": {
              "version": LOG_VERSION,
              "layer": event.split('_')[0],
              "event": event,
              "instance": get_instance_name(),
          },
          #"operation": {
          #  "id": string
          #  "producer": string
          #  "first": False,
          #  "last": False,
          #}, # already in recipe worker logging task and instance, does this have additional value?
          #"sourceLocation": {
          #  "file": string,
          #  "line": string,
          #  "function": string
          #},
      }],
      "partialSuccess": False,
      "dryRun": False
  }

  if text is not None:
    body['entries'][0]["textPayload"] = text
  else:
    # Removing tasks from job REMOVES ALL POTENTIAL CREDENTIALS IN CODE
    job_buffer = json.loads(
        json.dumps(job, indent=2, sort_keys=True, default=str))
    if 'tasks' in job_buffer['recipe']:
      del job_buffer['recipe']['tasks']
    if 'auth' in job_buffer['recipe']['setup']:
      del job_buffer['recipe']['setup']['auth']
    body['entries'][0]["jsonPayload"] = job_buffer

  project.initialize(_service=UI_SERVICE, _project=UI_PROJECT)
  API_StackDriver("service").entries().write(body=body).execute()
def handle(self, *args, **kwargs):
  """Email each account ( or one account ) a status digest of its recipes.

  For every account ( filtered to kwargs['email'] when given ) builds an
  EmailTemplate containing a table of recipe names, statuses, and ages plus
  the static help links, then prints the HTML ( kwargs['test'] ) or sends it.
  """

  # loop through accounts
  for account in Account.objects.all():

    # if account is given only do that one
    if kwargs['email'] is None or account.email == kwargs['email']:
      print('CHECKING: ', account.email)

      # tracks whether any table was added; only read by the commented-out
      # "Featured Recipes" section below, so currently unused
      status = False

      # start an email template
      email = EmailTemplate()
      email.segment_next()
      email.greeting(account.name)
      email.paragraph(EMAIL_PARAGRAPH)

      # loop through recipes
      rows = []
      for recipe in account.recipe_set.all():
        log = recipe.get_log()
        rows.append([
            recipe.name,
            log.get('status'),
            log.get('ago'),
            'http://starthinker.corp.google.com/recipe/edit/%d/' % recipe.pk
        ])

      if rows:
        email.segment_next()
        email.header('Recipe Status')
        email.table(RECIPE_SCHEMA, rows)
        status = True

      # loop through storage
      #rows = []
      #for recipe in storage_list(account):
      #  recipe.log = logs.get(recipe.uid(), {})
      #  rows.append([recipe.name, recipe.log.get('status'), recipe.log.get('time_ago'), recipe.link_storage])

      # NOTE(review): the `rows = []` reset above is commented out, so this
      # block re-emits the SAME recipe rows under a 'Storage Status' header —
      # confirm whether this section should be disabled entirely or `rows`
      # repopulated from storage.
      if rows:
        email.segment_next()
        email.header('Storage Status')
        email.table(RECIPE_SCHEMA, rows)
        status = True

      # # if at least one solution or recipe is running....
      # if status:
      #
      #   # show one random recipe
      #   email.segment_next()
      #   email.header('Featured Recipes')
      #   email.paragraph('Each week we feature three recipes that could help your client or agency project. This weeks featured recipe is...')
      #
      #   # script: card ( fetch random )
      #   for s in sample(list(Script.get_scripts()), 3):
      #     email.header(s.get_name())
      #     email.paragraph(s.get_description())
      #
      #     # solution pitch
      #     email.paragraph('Benefits', bold=True)
      #     email.list(s.get_pitches())
      #
      #     # solution impact
      #     email.table(IMPACT_SCHEMA, [(i[0], '%d%%' % i[1]) for i in s.get_impacts().items()])
      #
      #     email.table(DETAILS_SCHEMA, [
      #       ('Requires', ', '.join([r[0] for r in s.get_requirements().items() if r[1]])),
      #       ('Categories', ', '.join(s.get_categories())),
      #       ('Authors', mailto(s.get_authors()))
      #     ])
      #
      #     email.button('Launch %s' % s.get_name(), '%s/client/edit/' % settings.CONST_URL, big=True)
      #
      #     if s.get_open_source():
      #       email.button('Avaliable As Open Source', s.get_open_source(), big=True)
      #
      # # loop through links
      # email.segment_next()
      # email.header('Helpful Links')
      # email.paragraph('Reduce solution delivery to minutes and create custom solutions that exceed clients expectations with these helpful guides and tips.')

      # always append the static help links
      for h in HELP_LINKS:
        email.section(h['name'], h['description'], None, h['button']['link'],
                      h['button']['text'])

      if kwargs['test']:
        # write to STDOUT
        print(email.get_html())
      else:
        print('EMAILING: ', account.email)
        # send message via email
        project.initialize()
        send_email('user', account.email, EMAIL_FROM, EMAIL_CC, EMAIL_SUBJECT,
                   email.get_text(), email.get_html())
        sleep(3)