def run(self, task, rows):
    """Record a 'welcome_employer' action in ActionKit for each row.

    Skips rows whose user already has a matching welcome_employer
    actionfield on the processing page (idempotency guard).

    Returns (n_rows, n_success, n_error), matching the other tasks here.
    """
    ak = Client()
    task_log = get_task_log()
    n_rows = n_success = n_error = 0
    for row in rows:
        task_log.sql_log(task, row)
        n_rows += 1
        # Required, well-typed inputs (asserts follow this file's convention).
        assert row.get("user_id") and int(row['user_id'])
        assert row.get("employer")
        assert row.get("num_employees") and int(row['num_employees'])
        assert row.get("processing_page_name")
        assert row.get("originating_page_name")
        assert row.get("originating_page_title")
        assert row.get("originating_action_id") and int(row['originating_action_id'])
        # Idempotency: skip users who already have this actionfield.
        try:
            f = CoreActionField.objects.using("ak").select_related("parent").filter(
                parent__page__name=row['processing_page_name'],
                parent__user__id=int(row['user_id']),
                name="welcome_employer",
                value=row['employer'])[0]
        except IndexError:
            pass
        else:
            task_log.activity_log(
                task, {"id": row['user_id'], "existing_action": f.parent.id})
            continue
        data = {
            'id': row['user_id'],
            'page': row['processing_page_name'],
            'action_welcome_employer': row['employer'],
            'action_welcome_num_employees': row['num_employees'],
            'user_welcome_employer': row['employer'],
            'user_welcome_num_employees': row['num_employees'],
        }
        # ControlShift-sourced pages live on www.coworker.org; everything
        # else is a native page on act.coworker.org.
        if row['originating_page_name'].startswith("controlshift-"):
            data['action_welcome_originating_page_url'] = (
                "https://www.coworker.org/petitions/%s"
                % row['originating_page_name'][len("controlshift-"):])
            data['action_welcome_originating_page_title'] = (
                row['originating_page_title'][len("ControlShift: "):])
        else:
            data['action_welcome_originating_page_url'] = (
                "http://act.coworker.org/act/%s" % row['originating_page_name'])
            data['action_welcome_originating_page_title'] = (
                row['originating_page_title'])
        data['user_welcome_originating_page_url'] = data['action_welcome_originating_page_url']
        data['user_welcome_originating_page_title'] = data['action_welcome_originating_page_title']
        task_log.activity_log(task, data)
        try:
            resp = ak.act(data)
            task_log.success_log(task, resp)
        except Exception:
            n_error += 1
            resp = {}
            resp['log_id'] = row['user_id']
            resp['error'] = traceback.format_exc()
            task_log.error_log(task, resp)
        else:
            n_success += 1
    # FIX: counters were computed but never returned; the sibling tasks
    # in this file return this tuple.
    return n_rows, n_success, n_error
def run(self, task, rows):
    """Add each row's email address to a Mailchimp static segment.

    Returns (n_rows, n_success, n_error), matching the other tasks here.
    """
    n_rows = n_success = n_error = 0
    task_log = get_task_log()
    for row in rows:
        task_log.sql_log(task, row)
        n_rows += 1
        assert row.get("email")
        obj = {
            "email_address": row['email'],
        }
        task_log.activity_log(task, obj)
        resp = None
        try:
            resp = self.request(
                "post",
                "/segments/%s/members" % self.cleaned_data['mailchimp_segment_id'],
                obj)
            assert resp.status_code == 200
        except Exception as e:
            n_error += 1
            task_log.error_log(
                task, {
                    "row": obj,
                    "error": str(e),
                    # resp stays None when the request itself raised.
                    "resp": resp.text if resp else None
                })
        else:
            n_success += 1
            task_log.success_log(task, {"row": obj, "resp": resp.json()})
    # FIX: counters were computed but never returned.
    return n_rows, n_success, n_error
def run(self, task, rows):
    """Attach each row's shared Google Drive folder to My Drive's root.

    Exchanges the stored refresh token for an access token, then for
    every row locates the shared folder by name and adds 'root' as a
    parent.  Returns (n_rows, n_success, n_error).
    """
    task_log = get_task_log()
    n_rows = n_success = n_error = 0
    token_resp = requests.post(
        "https://accounts.google.com/o/oauth2/token",
        data={
            "grant_type": "refresh_token",
            "refresh_token": self.cleaned_data['google_refresh_token'],
            "client_id": self.cleaned_data['google_client_id'],
            "client_secret": self.cleaned_data['google_client_secret']
        })
    access_token = token_resp.json()['access_token']
    drive = build(
        'drive', 'v3',
        credentials=google.oauth2.credentials.Credentials(access_token))
    for row in rows:
        task_log.sql_log(task, row)
        n_rows += 1
        # Match the folder name either URL-encoded or decoded.
        query = ("(name='%s' or name='%s') "
                 "and mimeType='application/vnd.google-apps.folder' "
                 "and sharedWithMe " % (
                     row['gdrive_folder'],
                     urllib.unquote(row['gdrive_folder'])))
        listing = drive.files().list(
            q=query,
            fields="files(id)",
        ).execute()
        matches = listing['files']
        if len(matches) != 1:
            task_log.error_log(
                task, {
                    "row": row,
                    "error": "cannot_find_folder",
                    "query": query,
                    "resp": listing
                })
            n_error += 1
            continue
        # @@TODO skip if already true
        update_resp = drive.files().update(
            fileId=matches[0]['id'],
            addParents='root',
        ).execute()
        task_log.activity_log(task, {"row": row, "resp": update_resp})
        n_success += 1
    return n_rows, n_success, n_error
def run(self, task, rows):
    """Download each row's S3 object into a local directory, tolerating
    objects missing from S3 that already exist locally.

    Returns (n_rows, n_success, n_error), matching the other tasks here.
    """
    task_log = get_task_log()
    # NOTE(review): the Google Drive client and RestClient below are
    # constructed but never used in this method -- they look like
    # leftovers from a sibling task; confirm before removing, since
    # construction may have side effects (token exchange hits the network).
    resp = requests.post("https://accounts.google.com/o/oauth2/token", data={
        "grant_type": "refresh_token",
        "refresh_token": self.cleaned_data['google_refresh_token'],
        "client_id": self.cleaned_data['google_client_id'],
        "client_secret": self.cleaned_data['google_client_secret']
    })
    token = resp.json()['access_token']
    credentials = google.oauth2.credentials.Credentials(token)
    api = build('drive', 'v3', credentials=credentials)
    rest = RestClient()
    rest.safety_net = False
    n_rows = n_success = n_error = 0
    for row in rows:
        n_rows += 1
        try:
            self.maybe_download(row['s3_url'], row['local_dir'])
        except MissingFileError as e:
            filename = os.path.basename(row['s3_url'])
            filepath = os.path.join(row['local_dir'], filename)
            try:
                # The object may be gone from S3 yet already downloaded;
                # only a file missing in BOTH places is a real error.
                get_md5_local(filepath)
            except MissingFileError:
                n_error += 1
                task_log.error_log(
                    task, {
                        "row": row,
                        "error": "s3_file_missing",
                        "url": e.url,
                        "headers": e.headers,
                        "status": e.status,
                    })
                continue
            task_log.activity_log(
                task, {
                    "row": row,
                    "warning": "s3_file_missing_but_exists_locally",
                    "url": e.url,
                    "headers": e.headers,
                    "status": e.status,
                })
            # FIX: the locally-present case is a success, not silence.
            n_success += 1
        else:
            # FIX: n_success was never incremented in the original.
            n_success += 1
    # FIX: counters were computed but never returned.
    return n_rows, n_success, n_error
def run(self, task, rows):
    """Publish a QueryReport's rows as JSON into an ActionKit signup
    form's introduction_text field.

    The incoming ``rows`` argument is ignored: the report identified by
    cleaned_data['report_id'] is re-run and its rows are serialized
    instead.  Returns (1, 1, 0) since this is a single-shot update.
    """
    # Removed unused locals from the original: ak = Client() and
    # task_log = get_task_log() were never referenced.
    report = QueryReport.objects.using("ak").get(
        report_ptr__id=self.cleaned_data['report_id'])
    rows = list(self.run_sql(report.sql))
    api = RestClient()
    api.signupform.patch(
        id=self.cleaned_data['actionkit_signup_form_id'],
        introduction_text=json.dumps(rows))
    return 1, 1, 0
def run(self, task, rows):
    """Upsert each row's email into Mailchimp via the members API.

    Row keys prefixed 'new_data_merge_' become Mailchimp merge fields;
    other 'new_data_' keys are set directly on the member object.

    Returns (n_rows, n_success, n_error), matching the other tasks here.
    """
    # Named prefixes replace the original magic slice offsets 15 and 9.
    merge_prefix = "new_data_merge_"
    data_prefix = "new_data_"
    n_rows = n_success = n_error = 0
    task_log = get_task_log()
    for row in rows:
        task_log.sql_log(task, row)
        n_rows += 1
        assert row.get("email")
        assert row.get("status")
        obj = {
            "email_address": row['email'],
            "status_if_new": row['status'],
            "status": row['status'],
            "merge_fields": {},
        }
        for key in row:
            if key.startswith(merge_prefix):
                obj["merge_fields"][key[len(merge_prefix):]] = row[key]
            elif key.startswith(data_prefix):
                obj[key[len(data_prefix):]] = row[key]
        if not obj['merge_fields']:
            obj.pop("merge_fields")
        task_log.activity_log(task, obj)
        resp = None
        try:
            # Mailchimp addresses members by the md5 of the lower-cased
            # email address.
            resp = self.request(
                "put",
                "/members/%s" % hashlib.md5(row['email'].lower()).hexdigest(),
                obj)
            assert resp.status_code == 200
        except Exception as e:
            n_error += 1
            task_log.error_log(
                task, {
                    "row": obj,
                    "error": str(e),
                    "resp": resp.text if resp else None
                })
        else:
            n_success += 1
            task_log.success_log(task, {"row": obj, "resp": resp.json()})
    # FIX: counters were computed but never returned.
    return n_rows, n_success, n_error
def run(self, task, rows):
    """Unsubscribe each user from the configured lists and optionally
    record an action on a page.

    cleaned_data['unsubscribe_lists'] is a comma-separated list of list
    ids; cleaned_data['action_page'] is an optional page shortname.

    Returns (n_rows, n_success, n_error), matching the other tasks here.
    """
    lists = self.cleaned_data.get('unsubscribe_lists') or ""
    lists = [int(i.strip()) for i in lists.split(",") if i]
    page = self.cleaned_data.get('action_page', '').strip() or None
    ak = Client()
    n_rows = n_success = n_error = 0
    task_log = get_task_log()
    for row in rows:
        task_log.sql_log(task, row)
        n_rows += 1
        assert row.get('user_id') and int(row['user_id'])
        user_id = row['user_id']
        caused_by_action = row.get('caused_by_action') or None
        unsubs = []
        for list_id in lists:
            # Best-effort: a failed unsubscribe (e.g. user not on the
            # list) just leaves that list out of the recorded action.
            # FIX: narrowed the original bare `except:` so that
            # KeyboardInterrupt/SystemExit are not swallowed.
            try:
                ak.User.unsubscribe({'id': user_id, 'list_id': list_id})
            except Exception:
                pass
            else:
                unsubs.append(list_id)
        if page is None:
            # No action page configured: unsubscribes only, not counted
            # as success or error (preserves original behavior).
            continue
        action = {'id': user_id, 'page': page}
        if unsubs:
            action['action_unsubscribed_from_lists'] = unsubs
        if caused_by_action:
            action['action_caused_by_action'] = caused_by_action
        try:
            resp = ak.act(action)
            resp['log_id'] = row['user_id']
            task_log.success_log(task, resp)
        except Exception:
            n_error += 1
            resp = {}
            resp['log_id'] = row['user_id']
            resp['error'] = traceback.format_exc()
            task_log.error_log(task, resp)
        else:
            n_success += 1
    # FIX: counters were computed but never returned.
    return n_rows, n_success, n_error
def run(self, task, rows):
    """Rewrite an ActionKit actionfield's name and/or value for each row.

    A form-level name/value overrides per-row values; the actionfield's
    existing value is kept when no new value is supplied anywhere.

    Returns (n_rows, n_success, n_error), matching the other tasks here.
    """
    # FIX: guard against a missing/None form value before .strip() --
    # the original crashed with AttributeError when the key was absent.
    new_actionfield_name = (
        self.cleaned_data.get("new_actionfield_name") or "").strip() or None
    new_actionfield_value = (
        self.cleaned_data.get("new_actionfield_value") or "").strip() or None
    rest = RestClient()
    rest.safety_net = False
    n_rows = n_success = n_error = 0
    task_log = get_task_log()
    for row in rows:
        task_log.sql_log(task, row)
        n_rows += 1
        assert row.get("actionfield_id") and int(row['actionfield_id'])
        actionfield = CoreActionField.objects.using("ak").select_related(
            "parent", "parent__page").get(
            id=row['actionfield_id'])
        data = {
            # The REST parent URL embeds the page type, e.g. petitionaction.
            'parent': "/rest/v1/%saction/%s/" % (
                actionfield.parent.page.type.lower(),
                actionfield.parent.id),
            'name': (new_actionfield_name or row['new_actionfield_name']),
            'value': (new_actionfield_value
                      or row.get('new_actionfield_value')
                      or actionfield.value),
        }
        try:
            resp = rest.actionfield.put(actionfield.id, **data)
            resp['log_id'] = row['actionfield_id']
            task_log.success_log(task, resp)
        except Exception:
            n_error += 1
            resp = {}
            resp['log_id'] = row['actionfield_id']
            resp['error'] = traceback.format_exc()
            task_log.error_log(task, resp)
        else:
            n_success += 1
    # FIX: counters were computed but never returned.
    return n_rows, n_success, n_error
def run(self, task, rows):
    """Remove each row's email address from a Mailchimp static segment.

    Returns (n_rows, n_success, n_error), matching the other tasks here.
    """
    n_rows = n_success = n_error = 0
    task_log = get_task_log()
    for row in rows:
        task_log.sql_log(task, row)
        n_rows += 1
        assert row.get("email")
        obj = {
            "email_address": row['email'],
        }
        task_log.activity_log(task, obj)
        resp = None
        try:
            resp = self.request(
                "delete",
                "/segments/{segment_id}/members/{email_hash}".format(
                    segment_id=self.cleaned_data['mailchimp_segment_id'],
                    email_hash=hashlib.md5(
                        row['email'].lower()).hexdigest(),
                ),
                obj)
            # Mailchimp answers a successful delete with 204 No Content.
            assert resp.status_code == 204
        except Exception as e:
            n_error += 1
            task_log.error_log(
                task, {
                    "row": obj,
                    "error": str(e),
                    "resp": resp.text if resp else None
                })
        else:
            n_success += 1
            task_log.success_log(task, {"row": obj})
    # FIX: counters were computed but never returned.
    return n_rows, n_success, n_error
def run(self, task, rows):
    """Verify each S3 object against its Google Drive copy and, when the
    checksums match, delete the S3 object and its ActionKit actionfield.

    A checksum mismatch is logged as an error and the S3 object is left
    in place.  Returns (n_rows, n_success, n_error).
    """
    task_log = get_task_log()
    n_rows = n_success = n_error = 0
    token_resp = requests.post(
        "https://accounts.google.com/o/oauth2/token",
        data={
            "grant_type": "refresh_token",
            "refresh_token": self.cleaned_data['google_refresh_token'],
            "client_id": self.cleaned_data['google_client_id'],
            "client_secret": self.cleaned_data['google_client_secret']
        })
    access_token = token_resp.json()['access_token']
    drive = build(
        'drive', 'v3',
        credentials=google.oauth2.credentials.Credentials(access_token))
    rest = RestClient()
    rest.safety_net = False
    session = boto3.Session(
        aws_access_key_id=self.cleaned_data['aws_access_key_id'],
        aws_secret_access_key=self.cleaned_data['aws_secret_access_key'],
    )
    s3 = session.resource('s3')
    for row in rows:
        task_log.sql_log(task, row)
        n_rows += 1
        checksums = {
            "s3": get_md5_s3(row['s3_url']),
            "gdrive": get_md5_gdrive(row['gdrive_id'], drive),
        }
        task_log.activity_log(task, {"row": row, "checksums": checksums})
        if checksums["s3"] != checksums["gdrive"]:
            # Mismatch: do not delete anything for this row.
            n_error += 1
            task_log.error_log(task, {"row": row, "checksums": checksums})
            continue
        bucket, key = re.match(
            "^https://([A-Za-z0-9\-]+).s3.amazonaws.com/(.*)$",
            row['s3_url'],
        ).groups()
        s3_resp = s3.Object(bucket, urllib.unquote(key)).delete()
        ak_resp = rest.actionfield.delete(row['s3url_field_id'])
        n_success += 1
        task_log.success_log(task, {
            "row": row,
            "s3": s3_resp,
            "ak": ak_resp
        })
    return n_rows, n_success, n_error
def run(self, task, rows):
    """Set a userfield on each user, recording it as an action when an
    action page is configured.

    Three paths per row: no page configured -> User.save; page configured
    but no existing action -> ak.act; exactly one existing action on the
    page -> update that action in place via the REST API.

    Returns (n_rows, n_success, n_error), matching the other tasks here.
    """
    # FIX: guard against a missing/None form value before .strip() --
    # the original crashed with AttributeError when the key was absent.
    userfield_value = (
        self.cleaned_data.get("userfield_value") or "").strip() or None
    userfield_name = self.cleaned_data['userfield_name']
    page = self.cleaned_data.get('action_page', '').strip() or None
    ak = Client()
    rest = RestClient()
    rest.safety_net = False
    userfield_name = 'user_%s' % userfield_name
    n_rows = n_success = n_error = 0
    task_log = get_task_log()
    for row in rows:
        task_log.sql_log(task, row)
        n_rows += 1
        assert row.get('user_id') and int(row['user_id'])
        if page:
            try:
                action = CoreAction.objects.using("ak").select_related(
                    "page").get(
                    user__id=row['user_id'],
                    page__name=page)
            except CoreAction.DoesNotExist:
                action = None
            except CoreAction.MultipleObjectsReturned:
                # Ambiguous match: fall back to creating a fresh action.
                action = None
        else:
            action = None
        try:
            if page is None:
                resp = ak.User.save({
                    "id": row['user_id'],
                    userfield_name: (userfield_value
                                     or row['userfield_value'])})
            elif action is None:
                resp = ak.act({
                    "id": row['user_id'],
                    "page": page,
                    "source": "aktasks-%s" % task.id,
                    userfield_name: (userfield_value
                                     or row['userfield_value'])})
            else:
                page_url = "/rest/v1/%spage/%s/" % (
                    action.page.type.lower(), action.page_id)
                user_url = "/rest/v1/user/%s/" % action.user_id
                handler = getattr(
                    rest, "%saction" % action.page.type.lower())
                args = dict(page=page_url, user=user_url)
                args[userfield_name] = (userfield_value
                                        or row['userfield_value'])
                args['source'] = "aktasks-%s" % task.id
                # BUG FIX: the original never assigned the put() result,
                # so resp['log_id'] below mutated a stale resp from a
                # previous iteration (or raised NameError on the first
                # row).  The sibling task assigns rest....put() the same
                # way.
                resp = handler.put(action.id, **args)
            resp['log_id'] = row['user_id']
            task_log.success_log(task, resp)
        except Exception:
            n_error += 1
            resp = {}
            resp['log_id'] = row['user_id']
            resp['error'] = traceback.format_exc()
            task_log.error_log(task, resp)
        else:
            n_success += 1
    # FIX: counters were computed but never returned.
    return n_rows, n_success, n_error