def handle(self, *args, **options):
    """Quarterly cleanup of job results.

    Pass 1: delete Design1D/2D/3D records, their result files on disk, and
    the JobIDs rows for jobs older than ``options['days']`` days (default
    KEEP_JOB * 30).
    Pass 2: sweep "orphan" result files on disk that have no matching
    Design record, and drop any stale JobIDs rows for them.
    Sends an admin notification email unless DEBUG.
    """
    t0 = time.time()
    self.stdout.write('%s:\t%s' % (time.ctime(), ' '.join(sys.argv)))

    # Retention window in days; KEEP_JOB is expressed in months (~30 days).
    N_days = options['days'] if options['days'] else KEEP_JOB * 30

    t = time.time()
    self.stdout.write("Cleaning up obsolete job results...")
    all_job = JobIDs.objects.filter(
        date__range=(datetime.date(1970, 1, 2),
                     datetime.date.today() - datetime.timedelta(days=N_days)))
    N_obsolete = 0
    for job in all_job:
        try:
            if job.type == '1':
                obj = Design1D.objects.get(job_id=job.job_id)
            elif job.type == '2':
                obj = Design2D.objects.get(job_id=job.job_id)
            elif job.type == '3':
                obj = Design3D.objects.get(job_id=job.job_id)
            else:
                # Unrecognized type: previously `obj` was left unbound (or
                # stale from the previous iteration); nothing to delete here.
                obj = None
            if obj is not None:
                obj.delete()
            # BUGFIX: the '%' formatting used to be applied OUTSIDE the
            # glob.glob(...) call (i.e. to the returned list), raising a
            # TypeError that the bare except silently swallowed -- so the
            # obsolete result files were never actually removed from disk.
            for f in glob.glob('%s/data/%sd/result_%s.*'
                               % (MEDIA_ROOT, job.type, job.job_id)):
                os.remove(f)
        except Exception:
            pass  # best-effort: a broken record must not stop the sweep
        job.delete()
        N_obsolete += 1

    # Pass 2: collect "<N>d/result_<job_id>" stems for every result file on
    # disk, then remove the ones with no matching Design record.
    all_files = set()
    N_orphan = 0
    for i in xrange(3):
        for f in glob.glob('%s/data/%sd/result_*.*' % (MEDIA_ROOT, i + 1)):
            # Strip the MEDIA_ROOT prefix and the file extension.
            all_files.add(f[f.find('/result_') - 2:f.rfind('.')])
    for f in all_files:
        job_id = f[f.find('result_') + 7:]
        job_type = f[:f.find('/') - 1]
        if job_type == '1':
            obj = Design1D.objects.filter(job_id=job_id)
        elif job_type == '2':
            obj = Design2D.objects.filter(job_id=job_id)
        elif job_type == '3':
            obj = Design3D.objects.filter(job_id=job_id)
        else:
            continue  # path did not parse to a known job type
        if not len(obj):
            for ff in glob.glob('%s/data/%s.*' % (MEDIA_ROOT, f)):
                os.remove(ff)
            try:
                job = JobIDs.objects.get(job_id=job_id)
                job.delete()
                N_orphan += 1
            except Exception:
                pass  # no JobIDs row for this orphan; files removed above

    self.stdout.write(
        " \033[92mSUCCESS\033[0m: \033[94m%s\033[0m obsolete job result files removed."
        % N_obsolete)
    self.stdout.write(
        " \033[92mSUCCESS\033[0m: \033[94m%s\033[0m orphan job result files removed."
        % N_orphan)
    self.stdout.write("Time elapsed: %.1f s.\n" % (time.time() - t))

    if not DEBUG:
        t_now = datetime.datetime.now().strftime('%b %d %Y (%a) @ %H:%M:%S')
        send_notify_emails(
            '{%s} SYSTEM: Quarterly Cleanup Notice' % env('SERVER_NAME'),
            'This is an automatic email notification for the success of scheduled quarterly cleanup of the %s Server local results.\n\nThe crontab job is scheduled at 00:00 (UTC) on 1st day of every 3 months.\n\nThe last system backup was performed at %s (PDT).\n\n%s Admin\n'
            % (env('SERVER_NAME'), t_now, env('SERVER_NAME')))
        self.stdout.write("Admin email (Quarterly Cleanup Notice) sent.")
    self.stdout.write("All done successfully!")
    self.stdout.write("Time elapsed: %.1f s." % (time.time() - t0))
def handle(self, *args, **options):
    # Weekly Google Drive sync: upload the four local backup archives
    # (mysql / static / apache / config) via the external `drive` CLI,
    # prune remote copies older than KEEP_BACKUP days, then notify admins
    # (Slack or email). Exits 1 if any step failed.
    t0 = time.time()
    self.stdout.write('%s:\t%s' % (time.ctime(), ' '.join(sys.argv)))

    # --item limits which archives are uploaded; default is all four.
    if options['item']:
        is_apache = 'apache' in options['item']
        is_config = 'config' in options['item']
        is_mysql = 'mysql' in options['item']
        is_static = 'static' in options['item']
    else:
        is_apache, is_config, is_mysql, is_static = True, True, True, True

    d = time.strftime('%Y%m%d')  # datetime.datetime.now().strftime('%Y%m%d')
    # In DEBUG the 'echo' prefix turns each shell command into a no-op;
    # otherwise run from APACHE_ROOT where the drive credentials live.
    gdrive_dir = 'echo' if DEBUG else 'cd %s' % APACHE_ROOT
    prefix = '_DEBUG' if DEBUG else ''
    flag = False  # set True on any failure; drives the final exit status

    if is_mysql:
        t = time.time()
        self.stdout.write("#1: Uploading MySQL database...")
        try:
            subprocess.check_call(
                '%s && drive upload -f %s/backup/backup_mysql.tgz -t %s_%s_mysql%s.tgz'
                % (gdrive_dir, MEDIA_ROOT, env('SERVER_NAME'), d, prefix),
                shell=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            send_error_slack(traceback.format_exc(), 'Upload MySQL Database',
                             ' '.join(sys.argv), 'log_cron_gdrive.log')
            flag = True
        else:
            self.stdout.write(
                " \033[92mSUCCESS\033[0m: \033[94mMySQL\033[0m database uploaded."
            )
            self.stdout.write("Time elapsed: %.1f s." % (time.time() - t))

    if is_static:
        t = time.time()
        self.stdout.write("#2: Uploading static files...")
        try:
            subprocess.check_call(
                '%s && drive upload -f %s/backup/backup_static.tgz -t %s_%s_static%s.tgz'
                % (gdrive_dir, MEDIA_ROOT, env('SERVER_NAME'), d, prefix),
                shell=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            send_error_slack(traceback.format_exc(), 'Upload Static Files',
                             ' '.join(sys.argv), 'log_cron_gdrive.log')
            flag = True
        else:
            self.stdout.write(
                " \033[92mSUCCESS\033[0m: \033[94mstatic\033[0m files uploaded."
            )
            self.stdout.write("Time elapsed: %.1f s." % (time.time() - t))

    if is_apache:
        t = time.time()
        self.stdout.write("#3: Uploading apache2 settings...")
        try:
            subprocess.check_call(
                '%s && drive upload -f %s/backup/backup_apache.tgz -t %s_%s_apache%s.tgz'
                % (gdrive_dir, MEDIA_ROOT, env('SERVER_NAME'), d, prefix),
                shell=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            send_error_slack(traceback.format_exc(), 'Upload Apache2 Settings',
                             ' '.join(sys.argv), 'log_cron_gdrive.log')
            flag = True
        else:
            self.stdout.write(
                " \033[92mSUCCESS\033[0m: \033[94mapache2\033[0m settings uploaded."
            )
            self.stdout.write("Time elapsed: %.1f s." % (time.time() - t))

    if is_config:
        t = time.time()
        self.stdout.write("#4: Uploading config settings...")
        try:
            subprocess.check_call(
                '%s && drive upload -f %s/backup/backup_config.tgz -t %s_%s_config%s.tgz'
                % (gdrive_dir, MEDIA_ROOT, env('SERVER_NAME'), d, prefix),
                shell=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            send_error_slack(traceback.format_exc(), 'Upload Config Settings',
                             ' '.join(sys.argv), 'log_cron_gdrive.log')
            flag = True
        else:
            self.stdout.write(
                " \033[92mSUCCESS\033[0m: \033[94mconfig\033[0m settings uploaded."
            )
            self.stdout.write("Time elapsed: %.1f s." % (time.time() - t))

    # Prune remote archives whose modifiedDate is older than KEEP_BACKUP
    # days. Each `drive list | awk` pipeline yields file ids; [1:] drops the
    # header token.
    t = time.time()
    self.stdout.write("#5: Removing obsolete backups...")
    try:
        old = (datetime.date.today() - datetime.timedelta(days=KEEP_BACKUP)
               ).strftime('%Y-%m-%dT00:00:00')
        list_mysql = subprocess.Popen(
            "%s && drive list -q \"title contains '%s_' and (title contains '_mysql.tgz' or title contains '_mysql_DEBUG.tgz') and modifiedDate <= '%s'\"| awk '{printf $1\" \"}'"
            % (gdrive_dir, env('SERVER_NAME'), old),
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT).communicate()[0].strip().split()[1:]
        list_static = subprocess.Popen(
            "%s && drive list -q \"title contains '%s_' and (title contains '_static.tgz' or title contains '_static_DEBUG.tgz') and modifiedDate <= '%s'\"| awk '{ printf $1\" \"}'"
            % (gdrive_dir, env('SERVER_NAME'), old),
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT).communicate()[0].strip().split()[1:]
        list_apache = subprocess.Popen(
            "%s && drive list -q \"title contains '%s_' and (title contains '_apache.tgz' or title contains '_apache_DEBUG.tgz') and modifiedDate <= '%s'\"| awk '{ printf $1\" \"}'"
            % (gdrive_dir, env('SERVER_NAME'), old),
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT).communicate()[0].strip().split()[1:]
        list_config = subprocess.Popen(
            "%s && drive list -q \"title contains '%s_' and (title contains '_config.tgz' or title contains '_config_DEBUG.tgz') and modifiedDate <= '%s'\"| awk '{ printf $1\" \"}'"
            % (gdrive_dir, env('SERVER_NAME'), old),
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT).communicate()[0].strip().split()[1:]
        list_all = list_mysql + list_static + list_apache + list_config
    except Exception:
        send_error_slack(traceback.format_exc(), 'Check Obsolete Backup Files',
                         ' '.join(sys.argv), 'log_cron_gdrive.log')
        flag = True
    for id in list_all:
        try:
            # `drive info` first verifies the id before deleting it.
            subprocess.check_call('%s && drive info -i %s' % (gdrive_dir, id),
                                  shell=True,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
            subprocess.check_call('%s && drive delete -i %s' % (gdrive_dir, id),
                                  shell=True,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            send_error_slack(traceback.format_exc(), 'Remove Obsolete Backup Files',
                             ' '.join(sys.argv), 'log_cron_gdrive.log')
            flag = True
    if not flag:
        self.stdout.write(
            " \033[92mSUCCESS\033[0m: \033[94m%s\033[0m obsolete backup files removed."
            % len(list_all))
    self.stdout.write("Time elapsed: %.1f s.\n" % (time.time() - t))

    if flag:
        self.stdout.write("Finished with errors!")
        self.stdout.write("Time elapsed: %.1f s." % (time.time() - t0))
        sys.exit(1)
    else:
        if DEBUG:
            self.stdout.write("\033[94m Uploaded to Google Drive. \033[0m")
        else:
            (t_cron, d_cron, t_now) = get_date_time('gdrive')
            # Summarize the remote listing into a name/time/size table for
            # the notification; tokens come 6 per file from `drive list`.
            gdrive_list = subprocess.Popen(
                "%s && drive list -q \"title contains '%s_' and title contains '.tgz'\""
                % (gdrive_dir, env('SERVER_NAME')),
                shell=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT).communicate()[0].strip().split()[4:]
            html = 'File\t\t\t\tTime\t\t\t\tSize\n\n'
            for i in xrange(0, len(gdrive_list), 6):
                html += '%s\t\t%s %s\t\t%s %s\n' % (
                    gdrive_list[i + 1], gdrive_list[i + 4],
                    gdrive_list[i + 5], gdrive_list[i + 2],
                    gdrive_list[i + 3])
            if IS_SLACK:
                if (not DEBUG) and BOT['SLACK']['ADMIN']['MSG_GDRIVE']:
                    send_notify_slack(SLACK['ADMIN_NAME'], '', [{
                        "fallback": 'SUCCESS',
                        "mrkdwn_in": ["text"],
                        "color": "good",
                        "text": '*SUCCESS*: Scheduled weekly *Gdrive Sync* finished @ _%s_\n' % time.ctime()
                    }])
                    # send_notify_slack(SLACK['ADMIN_NAME'], '>```%s```\n' % html, '')
            else:
                send_notify_emails(
                    '{%s} SYSTEM: Weekly Sync Notice' % env('SERVER_NAME'),
                    'This is an automatic email notification for the success of scheduled weekly sync of the %s Website backup contents to Google Drive account.\n\nThe crontab job is scheduled at %s (UTC) on every %sday.\n\nThe last system backup was performed at %s (PDT).\n\n%s\n\n%s Website Admin\n'
                    % (env('SERVER_NAME'), t_cron, d_cron, t_now, html, env('SERVER_NAME')))
            get_backup_stat()
            self.stdout.write("Admin Backup Statistics refreshed.")
    self.stdout.write("All done successfully!")
    self.stdout.write("Time elapsed: %.1f s." % (time.time() - t0))
def handle(self, *args, **options):
    """Weekly Flash Slides / group-meeting setup.

    Runs only on the configured reminder day. Reads the duty/schedule
    dashboards, creates the week's Flash Slides presentation from a Google
    Drive template, shares it with the group, saves the link to MySQL, and
    queues Slack reminders (this week's meeting, presenter PMs, admin
    bookkeeping, next week's preview). On any failure the presentation,
    DB row, and messages are rolled back and admins are alerted.
    """
    if not BOT['SLACK']['IS_FLASH_SETUP']:
        return
    t0 = time.time()
    self.stdout.write('%s:\t%s' % (time.ctime(), ' '.join(sys.argv)))
    flag_mismatch = False
    try:
        result = dash_duty(0)
        ppls = result['ppls']
        result = dash_schedule(0)
        offset_1 = int(BOT['SLACK']['REMINDER']['DAY_BEFORE_REMINDER_1'])
        offset_2 = int(BOT['SLACK']['REMINDER']['DAY_BEFORE_REMINDER_2'])
        # Reminder fires offset_1 days before the meeting weekday.
        day_1 = (result['weekday'] - offset_1)
        if day_1 < 0:
            day_1 += 7
        if datetime.utcnow().date().isoweekday() != day_1:
            return

        types = {'ES': 'EteRNA Special', 'GM': 'Group Meeting', 'JC': 'Journal Club', 'FS': 'Flash Slides'}
        year = (datetime.utcnow() + timedelta(days=offset_1)).date().year
        date = datetime.strptime("%s %s" % (result['this']['date'], year), '%b %d %Y')

        if result['this']['type'] == 'N/A':
            # No meeting this week: announce and skip presentation setup.
            msg_this = 'Hi all,\n\nThis is a reminder that there will be *`NO Meeting`* this week'
            if result['this']['note']:
                msg_this += ' due to: _%s_.' % result['this']['note']
            else:
                msg_this += '.'
            send_to = SLACK['ADMIN_NAME'] if DEBUG else "#general"
            self.msg_handles.append(
                (send_to, '', [{"fallback": 'Reminder', "mrkdwn_in": ["text"], "color": "good", "title": 'Group Meeting Reminder', "text": msg_this, "thumb_url": 'https://daslab.stanford.edu/site_media/images/group/logo_bot.jpg'}])
            )
            self.stdout.write('\033[92mSUCCESS\033[0m: Google Presentation skipped (N/A for this week: \033[94m%s\033[0m).' % datetime.strftime(date, '%b %d %Y'))
        else:
            type_this = types[result['this']['type']]
            # Sanity check: spreadsheet date must match today + offset_1.
            if (datetime.utcnow() + timedelta(days=offset_1)).date() != date.date():
                (who_id, _) = find_slack_id(ppls['weekly']['group meeting']['main'])
                send_to = SLACK['ADMIN_NAME'] if DEBUG else '@' + who_id
                send_notify_slack(send_to, '', [{"fallback": 'ERROR', "mrkdwn_in": ["text"], "color": "warning", "text": 'Mismatch in Schedule Spreadsheet date. It seems to be not up-to-date.\nFlash Slide has *`NOT`* been setup yet for this week! Please investigate and fix the setup immediately.'}])
                flag_mismatch = True
                sys.exit(1)

            # Copy the template presentation via the Drive v2 REST API.
            title = 'Flash Slides: %s' % datetime.strftime(date, '%b %d %Y')
            access_token = requests.post('https://www.googleapis.com/oauth2/v3/token?refresh_token=%s&client_id=%s&client_secret=%s&grant_type=refresh_token' % (DRIVE['REFRESH_TOKEN'], DRIVE['CLIENT_ID'], DRIVE['CLIENT_SECRET'])).json()['access_token']
            temp = requests.post('https://www.googleapis.com/drive/v2/files/%s/copy?access_token=%s' % (DRIVE['TEMPLATE_PRESENTATION_ID'], access_token), json={"title": "%s" % title})
            ppt_id = temp.json()['id']
            temp = requests.post('https://www.googleapis.com/drive/v2/files/%s/permissions?sendNotificationEmails=false&access_token=%s' % (ppt_id, access_token), json={"role": "writer", "type": "group", "value": "*****@*****.**"})
            if temp.status_code != 200:
                self.stdout.write('\033[41mERROR\033[0m: Google Presentation (\033[94m%s\033[0m) created but NOT shared.' % ppt_id)
                if IS_SLACK:
                    (who_id, _) = find_slack_id(ppls['weekly']['flash slide']['main'])
                    send_to = SLACK['ADMIN_NAME'] if DEBUG else '@' + who_id
                    send_notify_slack(send_to, '', [{"fallback": 'ERROR', "mrkdwn_in": ["text"], "color": "warning", "text": 'FlashSlide was created but failed on sharing to the group.\nFlash Slide setup is *`NOT`* complete! Please investigate and fix the setup immediately.'}])
                    send_error_slack(temp.json(), 'Group Meeting Setup', ' '.join(sys.argv), 'log_cron_cache.log')
            else:
                self.stdout.write('\033[92mSUCCESS\033[0m: Google Presentation (\033[94m%s\033[0m) created and shared.' % ppt_id)
                flash_slides = FlashSlide(date=date, link='https://docs.google.com/presentation/d/%s/edit#slide=id.p' % ppt_id)
                flash_slides.save()
                self.stdout.write('\033[92mSUCCESS\033[0m: Google Presentation (\033[94m%s\033[0m) saved in MySQL.' % ppt_id)

            # Build the presenter list for this week; `flag` marks an
            # end-of-rotation talk (note text with spaces removed).
            flag = result['this']['note'].lower().replace(' ', '')
            name = result['this']['who']
            ids = []
            if '/' in name or '&' in name:
                names = name.replace('/', '*|*').replace('&', '*|*').replace(' ', '').split('*|*')
            elif name.strip() and name != '-':
                names = [name]
            else:
                names = []
            if name:
                for name in names:
                    (who_id, sunet_id) = find_slack_id(name)
                    if flag == 'endofrotationtalk' and BOT['SLACK']['REMINDER']['ROT']['REMINDER_1']:
                        if GROUP.find_type(sunet_id) == 'roton':
                            msg_who = 'Just a reminder: Please send your presentation to %s (site admin) for `archiving` *after* your presentation this _%s_.' % (SLACK['ADMIN_NAME'], datetime.strftime(date, '%A'))
                            ids.append('_' + name + '_ <@' + who_id + '>')
                            send_to = SLACK['ADMIN_NAME'] if DEBUG else '@' + who_id
                            self.msg_handles.append(
                                (send_to, '', [{"fallback": 'Reminder', "mrkdwn_in": ["text"], "color": "good", "text": msg_who}]))
                        else:
                            if sunet_id == 'none':
                                self.stdout.write('\033[41mERROR\033[0m: rotation student (\033[94m%s\033[0m) not found.' % name)
                            elif sunet_id == 'ambiguous':
                                self.stdout.write('\033[41mERROR\033[0m: rotation student (\033[94m%s\033[0m) is ambiguate (more than 1 match).' % name)
                            else:
                                self.stdout.write('\033[41mERROR\033[0m: rotation student (\033[94m%s\033[0m) not available in database.' % name)
                    elif GROUP.find_type(sunet_id) in ['admin', 'group', 'alumni', 'other']:
                        ids.append('_' + name + '_ <@' + who_id + '>')
                    else:
                        ids.append('_%s_' % name)
            else:
                ids = ['_(None)_']

            # Admin bookkeeping reminders (rotation / JC / ES entries).
            (who_id, _) = find_slack_id(ppls['monthly']['website']['main'])
            send_to = SLACK['ADMIN_NAME'] if DEBUG else '@' + who_id
            if flag == 'endofrotationtalk' and BOT['SLACK']['REMINDER']['ROT']['REMINDER_ADMIN']:
                self.msg_handles.append(
                    (send_to, '', [{"fallback": 'REMINDER', "mrkdwn_in": ["text"], "color": "warning", "text": '*REMINDER*: Add *RotationStudent* entry for _%s_.' % datetime.strftime(date, '%b %d %Y (%a)')}])
                )
            if result['this']['type'] == 'JC' and BOT['SLACK']['REMINDER']['JC']['REMINDER_ADMIN']:
                self.msg_handles.append(
                    (send_to, '', [{"fallback": 'REMINDER', "mrkdwn_in": ["text"], "color": "warning", "text": '*REMINDER*: Add *JournalClub* entry for _%s_.' % datetime.strftime(date, '%b %d %Y (%a)')}])
                )
            elif result['this']['type'] == 'ES' and BOT['SLACK']['REMINDER']['ES']['REMINDER_ADMIN']:
                self.msg_handles.append(
                    (send_to, '', [{"fallback": 'REMINDER', "mrkdwn_in": ["text"], "color": "warning", "text": '*REMINDER*: Add *EternaYoutube* entry for _%s_.' % datetime.strftime(date, '%b %d %Y (%a)')}])
                )

            # Main announcement to #general plus the slides link.
            send_to = SLACK['ADMIN_NAME'] if DEBUG else "#general"
            super_prefix = '*Extended/Super* ' if result['this']['type'] == 'FS' else ''
            self.msg_handles.append(
                (send_to, '', [{"fallback": 'Reminder', "mrkdwn_in": ["text", "fields"], "color": "good", "title": 'Group Meeting Reminder', "text": 'Hi all,\n\nThis is a reminder that group meeting will be %s*`%s`* for this week.\n' % (super_prefix, type_this), "thumb_url": 'https://daslab.stanford.edu/site_media/images/group/logo_bot.jpg', "fields": [{'title': 'Date', 'value': '_%s_' % datetime.strftime(date, '%b %d %Y (%a)'), 'short': True}, {'title': 'Time & Place', 'value': '_%s @ %s_' % (result['time']['start'], result['place']), 'short': True}, {'title': 'Type', 'value': '`%s`' % type_this, 'short': True}, {'title': 'Presenter', 'value': '%s' % ', \n'.join(ids), 'short': True}]}])
            )
            self.msg_handles.append(
                (send_to, '', [{"fallback": '%s' % title, "mrkdwn_in": ["text"], "color": "warning", "title": '%s' % title, "text": '*<https://docs.google.com/presentation/d/%s/edit#slide=id.p>*\nA <https://daslab.stanford.edu/group/flash_slide/|full list> of Flash Slide links is available on the DasLab Website.' % ppt_id}])
            )

        # Post-rotation cleanup reminder (permissions revocation).
        if result['last']['note'].lower().replace(' ', '') == 'endofrotationtalk' and BOT['SLACK']['REMINDER']['ROT']['REMINDER_2']:
            (who_id, _) = find_slack_id(ppls['quarterly']['github']['main'])
            send_to = SLACK['ADMIN_NAME'] if DEBUG else '@' + who_id
            self.msg_handles.append(
                (send_to, '', [{"fallback": 'REMINDER', "mrkdwn_in": ["text"], "color": "warning", "text": '*REMINDER*: Revoke permissions (_Group Website_ and _Slack Membership_) of recent finished *RotationStudent*.'}])
            )

        # Preview of next week's meeting.
        if result['next']['type'] == 'N/A':
            msg_next = 'For next week, there will be *`NO Meeting`*'
            if result['next']['note']:
                msg_next += ' due to: _%s_.' % result['next']['note']
            else:
                msg_next += '.'
            send_to = SLACK['ADMIN_NAME'] if DEBUG else "#general"
            self.msg_handles.append(
                (send_to, '', [{"fallback": 'Reminder', "mrkdwn_in": ["text"], "color": "439fe0", "text": msg_next}])
            )
        else:
            type_next = types[result['next']['type']]
            year = (datetime.utcnow() + timedelta(days=(offset_1 + 7))).date().year
            date = datetime.strptime("%s %s" % (result['next']['date'], year), '%b %d %Y')
            msg_who = 'Just a reminder that you are up for `%s` *next* _%s_ (*%s*).\n' % (type_next, datetime.strftime(date, '%A'), datetime.strftime(date, '%b %d'))
            if result['next']['type'] == 'JC' and BOT['SLACK']['REMINDER']['JC']['REMINDER_1']:
                date = (datetime.utcnow() + timedelta(days=(offset_1 + 7 - offset_2))).date()
                msg_who += ' Please post your paper of choice to the group `#general` channel by *next* _%s_ (*%s*).\n' % (datetime.strftime(date, '%A'), datetime.strftime(date, '%b %d'))
            elif result['next']['type'] == 'ES' and BOT['SLACK']['REMINDER']['ES']['REMINDER_1']:
                date = (datetime.utcnow() + timedelta(days=(offset_1 + 7 - offset_2))).date()
                msg_who += ' Please post a brief description of the topic to the group `#general` channel by *next* _%s_ (*%s*) to allow time for releasing news on both DasLab Website and EteRNA broadcast.\n' % (datetime.strftime(date, '%A'), datetime.strftime(date, '%b %d'))

            name = result['next']['who']
            ids = []
            if '/' in name or '&' in name:
                names = name.replace('/', '*|*').replace('&', '*|*').replace(' ', '').split('*|*')
            elif name.strip() and name != '-':
                names = [name]
            else:
                names = []
            if name:
                for name in names:
                    (who_id, sunet_id) = find_slack_id(name)
                    if GROUP.find_type(sunet_id) != 'unknown':
                        ids.append('_' + name + '_ <@' + who_id + '>')
                        # PM the presenter only if the relevant reminder
                        # toggles are enabled for next week's meeting type.
                        if (result['next']['type'] == 'JC' and BOT['SLACK']['REMINDER']['JC']['REMINDER_1']) or (result['next']['type'] == 'ES' and BOT['SLACK']['REMINDER']['ES']['REMINDER_1']) or (result['next']['type'] == 'GM' and (BOT['SLACK']['REMINDER']['JC']['REMINDER_1'] or BOT['SLACK']['REMINDER']['ES']['REMINDER_1'] or BOT['SLACK']['REMINDER']['ROT']['REMINDER_1'])):
                            send_to = SLACK['ADMIN_NAME'] if DEBUG else '@' + who_id
                            self.msg_handles.append(
                                (send_to, '', [{"fallback": 'Reminder', "mrkdwn_in": ["text"], "color": "good", "text": msg_who}]))
                        else:
                            if sunet_id == 'none':
                                self.stdout.write('\033[41mERROR\033[0m: member (\033[94m%s\033[0m) not found.' % name)
                            elif sunet_id == 'ambiguous':
                                self.stdout.write('\033[41mERROR\033[0m: member (\033[94m%s\033[0m) is ambiguate (more than 1 match).' % name)
                            else:
                                self.stdout.write('\033[41mERROR\033[0m: member (\033[94m%s\033[0m) not available in database.' % name)
                    else:
                        ids.append('_%s_' % name)
            else:
                ids = ['_(None)_']

            send_to = SLACK['ADMIN_NAME'] if DEBUG else "#general"
            date = datetime.strptime("%s %s" % (result['next']['date'], year), '%b %d %Y')
            # BUGFIX: thumb_url previously read 'https: //daslab...' (space
            # inside the scheme), which is not a valid URL.
            self.msg_handles.append(
                (send_to, '', [{"fallback": 'Reminder', "mrkdwn_in": ["text", "fields"], "color": "439fe0", "text": 'For next week: \n', "thumb_url": 'https://daslab.stanford.edu/site_media/images/group/logo_bot.jpg', "fields": [{'title': 'Date', 'value': '_%s_' % datetime.strftime(date, '%b %d %Y (%a)'), 'short': True}, {'title': 'Time & Place', 'value': '_%s @ %s_' % (result['time']['start'], result['place']), 'short': True}, {'title': 'Type', 'value': '`%s`' % type_next, 'short': True}, {'title': 'Presenter', 'value': '%s' % ', \n'.join(ids), 'short': True}]}])
            )
            (who_id, _) = find_slack_id(ppls['weekly']['group meeting']['main'])
            self.msg_handles.append(
                (send_to, '', [{"fallback": 'Reminder', "mrkdwn_in": ["text"], "color": "danger", "text": 'The <https://daslab.stanford.edu/group/schedule/|full schedule> is available on the DasLab Website. For questions regarding the schedule, please contact <@%s>. Thanks for your attention.\n\nSite Admin: <%s>' % (who_id, SLACK['ADMIN_NAME'])}])
            )
    except Exception:
        # BUGFIX: `ts` and `err` were referenced by the notification email
        # below but never defined in this scope, so the email fallback path
        # raised NameError; capture them up front.
        err = traceback.format_exc()
        ts = '%s\t\t%s' % (time.ctime(), ' '.join(sys.argv))
        if flag_mismatch:
            return
        send_error_slack(err, 'Group Meeting Setup', ' '.join(sys.argv), 'log_cron_meeting.log')
        if result['this']['type'] != 'N/A':
            # Roll back the DB row and the Drive copy.
            # NOTE(review): `ppt_id`/`access_token` are undefined when the
            # failure happened before the Drive calls -- confirm intended.
            year = (datetime.utcnow() + timedelta(days=offset_1)).date().year
            date = datetime.strptime("%s %s" % (result['this']['date'], year), '%b %d %Y')
            FlashSlide.objects.get(date=date).delete()
            requests.delete('https://www.googleapis.com/drive/v2/files/%s/?access_token=%s' % (ppt_id, access_token))
            self.stdout.write('\033[92mSUCCESS\033[0m: Google Presentation (\033[94m%s\033[0m) deleted.' % ppt_id)
            self.stdout.write('\033[92mSUCCESS\033[0m: Google Presentation (\033[94m%s\033[0m) removed in MySQL.' % ppt_id)
        if IS_SLACK:
            (who_id, _) = find_slack_id(ppls['weekly']['flash slide']['main'])
            send_to = SLACK['ADMIN_NAME'] if DEBUG else '@' + who_id
            send_notify_slack(send_to, '', [{"fallback": 'ERROR', "mrkdwn_in": ["text"], "color": "warning", "text": 'FlashSlide table in MySQL database, presentation in Google Drive, and posted messages in Slack are rolled back.\nFlash Slide has *`NOT`* been setup yet for this week! Please investigate and fix the setup immediately.'}])
        else:
            send_notify_emails('{%s} ERROR: Weekly Meeting Setup' % env('SERVER_NAME'), 'This is an automatic email notification for the failure of scheduled weekly flash slides setup. The following error occurred:\n\n%s\n\n%s\n\nFlashSlide table in MySQL database, presentation in Google Drive, and posted messages in Slack are rolled back.\n\n** Flash Slide has NOT been setup yet for this week! Please investigate and fix the setup immediately.\n\n%s Website Admin' % (ts, err, env('SERVER_NAME')))
        self.stdout.write("Finished with \033[41mERROR\033[0m!")
        self.stdout.write("Time elapsed: %.1f s." % (time.time() - t0))
        sys.exit(1)
    else:
        # Success: flush all queued messages to Slack.
        for h in self.msg_handles:
            send_notify_slack(*h)
            if '@' in h[0]:
                self.stdout.write('\033[92mSUCCESS\033[0m: PM\'ed reminder to \033[94m%s\033[0m in Slack.' % h[0])
        self.stdout.write('\033[92mSUCCESS\033[0m: Google Presentation posted in Slack.')
        self.stdout.write('\033[92mSUCCESS\033[0m: Meeting Reminder posted in Slack.')
        if (not DEBUG):
            (who_id, _) = find_slack_id(ppls['weekly']['flash slide']['main'])
            send_to = SLACK['ADMIN_NAME'] if DEBUG else '@' + who_id
            send_notify_slack(send_to, '', [{"fallback": 'SUCCESS', "mrkdwn_in": ["text"], "color": "good", "text": '*SUCCESS*: Scheduled weekly *Flash Slides Setup* finished @ _%s_. Please also paste the link in the Schedule Spreadsheet.\n' % time.ctime()}])
        self.stdout.write("Finished with \033[92mSUCCESS\033[0m!")
        self.stdout.write("Time elapsed: %.1f s." % (time.time() - t0))
def handle(self, *args, **options):
    """Weekly local backup (Slack-aware variant).

    Dumps the MySQL database and archives static files, apache2 and config
    settings as .tgz files under MEDIA_ROOT/backup. --item limits which
    sections run (default: all). Failures go to Slack via
    send_error_slack; on success admins are notified (Slack or email).
    Exits 1 if any section failed.
    """
    t0 = time.time()
    self.stdout.write('%s:\t%s' % (time.ctime(), ' '.join(sys.argv)))
    if options['item']:
        is_apache = 'apache' in options['item']
        is_config = 'config' in options['item']
        is_mysql = 'mysql' in options['item']
        is_static = 'static' in options['item']
    else:
        is_apache, is_config, is_mysql, is_static = True, True, True, True
    flag = False  # set True on any failure; drives the final exit status

    if is_mysql:
        t = time.time()
        self.stdout.write("#1: Backing up MySQL database...")
        try:
            subprocess.check_call('mysqldump --quick %s -u %s -p%s > %s/backup/backup_mysql' % (env.db()['NAME'], env.db()['USER'], env.db()['PASSWORD'], MEDIA_ROOT), shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            # BUGFIX: the archive was previously never closed
            # (tarfile.open(...).add(...)), so the gzip trailer and tar
            # end-of-archive blocks were never flushed, leaving a
            # corrupt/truncated .tgz on disk.
            with tarfile.open('%s/backup/backup_mysql.tgz' % MEDIA_ROOT, 'w:gz') as tf:
                tf.add('%s/backup/backup_mysql' % MEDIA_ROOT, arcname='backup_mysql')
            os.remove('%s/backup/backup_mysql' % MEDIA_ROOT)
        except Exception:
            send_error_slack(traceback.format_exc(), 'Backup MySQL Database', ' '.join(sys.argv), 'log_cron_backup.log')
            flag = True
        else:
            self.stdout.write(" \033[92mSUCCESS\033[0m: \033[94mMySQL\033[0m database dumped.")
            self.stdout.write("Time elapsed: %.1f s." % (time.time() - t))

    if is_static:
        t = time.time()
        self.stdout.write("#2: Backing up static files...")
        try:
            # BUGFIX: same unclosed-archive issue as above.
            with tarfile.open('%s/backup/backup_static.tgz' % MEDIA_ROOT, 'w:gz') as tf:
                tf.add('%s/data' % MEDIA_ROOT, arcname='data')
        except Exception:
            send_error_slack(traceback.format_exc(), 'Backup Static Files', ' '.join(sys.argv), 'log_cron_backup.log')
            flag = True
        else:
            self.stdout.write(" \033[92mSUCCESS\033[0m: \033[94mstatic\033[0m files synced.")
            self.stdout.write("Time elapsed: %.1f s." % (time.time() - t))

    if is_apache:
        t = time.time()
        self.stdout.write("#3: Backing up apache2 settings...")
        try:
            pass
            # tarfile.open('%s/backup/backup_apache2.tgz' % MEDIA_ROOT, 'w:gz').add('/etc/apache2', arcname='apache2')
        except Exception:
            send_error_slack(traceback.format_exc(), 'Backup Apache2 Settings', ' '.join(sys.argv), 'log_cron_backup.log')
            flag = True
        else:
            self.stdout.write(" \033[92mSUCCESS\033[0m: \033[94mapache2\033[0m settings saved.")
            self.stdout.write("Time elapsed: %.1f s." % (time.time() - t))

    if is_config:
        t = time.time()
        self.stdout.write("#4: Backing up config settings...")
        try:
            # BUGFIX: same unclosed-archive issue as above.
            with tarfile.open('%s/backup/backup_config.tgz' % MEDIA_ROOT, 'w:gz') as tf:
                tf.add('%s/config' % MEDIA_ROOT, arcname='config')
        except Exception:
            send_error_slack(traceback.format_exc(), 'Backup Config Settings', ' '.join(sys.argv), 'log_cron_backup.log')
            flag = True
        else:
            self.stdout.write(" \033[92mSUCCESS\033[0m: \033[94mconfig\033[0m settings saved.")
            self.stdout.write("Time elapsed: %.1f s.\n" % (time.time() - t))

    if flag:
        self.stdout.write("Finished with errors!")
        self.stdout.write("Time elapsed: %.1f s." % (time.time() - t0))
        sys.exit(1)
    else:
        if DEBUG:
            self.stdout.write("\033[94m Backed up locally. \033[0m")
        else:
            (t_cron, d_cron, t_now) = get_date_time('backup')
            # `ls -gh` yields 8 tokens per file; build a name/time/size table.
            local_list = subprocess.Popen('ls -gh %s/backup/*.*gz' % MEDIA_ROOT, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].strip().split()
            html = 'File\t\t\t\tTime\t\t\t\tSize\n\n'
            for i in xrange(0, len(local_list), 8):
                html += '%s\t\t%s %s, %s\t\t%s\n' % (local_list[i + 7], local_list[i + 4], local_list[i + 5], local_list[i + 6], local_list[i + 3])
            if IS_SLACK:
                if (not DEBUG) and BOT['SLACK']['ADMIN']['MSG_BACKUP']:
                    send_notify_slack(SLACK['ADMIN_NAME'], '', [{"fallback": 'SUCCESS', "mrkdwn_in": ["text"], "color": "good", "text": '*SUCCESS*: Scheduled weekly *Backup* finished @ _%s_\n' % time.ctime()}])
                    # send_notify_slack(SLACK['ADMIN_NAME'], '>```%s```\n' % html, '')
            else:
                send_notify_emails('{%s} SYSTEM: Weekly Backup Notice' % env('SERVER_NAME'), 'This is an automatic email notification for the success of scheduled weekly backup of the %s Website database and static contents.\n\nThe crontab job is scheduled at %s (UTC) on every %sday.\n\nThe last system backup was performed at %s (PDT).\n\n%s\n\n%s Website Admin\n' % (env('SERVER_NAME'), t_cron, d_cron, t_now, html, env('SERVER_NAME')))
                self.stdout.write("Admin email (Weekly Backup Notice) sent.")
            get_backup_stat()
            self.stdout.write("Admin Backup Statistics refreshed.")
    self.stdout.write("All done successfully!")
    self.stdout.write("Time elapsed: %.1f s." % (time.time() - t0))
def handle(self, *args, **options):
    """Weekly local backup (server / email-only variant).

    Dumps the MySQL database and archives selected static subfolders and
    config settings as .tgz files under MEDIA_ROOT/backup; apache2 is a
    placeholder. Errors are echoed and appended to the admin alert and
    cron log files; on success (and not DEBUG) admins are emailed a
    summary. Exits 1 if any section failed.
    """
    t0 = time.time()
    self.stdout.write('%s:\t%s' % (time.ctime(), ' '.join(sys.argv)))
    flag = False  # set True on any failure; drives the final exit status

    def _log_error(msg):
        # Shared error path (was duplicated verbatim in all four sections):
        # echo to console, then append to the admin alert log and the cron
        # backup log. `with` guarantees the handles are closed/flushed.
        self.stdout.write(" \033[41mERROR\033[0m: %s" % msg)
        err = traceback.format_exc()
        ts = '%s\t\t%s\n' % (time.ctime(), ' '.join(sys.argv))
        with open('%s/cache/log_alert_admin.log' % MEDIA_ROOT, 'a') as f:
            f.write(ts)
        with open('%s/cache/log_cron_backup.log' % MEDIA_ROOT, 'a') as f:
            f.write('%s\n%s\n' % (ts, err))

    t = time.time()
    self.stdout.write("#1: Backing up MySQL database...")
    try:
        subprocess.check_call('mysqldump --quick %s -u %s -p%s > %s/backup/backup_mysql' % (env.db()['NAME'], env.db()['USER'], env.db()['PASSWORD'], MEDIA_ROOT), shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        # BUGFIX: the archive was previously never closed
        # (tarfile.open(...).add(...)), so the gzip trailer and tar
        # end-of-archive blocks were never flushed -> corrupt .tgz.
        tf = tarfile.open('%s/backup/backup_mysql.tgz' % MEDIA_ROOT, 'w:gz')
        tf.add('%s/backup/backup_mysql' % MEDIA_ROOT, arcname='backup_mysql')
        tf.close()
        os.remove('%s/backup/backup_mysql' % MEDIA_ROOT)
    except Exception:
        _log_error("Failed to dump \033[94mMySQL\033[0m database.")
        flag = True
    else:
        self.stdout.write(" \033[92mSUCCESS\033[0m: \033[94mMySQL\033[0m database dumped.")
        self.stdout.write("Time elapsed: %.1f s." % (time.time() - t))

    t = time.time()
    self.stdout.write("#2: Backing up static files...")
    try:
        tf = tarfile.open('%s/backup/backup_static.tgz' % MEDIA_ROOT, 'w:gz')
        tf.add('%s/data/file' % MEDIA_ROOT, arcname='data/file')
        tf.add('%s/data/image' % MEDIA_ROOT, arcname='data/image')
        tf.add('%s/data/json' % MEDIA_ROOT, arcname='data/json')
        tf.add('%s/data/thumbnail' % MEDIA_ROOT, arcname='data/thumbnail')
        tf.close()
    except Exception:
        _log_error("Failed to archive \033[94mstatic\033[0m files.")
        flag = True
    else:
        self.stdout.write(" \033[92mSUCCESS\033[0m: \033[94mstatic\033[0m files synced.")
        self.stdout.write("Time elapsed: %.1f s." % (time.time() - t))

    t = time.time()
    self.stdout.write("#3: Backing up apache2 settings...")
    try:
        pass
        # tarfile.open('%s/backup/backup_apache2.tgz' % MEDIA_ROOT, 'w:gz').add('/etc/apache2', arcname='apache2')
    except Exception:
        _log_error("Failed to archive \033[94mapache2\033[0m settings.")
        flag = True
    else:
        self.stdout.write(" \033[92mSUCCESS\033[0m: \033[94mapache2\033[0m settings saved.")
        self.stdout.write("Time elapsed: %.1f s." % (time.time() - t))

    t = time.time()
    self.stdout.write("#4: Backing up config settings...")
    try:
        # BUGFIX: same unclosed-archive issue as in section #1.
        tf = tarfile.open('%s/backup/backup_config.tgz' % MEDIA_ROOT, 'w:gz')
        tf.add('%s/config' % MEDIA_ROOT, arcname='config')
        tf.close()
    except Exception:
        _log_error("Failed to archive \033[94mconfig\033[0m settings.")
        flag = True
    else:
        self.stdout.write(" \033[92mSUCCESS\033[0m: \033[94mconfig\033[0m settings saved.")
        self.stdout.write("Time elapsed: %.1f s.\n" % (time.time() - t))

    if flag:
        self.stdout.write("Finished with errors!")
        self.stdout.write("Time elapsed: %.1f s." % (time.time() - t0))
        sys.exit(1)
    else:
        if DEBUG:
            self.stdout.write("\033[94m Backed up locally. \033[0m")
        else:
            (t_cron, d_cron, t_now) = get_date_time('backup')
            # `ls -gh` yields 8 tokens per file; build a name/time/size table.
            local_list = subprocess.Popen('ls -gh %s/backup/*.*gz' % MEDIA_ROOT, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0].strip().split()
            html = 'File\t\t\t\tTime\t\t\t\tSize\n\n'
            for i in xrange(0, len(local_list), 8):
                html += '%s\t\t%s %s, %s\t\t%s\n' % (local_list[i + 7], local_list[i + 4], local_list[i + 5], local_list[i + 6], local_list[i + 3])
            send_notify_emails('{%s} SYSTEM: Weekly Backup Notice' % env('SERVER_NAME'), 'This is an automatic email notification for the success of scheduled weekly backup of the %s Server database and static contents.\n\nThe crontab job is scheduled at %s (UTC) on every %sday.\n\nThe last system backup was performed at %s (PDT).\n\n%s\n\n%s Admin\n' % (env('SERVER_NAME'), t_cron, d_cron, t_now, html, env('SERVER_NAME')))
            self.stdout.write("Admin email (Weekly Backup Notice) sent.")
            get_backup_stat()
            self.stdout.write("Admin Backup Statistics refreshed.")
    self.stdout.write("All done successfully!")
    self.stdout.write("Time elapsed: %.1f s." % (time.time() - t0))
def handle(self, *args, **options):
    """Weekly Google Drive sync cron command.

    Uploads the four local backup archives (MySQL dump, static files,
    apache2 settings, config settings) to Google Drive via the `drive`
    CLI, prunes remote copies older than KEEP_BACKUP days, then emails
    the admins a listing of the remote backups.  Exits with status 1 if
    any step failed (so cron notices).
    """
    t0 = time.time()
    self.stdout.write('%s:\t%s' % (time.ctime(), ' '.join(sys.argv)))

    # Date stamp embedded in the remote file titles, e.g. NAME_20200101_mysql.tgz.
    d = time.strftime('%Y%m%d')
    # In DEBUG the leading `cd APACHE_ROOT &&` becomes a harmless `echo &&`.
    gdrive_dir = 'echo' if DEBUG else 'cd %s' % APACHE_ROOT
    prefix = '_DEBUG' if DEBUG else ''
    flag = False  # any-step-failed marker; decides the exit status below

    def log_error(log_name):
        # Append a timestamped entry to the admin alert log and the full
        # traceback to the per-cron log (matches the file's other commands).
        err = traceback.format_exc()
        ts = '%s\t\t%s\n' % (time.ctime(), ' '.join(sys.argv))
        with open('%s/cache/log_alert_admin.log' % MEDIA_ROOT, 'a') as f:
            f.write(ts)
        with open('%s/cache/%s' % (MEDIA_ROOT, log_name), 'a') as f:
            f.write('%s\n%s\n' % (ts, err))

    def upload_step(step, label, noun, base):
        # Upload one archive; returns True on success, False on failure.
        # NOTE(review): step #3 uploads backup_apache.tgz while the local
        # backup command writes backup_apache2.tgz -- confirm the file name.
        t = time.time()
        self.stdout.write("#%s: Uploading %s %s..." % (step, label, noun))
        try:
            subprocess.check_call(
                '%s && drive upload -f %s/backup/backup_%s.tgz -t %s_%s_%s%s.tgz'
                % (gdrive_dir, MEDIA_ROOT, base, env('SERVER_NAME'), d, base,
                   prefix),
                shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            self.stdout.write(
                " \033[41mERROR\033[0m: Failed to upload \033[94m%s\033[0m %s."
                % (label, noun))
            log_error('log_cron_gdrive.log')
            ok = False
        else:
            self.stdout.write(
                " \033[92mSUCCESS\033[0m: \033[94m%s\033[0m %s uploaded."
                % (label, noun))
            ok = True
        self.stdout.write("Time elapsed: %.1f s." % (time.time() - t))
        return ok

    # Steps #1-#4: one upload per archive kind.
    for step, label, noun, base in (
            (1, 'MySQL', 'database', 'mysql'),
            (2, 'static', 'files', 'static'),
            (3, 'apache2', 'settings', 'apache'),
            (4, 'config', 'settings', 'config')):
        if not upload_step(step, label, noun, base):
            flag = True

    # Step #5: remove remote backups older than KEEP_BACKUP days.
    t = time.time()
    self.stdout.write("#5: Removing obsolete backups...")
    # BUGFIX: list_all must exist even when the listing step below fails,
    # otherwise the deletion loop raised NameError on the unbound name.
    list_all = []
    try:
        old = (datetime.date.today() -
               datetime.timedelta(days=KEEP_BACKUP)).strftime('%Y-%m-%dT00:00:00')
        for kind in ('mysql', 'static', 'apache', 'config'):
            # awk keeps only the first column (the file id) of each row;
            # [1:] drops the leading "Id" header token.
            out = subprocess.Popen(
                "%s && drive list -q \"title contains '%s_' and (title contains '_%s.tgz' or title contains '_%s_DEBUG.tgz') and modifiedDate <= '%s'\"| awk '{ printf $1\" \"}'"
                % (gdrive_dir, env('SERVER_NAME'), kind, kind, old),
                shell=True, stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT).communicate()[0].strip().split()[1:]
            list_all += out
    except Exception:  # was a bare except: also trapped SystemExit etc.
        self.stdout.write(
            " \033[41mERROR\033[0m: Failed to check obsolete \033[94mbackup\033[0m files."
        )
        log_error('log_cron_gdrive.log')
        flag = True

    for file_id in list_all:  # renamed from `id` (shadowed the builtin)
        try:
            subprocess.check_call(
                '%s && drive info -i %s' % (gdrive_dir, file_id),
                shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            subprocess.check_call(
                '%s && drive delete -i %s' % (gdrive_dir, file_id),
                shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            self.stdout.write(
                " \033[41mERROR\033[0m: Failed to remove obsolete \033[94mbackup\033[0m files."
            )
            log_error('log_cron_gdrive.log')
            flag = True
    if not flag:
        self.stdout.write(
            " \033[92mSUCCESS\033[0m: \033[94m%s\033[0m obsolete backup files removed."
            % len(list_all))
    self.stdout.write("Time elapsed: %.1f s.\n" % (time.time() - t))

    if flag:
        self.stdout.write("Finished with errors!")
        self.stdout.write("Time elapsed: %.1f s." % (time.time() - t0))
        sys.exit(1)
    else:
        if DEBUG:
            self.stdout.write("\033[94m Uploaded to Google Drive. \033[0m")
        else:
            (t_cron, d_cron, t_now) = get_date_time('gdrive')
            # Skip the 4-token header row; each remaining file row is
            # presumably 6 whitespace-separated fields -- TODO confirm
            # against the `drive list` output format in use.
            gdrive_list = subprocess.Popen(
                "%s && drive list -q \"title contains '%s_'\""
                % (gdrive_dir, env('SERVER_NAME')),
                shell=True, stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT).communicate()[0].strip().split()[4:]
            html = 'File\t\t\t\tTime\t\t\t\tSize\n\n'
            for i in xrange(0, len(gdrive_list), 6):
                html += '%s\t\t%s %s\t\t%s %s\n' % (
                    gdrive_list[i + 1], gdrive_list[i + 4],
                    gdrive_list[i + 5], gdrive_list[i + 2],
                    gdrive_list[i + 3])
            send_notify_emails(
                '{%s} SYSTEM: Weekly Sync Notice' % env('SERVER_NAME'),
                'This is an automatic email notification for the success of scheduled weekly sync of the %s Server backup contents to Google Drive account.\n\nThe crontab job is scheduled at %s (UTC) on every %sday.\n\nThe last system backup was performed at %s (PDT).\n\n%s\n\n%s Admin\n'
                % (env('SERVER_NAME'), t_cron, d_cron, t_now, html,
                   env('SERVER_NAME')))
            self.stdout.write("Admin email (Weekly Sync Notice) sent.")
            get_backup_stat()
            self.stdout.write("Admin Backup Statistics refreshed.")
        self.stdout.write("All done successfully!")
        self.stdout.write("Time elapsed: %.1f s." % (time.time() - t0))
def handle(self, *args, **options):
    """Weekly error-report cron command.

    If Slack reporting is disabled entirely, does nothing.  Otherwise:
    emails the aggregated admin error log (unless errors already went to
    Slack) and clears it, gzips the cron log, and deletes SlackMessage
    rows older than 15 days.  On any failure the error is posted to
    Slack and the process exits with status 1.
    """
    if not BOT['SLACK']['IS_REPORT']:
        return

    t0 = time.time()
    self.stdout.write('%s:\t%s' % (time.ctime(), ' '.join(sys.argv)))
    try:
        alert_log = '%s/cache/log_alert_admin.log' % MEDIA_ROOT
        if os.path.exists(alert_log):
            with open(alert_log, 'r') as f:
                lines = f.read()
            if not IS_SLACK:
                # Errors were not streamed to Slack, so mail the digest
                # and truncate the log.
                send_notify_emails(
                    '{%s} SYSTEM: Weekly Error Report' % env('SERVER_NAME'),
                    'This is an automatic email notification for the aggregated weekly error report. The following error occurred:\n\n\n%s\n\n%s Website Admin'
                    % (lines, env('SERVER_NAME')))
                with open(alert_log, 'w') as f:
                    f.write('')
                self.stdout.write(
                    "\033[92mSUCCESS\033[0m: All errors were sent to \033[94mEmail\033[0m. Log cleared."
                )
            else:
                self.stdout.write(
                    "\033[92mSUCCESS\033[0m: All errors were reported to \033[94mSlack\033[0m already, nothing to do."
                )

        if os.path.exists('%s/cache/log_cron.log' % MEDIA_ROOT):
            subprocess.check_call('gzip -f %s/cache/log_cron.log' % MEDIA_ROOT,
                                  shell=True, stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
            self.stdout.write(
                "\033[92mSUCCESS\033[0m: \033[94mlog_cron.log\033[0m gzipped."
            )
        else:
            self.stdout.write(
                "\033[92mSUCCESS\033[0m: \033[94mlog_cron.log\033[0m not exist, nothing to do."
            )

        # BUGFIX: this file imports the `datetime` *module* (see the uses of
        # datetime.date.today() / datetime.timedelta elsewhere), so the old
        # `datetime.utcnow()` raised AttributeError and the bare `timedelta`
        # raised NameError -- every run fell into the except branch below.
        cutoff = (datetime.datetime.utcnow() -
                  datetime.timedelta(days=15)).date()
        # Delete per-object (not queryset.delete()) in case the model
        # customizes delete() -- preserves the original behavior.
        for msg in SlackMessage.objects.filter(date__lte=cutoff):
            msg.delete()
    except Exception:
        send_error_slack(traceback.format_exc(), 'Weekly Error Report',
                         ' '.join(sys.argv), 'log_cron_report.log')
        self.stdout.write("Finished with \033[41mERROR\033[0m!")
        self.stdout.write("Time elapsed: %.1f s." % (time.time() - t0))
        sys.exit(1)

    if (not DEBUG) and BOT['SLACK']['ADMIN']['MSG_REPORT']:
        send_notify_slack(SLACK['ADMIN_NAME'], '', [{
            "fallback": 'SUCCESS',
            "mrkdwn_in": ["text"],
            "color": "good",
            "text": '*SUCCESS*: Scheduled weekly *Report* finished @ _%s_\n'
                    % time.ctime()
        }])
    self.stdout.write("Finished with \033[92mSUCCESS\033[0m!")
    self.stdout.write("Time elapsed: %.1f s." % (time.time() - t0))