def __close_ticket(ticket, resolution_id):
    """
        Close a ticket: notify every contacted provider, then mark it closed.

        :param `Ticket` ticket : A Cerberus `Ticket` instance
        :param int resolution_id: The id of the Cerberus `Resolution`
    """
    # Collect the distinct provider addresses that were contacted for this ticket
    contacted = ContactedProvider.objects.filter(
        ticket_id=ticket.id
    ).values_list('provider__email', flat=True).distinct()

    common.send_email(
        ticket,
        list(set(contacted)),
        settings.CODENAMES['case_closed'],
    )

    # If a mailer thread was opened for this ticket, close it as well
    if ticket.mailerId:
        mailer = ImplementationFactory.instance.get_singleton_of('MailerServiceBase')
        mailer.close_thread(ticket)

    ticket.previousStatus = ticket.status
    ticket.status = 'Closed'
    ticket.resolution_id = resolution_id
def notify_buyers(success, buyers, sale_id, device_id, sale_device_id):
    """Email every buyer the outcome of a sale.

    :param success: truthy -> send the winner mail (and mark `won` in the DB);
                    falsy -> send the "sorry" mail
    :param buyers: iterable of rows whose first element is the user id
    :param sale_id / device_id / sale_device_id: DB keys identifying the sale context
    """
    # Sale, device and company details do not depend on the buyer: fetch them
    # once instead of re-querying on every loop iteration (original did the
    # latter).
    (sale_name, sale_date) = get_sale_details_by_sale_id(sale_id)
    (device_name, device_description, price) = get_device_details_by_device_id(device_id)
    company_name = get_company_name()
    if success:
        # 17:00 on the sale day plus 24h — presumably the pickup deadline;
        # TODO confirm timezone handling
        close_date = sale_date.replace(hour=17, minute=0) + timedelta(hours=24)

    for buyer in buyers:
        user_id = buyer[0]
        debug(" Notifying buyer %s, success: %s" % (user_id, success))
        user_email = get_user_email_by_user_id(user_id)
        if success:
            send_email(
                render_template('notify_success_buyer.html',
                                sale_name=sale_name,
                                sale_date=sale_date,
                                device_name=device_name,
                                device_description=device_description,
                                price=price,
                                close_date=close_date,
                                company_name=company_name),
                user_email, 'Success!')
            # SECURITY: string-built SQL. Both values come from the DB, so the
            # immediate risk is low, but update_db should take bound parameters.
            update_db(
                "update tbl_user_sale_device set won=1 where user_id=%s and sale_device_id=%s"
                % (user_id, sale_device_id))
        else:
            send_email(
                render_template('notify_failed_buyer.html',
                                sale_name=sale_name,
                                sale_date=sale_date,
                                device_name=device_name,
                                device_description=device_description,
                                price=price,
                                company_name=company_name),
                user_email, 'Sorry :(')
def initiate_next_task(self, user_id, user_email, current_task_nr, next_task_nr):
    """
    Initiate the generation of the next task for the user if he has not
    got the task already and if it has already started
    """
    if not c.is_valid_task_nr(self.dbs["course"], next_task_nr,
                              self.queues["logger"], self.name):
        return
    if current_task_nr >= next_task_nr:
        # the user already received this task — nothing to do
        return

    task_start = c.get_task_starttime(self.dbs["course"], next_task_nr,
                                      self.queues["logger"], self.name)
    if task_start <= datetime.datetime.now():
        # task has already started -> generate it right away
        c.generate_task(self.queues["generator"], user_id, next_task_nr,
                        user_email, "")
    else:
        # not started yet -> send the "CurLast" notification instead
        c.send_email(self.queues["sender"], str(user_email), str(user_id),
                     "CurLast", str(next_task_nr), "", "")
def reset(self, button):
    """Reset the password for the account matching the email entered in the
    form: store a freshly generated password and mail it to the user.

    Shows a confirmation dialog on success (then returns to the login
    screen), or an error dialog when the email is unknown.
    """
    email = self.builder.get_object("email")  # get the value from the field
    db = sqlite3.connect('SaedRobot.db')
    try:
        c = db.cursor()
        # query the DB if such an email exist
        c.execute('SELECT * from users WHERE email=?', (email.get_text(),))
        row = c.fetchone()
        # check+send the new password
        if row is not None and len(row) > 0:  # BUG FIX: was `row != None`
            username = row[0]
            randPassword = id_generator()
            c.execute("update users set password=? where username=?",
                      (randPassword, username,))
            db.commit()
            send_email(randPassword, username,
                       "Saed Robot - Reset your password", email.get_text())
            dialog = Gtk.MessageDialog(None, 0, Gtk.MessageType.INFO,
                                       Gtk.ButtonsType.OK,
                                       "Your new password has been sent to your email")
            dialog.set_title("Confirmation message")
            dialog.run()
            dialog.close()
            self.window.destroy()
            self.window = login.loginClass()
        # show error message if no such email exists
        else:
            dialog = Gtk.MessageDialog(None, 0, Gtk.MessageType.ERROR,
                                       Gtk.ButtonsType.OK,
                                       "Invalid entry, please try again")
            dialog.set_title("Error message")
            dialog.run()
            dialog.close()
    finally:
        # BUG FIX: the sqlite connection was never closed
        db.close()
def onAddNewUserButtonPressed(self, button):
    """Validate the new-user form; on success create the account, email the
    generated password to the user and return to the user-management screen.

    Every validation failure is reported through a modal error dialog.
    """
    def show_dialog(msg_type, title, text):
        # One modal-dialog helper instead of seven copy-pasted blocks.
        dialog = Gtk.MessageDialog(None, 0, msg_type, Gtk.ButtonsType.OK, text)
        dialog.set_title(title)
        dialog.run()
        dialog.close()

    username = str(self.username.get_text())
    email_text = str(self.email.get_text())

    db = sqlite3.connect('SaedRobot.db')
    try:
        c = db.cursor()
        c.execute('SELECT * from users WHERE username= ? ', (username,))
        data = c.fetchall()
        c.execute('SELECT * from users WHERE email= ? ', (email_text,))
        data1 = c.fetchall()

        if len(username) == 0:
            show_dialog(Gtk.MessageType.ERROR, "Error message",
                        "No username entered, please enter a username")
        elif len(email_text) == 0:
            show_dialog(Gtk.MessageType.ERROR, "Error message",
                        "No email entered, please enter an email")
        elif len(data) > 0:
            # BUG FIX: grammar of the user-facing message ("is already exist")
            show_dialog(Gtk.MessageType.ERROR, "Error message",
                        "This username already exists!")
        elif len(data1) > 0:
            show_dialog(Gtk.MessageType.ERROR, "Error message",
                        "This email already exists!")
        elif not re.match("^[a-zA-Z0-9_]+$", username):
            # BUG FIX: message said "Invalid username address"
            show_dialog(Gtk.MessageType.ERROR, "Error message",
                        "Invalid username, please enter a valid username.")
        elif not validate_email(email_text):
            show_dialog(Gtk.MessageType.ERROR, "Error message",
                        "Invalid email address, please enter a valid address.")
        else:
            password = id_generator()
            c.execute('INSERT INTO users(USERNAME,PASSWORD,EMAIL,ADMIN) VALUES (?,?,?,0)',
                      (username, str(password), email_text))
            db.commit()
            send_email(password, username,
                       "Saed Robot - Registration Confirmation", email_text)
            show_dialog(Gtk.MessageType.INFO, "Confirmation message",
                        "The user has been added")
            self.window.destroy()
            self.window = ManageUsersAccounts.ManageUsersAccounts(self.MyUsername,
                                                                  self.userType)
    finally:
        # BUG FIX: the sqlite connection was never closed
        db.close()
def a_question_was_asked(self, user_email, mail, messageid):
    """Process a question that was asked by a user.

    Acknowledges receipt to the user, then forwards the mail to the task
    operators (if the subject names a valid task number) or to the admins.
    """
    # BUG FIX: docstring opened with four quotes (`""""`) in the original.
    mail_subject = str(mail['subject'])
    logmsg = 'The user has a question, please take care of that!'
    c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
    # acknowledge receipt to the user
    c.send_email(self.sender_queue, user_email, "", "Question", "", "", "")

    # was the question asked to a specific task_nr that is valid?
    search_obj = re.search('[0-9]+', mail_subject)
    if (search_obj is not None) and int(search_obj.group()) <= c.get_num_tasks(
            self.coursedb, self.logger_queue, self.name):
        fwd_mails = self.get_taskoperator_emails(search_obj.group())
    else:
        fwd_mails = self.get_admin_emails()

    # ROBUSTNESS (mirrors the sibling implementation): log and drop instead of
    # silently forwarding to nobody when no address could be determined.
    if not fwd_mails:
        logmsg = ("No forward address found for a question from user with "
                  "email={0}; question dropped.").format(user_email)
        c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
        return

    for mail_address in fwd_mails:
        c.send_email(self.sender_queue, mail_address, "", "QFwd", "",
                     mail, messageid)
    c.increment_db_statcounter(self.semesterdb, 'nr_questions_received',
                               self.logger_queue, self.name)
def _create_closed_ticket(report, user):
    """Create a ticket for `report`, notify the provider, then close the
    ticket with the 'invalid' resolution.

    :param `Report` report: the report to attach (gets a fresh ticket)
    :param user: the acting user, forwarded to `common.close_ticket`
    """
    report.ticket = common.create_ticket(report, attach_new=False)
    report.save()

    # Add temp proof(s) for mail content
    temp_proofs = []
    if not report.ticket.proof.count():
        temp_proofs = common.get_temp_proofs(report.ticket)

    # Send email to Provider; any failure (bad/missing address) is treated as
    # best-effort and deliberately swallowed below.
    try:
        validate_email(report.provider.email.strip())
        Logger.info(unicode('Sending email to provider'))
        common.send_email(report.ticket, [report.provider.email], settings.CODENAMES['not_managed_ip'])
        report.ticket.save()
        Logger.info(unicode('Mail sent to provider'))
        ImplementationFactory.instance.get_singleton_of('MailerServiceBase').close_thread(report.ticket)
        # Delete temp proof(s) now that the mail was rendered/sent
        for proof in temp_proofs:
            Proof.objects.filter(id=proof.id).delete()
    except (AttributeError, TypeError, ValueError, ValidationError):
        pass

    # Close regardless of whether the provider could be notified
    common.close_ticket(report, resolution_codename=settings.CODENAMES['invalid'], user=user)
    Logger.info(unicode('Ticket %d and report %d closed' % (report.ticket.id, report.id)))
def email_currency(**context):
    """Airflow callable: mail today's dollar rates for a fixed set of
    currencies together with yesterday's rates and the relative change.
    """
    today = context['task_instance'].xcom_pull(task_ids='convert_data')
    yesterday = context['task_instance'].xcom_pull(task_ids='yesterday_currency')

    def rate(frame, name):
        # Scalar dollar rate for one currency out of the pulled frame.
        return float(frame[frame['currency_name'] == name]['rate_to_dollar'])

    names = ('gbp', 'euro', 'chf', 'dkk')
    gbp, euro, chf, dkk = (rate(today, n) for n in names)
    gbp_y, euro_y, chf_y, dkk_y = (rate(yesterday, n) for n in names)

    # Relative change from yesterday to today, rounded to 4 places.
    gbp_d, euro_d, chf_d, dkk_d = (
        round((y - t) / y, 4)
        for y, t in ((gbp_y, gbp), (euro_y, euro), (chf_y, chf), (dkk_y, dkk))
    )

    send_email(
        subject='Daily Currency {}'.format(context['ds']),
        message='',
        html_info=get_email_body(gbp, euro, chf, dkk, gbp_y, euro_y, chf_y,
                                 dkk_y, gbp_d, euro_d, chf_d, dkk_d),
        config_file_path='{}'.format(os.environ.get('EMAIL_YAML_PERSONAL')))
def __send_email(ticket, email, codename, lang='EN'):
    """Thin wrapper around `common.send_email` for a single recipient."""
    recipients = [email]
    common.send_email(ticket, recipients, codename, lang=lang)
def send():
    """Collect report files (and per-case result files) and mail them as
    attachments; returns the list of file paths handed to send_email.
    """
    filelist = []
    try:
        reportfiles = os.listdir(REPORTPATH)
        if reportfiles:
            for f in reportfiles:
                full = os.path.join(REPORTPATH, f)
                if not os.path.isfile(full):
                    continue  # not a regular file — skip
                # BUG FIX: the original used split('.')[1], which raises
                # IndexError for names without a dot and misreads names with
                # several dots; splitext handles both.
                root, ext = os.path.splitext(f)
                if ext != '.html':
                    continue  # only html reports are mailed
                if root == 'index':
                    # 'index.html' marks an allure report: zip the whole dir
                    filelist.append(DataOperate().zip_file(REPORTPATH))
                else:
                    # beautiful / pytest-html report: attach the file directly
                    filelist.append(full)
        else:
            print('目录下没有文件')

        # add per-case result files: subdirectories under CASEPATH hold the
        # cases that write back results
        casefiles = os.listdir(CASEPATH)
        if casefiles:
            for f in casefiles:
                sub = os.path.join(CASEPATH, f)
                if os.path.isdir(sub):
                    for i in os.listdir(sub):
                        filelist.append(os.path.join(sub, i))
        else:
            print('目录下没有文件')

        # attach everything collected above
        send_email(filepath=filelist)
    except Exception as e:
        # best-effort: report the problem but still return what was collected
        print(e)
    return filelist
def add_new_user(self, user_name, user_email):
    """
    Add the necessary entries to database for a newly registered user.

    Inserts the Users row, creates the user's submission directory, then
    either queues generation of task 1 or (if no generator script is
    configured) emails the static task description.
    """
    curs, cons = c.connect_to_db(self.semesterdb, self.logger_queue, self.name)

    logmsg = 'New Account: User: %s' % user_name
    c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")

    # FirstMail is set to "now"; the user starts at CurrentTask 1
    data = {'Name': user_name, 'Email': user_email, 'TimeNow': str(int(time.time()))}
    sql_cmd = ("INSERT INTO Users "
               "(UserId, Name, Email, FirstMail, LastDone, CurrentTask) "
               "VALUES(NULL, :Name, :Email, datetime(:TimeNow, 'unixepoch', 'localtime')"
               ", NULL, 1)")
    curs.execute(sql_cmd, data)
    cons.commit()

    # the new user has now been added to the database. Next we need
    # to send him an email with the first task.

    # read back the new users UserId and create a directory for putting his
    # submissions in.
    data = {'Email': user_email}
    sql_cmd = "SELECT UserId FROM Users WHERE Email = :Email"
    curs.execute(sql_cmd, data)
    res = curs.fetchone()
    user_id = str(res[0])
    dir_name = 'users/'+ user_id
    c.check_dir_mkdir(dir_name, self.logger_queue, self.name)
    cons.close()

    # NOTE: messageid is empty, cause this will be sent out by the welcome message!
    curc, conc = c.connect_to_db(self.coursedb, self.logger_queue, self.name)
    sql_cmd = "SELECT GeneratorExecutable FROM TaskConfiguration WHERE TaskNr == 1"
    curc.execute(sql_cmd)
    res = curc.fetchone()
    conc.close()

    if res != None:
        logmsg = "Calling Generator Script: " + str(res[0])
        c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
        logmsg = "UserID " + user_id + ",UserEmail " + user_email
        c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
        # hand generation of task 1 over to the generator thread
        self.gen_queue.put(dict({"user_id": user_id, "user_email": user_email, \
                                 "task_nr": "1", "message_id": ""}))
    else:
        # If there is no generator script, we assume, that there is a static
        # description.txt which shall be used.
        c.send_email(self.sender_queue, user_email, user_id, "Task", "1", "", "")
def action_by_subject(self, user_id, user_email, messageid, mail, mail_subject):
    """Dispatch an incoming mail by keyword in its subject line.

    Recognized (case-insensitively): RESULT <nr>, QUESTION, STATUS and —
    when skipping is enabled — SKIP. Anything else gets a usage mail.
    """
    def send_usage_mail():
        # Shared fallback: the original duplicated this block twice.
        logmsg = ("Got a kind of message I do not understand. "
                  "Sending a usage mail...")
        c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
        c.send_email(self.sender_queue, user_email, "", "Usage", "", \
                     "", messageid)

    if re.search('[Rr][Ee][Ss][Uu][Ll][Tt]', mail_subject):
        ###############
        #   RESULT    #
        ###############
        search_obj = re.search('[0-9]+', mail_subject)
        if search_obj is None:
            # Result keyword without a task number
            send_usage_mail()
            return
        # Result + number
        self.a_result_was_submitted(user_id, user_email, search_obj.group(),
                                    messageid, mail)
    elif re.search('[Qq][Uu][Ee][Ss][Tt][Ii][Oo][Nn]', mail_subject):
        ###############
        #  QUESTION   #
        ###############
        self.a_question_was_asked(user_id, user_email, mail, messageid)
    elif re.search('[Ss][Tt][Aa][Tt][Uu][Ss]', mail_subject):
        ###############
        #   STATUS    #
        ###############
        self.a_status_is_requested(user_id, user_email, messageid)
    elif self.allow_skipping and re.search('[Ss][Kk][Ii][Pp]', mail_subject):
        ####################
        # SKIP, IF ALLOWED #
        ####################
        self.skip_was_requested(user_id, user_email, messageid)
    else:
        #####################
        #  DEFAULT ACTION   #
        #####################
        send_usage_mail()
def confirm_email():
    """Flask POST handler: validate the submitted email, make sure the user
    is not already in this device's bucket, then send a confirmation link
    and record the pending (user, uuid, sale_device) row.
    """
    user_email = request.form['user_email']
    device_id = request.form['device_id']
    sale_id = request.form['sale_id']

    # only company addresses may participate
    if not user_email.endswith("@flexential.com"):
        flash("You must use your @flexential.com email address.", "error")
        return redirect(url_for('show_sale', sale_id=sale_id))

    user_id = get_user_id_by_user_email(user_email)

    # SECURITY(review): SQL built by string interpolation here and below —
    # should use parameterized queries.
    user_already_in_bucket = query_db(
        "select count(usd.user_id) from tbl_user_sale_device usd join tbl_sale_device sd on usd.sale_device_id=sd.sale_device_id where sd.sale_id=%s and sd.device_id=%s and usd.user_id=%s"
        % (sale_id, device_id, user_id))[0][0]

    if user_already_in_bucket > 0:
        # duplicate entry — tell the user and bail out
        device_name = get_device_name_by_device_id(device_id)
        sale_name = get_sale_name_by_sale_id(sale_id)
        flash(
            "There is already an entry for you under the bucket for a %s in %s"
            % (device_name, sale_name), "error")
        return redirect(url_for('show_sale', sale_id=sale_id))

    (sale_name, sale_date) = get_sale_details_by_sale_id(sale_id)
    (device_name, device_description, price) = get_device_details_by_device_id(device_id)
    uuid = generate_uuid()
    baseuri = get_base_uri()
    company_name = get_company_name()

    # mail the confirmation link (carries the one-time uuid)
    send_email(
        render_template('confirm_email.html',
                        sale_name=sale_name,
                        sale_date=sale_date,
                        device_name=device_name,
                        device_description=device_description,
                        price=price,
                        sale_id=sale_id,
                        uuid=uuid,
                        baseuri=baseuri,
                        company_name=company_name), user_email,
        'Email Confirmation')
    print "device_id = %s" % device_id
    print "sale_id = %s" % sale_id

    sale_device_id = get_sale_device_id(device_id, sale_id)
    # SECURITY(review): string-built INSERT — parameterize.
    query = "insert into tbl_user_uuid(user_id, uuid, sale_device_id) values(%s, '%s', %s)" % (
        user_id, uuid, sale_device_id)
    insert_to_db(query)
    flash(
        "Please check your email for a verification link to add your name to the bucket for the %s"
        % device_name)
    return redirect(url_for('show_sale', sale_id=sale_id))
def __send_ack(report, lang=None):
    """
        Send acknoledgement to provider

        :param `abuse.models.Report` report: A `abuse.models.Report` instance
        :param string lang: The langage to use
    """
    provider_tags = report.provider.tags.all().values_list('name', flat=True)
    if settings.TAGS['no_autoack'] not in provider_tags:
        common.send_email(
            report.ticket,
            [report.provider.email],
            settings.CODENAMES['ack_received'],
            lang=lang,
            acknowledged_report_id=report.id,
        )
    # Re-read the ticket from the DB before saving — presumably to pick up
    # changes made by the mail pipeline (verify).
    report.ticket = Ticket.objects.get(id=report.ticket.id)
    report.save()
def execute(self, data):
    """
    1. Run the load test
    2. Read total requests and average/min/max response times from the
       generated locust_stats.csv
    3. Extract those fields and send them by email
    :param data: dict with the locust script source and run parameters
                 (uses keys 'code', 'user', 'rate', 'time')
    :return:
    """
    code = data.get('code')
    # timestamp used to name the script file and the report directory
    test_time = strftime('%Y%m%d-%H%M%S')
    CMD = "locust --headless -u {user} -r {rate} -t {time} --csv={stat_file_name} -f {locust_file}"
    # build the script and report paths
    path = os.path.dirname(os.path.abspath(__file__))
    locust_file = os.sep.join([path, 'scripts', f'locust_script_{test_time}.py'])
    stat_file_path = os.sep.join([path, 'reports', test_time])
    os.mkdir(stat_file_path)
    stat_file_name = os.sep.join([stat_file_path, 'locust'])
    # persist the received script code to a file
    with open(locust_file, 'w', encoding='utf-8') as f:
        f.write(code)
    # deep-copy data so the format() kwargs below do not mutate the caller's dict
    fmt_data = deepcopy(data)
    fmt_data['stat_file_name'] = stat_file_name
    fmt_data['locust_file'] = locust_file
    cmd = CMD.format(**fmt_data)
    os.system(cmd)
    # wait for the test to finish and the report to be written
    # NOTE(review): os.system already blocks until locust exits, so this sleep
    # looks redundant — confirm before removing.
    sleep(int(fmt_data['time']) + 1)
    # read the report content and build the mail body
    email_content = ""
    with open(os.sep.join([stat_file_path, 'locust_stats.csv']), 'r') as f:
        report_lines = f.readlines()
        print(report_lines)
        # skip the CSV header and the two trailing aggregate rows
        for line in report_lines[1:-2]:
            if line != '\n':
                line = line.split(',')
                email_content += f'''<h3>{line[1]}</h3>
                <div>
                    <label>请求总数:{line[2]}</label>
                    <label>平均响应:{line[5]}</label>
                    <label>最小响应:{line[6]}</label>
                    <label>最大响应:{line[7]}</label>
                </div><p>'''
    send_email('*****@*****.**', email_content)
def skip_was_requested(self, user_id, user_email, messageid):
    """Advance the user to the next task, provided that task exists and has
    already started; otherwise send a "SkipNotPossible" mail.
    """
    # at which task_nr is the user
    cur_task = c.user_get_current_task(self.semesterdb, user_id,
                                       self.logger_queue, self.name)
    next_task = cur_task + 1
    logmsg = ("Skip requested: User with UserId:{0}, from "
              "TaskNr= {1} to {2}").format(user_id, cur_task, next_task)
    c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")

    # skippable only when the next task exists AND has already started
    skippable = c.is_valid_task_nr(self.coursedb, next_task,
                                   self.logger_queue, self.name)
    if skippable:
        task_starttime = c.get_task_starttime(self.coursedb, next_task,
                                              self.logger_queue, self.name)
        skippable = task_starttime < datetime.datetime.now()

    if not skippable:
        logmsg = ("Skip NOT POSSIBLE: User with UserId:{0}, from "
                  "TaskNr= {1} to {2}").format(user_id, cur_task, next_task)
        c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
        c.send_email(self.sender_queue, user_email, "", "SkipNotPossible",
                     "", "", messageid)
        return

    # set new current task
    c.user_set_current_task(self.semesterdb, next_task, user_id,
                            self.logger_queue, self.name)

    # tell generator thread to create new task
    logmsg = ("Calling Generator to create "
              "TaskNr:{0} for UserId:{1}").format(next_task, user_id)
    c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
    self.gen_queue.put(dict({"user_id": user_id, "user_email": user_email,
                             "task_nr": next_task, "messageid": messageid}))

    logmsg = ("Skip done: User with UserId:{0}, from "
              "TaskNr= {1} to {2}").format(user_id, cur_task, next_task)
    c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
def a_question_was_asked(self, user_id, user_email, mail, messageid):
    """
    Process a question that was asked by a user.

    Acknowledges receipt, then forwards the question to the task operators
    (when the subject names a valid task number) or to the admins.
    """
    mail_subject = str(mail['subject'])
    logmsg = 'The user has a question, please take care of that!'
    c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
    # acknowledge receipt to the user
    c.send_email(self.sender_queue, user_email, "", "Question", "", "", "")

    # was the question asked to a specific task_nr that is valid?
    search_obj = re.search('[0-9]+', mail_subject)
    if (search_obj is not None) and int(search_obj.group()) <= c.get_num_tasks(
            self.coursedb, self.logger_queue, self.name):
        tasknr = search_obj.group()
        fwd_mails = self.get_taskoperator_emails(tasknr)
        if fwd_mails == "":
            logmsg = ("Error getting the taskoperator email for task {0}. "
                      "Question from user with email={1} "
                      "dropped.").format(tasknr, user_email)
            c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
            return
    else:
        fwd_mails = self.get_admin_emails()
        if fwd_mails == "":
            # BUG FIX: the original log message referenced `tasknr`, which is
            # unbound in this branch and raised NameError when it ran.
            logmsg = ("Error getting the admin email. Question from user "
                      "with email={0} dropped.").format(user_email)
            c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
            return

    for mail_address in fwd_mails:
        c.send_email(self.sender_queue, mail_address, user_id, "QFwd", "",
                     mail, messageid)
    c.increment_db_statcounter(self.semesterdb, 'nr_questions_received',
                               self.logger_queue, self.name)
def a_status_is_requested(self, user_email, messageid):
    """
    Process a question about a user status.

    Looks the user up by email and mails back his current task number.
    """
    curs, cons = c.connect_to_db(self.semesterdb, self.logger_queue, self.name)

    data = {'Email': user_email}
    sql_cmd = "SELECT UserId, CurrentTask FROM Users WHERE Email == :Email"
    curs.execute(sql_cmd, data)
    res = curs.fetchone()
    cons.close()

    # BUG FIX: an unknown sender made fetchone() return None and the original
    # crashed with TypeError on res[0]; log and drop instead.
    if res is None:
        logmsg = "Status requested for unknown user email: {0}".format(user_email)
        c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
        return

    user_id = res[0]
    current_task = res[1]

    c.send_email(self.sender_queue, user_email, user_id, "Status", current_task,
                 "", messageid)
    c.increment_db_statcounter(self.semesterdb, 'nr_status_requests',
                               self.logger_queue, self.name)
def request_bucket_list(sale_id):
    """GET: render the request form. POST: email the caller's bucket list
    for this sale and redirect back to the sale page.
    """
    if request.method != 'POST':
        # plain page view — just show the form
        return render_template('request_bucket_list.html',
                               sale_name=get_sale_name_by_sale_id(sale_id),
                               projecthash=get_hash_of_project())

    user_email = request.form['user_email']
    items = get_bucket_list(user_email, sale_id)
    (sale_name, sale_date) = get_sale_details_by_sale_id(sale_id)
    company_name = get_company_name()
    send_email(
        render_template('send_bucket_list.html',
                        items=items,
                        sale_name=sale_name,
                        sale_date=sale_date,
                        company_name=company_name), user_email, 'Bucket List')
    flash("Your bucket list will be sent to %s" % user_email)
    return redirect(url_for('show_sale', sale_id=sale_id))
def suite():
    """Build the API test suite, run it with an HTML reporter and mail the
    generated report file.
    """
    test_suite = unittest.TestSuite()
    test_suite.addTests(unittest.TestLoader().loadTestsFromTestCase(Test_api))

    report_path = "./config/report/"
    # BUG FIX: the original bound `time = datetime.now()`, shadowing the
    # `time` module (and the local `suite` shadowed this function).
    now = datetime.now().strftime('%Y-%m-%d %H-%M-%S')
    report_file = report_path + now + "_html_report.html"
    if not os.path.exists(report_path):
        os.mkdir(report_path)

    with open(report_file, 'wb') as file:
        runner = HTMLTestRunner(stream=file,
                                verbosity=2,
                                title="特斯汀接口测试",
                                description="特斯汀接口测试")
        runner.run(test_suite)
    send_email(report_file)
def a_status_is_requested(self, user_id, user_email, messageid):
    """
    Tell sender to send out a status email.
    """
    logmsg = ("STATUS requested: User with UserId:{0}, Email: {1}").format(
        user_id, user_email)
    c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")

    # look up which task the user is currently at
    curs, cons = c.connect_to_db(self.semesterdb, self.logger_queue, self.name)
    curs.execute("SELECT CurrentTask FROM Users WHERE UserId == :user_id",
                 {'user_id': user_id})
    current_task = curs.fetchone()[0]
    cons.close()

    c.send_email(self.sender_queue, user_email, user_id, "Status", current_task,
                 "", messageid)
    c.increment_db_statcounter(self.semesterdb, 'nr_status_requests',
                               self.logger_queue, self.name)
def task_list_requested(self, user_id, user_email, message_id):
    """
    Process a request for the list of tasks for the course.
    """
    logmsg = ("TASKSLIST requested: User with UserId:{0}, Email: {1}").format(
        user_id, user_email)
    c.log_a_msg(self.queues["logger"], self.name, logmsg, "DEBUG")

    # the user's current task is included in the tasks-list mail
    curs, cons = c.connect_to_db(self.dbs["semester"], self.queues["logger"],
                                 self.name)
    curs.execute("SELECT CurrentTask FROM Users WHERE UserId == :user_id",
                 {'user_id': user_id})
    current_task = curs.fetchone()[0]
    cons.close()

    c.send_email(self.queues["sender"], user_email, user_id, "TasksList",
                 current_task, "", message_id)
def a_task_is_requested(self, user_id, user_email, task_nr, message_id):
    """
    Process a request for a certain task_nr.

    Check if that task exists, if it is active, and if the deadline has
    not passed yet. If yes put in generator queue; otherwise mail the
    matching rejection.
    """
    logmsg = "Processing a Task Request, UserId:{0} TaskNr:{1}".format(
        user_id, task_nr)
    c.log_a_msg(self.queues["logger"], self.name, logmsg, "INFO")

    # unknown task number -> reject immediately
    if not c.is_valid_task_nr(self.dbs["course"], task_nr,
                              self.queues["logger"], self.name):
        c.send_email(self.queues["sender"], user_email, "", "InvalidTask",
                     str(task_nr), "", message_id)
        return

    # get time now, deadline and starttime of task
    time_now = datetime.datetime.now()
    starttime = c.get_task_starttime(self.dbs["course"], task_nr,
                                     self.queues["logger"], self.name)
    deadline = c.get_task_deadline(self.dbs["course"], task_nr,
                                   self.queues["logger"], self.name)

    if starttime > time_now:
        # task not active yet
        c.log_a_msg(self.queues["logger"], self.name, "Task not active", "DEBUG")
        c.send_email(self.queues["sender"], user_email, "", "TaskNotActive",
                     str(task_nr), "", message_id)
        return

    if deadline < time_now:
        # deadline passed for that task_nr!
        c.log_a_msg(self.queues["logger"], self.name, "Deadline passed", "DEBUG")
        c.send_email(self.queues["sender"], user_email, "", "DeadTask",
                     str(task_nr), "", message_id)
        return

    logmsg = ("Calling Generator to create"
              "TaskNr:{0} for UserId:{1}").format(task_nr, user_id)
    c.log_a_msg(self.queues["logger"], self.name, logmsg, "DEBUG")
    c.generate_task(self.queues["generator"], user_id, task_nr, user_email,
                    message_id)
def a_result_was_submitted(self, user_id, user_email, task_nr, messageid, mail):
    """Validate a result submission for `task_nr` and, if acceptable, store
    the attachments and enqueue a worker job; otherwise mail a rejection.
    """
    logmsg = "Processing a Result, UserId:{0} TaskNr:{1}".format(user_id, task_nr)
    c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")

    # at which task_nr is the user
    cur_task = c.user_get_current_task(self.semesterdb, user_id,
                                       self.logger_queue, self.name)

    # task with this tasknr exists?
    if not c.is_valid_task_nr(self.coursedb, task_nr, self.logger_queue,
                              self.name):
        c.send_email(self.sender_queue, user_email, "", "InvalidTask",
                     str(task_nr), "", messageid)
        return

    # task_nr is valid, get deadline
    deadline = c.get_task_deadline(self.coursedb, task_nr, self.logger_queue,
                                   self.name)

    if cur_task < int(task_nr):
        # task_nr valid, but user has not reached that tasknr yet
        c.log_a_msg(self.logger_queue, self.name,
                    "User can not submit for this task yet.", "DEBUG")
        c.send_email(self.sender_queue, user_email, "", "TaskNotSubmittable",
                     str(task_nr), "", messageid)
        return

    if deadline < datetime.datetime.now():
        # deadline passed for that task_nr!
        c.log_a_msg(self.logger_queue, self.name, "Deadline passed", "DEBUG")
        c.send_email(self.sender_queue, user_email, "", "DeadTask",
                     str(task_nr), "", messageid)
        return

    # okay, let's work with the submission:
    # save the attached files to user task directory
    self.save_submission_user_dir(user_email, task_nr, mail, messageid)
    # send job request to worker
    self.job_queue.put(dict({"UserId": user_id, "UserEmail": user_email,
                             "message_type": "Task", "taskNr": task_nr,
                             "MessageId": messageid}))
def process_results(self, *args):
    """Post-test pipeline for one finished test: stop monitors, pull result
    archives from the exec/stat hosts, unpack them, clean up the agents and
    email the contents of results.csv to the test owner.

    NOTE(review): args[1] is the running test object, args[2] its status
    string — the meaning of args[0] is not visible here; confirm at call
    sites. Returns "SUCCESS" unconditionally.
    """
    #set test status completed
    #call stop monitors
    #send prepare results command to exec
    #set test status collating
    #copy results files from exec
    #copy files from each mon
    #set test status finished
    #remove test from running Q
    t = args[1]
    status = args[2]
    # round-trip through serialization as a consistency check on the object
    serialize_str = t.serialize()
    t2 = testobj.testDefn()
    t2.deserialize(serialize_str)
    try:
        if (t.testobj.TestInputData.testid != t2.testobj.TestInputData.testid):
            lctx.error("testobj not same")
            raise Exception("Test objects do not match : ",
                            t2.testobj.TestInputData.testid)
        ip = t.testobj.TestInputData.exechostname
        lctx.debug(status)
        if status == "completed":
            # ask the exec host to package its results
            retsend = self.cl.send(
                ip, self.CPORT,
                self.ev.construct("DAYTONA_PREPARE_RESULTS", serialize_str))
            lctx.debug(retsend)
            if retsend.split(",")[1] != "SUCCESS":
                lctx.error(retsend)
                raise Exception(
                    "Daytona command DAYTONA_PREPARE_RESULTS failed : ",
                    t2.testobj.TestInputData.testid)
            #get statistics hosts
            for s in t.testobj.TestInputData.stathostname.split(','):
                #stop stats monitors on req hosts
                #any host that blocks stop monitor blocks the scheduling for the FW
                p = 0
                if s.strip() == self.HOST:
                    p = self.PORT
                else:
                    p = self.CPORT
                retsend = self.cl.send(
                    s.strip(), p,
                    self.ev.construct("DAYTONA_STOP_MONITOR", serialize_str))
                lctx.debug(retsend)
                if retsend.split(",")[1] != "SUCCESS":
                    lctx.error(retsend)
                    raise Exception(
                        "Daytona command DAYTONA_STOP_MONITOR failed : ",
                        t2.testobj.TestInputData.testid)
            t.updateStatus("completed", "collating")
            #todo : avoid send client its own ip
            lctx.debug("SENDING results.tgz download to : " + ip + ":" +
                       str(self.CPORT))
            # pull the main results archive from the exec host
            results_file = cfg.daytona_agent_root + "/" + t.testobj.TestInputData.frameworkname + "/" + str(
                t.testobj.TestInputData.testid) + "/results/" + "results.tgz"
            retsend = self.cl.send(
                ip, self.CPORT,
                self.ev.construct(
                    "DAYTONA_FILE_DOWNLOAD",
                    str(self.HOST) + "," + str(self.PORT) + "," +
                    results_file + "," + serialize_str + "," + "RESULTS" +
                    "," + ip))
            lctx.debug(retsend)
            if retsend.split(",")[1] != "SUCCESS":
                lctx.error(retsend)
                raise Exception(
                    "Daytona command DAYTONA_FILE_DOWNLOAD failed :",
                    t2.testobj.TestInputData.testid)
            # pull the per-host statistics archives
            results_file = cfg.daytona_agent_root + "/" + t.testobj.TestInputData.frameworkname + "/" + str(
                t.testobj.TestInputData.testid) + "/results/" + "results_stats.tgz"
            for s in t.testobj.TestInputData.stathostname.split(','):
                lctx.info("Downloading stats from STATS self.HOSTS : " + s)
                lctx.info(s)
                #stop stats monitors on req hosts
                #any host that blocks stop monitor blocks the scheduling for the FW
                p = 0
                if s.strip() == self.HOST:
                    p = self.PORT
                else:
                    p = self.CPORT
                lctx.info("Sending DOWNLOAD file to :" + s.strip() + ":" +
                          str(p) + "File :" + results_file +
                          "(upload to this host port:)" + str(self.HOST) +
                          "," + str(self.PORT))
                retsend = self.cl.send(
                    s.strip(), p,
                    self.ev.construct(
                        "DAYTONA_FILE_DOWNLOAD",
                        str(self.HOST) + "," + str(self.PORT) + "," +
                        results_file + "," + serialize_str + "," + "STATS" +
                        "," + s.strip()))
                lctx.debug(retsend)
                if retsend.split(",")[1] != "SUCCESS":
                    lctx.error("Error downloading " + results_file +
                               " From " + s.strip() + ":" + retsend)
                    raise Exception(
                        "Daytona command DAYTONA_FILE_DOWNLOAD failed :",
                        t2.testobj.TestInputData.testid)
            # unpack everything next to the download locations
            try:
                lctx.debug(t2.testobj.TestInputData.exec_results_path +
                           "results.tgz")
                lctx.debug(t2.testobj.TestInputData.exec_results_path + "/../")
                common.untarfile(
                    t2.testobj.TestInputData.exec_results_path +
                    "/results.tgz",
                    t2.testobj.TestInputData.exec_results_path + "/../")
                for s in t2.testobj.TestInputData.stats_results_path:
                    lctx.debug(t2.testobj.TestInputData.stats_results_path[s] +
                               "results_stats.tgz")
                    lctx.debug(t2.testobj.TestInputData.stats_results_path[s] +
                               "/../")
                    common.untarfile(
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "/results_stats.tgz",
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "/../")
            except Exception as e:
                lctx.error("Error in untar results")
                lctx.error(e)
                raise Exception("test result processing error",
                                t2.testobj.TestInputData.testid)
            #todo : invoke other scripts to transform results and update DB
    except Exception as e:
        lctx.error("Error in processing results")
        lctx.error(e)
        t.updateStatus("collating", "failed")
    # cleanup/finish on the exec host and every stats host; failures here are
    # logged but do not abort the function
    try:
        retsend = self.cl.send(
            ip, self.CPORT,
            self.ev.construct("DAYTONA_CLEANUP_TEST", serialize_str))
        lctx.debug("DAYTONA_CLEANUP_TEST:" + str(retsend))
        retsend = self.cl.send(
            ip, self.CPORT,
            self.ev.construct("DAYTONA_FINISH_TEST", serialize_str))
        lctx.debug(retsend)
        for s in t.testobj.TestInputData.stathostname.split(','):
            p = 0
            if s.strip() == self.HOST:
                p = self.PORT
            else:
                p = self.CPORT
            lctx.debug("self.HOST : " + s.strip())
            lctx.debug("PORT to send CLEANUP & FINISH : " + str(p))
            retsend = self.cl.send(
                s.strip(), p,
                self.ev.construct("DAYTONA_CLEANUP_TEST", serialize_str))
            lctx.debug(retsend)
            retsend = self.cl.send(
                s.strip(), p,
                self.ev.construct("DAYTONA_FINISH_TEST", serialize_str))
            lctx.debug(retsend)
    except Exception as e:
        lctx.error("Error in processing results")
        t.updateStatus("collating", "failed")
    t.updateStatus("collating", "finished clean")
    now = time.time()
    tstr = str(time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(now)))
    t.updateEndTime(tstr)
    # render results.csv as an HTML table for the notification mail
    f = open(t2.testobj.TestInputData.exec_results_path + "/results.csv")
    to = t.testobj.TestInputData.email
    reader = csv.reader(f)
    htmlfile = ""
    rownum = 0
    htmlfile = '<table cellpadding="10">'
    for row in reader:
        if rownum == 0:
            # first CSV row becomes the table header
            htmlfile = htmlfile + '<tr>'
            for column in row:
                htmlfile = htmlfile + '<th width="70%">' + column + '</th>'
            htmlfile = htmlfile + '</tr>'
        else:
            htmlfile = htmlfile + '<tr>'
            for column in row:
                htmlfile = htmlfile + '<td width="70%">' + column + '</td>'
            htmlfile = htmlfile + '</tr>'
        rownum += 1
    htmlfile = htmlfile + '</table>'
    f.close()
    subject = "Test {} completed successfully".format(
        t.testobj.TestInputData.testid)
    mail_content = "<BR> Test id : {} \
    <BR> Framework : {} \
    <BR> Title : {} <BR>".format(t.testobj.TestInputData.testid,
                                 t.testobj.TestInputData.frameworkname,
                                 t.testobj.TestInputData.title)
    mail_content = mail_content + "<BR>==========================================================<BR>"
    mail_content = mail_content + "<BR>Purpose : {} <BR> \
    <BR> Creation time : {} \
    <BR>Start time : {} \
    <BR>End time : {} <BR>".format(t.testobj.TestInputData.purpose,
                                   t.testobj.TestInputData.creation_time,
                                   t.testobj.TestInputData.start_time,
                                   t.testobj.TestInputData.end_time)
    mail_content = mail_content + "<BR>Your test executed successfully. \
    <BR>Results (Contents of results.csv)<BR>"
    mail_content = mail_content + "<BR>==========================================================<BR>"
    mail_content = mail_content + "<BR>" + htmlfile + "<BR>"
    try:
        common.send_email(subject, to, mail_content, "", lctx, cfg.email_user,
                          cfg.email_server, cfg.smtp_server, cfg.smtp_port)
    except:
        # best-effort notification; a mail failure must not fail the pipeline
        lctx.error("Mail send error")
    return "SUCCESS"
def notify_recipients(smtp_host, smtp_port, userName, password, recipients, timeslots):
    """Build the MVC real-id message for the recipients and mail it out.

    Delegates message construction to create_mvc_real_id_message and
    delivery to common.send_email; no value is returned.
    """
    message = create_mvc_real_id_message(userName, recipients, timeslots)
    common.send_email(smtp_host, smtp_port, userName, password, message)
def create_ticket_from_phishtocheck(report=None, user=None):
    """
        Create/attach report to ticket + block_url + mail to defendant + email to provider

        :param int report: The id of the `abuse.models.Report`
        :param int user: The id of the `abuse.models.User`
    """
    # Both params accept either a model instance or a raw id; ids are
    # resolved to instances here, aborting quietly if lookup fails.
    if not isinstance(report, Report):
        try:
            report = Report.objects.get(id=report)
        except (AttributeError, ObjectDoesNotExist, TypeError, ValueError):
            Logger.error(unicode('Report %d cannot be found in DB. Skipping...' % (report)))
            return

    if not isinstance(user, User):
        try:
            user = User.objects.get(id=user)
        except (AttributeError, ObjectDoesNotExist, TypeError, ValueError):
            Logger.error(unicode('User %d cannot be found in DB. Skipping...' % (user)))
            return

    # Create/attach to ticket
    ticket = database.search_ticket(report.defendant, report.category, report.service)
    new_ticket = False

    if not ticket:
        ticket = database.create_ticket(report.defendant, report.category, report.service, priority=report.provider.priority)
        new_ticket = True
        # Schedule the phishing-timeout job for the freshly created ticket
        # after the configured grace period.
        utils.scheduler.enqueue_in(
            timedelta(seconds=settings.GENERAL_CONFIG['phishing']['wait']),
            'ticket.timeout',
            ticket_id=ticket.id,
            timeout=3600,
        )
        common.get_temp_proofs(ticket, only_urls=True)

    report.ticket = ticket
    report.status = 'Attached'
    report.save()
    # Two audit-log entries: the attachment itself and the manual
    # phishtocheck validation performed by `user`.
    database.log_action_on_ticket(
        ticket=ticket,
        action='attach_report',
        report=report,
        new_ticket=new_ticket
    )
    database.log_action_on_ticket(
        ticket=ticket,
        action='validate_phishtocheck',
        user=user,
        report=report
    )

    # Sending email to provider (skipped when provider is tagged no_autoack)
    if settings.TAGS['no_autoack'] not in report.provider.tags.all().values_list('name', flat=True):
        common.send_email(
            ticket,
            [report.provider.email],
            settings.CODENAMES['ack_received'],
            acknowledged_report_id=report.id,
        )

    # Asynchronously block the phishing URL and notify the defendant.
    utils.default_queue.enqueue('phishing.block_url_and_mail', ticket_id=ticket.id, report_id=report.id)
    return ticket
def run(self):
    """ Thread code for the worker thread.

    Blocks on the job queue; for every job it runs the configured test
    executable (or a static fallback script) for the submitted task,
    mails the user the outcome, and on success triggers generation or
    announcement of the next task.
    """
    logmsg = "Starting " + self.name
    c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")

    while True:
        logmsg = self.name + ": waiting for a new job."
        c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")
        # blocking get: the worker sleeps until a job is queued
        nextjob = self.job_queue.get(True)

        if nextjob:
            tasknr = nextjob.get("taskNr")
            user_id = nextjob.get("UserId")
            user_email = nextjob.get("UserEmail")
            message_id = nextjob.get("MessageId")

            logmsg = self.name + "got a new job: {0} from the user with id: {1}".format(str(tasknr), str(user_id))
            c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")

            # check if there is a test executable configured in the
            # database -- if not fall back on static test script.
            curc, conc = c.connect_to_db(self.coursedb, self.logger_queue, self.name)
            try:
                data = {"tasknr": tasknr}
                sql_cmd = "SELECT TestExecutable FROM TaskConfiguration WHERE TaskNr == :tasknr"
                curc.execute(sql_cmd, data)
                testname = curc.fetchone()
            except:
                # NOTE(review): if this query raises, `testname` stays
                # unbound and the access below fails -- confirm intended.
                logmsg = "Failed to fetch TestExecutable for TaskNr: {0}".format(tasknr)
                logmsg = logmsg + " from the Database! Table TaskConfiguration corrupted?"
                c.log_a_msg(self.logger_queue, self.name, logmsg, "ERROR")

            if testname != None:
                try:
                    data = {"tasknr": tasknr}
                    sql_cmd = "SELECT PathToTask FROM TaskConfiguration WHERE TaskNr == :tasknr"
                    curc.execute(sql_cmd, data)
                    path = curc.fetchone()
                    scriptpath = str(path[0]) + "/" + str(testname[0])
                except:
                    # if a testname was given, then a Path should be
                    # there as well!
                    logmsg = "Failed to fetch Path to Tasknr: {0}".format(tasknr)
                    logmsg = "{0} from the Database! Table TaskConfiguration corrupted?".format(logmsg)
                    c.log_a_msg(self.logger_queue, self.name, logmsg, "ERROR")
            else:
                # in case no testname was given, we fall back to the
                # static directory structure
                scriptpath = "tasks/task" + str(tasknr) + "/./tests.sh"
            conc.close()

            # get the task parameters
            task_params = self.get_task_parameters(user_id, tasknr)

            # run the test script
            logmsg = "Running test script: " + scriptpath
            c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")

            # stdout/stderr of the test run are appended to autosub logs
            command = '{0} {1} {2} "{3}" >> autosub.stdout 2>>autosub.stderr'.format(
                scriptpath, user_id, tasknr, task_params
            )
            test_res = os.system(command)

            if test_res:  # not 0 returned
                logmsg = "Test failed! User: {0} Task: {1}".format(user_id, tasknr)
                logmsg = logmsg + " return value:" + str(test_res)
                c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")

                c.send_email(
                    self.sender_queue, str(user_email), str(user_id), "Failed", str(tasknr), "", str(message_id)
                )

                if test_res == 512:
                    # Need to read up on this but os.system() returns
                    # 256 when the script returns 1 and 512 when the script returns 2, 768 when 3!
                    logmsg = "SecAlert: This test failed due to probable attack by user!"
                    c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")
                    c.send_email(
                        self.sender_queue,
                        str(user_email),
                        str(user_id),
                        "SecAlert",
                        str(tasknr),
                        "",
                        str(message_id),
                    )
                elif test_res == 768:
                    logmsg = "TaskAlert: This test for TaskNr {0} and User {1} failed due an error with task/testbench analyzation!".format(
                        tasknr, user_id
                    )
                    c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")
                    c.send_email(
                        self.sender_queue,
                        str(user_email),
                        str(user_id),
                        "TaskAlert",
                        str(tasknr),
                        "",
                        str(message_id),
                    )
            else:  # 0 returned
                logmsg = "Test succeeded! User: {0} Task: {1}".format(user_id, tasknr)
                c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")

                # Notify, the user that the submission was successful
                c.send_email(self.sender_queue, str(user_email), str(user_id), "Success", str(tasknr), "", "")

                curc, conc = c.connect_to_db(self.coursedb, self.logger_queue, self.name)

                currenttask = int(c.user_get_current_task(self.semesterdb, user_id, self.logger_queue, self.name))

                # Next, a new Task is generated -- but only if a new task
                # exists AND if a generator script exists (otherwise
                # static task description is assumed, AND if users current
                # task < the task that shall be generated (no Task has yet
                # been generated for this user yet).
                if currenttask < int(tasknr) + 1:
                    try:
                        data = {"tasknr": str(int(tasknr) + 1)}
                        sql_cmd = "SELECT GeneratorExecutable FROM TaskConfiguration WHERE TaskNr == :tasknr"
                        curc.execute(sql_cmd, data)
                        res = curc.fetchone()
                    except:
                        logmsg = "Failed to fetch Generator Script for Tasknr: {0}".format(tasknr)
                        logmsg = logmsg + "from the Database! Table TaskConfiguration corrupted?"
                        c.log_a_msg(self.logger_queue, self.name, logmsg, "ERROR")
                    finally:
                        conc.close()

                    task_start = c.get_task_starttime(self.coursedb, int(tasknr) + 1, self.logger_queue, self.name)

                    if task_start < datetime.datetime.now():
                        # task already started: either generate it or, with
                        # no generator configured, just announce it
                        if res != None:  # generator script for this task configured?
                            logmsg = "Calling Generator Script: " + str(res[0])
                            c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
                            logmsg = "UserID {0}, UserEmail {1}".format(user_id, user_email)
                            c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
                            self.gen_queue.put(
                                dict(
                                    {
                                        "user_id": str(user_id),
                                        "user_email": str(user_email),
                                        "task_nr": str(int(tasknr) + 1),
                                        "messageid": "",
                                    }
                                )
                            )
                        else:
                            c.send_email(
                                self.sender_queue,
                                str(user_email),
                                str(user_id),
                                "Task",
                                str(int(tasknr) + 1),
                                "",
                                str(message_id),
                            )
                    else:
                        # next task not started yet: tell the user the
                        # current one was their last (for now)
                        c.send_email(
                            self.sender_queue,
                            str(user_email),
                            str(user_id),
                            "CurLast",
                            str(int(tasknr) + 1),
                            "",
                            str(message_id),
                        )
def process_results(self, *args):
    """Collect, archive and report the results of a finished test run.

    Stops monitors on the exec and stat hosts, downloads and untars the
    result archives, renders results.csv as an HTML table and emails a
    summary (with a link to the test-info page) to the test owner.
    Always returns "SUCCESS"; failures are logged and reflected in the
    test status instead of being raised to the caller.
    """
    # set test status completed
    # call stop monitors
    # send prepare results command to exec
    # set test status collating
    # copy results files from exec
    # copy files from each mon
    # set test status finished
    # remove test from running Q
    t = args[1]
    status = args[2]
    serialize_str = t.serialize()
    t2 = testobj.testDefn()
    t2.deserialize(serialize_str)
    try:
        # sanity check: round-tripped object must describe the same test
        if t.testobj.TestInputData.testid != t2.testobj.TestInputData.testid:
            lctx.error("testobj not same")
            raise Exception("Test objects do not match : ", t2.testobj.TestInputData.testid)
        ip = t.testobj.TestInputData.exechostname
        lctx.debug(status)
        if status in ["completed", "timeout"]:
            retsend = self.cl.send(
                ip, self.CPORT,
                self.ev.construct("DAYTONA_STOP_MONITOR", str(t2.testobj.TestInputData.testid)))
            lctx.debug(retsend)
            if retsend.split(",")[1] != "SUCCESS":
                lctx.error(retsend)
                raise Exception(
                    "Daytona command DAYTONA_STOP_MONITOR failed : ",
                    t2.testobj.TestInputData.testid)

            # get statistics hosts
            for s in t.testobj.TestInputData.stathostname.split(','):
                # stop stats monitors on req hosts
                # any host that blocks stop monitor blocks the scheduling for the FW
                if len(s.strip()) == 0:
                    break
                p = self.CPORT
                try:
                    retsend = self.cl.send(
                        s.strip(), p,
                        self.ev.construct(
                            "DAYTONA_STOP_MONITOR",
                            str(t2.testobj.TestInputData.testid)))
                except:
                    # unreachable stat host: skip it rather than abort
                    continue
                lctx.debug(retsend)
                if retsend.split(",")[1] != "SUCCESS":
                    lctx.error(retsend)
                    raise Exception(
                        "Daytona command DAYTONA_STOP_MONITOR failed : ",
                        t2.testobj.TestInputData.testid)

            if t.testobj.TestInputData.timeout_flag:
                t.updateStatus("timeout", "collating")
            else:
                t.updateStatus("completed", "collating")

            ptop = process_top.ProcessTop(LOG.getLogger("processTop", "DH"))
            # todo : avoid send client its own ip
            lctx.info("SENDING results.tgz download to : " + ip + ":" + str(self.CPORT))
            retsend = self.cl.send(
                ip, self.CPORT,
                self.ev.construct("DAYTONA_FILE_DOWNLOAD", str(t2.testobj.TestInputData.testid)))
            lctx.debug(retsend)
            if retsend.split(",")[1] != "SUCCESS":
                lctx.error(retsend)
                raise Exception(
                    "Daytona command DAYTONA_FILE_DOWNLOAD failed :",
                    t2.testobj.TestInputData.testid)
            try:
                lctx.debug("Untar file : " + t2.testobj.TestInputData.exec_results_path +
                           "results.tgz to location : " +
                           t2.testobj.TestInputData.exec_results_path + "/../")
                common.untarfile(
                    t2.testobj.TestInputData.exec_results_path + "/results.tgz",
                    t2.testobj.TestInputData.exec_results_path + "/../")
            except Exception as e:
                lctx.error("Error in untar EXEC host results")
                lctx.error(e)
                raise Exception("test result processing error", t2.testobj.TestInputData.testid)
            # post-process sar/top output of the exec host
            ptop_ret = ptop.process_top_output(
                t2.testobj.TestInputData.stats_results_path[ip] + "sar/")
            lctx.debug(ptop_ret + " : " + t2.testobj.TestInputData.stats_results_path[ip])

            for s in t.testobj.TestInputData.stathostname.split(','):
                if len(s.strip()) == 0:
                    break
                lctx.info("Downloading stats from STATS self.HOSTS : " + s)
                lctx.info(s)
                # stop stats monitors on req hosts
                # any host that blocks stop monitor blocks the scheduling for the FW
                p = self.CPORT
                lctx.info("Sending results.tgz download to :" + s.strip() + ":" + str(p))
                try:
                    retsend = self.cl.send(
                        s.strip(), p,
                        self.ev.construct(
                            "DAYTONA_FILE_DOWNLOAD",
                            str(t2.testobj.TestInputData.testid)))
                except:
                    continue
                lctx.debug(retsend)
                if retsend.split(",")[1] != "SUCCESS":
                    lctx.error("Error downloading STATS from " + s.strip() + ":" + retsend)
                    raise Exception(
                        "Daytona command DAYTONA_FILE_DOWNLOAD failed :",
                        t2.testobj.TestInputData.testid)
                try:
                    lctx.debug(
                        "Untar file : " + t2.testobj.TestInputData.stats_results_path[s] +
                        "results.tgz to location : " +
                        t2.testobj.TestInputData.stats_results_path[s] + "/../")
                    common.untarfile(
                        t2.testobj.TestInputData.stats_results_path[s] + "/results.tgz",
                        t2.testobj.TestInputData.stats_results_path[s] + "/../")
                except Exception as e:
                    lctx.error("Error in untar STAT host " + s + " results")
                    lctx.error(e)
                    raise Exception("test result processing error", t2.testobj.TestInputData.testid)
                ptop_ret = ptop.process_top_output(
                    t2.testobj.TestInputData.stats_results_path[s] + "sar/")
                lctx.debug(ptop_ret + " : " + t2.testobj.TestInputData.stats_results_path[s])
            # todo : invoke other scripts to transform results and update DB
    except Exception as e:
        lctx.error("Error in processing results")
        lctx.error(e)
        t.updateStatus("collating", "failed")

    # Finish/cleanup phase runs regardless of whether collection succeeded.
    try:
        retsend = self.cl.send(
            ip, self.CPORT,
            self.ev.construct("DAYTONA_FINISH_TEST", str(t2.testobj.TestInputData.testid)))
        lctx.debug(retsend)
        for s in t.testobj.TestInputData.stathostname.split(','):
            if len(s.strip()) == 0:
                break
            p = self.CPORT
            lctx.debug("self.HOST : " + s.strip())
            lctx.debug("PORT to send CLEANUP & FINISH : " + str(p))
            try:
                retsend = self.cl.send(
                    s.strip(), p,
                    self.ev.construct(
                        "DAYTONA_FINISH_TEST",
                        str(t2.testobj.TestInputData.testid)))
            except:
                pass
            lctx.debug(retsend)
    except Exception as e:
        lctx.error("Error in processing results")
        t.updateStatus("collating", "failed")

    if t.testobj.TestInputData.timeout_flag:
        t.updateStatus("collating", "timeout clean")
    else:
        t.updateStatus("collating", "finished clean")

    now = time.time()
    tstr = str(time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(now)))
    t.updateEndTime(tstr)

    # Build the HTML results table from results.csv (if present).
    f = None
    try:
        f = open(t2.testobj.TestInputData.exec_results_path + "/results.csv")
    except IOError as e:
        lctx.debug("File results.csv not found")
        pass
    to = t.testobj.TestInputData.email
    htmlfile = '<table>'
    if f:
        reader = csv.reader(f)
        rownum = 0
        for row in reader:
            if rownum == 0:
                # first CSV row becomes the table header
                htmlfile += '<tr>'
                for column in row:
                    htmlfile += '<th style="text-align: left;" width="70%">' + column + '</th>'
                htmlfile += '</tr>'
            else:
                htmlfile += '<tr>'
                for column in row:
                    htmlfile += '<td style="text-align: left;" width="70%">' + column + '</td>'
                htmlfile += '</tr>'
            rownum += 1
        f.close()
    htmlfile += '</table>'
    host_ip = "http://" + common.get_local_ip(
    ) + "/test_info.php?testid=" + str(t.testobj.TestInputData.testid)
    subject = "Test {} completed successfully".format(
        t.testobj.TestInputData.testid)
    mail_content = "<BR> Test id : {} \
<BR> Framework : {} \
<BR> Title : {} <BR>".format(
        t.testobj.TestInputData.testid,
        t.testobj.TestInputData.frameworkname,
        t.testobj.TestInputData.title)
    mail_content = mail_content + "<BR>==========================================================<BR>"
    mail_content = mail_content + "<BR>Purpose : {} <BR> \
<BR> Creation time : {} \
<BR>Start time : {} \
<BR>End time : {} <BR>".format(
        t.testobj.TestInputData.purpose,
        t.testobj.TestInputData.creation_time,
        t.testobj.TestInputData.start_time,
        t.testobj.TestInputData.end_time)
    mail_content = mail_content + "<BR>Your test executed successfully. \
<BR>Results (Contents of results.csv)<BR>"
    mail_content = mail_content + "<BR>==========================================================<BR>"
    mail_content = mail_content + "<BR>" + htmlfile + "<BR>"
    mail_content = mail_content + "<BR>==========================================================<BR>"
    mail_content = mail_content + "Link:"
    mail_content = mail_content + '<BR><a href="' + host_ip + '">' + host_ip + '</a>'
    try:
        common.send_email(subject, to, mail_content, "", lctx, cfg.email_user,
                          cfg.email_server, cfg.smtp_server, cfg.smtp_port)
    except:
        # best-effort notification; a mail failure must not fail the test
        lctx.error("Mail send error")
    return "SUCCESS"
def activator_loop(self):
    """Activate tasks whose start time has come, deactivate expired ones.

    For every newly activated task: optionally advance all users to it
    (auto_advance), then generate/announce it for users already waiting
    on that task number. Finally, tasks past their deadline are set
    inactive again.
    """
    curc, conc = c.connect_to_db(self.dbs["course"], self.queues["logger"],\
                                 self.name)

    # first we need to know, which tasks are not active at the moment
    sql_cmd = "SELECT * FROM TaskConfiguration WHERE TaskActive = 0"
    curc.execute(sql_cmd)
    rows_tasks = curc.fetchall()

    # loop through all the inactive tasks
    for row_task in rows_tasks:
        task_nr = row_task[0]

        # check if a tasks start time has come
        task_start = datetime.datetime.strptime(row_task[1], c.format_string)
        if task_start < datetime.datetime.now():
            # first, let's set the task active!
            data = {'task_nr': task_nr}
            sql_cmd = ("UPDATE TaskConfiguration SET TaskActive = 1 "
                       "WHERE TaskNr = :task_nr")
            curc.execute(sql_cmd, data)
            conc.commit()
            logmsg = "Turned Task {0} to active.".format(str(task_nr))
            c.log_a_msg(self.queues["logger"], self.name, logmsg, "INFO")

            curs, cons = c.connect_to_db(self.dbs["semester"], \
                                         self.queues["logger"], self.name)

            # with allow_requests active no other measures have to be taken
            if self.allow_requests != "no":
                continue

            # if auto_advance is activated, all users should be
            # advanced to that task
            if self.auto_advance:
                data = {'task_nr': task_nr}
                sqlcmd = ("SELECT UserId FROM Users "
                          "WHERE CurrentTask < :task_nr")
                curs.execute(sqlcmd, data)
                rows = curs.fetchall()

                users_list = []
                for row in rows:
                    users_list.append(str(row[0]))
                users_comma_list = ','.join(users_list)

                # This did not work, therefore with format:
                # data = {'task_nr': task_nr,\
                #         'users_comma_list': users_comma_list}
                # sqlcmd = ("UPDATE Users SET CurrentTask = :task_nr "
                #           " WHERE UserId IN (:users_comma_list)")
                # curs.execute(sqlcmd, data)
                # NOTE(review): string-formatted SQL; values are internal
                # integer ids, but keep an eye on this if inputs change.
                sqlcmd = ("UPDATE Users SET CurrentTask = {0} WHERE "
                          "UserId in ({1});").format(task_nr, users_comma_list)
                curs.execute(sqlcmd)
                cons.commit()

                logmsg = "Advanced users with ids: " + users_comma_list
                c.log_a_msg(self.queues["logger"], self.name, logmsg, "INFO")

            # next, check if any users are waiting for that task, meaning:
            # 1) his CurrentTask = task_nr AND 2) No UserTask exists for it
            #TODO: Find a better solution with a join
            data = {'task_nr': task_nr}
            sqlcmd = ("SELECT UserId, Email FROM Users "
                      "WHERE CurrentTask = :task_nr AND UserId NOT IN "
                      "(SELECT UserId FROM UserTasks "
                      "WHERE TaskNr = :task_nr)")
            curs.execute(sqlcmd, data)
            rows = curs.fetchall()

            for row in rows:
                user_id = row[0]
                user_email = row[1]

                logmsg = "The next task({0}) is sent to User {1} now." \
                         .format(task_nr, user_id)
                c.log_a_msg(self.queues["logger"], self.name, logmsg, "INFO")
                try:
                    data = {'task_nr': task_nr}
                    sql_cmd = (
                        "SELECT GeneratorExecutable FROM TaskConfiguration "
                        "WHERE TaskNr = :task_nr")
                    curc.execute(sql_cmd, data)
                    res = curc.fetchone()
                except:
                    # NOTE(review): on failure `res` from a previous
                    # iteration (or unbound) is used below -- confirm.
                    logmsg = (
                        "Failed to fetch Generator Script for"
                        "TaskNr {0} from the Database! Table "
                        "TaskConfiguration corrupted?").format(task_nr)
                    c.log_a_msg(self.queues["logger"], self.name, \
                                logmsg, "ERROR")

                if res != None:
                    # generator configured -> queue task generation
                    logmsg = "Calling Generator Script: {0}".format(res[0])
                    c.log_a_msg(self.queues["logger"], self.name, \
                                logmsg, "DEBUG")
                    logmsg = "UserEmail: {0}, TaskNr : {1}, UserId: {2},"\
                             .format(user_email, task_nr, user_id)
                    c.log_a_msg(self.queues["logger"], self.name, \
                                logmsg, "DEBUG")
                    c.generate_task(self.queues["generator"], user_id,\
                                    task_nr, user_email, "")
                else:
                    # static task description -> just announce it
                    c.send_email(self.queues["sender"], str(user_email), \
                                 str(user_id), "Task", str(task_nr), "", "")
            cons.close()

    # first we need to know, which tasks are active
    sql_cmd = "SELECT * FROM TaskConfiguration WHERE TaskActive = 1 "
    curc.execute(sql_cmd)
    rows_tasks = curc.fetchall()

    # loop through all the active tasks
    for row_task in rows_tasks:
        task_nr = row_task[0]
        task_deadline = datetime.datetime.strptime(row_task[2], c.format_string)
        if task_deadline < datetime.datetime.now():
            data = {'task_nr': task_nr}
            sql_cmd = ("UPDATE TaskConfiguration SET TaskActive = 0 "
                       "WHERE TaskNr = :task_nr")
            curc.execute(sql_cmd, data)
            conc.commit()
            logmsg = "Deactivated Task {0}, deadline passed.".format(
                str(task_nr))
            c.log_a_msg(self.queues["logger"], self.name, logmsg, "INFO")
    conc.close()
def a_result_was_submitted(self, user_id, user_email, task_nr, message_id, \
                           mail):
    """
    Check if the user is allowed ot submit a result to the task with
    given task nr and if yes add to worker queue.

    Rejections (invalid task, task not yet issued, deadline passed) are
    answered by email. If a job for the same (user, task) is already
    running, the message id is backlogged instead of dispatched.
    """
    logmsg = "Processing a Result, UserId:{0} TaskNr:{1}"\
             .format(user_id, task_nr)
    c.log_a_msg(self.queues["logger"], self.name, logmsg, "INFO")

    #task with this task_nr exists?
    is_task = c.is_valid_task_nr(self.dbs["course"], task_nr, self.queues["logger"],\
                                 self.name)

    if not is_task:
        # task_nr is not valid
        c.send_email(self.queues["sender"], user_email, "", "InvalidTask", str(task_nr), \
                     "", message_id)
        return

    # task_nr is valid, get deadline
    deadline = c.get_task_deadline(self.dbs["course"], task_nr, self.queues["logger"], \
                                   self.name)

    already_received = c.user_received_task(self.dbs["semester"], user_id, \
                                            task_nr, self.queues["logger"], \
                                            self.name)

    if is_task and not already_received:
        #task_nr valid, but user has not gotten that task yet
        logmsg = ("User can not submit for this task yet.")
        c.log_a_msg(self.queues["logger"], self.name, logmsg, "DEBUG")
        c.send_email(self.queues["sender"], user_email, "", "TaskNotSubmittable", str(task_nr), \
                     "", message_id)
    elif deadline < datetime.datetime.now():
        # deadline passed for that task_nr!
        logmsg = ("Deadline passed")
        c.log_a_msg(self.queues["logger"], self.name, logmsg, "DEBUG")
        c.send_email(self.queues["sender"], user_email, "", "DeadTask", str(task_nr), \
                     "", message_id)
    else:
        # okay, let's work with the submission
        job_tuple = (user_id, task_nr)
        # only one job per (user, task) may be active at a time
        dispatchable = job_tuple not in self.jobs_active

        if not dispatchable:
            # queue this message id until the active job finishes
            if job_tuple in self.jobs_backlog:
                self.jobs_backlog[job_tuple].append(message_id)
            else:
                self.jobs_backlog[job_tuple] = [message_id]
            logmsg = ("Backlogged {0},{1},{2}").format(
                user_id, task_nr, message_id)
            c.log_a_msg(self.queues["logger"], self.name, logmsg, "INFO")
        else:
            # save the attached files to user task directory
            self.save_submission_user_dir(user_id, task_nr, mail)
            c.dispatch_job(self.queues["job"], user_id, task_nr, \
                           user_email, message_id)
            dispatch_time = time.time()
            # bookkeeping so later submissions/timeouts can be correlated
            self.jobs_active[job_tuple] = {"dispatch": dispatch_time, \
                                           "message_id": message_id}
            self.mid_to_job_tuple[message_id] = job_tuple
def a_question_was_asked(self, user_id, user_email, mail, message_id):
    """
    Process a question that was asked by a user.

    If the subject contains a task number the question is forwarded to
    that task's operators, otherwise to the admins. The user gets a
    confirmation mail and the question counter is incremented.
    """
    mail_subject = str(mail['subject'])

    logmsg = 'The user has a question, please take care of that!'
    c.log_a_msg(self.queues["logger"], self.name, logmsg, "DEBUG")

    # was the question asked to a specific task_nr
    search_obj = re.search('[0-9]+', mail_subject, )

    task_nr = ""
    if (search_obj != None):
        task_nr = search_obj.group()

        #task with this task_nr exists?
        is_task = c.is_valid_task_nr(self.dbs["course"], task_nr, self.queues["logger"], \
                                     self.name)

        if not is_task:
            # task_nr is not valid
            c.send_email(self.queues["sender"], user_email, "", "InvalidTask", str(task_nr), \
                         "", message_id)
            logmsg = ("Question from user with email {0} contains an "
                      "invalid TaskNr. Informing user, message dropped"
                      ).format(user_email)
            c.log_a_msg(self.queues["logger"], self.name, logmsg, "INFO")
            return

        fwd_mails = self.get_taskoperator_emails(task_nr)
        if not fwd_mails:
            logmsg = ("Error getting the taskoperator email for task {0}. "
                      "Question from user with email={1} "
                      "dropped.").format(task_nr, user_email)
            c.log_a_msg(self.queues["logger"], self.name, logmsg, "ERROR")
            return
    else:
        # no task number in the subject -> route to the admins
        fwd_mails = self.get_admin_emails()
        if not fwd_mails:
            logmsg = ("Error getting the admin email."
                      "Question from user with email={1} "
                      "dropped.").format(task_nr, user_email)
            c.log_a_msg(self.queues["logger"], self.name, logmsg, "ERROR")
            return

    # send Question received message to user
    c.send_email(self.queues["sender"], user_email, "", "Question", "", "", "")

    # forward email to admins/taskoperators; Reply-To lets them answer
    # the student directly
    mail["Reply-To"] = user_email
    c.send_email(self.queues["sender"], fwd_mails, user_id, "QFwd", task_nr,
                 mail, message_id)

    c.increment_db_statcounter(self.dbs["semester"], 'nr_questions_received', \
                               self.queues["logger"], self.name)
def handle_new(self, m):
    """ Fetch new emails and initiate appropriate action

    For each new message: known whitelisted users are dispatched via
    action_by_subject; unknown whitelisted addresses are registered
    (before the registration deadline); non-whitelisted senders get a
    rejection mail.
    """
    uid_of_mid = self.idmap_new_emails(m)

    # no new or imap connection error
    if not uid_of_mid:
        return

    curs, cons = c.connect_to_db(self.dbs["semester"], self.queues["logger"],
                                 self.name)

    # iterate over all new e-mails and take action according to the structure
    # of the subject line
    for message_id, uid in uid_of_mid.items():
        c.increment_db_statcounter(self.dbs["semester"], 'nr_mails_fetched', \
                                   self.queues["logger"], self.name)

        # fetching the mail, "`(RFC822)`" means "get the whole stuff", but you
        # can ask for headers only, etc
        try:
            resp, data = m.uid('fetch', uid, "(RFC822)")
        except Exception as e:
            logmsg = ("Failed to fetch message with uid {0} from "
                      "inbox with error {1}").format(uid, str(e))
            c.log_a_msg(self.queues["logger"], self.name, logmsg, "ERROR")
            continue

        # parsing the mail content to get a mail object
        mail = email.message_from_bytes(data[0][1])

        mail_subject = str(mail['subject'])
        from_header = str(mail['From'])
        split_header = str(from_header).split("<")
        user_name = split_header[0]
        try:
            user_email = str(split_header[1].split(">")[0])
        except:
            # "From" header without angle brackets: use it verbatim
            user_email = str(mail['From'])

        # Now let's decide what actions to take with the received email
        whitelisted = self.check_if_whitelisted(user_email)

        if whitelisted:
            # On Whitelist
            data = {'Email': user_email}
            sql_cmd = "SELECT UserId FROM Users WHERE Email = :Email"
            curs.execute(sql_cmd, data)
            res = curs.fetchone()

            if res != None:
                # Already registered
                user_id = res[0]
                logmsg = ("Got mail from an already known user! "
                          "(UserId:{0}, Email:{1}").format(
                              user_id, user_email)
                c.log_a_msg(self.queues["logger"], self.name, logmsg, "INFO")

                # Take action based on the subject
                self.action_by_subject(user_id, user_email, message_id, mail, \
                                       mail_subject)
            else:
                # Not yet registered
                reg_deadline = self.get_registration_deadline()

                if reg_deadline > datetime.datetime.now():
                    # Before Registraton deadline?
                    # Name for user specified in Whitelist? -> take it
                    data = {'Email': user_email}
                    sql_cmd = ("SELECT Name FROM Whitelist "
                               "WHERE Email = :Email")
                    curs.execute(sql_cmd, data)
                    res = curs.fetchone()

                    if res[0] and res[0].strip():
                        user_name = res[0]

                    # Create user and send Welcome message
                    self.add_new_user(user_name, user_email)
                    c.send_email(self.queues["sender"], user_email, "", "Welcome", \
                                 "", "", message_id)
                else:
                    # After Registration deadline
                    c.send_email(self.queues["sender"], user_email, "", "RegOver", \
                                 "", "", message_id)
        else:
            # Not on Whitelist
            # Check if user has an UserID and was therefore deleted after registration
            data = {'Email': user_email}
            sql_cmd = "SELECT UserId FROM Users WHERE Email = :Email"
            curs.execute(sql_cmd, data)
            res = curs.fetchone()

            if res == None:
                # Not a registered user
                c.send_email(self.queues["sender"], user_email, "", "NotAllowed", \
                             "", "", message_id)
            else:
                # A registered user, who was removed from the whitelist after registration
                c.send_email(self.queues["sender"], user_email, "", "DeletedFromWhitelist", \
                             "", "", message_id)
    cons.close()
def take_new_results(self, user_email, task_nr, mail, messageid):
    """
    Store a new submisson in the user's directory structure.

    Rejects the submission by email if the task deadline has passed or
    the user has not reached this task yet; otherwise writes the mail's
    attachments into a fresh Submission<N>_<timestamp> directory,
    mirrors them into the task directory and queues a test job.

    :param user_email: sender address (assumed to be a registered user)
    :param task_nr: number of the task the result belongs to
    :param mail: email.message.Message carrying the attachments
    :param messageid: Message-ID of the originating email
    """
    curs, cons = c.connect_to_db(self.semesterdb, self.logger_queue, self.name)

    # get the user's UserId
    # NOTE(review): assumes the address exists in Users (caller checks
    # the whitelist first) -- res would be None otherwise.
    data = {'Email': user_email}
    sql_cmd = "SELECT UserId FROM Users WHERE Email = :Email"
    curs.execute(sql_cmd, data)
    res = curs.fetchone()
    user_id = res[0]

    deadline = c.get_task_deadline(self.coursedb, task_nr, self.logger_queue, \
                                   self.name)
    curtask = c.user_get_current_task(self.semesterdb, user_id, self.logger_queue, \
                                      self.name)

    if deadline < datetime.datetime.now():
        #deadline has passed!
        c.send_email(self.sender_queue, user_email, "", "DeadTask", str(task_nr), \
                     "", messageid)
    elif curtask < task_nr:
        #user is trying to submit a solution to a task although an earlier task
        # was not solved.
        c.send_email(self.sender_queue, user_email, "", "InvalidTask", str(task_nr), \
                     "", messageid)
    else:
        # increment the submission_nr for the user
        submission_nr = self.increment_submission_nr(int(user_id), int(task_nr))

        #create a directory for putting his submission in:
        detach_dir = 'users/{0}/Task{1}'.format(user_id, task_nr)
        ts = datetime.datetime.now()
        submission_dir = "/Submission{0}_{1}{2}{3}_{4}{5}{6}{7}".format(\
            submission_nr, ts.year, ts.month, ts.day, ts.hour, ts.minute, \
            ts.second, ts.microsecond)
        current_dir = detach_dir + submission_dir

        c.check_dir_mkdir(current_dir, self.logger_queue, self.name)

        # use walk to create a generator, iterate on the parts and forget
        # about the recursive headache
        # counter is hoisted OUT of the loop: previously it was reset to 1
        # per part, so several unnamed attachments collided on the same
        # fallback name and all but the first were silently dropped.
        counter = 1
        for part in mail.walk():
            # multipart are just containers, so skip them
            if part.get_content_maintype() == 'multipart':
                continue

            # is this part an attachment ?
            if part.get('Content-Disposition') is None:
                continue

            filename = part.get_filename()

            # if there is no filename, create one with a counter to avoid duplicates
            if not filename:
                # '.bin' (with the dot) so the fallback has a real extension
                filename = 'part-%03d%s' % (counter, '.bin')
                counter += 1

            att_path = os.path.join(current_dir, filename)

            #Check if its already there
            if not os.path.isfile(att_path):
                # finally write the stuff; "with" guarantees the handle is
                # closed even if the write fails
                with open(att_path, 'wb') as fp:
                    fp.write(part.get_payload(decode=True))

        # mirror the newest submission into the task directory
        cmd = "rm " + detach_dir + "/*" + " 2> /dev/null"
        os.system(cmd)
        cmd = "cp -R " + current_dir + "/* " + detach_dir + " > /dev/null"
        os.system(cmd)

        # Next, let's handle the task that shall be checked, and send a job
        # request to the job_queue. The workers can then get it from there.
        self.job_queue.put(dict({"UserId": user_id, "UserEmail": user_email, \
                                 "message_type": "Task", "taskNr": task_nr, \
                                 "MessageId": messageid}))
    cons.close()
def loop_code(self):
    """ The code run in the while True loop of the mail fetcher thread.

    Fetches new mails, dispatches them by subject (Result/Question/
    Status), registers unknown whitelisted users, then archives any
    messages queued on arch_queue, and finally sleeps for poll_period.
    """
    m = self.connect_to_imapserver()

    if m != 0:
        curs, cons = c.connect_to_db(self.semesterdb, self.logger_queue,
                                     self.name)

        items = self.fetch_new_emails(m)

        # iterate over all new e-mails and take action according to the structure
        # of the subject line
        for emailid in items:
            c.increment_db_statcounter(self.semesterdb, 'nr_mails_fetched', \
                                       self.logger_queue, self.name)

            # fetching the mail, "`(RFC822)`" means "get the whole stuff", but you
            # can ask for headers only, etc
            resp, data = m.fetch(emailid, "(RFC822)")

            # parsing the mail content to get a mail object
            mail = email.message_from_bytes(data[0][1])

            mail_subject = str(mail['subject'])
            from_header = str(mail['From'])
            split_header = str(from_header).split("<")
            user_name = split_header[0]
            try:
                user_email = str(split_header[1].split(">")[0])
            except:
                # "From" header without angle brackets: use it verbatim
                user_email = str(mail['From'])
            messageid = mail.get('Message-ID')

            whitelisted = self.check_if_whitelisted(user_email)

            if whitelisted:
                data = {'Email': user_email}
                sql_cmd = "SELECT UserId FROM Users WHERE Email = :Email"
                curs.execute(sql_cmd, data)
                res = curs.fetchall()
                if res:
                    logmsg = "Got mail from an already known user!"
                    c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")
                    #TODO: Does sending a mail "Result bla" without number crash this?
                    if re.search('[Rr][Ee][Ss][Uu][Ll][Tt]', mail_subject):
                        searchObj = re.search('[0-9]+', mail_subject, )
                        if int(searchObj.group()) <= c.get_num_tasks(self.coursedb, \
                           self.logger_queue, self.name):
                            logmsg = "Processing a Result, UserId:{0} TaskNr:{1}"\
                                     .format(user_email, searchObj.group())
                            c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
                            self.take_new_results(user_email, searchObj.group(), \
                                                  mail, messageid)
                        else:
                            logmsg = ("Given Task number is higher than actual Number"
                                      "of Tasks!")
                            c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
                            c.send_email(self.sender_queue, user_email, "", \
                                         "InvalidTask", "", "", messageid)
                    elif re.search('[Qq][Uu][Ee][Ss][Tt][Ii][Oo][Nn]', mail_subject):
                        self.a_question_was_asked(user_email, mail, messageid)
                    elif re.search('[Ss][Tt][Aa][Tt][Uu][Ss]', mail_subject):
                        self.a_status_is_requested(user_email, messageid)
                    else:
                        logmsg = ("Got a kind of message I do not understand. "
                                  "Sending a usage mail...")
                        c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
                        c.send_email(self.sender_queue, user_email, "", "Usage", "", \
                                     "", messageid)
                else:
                    # whitelisted but unknown: register before the deadline
                    reg_deadline = self.get_registration_deadline()
                    if reg_deadline > datetime.datetime.now():
                        self.add_new_user(user_name, user_email)
                        c.send_email(self.sender_queue, user_email, "", "Welcome", \
                                     "", "", messageid)
                    else:
                        c.send_email(self.sender_queue, user_email, "", "RegOver", \
                                     "", "", messageid)
            else:
                c.send_email(self.sender_queue, user_email, "", "NotAllowed", \
                             "", "", messageid)

        try:
            m.close()
        except imaplib.IMAP4.abort:
            logmsg = ("Closing connection to server was aborted "
                      "(probably a server-side problem). Trying to connect again ...")
            c.log_a_msg(self.logger_queue, self.name, logmsg, "ERROR")
            #m.close()
        except imaplib.IMAP4.error:
            logmsg = ("Got an error when trying to connect to the imap server."
                      "Trying to connect again ...")
            c.log_a_msg(self.logger_queue, self.name, logmsg, "ERROR")
        except:
            logmsg = ("Got an unknown exception when trying to connect to the "
                      "imap server. Trying to connect again ...")
            c.log_a_msg(self.logger_queue, self.name, logmsg, "ERROR")
        finally:
            logmsg = "closed connection to imapserver"
            c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")

    # check if messages have been handled and need to be archived now
    try:
        next_send_msg = self.arch_queue.get(False)
    except:
        # queue empty -- sentinel keeps the code below skipped
        next_send_msg = 'NONE'

    if next_send_msg != 'NONE':
        c.log_a_msg(self.logger_queue, self.name, "moving a message!!!!!!!", \
                    "INFO")
        m = self.connect_to_imapserver()
        for next_msg in next_send_msg:
            email_ids = self.fetch_all_emails(m)
            for emailid in email_ids:
                typ, msg_data = m.fetch(str(int(emailid)), "(BODY[HEADER])")
                mail = email.message_from_bytes(msg_data[0][1])
                # NOTE(review): compares against next_send_msg.get('mid'),
                # not next_msg.get('mid') -- looks suspicious given the
                # surrounding loop; confirm the intended container shape.
                if mail['Message-ID'] == next_send_msg.get('mid'):
                    logmsg = "Moving Message with ID: {0}"\
                             .format(mail['Message-ID'])
                    c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")
                    # resolve the sequence number to a stable UID first
                    resp, data = m.fetch(emailid, "(UID)")
                    pattern_uid = re.compile('\d+ \(UID (?P<uid>\d+)\)')
                    match = pattern_uid.match(str(data[0]).split("'")[1])
                    msg_uid = match.group('uid')
                    # copy to the archive mailbox, then delete the original
                    result = m.uid('COPY', msg_uid, 'archive_vels')
                    if result[0] == 'OK':
                        mov, data = m.uid('STORE', msg_uid, '+FLAGS', \
                                          '(\Deleted)')
                        m.expunge()
                    break

    # m==0 is only possible in test-code (e.g. load_test.py)
    if m != 0:
        m.logout()

    cons.close()
    time.sleep(self.poll_period)
def action_by_subject(self, user_id, user_email, message_id, mail, mail_subject):
    """
    Examine the subject of the user's email and initiate the appropriate
    action (submit a result, list tasks, request a task, ask a question,
    query status). Anything unrecognized triggers a usage mail.

    :param user_id: id of the user in the semester database
    :param user_email: address the mail came from (reply target)
    :param message_id: Message-ID header of the incoming mail
    :param mail: the parsed email message object
    :param mail_subject: subject line used for dispatching
    """
    def send_usage_mail():
        """Log and answer with a usage mail for messages we cannot interpret."""
        logmsg = ("Got a kind of message I do not understand. "
                  "Sending a usage mail...")
        c.log_a_msg(self.queues["logger"], self.name, logmsg, "INFO")
        c.send_email(self.queues["sender"], user_email, "", "Usage", "", \
                     "", message_id)

    def parse_task_nr():
        """Return the first number found in the subject, or None if absent."""
        search_obj = re.search(r'[0-9]+', mail_subject)
        return None if search_obj is None else search_obj.group()

    if re.search(r'result', mail_subject, re.IGNORECASE):
        ###############
        #   RESULT    #
        ###############
        task_nr = parse_task_nr()
        if task_nr is None:
            # Result + no number
            send_usage_mail()
            return
        # Result + number
        self.a_result_was_submitted(user_id, user_email, task_nr,
                                    message_id, mail)
    elif re.search(r'list', mail_subject, re.IGNORECASE):
        ###############
        #    LIST     #
        ###############
        self.task_list_requested(user_id, user_email, message_id)
    elif (self.allow_requests != "no") \
            and re.search(r'request', mail_subject, re.IGNORECASE):
        ###############
        #   REQUEST   #
        ###############
        task_nr = parse_task_nr()
        if task_nr is None:
            # Request + no number
            send_usage_mail()
            return
        # Request + number
        self.a_task_is_requested(user_id, user_email, task_nr, message_id)
    elif re.search(r'question', mail_subject, re.IGNORECASE):
        ###############
        #  QUESTION   #
        ###############
        self.a_question_was_asked(user_id, user_email, mail, message_id)
    elif re.search(r'status', mail_subject, re.IGNORECASE):
        ###############
        #   STATUS    #
        ###############
        self.a_status_is_requested(user_id, user_email, message_id)
    else:
        #####################
        #  DEFAULT ACTION   #
        #####################
        send_usage_mail()
def activator_loop(self):
    """
    Activate every task whose start time has passed and hand it out.

    For each inactive task in TaskConfiguration whose start time lies in
    the past:
      * mark the task active,
      * if auto_advance is set, move all users with a lower CurrentTask
        forward to this task,
      * for every user whose CurrentTask equals this task but who has no
        UserTasks row yet, either schedule the configured generator
        script or send out the static task email.
    """
    curc, conc = c.connect_to_db(self.coursedb, self.logger_queue, \
                                 self.name)

    # first we need to know, which tasks are not active at the moment
    sql_cmd = "SELECT * FROM TaskConfiguration WHERE TaskActive==0;"
    curc.execute(sql_cmd)
    rows_tasks = curc.fetchall()

    # loop through all the inactive tasks
    for row_task in rows_tasks:
        tasknr = row_task[0]

        logmsg = "Task {0} is still inactive".format(str(tasknr))
        c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")

        # check if a tasks start time has come
        task_starttime = datetime.datetime.strptime(row_task[1],
                                                    c.format_string)
        if task_starttime >= datetime.datetime.now():
            continue  # start time not reached yet

        # first, let's set the task active!
        data = {'tasknr': tasknr}
        sql_cmd = "UPDATE TaskConfiguration SET TaskActive = 1 WHERE TaskNr == :tasknr;"
        curc.execute(sql_cmd, data)
        conc.commit()

        logmsg = "Turned Task {0} to active.".format(str(tasknr))
        c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")

        curs, cons = c.connect_to_db(self.semesterdb, \
                                     self.logger_queue, self.name)

        # if auto_advance is activated, all users should be
        # advanced to that task
        if self.auto_advance:
            data = {'tasknr': tasknr}
            sqlcmd = "SELECT UserId FROM Users WHERE CurrentTask < :tasknr;"
            curs.execute(sqlcmd, data)
            users_list = [str(row[0]) for row in curs.fetchall()]

            # Named parameters cannot expand a list for an IN clause;
            # build one qmark placeholder per user id instead of string
            # formatting. Skip entirely when no user needs advancing --
            # "IN ()" is a syntax error in SQLite.
            if users_list:
                placeholders = ','.join('?' * len(users_list))
                sqlcmd = ("UPDATE Users SET CurrentTask = ? WHERE "
                          "UserId IN ({0});").format(placeholders)
                curs.execute(sqlcmd, [tasknr] + users_list)
                cons.commit()

                logmsg = "Advanced users with ids: " + ','.join(users_list)
                c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")

        # next, check if any users are waiting for that task, meaning:
        # 1) his CurrentTask = tasknr AND 2) No UserTask exists for it
        #TODO: Find a better solution with a join
        data = {'tasknr': tasknr}
        sqlcmd = (
            "SELECT UserId, Email FROM Users "
            "WHERE CurrentTask = :tasknr AND UserId NOT IN "
            "(SELECT UserId FROM UserTasks WHERE TaskNr = :tasknr)")
        curs.execute(sqlcmd, data)
        rows = curs.fetchall()

        for row in rows:
            uid = row[0]
            user_email = row[1]

            logmsg = "The next task({0}) is sent to User {1} now." \
                     .format(tasknr, uid)
            c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")

            res = None  # stays None if the lookup below fails
            try:
                data = {'tasknr': tasknr}
                sql_cmd = ("SELECT GeneratorExecutable FROM TaskConfiguration "
                           "WHERE TaskNr == :tasknr;")
                curc.execute(sql_cmd, data)
                res = curc.fetchone()
            except Exception:
                logmsg = ("Failed to fetch Generator Script for Tasknr: {0} "
                          "from the Database! Table TaskConfiguration "
                          "corrupted?").format(tasknr)
                c.log_a_msg(self.logger_queue, self.name, \
                            logmsg, "ERROR")

            if res is not None:
                # a generator executable is configured -> queue generation
                logmsg = "Calling Generator Script: {0}".format(res[0])
                c.log_a_msg(self.logger_queue, self.name, \
                            logmsg, "DEBUG")
                logmsg = "UserEmail: {0}, TaskNr : {1}, UserId: {2},".format(user_email, \
                         tasknr, uid)
                c.log_a_msg(self.logger_queue, self.name, \
                            logmsg, "DEBUG")

                self.gen_queue.put({"user_id": str(uid), \
                                    "user_email": str(user_email), \
                                    "task_nr": str(tasknr), \
                                    "messageid": ""})
            else:
                # no generator configured -> send the static task mail
                c.send_email(self.sender_queue, str(user_email), \
                             str(uid), "Task", str(tasknr), "", "")
        cons.close()
    conc.close()
url = "http://" + host + connector_status_url.format(connector) req = requests.get(url) print(url) if req.status_code == 200: response = req.json() print(response) if response.get("connector"): print("{} is {}".format(connector, response["connector"].get("state"))) if response["connector"].get("state") == "RUNNING": tasks = response.get("tasks", []) for task in tasks: pprint(task) if task.get("state") == "FAILED": message = "{} is failed, Attempting to restart it..".format( connector) send_email("Connector Failure", message) restart_url = "http://" + host + task_restart_url.format( connector, task.get("id", -1)) requests.post(restart_url) pprint( requests.get( "http://" + host + task_status_url.format( connector, task.get("id", -1))).json()) except KeyboardInterrupt: print("Received INT signal, Stopping manager.") except Exception as e: print("Caught an Exception [Caused by: {}]".format(e)) print("Manager is stopped")
def handle_test_result(self, test_res, user_id, user_email, task_nr, message_id):
    """
    Act based on the result code of a submission test.

    Notifies the user (and, for alerts, the admins) by email depending
    on the return value, and -- on success -- kicks off generation of
    the next task when requests are disabled.
    """
    # known test return codes
    code_success = 0
    code_failure = 1
    code_security_alert = 2
    code_task_error = 3

    if test_res == code_success:
        #####################
        #     SUCCESS       #
        #####################
        note = "Test succeeded! User: {0} Task: {1}".format(user_id, task_nr)
        c.log_a_msg(self.queues["logger"], self.name, note, "INFO")

        # Notify the user that the submission was successful
        c.send_email(self.queues["sender"], user_email, user_id, \
                     "Success", task_nr, "", message_id)

        # when requests are allowed, the user pulls the next task himself
        if self.allow_requests != "no":
            return

        # otherwise initiate generation of the next higher task (if possible)
        self.initiate_next_task(user_id, user_email, int(task_nr),
                                int(task_nr) + 1)
    elif test_res == code_failure:
        #####################
        #      FAILED       #
        #####################
        note = "Test failed! User: {0} Task: {1} return value:{2}" \
               .format(user_id, task_nr, test_res)
        c.log_a_msg(self.queues["logger"], self.name, note, "INFO")

        c.send_email(self.queues["sender"], user_email, user_id, \
                     "Failed", task_nr, "", message_id)
    elif test_res == code_security_alert:
        #####################
        #  SECURITY ALERT   #
        #####################
        note = "SecAlert: This test failed due to probable attack by user!"
        c.log_a_msg(self.queues["logger"], self.name, note, "INFO")

        c.send_email(self.queues["sender"], "", user_id, \
                     "SecAlert", task_nr, "", message_id)
    elif test_res == code_task_error:
        #####################
        #    TASK ERROR     #
        #####################
        note = ("TaskAlert: This test for TaskNr {0} and User {1} failed "
                " due an error with task/testbench analyzation!") \
               .format(task_nr, user_id)
        c.log_a_msg(self.queues["logger"], self.name, note, "INFO")

        # alert to admins
        c.send_email(self.queues["sender"], "", user_id, \
                     "TaskAlert", task_nr, "", message_id)
        # error notice to user
        c.send_email(self.queues["sender"], user_email, user_id, \
                     "TaskErrorNotice", task_nr, "", message_id)
htmlfile = htmlfile + '<td width="70%">' + column + '</td>' htmlfile = htmlfile + '</tr>' rownum += 1 htmlfile = htmlfile + '</table>' subject = "Test SAMPLE completed successfully" mail_content = "<BR>Test id : {sample id} \ <BR> Framework : {sample framework} \ <BR> Title : {sample title}<BR><BR>" mail_content = mail_content + "<BR>==========================================================<BR><BR>" mail_content = mail_content + "<BR>Purpose : {sample}<BR> \ <BR>Creation time : {TIME} \ <BR>Start time : {TIME} \ <BR>End time : {TIME}<BR>" mail_content = mail_content + "<BR>Your test executed successfully.\ <BR>Results (Contents of results.csv)<BR>" mail_content = mail_content + "<BR>==========================================================<BR>" mail_content = mail_content + "<BR>" + htmlfile + "<BR>" lctx = LOG.getLogger("schedulerlog", "DH") curhost = "ip-172-31-19-107.us-west-2.compute.internal" smtp_server = "localhost" smtp_port = 25 common.send_email("test email with results", "*****@*****.**", mail_content, "", lctx, "ubuntu", curhost, smtp_server, smtp_port)
def activator_loop(self):
    """
    Activate every task whose start time has passed and send the task to
    all users who are currently waiting for it.

    For each inactive task in TaskConfiguration whose start time lies in
    the past: mark it active, then for every user whose CurrentTask
    equals this task either queue the configured generator script or
    send out the static task email.
    """
    curc, conc = c.connect_to_db(self.coursedb, self.logger_queue, \
                                 self.name)

    # first we need to know, for which tasks, the message has already
    # been sent out. Fetch all rows up front: curc is re-used for the
    # GeneratorExecutable lookup inside the loop, so interleaving
    # fetchone() on the same cursor would clobber this iteration.
    sql_cmd = "SELECT * FROM TaskConfiguration WHERE TaskActive==0;"
    curc.execute(sql_cmd)
    inactive_tasks = curc.fetchall()

    for task_row in inactive_tasks:
        tasknr = task_row[0]

        logmsg = "Task {0} is still inactive".format(str(tasknr))
        c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")

        # check if a tasks start time has come
        task_starttime = datetime.datetime.strptime(task_row[1],
                                                    c.format_string)
        if task_starttime >= datetime.datetime.now():
            continue  # start time not reached yet

        # first, let's set the task active!
        data = {'tasknr': tasknr}
        sql_cmd = "UPDATE TaskConfiguration SET TaskActive = 1 WHERE TaskNr == :tasknr;"
        curc.execute(sql_cmd, data)
        conc.commit()

        # next, check if any users are waiting for that task
        curs, cons = c.connect_to_db(self.semesterdb, \
                                     self.logger_queue, self.name)
        data = {'tasknr': tasknr}
        sqlcmd = "SELECT * FROM Users WHERE CurrentTask == :tasknr;"
        curs.execute(sqlcmd, data)
        waiting_users = curs.fetchall()

        for user_row in waiting_users:
            uid = user_row[0]
            user_email = user_row[2]

            # log the user the task goes to (not the task number)
            logmsg = "The next example is sent to User {0} now.".format(uid)
            c.log_a_msg(self.logger_queue, self.name, logmsg, "INFO")

            generator = None  # stays None if the lookup below fails
            try:
                data = {'tasknr': tasknr}
                sql_cmd = ("SELECT GeneratorExecutable FROM TaskConfiguration "
                           "WHERE TaskNr == :tasknr;")
                curc.execute(sql_cmd, data)
                generator = curc.fetchone()
            except Exception:
                logmsg = ("Failed to fetch Generator Script for Tasknr: {0} "
                          "from the Database! Table TaskConfiguration "
                          "corrupted?").format(tasknr)
                c.log_a_msg(self.logger_queue, self.name, \
                            logmsg, "ERROR")

            if generator is not None:
                # a generator executable is configured -> queue generation
                logmsg = "Calling Generator Script: {0}".format(generator[0])
                c.log_a_msg(self.logger_queue, self.name, \
                            logmsg, "DEBUG")
                logmsg = "UserID {0}, UserEmail{1}".format(uid, \
                         user_email)
                c.log_a_msg(self.logger_queue, self.name, \
                            logmsg, "DEBUG")

                self.gen_queue.put({"UserId": str(uid), \
                                    "UserEmail": str(user_email), \
                                    "TaskNr": str(tasknr), \
                                    "MessageId": ""})
            else:
                # no generator configured -> send the static task mail
                c.send_email(self.sender_queue, str(user_email), \
                             str(uid), "Task", str(tasknr), "", "")
        cons.close()
    conc.close()
fig.savefig(picture_path) #plt.show() #Emails sending for receiver in RECEIVERS_EMAIL_LIST: send_html_email(receiver, f"Evolution prix du gaz: {last_synthetic}€/MWH en {tendance}", texte, picture_path) log_print("Emails sent") if __name__ == "__main__": try: log_print("Démarrage de la procédure") #Get the last Trading day previously inserted PREVIOUS_DATE = da.get_last_date() #Récupération des données et intégration dans la base fetch_cotations(INDEX_LIST) extract_cotations(INDEX_LIST, PREVIOUS_DATE) log_print("Fin de la procédure") except Exception as exception: log_print("Erreur dans la " + str(exception)) send_email(SUPPORT_EMAIL, "Le processus de récupération des données gaz a planté", "L'erreur est :" + str(exception))
def generator_loop(self):
    """
    Loop code for the generator thread.

    Blocks until a message arrives on gen_queue, creates the user's
    directory structure for the requested task, runs the task's
    generator script (the one configured in TaskConfiguration, or the
    static ``generator.sh`` fallback) and finally emails the generated
    task to the user.
    """
    # blocking wait on gen_queue
    next_gen_msg = self.gen_queue.get(True)

    logmsg = "gen_queue content:" + str(next_gen_msg)
    c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")

    task_nr = next_gen_msg.get('task_nr')
    user_id = next_gen_msg.get('user_id')
    user_email = next_gen_msg.get('user_email')
    messageid = next_gen_msg.get('messageid')

    # generate the directory for the task
    task_dir = 'users/' + str(user_id) + "/Task" + str(task_nr)
    c.check_dir_mkdir(task_dir, self.logger_queue, self.name)

    # generate the task description directory
    desc_dir = task_dir + "/desc"
    c.check_dir_mkdir(desc_dir, self.logger_queue, self.name)

    # check if there is a generator executable configured in the database;
    # if not fall back on static generator script.
    curc, conc = c.connect_to_db(self.coursedb, self.logger_queue, self.name)

    data = {'TaskNr': task_nr}
    sql_cmd = ("SELECT GeneratorExecutable FROM TaskConfiguration "
               "WHERE TaskNr== :TaskNr")
    curc.execute(sql_cmd, data)
    generatorname = curc.fetchone()

    if generatorname is not None:
        data = {'TaskNr': task_nr}
        sql_cmd = "SELECT PathToTask FROM TaskConfiguration WHERE TaskNr == :TaskNr"
        curc.execute(sql_cmd, data)
        path = curc.fetchone()
        scriptpath = str(path[0]) + "/" + str(generatorname[0])
    else:
        scriptpath = "tasks/task" + str(task_nr) + "/./generator.sh"

    challenge_mode = self.get_challenge_mode()

    # NOTE(review): this command is passed to the shell via os.system
    # (needed for the >>-redirections). scriptpath comes straight from
    # the database -- TaskConfiguration must be trusted and paths must
    # not contain shell metacharacters.
    command = scriptpath + " " + str(user_id) + " " + str(task_nr) + " " + \
              self.submission_email + " " + str(challenge_mode) + " " + \
              self.semesterdb + " >> autosub.stdout 2>>autosub.stderr"

    logmsg = "generator command: {0}".format(command)
    c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")

    generator_res = os.system(command)
    if generator_res:
        # non-zero exit status: log it; the task email is still sent
        # below to keep the best-effort behaviour
        logmsg = "Failed to call generator script, return value: " + \
                 str(generator_res)
        c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")

    logmsg = "Generated individual task for user/tasknr:" + str(user_id) + "/" + \
             str(task_nr)
    c.log_a_msg(self.logger_queue, self.name, logmsg, "DEBUG")

    c.send_email(self.sender_queue, str(user_email), str(user_id), \
                 "Task", str(task_nr), "Your personal example", str(messageid))
    conc.close()