def compile_archive(self):
    """Package the renamed file and its difmet instruction file.

    Both are bundled into a single .tar.gz archive placed in the
    cache/C_tosend repertory, then the source files are removed and the
    database records for this dissemination are updated.
    """
    # build the difmet instruction file first
    instr_file_path = self._create_diffmet_instr()
    archive_name = os.path.basename(instr_file_path).replace(
        ".diffusions.xml", ".tar.gz")
    # write under a ".tmp" name so consumers never see a half-written archive
    tmp_archive = os.path.join(self.dir_c, archive_name + ".tmp")
    with tarfile.open(tmp_archive, "w:gz") as tar:
        tar.add(instr_file_path,
                arcname=os.path.basename(instr_file_path))
        LOGGER.info("Compressed diffmet instruction file %s in %s.",
                    instr_file_path, tmp_archive)
        tar.add(self.new_file_path,
                arcname=os.path.basename(self.new_file_path))
        LOGGER.info("Compressed dissemination file %s in %s.",
                    self.new_file_path, tmp_archive)
    # strip the ".tmp" suffix to publish the finished archive
    shutil.move(tmp_archive, tmp_archive[:-4])
    Tools.remove_file(instr_file_path, "processed instruction", LOGGER)
    Tools.remove_file(self.new_file_path, "processed data", LOGGER)
    Database.update_field_by_query("rxnotif", False,
                                   **dict(final_file=self.new_filename))
    Database.update_field_by_query("message",
                                   "File packaged in tar.gz format",
                                   **dict(final_file=self.new_filename))
def clear_orphan_files(dir_):
    """Delete every file remaining in *dir_* (orphan cleanup)."""
    for entry in os.listdir(dir_):
        path = os.path.join(dir_, entry)
        try:
            Tools.remove_file(path, "orphan", LOGGER)
        except FileNotFoundError:
            # already gone — nothing left to clean
            pass
def clear_instruction_files(instruction_files):
    """Delete each instruction file in *instruction_files*, ignoring ones
    that no longer exist (they were moved back to repertory A or deleted)."""
    for path in instruction_files:
        try:
            Tools.remove_file(path, "instruction", LOGGER)
        except FileNotFoundError:
            # file either was moved back to repertory A or deleted
            pass
def bp_news_add_category():
    """Create a news category from the JSON request body.

    Expects a JSON payload with a non-empty ``cname`` field; always responds
    with a CORS-wrapped JSON message.
    """
    data = request.get_json()
    # BUG FIX: the original test `not data and data is None` only rejected
    # None, so an empty JSON object ({}) slipped through; reject any falsy
    # payload.
    if not data:
        return Tools.generate_cors_response({'code': 200, 'msg': '缺少必要参数'})
    if not data.get('cname'):
        return Tools.generate_cors_response({'code': 200, 'msg': '请输入分类名称'})
    category = Category(data)
    db.session.add(category)
    db.session.commit()
    return Tools.generate_cors_response({'code': 200, 'msg': '分类添加成功'})
def mail_to_xml(element, diff_info, prefix=""):
    """Append the EMAIL diffusion sub-elements built from *diff_info* to
    *element*, each tag name prefixed with *prefix*."""
    def add(tag, text):
        etree.SubElement(element, prefix + tag).text = text

    add("media", "EMAIL")
    add("email_adress", Tools.ack_str(diff_info["address"]))
    add("email_to_cc", Tools.ack_str(diff_info["dispatchMode"]))
    add("email_subject", Tools.ack_str(diff_info["subject"]))
    add("email_text_in_body", "0")
    # fall back to the configured attachment name when none was requested
    attached = diff_info["fileName"]
    if attached in [None, ""]:
        attached = SettingsManager.get("attachmentName")
    add("email_attached_file_name", Tools.ack_str(attached))
def bp_news_add_tag():
    """Create a news tag from the JSON request body (requires ``tname``)."""
    data = request.get_json()
    # BUG FIX: `not data and data is None` only rejected None, letting an
    # empty JSON object through; reject any falsy payload.
    if not data:
        return Tools.generate_cors_response({'code': 200, 'msg': '缺少必要参数'})
    if not data.get('tname'):
        return Tools.generate_cors_response({'code': 200, 'msg': '请输入标签名称'})
    tag = Tag(data)
    db.session.add(tag)
    db.session.commit()
    return Tools.generate_cors_response({'code': 200, 'msg': '标签添加成功'})
def test_string_conversion(self):
    """Check Tools.ack_str handling of XML-sensitive characters."""
    test = Tools.ack_str("test&")
    self.assertEqual(test, "test&")
    test = Tools.ack_str("test<")
    self.assertEqual(test, "test<")
    test = Tools.ack_str("test>")
    self.assertEqual(test, "test>")
    test = Tools.ack_str("test'")
    self.assertEqual(test, "test'")
    test = Tools.ack_str('test"')
    # BUG FIX: the expected literal was `"test""` — an unterminated string
    # (SyntaxError). Assuming the double quote passes through unchanged like
    # the other characters above — confirm against Tools.ack_str's
    # escaping rules.
    self.assertEqual(test, 'test"')
def create_server(cls):
    """Spawn the SOAP harness server once per class, killing stale copies."""
    if cls._process is not None:
        return
    # check if no other server is running; if so, kill them
    Tools.kill_process("harness_soap_server")
    env = os.environ.copy()
    parent_pkg = "/".join(webservice.__path__[0].split('/')[:-1])
    env["PYTHONPATH"] = os.environ.get("PYTHONPATH", "") + ":" + parent_pkg
    cls._process = subprocess.Popen(["python3", application.__file__], env=env)
    # give the server a moment to come up before tests hit it
    sleep(3)
    print("Soap server started")
def process(cls, max_loops=0):
    """Main send loop: poll repertory C, move files to D and upload them.

    max_loops: when > 0, stop after that many iterations (debug/testing
    only, enforced by check_end_loop).
    """
    cls.nb_workers = SettingsManager.get("sendFTPlimitConn")
    # in debug mode, it is possible to swap the pool implementation
    pool_method = cls.get_pool_method()
    cls.pool = pool_method(processes=cls.nb_workers)
    counter = 0
    cls.setup_process()
    while cls._running:
        counter += 1
        cls.signal_loop(counter)
        cls.load_settings()
        cls.update_workers()
        # idle time between two polling rounds
        idle_time = SettingsManager.get("sendFTPIdle")
        sleep(idle_time)
        # refresh repertory paths from settings
        cls.dir_c = dir_c = HarnessTree.get("temp_dissRequest_C")
        cls.dir_d = dir_d = HarnessTree.get("temp_dissRequest_D")
        # move back any remaining file from D to C
        cls.move_back_files()
        # pick at most one file per worker from C
        max_files = cls.nb_workers
        list_files_c = cls.get_file_list(dir_c, max_files)
        files_to_ftp = cls.move_files(list_files_c, dir_d)
        for file_ in files_to_ftp:
            if cls.check_file_age(file_):
                # TODO we need to find a way to update the info to the database
                # would require looking at the file compressed though
                Tools.remove_file(file_, "difmet archive", LOGGER)
                continue
            size = os.stat(file_).st_size
            timeout = cls.compute_timeout(size, file_)
            # rename the file to prevent any concurrent operation on it
            cls.lock_file(file_)
            # fire-and-forget upload: the original bound the AsyncResult to an
            # unused local (`res`); the handle is deliberately not kept
            cls.pool.apply_async(
                cls.abortable_ftp,
                (cls.upload_file, file_, dir_c, dir_d),
                dict(timeout=timeout))
        # for testing and debugging purpose only
        cls.check_end_loop(counter, max_loops)
def process_instruction_file(cls, file_to_process):
    """Process one JSON instruction file: fetch its files from the staging
    post, or discard it when expired.

    Returns a (processed, info_file, files_fetched) tuple.
    """
    processed = False
    files_fetched = []
    with open(file_to_process, "r") as file_:
        info_file = json.load(file_)
    # the full request id is request id + hostname
    req_id = info_file.get("req_id")
    hostname = info_file.get("hostname")
    full_id = req_id + hostname
    if cls.check_file_age(file_to_process):
        # too old according to the keepfiletime setting: fail the request
        msg = ("%s instruction file discarded "
               "because it is over expiration date "
               "according to keepfiletime settings "
               "parameter" % file_to_process)
        LOGGER.warning(msg)
        Database.update_field_by_query("requestStatus", REQ_STATUS.failed,
                                       **dict(fullrequestId=full_id))
        Database.update_field_by_query("message", msg,
                                       **dict(fullrequestId=full_id))
    else:
        # fetch the files listed at the staging post URI
        uri = info_file.get("uri")
        processed, files_fetched = cls.fetch_files(req_id, hostname, uri)
        if not processed:
            # a file couldn't be gathered: dissemination failed, and the
            # instruction file is either deleted (request already failed in
            # database) or sent back to repertory A for another attempt
            LOGGER.error("Couldn't fetch files from openwis staging post for"
                         " instruction file %s."
                         " Proceeding to next instruction file.",
                         file_to_process)
            if Database.get_request_status(full_id) == REQ_STATUS.failed:
                Tools.remove_file(file_to_process, "instruction", LOGGER)
            else:
                shutil.move(file_to_process, cls.dir_a)
        else:
            msg = "Instruction file %s processed" % file_to_process
            LOGGER.info(msg)
            Database.update_field_by_query("message", msg,
                                           **dict(fullrequestId=full_id))
    return processed, info_file, files_fetched
def setUp(self):
    """Build the temporary repertories and start a fresh FTP server."""
    file_sender.sender.DEBUG = False
    self.tmpdir = mkdtemp(prefix='harnais_')
    os.environ["TMPDIR"] = self.tmpdir
    # difmet deposit and acknowledgement directories under the tmp root
    self.difmet_deposit = join(self.tmpdir, "difmet_deposit")
    self.ack_dir = join(self.tmpdir, "ack_dir")
    for path in (self.difmet_deposit, self.ack_dir):
        os.mkdir(path)
    # kill any leftover ftp server before starting a new one
    Tools.kill_process("diffmet_test_ftp_server")
    FTPserver.create_server("/")
def check_zip(item, destination_dir):
    """Return the destination path for *item* inside *destination_dir*.

    Files named exactly 'tmp.zip' get a random 5-character suffix appended
    so successive archives cannot collide on the same name.
    """
    if item != "tmp.zip":
        return os.path.join(destination_dir, item)
    suffix = "." + Tools.generate_random_string(5)
    return os.path.join(destination_dir, item + suffix)
def process(self):
    """Create the JSON request file and the initial database record.

    Returns REQ_STATUS.ongoing on success, REQ_STATUS.failed when any step
    raised (the failure is then committed via commit_failure).
    """
    # unicity key for the database
    self._diff_externalid = diff_id = Tools.generate_random_string()
    database = Database.get_database()
    try:
        # JSON request file consumed by the next processing stage
        self.create_request_file()
        # first database record for this dissemination
        diffusion = Diffusion(diff_externalid=diff_id,
                              fullrequestId=self.req_id + self.hostname,
                              requestStatus=REQ_STATUS.ongoing,
                              Date=self._to_datetime(self.date_reception),
                              rxnotif=True,
                              message="Created record in SQL database",)
        with Database.get_app().app_context():
            database.session.add(diffusion)
            database.session.commit()
            LOGGER.debug("Committed %s dissemination status "
                         "into database.", REQ_STATUS.ongoing)
        status = REQ_STATUS.ongoing
    except Exception:
        LOGGER.exception("Error during notification processing. "
                         "Dissemination failed.")
        status = self.commit_failure(database, diff_id)
    return status
def update_filename(self, filename):
    """
    Register a new entry in the database in case there are multiples
    files for one request_id. If one request id is requesting N files,
    there should be N entries in the database, each with its own
    diff_external_id.
    """
    database = Database.get_database()
    with Database.get_app().app_context():
        # base record created by the receiver module
        base_record = Diffusion.query.filter_by(
            fullrequestId=self.req_id).first()
        if base_record.original_file is None:
            # first file for this request: reuse the receiver-created record
            base_record.original_file = filename
        else:
            # further files: duplicate the record under a fresh external id
            clone = Diffusion(diff_externalid=Tools.generate_random_string(),
                              fullrequestId=base_record.fullrequestId,
                              original_file=filename,
                              requestStatus=base_record.requestStatus,
                              message=base_record.message,
                              Date=base_record.Date,
                              rxnotif=base_record.rxnotif)
            database.session.add(clone)
        database.session.commit()
def post(self):
    """Log a user in: captcha check, rate limit, then credential check."""
    # BUG FIX: the captcha test was inverted (`if self.human_valid()`
    # rejected VALID captchas); reject when validation fails, matching the
    # `if not self.human_valid()` pattern used by the other handlers.
    if not self.human_valid():
        self.flash(0, {'msg': '验证码错误!'})
        return
    try:
        username = self.get_argument('username')
        password = self.get_argument('password')
        remember = self.get_argument('remember', None)
        redirect = self.get_argument('redirect', '/shell')
        if remember:
            remember = int(remember)
        user = self.users_ins.get_user_by_name(username)
        # per-user rate limit on login attempts
        if user and self.entry('login:user#' + str(user['user_id'])):
            self.flash(0, {'msg': '操作太频繁,请稍后再试', 'sta': 429})
            return
        if user and Tools.generate_password(password, user['user_salt']) == user['user_pswd']:
            self.set_current_sess(user, days=remember)
            logging.info("Current Login: %s" % user['user_id'])
            self.flash(1, {'url': redirect})
            return
    except Exception:
        # narrowed from a bare `except:`; any error falls through to the
        # generic failure message below
        pass
    self.flash(0, {'msg': '用户名或者密码错误!'})
def post(self):
    """Add a talk (comment) to a post, anonymously or as a logged-in user."""
    if not self.human_valid():
        self.flash(0, {'msg': '验证码错误'})
        return
    post = self.posts_ins.get_post_by_id(self.get_argument('poid'))
    if not post:
        self.flash(0, {'msg': '文章不存在'})
        return
    rank = '0'
    usid = '0'
    if self.get_argument('auth', False) and self.current_user:
        # authenticated path: identity comes from the session
        if Tools.chk_user_is_live(self.current_user):
            rank = self.get_runtime_conf('posts_talks_min_rank')
            usid = self.current_user['user_id']
            name = self.current_user['user_name']
            mail = self.current_user['user_mail']
    else:
        # anonymous path: identity comes from the form
        name = self.get_argument('name')
        mail = self.get_argument('mail')
    text = self.get_argument('text')
    s_time = int(time.time())
    try:
        # insert info to talks; update posts_refc
        self.talks_ins.insert_info_to_talks([post['post_id'], self.request.remote_ip, usid, name,
                                             mail, text, rank, s_time, s_time])
    except Exception:
        # BUG FIX: narrowed from a bare `except:` so system-exiting
        # exceptions are not swallowed.
        # NOTE(review): a logged-in but non-live user reaches this point with
        # `name`/`mail` unbound (the NameError lands here) — confirm whether
        # that case should be rejected explicitly earlier.
        # NOTE(review): no success response is sent after a clean insert —
        # confirm the client expects none.
        self.flash(0)
def get_current_user(self):
    """Resolve the logged-in user from session cookies; None when absent
    or when the auth material does not match."""
    usid = self.get_cookie('_usid')
    auid = self.get_secure_cookie('_auid')
    auth = self.get_secure_cookie('_auth')
    if not (usid and auth):
        return None
    user = self.users_ins.get_user_by_id(usid)
    if not user:
        return None
    # both the signed auid and the derived authword must match
    expected = Tools.generate_authword(user['user_atms'], user['user_salt'])
    if user['user_auid'] == auid and expected == auth:
        return user
    return None
def tearDown(self):
    """Dispose of the temporary directory and reset global singletons."""
    # prefer the trash can; hard-delete when that fails
    if not Tools.move_dir_to_trash_can(self.tmpdir):
        rmtree(self.tmpdir)
    os.environ.pop(ENV.settings)
    os.environ.pop("TMPDIR")
    tempfile.tempdir = None
    Database.reset()
    SettingsManager.reset()
def bp_news_add_comment():
    """Create a comment for the logged-in user.

    Requires a JSON payload with non-empty ``content`` and ``news_id``;
    the author is taken from ``g.user``.
    """
    data = request.get_json()
    # BUG FIX: `not data and data is None` only rejected None — an empty
    # JSON object slipped through; reject any falsy payload.
    if not data:
        return Tools.generate_cors_response({'code': 200, 'msg': '缺少必要参数'})
    if not data.get('content'):
        return Tools.generate_cors_response({'code': 200, 'msg': '请填写评论内容'})
    if not data.get('news_id'):
        return Tools.generate_cors_response({'code': 200, 'msg': '请填写评论新闻ID'})
    data['user_id'] = g.user.id
    comment = Comment(data)
    db.session.add(comment)
    db.session.commit()
    return Tools.generate_cors_response({
        'code': 200,
        'msg': '评论添加成功',
        'comment': comment.to_json()
    })
def bp_user_show_user():
    """Return every non-deleted user as a CORS JSON response."""
    users = User.query.filter(User.is_delete == False).all()
    payload = [user.to_json() for user in users]
    return Tools.generate_cors_response({
        'code': 200,
        'msg': '用户信息显示成功',
        'user_list': payload
    })
def commit_failure(self, database, diff_id):
    """Record a failed dissemination in the database and clean up the
    JSON request file.

    Returns REQ_STATUS.failed so callers can propagate the status.
    """
    failed_record = Diffusion(diff_externalid=diff_id,
                              fullrequestId=self.req_id,
                              requestStatus=REQ_STATUS.failed,
                              Date=self._to_datetime(self.date_reception),
                              rxnotif=True)
    with Database.get_app().app_context():
        database.session.add(failed_record)
        database.session.commit()
        LOGGER.info("Committed %s dissemination status into database.",
                    REQ_STATUS.failed)
    # the request file is useless once the request has failed
    if os.path.isfile(self.request_file):
        Tools.remove_file(self.request_file, "JSON request", LOGGER)
    return REQ_STATUS.failed
def bp_news_show_comment():
    """Return every non-deleted comment as a CORS JSON response."""
    comments = Comment.query.filter(Comment.is_delete == False).all()
    payload = [comment.to_json() for comment in comments]
    return Tools.generate_cors_response({
        'code': 200,
        'msg': '请求评论内容成功',
        'comment_list': payload
    })
def bp_news_show_reply():
    """Return every non-deleted reply as a CORS JSON response."""
    replys = Reply.query.filter(Reply.is_delete == False).all()
    payload = [reply.to_json() for reply in replys]
    return Tools.generate_cors_response({
        'code': 200,
        'msg': '请求回复内容成功',
        'reply_list': payload
    })
def ftp_to_xml(element, diff_info, prefix=""):
    # Appends the FTP diffusion sub-elements described by diff_info to
    # element, each tag name prefixed with prefix.
    # FIXME(review): this function references `self` (_get_port_value,
    # original_filename, new_filename) but its signature has no `self`
    # parameter — as written these lines raise NameError unless the function
    # is bound as a method some other way; confirm whether `self` was lost
    # from the signature.
    etree.SubElement(element, prefix + "media").text = "FTP"
    etree.SubElement(element, prefix + "ftp_host").text = Tools.ack_str(diff_info["host"])
    etree.SubElement(element, prefix + "ftp_user").text = Tools.ack_str(diff_info["user"])
    etree.SubElement(element, prefix + "ftp_passwd").text = Tools.ack_str(diff_info["password"])
    etree.SubElement(element, prefix + "ftp_directory").text = Tools.ack_str(diff_info["remotePath"])
    etree.SubElement(element, prefix + "ftp_use_size").text = bin_bool(diff_info["checkFileSize"])
    etree.SubElement(element, prefix + "ftp_passive").text = bin_bool(diff_info["passive"])
    etree.SubElement(element, prefix + "ftp_port").text = self._get_port_value(diff_info)
    etree.SubElement(element, prefix + "ftp_tmp_method").text = "NAME"
    # Final/tmp name resolution: an explicit fileName wins; otherwise files
    # originally named tmp.zip* use the renamed file, and everything else
    # keeps its original name. The tmp name is always final name + ".tmp".
    if diff_info["fileName"] not in [None, ""]:
        etree.SubElement(element, prefix + "ftp_final_file_name").text = Tools.ack_str(diff_info["fileName"])
        etree.SubElement(element, prefix + "ftp_tmp_file_name").text = Tools.ack_str(diff_info["fileName"] + ".tmp")
    elif re.match(r"^tmp\.zip", self.original_filename) is not None:
        etree.SubElement(element, prefix + "ftp_final_file_name").text = Tools.ack_str(self.new_filename)
        etree.SubElement(element, prefix + "ftp_tmp_file_name").text = Tools.ack_str(self.new_filename + ".tmp")
    else:
        etree.SubElement(element, prefix + "ftp_final_file_name").text = Tools.ack_str(self.original_filename)
        etree.SubElement(element, prefix + "ftp_tmp_file_name").text = Tools.ack_str(self.original_filename + ".tmp")
def bp_news_every_news():
    """Return every non-deleted news item as a CORS JSON response."""
    news = News.query.filter(News.is_delete == False).all()
    payload = [item.to_json() for item in news]
    return Tools.generate_cors_response({
        'code': 200,
        'msg': '新闻内容请求成功',
        'news_list': payload
    })
def get(cls, key, alt=None):
    """Return the settings value for *key*, or *alt* when it is absent.

    The settings file is hot-reloaded first when its on-disk checksum has
    changed. Raises RuntimeError when called before load_settings().
    """
    if not cls._loaded:
        raise RuntimeError("Attempting to access SettingsManager "
                           "before it has been loaded.")
    # hot-reload when the file changed since the last read
    checksum = Tools.checksum_file(cls._settings_file)
    if checksum != cls._checksum:
        cls.load_settings()
    # BUG FIX: `cls._parameters.get(key) or alt` returned *alt* for
    # legitimate falsy values such as 0, False or "" — only fall back when
    # the key is missing or None.
    value = cls._parameters.get(key)
    return alt if value is None else value
def package_data(all_files_fetched, diss_instructions):
    """Rename each downloaded file and package it into a difmet archive.

    Files with no matching request in the database are discarded as
    orphans; renaming failures flag every related request as failed.
    """
    for file_path in all_files_fetched:
        filename = os.path.basename(file_path)
        request_id_list = Database.get_id_list_by_filename(filename)
        # no reference => file is an orphan
        if not request_id_list:
            Tools.remove_file(file_path, "orphan file", LOGGER)
            continue
        # keep only req_ids that still have an instruction entry, so we never
        # look for an instruction file already consumed by a previous request
        request_id_list = [req for req in request_id_list
                           if req in diss_instructions]
        LOGGER.info("Processing downloaded file %s linked to "
                    "requests %s", file_path, request_id_list)
        # bind the file to its (possibly multiple) requests and instructions
        diff_manager = DiffMetManager(request_id_list, file_path,
                                      diss_instructions)
        # rename according to the user-defined regex, then archive
        if diff_manager.rename():
            diff_manager.compile_archive()
        else:
            msg = ("Dissemination failed for requests %s because user settings "
                   "regex resulted in incorrect filename for difmet"
                   % request_id_list)
            LOGGER.error(msg)
            for req_id in request_id_list:
                Database.update_field_by_query("requestStatus",
                                               REQ_STATUS.failed,
                                               **dict(fullrequestId=req_id))
                Database.update_field_by_query("message", msg,
                                               **dict(fullrequestId=req_id))
def bp_news_add_reply():
    """Create a reply for the logged-in user.

    Requires a JSON payload with non-empty ``content`` and ``comment_id``;
    the author is taken from ``g.user``.
    """
    data = request.get_json()
    # BUG FIX: `not data and data is None` only rejected None — an empty
    # JSON object slipped through; reject any falsy payload.
    if not data:
        return Tools.generate_cors_response({'code': 200, 'msg': '缺少必要参数'})
    if not data.get('content'):
        # also dropped a redundant extra pair of parentheses here
        return Tools.generate_cors_response({'code': 200, 'msg': '请填写回复内容'})
    if not data.get('comment_id'):
        return Tools.generate_cors_response({
            'code': 200,
            'msg': '请填写需回复的评论ID'
        })
    data['user_id'] = g.user.id
    reply = Reply(data)
    db.session.add(reply)
    db.session.commit()
    return Tools.generate_cors_response({
        'code': 200,
        'msg': '回复添加成功',
        'reply': reply.to_json()
    })
def tearDown(self):
    """Clear temp repertories, stop the SFTP server and reset singletons."""
    # prefer the trash can; hard-delete when that fails
    if not Tools.move_dir_to_trash_can(self.tmpdir):
        rmtree(self.tmpdir)
    SFTPserver.stop_server()
    os.environ.pop(ENV.settings)
    os.environ.pop("TMPDIR")
    tempfile.tempdir = None
    Database.reset()
    SettingsManager.reset()
    DebugSettingsManager.reset()
def tearDown(self):
    """Stop the FTP server, clear temp repertories and reset singletons."""
    FTPserver.stop_server()
    # prefer the trash can; hard-delete when that fails
    if not Tools.move_dir_to_trash_can(self.tmpdir):
        rmtree(self.tmpdir)
    # clean up environment
    os.environ.pop(ENV.settings)
    os.environ.pop("TMPDIR")
    tempfile.tempdir = None
    SettingsManager.reset()
    HarnessTree.reset()
def tearDown(self):
    """Clear temp repertories, stop the SFTP server and reset every
    singleton used by the test."""
    # prefer the trash can; hard-delete when that fails
    if not Tools.move_dir_to_trash_can(self.tmpdir):
        rmtree(self.tmpdir)
    SFTPserver.stop_server()
    os.environ.pop(ENV.settings)
    os.environ.pop("TMPDIR")
    tempfile.tempdir = None
    Database.reset()
    SettingsManager.reset()
    HarnessTree.reset()
    DebugSettingsManager.reset()
def GenKnapsack(n=1000, v=10, r=5, type_wp='uc', type_c='rk',
                addr="problems/knapsack"):
    """Generate a random knapsack instance, solve it, and save it to *addr*.

    n       : number of items
    v       : value bound for weights/profits (drawn in 1..v)
    r       : correlation offset for 'wc'/'sc' profit generation
    type_wp : weight/profit correlation — 'uc' uncorrelated,
              'wc' weakly correlated, 'sc' strongly correlated
    type_c  : capacity rule — 'rk' or 'ak' (per the original comment,
              restrictive vs average knapsack — confirm which maps to which)
    """
    assert type_wp in ['uc', 'wc', 'sc'], 'type_wp is not valid'
    # BUG FIX: the second assertion's message said 'type_wp' when type_c
    # was the invalid argument.
    assert type_c in ['rk', 'ak'], 'type_c is not valid'
    # weights drawn uniformly from 1..v
    w = (1 + np.round(np.random.rand(n) * (v - 1)))
    if type_wp == 'uc':
        p = 1 + np.round(np.random.rand(n) * (v - 1))
    elif type_wp == 'wc':
        p = w + np.round(r - 2 * r * np.random.rand(n))
        # clamp non-positive profits back to the item weight
        p[p <= 0] = w[p <= 0]
    elif type_wp == 'sc':
        p = w + r
    if type_c == 'rk':
        cap = int(2 * v)
    elif type_c == 'ak':
        cap = int(0.5 * np.sum(w))
    # optimal value via the exact solver, stored for benchmarking
    th_best, _ = knapsack(w, p, cap)
    instance = {'w': w, 'p': p, 'cap': cap, 'opt': th_best}
    Tools.save_to_file(os.path.join(addr, 'KP_{}_{}'.format(type_wp, type_c)),
                       instance)
def load_settings(cls, settings_file=DEFAULT_SETTINGS_PATH, reloading=False): loaded = False # load yaml settings file path = cls._settings_file = os.environ.get(ENV.settings, None) if path is None: path = cls._settings_file = os.path.join(os.path.dirname(__file__), settings_file) checksum = Tools.checksum_file(path) if reloading or checksum != cls._checksum: for i in range(1, MAX_REGEX + 1): cls._parameters["fileRegex%i" % i] = dict() with open(path, "r") as file_: settings = yaml.safe_load(file_) # TODO implement value check # TODO check if interger for port value # TODO check if diffFileName has been defined # TODO should be better than this hack exceptions = ["fileregex%i" % i for i in range(1, MAX_REGEX + 1)] settings = compact_dict(settings, exceptions=exceptions) # set up with class variables for key in cls._parameters.keys(): for set_key, value in settings.items(): if set_key.lower() == key.lower() and value is not None: cls._parameters[key] = value cls._parameters = dict(cls._parameters) cls._loaded = loaded = True cls._checksum = checksum return loaded
def post(self, *args):
    """Update the current user's profile: mail, signature, meta, avatar
    upload, and optionally the password."""
    try:
        user = self.current_user
        # per-user rate limit
        if self.entry('panel:user#' + str(user['user_id'])):
            self.flash(0, {'msg': '操作太频繁,请稍后再试', 'sta': 429})
            return
        user_mail = self.get_argument('mail')
        user_sign = self.get_argument('sign', '')
        user_meta = self.get_argument('meta', '')
        user_pswd = self.get_argument('pswd', None)
        user_npwd = self.get_argument('npwd', None)
        user_rpwd = self.get_argument('rpwd', None)
        if not user_mail:
            self.flash(0)
            return
        if not Tools.chk_is_user_mail(user_mail):
            self.flash(0, {'msg': '无效的用户邮箱'})
            return
        # a changed mail address must not collide with another account
        if user_mail != user['user_mail'] and self.users_ins.get_user_by_mail(user_mail):
            self.flash(0, {'msg': '用户邮箱已存在'})
            return
        user_logo = user['user_logo']
        if 'logo' in self.request.files and len(self.request.files['logo']) > 0:
            res = self.request.files['logo'][0]
            # validate name, size (< 1 MiB) and content type of the upload
            if 'filename' not in res or res['filename'] == '':
                self.flash(0, {'msg': '无效的文件名称'})
                return
            if 'body' not in res or not (0 < len(res['body']) < 1024 * 1024):
                self.flash(0, {'msg': '无效的文件长度'})
                return
            if 'content_type' not in res or res['content_type'].find('/') < 1 or len(res['content_type']) > 128:
                self.flash(0, {'msg': '无效的文件类型'})
                return
            # normalize the extension from the declared content type, then
            # whitelist image extensions only
            ets = mimetypes.guess_all_extensions(res['content_type'])
            ext = os.path.splitext(res['filename'])[1].lower()
            if ets and ext not in ets:
                ext = ets[0]
            ets = [".jpg", ".jpeg", ".gif", ".png", ".bmp"]
            if ext not in ets:
                self.flash(0, {'msg': '文件类型不支持'})
                return
            # content-addressed storage path derived from the body's md5
            md5 = hashlib.md5()
            md5.update(res['body'])
            key = md5.hexdigest()
            dir = '/www'
            url = '/upload/' + time.strftime('%Y/%m/%d/') + key[0] + key[1] + key[30] + key[31] + '/' + key + ext
            uri = self.settings['root_path'] + dir + url
            url = '/static/img/www' + url
            if not os.path.exists(os.path.dirname(uri)):
                # BUG FIX: `0777` is Python 2 octal syntax (a SyntaxError in
                # Python 3); use the 0o prefix.
                os.makedirs(os.path.dirname(uri), mode=0o777)
            # BUG FIX: the upload body is bytes — write in binary mode, and
            # use a context manager so the handle is closed on error too.
            with open(uri, 'wb') as fin:
                fin.write(res['body'])
            # record the stored file in the database
            self.files_ins.insert_user_logo_info([key, dir, url, res['content_type'], res['filename'], int(time.time())])
            user_logo = url
        if user_npwd:
            # a password change needs: new password >= 6 chars, matching
            # confirmation, and the correct current password
            if not len(user_npwd) >= 6 or user_npwd != user_rpwd or Tools.generate_password(user_pswd, user['user_salt']) != user['user_pswd']:
                self.flash(0, {'msg': '密码输入错误'})
                return
            # rotate auid and salt alongside the password
            user_auid = Tools.generate_randauid()
            user_salt = Tools.generate_randsalt()
            self.users_ins.update_user_info_by_pwd([user_auid, user_mail, user_logo, user_sign, user_meta,
                                                    Tools.generate_password(user_npwd, user_salt), user_salt,
                                                    int(time.time()), int(time.time()), user['user_id']])
        else:
            self.users_ins.update_user_info_by_other([user_mail, user_logo, user_sign, user_meta,
                                                      int(time.time()), user['user_id']])
        self.flash(1, {'msg': '更新成功'})
        return
    except Exception:
        # narrowed from a bare `except:`; any error falls through to the
        # generic failure response below
        pass
    self.flash(0)
def set_current_sess(self, user, days=30):
    """Install the session cookies identifying *user* for *days* days."""
    # plain cookie for the id, signed cookies for the auth material
    self.set_cookie('_usid', str(user['user_id']), expires_days=days)
    self.set_secure_cookie('_auid', str(user['user_auid']),
                           expires_days=days, httponly=True)
    authword = Tools.generate_authword(user['user_atms'], user['user_salt'])
    self.set_secure_cookie('_auth', authword)
def wrapper(self, *args, **kwargs):
    """Run the wrapped handler only for a live user; otherwise respond 403
    with the login URL."""
    if not Tools.chk_user_is_live(self.current_user):
        self.flash(0, {'sta': 403, 'url': self.get_login_url()})
        return
    return method(self, *args, **kwargs)