def run_apt_only(self):
    if self._is_databinding_enabled and self._should_run_databinding_apt():
        apt_args = self._generate_java_compile_args(extra_javac_args_enabled=True)
        self.debug('apt exec: ' + ' '.join(apt_args))
        output, err, code = cexec(apt_args, callback=None)

        if code != 0:
            raise FreelineException('apt compile failed.', '{}\n{}'.format(output, err))

        if self._apt_output_dir and os.path.exists(self._apt_output_dir):
            apt_cache_path = os.path.join(self._config['build_cache_dir'], 'apt_files_stat_cache.json')
            apt_cache = None  # avoid an unbound reference when the cache file does not exist yet
            if os.path.exists(apt_cache_path):
                apt_cache = load_json_cache(apt_cache_path)
            for dirpath, dirnames, files in os.walk(self._apt_output_dir):
                for fn in files:
                    fpath = os.path.join(dirpath, fn)
                    if apt_cache and self._name in apt_cache:
                        if fpath in apt_cache[self._name]:
                            new_md5 = get_md5(fpath)
                            if new_md5 != apt_cache[self._name][fpath]['md5']:
                                self.debug('detect new md5 value, add apt file to change list: {}'.format(fpath))
                                self._changed_files['src'].append(fpath)
                        else:
                            self.debug('find new apt file, add to change list: {}'.format(fpath))
                            self._changed_files['src'].append(fpath)
                    else:
                        self.debug('apt cache not found, add to change list: {}'.format(fpath))
                        self._changed_files['src'].append(fpath)
def handle_db(args, name, now):
    try:
        conn = utils.get_mysql_conn(args.host, args.dbUser, args.dbPassword, args.database)
        cursor = conn.cursor()
        cursor.execute(args.sql)
    except:
        utils.edit_list(now, 'err_generate\n')
        logger().error(['err_data_source', now + '/' + args.host])
        return {}

    meta = {}
    count = 0
    rows = cursor.fetchmany(int(args.fetchSize))
    with open(name, 'a', encoding='utf-8') as f:
        while rows:
            for row in rows:
                v = '|||'.join(('%s' % i).replace('|||', '---') for i in list(row))
                f.write(v.replace('\n', '') + '\n')
                count = count + 1
            if count % int(args.fetchSize) == 0:
                rows = cursor.fetchmany(int(args.fetchSize))
            else:
                rows = False

    with open(name, 'r', encoding='utf-8') as f:
        content = f.read()

    index = cursor.description
    cols = []
    for i in range(len(index)):
        cols.append(index[i][0])

    meta["size"] = count
    meta["md5"] = utils.get_md5(content)
    meta["colName"] = ','.join(cols)
    return meta
def ex_json(post_data):
    post_data = json.loads(post_data)
    for p_name, p_value in post_data.items():
        p_id = get_md5(self.data["host"] + self.path + decode(p_name) + self.data["method"])
        p_state = self.get_Ostate(str(p_value))
        p_type = "post"
        yield (p_id, p_state, p_type, p_name)
def execute(self):
    cache_path = os.path.join(self._config['build_cache_dir'], 'stat_cache.json')
    stat_cache = load_json_cache(cache_path)
    cache_path_md5 = os.path.join(self._config['build_cache_dir'], 'stat_cache_md5.json')
    stat_cache_md5 = load_json_cache(cache_path_md5)
    for module, file_dict in self._changed_files.iteritems():
        for key, files in file_dict.iteritems():
            if key != 'apt':
                for fpath in files:
                    if not fpath.startswith(self._config['build_cache_dir']) and os.path.exists(fpath):
                        self.debug('refresh {} stat'.format(fpath))
                        os.utime(fpath, None)
                        if fpath not in stat_cache[module]:
                            stat_cache[module][fpath] = {}
                        if fpath in stat_cache_md5:
                            stat_cache_md5[fpath] = get_md5(fpath)
                        stat_cache[module][fpath]['mtime'] = os.path.getmtime(fpath)
                        stat_cache[module][fpath]['size'] = os.path.getsize(fpath)
    write_json_cache(cache_path, stat_cache)
    write_json_cache(cache_path_md5, stat_cache_md5)
def content_length(self):
    content_length = self.data["content_length"]
    p_id = get_md5(self.data["host"] + self.path + "content_length" + self.data["method"])
    p_state = self.get_Ostate(content_length)
    p_type = "content_length"
    p_name = ""
    return (p_id, p_state, p_type, p_name)
def add_new_url(self, url):
    if url is None:
        return
    url_md5 = get_md5(url)[8:-8]
    if url not in self.new_urls and url_md5 not in self.old_urls:
        print("add new url.")
        self.new_urls.add(url)
def get_resources(pe):
    resources = []
    if hasattr(pe, 'DIRECTORY_ENTRY_RESOURCE'):
        count = 1
        for resource_type in pe.DIRECTORY_ENTRY_RESOURCE.entries:
            try:
                resource = {}
                if resource_type.name is not None:
                    name = str(resource_type.name)
                else:
                    name = str(pefile.RESOURCE_TYPE.get(resource_type.struct.Id, "UNKNOWN"))
                if name is None:
                    name = str(resource_type.struct.Id)
                if hasattr(resource_type, 'directory'):
                    for resource_id in resource_type.directory.entries:
                        if hasattr(resource_id, 'directory'):
                            for resource_lang in resource_id.directory.entries:
                                data = pe.get_data(resource_lang.data.struct.OffsetToData,
                                                   resource_lang.data.struct.Size)
                                entropy = get_entropy(data)
                                filetype = get_type(data)
                                md5 = get_md5(data)
                                sha256 = get_sha256(data)
                                language = pefile.LANG.get(resource_lang.data.lang, None)
                                language_desc = LCID.get(resource_lang.id, 'unknown language')
                                sublanguage = pefile.get_sublang_name_for_lang(
                                    resource_lang.data.lang, resource_lang.data.sublang)
                                offset = ('%-8s' % hex(resource_lang.data.struct.OffsetToData)).strip()
                                size = ('%-8s' % hex(resource_lang.data.struct.Size)).strip()

                                resource = [count, name, offset, md5, sha256, size,
                                            filetype, entropy, language, sublanguage, language_desc]

                                # Dump resources if requested
                                if self.dump and pe == self.pe:
                                    if self.dump:
                                        folder = self.dump
                                    else:
                                        folder = tempfile.mkdtemp()
                                    resource_path = path.join(folder, '{0}_{1}_{2}'.format(
                                        self.sha256, offset, name))
                                    resource.append(resource_path)
                                    with open(resource_path, 'wb') as resource_handle:
                                        resource_handle.write(data)

                                resources.append(resource)
                                count += 1
            except Exception as e:
                log.error(e)
                continue
    return resources
def http_type(self):
    http_type = self.data["http_type"]
    p_id = get_md5(self.data["host"] + self.path + "http_type" + self.data["method"])
    p_state = self.get_Ostate(http_type)
    p_type = "http_type"
    p_name = ""
    return (p_id, p_state, p_type, p_name)
def tmp(file, tmp_file, salt, col_names, encrypt_col, unencrypt_cols):
    encrypt_col = col_names.index(encrypt_col)
    unencrypt_cols = unencrypt_cols.split(',')
    for i in range(len(unencrypt_cols)):
        unencrypt_cols[i] = col_names.index(unencrypt_cols[i])
    try:
        unencrypt_cols.remove(encrypt_col)
    except:
        pass
    try:
        with open(file, 'r', encoding='utf-8') as f:
            line_count = 0
            file_count = 0
            for line in f:
                cols = line.strip().split('|||')
                v = utils.get_md5(cols[encrypt_col] + salt)
                for col in unencrypt_cols:
                    v = v + "|||" + cols[col]
                if line_count == 3000000:
                    file_count = file_count + 1
                    line_count = 0
                with open(tmp_file + str(file_count), 'a') as f_tmp:
                    f_tmp.write(v + '\n')
                line_count = line_count + 1
        return True
    except:
        return False
def handle_task(self, batch_index, sub_sources):
    """
    :param batch_index: thread id
    :param sub_sources: list of hub pages (url, format_class)
    :return: None
    """
    for i, web_seed in enumerate(sub_sources, 0):
        url, web_name = web_seed
        hub_page_crawl_interval = self.web_conf.get(web_name).get("crawl_interval")
        # Look up the timestamp of the most recent crawl for this hub page
        mp_uid = get_md5(url)
        redis_key = 'last_crawl_ts_%s' % mp_uid
        redis_key = self.BUZZQA_CRAWL_INFO.format(redis_key)
        value = read_redit(self._spring_client, redis_key)
        last_crawl_ts = int(value) if value else 0
        curr_ts = int(time.time())
        if curr_ts - last_crawl_ts < hub_page_crawl_interval:
            log_content = "cur_ts - last_crawl_ts=%d, hub_page_crawl_interval=%d, " \
                          "dont need update" % (curr_ts - last_crawl_ts, hub_page_crawl_interval)
            logging.info(log_content)
            continue
        goal_pages_info = self.get_info_from_hub_page(web_seed)
        # print(goal_pages_info)
        if goal_pages_info is None:  # the url is a junk page
            return
        self.process_goal_pages(goal_pages_info)
        # Save this crawl's timestamp back to redis
        put_data2redit(self._spring_client, redis_key, str(curr_ts))
def adds_https_pool(self, iter):
    for proxy in iter:
        m = utils.get_md5(proxy[0].encode())
        if not self.__db.sismember(self.__HTTPS_MD5_KEY, m):
            self.__db.sadd(self.__HTTPS_POOL_KEY, proxy)
            self.__db.sadd(self.__HTTPS_MD5_KEY, m)
            print('https', proxy)
def parse(html, xpaths):
    tree = Selector(text=html)
    trs = tree.xpath(xpaths["trs"])
    for tr in trs:
        ip = xpath_extract(tr, xpaths.get("ip"))
        port = xpath_extract(tr, xpaths.get("port"))
        address = xpath_extract(tr, xpaths.get("address"))
        style = xpath_extract(tr, xpaths.get("style"), "透明")
        protocol = xpath_extract(tr, xpaths.get("protocol"), "HTTP")
        item = {
            "ip": ip,
            "port": port,
            "style": style_mapping.get(style, style),
            "protocol": protocol,
            "address": address,
            "source": xpaths.get("source", ""),
            "create_time": datetime.now(),
            "update_time": datetime.now(),
            "md5": get_md5("{}{}".format(ip, port)),
        }
        pprint(item)
        yield item
def pop_temp_buffer(self):
    reval = self.__db.spop(self.__TEMP_BUFFER_KEY)
    if reval:
        item = eval(reval)
        self.__db.srem(self.__TEMP_BUFFER_MD5_KEY, utils.get_md5(item[0].encode()))
    return reval
def add_mark(request):
    status = 6003
    req = request.DATA
    message = 'params error!'
    tag = req.get('tag')
    name = req.get('name')
    href = req.get('href')
    icon = req.get('icon', '0')
    pwd = req.get('pwd')
    if config['OP_PWD'] != get_md5(pwd):
        status = 5001
        message = 'password error!'
        return Response({'status': status, 'result': {}, 'message': message})
    if tag and name and href and pwd:
        try:
            obj, created = MarkTag.objects.get_or_create(tag_name=tag)
            u_obj = UrlDetail(tag=obj, name=name, url=href, icon=icon)
            u_obj.save()
            status = 2000
            message = 'success'
        except Exception, e:
            print "except reson", e
def marks(request):
    status = 6003
    message = "params error!"
    req = request.DATA
    pwd = req.get('pwd')
    mark = req.get('mark')
    if config['OP_PWD'] != get_md5(pwd):
        status = 5001
        message = 'password error!'
        return Response({'status': status, 'result': {}, 'message': message})
    if mark:
        try:
            if request.method == 'POST':
                UrlDetail.objects.filter(id=mark['id']).update(name=mark['name'], url=mark['url'])
                status = 2000
                message = "modify row success!"
            elif request.method == 'PUT':
                UrlDetail.objects.get(id=mark['id']).delete()
                status = 2000
                message = "delete row success!"
        except Exception, e:
            print 'except reson:', e
def get_local_df(root_path):
    file_list = []
    for root, dirs, files in os.walk(root_path):
        root = Path(root)
        for x in dirs:
            path = root / x
            file_list.append({
                "local_type": "folder",
                "local_path": path,
                "local_mtime": path.stat().st_mtime,
            })
        for x in files:
            path = root / x
            file_list.append({
                "local_type": path.suffix.strip("."),
                "local_path": path,
                "local_mtime": max(path.stat().st_mtime, path.stat().st_ctime),
                "local_md5": get_md5(path),
            })
    return pd.DataFrame(file_list)
def handle_file(args, name, now):
    meta = {}
    count = 0
    col_size = len(args.columnName.split(','))
    try:
        rf = open(args.sourceFile, 'r', encoding='utf-8')
    except:
        utils.edit_list(now, 'err_generate\n')
        logger().error(['err_data_source', args.sourceFile])
        return {}

    with open(name, 'a', encoding='utf-8') as wf:
        for line in rf:
            count = count + 1
            rows = line.strip().split(args.separator)
            if len(rows) != col_size:
                utils.edit_list(now, 'err_generate\n')
                logger().error(['err_source_info', now + '/' + str(count)])
                return {}
            wf.write('|||'.join(('%s' % i).replace('|||', '---') for i in rows).replace('\n', '') + '\n')

    with open(name, 'r', encoding='utf-8') as f:
        content = f.read()

    meta["size"] = count
    meta["md5"] = utils.get_md5(content)
    meta["colName"] = args.columnName
    return meta
def process_goal_pages(self, goal_pages_info):
    # Items from the most recent crawl
    goal_page_urls = goal_pages_info.get("goal_page_urls")
    answers_num = goal_pages_info.get("ans_num")
    source_name = goal_pages_info.get("source_name")
    # Handle every goal page found on the hub page
    for i in range(len(goal_page_urls)):
        time.sleep(0.5)  # throttle to avoid HTTP 429 responses
        url = goal_page_urls[i]
        # If the hub page does not expose an answer count, always crawl the goal page
        if answers_num is None or len(answers_num) == 0:
            qa_info = self.crawl_goal_page(url, source_name)
            if qa_info is not None:
                self.store_data_to_kafka(qa_info)
            continue
        mp_uid = get_md5(url)
        redis_key = 'goal_page_answers_num_%s' % mp_uid
        redis_key = self.BUZZQA_CRAWL_INFO.format(redis_key)
        old_ans_num = read_redit(self._spring_client, redis_key)
        old_ans_num = int(old_ans_num) if old_ans_num else None
        ans_num = answers_num[i]
        if old_ans_num is None or old_ans_num != ans_num:
            qa_info = self.crawl_goal_page(url, source_name)
            if qa_info is not None:
                self.store_data_to_kafka(qa_info)
                logging.info("Put new data to kafka. crawl goal page %s ." % url)
            put_data2redit(self._spring_client, redis_key, str(ans_num))
def post_p_name(self, p_names):
    p_state = self.get_Ostate(p_names)
    p_type = "post_pname"
    p_name = ""
    p_id = get_md5(self.data["host"] + self.path + self.data["method"] + p_type)
    return (p_id, p_state, p_type, p_name)
def backup_res_files(self):
    pending_remove = []
    for fpath in self._changed_files['res']:
        # res/values/colors.xml -> build/target/generated-sources/res/values/colors.xml
        # res/values/colors.xml -> build/intermediates/res/merged/debug/values/colors.xml
        dst_path = self._get_res_incremental_dst_path(fpath)
        is_new_file = False
        if not os.path.exists(dst_path):
            is_new_file = True
            self._new_res_list.append(dst_path)

        if fpath in self._merged_xml_cache:
            backup_res_file(dst_path)  # backup old file
            cache = self._merged_xml_cache[fpath]
            write_file_content(dst_path, cache)  # write merged cache to dst path
        else:
            if is_new_file:
                shutil.copyfile(fpath, dst_path)  # just copy to dst path, if this is new file
                self.debug('copy {} to {}'.format(fpath, dst_path))
                continue
            old_file_md5 = get_md5(fpath)
            dst_file_md5 = get_md5(dst_path)
            if old_file_md5 != dst_file_md5:
                backup_res_file(dst_path)
                shutil.copyfile(fpath, dst_path)
                self.debug('copy {} to {}'.format(fpath, dst_path))
            else:
                pending_remove.append(fpath)  # file is not changed, so remove from changed list
        os.utime(dst_path, None)

    for fpath in self._changed_files['assets']:
        dst_path = self._get_res_incremental_dst_path(fpath)
        if os.path.exists(dst_path):
            backup_res_file(dst_path)
        else:
            self._new_res_list.append(dst_path)
        shutil.copyfile(fpath, dst_path)

    for fpath in pending_remove:
        if fpath in self._changed_files['res']:
            self._changed_files['res'].remove(fpath)
def api_login():
    if "phone" not in request.args:
        error = {"status": -1, "message": u"非法请求(未找到phone字段)"}
        return jsonify(error)
    if "password" not in request.args:
        error = {"status": -1, "message": u"非法请求(未找到password字段)"}
        return jsonify(error)

    phone = request.args['phone']
    psd = request.args['password']
    db = sql.UserHelper()
    user = db.get_user(phone)
    ct = utils.get_current_time()
    if len(user) == 0:
        res = {"status": -1, "message": u"用户不存在"}
        return jsonify(res)

    user = user[0]
    phone = user[0]
    password = user[1]
    token = user[2]
    time = user[3]
    title = user[6]
    profile = user[7]
    head = user[8]
    if not password == psd:
        error = {"status": -1, "message": u"密码错误"}
        return jsonify(error)

    # Token expired: issue a new one derived from phone, password and expiry time
    if ct > long(time):
        time = ct + token_time
        token = utils.get_md5(phone + psd + str(time))
        db.save_user_token(phone, token)
    res = {
        "token": token,
        "time": time,
        "message": "登录成功",
        "phone": phone,
        "title": title,
        "profile": profile,
        "head": head
    }
    return jsonify(res)
def uri_p_name(self):
    p_name = ""
    for p in self.payload.split("&"):
        p_name += p.split("=")[0]
    p_state = self.get_Ostate(p_name)
    p_type = "uri_pname"
    p_id = get_md5(self.data["host"] + self.path + self.data["method"] + p_type)
    p_name = ""
    return (p_id, p_state, p_type, p_name)
def __init__(self, serial_number, access_code, password, message):
    jid = "*****@*****.**" % serial_number
    connection_password = "******" % access_code
    sleekxmpp.ClientXMPP.__init__(self, jid, connection_password)
    self.recipient = "*****@*****.**" % serial_number
    self.msg = message
    self.add_event_handler("session_start", self.start)
    self.add_event_handler("message", self.message)
    self.connected = False
    # Derive the message key from the MD5 digests of (access code + secret) and (secret + password)
    abyte1 = get_md5(access_code.encode() + self.secret)
    abyte2 = get_md5(self.secret + password.encode())
    self.key = abyte1 + abyte2
def check_r_md5(self):
    old_md5 = None
    old_r_file = self._finder.get_dst_r_path(config=self._config)
    self.debug("{} old R.java path: {}".format(self._name, old_r_file))
    new_r_file = DirectoryFinder.get_r_file_path(self._finder.get_backup_dir())
    self.debug("{} new R.java path: {}".format(self._name, new_r_file))
    if old_r_file and os.path.exists(old_r_file):
        old_md5 = get_md5(old_r_file)
    if new_r_file and os.path.exists(new_r_file):
        new_md5 = get_md5(new_r_file)
        if not old_md5:
            mark_r_changed_flag(self._name, self._cache_dir)
            self._changed_files['src'].append(new_r_file)
            self.debug('find R.java changed (origin R.java not exists)')
        else:
            if new_md5 != old_md5:
                mark_r_changed_flag(self._name, self._cache_dir)
                self._changed_files['src'].append(new_r_file)
                self.debug('find R.java changed (md5 value is different from origin R.java)')
def ex_urlencoded(post_data):
    for p in post_data.split("&"):
        p_list = p.split("=")
        p_name = p_list[0]
        if len(p_list) > 1:
            p_value = reduce(operator.add, p_list[1:])
            p_id = get_md5(self.data["host"] + self.path + decode(p_name) + self.data["method"])
            p_state = self.get_Ostate(p_value)
            p_type = "post"
            yield (p_id, p_state, p_type, p_name)
def url(self):
    for p in self.payload.split("&"):
        p_list = p.split("=")
        p_name = p_list[0]
        if len(p_list) > 1:
            p_value = reduce(operator.add, p_list[1:])
            p_id = get_md5(self.data["host"] + self.path +
                           p_name.encode().decode('utf8', 'ignore') + self.data["method"])
            p_state = self.get_Ostate(p_value)
            p_type = "uri"
            yield (p_id, p_state, p_type, p_name)
def cookie_p_name(self):
    cookie = urllib.parse.unquote(self.data["cookie"].encode("utf-8").decode('utf8', 'ignore'))
    p_name = ""
    for p in cookie.split("; "):
        if p.strip():
            p_name += p.split("=")[0]
    p_type = "cookie_pname"
    p_id = get_md5(self.data["host"] + self.path + self.data["method"] + p_type)
    p_state = self.get_Ostate(p_name)
    p_name = ""
    return (p_id, p_state, p_type, p_name)
def check_r_md5(self):
    old_md5 = None
    old_r_file = self._finder.get_dst_r_path(config=self._config)
    self.debug("{} old R.java path: {}".format(self._name, old_r_file))
    new_r_file = DirectoryFinder.get_r_file_path(self._finder.get_backup_dir())
    self.debug("{} new R.java path: {}".format(self._name, new_r_file))
    if old_r_file and os.path.exists(old_r_file):
        old_md5 = get_md5(old_r_file)
    if new_r_file and os.path.exists(new_r_file):
        new_md5 = get_md5(new_r_file)
        if not old_md5:
            mark_r_changed_flag(self._name, self._cache_dir)
            AndroidIncBuildInvoker.fix_for_windows(new_r_file)
            self._changed_files['src'].append(new_r_file)
            self.debug('find R.java changed (origin R.java not exists)')
        else:
            if new_md5 != old_md5:
                mark_r_changed_flag(self._name, self._cache_dir)
                AndroidIncBuildInvoker.fix_for_windows(new_r_file)
                self._changed_files['src'].append(new_r_file)
                self.debug('find R.java changed (md5 value is different from origin R.java)')
def cookie(self):
    cookies = urllib.parse.unquote(self.data["cookie"].encode("utf-8").decode('utf8', 'ignore'))
    for p in cookies.split("; "):
        if p.strip():
            p_list = p.split("=")
            p_name = p_list[0]
            if len(p_list) > 1:
                p_value = reduce(operator.add, p_list[1:])
                p_id = get_md5(self.data["host"] + self.path + decode(p_name) + self.data["method"])
                p_state = self.get_Ostate(p_value)
                p_type = "cookie"
                yield (p_id, p_state, p_type, p_name)
def form_valid(self, form):
    if form.is_valid():
        with atomic():
            user = form.save(False)
            user.is_active = False
            user.source = 'Register'
            user.save(True)
            site = get_current_site().domain
            sign = get_md5(get_md5(settings.SECRET_KEY + str(user.id)))
            if settings.DEBUG:
                site = '127.0.0.1:8000'
            path = reverse('account:result')
            url = "http://{site}{path}?type=validation&id={id}&sign={sign}".format(
                site=site, path=path, id=user.id, sign=sign)
            content = """
                <p>请点击下面链接验证您的邮箱</p>
                <a href="{url}" rel="bookmark">{url}</a>
                再次感谢您!
                <br />
                如果上面链接无法打开,请将此链接复制至浏览器。
                {url}
                """.format(url=url)
            send_email(emailto=[user.email, ], title='验证您的电子邮箱', content=content)
            url = reverse('account:result') + '?type=register&id=' + str(user.id)
            return HttpResponseRedirect(url)
    else:
        return self.render_to_response({'form': form})
def md5(self):
    """
    Return the md5 of the current qstat full output.

    Used for long_poll refresh: the client checks its last known status md5 against this,
    and only gets a reply when this output changes compared to its md5.

    :return:
    :rtype: str
    """
    from utils import get_md5
    # from hashlib import md5
    # m = md5()
    # m.update(str(self.html))
    # return m.hexdigest()
    return get_md5(str(self.html))
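Every snippet in this section imports a project-local get_md5 helper that is not shown here. A minimal sketch of what such a helper might look like for string input, modelled on the hashlib calls commented out in the property above (hypothetical, not taken from any of these projects):

def get_md5(text):
    """Return the hex MD5 digest of a string (UTF-8 encoded) or of raw bytes."""
    import hashlib
    if isinstance(text, str):  # assumption: callers may pass either str or bytes
        text = text.encode('utf-8')
    return hashlib.md5(text).hexdigest()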
def execute(self):
    cache_path = os.path.join(self._config['build_cache_dir'], 'stat_cache.json')
    stat_cache = load_json_cache(cache_path)
    for module, file_dict in self._changed_files.iteritems():
        for key, files in file_dict.iteritems():
            if key != 'apt':
                for fpath in files:
                    if not fpath.startswith(self._config['build_cache_dir']) and os.path.exists(fpath):
                        self.debug('refresh {} stat'.format(fpath))
                        os.utime(fpath, None)
                        if fpath not in stat_cache[module]:
                            stat_cache[module][fpath] = {}
                        stat_cache[module][fpath]['mtime'] = os.path.getmtime(fpath)
                        stat_cache[module][fpath]['md5'] = get_md5(fpath)
    write_json_cache(cache_path, stat_cache)
def upload_cropped_file(file_path, mid, md5_orig, crop_orig):
    access_key = '5haoQZguw4iGPnjUuJnhOGufZMjrQnuSdySzGboj'
    secret_key = 'OADMEtVegAXAhCJBhRSXXeEd_YRYzEPyHwzJDs95'
    q = Auth(access_key, secret_key)
    up_token = q.upload_token('cdstatic')
    key = utils.get_md5(file_path)
    ret, info = put_file(up_token, key, file_path)
    if info.status_code != 200:
        return -4

    # update database
    obj = Rawfiles.objects.filter(pac=mid, download_url=md5_orig, crop=crop_orig)
    if len(obj) <= 0:
        print 'no history record with maker_id:%s' % mid
        return -5
    print "audio crop: update record %s,%s" % (mid, md5_orig)
    obj.update(processed_url=key)
    return 0
def upload(request, task_id):
    task = CovTask.objects.get(id=task_id)
    if request.method == 'POST':
        uf = UploadForm(request.POST, request.FILES)
        if uf.is_valid():
            task.last_upload_time = datetime.datetime.now()
            task.save()
            cov_reports = CovReport.objects.filter(cov_task=task,
                                                   account=request.session[u'django_mail'].split('@')[0])
            if 0 >= len(cov_reports):
                cov_report = CovReport()
                cov_report.cov_task = task
                cov_report.name = request.session[u'django_name']
                cov_report.account = request.session[u'django_mail'].split('@')[0]
                cov_report.state = u'init'
                cov_report.cov_class = u'-'
                cov_report.cov_method = u'-'
                cov_report.cov_block = u'-'
                cov_report.cov_line = u'-'
                cov_report.save()
            else:
                cov_report = cov_reports[0]
            cov_data = CovData()
            cov_data.cov_task = task
            cov_data.user_name = request.session[u'django_name']
            cov_data.user_account = request.session[u'django_mail'].split('@')[0]
            cov_data.data = uf.cleaned_data['data']
            cov_data.file_name = cov_data.data.name
            cov_data.cov_report = cov_report
            cov_data.save()
            cov_data.file_md5 = utils.get_md5(cov_data.data.file.name)
            cov_data.save()
            return render(request, 'cov_upload.html', {'task': task, 'uf': uf, 'op': u'success'})
        else:
            return render(request, 'cov_upload.html', {'task': task, 'uf': uf, 'op': u'failed'})
    else:
        uf = UploadForm()
        return render(request, 'cov_upload.html', {'task': task, 'uf': uf, 'op': u'upload'})
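Several of the build-cache snippets above (check_r_md5, backup_res_files, and the stat-cache execute methods) pass a file path to get_md5 rather than a string, so their helper presumably hashes file contents. A hypothetical file-based variant, reading in chunks so large files are not loaded into memory at once (assumed sketch, not taken from the sources above):

def get_md5_of_file(path, chunk_size=8192):
    """Return the hex MD5 digest of the file at `path` (hypothetical helper)."""
    import hashlib
    digest = hashlib.md5()
    with open(path, 'rb') as fh:
        # Read the file in fixed-size chunks and feed each into the digest.
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()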