def run(self):
    # Decide whether mirrors.json must be (re)downloaded by comparing the
    # md5 published on the remote side with the md5 of the local copy.
    # NOTE(review): this view of the method appears truncated -- the code
    # that consumes `need_download` is not visible here; confirm upstream.
    need_download = True
    if os.path.exists(local_mirrors_json):
        try:
            # The remote md5 is served as a small plain-text resource.
            remote_md5 = urllib2.urlopen(self.mirrors_json_md5_url).read().strip()
            local_md5 = md5_file(local_mirrors_json)
            need_download = remote_md5 != local_md5
        except Exception, e:
            # Best effort: on any fetch error keep need_download = True.
            logging.warn("fetch mirrors.json md5 error:")
            logging.warn(str(e))
def upload_file(self, local_file, remote_file, data_name): if self.server.getFileInfo(remote_file): print "%s: 删除远端%s..." % (self.space_name, data_name) self.server.deleteFile(remote_file) print "%s: 删除远端%s完成" % (self.space_name, data_name) print "%s: 上传远端%s..." % (self.space_name, data_name) self.server.setContentMD5(md5_file(local_file)) with open(local_file, "rb") as patch_list_file: if self.server.writeFile(remote_file, patch_list_file.read(), True): print "%s: 上传远端%s成功" % (self.space_name, data_name) else: print "%s: 上传远端%s失败" % (self.space_name, data_name)
def download_data(self, data_file, test):
    # Fetch the patch list (patch_md5.json) matching the md5 of the local
    # origin tarball for this data space.
    # NOTE(review): this copy of the method appears truncated here -- the
    # code that consumes patch_list_json is not visible in this fragment.
    origin_data_md5 = md5_file(os.path.join(self.data_origin_dir, data_file))
    # The space name is the tarball filename without the ".tar.gz" suffix.
    space_name = data_file.split(".tar.gz")[0]
    patch_dir = os.path.join(self.data_patch_dir, space_name)
    # Create download directory.
    create_directory(patch_dir)
    # "test" selects the staging channel instead of the release (3.1) one.
    if test:
        remote_url = "http://%s.%s/test" % (space_name, UPDATE_DATA_URL)
    else:
        remote_url = "http://%s.%s/3.1" % (space_name, UPDATE_DATA_URL)
    patch_list_url = "%s/patch/%s/patch_md5.json" % (remote_url, origin_data_md5)
    try:
        patch_list_json = json.load(urllib2.urlopen(patch_list_url))
    except Exception, e:
        # Network error or no patch published yet: fall back to "".
        patch_list_json = ""
def build_origin_data(self, action): if action == "build": # Delete origin data remove_file(self.data_origin_file) # Build origin data. print "%s: 创建本地原始数据..." % (self.space_name) with tarfile.open(self.data_origin_file, "w:gz") as tar: for root, dir, files in os.walk(self.input_data_dir): for file in files: fullpath = os.path.join(root, file) tar.add(fullpath, fullpath.split(self.input_data_dir)[1], False) print "%s: 创建本地原始数据完成" % (self.space_name) with open(TIME_FLAG_FILE, 'w') as fp: s = get_current_time("%Y_%m_%d_%H:%M:%S") fp.write(s) if action == "upload" and self.check_permission(self.space_name): remote_origin_file_path = os.path.join( self.server_data_dir, "origin", md5_file(self.data_origin_file), "%s.tar.gz" % self.space_name) self.upload_file(self.data_origin_file, remote_origin_file_path, "原始数据")
def build_origin_data(self, action): if action == "build": # Delete origin data remove_file(self.data_origin_file) # Build origin data. print "%s: 创建本地原始数据..." % (self.space_name) with tarfile.open(self.data_origin_file, "w:gz") as tar: for root, dir, files in os.walk(self.input_data_dir): for file in files: fullpath=os.path.join(root, file) tar.add(fullpath, fullpath.split(self.input_data_dir)[1], False) print "%s: 创建本地原始数据完成" % (self.space_name) with open(TIME_FLAG_FILE, 'w') as fp: s = get_current_time("%Y_%m_%d_%H:%M:%S") fp.write(s) if action == "upload" and self.check_permission(self.space_name): remote_origin_file_path = os.path.join( self.server_data_dir, "origin", md5_file(self.data_origin_file), "%s.tar.gz" % self.space_name) self.upload_file(self.data_origin_file, remote_origin_file_path, "原始数据")
def build_update_patch(self, action): if os.path.exists(self.data_origin_file): self.output_patch_dir = os.path.join(self.output_data_dir, "patch") if not os.path.exists(self.output_patch_dir): create_directory(self.output_patch_dir) self.output_temp_dir = os.path.join(self.output_data_dir, "temp") self.output_temp_file = os.path.join(self.output_temp_dir, "%s.tar.gz" % self.space_name) self.output_temp_patch_file = os.path.join(self.output_temp_dir, "patch") self.patch_md5_file = os.path.join(self.output_patch_dir, "patch_md5.json") self.origin_data_md5 = md5_file(self.data_origin_file) if not os.path.exists(self.patch_md5_file): self.patch_md5_json = {} self.patch_md5_json["origin_data"] = self.origin_data_md5 self.patch_md5_json["current_patch"] = [] else: self.patch_md5_json = json.load(open(self.patch_md5_file)) if self.patch_md5_json["origin_data"] != self.origin_data_md5: self.patch_md5_json["origin_data"] = self.origin_data_md5 self.patch_md5_json["current_patch"] = [] self.remote_patch_dir = os.path.join(self.server_data_dir, "patch") if action == "build": # Delete temp directory first. create_directory(self.output_temp_dir, True) # Build temp file. print "%s: 创建本地更新数据..." % self.space_name with tarfile.open(self.output_temp_file, "w:gz") as tar: for root, dir, files in os.walk(self.input_data_dir): for file in files: fullpath = os.path.join(root, file) tar.add(fullpath, fullpath.split(self.input_data_dir)[1], False) print "%s: 创建本地更新数据完成" % self.space_name print "%s: 生成补丁文件..." 
% self.space_name subprocess.Popen("xdelta3 -ves %s %s %s" % (self.data_origin_file, self.output_temp_file, self.output_temp_patch_file), shell=True).wait() newest_patch_file_md5 = md5_file(self.output_temp_patch_file) current_patch_dict = self.patch_md5_json.get("current_patch") if current_patch_dict: last_patch_md5 = current_patch_dict[0]["md5"] if last_patch_md5 == newest_patch_file_md5: remove_directory(self.output_temp_dir) print "%s: input_data数据未做任何改变,删除相同补丁文件" % self.space_name sys.exit(0) else: current_patch_dict = [] newest_patch_dir = os.path.join(self.output_patch_dir, self.origin_data_md5) if not os.path.exists(newest_patch_dir): create_directory(newest_patch_dir) newest_patch_name = "%s-%s.xd3" % ( self.space_name, get_current_time("%Y_%m_%d_%H:%M:%S")) newest_patch_file = os.path.join(newest_patch_dir, newest_patch_name) os.renames(self.output_temp_patch_file, newest_patch_file) remove_directory(self.output_temp_dir) current_patch_dict.insert(0, { "name": newest_patch_name, "md5": newest_patch_file_md5 }) print "%s: 生成补丁文件完成" % self.space_name print "%s: 写入补丁md5..." % self.space_name self.patch_md5_json["current_patch"] = current_patch_dict with open(self.patch_md5_file, "w") as fp: json.dump(self.patch_md5_json, fp) print "%s: 写入补丁md5完成" % self.space_name elif action == "upload" and self.check_permission(self.space_name): # Upload patch file. 
current_patch_dict = self.patch_md5_json.get("current_patch") if current_patch_dict != []: if len(current_patch_dict) > 2: print "%s: 清理多余的补丁" % self.space_name spare_patchs = current_patch_dict[2:] current_patch_dict = current_patch_dict[:2] for patch in spare_patchs: patch_name = patch["name"].encode("utf-8") local_path = os.path.join(self.output_patch_dir, self.origin_data_md5, patch_name) try: remove_file(local_path) print "%s: 清除了补丁%s" % (self.space_name, patch_name) except: pass remote_path = os.path.join(self.remote_patch_dir, self.origin_data_md5, patch_name) self.delete_remote_file(remote_path, patch_name) self.patch_md5_json[ "current_patch"] = current_patch_dict with open(self.patch_md5_file, "w") as fp: json.dump(self.patch_md5_json, fp) newest_patch_name = current_patch_dict[0]["name"].encode( "utf-8") newest_patch_file = os.path.join(self.output_patch_dir, self.origin_data_md5, newest_patch_name) remote_patch_file = os.path.join(self.remote_patch_dir, self.origin_data_md5, newest_patch_name) remote_patch_md5_file = os.path.join( self.remote_patch_dir, self.origin_data_md5, "patch_md5.json") # upload newest_patch_file self.upload_file(newest_patch_file, remote_patch_file, "补丁更新数据") # Update patch list file. self.upload_file(self.patch_md5_file, remote_patch_md5_file, "补丁md5列表文件") else: print "%s: 当前没有任何补丁,请打好补丁再上传吧!" % self.space_name else: print "%s: %s 不存在, 无法进行补丁的创建和上传" % (self.space_name, self.data_origin_file)
class UpdateDataService(dbus.service.Object): ''' class docs ''' def __init__(self, system_bus, mainloop): ''' init docs ''' # Init dbus service. dbus.service.Object.__init__(self, system_bus, DSC_UPDATER_PATH) self.mainloop = mainloop self.data_origin_dir = os.path.join(DATA_DIR, "origin") self.data_newest_dir = os.path.join(DATA_DIR, "newest") self.data_patch_dir = os.path.join(DATA_DIR, "patch") self.data_patch_config_filepath = os.path.join(DATA_DIR, "patch_status.ini") self.data_newest_id_path = os.path.join(DATA_DIR, "data_newest_id.ini") def get_unique_id(self): return str(uuid.uuid4()) def run(self, test): # Init ini files. if not os.path.exists(self.data_newest_id_path): self.newest_data_id_config = Config(self.data_newest_id_path) self.newest_data_id_config.load() self.newest_data_id_config.set("newest", "data_id", "") self.newest_data_id_config.set("newest", "update_date", "") self.newest_data_id_config.write() else: self.newest_data_id_config = Config(self.data_newest_id_path) self.newest_data_id_config.load() try: update_date = self.newest_data_id_config.get("newest", "update_date") except Exception: update_date = "" if self.newest_data_id_config.get("newest", "data_id") == "" or update_date != UPDATE_DATE: self.clean() newest_data_id = self.get_unique_id() newest_data_dir = os.path.join(DATA_DIR, "update", newest_data_id) print "进行第一次数据解压..." 
log("进行第一次数据解压...") for data_file in os.listdir(self.data_origin_dir): with tarfile.open(os.path.join(self.data_origin_dir, data_file), "r:gz") as tar_file: tar_file.extractall(newest_data_dir) print "进行第一次数据解压完成" log("进行第一次数据解压完成") self.newest_data_id_config.set("newest", "data_id", newest_data_id) self.newest_data_id_config.set("newest", "update_date", UPDATE_DATE) self.newest_data_id_config.write() if not os.path.exists(self.data_patch_config_filepath): self.patch_status_config = Config(self.data_patch_config_filepath) self.patch_status_config.load() for space_name in DATA_SPACE_NAME: self.patch_status_config.set("data_md5", space_name, "") self.patch_status_config.write() else: self.patch_status_config = Config(self.data_patch_config_filepath) self.patch_status_config.load() self.have_update = [] # Download update data. for data_file in os.listdir(self.data_origin_dir): self.download_data(data_file, test) if self.have_update: # Apply update data. for space_name in self.have_update: self.apply_data(space_name) # Extra data. newest_data_id = self.get_unique_id() newest_data_dir = os.path.join(DATA_DIR, "update", newest_data_id) for space_name in DATA_SPACE_NAME: data_filename = "%s.tar.gz" % space_name origin_data_file = os.path.join(self.data_origin_dir, data_filename) newest_data_file = os.path.join(self.data_newest_dir, data_filename) if not os.path.exists(newest_data_file): os.system('cp %s %s' % (origin_data_file, newest_data_file)) print "解压最新数据..." log("解压最新数据...") for data_file in os.listdir(self.data_newest_dir): newest_file = os.path.join(self.data_newest_dir, data_file) with tarfile.open(newest_file, "r:gz") as tar_file: tar_file.extractall(newest_data_dir) print "解压最新数据完成" log("解压最新数据完成") self.previous_data_id = self.newest_data_id_config.get("newest", "data_id") self.newest_data_id_config.set("newest", "data_id", newest_data_id) self.newest_data_id_config.write() if self.is_fontend_running(): print 'Frontend is running, clear data next time!' 
log('Frontend is running, clear data next time!') else: print 'Clear unused data.' log('Clear unused data.') self.clear_data_folder() print 'Done!' log("Done!") glib.timeout_add(200, self.mainloop.quit) def is_fontend_running(self): if os.path.exists(DATA_CURRENT_ID_CONFIG_PATH): config = Config(DATA_CURRENT_ID_CONFIG_PATH) config.load() data_id = config.get('current', 'data_id') if data_id: return True else: return False else: False def clear_data_folder(self): # clear data when ui is not running # judge which data is in using if os.path.exists(DATA_CURRENT_ID_CONFIG_PATH): current_data_id_config = Config(DATA_CURRENT_ID_CONFIG_PATH) current_data_id_config.load() current_data_id = current_data_id_config.get("current", "data_id") else: current_data_id = None self.newest_data_id_config.load() newest_data_id = self.newest_data_id_config.get("newest", "data_id") data_file_list = ["newest", "origin", "patch", "update", "data_newest_id.ini", "patch_status.ini", "cache_soft.db", "origin_data_time" ] data_id_list = (current_data_id, newest_data_id) for data_file in os.listdir(DATA_DIR): if data_file not in data_file_list: remove_directory(os.path.join(DATA_DIR, data_file)) print ">> remove file: %s" % data_file log(">> remove file: %s" % data_file) elif data_file == "update": for data_id in os.listdir(os.path.join(DATA_DIR, "update")): if data_id not in data_id_list: remove_directory(os.path.join(DATA_DIR, "update", data_id)) print '>> remove old data: %s' % data_id log('>> remove old data: %s' % data_id) def download_data(self, data_file, test): origin_data_md5 = md5_file(os.path.join(self.data_origin_dir, data_file)) space_name = data_file.split(".tar.gz")[0] patch_dir = os.path.join(self.data_patch_dir, space_name) # Create download directory. 
create_directory(patch_dir) if test: remote_url = "http://%s.%s/test" % (space_name, UPDATE_DATA_URL) else: remote_url = "http://%s.%s/3.1" % (space_name, UPDATE_DATA_URL) patch_list_url = "%s/patch/%s/patch_md5.json" % (remote_url, origin_data_md5) try: patch_list_json = json.load(urllib2.urlopen(patch_list_url)) except Exception, e: patch_list_json = "" if patch_list_json != "": patch_name = patch_list_json["current_patch"][0]["name"].encode("utf-8") patch_md5 = patch_list_json["current_patch"][0]["md5"].encode("utf-8") local_patch_info = self.patch_status_config.get("data_md5", space_name) if local_patch_info == '' or (local_patch_info != '' and eval(local_patch_info)[1] != patch_md5): # Start download. download_url = "%s/patch/%s/%s" % (remote_url, origin_data_md5, patch_name) local_patch_file = os.path.join(patch_dir, patch_name) # TODO: 此处添加下载返回值判断 os.system("wget %s -t 5 -c -O %s" % (download_url, local_patch_file)) try: download_md5 = md5_file(local_patch_file) if download_md5 == patch_md5: self.have_update.append(space_name) if local_patch_info: remove_file(os.path.join(self.data_patch_dir, eval(local_patch_info)[0])) self.patch_status_config.set("data_md5", space_name, [patch_name, patch_md5]) self.patch_status_config.write() print "%s: 补丁%s下载成功" % (space_name, patch_name) log("%s: 补丁%s下载成功" % (space_name, patch_name)) else: print "%s: 补丁%s下载错误" (space_name, patch_name) log("%s: 补丁%s下载错误" (space_name, patch_name)) except: print "%s: 补丁%s下载失败" (space_name, patch_name) log("%s: 补丁%s下载失败" (space_name, patch_name)) else: print "%s: 当前数据是最新的" % space_name log("%s: 当前数据是最新的" % space_name) else: print "%s: 网络问题或者远端没有任何更新补丁" % space_name log("%s: 网络问题或者远端没有任何更新补丁" % space_name)
def build_update_patch(self, action):
    # Create ("build") or upload ("upload") an xdelta3 binary patch that
    # turns the origin tarball into the current input_data contents.
    if os.path.exists(self.data_origin_file):
        self.output_patch_dir = os.path.join(self.output_data_dir, "patch")
        if not os.path.exists(self.output_patch_dir):
            create_directory(self.output_patch_dir)
        self.output_temp_dir = os.path.join(self.output_data_dir, "temp")
        self.output_temp_file = os.path.join(self.output_temp_dir, "%s.tar.gz" % self.space_name)
        self.output_temp_patch_file = os.path.join(self.output_temp_dir, "patch")
        self.patch_md5_file = os.path.join(self.output_patch_dir, "patch_md5.json")
        self.origin_data_md5 = md5_file(self.data_origin_file)
        if not os.path.exists(self.patch_md5_file):
            # No bookkeeping file yet: start a fresh patch list.
            self.patch_md5_json = {}
            self.patch_md5_json["origin_data"] = self.origin_data_md5
            self.patch_md5_json["current_patch"] = []
        else:
            self.patch_md5_json = json.load(open(self.patch_md5_file))
            if self.patch_md5_json["origin_data"] != self.origin_data_md5:
                # Origin data changed, so existing patches no longer
                # apply: reset the patch list.
                self.patch_md5_json["origin_data"] = self.origin_data_md5
                self.patch_md5_json["current_patch"] = []
        self.remote_patch_dir = os.path.join(self.server_data_dir, "patch")
        if action == "build":
            # Delete temp directory first.
            create_directory(self.output_temp_dir, True)
            # Build temp file.
            print "%s: 创建本地更新数据..." % self.space_name
            with tarfile.open(self.output_temp_file, "w:gz") as tar:
                for root, dir, files in os.walk(self.input_data_dir):
                    for file in files:
                        fullpath=os.path.join(root, file)
                        # Archive name: path relative to input_data_dir.
                        tar.add(fullpath, fullpath.split(self.input_data_dir)[1], False)
            print "%s: 创建本地更新数据完成" % self.space_name
            print "%s: 生成补丁文件..." % self.space_name
            # xdelta3 encodes a binary delta: origin -> new tarball.
            subprocess.Popen(
                "xdelta3 -ves %s %s %s" % (
                    self.data_origin_file,
                    self.output_temp_file,
                    self.output_temp_patch_file),
                shell=True).wait()
            newest_patch_file_md5 = md5_file(self.output_temp_patch_file)
            current_patch_dict = self.patch_md5_json.get("current_patch")
            if current_patch_dict:
                last_patch_md5 = current_patch_dict[0]["md5"]
                if last_patch_md5 == newest_patch_file_md5:
                    # Same md5 as the newest existing patch: data did not
                    # change, so drop the duplicate and stop.
                    remove_directory(self.output_temp_dir)
                    print "%s: input_data数据未做任何改变,删除相同补丁文件" % self.space_name
                    sys.exit(0)
            else:
                current_patch_dict = []
            newest_patch_dir = os.path.join(self.output_patch_dir, self.origin_data_md5)
            if not os.path.exists(newest_patch_dir):
                create_directory(newest_patch_dir)
            # Patches are named <space>-<timestamp>.xd3.
            newest_patch_name = "%s-%s.xd3" % (self.space_name, get_current_time("%Y_%m_%d_%H:%M:%S"))
            newest_patch_file = os.path.join(newest_patch_dir, newest_patch_name)
            os.renames(self.output_temp_patch_file, newest_patch_file)
            remove_directory(self.output_temp_dir)
            # Newest patch goes to the front of the list.
            current_patch_dict.insert(0, {"name" : newest_patch_name, "md5" : newest_patch_file_md5})
            print "%s: 生成补丁文件完成" % self.space_name
            print "%s: 写入补丁md5..." % self.space_name
            self.patch_md5_json["current_patch"] = current_patch_dict
            with open(self.patch_md5_file, "w") as fp:
                json.dump(self.patch_md5_json, fp)
            print "%s: 写入补丁md5完成" % self.space_name
        elif action == "upload" and self.check_permission(self.space_name):
            # Upload patch file.
            current_patch_dict = self.patch_md5_json.get("current_patch")
            if current_patch_dict != []:
                if len(current_patch_dict) > 2:
                    # Keep only the two newest patches; remove the rest
                    # both locally and remotely.
                    print "%s: 清理多余的补丁" % self.space_name
                    spare_patchs = current_patch_dict[2:]
                    current_patch_dict = current_patch_dict[:2]
                    for patch in spare_patchs:
                        patch_name = patch["name"].encode("utf-8")
                        local_path = os.path.join(self.output_patch_dir, self.origin_data_md5, patch_name)
                        try:
                            remove_file(local_path)
                            print "%s: 清除了补丁%s" % (self.space_name, patch_name)
                        except:
                            # Best effort: missing local file is not fatal.
                            pass
                        remote_path = os.path.join(self.remote_patch_dir, self.origin_data_md5, patch_name)
                        self.delete_remote_file(remote_path, patch_name)
                    self.patch_md5_json["current_patch"] = current_patch_dict
                    with open(self.patch_md5_file, "w") as fp:
                        json.dump(self.patch_md5_json, fp)
                newest_patch_name = current_patch_dict[0]["name"].encode("utf-8")
                newest_patch_file = os.path.join(self.output_patch_dir, self.origin_data_md5, newest_patch_name)
                remote_patch_file = os.path.join(self.remote_patch_dir, self.origin_data_md5, newest_patch_name)
                remote_patch_md5_file = os.path.join(self.remote_patch_dir, self.origin_data_md5, "patch_md5.json")
                # upload newest_patch_file
                self.upload_file(newest_patch_file, remote_patch_file, "补丁更新数据")
                # Update patch list file.
                self.upload_file(self.patch_md5_file, remote_patch_md5_file, "补丁md5列表文件")
            else:
                print "%s: 当前没有任何补丁,请打好补丁再上传吧!" % self.space_name
    else:
        print "%s: %s 不存在, 无法进行补丁的创建和上传" % (self.space_name, self.data_origin_file)