def __init__(self, space_name, action_type, input_data_dir, output_data_dir, server_data_dir, action):
    '''
    Store the build settings for one data space and immediately run the
    requested action.

    @param space_name: Name of the data space; also names the origin tarball.
    @param action_type: BUILD_ORIGIN_DATA or BUILD_UPDATE_PATCH.
    @param input_data_dir: Directory holding the space's source data.
    @param output_data_dir: Directory receiving build output.
    @param server_data_dir: Remote directory layout root on the server.
    @param action: Sequence whose second element is forwarded to the build method.
    '''
    # Remember the directories and action this build works with.
    self.space_name = space_name
    self.input_data_dir = input_data_dir
    self.output_data_dir = output_data_dir
    self.server_data_dir = server_data_dir
    self.action = action

    # Make sure the origin-data directory exists before deriving paths in it.
    if not os.path.exists(DATA_ORIGIN_DIR):
        create_directory(DATA_ORIGIN_DIR)
    self.data_origin_file = os.path.join(DATA_ORIGIN_DIR, "%s.tar.gz" % self.space_name)

    # Dispatch straight from the constructor, as the callers expect.
    if action_type == BUILD_ORIGIN_DATA:
        self.build_origin_data(self.action[1])
    if action_type == BUILD_UPDATE_PATCH:
        self.build_update_patch(self.action[1])
def __init__(self, image_path, callback_method=None):
    '''
    Prepare a daemon thread that fetches one image from the upyun server.

    @param image_path: Image path relative to the server root; mirrored
        under the local cache directory.
    @param callback_method: Optional callable kept for the thread body to use.
    '''
    td.Thread.__init__(self)
    # Daemonize so a pending download never blocks interpreter shutdown.
    self.setDaemon(True)
    self.callback_method = callback_method

    # Remote source and its mirror location in the local cache.
    self.remote_url = UPYUN_SERVER_ADDRESS + image_path
    self.local_path = os.path.join(CACHE_DIR, image_path)
    create_directory(os.path.dirname(self.local_path))
def load_skin(self, skin_name, system_skin_dir=None, user_skin_dir=None):
    '''
    Internal function to load a skin and cache its configuration.

    @param skin_name: Name of the skin to load.
    @param system_skin_dir: Optional system skin directory; created when given.
    @param user_skin_dir: Optional user skin directory; created when given.
    @return: Return True if load finish, otherwise return False.
    '''
    try:
        # Remember the skin name and (optionally) override the skin dirs.
        self.skin_name = skin_name
        if system_skin_dir:
            self.system_skin_dir = system_skin_dir
            create_directory(self.system_skin_dir)
        if user_skin_dir:
            self.user_skin_dir = user_skin_dir
            create_directory(self.user_skin_dir)
        self.skin_dir = self.get_skin_dir()

        # Parse the skin's config.ini.
        self.config = Config(self.get_skin_file_path("config.ini"))
        self.config.load()
        cfg = self.config

        # [theme] section.
        self.theme_name = cfg.get("theme", "theme_name")

        # [application] section.
        self.app_id = cfg.get("application", "app_id")
        self.app_version = cfg.getfloat("application", "app_version")

        # [background] section: image, placement and dominant color.
        self.image = cfg.get("background", "image")
        self.x = cfg.getfloat("background", "x")
        self.y = cfg.getfloat("background", "y")
        self.scale_x = cfg.getfloat("background", "scale_x")
        self.scale_y = cfg.getfloat("background", "scale_y")
        self.dominant_color = cfg.get("background", "dominant_color")

        # [action] section: what the user may do with this skin.
        self.deletable = cfg.getboolean("action", "deletable")
        self.editable = cfg.getboolean("action", "editable")
        self.vertical_mirror = cfg.getboolean("action", "vertical_mirror")
        self.horizontal_mirror = cfg.getboolean("action", "horizontal_mirror")

        # Pre-render the background image for later drawing.
        self.background_pixbuf = gtk.gdk.pixbuf_new_from_file(self.get_skin_file_path(self.image))

        # Persist the active skin name.
        self.save_skin_name()

        return True
    except Exception as e:
        print("function load_skin got error: %s" % (e))
        traceback.print_exc(file=sys.stdout)
        return False
def start(self):
    '''
    Run the download: probe the remote file size, fetch the missing byte
    ranges with a greenlet pool, then join the pieces and verify the hash.

    Emits signals: "start", "no-need-fetch", "stop", "pause",
    "check-hash-failed", "finish", "get-file-size-failed".
    '''
    # Probe the remote size; error_flag is consulted right after, so it is
    # presumably set by init_file_size on failure -- TODO confirm.
    self.init_file_size()
    if not self.error_flag and self.file_size > 0:
        self.last_byte_index = self.file_size - 1
        create_directory(self.temp_save_dir)
        # Split the file into already-downloaded and still-missing ranges.
        (downloaded_pieces, download_pieces, downloaded_size) = self.get_download_pieces()
        self.check_download_pieces(download_pieces)
        if downloaded_size == self.file_size:
            # Everything is already on disk.
            self.signal.emit("no-need-fetch")
        else:
            current_time = time.time()
            # Progress bookkeeping shared with signal listeners; -1 marks
            # values that have not been measured yet.
            self.update_info = {"file_size" : self.file_size,
                                "downloaded_size" : downloaded_size,
                                "start_time" : current_time,
                                "update_time" : current_time,
                                "remain_time" : -1,
                                "average_speed" : -1,
                                "realtime_speed" : -1,
                                "realtime_time" : current_time,
                                "realtime_size" : 0,
                                }
            self.signal.emit("start", self.update_info)
            # One greenlet per missing byte range, all driven by the pool.
            for (begin, end) in download_pieces:
                self.create_greenlet(begin, end)
            [self.pool.start(greenlet) for greenlet in self.greenlet_dict.values()]
            # Block until every greenlet finishes, or stop/pause is requested.
            self.pool.join()
            if self.stop_flag:
                remove_directory(self.temp_save_dir)
                self.signal.emit("stop")
            elif self.pause_flag:
                self.signal.emit("pause")
            else:
                # Concatenate the piece files (named <temp_save_path>_<offset>)
                # in byte-offset order into the final file via the shell.
                offset_ids = sorted(map(lambda (start, end): start, downloaded_pieces + download_pieces))
                command = "cat " + ' '.join(map(lambda offset_id: "%s_%s" % (self.temp_save_path, offset_id), offset_ids)) + " > %s" % self.file_save_path
                subprocess.Popen(command, shell=True).wait()
                remove_directory(self.temp_save_dir)
                if self.file_hash_info != None:
                    # Verify the assembled file against the expected digest.
                    (expect_hash_type, expect_hash_value) = self.file_hash_info
                    hash_value = get_hash(self.file_save_path, expect_hash_type)
                    if hash_value != expect_hash_value:
                        self.signal.emit("check-hash-failed", expect_hash_value, hash_value)
                    else:
                        self.signal.emit("finish")
                else:
                    self.signal.emit("finish")
    else:
        self.signal.emit("get-file-size-failed")
def load_skin_from_package(self, filepath):
    '''
    Load theme from given package.

    @param filepath: The file path of package (a .tar.gz archive).
    @return: (True, skin_dir, skin_image_file) on success,
        (False, skin_dir, skin_image_file) if reload fails, or bare False
        when the package's custom theme does not match this application.
    '''
    # Extract the package into a fresh, uniquely named user skin directory.
    skin_dir = os.path.join(self.user_skin_dir, str(uuid.uuid4()))
    create_directory(skin_dir, True)
    package = tarfile.open(filepath, "r:gz")
    package.extractall(skin_dir)

    # Read the skin's configuration.
    config = Config(os.path.join(skin_dir, "config.ini"))
    config.load()

    skin_theme_name = config.get("theme", "theme_name")
    if not skin_theme_name in COLOR_SEQUENCE:
        # The package ships a non-standard theme; accept it only when it was
        # built for exactly this application id and version.
        app_id = config.get("application", "app_id")
        app_version = config.get("application", "app_version")
        if app_id != self.app_given_id or app_version != self.app_given_version:
            # Version mismatch: throw the extracted skin away.
            remove_directory(skin_dir)
            # NOTE(review): this path returns a bare False while the other
            # paths return a 3-tuple; callers must cope with both shapes.
            return False

        # Replace any same-named theme already in the target directories...
        remove_directory(os.path.join(self.ui_theme_dir, skin_theme_name))
        if self.app_theme_dir != None:
            remove_directory(os.path.join(self.app_theme_dir, skin_theme_name))

        # ...then move the packaged theme files into place.
        shutil.move(os.path.join(skin_dir, "ui_theme", skin_theme_name), self.ui_theme_dir)
        if self.app_theme_dir != None:
            shutil.move(os.path.join(skin_dir, "app_theme", skin_theme_name), self.app_theme_dir)

        # Drop the temporary theme directories left under the skin dir.
        remove_directory(os.path.join(skin_dir, "ui_theme"))
        remove_directory(os.path.join(skin_dir, "app_theme"))

    # Activate the new skin.
    skin_image_file = config.get("background", "image")
    if self.reload_skin(os.path.basename(skin_dir)):
        self.apply_skin()
        return (True, skin_dir, skin_image_file)
    else:
        return (False, skin_dir, skin_image_file)
def load_skin_from_image(self, filepath):
    '''
    Load theme from given image.

    @param filepath: The file path of image.
    @return: (True, skin_dir, skin_image_file) on success, otherwise
        (False, skin_dir, skin_image_file).
    '''
    # Paths for the new, uniquely named user skin built from this image.
    skin_dir = os.path.join(self.user_skin_dir, str(uuid.uuid4()))
    skin_image_file = os.path.basename(filepath)
    config_file = os.path.join(skin_dir, "config.ini")

    # Pick the theme whose color best matches the image's dominant color.
    dominant_color = get_dominant_color(filepath)
    similar_color = find_similar_color(dominant_color)[0]

    # Default configuration for an image-based skin.
    default_config = [
        ("theme", [("theme_name", similar_color)]),
        ("application", [("app_id", self.app_given_id),
                         ("app_version", self.app_given_version)]),
        ("background", [("image", skin_image_file),
                        ("x", "0"),
                        ("y", "0"),
                        ("scale_x", "1.0"),
                        ("scale_y", "1.0"),
                        ("dominant_color", dominant_color)]),
        ("action", [("deletable", "True"),
                    ("editable", "True"),
                    ("vertical_mirror", "False"),
                    ("horizontal_mirror", "False")])]

    # Materialize the skin: directory, image copy and config file.
    create_directory(skin_dir, True)
    shutil.copy(filepath, skin_dir)
    touch_file(config_file)
    Config(config_file, default_config).write()

    # Activate the new skin.
    if self.reload_skin(os.path.basename(skin_dir)):
        self.apply_skin()
        return (True, skin_dir, skin_image_file)
    else:
        return (False, skin_dir, skin_image_file)
def download_data(self, data_file, test):
    '''
    Fetch the patch-list json for one data space from the update server.

    @param data_file: Origin tarball file name, e.g. "<space>.tar.gz".
    @param test: When true, query the "test" channel instead of "3.1".
    '''
    # The remote patch list is keyed by the md5 of the local origin tarball.
    origin_data_md5 = md5_file(os.path.join(self.data_origin_dir, data_file))
    space_name = data_file.split(".tar.gz")[0]
    patch_dir = os.path.join(self.data_patch_dir, space_name)
    # Create download directory.
    create_directory(patch_dir)
    if test:
        remote_url = "http://%s.%s/test" % (space_name, UPDATE_DATA_URL)
    else:
        remote_url = "http://%s.%s/3.1" % (space_name, UPDATE_DATA_URL)
    patch_list_url = "%s/patch/%s/patch_md5.json" % (remote_url, origin_data_md5)
    try:
        patch_list_json = json.load(urllib2.urlopen(patch_list_url))
    except Exception, e:
        # Network or parse failure: fall back to an empty patch list.
        patch_list_json = ""
def __init__(self, system_theme_dir, user_theme_dir):
    '''
    Initialize Theme class.

    @param system_theme_dir: Default theme directory.
    @param user_theme_dir: User's theme save directory, generic is ~/.config/project-name/theme
    '''
    self.system_theme_dir = system_theme_dir
    self.user_theme_dir = user_theme_dir
    self.theme_info_file = "theme.txt"
    self.ticker = 0

    # Per-theme resource caches, filled on demand.
    self.pixbuf_dict = {}
    self.color_dict = {}
    self.alpha_color_dict = {}
    self.shadow_color_dict = {}

    # Make sure both theme directories exist.
    create_directory(self.system_theme_dir)
    create_directory(self.user_theme_dir)
def apply_data(self, space_name):
    '''
    Apply the recorded xdelta3 patch for one data space, producing the
    newest data tarball under self.data_newest_dir.

    @param space_name: Name of the data space to patch.
    '''
    if not os.path.exists(self.data_newest_dir):
        create_directory(self.data_newest_dir)

    # Resolve origin tarball, patch file and target path.
    data_filename = "%s.tar.gz" % space_name
    patch_name = self.patch_status_config.get("data_md5", space_name)[0]
    origin_data_file = os.path.join(self.data_origin_dir, data_filename)
    patch_file = os.path.join(self.data_patch_dir, space_name, patch_name)
    newest_data_file = os.path.join(self.data_newest_dir, data_filename)

    print("%s: 补丁%s合并开始..." % (space_name, patch_name))
    log("%s: 补丁%s合并开始..." % (space_name, patch_name))

    # Drop any stale output first (presumably because xdelta3 will not
    # overwrite an existing target -- TODO confirm).
    if os.path.exists(newest_data_file):
        remove_file(newest_data_file)
    subprocess.Popen("xdelta3 -ds %s %s %s" % (origin_data_file, patch_file, newest_data_file), shell=True).wait()

    print("%s: 补丁%s合并完成" % (space_name, patch_name))
    log("%s: 补丁%s合并完成" % (space_name, patch_name))
import urllib2 import urllib import json from constant import SERVER_ADDRESS, POST_TIMEOUT from events import global_event import traceback from deepin_utils.file import create_directory, touch_file import utils DEBUG = False BAIDU_SERVER_ADDRESS = 'http://apis.linuxdeepin.com/dschome/' if not DEBUG else 'http://127.0.0.1:8000/dschome/' UPYUN_SERVER_ADDRESS = 'http://dsc-home-data.b0.upaiyun.com/' CACHE_DIR = os.path.join(os.path.expanduser("~"), '.cache', 'deepin-software-center') create_directory(CACHE_DIR) HOME_CACHE_DATA_PATH = os.path.join(CACHE_DIR, "home_cache_data.json") status_modes = { 'test' : '2', 'publish' : '3', 'archive' : '4', } class FetchAlbumData(td.Thread): def __init__(self, language, status="publish"): td.Thread.__init__(self) self.language = language self.album_data_url = BAIDU_SERVER_ADDRESS + "album/" self.data = {
def start(self):
    '''
    Run the whole download: probe the remote file size, fetch the missing
    byte ranges concurrently via the greenlet pool, then assemble the
    pieces and verify the hash.

    Emits signals: "start", "no-need-fetch", "stop", "pause",
    "check-hash-failed", "finish", "get-file-size-failed".
    '''
    # Probe the remote size; error_flag is consulted right after, so it is
    # presumably set by init_file_size on failure -- TODO confirm.
    self.init_file_size()
    if self.error_flag or self.file_size <= 0:
        self.signal.emit("get-file-size-failed")
        return

    self.last_byte_index = self.file_size - 1
    create_directory(self.temp_save_dir)

    # Split the file into already-downloaded and still-missing ranges.
    (downloaded_pieces, download_pieces, downloaded_size) = self.get_download_pieces()
    self.check_download_pieces(download_pieces)

    if downloaded_size == self.file_size:
        # Everything is already on disk.
        self.signal.emit("no-need-fetch")
        return

    now = time.time()
    # Progress bookkeeping shared with signal listeners; -1 marks values
    # that have not been measured yet.
    self.update_info = {
        "file_size": self.file_size,
        "downloaded_size": downloaded_size,
        "start_time": now,
        "update_time": now,
        "remain_time": -1,
        "average_speed": -1,
        "realtime_speed": -1,
        "realtime_time": now,
        "realtime_size": 0,
        }
    self.signal.emit("start", self.update_info)

    # One greenlet per missing byte range, all driven by the pool.
    for (begin, end) in download_pieces:
        self.create_greenlet(begin, end)
    for greenlet in self.greenlet_dict.values():
        self.pool.start(greenlet)
    # Block until every greenlet finishes, or stop/pause is requested.
    self.pool.join()

    if self.stop_flag:
        remove_directory(self.temp_save_dir)
        self.signal.emit("stop")
    elif self.pause_flag:
        self.signal.emit("pause")
    else:
        # Concatenate the piece files (named <temp_save_path>_<offset>)
        # in byte-offset order into the final file via the shell.
        offset_ids = sorted([piece[0] for piece in downloaded_pieces + download_pieces])
        piece_paths = ["%s_%s" % (self.temp_save_path, offset_id) for offset_id in offset_ids]
        command = "cat " + ' '.join(piece_paths) + " > %s" % self.file_save_path
        subprocess.Popen(command, shell=True).wait()
        remove_directory(self.temp_save_dir)

        if self.file_hash_info != None:
            # Verify the assembled file against the expected digest.
            (expect_hash_type, expect_hash_value) = self.file_hash_info
            hash_value = get_hash(self.file_save_path, expect_hash_type)
            if hash_value != expect_hash_value:
                self.signal.emit("check-hash-failed", expect_hash_value, hash_value)
            else:
                self.signal.emit("finish")
        else:
            self.signal.emit("finish")
from events import global_event import traceback from deepin_utils.file import create_directory, touch_file from deepin_utils.hash import md5_file import utils import logging from constant import local_mirrors_json DEBUG = False BAIDU_SERVER_ADDRESS = 'http://apis.linuxdeepin.com/dschome/' if not DEBUG else 'http://127.0.0.1:8000/dschome/' UPYUN_SERVER_ADDRESS = 'http://dsc-home-data.b0.upaiyun.com/' CACHE_DIR = os.path.join(os.path.expanduser("~"), '.cache', 'deepin-software-center') create_directory(CACHE_DIR) HOME_CACHE_DATA_PATH = os.path.join(CACHE_DIR, "home_cache_data.json") status_modes = { 'test' : '2', 'publish' : '3', 'archive' : '4', } class FetchMirrors(td.Thread): def __init__(self): td.Thread.__init__(self) self.mirrors_json_url = UPYUN_SERVER_ADDRESS + "mirrors.json" self.mirrors_json_md5_url = UPYUN_SERVER_ADDRESS + "mirrors_json_md5.txt" def run(self):
def build_update_patch(self, action):
    '''
    Build or upload an xdelta3 patch between the origin tarball and the
    current input data for this space.

    @param action: "build" creates a new patch locally; "upload" pushes the
        newest patch and the md5 list to the server (permission required).
    '''
    if os.path.exists(self.data_origin_file):
        # Layout of the local output tree.
        self.output_patch_dir = os.path.join(self.output_data_dir, "patch")
        if not os.path.exists(self.output_patch_dir):
            create_directory(self.output_patch_dir)
        self.output_temp_dir = os.path.join(self.output_data_dir, "temp")
        self.output_temp_file = os.path.join(self.output_temp_dir, "%s.tar.gz" % self.space_name)
        self.output_temp_patch_file = os.path.join(self.output_temp_dir, "patch")
        self.patch_md5_file = os.path.join(self.output_patch_dir, "patch_md5.json")
        self.origin_data_md5 = md5_file(self.data_origin_file)
        # Load (or initialize) the patch bookkeeping json; reset the patch
        # list when the origin data changed, since old patches no longer apply.
        if not os.path.exists(self.patch_md5_file):
            self.patch_md5_json = {}
            self.patch_md5_json["origin_data"] = self.origin_data_md5
            self.patch_md5_json["current_patch"] = []
        else:
            self.patch_md5_json = json.load(open(self.patch_md5_file))
            if self.patch_md5_json["origin_data"] != self.origin_data_md5:
                self.patch_md5_json["origin_data"] = self.origin_data_md5
                self.patch_md5_json["current_patch"] = []
        self.remote_patch_dir = os.path.join(self.server_data_dir, "patch")
        if action == "build":
            # Delete temp directory first.
            create_directory(self.output_temp_dir, True)
            # Build temp file.
            print "%s: 创建本地更新数据..." % self.space_name
            with tarfile.open(self.output_temp_file, "w:gz") as tar:
                for root, dir, files in os.walk(self.input_data_dir):
                    for file in files:
                        fullpath = os.path.join(root, file)
                        # Store each file under its path relative to
                        # input_data_dir, without recursive descent.
                        tar.add(fullpath, fullpath.split(self.input_data_dir)[1], False)
            print "%s: 创建本地更新数据完成" % self.space_name
            print "%s: 生成补丁文件..." % self.space_name
            # Encode (-e) a patch from origin -> temp tarball.
            subprocess.Popen("xdelta3 -ves %s %s %s" % (self.data_origin_file, self.output_temp_file, self.output_temp_patch_file), shell=True).wait()
            newest_patch_file_md5 = md5_file(self.output_temp_patch_file)
            current_patch_dict = self.patch_md5_json.get("current_patch")
            if current_patch_dict:
                # Skip the patch when nothing changed since the last build.
                last_patch_md5 = current_patch_dict[0]["md5"]
                if last_patch_md5 == newest_patch_file_md5:
                    remove_directory(self.output_temp_dir)
                    print "%s: input_data数据未做任何改变,删除相同补丁文件" % self.space_name
                    sys.exit(0)
            else:
                current_patch_dict = []
            newest_patch_dir = os.path.join(self.output_patch_dir, self.origin_data_md5)
            if not os.path.exists(newest_patch_dir):
                create_directory(newest_patch_dir)
            # Patch files are named <space>-<timestamp>.xd3.
            newest_patch_name = "%s-%s.xd3" % (self.space_name, get_current_time("%Y_%m_%d_%H:%M:%S"))
            newest_patch_file = os.path.join(newest_patch_dir, newest_patch_name)
            os.renames(self.output_temp_patch_file, newest_patch_file)
            remove_directory(self.output_temp_dir)
            # The newest patch goes to the front of the list.
            current_patch_dict.insert(0, {"name" : newest_patch_name, "md5" : newest_patch_file_md5})
            print "%s: 生成补丁文件完成" % self.space_name
            print "%s: 写入补丁md5..." % self.space_name
            self.patch_md5_json["current_patch"] = current_patch_dict
            with open(self.patch_md5_file, "w") as fp:
                json.dump(self.patch_md5_json, fp)
            print "%s: 写入补丁md5完成" % self.space_name
        elif action == "upload" and self.check_permission(self.space_name):
            # Upload patch file.
            current_patch_dict = self.patch_md5_json.get("current_patch")
            if current_patch_dict != []:
                if len(current_patch_dict) > 2:
                    # Keep only the two newest patches, locally and remotely.
                    print "%s: 清理多余的补丁" % self.space_name
                    spare_patchs = current_patch_dict[2:]
                    current_patch_dict = current_patch_dict[:2]
                    for patch in spare_patchs:
                        patch_name = patch["name"].encode("utf-8")
                        local_path = os.path.join(self.output_patch_dir, self.origin_data_md5, patch_name)
                        try:
                            remove_file(local_path)
                            print "%s: 清除了补丁%s" % (self.space_name, patch_name)
                        except:
                            pass
                        remote_path = os.path.join(self.remote_patch_dir, self.origin_data_md5, patch_name)
                        self.delete_remote_file(remote_path, patch_name)
                    # Persist the trimmed patch list.
                    self.patch_md5_json["current_patch"] = current_patch_dict
                    with open(self.patch_md5_file, "w") as fp:
                        json.dump(self.patch_md5_json, fp)
                newest_patch_name = current_patch_dict[0]["name"].encode("utf-8")
                newest_patch_file = os.path.join(self.output_patch_dir, self.origin_data_md5, newest_patch_name)
                remote_patch_file = os.path.join(self.remote_patch_dir, self.origin_data_md5, newest_patch_name)
                remote_patch_md5_file = os.path.join(self.remote_patch_dir, self.origin_data_md5, "patch_md5.json")
                # upload newest_patch_file
                self.upload_file(newest_patch_file, remote_patch_file, "补丁更新数据")
                # Update patch list file.
                self.upload_file(self.patch_md5_file, remote_patch_md5_file, "补丁md5列表文件")
            else:
                print "%s: 当前没有任何补丁,请打好补丁再上传吧!" % self.space_name
    else:
        print "%s: %s 不存在, 无法进行补丁的创建和上传" % (self.space_name, self.data_origin_file)
def create_directory(directory, remove_first=False):
    '''
    Deprecated wrapper kept for backward compatibility; use
    deepin_utils.file.create_directory directly.

    @param directory: Directory path to create.
    @param remove_first: When True, request removal of the directory before
        recreating it (forwarded to deepin_utils.file.create_directory).
    @return: Whatever deepin_utils.file.create_directory returns.
    '''
    print("Please import deepin_utils.file.create_directory, this function will be deprecated in next release version.")
    # Bug fix: remove_first was previously hard-coded to False here, so a
    # caller passing remove_first=True was silently ignored.
    return file.create_directory(directory, remove_first=remove_first)
def build_update_patch(self, action):
    '''
    Build or upload an xdelta3 patch between the origin tarball and the
    current input data for this space.

    @param action: "build" creates a new patch locally; "upload" pushes the
        newest patch and the md5 list to the server (permission required).
    '''
    if os.path.exists(self.data_origin_file):
        # Layout of the local output tree.
        self.output_patch_dir = os.path.join(self.output_data_dir, "patch")
        if not os.path.exists(self.output_patch_dir):
            create_directory(self.output_patch_dir)
        self.output_temp_dir = os.path.join(self.output_data_dir, "temp")
        self.output_temp_file = os.path.join(self.output_temp_dir, "%s.tar.gz" % self.space_name)
        self.output_temp_patch_file = os.path.join(self.output_temp_dir, "patch")
        self.patch_md5_file = os.path.join(self.output_patch_dir, "patch_md5.json")
        self.origin_data_md5 = md5_file(self.data_origin_file)
        # Load (or initialize) the patch bookkeeping json; reset the patch
        # list when the origin data changed, since old patches no longer apply.
        if not os.path.exists(self.patch_md5_file):
            self.patch_md5_json = {}
            self.patch_md5_json["origin_data"] = self.origin_data_md5
            self.patch_md5_json["current_patch"] = []
        else:
            self.patch_md5_json = json.load(open(self.patch_md5_file))
            if self.patch_md5_json["origin_data"] != self.origin_data_md5:
                self.patch_md5_json["origin_data"] = self.origin_data_md5
                self.patch_md5_json["current_patch"] = []
        self.remote_patch_dir = os.path.join(self.server_data_dir, "patch")
        if action == "build":
            # Delete temp directory first.
            create_directory(self.output_temp_dir, True)
            # Build temp file.
            print "%s: 创建本地更新数据..." % self.space_name
            with tarfile.open(self.output_temp_file, "w:gz") as tar:
                for root, dir, files in os.walk(self.input_data_dir):
                    for file in files:
                        fullpath=os.path.join(root, file)
                        # Store each file under its path relative to
                        # input_data_dir, without recursive descent.
                        tar.add(fullpath, fullpath.split(self.input_data_dir)[1], False)
            print "%s: 创建本地更新数据完成" % self.space_name
            print "%s: 生成补丁文件..." % self.space_name
            # Encode (-e) a patch from origin -> temp tarball.
            subprocess.Popen("xdelta3 -ves %s %s %s" % (self.data_origin_file, self.output_temp_file, self.output_temp_patch_file), shell=True).wait()
            newest_patch_file_md5 = md5_file(self.output_temp_patch_file)
            current_patch_dict = self.patch_md5_json.get("current_patch")
            if current_patch_dict:
                # Skip the patch when nothing changed since the last build.
                last_patch_md5 = current_patch_dict[0]["md5"]
                if last_patch_md5 == newest_patch_file_md5:
                    remove_directory(self.output_temp_dir)
                    print "%s: input_data数据未做任何改变,删除相同补丁文件" % self.space_name
                    sys.exit(0)
            else:
                current_patch_dict = []
            newest_patch_dir = os.path.join(self.output_patch_dir, self.origin_data_md5)
            if not os.path.exists(newest_patch_dir):
                create_directory(newest_patch_dir)
            # Patch files are named <space>-<timestamp>.xd3.
            newest_patch_name = "%s-%s.xd3" % (self.space_name, get_current_time("%Y_%m_%d_%H:%M:%S"))
            newest_patch_file = os.path.join(newest_patch_dir, newest_patch_name)
            os.renames(self.output_temp_patch_file, newest_patch_file)
            remove_directory(self.output_temp_dir)
            # The newest patch goes to the front of the list.
            current_patch_dict.insert(0, {"name" : newest_patch_name, "md5" : newest_patch_file_md5})
            print "%s: 生成补丁文件完成" % self.space_name
            print "%s: 写入补丁md5..." % self.space_name
            self.patch_md5_json["current_patch"] = current_patch_dict
            with open(self.patch_md5_file, "w") as fp:
                json.dump(self.patch_md5_json, fp)
            print "%s: 写入补丁md5完成" % self.space_name
        elif action == "upload" and self.check_permission(self.space_name):
            # Upload patch file.
            current_patch_dict = self.patch_md5_json.get("current_patch")
            if current_patch_dict != []:
                if len(current_patch_dict) > 2:
                    # Keep only the two newest patches, locally and remotely.
                    print "%s: 清理多余的补丁" % self.space_name
                    spare_patchs = current_patch_dict[2:]
                    current_patch_dict = current_patch_dict[:2]
                    for patch in spare_patchs:
                        patch_name = patch["name"].encode("utf-8")
                        local_path = os.path.join(self.output_patch_dir, self.origin_data_md5, patch_name)
                        try:
                            remove_file(local_path)
                            print "%s: 清除了补丁%s" % (self.space_name, patch_name)
                        except:
                            pass
                        remote_path = os.path.join(self.remote_patch_dir, self.origin_data_md5, patch_name)
                        self.delete_remote_file(remote_path, patch_name)
                    # Persist the trimmed patch list.
                    self.patch_md5_json["current_patch"] = current_patch_dict
                    with open(self.patch_md5_file, "w") as fp:
                        json.dump(self.patch_md5_json, fp)
                newest_patch_name = current_patch_dict[0]["name"].encode("utf-8")
                newest_patch_file = os.path.join(self.output_patch_dir, self.origin_data_md5, newest_patch_name)
                remote_patch_file = os.path.join(self.remote_patch_dir, self.origin_data_md5, newest_patch_name)
                remote_patch_md5_file = os.path.join(self.remote_patch_dir, self.origin_data_md5, "patch_md5.json")
                # upload newest_patch_file
                self.upload_file(newest_patch_file, remote_patch_file, "补丁更新数据")
                # Update patch list file.
                self.upload_file(self.patch_md5_file, remote_patch_md5_file, "补丁md5列表文件")
            else:
                print "%s: 当前没有任何补丁,请打好补丁再上传吧!" % self.space_name
    else:
        print "%s: %s 不存在, 无法进行补丁的创建和上传" % (self.space_name, self.data_origin_file)