Exemple #1
0
    def __init__(self, cache):
        self.cache = cache
        self.db_path = os.path.join(get_parent_dir(__file__, 3),
                                    "data/cache_soft.db")

        remove_file(self.db_path)
        touch_file_dir(self.db_path)

        self.connect = sqlite3.connect(self.db_path)
        self.cursor = self.connect.cursor()

        self.cursor.execute("CREATE TABLE IF NOT EXISTS software (\
            pkg_name PRIMARY KEY NOT NULL, short_desc, long_desc, version, \
            homepage, size)")

        for pkg in self.cache:
            try:
                self.cursor.execute("INSERT INTO software VALUES(?,?,?,?,?,?)",
                                    (pkg.name, unicode(pkg.candidate.summary),
                                     unicode(pkg.candidate.description),
                                     unicode(pkg.candidate.version),
                                     unicode(pkg.candidate.homepage),
                                     unicode(pkg.candidate.size)))
            except Exception, e:
                print "Error in db_build: %s %s" % (e, pkg.name)
Exemple #2
0
    def download_screenshot_finish(self, pkg_name):
        '''
        Unpack and display screenshots once their archive has downloaded.

        @param pkg_name: package whose screenshot archive finished; ignored
        unless it matches the package currently shown (self.pkg_name).
        '''
        # Ignore callbacks for packages other than the one on display.
        if self.pkg_name != pkg_name:
            return

        screenshot_dir = os.path.join(SCREENSHOT_DOWNLOAD_DIR, pkg_name)
        zip_path = os.path.join(screenshot_dir, "screenshot.zip")
        if not os.path.exists(zip_path):
            return

        # Drop everything except the archive and its md5 marker.
        keep_files = ["screenshot_md5.txt", "screenshot.zip"]
        for entry in os.listdir(screenshot_dir):
            if entry not in keep_files:
                remove_file(os.path.join(screenshot_dir, entry))

        # Extract every member flat into the screenshot directory
        # (directory components of member names are discarded).
        zip_file = zipfile.ZipFile(zip_path)
        for member in zip_file.namelist():
            target = os.path.join(screenshot_dir, os.path.split(member)[1])
            with open(target, "wb") as out_file:
                out_file.write(zip_file.read(member))
        zip_file.close()

        # The archive itself is no longer needed.
        remove_file(zip_path)

        # Refresh the UI with the new screenshots.
        self.show_screenshot()
    def clean_download_cache(self):
        '''Clean download cache.

        Delete cached .deb archives that no pending download, install or
        upgrade action still needs.

        @return: [packageNum, cleanSize] - count of deleted archives and
        total bytes freed.
        '''
        # Collect packages that still have pending actions; their archives
        # must not be removed.
        remain_pkgs = []
        for pkg_name in self.download_manager.fetch_files_dict.keys():
            remain_pkgs.append(pkg_name)

        for pkg_name in self.apt_action_pool.install_action_dict.keys():
            remain_pkgs.append(pkg_name)
        for pkg_name in self.apt_action_pool.upgrade_action_dict.keys():
            remain_pkgs.append(pkg_name)

        # Expand each package to the archive paths of it and its dependencies.
        remain_pkgs_paths = []
        for pkg_name in remain_pkgs:
            result = get_pkg_dependence_file_path(self.pkg_cache, pkg_name)
            # FIX: the original tested `if not result`, which appended only
            # empty results (a no-op) and left every in-use archive
            # unprotected from deletion below.
            if result:
                remain_pkgs_paths += result

        # Init clean size.
        packageNum = 0
        cleanSize = 0

        # Walk the cache directory and delete every .deb not still needed.
        cache_archive_dir = get_cache_archive_dir()
        if os.path.exists(cache_archive_dir):
            for root, folder, files in os.walk(cache_archive_dir):
                for file_name in files:
                    path = os.path.join(root, file_name)
                    if path.endswith(".deb") and (path not in remain_pkgs_paths):
                        packageNum += 1
                        cleanSize += os.path.getsize(path)
                        remove_file(path)

        return [packageNum, cleanSize]
Exemple #4
0
def clean_tmp():
    '''Delete leftover dsc update log files from /tmp, if present.'''
    log_paths = [
        "/tmp/dsc-update-list.log",
        "/tmp/dsc-update-data.log",
    ]
    for log_path in log_paths:
        if os.path.exists(log_path):
            remove_file(log_path)
Exemple #5
0
    def __init__(self, cache):
        '''
        Build a fresh sqlite database of package metadata from an apt cache.

        @param cache: iterable of apt package objects; each package's
        candidate version supplies the stored columns.
        '''
        self.cache = cache
        # Database file lives three levels above this module, under data/.
        self.db_path = os.path.join(get_parent_dir(__file__, 3),
                "data/cache_soft.db")

        # Rebuild the database file from scratch on every run.
        remove_file(self.db_path)
        touch_file_dir(self.db_path)

        self.connect = sqlite3.connect(self.db_path)
        self.cursor = self.connect.cursor()

        # Schema: one row per package, keyed by package name.
        self.cursor.execute(
            "CREATE TABLE IF NOT EXISTS software (\
            pkg_name PRIMARY KEY NOT NULL, short_desc, long_desc, version, \
            homepage, size)")

        for pkg in self.cache:
            try:
                self.cursor.execute(
                    "INSERT INTO software VALUES(?,?,?,?,?,?)",
                    (pkg.name,
                    unicode(pkg.candidate.summary),
                    unicode(pkg.candidate.description),
                    unicode(pkg.candidate.version),
                    unicode(pkg.candidate.homepage),
                    unicode(pkg.candidate.size)
                    ))
            except Exception, e:
                # A package without a candidate must not abort the build;
                # report it and continue.
                print "Error in db_build: %s %s" % (e, pkg.name)
Exemple #6
0
    def export_skin(self, filepath):
        '''
        Internal function to export skin.

        Packs the skin config, background image and (for non-standard
        themes) the theme directories into "<filepath>.tar.gz".

        @param filepath: target path without the ".tar.gz" suffix.
        '''
        # Build temp config file.
        # FIX: the original joined the literal, unfilled format string
        # "/tmp/%s" with the uuid, producing paths under the bogus
        # directory "/tmp/%s" instead of directly under /tmp.
        config_filepath = os.path.join("/tmp", str(uuid.uuid4()))
        touch_file(config_filepath)
        self.save_skin(config_filepath)

        # Build skin package.
        with tarfile.open("%s.tar.gz" % filepath, "w:gz") as tar:
            # Add config file.
            tar.add(config_filepath, "config.ini", False)

            # Add background image file.
            tar.add(self.get_skin_file_path(self.image), self.image, False)

            # Copy theme files if theme is not a standard theme.
            if not self.theme_name in COLOR_SEQUENCE:
                tar.add(os.path.join(self.ui_theme_dir, self.theme_name), os.path.join("ui_theme", self.theme_name))
                if self.app_theme_dir != None:
                    tar.add(os.path.join(self.app_theme_dir, self.theme_name), os.path.join("app_theme", self.theme_name))

        # Remove temp config file.
        remove_file(config_filepath)
def update_origin_data(space_name):
    data_origin_file = os.path.join(deepin_software_center_data_dir, "data/origin/dsc-%s-data.tar.gz" % space_name)
    input_data_dir = os.path.join(database_dir, space_name)

    remove_file(data_origin_file)
    
    # Build origin data.
    print "%s: 创建本地原始数据..." % (space_name)
    with tarfile.open(data_origin_file, "w:gz") as tar:
        for root, dir, files in os.walk(input_data_dir):
            for file in files:
                fullpath=os.path.join(root, file)
                tar.add(fullpath, fullpath.split(database_dir)[1], False)
    print "%s: 创建本地原始数据完成" % (space_name)
    with open(TIME_FLAG_FILE, 'w') as fp:
        s = get_current_time("%Y_%m_%d_%H:%M:%S")
        fp.write(s)
def update_origin_data(space_name):
    '''
    Rebuild the origin data tarball for one data space and record the
    build time in TIME_FLAG_FILE.

    @param space_name: name of the data space.
    '''
    data_origin_file = os.path.join(
        deepin_software_center_data_dir,
        "data/origin/dsc-%s-data.tar.gz" % space_name)
    input_data_dir = os.path.join(database_dir, space_name)

    # Always rebuild the archive from scratch.
    remove_file(data_origin_file)

    # Build origin data.
    print "%s: 创建本地原始数据..." % (space_name)
    with tarfile.open(data_origin_file, "w:gz") as tar:
        for root, dir, files in os.walk(input_data_dir):
            for file in files:
                fullpath = os.path.join(root, file)
                # Store paths relative to database_dir inside the archive.
                tar.add(fullpath, fullpath.split(database_dir)[1], False)
    print "%s: 创建本地原始数据完成" % (space_name)
    # Stamp the rebuild time.
    with open(TIME_FLAG_FILE, 'w') as fp:
        s = get_current_time("%Y_%m_%d_%H:%M:%S")
        fp.write(s)
    def apply_data(self, space_name):
        if not os.path.exists(self.data_newest_dir):
            create_directory(self.data_newest_dir)
        data_filename = "%s.tar.gz" % space_name
        patch_name = self.patch_status_config.get("data_md5", space_name)[0]

        origin_data_file = os.path.join(self.data_origin_dir, data_filename)
        patch_file = os.path.join(self.data_patch_dir, space_name, patch_name)
        newest_data_file = os.path.join(self.data_newest_dir, data_filename)

        print "%s: 补丁%s合并开始..." % (space_name, patch_name)
        log("%s: 补丁%s合并开始..." % (space_name, patch_name))
        if os.path.exists(newest_data_file):
            remove_file(newest_data_file)
        subprocess.Popen("xdelta3 -ds %s %s %s" % (origin_data_file,
                                                    patch_file,
                                                    newest_data_file),
                                                    shell=True).wait()
                        
        print "%s: 补丁%s合并完成" % (space_name, patch_name)
        log("%s: 补丁%s合并完成" % (space_name, patch_name))
Exemple #10
0
    def download_screenshot_finish(self, pkg_name):
        '''
        Unpack a finished screenshot download and show the images.

        @param pkg_name: package whose screenshot archive finished; ignored
        unless it matches the package currently shown (self.pkg_name).
        '''
        if self.pkg_name == pkg_name:
            screenshot_dir = os.path.join(SCREENSHOT_DOWNLOAD_DIR, pkg_name)
            screenshot_zip_path = os.path.join(screenshot_dir, "screenshot.zip")
            if os.path.exists(screenshot_zip_path):
                # Remove unused files first.
                for screenshot_file in os.listdir(screenshot_dir):
                    if screenshot_file not in ["screenshot_md5.txt", "screenshot.zip"]:
                        remove_file(os.path.join(screenshot_dir, screenshot_file))

                # Extract zip file, flattening any directory structure
                # inside the archive into screenshot_dir.
                zip_file = zipfile.ZipFile(screenshot_zip_path)
                for extract_file in zip_file.namelist():
                    with open(os.path.join(screenshot_dir, os.path.split(extract_file)[1]), "wb") as screenshot_file:
                        screenshot_file.write(zip_file.read(extract_file))
                zip_file.close()

                # Remove zip file.
                remove_file(screenshot_zip_path)

                # Add screenshots.
                self.show_screenshot()
Exemple #11
0
    def build_origin_data(self, action):
        if action == "build":
            # Delete origin data
            remove_file(self.data_origin_file)

            # Build origin data.
            print "%s: 创建本地原始数据..." % (self.space_name)
            with tarfile.open(self.data_origin_file, "w:gz") as tar:
                for root, dir, files in os.walk(self.input_data_dir):
                    for file in files:
                        fullpath = os.path.join(root, file)
                        tar.add(fullpath,
                                fullpath.split(self.input_data_dir)[1], False)
            print "%s: 创建本地原始数据完成" % (self.space_name)
            with open(TIME_FLAG_FILE, 'w') as fp:
                s = get_current_time("%Y_%m_%d_%H:%M:%S")
                fp.write(s)

        if action == "upload" and self.check_permission(self.space_name):
            remote_origin_file_path = os.path.join(
                self.server_data_dir, "origin",
                md5_file(self.data_origin_file), "%s.tar.gz" % self.space_name)
            self.upload_file(self.data_origin_file, remote_origin_file_path,
                             "原始数据")
    def build_origin_data(self, action):
        '''
        Build or upload the origin data tarball for this data space.

        @param action: "build" recreates the local origin tarball;
        "upload" pushes it to the server (permission permitting).
        '''
        if action == "build":
            # Delete origin data
            remove_file(self.data_origin_file)

            # Build origin data.
            print "%s: 创建本地原始数据..." % (self.space_name)
            with tarfile.open(self.data_origin_file, "w:gz") as tar:
                for root, dir, files in os.walk(self.input_data_dir):
                    for file in files:
                        fullpath=os.path.join(root, file)
                        # Archive paths are relative to the input directory.
                        tar.add(fullpath, fullpath.split(self.input_data_dir)[1], False)
            print "%s: 创建本地原始数据完成" % (self.space_name)
            # Stamp the build time.
            with open(TIME_FLAG_FILE, 'w') as fp:
                s = get_current_time("%Y_%m_%d_%H:%M:%S")
                fp.write(s)

        if action == "upload" and self.check_permission(self.space_name):
            # Remote path embeds the tarball's md5.
            remote_origin_file_path = os.path.join(
                    self.server_data_dir, 
                    "origin", 
                    md5_file(self.data_origin_file), 
                    "%s.tar.gz" % self.space_name)
            self.upload_file(self.data_origin_file, remote_origin_file_path, "原始数据")
Exemple #13
0
    def clean_download_cache(self):
        '''Clean download cache.

        Delete cached .deb archives that no pending download, install or
        upgrade action still needs.

        @return: [packageNum, cleanSize] - count of deleted archives and
        total bytes freed.
        '''
        # get action packages.
        remain_pkgs = []
        for pkg_name in self.download_manager.task_name_to_id.keys():
            remain_pkgs.append(pkg_name)

        for pkg_name in self.apt_action_pool.install_action_dict.keys():
            remain_pkgs.append(pkg_name)
        for pkg_name in self.apt_action_pool.upgrade_action_dict.keys():
            remain_pkgs.append(pkg_name)

        # Get depend packages.
        remain_pkgs_paths = []
        for pkg_name in remain_pkgs:
            result = get_pkg_dependence_file_path(self.pkg_cache, pkg_name)
            # FIX: the original tested `if not result`, which appended only
            # empty results (a no-op) and left every in-use archive
            # unprotected from deletion below.
            if result:
                remain_pkgs_paths += result

        # Init clean size.
        packageNum = 0
        cleanSize = 0

        # Delete cache directory.
        cache_archive_dir = get_cache_archive_dir()
        if os.path.exists(cache_archive_dir):
            for root, folder, files in os.walk(cache_archive_dir):
                for file_name in files:
                    path = os.path.join(root, file_name)
                    if path.endswith(".deb") and (path
                                                  not in remain_pkgs_paths):
                        packageNum += 1
                        cleanSize += os.path.getsize(path)
                        remove_file(path)

        return [packageNum, cleanSize]
Exemple #14
0
def remove_file(path):
    print "Please import deepin_utils.file.remove_file, this function will departed in next release version."
    return file.remove_file(path)
class UpdateDataService(dbus.service.Object):
    '''
    class docs
    '''
	
    def __init__(self, system_bus, mainloop):
        '''
        init docs
        '''
        # Init dbus service.
        dbus.service.Object.__init__(self, system_bus, DSC_UPDATER_PATH)
        self.mainloop = mainloop
        
        self.data_origin_dir = os.path.join(DATA_DIR, "origin")
        self.data_newest_dir = os.path.join(DATA_DIR, "newest")
        self.data_patch_dir = os.path.join(DATA_DIR, "patch")
        self.data_patch_config_filepath = os.path.join(DATA_DIR, "patch_status.ini")
        self.data_newest_id_path = os.path.join(DATA_DIR, "data_newest_id.ini")
        
    def get_unique_id(self):
        return str(uuid.uuid4())
        
    def run(self, test):
        # Init ini files.
        if not os.path.exists(self.data_newest_id_path):
            self.newest_data_id_config = Config(self.data_newest_id_path)
            self.newest_data_id_config.load()
            self.newest_data_id_config.set("newest", "data_id", "")
            self.newest_data_id_config.set("newest", "update_date", "")
            self.newest_data_id_config.write()
        else:
            self.newest_data_id_config = Config(self.data_newest_id_path)
            self.newest_data_id_config.load()
            
        try:
            update_date = self.newest_data_id_config.get("newest", "update_date")
        except Exception:
            update_date = ""

        if self.newest_data_id_config.get("newest", "data_id") == "" or update_date != UPDATE_DATE:
            self.clean()
            newest_data_id = self.get_unique_id()
            newest_data_dir = os.path.join(DATA_DIR, "update", newest_data_id)
            
            print "进行第一次数据解压..."
            log("进行第一次数据解压...")
            for data_file in os.listdir(self.data_origin_dir):
                with tarfile.open(os.path.join(self.data_origin_dir, data_file), "r:gz") as tar_file:
                    tar_file.extractall(newest_data_dir)
            print "进行第一次数据解压完成"
            log("进行第一次数据解压完成")
            
            self.newest_data_id_config.set("newest", "data_id", newest_data_id)
            self.newest_data_id_config.set("newest", "update_date", UPDATE_DATE)
            self.newest_data_id_config.write()
            
        if not os.path.exists(self.data_patch_config_filepath):
            self.patch_status_config = Config(self.data_patch_config_filepath)
            self.patch_status_config.load()
            for space_name in DATA_SPACE_NAME:
                self.patch_status_config.set("data_md5", space_name, "")
            self.patch_status_config.write()
        else:
            self.patch_status_config = Config(self.data_patch_config_filepath)
            self.patch_status_config.load()
        
        self.have_update = []
        # Download update data.
        for data_file in os.listdir(self.data_origin_dir):
            self.download_data(data_file, test)
            
        if self.have_update:
            # Apply update data.
            for space_name in self.have_update:
                self.apply_data(space_name)
                
            # Extra data.
            newest_data_id = self.get_unique_id()
            newest_data_dir = os.path.join(DATA_DIR, "update", newest_data_id)

            for space_name in DATA_SPACE_NAME:
                data_filename = "%s.tar.gz" % space_name
                origin_data_file = os.path.join(self.data_origin_dir, data_filename)
                newest_data_file = os.path.join(self.data_newest_dir, data_filename)
                if not os.path.exists(newest_data_file):
                    os.system('cp %s %s' % (origin_data_file, newest_data_file))
            
            print "解压最新数据..."
            log("解压最新数据...")
            for data_file in os.listdir(self.data_newest_dir):
                newest_file = os.path.join(self.data_newest_dir, data_file)
                with tarfile.open(newest_file, "r:gz") as tar_file:
                    tar_file.extractall(newest_data_dir)
            print "解压最新数据完成"
            log("解压最新数据完成")
            
            self.previous_data_id = self.newest_data_id_config.get("newest", "data_id")
            self.newest_data_id_config.set("newest", "data_id", newest_data_id)
            self.newest_data_id_config.write()

        if self.is_fontend_running():
            print 'Frontend is running, clear data next time!'
            log('Frontend is running, clear data next time!')
        else:
            print 'Clear unused data.'
            log('Clear unused data.')
            self.clear_data_folder()

        print 'Done!'
        log("Done!")
        glib.timeout_add(200, self.mainloop.quit)

    def is_fontend_running(self):
        if os.path.exists(DATA_CURRENT_ID_CONFIG_PATH):
            config = Config(DATA_CURRENT_ID_CONFIG_PATH)
            config.load()
            data_id = config.get('current', 'data_id')
            if data_id:
                return True
            else:
                return False
        else:
            False

    def clear_data_folder(self):
        # clear data when ui is not running

        # judge which data is in using
        if os.path.exists(DATA_CURRENT_ID_CONFIG_PATH):
            current_data_id_config = Config(DATA_CURRENT_ID_CONFIG_PATH)
            current_data_id_config.load()
            current_data_id = current_data_id_config.get("current", "data_id")
        else:
            current_data_id = None

        self.newest_data_id_config.load()
        newest_data_id = self.newest_data_id_config.get("newest", "data_id")
        data_file_list = ["newest",
                          "origin",
                          "patch",
                          "update",
                          "data_newest_id.ini",
                          "patch_status.ini",
                          "cache_soft.db",
                          "origin_data_time"
                          ]
        data_id_list = (current_data_id, newest_data_id)
        
        for data_file in os.listdir(DATA_DIR):
            if data_file not in data_file_list:
                remove_directory(os.path.join(DATA_DIR, data_file))
                print ">> remove file: %s" % data_file
                log(">> remove file: %s" % data_file)
            elif data_file == "update":
                for data_id in os.listdir(os.path.join(DATA_DIR, "update")):
                    if data_id not in data_id_list:
                        remove_directory(os.path.join(DATA_DIR, "update", data_id))
                        print '>> remove old data: %s' % data_id
                        log('>> remove old data: %s' % data_id)
        
    def download_data(self, data_file, test):
        origin_data_md5 = md5_file(os.path.join(self.data_origin_dir, data_file))
        space_name = data_file.split(".tar.gz")[0]
        patch_dir = os.path.join(self.data_patch_dir, space_name)
        
        # Create download directory.
        create_directory(patch_dir)
                
        if test:
            remote_url = "http://%s.%s/test" % (space_name, UPDATE_DATA_URL)
        else:
            remote_url = "http://%s.%s/3.1" % (space_name, UPDATE_DATA_URL)
            
        patch_list_url = "%s/patch/%s/patch_md5.json" % (remote_url, origin_data_md5)    

        try:
            patch_list_json = json.load(urllib2.urlopen(patch_list_url))
        except Exception, e:
            patch_list_json = ""
            
        if patch_list_json != "":
            patch_name = patch_list_json["current_patch"][0]["name"].encode("utf-8")
            patch_md5 = patch_list_json["current_patch"][0]["md5"].encode("utf-8")

            local_patch_info = self.patch_status_config.get("data_md5", space_name)
            if local_patch_info == '' or (local_patch_info != '' and eval(local_patch_info)[1] != patch_md5):
                
                # Start download.
                download_url = "%s/patch/%s/%s" % (remote_url, origin_data_md5, patch_name)
                local_patch_file = os.path.join(patch_dir, patch_name)
                
                # TODO: 此处添加下载返回值判断
                os.system("wget %s -t 5 -c -O %s" % (download_url, local_patch_file))
                try:
                    download_md5 = md5_file(local_patch_file)

                    if download_md5 == patch_md5:
                        self.have_update.append(space_name)
                        if local_patch_info:
                            remove_file(os.path.join(self.data_patch_dir, eval(local_patch_info)[0]))
                        self.patch_status_config.set("data_md5", space_name, [patch_name, patch_md5])
                        self.patch_status_config.write()
                        print "%s: 补丁%s下载成功" % (space_name, patch_name)
                        log("%s: 补丁%s下载成功" % (space_name, patch_name))
                    else:
                        print "%s: 补丁%s下载错误" (space_name, patch_name)
                        log("%s: 补丁%s下载错误" (space_name, patch_name))
                except:
                    print "%s: 补丁%s下载失败" (space_name, patch_name)
                    log("%s: 补丁%s下载失败" (space_name, patch_name))
            else:
                print "%s: 当前数据是最新的" % space_name
                log("%s: 当前数据是最新的" % space_name)
        else:
            print "%s: 网络问题或者远端没有任何更新补丁" % space_name
            log("%s: 网络问题或者远端没有任何更新补丁" % space_name)
Exemple #16
0
    def build_update_patch(self, action):
        '''
        Build or upload the xdelta patch between the origin data and the
        current input data of this data space.

        @param action: "build" generates a new patch locally; "upload"
        pushes the newest patch and the md5 list to the server.
        '''
        if os.path.exists(self.data_origin_file):
            self.output_patch_dir = os.path.join(self.output_data_dir, "patch")
            if not os.path.exists(self.output_patch_dir):
                create_directory(self.output_patch_dir)

            self.output_temp_dir = os.path.join(self.output_data_dir, "temp")
            self.output_temp_file = os.path.join(self.output_temp_dir,
                                                 "%s.tar.gz" % self.space_name)
            self.output_temp_patch_file = os.path.join(self.output_temp_dir,
                                                       "patch")

            # patch_md5.json records the origin data md5 plus the list of
            # patches built against that origin data.
            self.patch_md5_file = os.path.join(self.output_patch_dir,
                                               "patch_md5.json")
            self.origin_data_md5 = md5_file(self.data_origin_file)
            if not os.path.exists(self.patch_md5_file):
                self.patch_md5_json = {}
                self.patch_md5_json["origin_data"] = self.origin_data_md5
                self.patch_md5_json["current_patch"] = []
            else:
                self.patch_md5_json = json.load(open(self.patch_md5_file))

            # Origin data changed: previously built patches no longer apply.
            if self.patch_md5_json["origin_data"] != self.origin_data_md5:
                self.patch_md5_json["origin_data"] = self.origin_data_md5
                self.patch_md5_json["current_patch"] = []

            self.remote_patch_dir = os.path.join(self.server_data_dir, "patch")

            if action == "build":
                # Delete temp directory first.
                create_directory(self.output_temp_dir, True)

                # Build temp file.
                print "%s: 创建本地更新数据..." % self.space_name
                with tarfile.open(self.output_temp_file, "w:gz") as tar:
                    for root, dir, files in os.walk(self.input_data_dir):
                        for file in files:
                            fullpath = os.path.join(root, file)
                            tar.add(fullpath,
                                    fullpath.split(self.input_data_dir)[1],
                                    False)
                print "%s: 创建本地更新数据完成" % self.space_name

                print "%s: 生成补丁文件..." % self.space_name
                subprocess.Popen("xdelta3 -ves %s %s %s" %
                                 (self.data_origin_file, self.output_temp_file,
                                  self.output_temp_patch_file),
                                 shell=True).wait()

                newest_patch_file_md5 = md5_file(self.output_temp_patch_file)
                current_patch_dict = self.patch_md5_json.get("current_patch")
                if current_patch_dict:
                    last_patch_md5 = current_patch_dict[0]["md5"]
                    # Identical patch already exists; discard and stop.
                    if last_patch_md5 == newest_patch_file_md5:
                        remove_directory(self.output_temp_dir)
                        print "%s: input_data数据未做任何改变,删除相同补丁文件" % self.space_name
                        sys.exit(0)
                else:
                    current_patch_dict = []
                newest_patch_dir = os.path.join(self.output_patch_dir,
                                                self.origin_data_md5)
                if not os.path.exists(newest_patch_dir):
                    create_directory(newest_patch_dir)

                # Patch name embeds the space name and a build timestamp.
                newest_patch_name = "%s-%s.xd3" % (
                    self.space_name, get_current_time("%Y_%m_%d_%H:%M:%S"))
                newest_patch_file = os.path.join(newest_patch_dir,
                                                 newest_patch_name)

                os.renames(self.output_temp_patch_file, newest_patch_file)
                remove_directory(self.output_temp_dir)
                current_patch_dict.insert(0, {
                    "name": newest_patch_name,
                    "md5": newest_patch_file_md5
                })
                print "%s: 生成补丁文件完成" % self.space_name

                print "%s: 写入补丁md5..." % self.space_name
                self.patch_md5_json["current_patch"] = current_patch_dict
                with open(self.patch_md5_file, "w") as fp:
                    json.dump(self.patch_md5_json, fp)
                print "%s: 写入补丁md5完成" % self.space_name

            elif action == "upload" and self.check_permission(self.space_name):
                # Upload patch file.
                current_patch_dict = self.patch_md5_json.get("current_patch")
                if current_patch_dict != []:
                    # Keep only the two newest patches, locally and remotely.
                    if len(current_patch_dict) > 2:
                        print "%s: 清理多余的补丁" % self.space_name
                        spare_patchs = current_patch_dict[2:]
                        current_patch_dict = current_patch_dict[:2]
                        for patch in spare_patchs:
                            patch_name = patch["name"].encode("utf-8")
                            local_path = os.path.join(self.output_patch_dir,
                                                      self.origin_data_md5,
                                                      patch_name)
                            try:
                                remove_file(local_path)
                                print "%s: 清除了补丁%s" % (self.space_name,
                                                       patch_name)
                            except:
                                pass
                            remote_path = os.path.join(self.remote_patch_dir,
                                                       self.origin_data_md5,
                                                       patch_name)
                            self.delete_remote_file(remote_path, patch_name)
                        self.patch_md5_json[
                            "current_patch"] = current_patch_dict
                        with open(self.patch_md5_file, "w") as fp:
                            json.dump(self.patch_md5_json, fp)

                    newest_patch_name = current_patch_dict[0]["name"].encode(
                        "utf-8")
                    newest_patch_file = os.path.join(self.output_patch_dir,
                                                     self.origin_data_md5,
                                                     newest_patch_name)
                    remote_patch_file = os.path.join(self.remote_patch_dir,
                                                     self.origin_data_md5,
                                                     newest_patch_name)
                    remote_patch_md5_file = os.path.join(
                        self.remote_patch_dir, self.origin_data_md5,
                        "patch_md5.json")

                    # upload newest_patch_file
                    self.upload_file(newest_patch_file, remote_patch_file,
                                     "补丁更新数据")

                    # Update patch list file.
                    self.upload_file(self.patch_md5_file,
                                     remote_patch_md5_file, "补丁md5列表文件")
                else:
                    print "%s: 当前没有任何补丁,请打好补丁再上传吧!" % self.space_name
        else:
            print "%s: %s 不存在, 无法进行补丁的创建和上传" % (self.space_name,
                                                self.data_origin_file)
Exemple #17
0
    def update(self, (begin, end)):
        current_time = time.time()
        greenlet = self.greenlet_dict[begin]
        greenlet.info["status"] = STATUS_DOWNLOADING
        greenlet.info["range_size"] = end - begin
        greenlet.info["remain_size"] = end - begin
        greenlet.info["start_time"] = current_time
        greenlet.info["update_time"] = current_time
        greenlet.info["realtime_time"] = current_time
        greenlet.info["realtime_size"] = 0

        self.signal.emit("part_start", begin, greenlet.info)

        filepath = "%s_%s" % (self.temp_save_path, begin)

        remove_file(filepath)
        save_file = open(filepath, "ab")

        def update_data(begin, data):
            save_file.write(data)
            data_len = len(data)
            self.update_info["downloaded_size"] += data_len
            self.update_info[
                "remain_size"] = self.file_size - self.update_info[
                    "downloaded_size"]

            current_time = time.time()
            self.update_info[
                "average_speed"] = self.update_info["downloaded_size"] / (
                    current_time - self.update_info["start_time"])
            self.update_info["update_time"] = current_time
Exemple #18
0
def remove_file(path):
    print "Please import deepin_utils.file.remove_file, this function will departed in next release version."
    return file.remove_file(path)
 def update(self, (begin, end)):
     """Start downloading the byte range [begin, end) in this worker.

     Same routine as the other paste of update() above, but this copy
     additionally maintains remain_time and realtime_size.

     NOTE(review): snippet truncated -- the body of the final "if"
     statement is missing from this paste.
     """
     current_time = time.time()
     # Worker bookkeeping is keyed by the range's begin offset.
     greenlet = self.greenlet_dict[begin]
     greenlet.info["status"] = STATUS_DOWNLOADING
     greenlet.info["range_size"] = end - begin
     greenlet.info["remain_size"] = end - begin
     greenlet.info["start_time"] = current_time
     greenlet.info["update_time"] = current_time
     greenlet.info["realtime_time"] = current_time
     greenlet.info["realtime_size"] = 0

     self.signal.emit("part_start", begin, greenlet.info)

     # One temp part file per byte range.
     filepath = "%s_%s" % (self.temp_save_path, begin)

     # Drop any stale partial data before appending from scratch.
     remove_file(filepath)
     save_file = open(filepath, "ab")

     def update_data(begin, data):
         # Append the received chunk and refresh the shared progress totals.
         save_file.write(data)
         data_len = len(data)
         self.update_info["downloaded_size"] += data_len
         self.update_info["remain_size"] = self.file_size - self.update_info["downloaded_size"]

         current_time = time.time()
         # Average speed over the whole transfer, in bytes per second.
         self.update_info["average_speed"] = self.update_info["downloaded_size"] / (current_time - self.update_info["start_time"])
         self.update_info["update_time"] = current_time
         # Estimated time remaining, derived from the average speed so far.
         self.update_info["remain_time"] = (self.file_size - self.update_info["downloaded_size"]) / self.update_info["average_speed"]
         self.update_info["realtime_size"] += data_len

         # NOTE(review): paste ends here -- the if-body is missing.
         if self.update_info["remain_size"] == 0 or current_time - greenlet.info["realtime_time"] >= REALTIME_DELAY:
Exemple #20
0
def clean():
    """Reset the patch workspace under DATA_DIR.

    Deletes the patch status file and any generated "newest", "update"
    or "patch" directories that exist in DATA_DIR.
    """
    remove_file(os.path.join(DATA_DIR, "patch_status.ini"))
    stale_names = ("newest", "update", "patch")
    for entry in os.listdir(DATA_DIR):
        if entry in stale_names:
            remove_path(os.path.join(DATA_DIR, entry))
    def build_update_patch(self, action):
        """Build or upload an xdelta3 binary patch for this space's data.

        action == "build": tar input_data into a temp tarball, diff it
        against data_origin_file with xdelta3, store the resulting patch
        under output/patch/<origin_md5>/ and record it (name + md5) in
        patch_md5.json.

        action == "upload": push the newest patch and the md5 list file
        to the remote server, pruning all but the two most recent
        patches both locally and remotely.

        Requires data_origin_file to exist; otherwise only prints an
        error.  May call sys.exit(0) when the new patch is identical to
        the previous one.
        """
        if os.path.exists(self.data_origin_file):
            self.output_patch_dir = os.path.join(self.output_data_dir, "patch")
            if not os.path.exists(self.output_patch_dir):
                create_directory(self.output_patch_dir)

            # Scratch area for the tarball and the raw xdelta3 output.
            self.output_temp_dir = os.path.join(self.output_data_dir, "temp")
            self.output_temp_file = os.path.join(self.output_temp_dir, "%s.tar.gz" % self.space_name)
            self.output_temp_patch_file = os.path.join(self.output_temp_dir, "patch")

            # patch_md5.json tracks which origin the patches apply to
            # ("origin_data") and the list of built patches ("current_patch").
            self.patch_md5_file = os.path.join(self.output_patch_dir, "patch_md5.json")
            self.origin_data_md5 = md5_file(self.data_origin_file)
            if not os.path.exists(self.patch_md5_file):
                self.patch_md5_json = {}
                self.patch_md5_json["origin_data"] = self.origin_data_md5
                self.patch_md5_json["current_patch"] = []
            else:
                self.patch_md5_json = json.load(open(self.patch_md5_file))

            # Origin data changed on disk: older patches no longer apply,
            # so start a fresh patch list against the new origin md5.
            if self.patch_md5_json["origin_data"] != self.origin_data_md5:
                self.patch_md5_json["origin_data"] = self.origin_data_md5
                self.patch_md5_json["current_patch"] = []

            self.remote_patch_dir = os.path.join(self.server_data_dir, "patch")

            if action == "build":
                # Delete temp directory first.
                create_directory(self.output_temp_dir, True)

                # Build temp file.
                print "%s: 创建本地更新数据..." % self.space_name    
                with tarfile.open(self.output_temp_file, "w:gz") as tar:
                    for root, dir, files in os.walk(self.input_data_dir):
                        for file in files:
                            fullpath=os.path.join(root, file)
                            # Archive name = path relative to input_data_dir;
                            # recursive=False since os.walk already descends.
                            tar.add(fullpath, fullpath.split(self.input_data_dir)[1], False)
                print "%s: 创建本地更新数据完成" % self.space_name    
                
                print "%s: 生成补丁文件..." % self.space_name    
                # xdelta3: -e encode, -s <source> -- diff origin tarball
                # against the freshly built one into the temp patch file.
                subprocess.Popen(
                    "xdelta3 -ves %s %s %s" % (
                        self.data_origin_file,
                        self.output_temp_file,
                        self.output_temp_patch_file),
                    shell=True).wait()
                
                newest_patch_file_md5 = md5_file(self.output_temp_patch_file)
                current_patch_dict = self.patch_md5_json.get("current_patch")
                if current_patch_dict:
                    # Same md5 as the last patch: input_data is unchanged,
                    # so discard the duplicate and stop.
                    last_patch_md5 = current_patch_dict[0]["md5"]
                    if last_patch_md5 == newest_patch_file_md5:
                        remove_directory(self.output_temp_dir)
                        print "%s: input_data数据未做任何改变,删除相同补丁文件" % self.space_name
                        sys.exit(0)
                else:
                    current_patch_dict = []
                # Patches are stored per-origin: patch/<origin_md5>/.
                newest_patch_dir = os.path.join(self.output_patch_dir, self.origin_data_md5)
                if not os.path.exists(newest_patch_dir):
                    create_directory(newest_patch_dir)

                # Timestamped name keeps successive patches distinct.
                newest_patch_name = "%s-%s.xd3" % (self.space_name, get_current_time("%Y_%m_%d_%H:%M:%S"))
                newest_patch_file = os.path.join(newest_patch_dir, newest_patch_name)

                os.renames(self.output_temp_patch_file, newest_patch_file)
                remove_directory(self.output_temp_dir)
                # Newest patch goes to the front of the list.
                current_patch_dict.insert(0, {"name" : newest_patch_name, "md5" : newest_patch_file_md5})
                print "%s: 生成补丁文件完成" % self.space_name    
                
                print "%s: 写入补丁md5..." % self.space_name
                self.patch_md5_json["current_patch"] = current_patch_dict
                with open(self.patch_md5_file, "w") as fp:
                    json.dump(self.patch_md5_json, fp)
                print "%s: 写入补丁md5完成" % self.space_name
                
            elif action == "upload" and self.check_permission(self.space_name):
                # Upload patch file.
                current_patch_dict = self.patch_md5_json.get("current_patch")
                if current_patch_dict != []:
                    # Keep only the two newest patches; delete the rest
                    # locally and remotely, then persist the pruned list.
                    if len(current_patch_dict) > 2:
                        print "%s: 清理多余的补丁" % self.space_name
                        spare_patchs = current_patch_dict[2:]
                        current_patch_dict = current_patch_dict[:2]
                        for patch in spare_patchs:
                            patch_name = patch["name"].encode("utf-8")
                            local_path = os.path.join(self.output_patch_dir, self.origin_data_md5, patch_name)
                            try:
                                remove_file(local_path)
                                print "%s: 清除了补丁%s" % (self.space_name, patch_name)
                            except:
                                # Best-effort cleanup: a missing local file
                                # must not abort the upload.
                                pass
                            remote_path = os.path.join(self.remote_patch_dir, self.origin_data_md5, patch_name)
                            self.delete_remote_file(remote_path, patch_name)
                        self.patch_md5_json["current_patch"] = current_patch_dict
                        with open(self.patch_md5_file, "w") as fp:
                            json.dump(self.patch_md5_json, fp)

                    newest_patch_name = current_patch_dict[0]["name"].encode("utf-8")
                    newest_patch_file = os.path.join(self.output_patch_dir, self.origin_data_md5, newest_patch_name)
                    remote_patch_file = os.path.join(self.remote_patch_dir, self.origin_data_md5, newest_patch_name)
                    remote_patch_md5_file = os.path.join(self.remote_patch_dir, self.origin_data_md5, "patch_md5.json")

                    # upload newest_patch_file
                    self.upload_file(newest_patch_file, remote_patch_file, "补丁更新数据")

                    # Update patch list file.
                    self.upload_file(self.patch_md5_file, remote_patch_md5_file, "补丁md5列表文件")
                else:
                    print "%s: 当前没有任何补丁,请打好补丁再上传吧!" % self.space_name
        else:
            print "%s: %s 不存在, 无法进行补丁的创建和上传" % (self.space_name, self.data_origin_file)