Example #1
    def build_index(self, remove_old=True):

        if remove_old:
            remove_directory(self.search_db_dir)

        self.__xappy = xappy.IndexerConnection(self.search_db_dir)

        self.__xappy.add_field_action("module_uid",
                                      xappy.FieldActions.STORE_CONTENT)

        self.__xappy.add_field_action("keyword_term",
                                      xappy.FieldActions.INDEX_FREETEXT,
                                      nopos=True)

        for module_keyword in self.__keywords:
            for keyword in module_keyword[2]:
                module_doc = xappy.UnprocessedDocument()

                module_doc.fields.append(xappy.Field("module_uid", keyword[0]))

                terms = list(split_word(keyword[1], True))
                module_doc.fields.append(
                    xappy.Field("keyword_term", ' '.join(terms)))

                self.__xappy.add(module_doc)

        self.__xappy.close()
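
For orientation, here is a rough sketch of how an index built by build_index() might be queried. It assumes xappy's SearchConnection API (query_parse/search) and a hypothetical database path; treat it as a sketch rather than part of the original project:

import xappy

def search_modules(search_db_dir, text, max_results=10):
    # Open the same on-disk database that build_index() wrote.
    conn = xappy.SearchConnection(search_db_dir)
    try:
        # Parse free text against the indexed "keyword_term" field and fetch the top hits.
        results = conn.search(conn.query_parse(text), 0, max_results)
        # "module_uid" was stored with STORE_CONTENT, so it is available on each result.
        return [result.data["module_uid"][0] for result in results]
    finally:
        conn.close()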
Example #3
    def clear_data_folder(self):
        # Clear stale data while the UI is not running.

        # Determine which data set is currently in use.
        if os.path.exists(DATA_CURRENT_ID_CONFIG_PATH):
            current_data_id_config = Config(DATA_CURRENT_ID_CONFIG_PATH)
            current_data_id_config.load()
            current_data_id = current_data_id_config.get("current", "data_id")
        else:
            current_data_id = None

        self.newest_data_id_config.load()
        newest_data_id = self.newest_data_id_config.get("newest", "data_id")
        data_file_list = ["newest",
                          "origin",
                          "patch",
                          "update",
                          "data_newest_id.ini",
                          "patch_status.ini",
                          "cache_soft.db",
                          "origin_data_time"
                          ]
        data_id_list = (current_data_id, newest_data_id)
        
        for data_file in os.listdir(DATA_DIR):
            if data_file not in data_file_list:
                remove_directory(os.path.join(DATA_DIR, data_file))
                print ">> remove file: %s" % data_file
                log(">> remove file: %s" % data_file)
            elif data_file == "update":
                for data_id in os.listdir(os.path.join(DATA_DIR, "update")):
                    if data_id not in data_id_list:
                        remove_directory(os.path.join(DATA_DIR, "update", data_id))
                        print '>> remove old data: %s' % data_id
                        log('>> remove old data: %s' % data_id)
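
The Config lookups above (get("current", "data_id"), get("newest", "data_id")) imply plain ini files. Below is a small sketch of the assumed layout of data_newest_id.ini, using the standard-library ConfigParser instead of the project's Config wrapper; the section and option names come from the calls above, the value is invented:

from ConfigParser import ConfigParser  # Python 2, matching the code above
from StringIO import StringIO

# Assumed shape of DATA_DIR/data_newest_id.ini.
sample = StringIO("[newest]\ndata_id = 2013_08_01_example\n")

parser = ConfigParser()
parser.readfp(sample)
print parser.get("newest", "data_id")  # -> 2013_08_01_example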
Example #4
 def start(self):
     self.init_file_size()
     if not self.error_flag and self.file_size > 0:
         self.last_byte_index = self.file_size - 1
         
         create_directory(self.temp_save_dir)
         
         (downloaded_pieces, download_pieces, downloaded_size) = self.get_download_pieces()
         self.check_download_pieces(download_pieces)
         
         if downloaded_size == self.file_size:
             self.signal.emit("no-need-fetch")
         else:
             current_time = time.time()
             self.update_info = {
                 "file_size" : self.file_size,
                 "downloaded_size" : downloaded_size,
                 "start_time" : current_time,
                 "update_time" : current_time,
                 "remain_time" : -1,
                 "average_speed" : -1,
                 "realtime_speed" : -1,
                 "realtime_time" : current_time,
                 "realtime_size" : 0,
                 }
             
             self.signal.emit("start", self.update_info)
             
             for (begin, end) in download_pieces:
                 self.create_greenlet(begin, end)
                 
             for greenlet in self.greenlet_dict.values():
                 self.pool.start(greenlet)
             self.pool.join()
             
         if self.stop_flag:
             remove_directory(self.temp_save_dir)
             self.signal.emit("stop")
         elif self.pause_flag:
             self.signal.emit("pause")
         else:
             offset_ids = sorted(map(lambda (start, end): start, downloaded_pieces + download_pieces))
             command = "cat " + ' '.join(map(lambda offset_id: "%s_%s" % (self.temp_save_path, offset_id), offset_ids)) + " > %s" % self.file_save_path
             subprocess.Popen(command, shell=True).wait()
             
             remove_directory(self.temp_save_dir)
             
             if self.file_hash_info != None:
                 (expect_hash_type, expect_hash_value) = self.file_hash_info
                 hash_value = get_hash(self.file_save_path, expect_hash_type)
                 if hash_value != expect_hash_value:
                     self.signal.emit("check-hash-failed", expect_hash_value, hash_value)
                 else:    
                     self.signal.emit("finish")
             else:
                 self.signal.emit("finish")
     else:
         self.signal.emit("get-file-size-failed")
Example #5
    def load_skin_from_package(self, filepath):
        '''
        Load a theme from the given skin package.

        @param filepath: the file path of the package.
        '''
        # Init.
        skin_dir = os.path.join(self.user_skin_dir, str(uuid.uuid4()))

        # Create skin directory.
        create_directory(skin_dir, True)

        # Extract skin package.
        tar = tarfile.open(filepath, "r:gz")
        tar.extractall(skin_dir)

        # Get skin image file.
        config = Config(os.path.join(skin_dir, "config.ini"))
        config.load()

        # Move theme files to given directory if theme is not in default theme list.
        skin_theme_name = config.get("theme", "theme_name")
        if skin_theme_name not in COLOR_SEQUENCE:
            # Check the version when the package ships a custom theme that is not part of the standard themes.
            app_id = config.get("application", "app_id")
            app_version = config.get("application", "app_version")
            if app_id == self.app_given_id and app_version == self.app_given_version:
                # Remove any existing copy of the theme from the target directories.
                remove_directory(os.path.join(self.ui_theme_dir, skin_theme_name))
                if self.app_theme_dir != None:
                    remove_directory(os.path.join(self.app_theme_dir, skin_theme_name))

                # Move new theme files to given directories.
                shutil.move(os.path.join(skin_dir, "ui_theme", skin_theme_name), self.ui_theme_dir)
                if self.app_theme_dir != None:
                    shutil.move(os.path.join(skin_dir, "app_theme", skin_theme_name), self.app_theme_dir)

                # Remove temp theme directories under skin directory.
                remove_directory(os.path.join(skin_dir, "ui_theme"))
                remove_directory(os.path.join(skin_dir, "app_theme"))
            else:
                # Remove the skin directory if the version does not match.
                remove_directory(skin_dir)

                return False

        # Apply new skin.
        skin_image_file = config.get("background", "image")
        if self.reload_skin(os.path.basename(skin_dir)):
            self.apply_skin()

            return (True, skin_dir, skin_image_file)
        else:
            return (False, skin_dir, skin_image_file)
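
A caller has to handle both return shapes above: False when the package's app id or version does not match, and a (success, skin_dir, skin_image_file) tuple otherwise. A minimal sketch, where skin_manager and the package path are placeholders:

result = skin_manager.load_skin_from_package("/tmp/example-skin.tar.gz")
if result is False:
    print "Skin package was built for a different application or version."
else:
    (ok, skin_dir, skin_image_file) = result
    if ok:
        print "Applied skin from %s (background image: %s)" % (skin_dir, skin_image_file)
    else:
        print "Skin extracted to %s but could not be reloaded." % skin_dir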
Example #6
def remove_directory(path):
    print "Please import deepin_utils.file.remove_directory, this function will departed in next release version."
    return file.remove_directory(path)
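
Per the deprecation message, new code should call the helper from deepin_utils directly instead of going through this wrapper. A minimal sketch; the module path is taken from the message itself and the path argument is just an example:

from deepin_utils.file import remove_directory

remove_directory("/tmp/some-stale-cache")  # example path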
Example #9
    def build_update_patch(self, action):
        if os.path.exists(self.data_origin_file):
            self.output_patch_dir = os.path.join(self.output_data_dir, "patch")
            if not os.path.exists(self.output_patch_dir):
                create_directory(self.output_patch_dir)

            self.output_temp_dir = os.path.join(self.output_data_dir, "temp")
            self.output_temp_file = os.path.join(self.output_temp_dir,
                                                 "%s.tar.gz" % self.space_name)
            self.output_temp_patch_file = os.path.join(self.output_temp_dir,
                                                       "patch")

            self.patch_md5_file = os.path.join(self.output_patch_dir,
                                               "patch_md5.json")
            self.origin_data_md5 = md5_file(self.data_origin_file)
            if not os.path.exists(self.patch_md5_file):
                self.patch_md5_json = {}
                self.patch_md5_json["origin_data"] = self.origin_data_md5
                self.patch_md5_json["current_patch"] = []
            else:
                with open(self.patch_md5_file) as fp:
                    self.patch_md5_json = json.load(fp)

            if self.patch_md5_json["origin_data"] != self.origin_data_md5:
                self.patch_md5_json["origin_data"] = self.origin_data_md5
                self.patch_md5_json["current_patch"] = []

            self.remote_patch_dir = os.path.join(self.server_data_dir, "patch")

            if action == "build":
                # Delete temp directory first.
                create_directory(self.output_temp_dir, True)

                # Build the temp tarball from the current input data.
                print "%s: 创建本地更新数据..." % self.space_name  # building local update data...
                with tarfile.open(self.output_temp_file, "w:gz") as tar:
                    for root, dirs, files in os.walk(self.input_data_dir):
                        for filename in files:
                            fullpath = os.path.join(root, filename)
                            tar.add(fullpath,
                                    fullpath.split(self.input_data_dir)[1],
                                    False)
                print "%s: 创建本地更新数据完成" % self.space_name  # local update data built

                print "%s: 生成补丁文件..." % self.space_name  # generating patch file...
                subprocess.Popen("xdelta3 -ves %s %s %s" %
                                 (self.data_origin_file, self.output_temp_file,
                                  self.output_temp_patch_file),
                                 shell=True).wait()

                newest_patch_file_md5 = md5_file(self.output_temp_patch_file)
                current_patch_dict = self.patch_md5_json.get("current_patch")
                if current_patch_dict:
                    last_patch_md5 = current_patch_dict[0]["md5"]
                    if last_patch_md5 == newest_patch_file_md5:
                        remove_directory(self.output_temp_dir)
                        print "%s: input_data数据未做任何改变,删除相同补丁文件" % self.space_name
                        sys.exit(0)
                else:
                    current_patch_dict = []
                newest_patch_dir = os.path.join(self.output_patch_dir,
                                                self.origin_data_md5)
                if not os.path.exists(newest_patch_dir):
                    create_directory(newest_patch_dir)

                newest_patch_name = "%s-%s.xd3" % (
                    self.space_name, get_current_time("%Y_%m_%d_%H:%M:%S"))
                newest_patch_file = os.path.join(newest_patch_dir,
                                                 newest_patch_name)

                os.renames(self.output_temp_patch_file, newest_patch_file)
                remove_directory(self.output_temp_dir)
                current_patch_dict.insert(0, {
                    "name": newest_patch_name,
                    "md5": newest_patch_file_md5
                })
                print "%s: 生成补丁文件完成" % self.space_name

                print "%s: 写入补丁md5..." % self.space_name
                self.patch_md5_json["current_patch"] = current_patch_dict
                with open(self.patch_md5_file, "w") as fp:
                    json.dump(self.patch_md5_json, fp)
                print "%s: 写入补丁md5完成" % self.space_name

            elif action == "upload" and self.check_permission(self.space_name):
                # Upload patch file.
                current_patch_dict = self.patch_md5_json.get("current_patch")
                if current_patch_dict != []:
                    if len(current_patch_dict) > 2:
                        print "%s: 清理多余的补丁" % self.space_name
                        spare_patchs = current_patch_dict[2:]
                        current_patch_dict = current_patch_dict[:2]
                        for patch in spare_patchs:
                            patch_name = patch["name"].encode("utf-8")
                            local_path = os.path.join(self.output_patch_dir,
                                                      self.origin_data_md5,
                                                      patch_name)
                            try:
                                remove_file(local_path)
                                print "%s: 清除了补丁%s" % (self.space_name,
                                                       patch_name)
                            except:
                                pass
                            remote_path = os.path.join(self.remote_patch_dir,
                                                       self.origin_data_md5,
                                                       patch_name)
                            self.delete_remote_file(remote_path, patch_name)
                        self.patch_md5_json[
                            "current_patch"] = current_patch_dict
                        with open(self.patch_md5_file, "w") as fp:
                            json.dump(self.patch_md5_json, fp)

                    newest_patch_name = current_patch_dict[0]["name"].encode(
                        "utf-8")
                    newest_patch_file = os.path.join(self.output_patch_dir,
                                                     self.origin_data_md5,
                                                     newest_patch_name)
                    remote_patch_file = os.path.join(self.remote_patch_dir,
                                                     self.origin_data_md5,
                                                     newest_patch_name)
                    remote_patch_md5_file = os.path.join(
                        self.remote_patch_dir, self.origin_data_md5,
                        "patch_md5.json")

                    # upload newest_patch_file
                    self.upload_file(newest_patch_file, remote_patch_file,
                                     "补丁更新数据")

                    # Update patch list file.
                    self.upload_file(self.patch_md5_file,
                                     remote_patch_md5_file, "补丁md5列表文件")
                else:
                    print "%s: 当前没有任何补丁,请打好补丁再上传吧!" % self.space_name
        else:
            print "%s: %s 不存在, 无法进行补丁的创建和上传" % (self.space_name,
                                                self.data_origin_file)  # origin file missing; cannot build or upload a patch
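
build_update_patch only encodes the patch (xdelta3 -ves source target patch). On the client side the matching decode step would look roughly like the sketch below, assuming the standard xdelta3 command line (-d to decode, -f to overwrite, -s for the source file); the file names are placeholders:

import subprocess

def apply_patch(origin_file, patch_file, output_file):
    # Rebuild the newest tarball from the shipped origin data plus the downloaded patch.
    subprocess.Popen("xdelta3 -dfs %s %s %s" % (origin_file, patch_file, output_file),
                     shell=True).wait()

apply_patch("origin.tar.gz", "example-2013_08_01.xd3", "newest.tar.gz")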