Beispiel #1
0
 def processItem(self, job, item):
     """Debounce file-change notifications for one monitored path.

     A job is re-released into the tube (with gItemDelayTime delay) while
     its timestamp keeps changing; once the timestamp is stable the raw job
     body is forwarded to self.outputBeanstalk and the job is deleted.

     job  -- beanstalk job wrapper (supports delete()/release(), .body).
     item -- decoded payload with "monitoringPath", "fullPath", "timestamp".
     """
     monitoringFullPath = transform.transformDirToInternal(item["monitoringPath"])
     fullPath = transform.transformDirToInternal(item["fullPath"])
     # Check if item exists in local file system; a vanished path makes the
     # notification moot, so drop the job outright.
     if not os.path.exists(fullPath):
         job.delete()
         return
     if not self.itemToProcess.has_key(monitoringFullPath):
         self.itemToProcess[monitoringFullPath] = {}
     #############################################
     # Start processing
     #############################################
     # If the full path already in tube, check if the timestamp is updated
     if self.itemToProcess[monitoringFullPath].has_key(fullPath):
         savedItem = self.itemToProcess[monitoringFullPath][fullPath]
         if savedItem["timestamp"] == item["timestamp"]:
             # Item not updated for time out time, add it to output queue
             self.outputBeanstalk.put(job.body)
             print "output item:", item
             job.delete()
         elif savedItem["timestamp"] < item["timestamp"]:
             # Received a new notification for an path, update saved info
             self.itemToProcess[monitoringFullPath][fullPath] = item
             job.release(priority=beanstalkc.DEFAULT_PRIORITY, delay=gItemDelayTime)
             print "item updated"
         else:
             # Saved timestamp is newer than the incoming one: stale job.
             job.delete()
     else:
         # New notification, add it
         self.itemToProcess[monitoringFullPath][fullPath] = item
         # print item, job, gItemDelayTime
         # priority is necessary to avoid error for requesting priority to be an int in beanstalkc
         job.release(priority=beanstalkc.DEFAULT_PRIORITY, delay=gItemDelayTime)
         print "new item added"
Beispiel #2
0
 def processItem(self, job, item):
     #fullPath = transform.transformDirToInternal(item["fullPath"])
     #monitoringFullPath = transform.transformDirToInternal(item["monitoringPath"])
     
     #source_dir = item["SourceDir"]
     #misc.ensureDir(source_dir)
     
     tag = item["tag"]
     
     working_dir = item["WorkingDir"]
     misc.ensureDir(transform.transformDirToInternal(working_dir))
     
     target_dir = item["TargetDir"]
     misc.ensureDir(transform.transformDirToInternal(target_dir))
     
     import wwjufsdatabase.libs.services.servicesV2 as service
     req = service.req()
     t = tagSystem.getTagSysObj(req.getDbSys())
     e = t.getObjs(unicode(tag))
     for i in e:
         print i
         source_dir = transform.transformDirToInternal(i)
         
         AutoArchiveThumb(source_dir, target_dir, working_dir)
     job.delete()
     return False
     #Return true only when the item should be kept in the tube
     return True
Beispiel #3
0
 def encInfoZip(self, pendingCollection):
     """Flush self.zippedFileInfo: dump it as a JSON .log file, zip the log,
     encrypt-copy the zip into a date-partitioned directory under
     self.zipStorageDir, then record the flush via updateZipLog() and reset
     the in-memory info dict.

     pendingCollection -- passed through to self.updateZipLog().
     """
     ############################
     # Save info for zipped files
     ############################
     logFilePath = transform.transformDirToInternal(
         fileTools.getTimestampWithFreeName(self.workingDir, '.log'))
     s = json.dumps(self.zippedFileInfo, sort_keys=True, indent=4)
     f = open(logFilePath,'w')
     f.write(s)
     f.close()
     logZipPath = logFilePath.replace(u'.log',u'.log.zip')
     logZip = zipClass.ZFile(logZipPath, 'w')
     logZip.addfile(unicode(logFilePath), os.path.basename(logFilePath))
     logZip.close()
     
     # Partition encrypted logs by UTC date: <zipStorageDir>/YYYY/MM/DD.
     gTimeV = time.gmtime()
     yearStr = time.strftime("%Y", gTimeV)
     monthStr = time.strftime("%m", gTimeV)
     dayStr = time.strftime("%d", gTimeV)
     dateTimeDir = yearStr+"/"+monthStr+"/"+dayStr
     newEncDir = unicode(os.path.join(self.zipStorageDir, dateTimeDir))
     misc.ensureDir(newEncDir)
     targetPath = transform.transformDirToInternal(
             fileTools.getTimestampWithFreeName(newEncDir, '.enc'))
     # NOTE(review): the reserved '.enc' name is only used as a base — the
     # actual copy target carries the '.encziplog' extension; confirm.
     self.encCopier.copy(logZipPath, targetPath.replace('.enc', '.encziplog'))
     
     
     ############################
     # Update state in storage state
     ############################
     self.updateZipLog(self.zippedFileInfo, pendingCollection)
     #Clean the current zipped file info
     self.zippedFileInfo = {}
def AutoArchiveThumb(source_folder = gAutoArchiveFullPath, target_dir = g_default_target_dir,
                     workingDir = gWorkingDir, taskUuid = None):
    """Wire up the enumerate -> monitor -> delay -> archive service chain
    that archives thumbnails for source_folder into target_dir.

    source_folder -- tree to enumerate and monitor.
    target_dir    -- destination for archived output.
    workingDir    -- scratch directory for the archive service.
    taskUuid      -- suffix keeping this task's tube names unique; a fresh
                     uuid4 is generated per call when omitted.
    """
    # BUG FIX: the old default `taskUuid = str(uuid.uuid4())` was evaluated
    # exactly once at import time, so every defaulted call reused the same
    # tube names. Generate a fresh uuid per call instead.
    if taskUuid is None:
        taskUuid = str(uuid.uuid4())
    inputTubeName = "collectionListTube"+taskUuid
    delayedCollectionListTubeName = "delayedCollectionListTubeName"+taskUuid

    target_dir = transform.transformDirToInternal(target_dir)
    source_folder = transform.transformDirToInternal(source_folder)

    # Enumerate existing entries into the input tube.
    s1 = FolderEnumeratingService.FolderEnumeratingService()
    s1.addItem({"full_path": source_folder, "black_list":[], 
                "target_tube_name": inputTubeName})

    # Monitor for new changes into the same tube.
    s2 = monitorService.monitorService()
    s2.addItem({"command": "monitorService", "fullPath":source_folder,
               "targetTubeName": inputTubeName,"blackList":g_ignore_file_type_list})

    # Debounce: delay items before they reach the archive service.
    s3 = tubeDelayService()
    s3.addItem({"inputTubeName":inputTubeName,
               "outputTubeName": delayedCollectionListTubeName,"blackList":g_ignore_file_type_list})

    s4 = FolderInfoArchiveService()
    s4.addItem({"InputTubeName":delayedCollectionListTubeName, "WorkingDir":workingDir, "TargetDir": target_dir})
Beispiel #5
0
 def __init__(self, rootPath, fullPath, itemInfo, zipFileObj, pathInZipFile):
     """Folder-storage item that lives inside a zip archive.

     rootPath/fullPath are normalized to internal form; zipFileObj and
     pathInZipFile locate the payload inside the archive.
     """
     folderStorage.folderStorageItem.__init__(self, rootPath, fullPath)
     self.zipFileObj = zipFileObj
     self.pathInZipFile = pathInZipFile
     self.itemInfo = itemInfo
     # Normalize both paths after the base-class init.
     self.rootPath = transform.transformDirToInternal(rootPath)
     self.fullPath = transform.transformDirToInternal(fullPath)
Beispiel #6
0
 def __init__(self, rootDir, backupDir, syncFolderCollectionId, dbInst):
     """Collection of synced folder items rooted at rootDir.

     syncFolderCollectionId is a virtual collection which contains all
     items with synced info; dbInst provides the underlying databases.
     """
     collectionDatabase.collectionOnMongoDbBase.__init__(self, syncFolderCollectionId, dbInst.getCollectionDb())
     self.objDb = dbInst
     # Normalize both directories to internal form.
     self.backupDir = transform.transformDirToInternal(backupDir)
     self.rootDir = transform.transformDirToInternal(rootDir)
     # Recursive enumeration view over the synced root.
     self.folderCollection = folderRecursiveEnumCollection.folderRecursiveEnumCollection(self.rootDir, dbInst)
Beispiel #7
0
    def processItem(self, job, item):
        """Dispatch tag-processing work to an output beanstalk tube.

        Two payload shapes are handled:
        * item has "output_tube_name": registers a new tag-processing task in
          self.processing_tag_dict and enumerates everything already tagged.
        * otherwise: one newly tagged url for an already-registered tag; only
          that url is processed, reusing the stored task's configuration.

        One work item per tagged path is pushed to the task's output tube.
        Deletes the job and returns False so it is not re-queued.
        """
        #fullPath = transform.transformDirToInternal(item["fullPath"])
        #monitoringFullPath = transform.transformDirToInternal(item["monitoringPath"])
        
        #source_dir = item["SourceDir"]
        #misc.ensureDir(source_dir)
        
        tag = item["tag"]
        
        task_item = item
        
        if item.has_key("output_tube_name"):
            #################################
            # Adding tag processing task
            #################################
            task_item = item
            self.processing_tag_dict[tag] = item
            import wwjufsdatabase.libs.services.servicesV2 as service
            req = service.req()
            t = tagSystem.getTagSysObj(req.getDbSys())
            tagged_item_list = t.getObjs(unicode(tag))
        else:
            #################################
            # A new tag added for existing tag processing task
            #################################
            if self.processing_tag_dict.has_key(tag):
                #Task exist, add the new tagged elment for processing
                task_item = self.processing_tag_dict[tag]
                tagged_item_list = [transform.transformDirToInternal(item["url"])]
            else:
                #Not a valid item, return
                print "not a valid item or tag not have processor yet"
                job.delete()
                return False
            
            
        # From here on task_item is the registered task (or the incoming
        # registration item) and carries the tube/dir configuration.
        output_tube_name = task_item["output_tube_name"]
        
        working_dir = task_item["working_dir"]
        misc.ensureDir(transform.transformDirToInternal(working_dir))
        
        target_dir = task_item["target_dir"]
        misc.ensureDir(transform.transformDirToInternal(target_dir))
        
        
        b = beanstalkServiceBase(output_tube_name)

        for i in tagged_item_list:
            info(i)
            source_dir = transform.transformDirToInternal(i)
            
            b.addItem({"source_dir":source_dir, "working_dir": working_dir, "target_dir":target_dir})

        job.delete()
        return False
Beispiel #8
0
 def callback(self, pathToWatch, relativePath, changeType):
     """Serialize one file-change notification and enqueue it on the
     target beanstalk tube.

     pathToWatch  -- monitored root directory.
     relativePath -- changed path relative to pathToWatch.
     changeType   -- change description string from the watcher.
     """
     fullPath = transform.transformDirToInternal(os.path.join(pathToWatch, relativePath))
     itemDict = {"monitoringPath": transform.transformDirToInternal(pathToWatch),
                     "fullPath": fullPath, "changeType":changeType,
                     "timestamp": time.time()}
     # Serialize once (the original serialized the same dict twice).
     s = json.dumps(itemDict, sort_keys=True, indent=4)
     beanstalk = beanstalkc.Connection(host=gBeanstalkdServerHost, port=gBeanstalkdServerPort)
     beanstalk.use(self.targetTube)
     #print beanstalk.using()
     beanstalk.put(s)
Beispiel #9
0
 def __init__(self, srcRoot, storageRoot, stateStoragePath = 'd:/state.txt'):
     print "src root is:", srcRoot
     self.srcRoot = transform.transformDirToInternal(srcRoot)
     self.storageRoot = transform.transformDirToInternal(storageRoot)
     self.stateStoragePath = stateStoragePath
     try:
         f = open(self.stateStoragePath,'r')
         self.config = json.load(f)
         f.close()
     except IOError:
         self.config = {}
Beispiel #10
0
 def store(self, element):
     """Append one filesystem element to the current zip archive, rolling
     over to a fresh timestamp-named archive once the accumulated size
     exceeds MAX_SINGLE_ARCHIVE_SIZE.

     element -- object exposing getAbsPath(); its path relative to
                self.srcRoot becomes the entry name inside the archive.
     """
     #print 'storing....'
     fullPath = transform.transformDirToInternal(element.getAbsPath())
     relPath = fullPath.replace(self.srcRoot, '')
     if (self.curArchive is None) or (self.curArchivedSize > MAX_SINGLE_ARCHIVE_SIZE):
         self.curArchiveName = transform.transformDirToInternal(
             fileTools.getTimestampWithFreeName(self.storageRoot, '.zip'))
         self.curArchive = zipClass.ZFile(self.curArchiveName, 'w')
         self.curArchivedSize = 0
     #print 'copying "%s" to "%s"'%(fullPath, relPath)
     # NOTE(review): entry names are encoded as gbk — presumably for a
     # Chinese-locale Windows zip tool; confirm before changing.
     self.curArchive.addfile(unicode(fullPath).encode('gbk'), unicode(relPath).encode('gbk'))
     self.curArchivedSize += os.stat(fullPath).st_size
 def processItem(self, job, item):
     """Register encrypted info archives found under a monitored path.

     Ensures a per-monitoring-path collection exists in the db (creating a
     virtual object for it on first sight), then, for items not yet in the
     collection, decrypts the file into the working dir and dumps each
     contained info file (debug prints).

     Returns True to release the job, or False after deleting the job when
     the file no longer exists on disk.
     """
     monitoringFullPath = transform.transformDirToInternal(item['monitoringPath'])
     archiveId = gZipFolderCollectionPrefix + monitoringFullPath
     if not self.collectionInDbForMonitoringPath.has_key(monitoringFullPath):
         # First item for this monitored path: create its collection and
         # register a virtual object pointing at the collection id.
         self.collectionInDbForMonitoringPath[monitoringFullPath] = collectionDatabase.collectionOnMongoDbBase(archiveId, self.dbInst.getCollectionDb())
         objUuid = self.dbInst.addVirtualObj({"monitoringPath": monitoringFullPath, "zippedInfoCollectionId": archiveId});
         idInCol = objUuid
         self.zippedInfoCollectionList.addObj(idInCol, objUuid)
     #Save the item in the archive collection: zippedInfoColllection://D:/tmp/
     fullPath = transform.transformDirToInternal(item["fullPath"])
     relativePath = transform.getRelativePathFromFull(fullPath, monitoringFullPath)
     if not os.path.exists(fullPath):
         job.delete()
         return False#No job release, job was deleted.
     #################################################################
     # Start process the 
     #################################################################
     if not self.collectionInDbForMonitoringPath[monitoringFullPath].exists(relativePath):
         #This item is not in the collection, so we need to extract info from this item
         newObj = self.dbInst.getFsObjFromFullPath(fullPath)
         self.collectionInDbForMonitoringPath[monitoringFullPath].addObj(relativePath, newObj["uuid"])
         zipFilePath = transform.transformDirToInternal(
             fileTools.getTimestampWithFreeName(self.workingDir, gInfoFileDecryptedExt, gInfoFilePrefix))
         self.decCopier.copy(fullPath, zipFilePath)
         # NOTE(review): the fp handles below are never closed (GC only),
         # and the loop bodies are debug output.
         for i in zippedInfo(self.workingDir).enumItems(zipFilePath):
             print '--------------------------------------------------'
             print i
             fp = open(i, 'r')
             loadedFileInfo = json.load(fp)
             print loadedFileInfo
         for i in zippedInfo(self.workingDir).enumZippedFiles(zipFilePath):
             fp = open(i, 'r')
             print 'data file extracted:', i
     '''
     else:
         #This item is not in the collection, so we need to extract info from this item
         newObj = self.dbInst.getFsObjFromFullPath(fullPath)
         self.collectionInDbForMonitoringPath[monitoringFullPath].addObj(relativePath, newObj["uuid"])
         zipFilePath = transform.transformDirToInternal(
             fileTools.getTimestampWithFreeName(self.workingDir, gInfoFileDecryptedExt, gInfoFilePrefix))
         self.decCopier.copy(fullPath, zipFilePath)
         for i in zippedInfo(self.workingDir).enumItems(zipFilePath):
             print '--------------------------------------------------'
             print i
             fp = open(i, 'r')
             loadedFileInfo = json.load(fp)
             print loadedFileInfo
         for i in zippedInfo(self.workingDir).enumZippedFiles(zipFilePath):
             fp = open(i, 'r')
             print 'data file extracted:', i
     '''
     return True#Release job
Beispiel #12
0
def checkDirChanges(path_to_watch, busname = BUS_NAME_NAME, interfacename = INTERFACE_NAME, objname = OBJ_NAME):
    """Watch a directory tree on Windows and forward changes over D-Bus.

    First emits an "Existing" notification for every file already present,
    then blocks in ReadDirectoryChangesW and forwards each reported change
    to the D-Bus proxy. Loops until need_to_quit is set (never changed in
    the visible code, so effectively forever).
    """
    path_to_watch = transform.transformDirToInternal(os.path.abspath (path_to_watch))
    need_to_quit = False
    print "Watching %s at %s" % (path_to_watch, time.asctime ())
    # FILE_FLAG_BACKUP_SEMANTICS is required to open a directory handle.
    hDir = win32file.CreateFile(
        path_to_watch,
        win32con.GENERIC_READ,
        win32con.FILE_SHARE_READ|win32con.FILE_SHARE_WRITE,
        None,
        win32con.OPEN_EXISTING,
        win32con.FILE_FLAG_BACKUP_SEMANTICS,
        None
    )
    cnt = 0
    bus = dbus.SessionBus()
    proxy = bus.get_object(busname, objname)
    ###############################################
    #Scan for existing files
    ###############################################
    for i in os.walk(path_to_watch):
        print i
        for j in i[2]:
            fullPath = transform.transformDirToInternal(os.path.join(i[0], j))
            #print fullPath
            proxy.notify(path_to_watch, fullPath, "Existing", False, dbus_interface = interfacename)

    while not need_to_quit:
#            print "new watch\n"
        # Blocks until the OS reports changes: 256 KiB result buffer,
        # recursive watch (True) over the listed change classes.
        results = win32file.ReadDirectoryChangesW(
            hDir,
            1024*256,
            True,
            win32con.FILE_NOTIFY_CHANGE_FILE_NAME
            | win32con.FILE_NOTIFY_CHANGE_DIR_NAME
            | win32con.FILE_NOTIFY_CHANGE_ATTRIBUTES
            | win32con.FILE_NOTIFY_CHANGE_SIZE
            | win32con.FILE_NOTIFY_CHANGE_LAST_WRITE
            | win32con.FILE_NOTIFY_CHANGE_SECURITY,
            None,
            None
        )
        if not need_to_quit:
            for action, file in results:
                #full_filename = os.path.join (self.path_to_watch, file)
                #print full_filename, ACTIONS.get (action, "Unknown")
                #callback(self.path_to_watch, file, ACTIONS.get (action, "Unknown"))
                print 'filechanged called:', path_to_watch, file, ACTIONS.get (action, "Unknown")
                proxy.notify(path_to_watch, file, ACTIONS.get (action, "Unknown"), True, dbus_interface = interfacename)
Beispiel #13
0
def genPicThumb(local_path, dest_dir, mime_type = None):
    #If no thumbnail exist, create one
    #print '-----------------------localpath:',local_path
    basename = os.path.basename(local_path)
    #print "basename:" + basename
    
    ext = basename.split(".")[-1]
    #print ext
    #if picFormatSupported(ext):
    if picFormatSupportedV2(local_path, mime_type = None):
        #It is a jpeg file, currently no other type supported
        import Image #Using PIL lib 
        im = Image.open(local_path)
        # convert to thumbnail image
        im.thumbnail((g_default_thumb_size, g_default_thumb_size), Image.ANTIALIAS)
        # don't save if thumbnail already exists
        #Use _T as the thumb file end to indicate the end of the original firl
        thumb_path_without_ext = os.path.join(dest_dir, basename.split(".")[0]+"_T")
        import random
        while os.path.exists(thumb_path_without_ext+".jpg"):
            thumb_path_without_ext += str(random.randint(0,10))
        thumb_path = thumb_path_without_ext+'.jpg'
        #print thumb_path.encode("utf8","replace")
        if im.mode != "RGB":
            im = im.convert("RGB")
        im.save(thumb_path,  "JPEG")
        return transform.transformDirToInternal(thumb_path)
    else:
        print 'non jpeg file not supported'
        raise pictureFormatNotSupported
Beispiel #14
0
    def store(self, processingObj, pendingCollection):
        '''
        Record one processed object in self.zippedFileInfo.

        processingObj = {"fullPath": "D:/tmp/good.txt", "size":100}
        pendingCollection -- mapping relaPath -> uuid of items awaiting zip.
        '''
        ncl(processingObj)
        relaPath = processingObj.getIdInCol()
        ncl('Got relaPath')
        if (pendingCollection.has_key(relaPath)) and (pendingCollection[relaPath] != processingObj["uuid"]):
            #Item exists in pending but uuid is not the same, update the uuid for the pending item
            pendingCollection[relaPath] = processingObj["uuid"]
            cl('Added to pending')
        fullPath = transform.transformDirToInternal(processingObj["fullPath"])

        # BUG FIX: the original probed self.zippedFileInfo with a bare
        # try/except that swallowed *every* exception; an explicit
        # membership test is equivalent and cannot mask unrelated errors.
        if relaPath in self.zippedFileInfo:
            #If there is already an item with the same name, ignore the current
            return

        # Flush the accumulated info once it grows past the cap.
        # NOTE(review): self.fileCnt is read here but never incremented in
        # this method — presumably maintained elsewhere; confirm.
        if (self.fileCnt > MAX_FILE_CNT_IN_INFO_FILE):
            self.encInfoZip(pendingCollection)

        processingObj["parentEncZip"] = self.targetPath.replace(".zip", ".enc")
        self.zippedFileInfo[relaPath] = processingObj.getItemInfo()
        cl('return from store')
Beispiel #15
0
    def run(self):
        """Enumerate the monitored tree and emit one item per entry.

        A non-directory rootFolder yields a single item; otherwise os.walk
        is used and one item per *sub-directory* is emitted (note:
        directories, not files, are enumerated here). Stops early when
        self.quit_flag is set.
        """
        print 'Start scanning'
        if not os.path.isdir(self.rootFolder):
            print "not a folder"
            # NOTE(review): 'filter' here is called as (name, blackList) ->
            # bool, so it must be a project helper shadowing the builtin —
            # confirm which definition is in scope.
            if filter(self.rootFolder, self.blackList):
                return
            paramDict = {"fullPath": self.rootFolder, "timestamp": os.stat(self.rootFolder)[ST_MTIME],
                             "monitoringPath": self.rootFolder}
            self.addItem(paramDict)
        else:
            for root, dirs, files in os.walk(self.rootFolder):
                #Break if quit called
                if self.quit_flag:
                    break
                #cl("remaining:", dirs)
                #Process files
                for j in dirs:
                    info(j)
                    if filter(j, self.blackList):
                        info("ignoring: ", j, "\n")
                        continue
                    
                    fullPath = transform.transformDirToInternal(os.path.join(root, j))

                    paramDict = {"fullPath": fullPath, "timestamp": os.stat(fullPath)[ST_MTIME],
                                 "monitoringPath": self.rootFolder}
                    self.addItem(paramDict)
        print "process complete, quitting thread"
Beispiel #16
0
 def __init__(self, srcRoot, storageRoot, stateStoragePath = 'd:/state.txt'):
     """Set up archive roots and load persisted state.

     srcRoot          -- root of the tree being archived (internal form).
     storageRoot      -- directory receiving the archives (internal form).
     stateStoragePath -- JSON file with persisted state; a missing file
                         yields an empty config.
     """
     self.srcRoot = transform.transformDirToInternal(srcRoot)
     self.storageRoot = transform.transformDirToInternal(storageRoot)
     self.stateStoragePath = stateStoragePath
     try:
         # 'with' guarantees the handle is closed even when json.load
         # raises (the original leaked the descriptor on a parse error).
         with open(self.stateStoragePath,'r') as f:
             self.config = json.load(f)
     except IOError:
         self.config = {}
     # No archive is open yet; store() / encZip() manage these.
     self.curArchivedSize = 0
     self.curArchive = None
Beispiel #17
0
 def __init__(self, srcRoot, storageRoot, stateStoragePath = 'd:/state.txt', 
                     tmpStorageRoot = 'd:/tmp/removeAfterComplete', decCopier = None):
     """Zip storage that decrypts archives into a temporary working dir.

     decCopier -- copier used for decryption; defaults to a fresh
                  arc4DecSimpleCopier('defaultPass') per call.
     """
     # BUG FIX: the old default arc4DecSimpleCopier(...) was constructed
     # once at definition time and shared by every instance created with
     # the default argument; build a fresh copier per call instead.
     if decCopier is None:
         decCopier = encryptionStorageBase.arc4DecSimpleCopier('defaultPass')
     misc.ensureDir(tmpStorageRoot)
     misc.ensureDir(storageRoot)
     zipStorage.zipStorage.__init__(self, srcRoot, storageRoot, stateStoragePath)
     self.tmpStorageRoot = transform.transformDirToInternal(tmpStorageRoot)
     self.decCopier = decCopier
Beispiel #18
0
    def initParam(self, zipDir, folderDir, workingDir, encryptionPass, direction):
        """Prepare working directories, persisted config, and the two
        storages, then pick source/destination by direction.

        direction -- "extract" copies encrypted-zip -> folder; any other
                     value reverses source and destination.
        """
        #################################
        #Make dir if not exist
        #################################
        misc.ensureDir(zipDir)
        misc.ensureDir(workingDir)
        misc.ensureDir(folderDir)
        self.configPath = os.path.join(workingDir, 'workingState.txt')
        self.backupPath = os.path.join(workingDir, 'backup')
        misc.ensureDir(self.backupPath)
        self.tmpStorageRoot = transform.transformDirToInternal(os.path.join(workingDir, 'working'))
        # Persisted dual-state config: one sub-dict per storage side.
        self.config = configDict.configFileDict(self.configPath, {"zipStorageState":{}, "folderState":{}})

        #################################
        #Create source storage
        #################################
        
        self.storage1 = encZipStorage.encZipStorage(self.config["zipStorageState"], 
                self.tmpStorageRoot, zipDir, encryptionPass)
        #################################
        #Create target storage
        #################################
        self.storage2 = folderStorage.folderStorage(self.config["folderState"], 
                folderDir, self.backupPath)
        
        if direction == "extract":
            self.srcStorage = self.storage1
            self.dstStorage = self.storage2
        else:
            self.srcStorage = self.storage2
            self.dstStorage = self.storage1
    def subClassRun(self, paramDict):
        """Walk self.rootFolder and sync every file's object uuid into the
        target collection, then notify listeners.

        paramDict -- unused here; part of the thread-run interface.
        """
        ###############################################
        #Scan for existing files
        ###############################################
        collection = self.objDb.getCollection(self.targetCollectionId)
        cl('start scanning')
        for i in os.walk(self.rootFolder):
            #cl(i)
            for j in i[2]:
                # Progress log every 1000 items.
                if (self.addedItemCnt % 1000) == 0:
                    cl("processing item cnt:", self.addedItemCnt)
                self.addedItemCnt += 1

                fullPath = transform.transformDirToInternal(os.path.join(i[0], j))
                #print '---------------------real adding item'
                #Update the item info for the item
                ncl('before fs obj base')
                itemUrl = objectDatabase.fsObjBase(fullPath).getObjUrl()
                ncl('before get fs obj')
                newObjUuid = self.objDb.getFsObjUuid(itemUrl)
                if newObjUuid is None:
                    cl("item deleted, do not add it")
                    continue
                ncl('before update obj uuid')
                '''
                collection.updateObjUuidIfNeeded(itemUrl, newObjUuid)
                '''
                # Skip unchanged items; otherwise write the new uuid.
                if collection.isSame(itemUrl, newObjUuid):
                    ncl("no updates needed", itemUrl, newObjUuid)
                    continue
                collection.updateObjUuidRaw(itemUrl, newObjUuid)
                ncl('new item added', itemUrl)
                
        cl("notifying listener")
        self.notifyAll()
Beispiel #20
0
def internal_get_thumb(path, targetDir, mime_type = None):
    '''
    path: Full Path. The path of the file whose thumbnail will be generated
    targetDir: Directory Path. The target directory where the generated thumbnail will be put in.
    mime_type: optional hint forwarded to the picture thumbnail generator.
    Return: the thumbnail fullPath in internal form, or None when no
    thumbnail could be generated (best effort).
    '''
    newPath = None
    ext = path.split('.')[-1].lower()
    if ext in ['exe']:
        try:
            newPath = appThumb.genAppThumb(path, targetDir)
        # BUG FIX: was a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit; narrowed to Exception.
        except Exception:
            return None
    else:
        try:
            newPath = picThumbGenerator.genPicThumb(path, targetDir, mime_type)
        except picThumbGenerator.pictureFormatNotSupported:
            # Not a picture: fall back to a video thumbnail when the
            # extension says it is a video; otherwise give up silently.
            if ext in g_video_file_ext_list:
                try:
                    newPath = ffmpegThumb.genVideoThumb(path, targetDir)
                except Exception:
                    pass
    if newPath is None:
        return None
    return transformDirToInternal(newPath)
Beispiel #21
0
 def getRange(self, start, cnt):
     """Return internal-form paths of the entries under self.fullPath.

     When self.folderOnly is set, only sub-directories are included.
     Returns the slice [start:start+cnt], or the whole list when cnt is
     None; an unreadable directory (WindowsError) yields [].
     """
     try:
         entries = os.listdir(self.fullPath)
     except WindowsError:
         # Directory unreadable or vanished on Windows: behave as empty.
         return []
     res = []
     for name in entries:
         full = os.path.join(self.fullPath, name)
         if self.folderOnly and not os.path.isdir(full):
             continue
         res.append(transform.transformDirToInternal(full))
     if cnt is None:
         return res
     return res[start:start+cnt]
Beispiel #22
0
 def encZip(self):
     """Close the current archive, encrypt it into zipStorageDir, write and
     encrypt its JSON info log, then reset per-archive state and record the
     new encrypted file in the last-state tracker.

     No-op when no archive is currently open.
     """
     #Must close the zip before encrypt it, otherwise, the file are not integrate
     if self.curArchive is None:
         return
     self.curArchive.close()
     self.curArchive = None
     
     ############################
     # Encrypt the zip file
     ############################
     targetPath = transform.transformDirToInternal(
             fileTools.getTimestampWithFreeName(self.zipStorageDir, '.enc'))
     print 'copying "%s" to "%s"'%(self.curArchiveName, targetPath)
     #import shutil
     #shutil.copy(self.curArchiveName, targetPath+'.backup.zip')
     self.encCopier.copy(self.curArchiveName, targetPath)
     
     ############################
     # Save info for zipped files
     ############################
     s = json.dumps(self.zippedFileInfo, sort_keys=True, indent=4)
     f = open(self.curArchiveName.replace('.zip', '.log'),'w')
     f.write(s)
     f.close()
     # The encrypted log sits next to the archive as '.enclog'.
     self.encCopier.copy(self.curArchiveName.replace('.zip', '.log'), targetPath.replace('.enc', '.enclog'))
     ############################
     # Update state in storage state
     ############################
     self.updateZipLog(self.zippedFileInfo)
     #Clean the current zipped file info
     self.zippedFileInfo = {}
     zipFileFolderStorageItem = folderStorage.folderStorageItem(self.zipStorageDir, targetPath)
     self.lastState.zipFileUpdate(zipFileFolderStorageItem)
Beispiel #23
0
 def on_ok_button_clicked(self, widget, data=None):
     """GTK handler: tag every checked file in the liststore with the
     comma-separated tags typed into entry1, queue one TagProcessService
     item per (file, tag) pair, then hide the window."""
     import wwjufsdatabase.libs.services.servicesV2 as service
     req = service.req()
     #gtk.main_quit()
     entry = self.builder.get_object("entry1")
     tag = entry.get_text()
     print tag
     # Split "a,b,c" into unicode tags, skipping empty pieces.
     tag_list_raw = tag.split(",")
     tag_list = []
     for i in tag_list_raw:
         if i == "":
             continue
         tag_list.append(unicode(i))
     t = tagSystem.getTagSysObj(req.getDbSys())
     for i in self.liststore:
         #print i[0], i[1]
         # i[0] is the row's checkbox state, i[1] the file url.
         if i[0]:
             url = i[1]
             url = url.replace("file:///", "")
             url = unicode(url)
             full_path = transform.transformDirToInternal(url)
             #print "full_path is:", full_path
             t.tag(full_path, tag_list)
             from localLibs.services.beanstalkdServices.TagProcessServiceV2 import TagProcessService
             p = TagProcessService()
             for cur_tag in tag_list:
                 p.addItem({"url":full_path, "tag":cur_tag})
     self.window1.hide()
    def processItem(self, job, item):
        """Run AutoArchiveThumb for one job, then delete the job.

        item keys: "source_dir", "working_dir", "target_dir".
        Returns False so the job is not put back into the tube.
        """
        source_dir = item["source_dir"]
        # NOTE(review): ensureDir only runs when source_dir already exists
        # as a directory — this looks inverted; confirm whether skipping
        # creation of a missing source was intentional.
        if os.path.isdir(source_dir):
            misc.ensureDir(transform.transformDirToInternal(source_dir))
        
        working_dir = item["working_dir"]
        misc.ensureDir(transform.transformDirToInternal(working_dir))
        
        target_dir = item["target_dir"]
        misc.ensureDir(transform.transformDirToInternal(target_dir))

        AutoArchiveThumb(source_dir, target_dir, working_dir)
        
        #Must delete the job if it is no longer needed and return False so the job will not be put back to tube
        job.delete()
        return False
Beispiel #25
0
    def __init__(self, taskId, appUuid, targetRootDir, targetBackupDir, 
                            collectionId, workingDir, passwd):
        """Create the sync-folder and enc-zip collection pair for this task.

        NOTE(review): logCollectionId and syncFolderCollectionId are not
        parameters and not defined in the visible scope — presumably
        module-level globals; confirm, otherwise this raises NameError.
        Also `raise "…"` is a Python 2 string exception, which raises
        TypeError on modern interpreters — consider raising Exception.
        """
                            
        #First set the input param, __init__ will create initial config from this by calling subClassInitialCfg
        advCollectionProcessor.__init__(self, taskId, appUuid, collectionId)
        #The param initiated in this class should be check by this class
        if self.appConfigObj["logCollectionId"] != logCollectionId:
            raise "Task parameter does not match"

        #Create the 2 collections
        #print self.db
        self.folderCol = syncFolderCollection.syncFolderCollection(transform.transformDirToInternal(targetRootDir), 
                                transform.transformDirToInternal(targetBackupDir),
                                syncFolderCollectionId, self.db)
        self.encZipCol = encZipCollection.encZipCollection(transform.transformDirToInternal(collectionId), 
                                logCollectionId, 
                                transform.transformDirToInternal(workingDir), passwd, self.db)
Beispiel #26
0
 def encZip(self):
     ############################
     #Encrypt the zip file
     ############################
     targetPath = transform.transformDirToInternal(
             fileTools.getTimestampWithFreeName(self.storageRoot, '.enc'))
     print 'copying "%s" to "%s"'%(self.curArchiveName, targetPath)
     self.encCopier.copy(self.curArchiveName, targetPath)
Beispiel #27
0
 def __init__ ( self, tubeName, rootFolder, blackList = None):
     """Folder-enumerating thread bound to one beanstalk tube.

     tubeName   -- tube the thread feeds.
     rootFolder -- root to enumerate (normalized to internal form).
     blackList  -- optional list of patterns to skip; defaults to [].
     """
     # BUG FIX: the old signature used the mutable default `blackList=[]`,
     # so every defaulted instance aliased (and could mutate) one shared
     # list. Use the None sentinel instead.
     self.blackList = blackList if blackList is not None else []
     self.rootFolder = transform.transformDirToInternal(rootFolder)
     super(FolderEnumeratingThread, self).__init__ (tubeName)
     self.quit_flag = False
     import wwjufsdatabase.libs.services.servicesV2 as service
     self.req = service.req()
     self.obj_db = self.req.getObjDbSys()
    def __init__(self, outputFolder, rootFolder, username = "******", passwd = "nopass", targetCollectionId = None
                 , dbPrefix = "test"):
        """Recursive-scan worker thread bound to one root folder.

        Registers itself as a singleton thread under the handle
        "recursive://<rootFolder>" and opens an object-database session.
        targetCollectionId is accepted but currently unused (see the
        commented-out block below).
        """
        #print rootFolder
        self.rootFolder = transform.transformDirToInternal(rootFolder)
        self.outputFolder = transform.transformDirToInternal(outputFolder)
        #print self.rootFolder
        threadHndl = "recursive://" + self.rootFolder
        self.userSession = service.ufsUser(username, passwd)
        #print username, passwd
        self.objDb = objectDatabase.objectDatabase(self.userSession, dbPrefix = dbPrefix)
#        if not (targetCollectionId is None):
#            self.targetCollectionId = targetCollectionId
#        else:
#            self.targetCollectionId = "folder://" + self.rootFolder
        super(workThread, self).__init__(threadHndl, "singleton")
#        self.partialRes = []
        # Running count used for progress logging during the scan.
        self.addedItemCnt = 0
 def processJob(self, job, item):
     """Record zipped-info files for a monitored path in its db collection.

     Creates the per-path collection on first sight; for items not yet in
     the collection, registers the fs object and dumps each contained info
     file (debug prints). Always returns True so the job is released.
     """
     monitoringFullPath = transform.transformDirToInternal(item['monitoringPath'])
     archiveId = "zippedInfoColllection://" + monitoringFullPath
     if not self.collectionInDbForMonitoringPath.has_key(monitoringFullPath):
         self.collectionInDbForMonitoringPath[monitoringFullPath] = collectionDatabase.collectionOnMongoDbBase(archiveId, self.dbInst.getCollectionDb())
     #Save the item in the archive collection: zippedInfoColllection://D:/tmp/
     fullPath = transform.transformDirToInternal(item["fullPath"])
     relativePath = transform.getRelativePathFromFull(fullPath, monitoringFullPath)
     if not self.collectionInDbForMonitoringPath[monitoringFullPath].exists(relativePath):
         #This item is not in the collection, so we need to extract info from this item
         newObj = self.dbInst.getFsObjFromFullPath(fullPath)
         self.collectionInDbForMonitoringPath[monitoringFullPath].addObj(relativePath, newObj["uuid"])
         # NOTE(review): fp is opened but never closed; relies on GC.
         for i in zippedInfo(self.workingDir).enumItems(fullPath):
             fp = open(i, 'r')
             loadedFileInfo = json.load(fp)
             print loadedFileInfo
     return True
Beispiel #30
0
 def createNewZip(self):
     """Open a fresh, empty zip archive in the working directory and reset
     the running size counter."""
     # Reserve a free timestamp-based name for the new archive.
     fresh_name = fileTools.getTimestampWithFreeName(self.workingDir, '.zip')
     self.curArchiveName = transform.transformDirToInternal(fresh_name)
     self.curArchive = zipClass.ZFile(self.curArchiveName, 'w')
     self.curArchivedSize = 0