def defaultRecordingLocation(candidate=None):
    """Return a usable recording directory, preferring candidate when valid.

    Falls back to the target of the /hdd symlink (or /media/hdd), then to
    the best mounted disk — local devices first, remote mounts second.
    Any non-empty result is returned with a trailing slash.
    """
    if candidate and pathExists(candidate):
        return candidate
    # First, try whatever /hdd points to, or /media/hdd.
    try:
        path = readlink("/hdd")
    except OSError:
        path = "/media/hdd"
    if not pathExists(path):
        # Find the largest disk among the /media/ mounts.
        from Components import Harddisk
        mediaMounts = [mount for mount in Harddisk.getProcMounts() if mount[1].startswith("/media/")]
        localMounts = [mount for mount in mediaMounts if mount[0].startswith("/dev/")]
        remoteMounts = [mount for mount in mediaMounts if not mount[0].startswith("/dev/")]
        # Local devices take priority; only fall back to remote mounts when none qualify.
        path = bestRecordingLocation(localMounts) or bestRecordingLocation(remoteMounts)
    if path:
        # Prefer an existing "movie" subdirectory when present.
        movie = pathJoin(path, "movie")
        if pathIsdir(movie):
            path = movie
        if not path.endswith("/"):
            path += "/"  # Bad habits die hard, old code relies on this.
    return path
def setUpTableFiles(self, tableSpecifications):
    """Create the per-table temporary output files for a dump run.

    Builds <nameOfFolderToSavePostgresSQLFile>/temp/<uuid>/ and opens one
    writable temp file per table, pre-writing the Postgres header into each.
    The open handles live in self.tableTempFiles; they are flushed/closed and
    merged into the main SQL file by the companion __del__ finalizer.
    """
    requires(self.isProperSpecificationOfTables(tableSpecifications))
    # Two-step directory creation: .../temp/ first, then the per-run uuid folder.
    self.pathToSaveTemporaryResults = nameOfFolderToSavePostgresSQLFile + "temp/"
    if (not pathExists(self.pathToSaveTemporaryResults)):
        mkdir(self.pathToSaveTemporaryResults)
    self.pathToSaveTemporaryResults = self.pathToSaveTemporaryResults + self.uuidNotEscaped + "/"
    if (not pathExists(self.pathToSaveTemporaryResults)):
        mkdir(self.pathToSaveTemporaryResults)
    self.tableSpecifications = tableSpecifications
    self.tableTempFiles = {}  # table name -> open writable file handle
    # Every table starts with a zero record count.
    self.tableRecordCounts = {
        thisTableName: 0
        for thisTableName in tableSpecifications
    }
    assert (isinstance(self.tableRecordCounts, dict))
    assert (set(self.tableRecordCounts.keys()) == set(
        tableSpecifications.keys()))
    # NOTE(review): this assertion fails for an empty tableSpecifications
    # (set() != {0}) — presumably specs are never empty; confirm with callers.
    assert (set(self.tableRecordCounts.values()) == set([0]))
    for thisTableName in tableSpecifications:
        pathToWriteThisTableTempFiles = self.pathToSaveTemporaryResults + thisTableName
        # A pre-existing temp file would indicate a uuid collision or stale run.
        assert (not pathExists(pathToWriteThisTableTempFiles))
        self.tableTempFiles[thisTableName] = open(
            pathToWriteThisTableTempFiles, "w")
        assert (pathExists(pathToWriteThisTableTempFiles))
        # Each temp file starts with the INSERT header for its table.
        self.tableTempFiles[thisTableName].write(
            self.formPostgresHeader(thisTableName, tableSpecifications))
        self.tableTempFiles[thisTableName].flush()
    return
def __init__(self, path):
    '''
    DB table to store things as files and directories. This is
    designed to reduce ram usage when reading things from large
    databases. Specifically this is designed for caches.

    # variables #
    .path
      The path on the filesystem where the table is stored.
    .names
      Gives you a list containing the names of all stored
      values as strings.
    .namePaths
      Gives you a dict where the keys are the names and
      the value is the path of that value database file
    .length
      The length of names stored in this table
    '''
    # path of the root of the cache, this is where files
    # will be stored on the system
    self.path = path
    # create the paths if they do not exist
    if not pathExists(self.path):
        makedirs(self.path)
    debug.add('table path', self.path)
    # the path prefix is for tables stored in tables
    self.pathPrefix = ''
    # array of all the value names stored on table
    namesPath = pathJoin(self.path, 'names.table')
    # if no namepaths exist create them
    # (fixed: pathJoin() with a single argument was a no-op wrapper)
    if not pathExists(namesPath):
        # write the new value to the system
        writeFile(namesPath, pickle(dict()))
    # load the name paths
    self.namePaths = unpickle(loadFile(namesPath))
    debug.add('self.namePaths', self.namePaths)
    # materialize as a real list (a Py3 dict view would go stale and is not
    # the list the docstring promises)
    self.names = list(self.namePaths.keys())
    debug.add('self.names', self.names)
    # length of all the values stored on the table
    self.length = len(self.names)
    debug.add('self.length', self.length)
    # the protected list is an array of names that are
    # protected from limit cleaning
    protectedPath = pathJoin(self.path, 'protected.table')
    if pathExists(protectedPath):
        # load the list
        self.protectedList = unpickle(loadFile(protectedPath))
    else:
        # create a blank list
        self.protectedList = []
    # limit value to limit the number of values;
    # load the limit value from file if it exists
    limitPath = pathJoin(self.path, 'limit.table')
    if pathExists(limitPath):
        self.limit = unpickle(loadFile(limitPath))
    else:
        self.limit = None
def __del__(self):
    # Finalizer paired with setUpTableFiles: closes every per-table temp
    # file, appends their contents to the main SQL file, terminates the
    # INSERT command with ';', then removes the temp files and directory.
    # NOTE(review): doing file I/O in __del__ is fragile (globals may be
    # gone at interpreter shutdown) — presumably the object is dropped
    # deterministically; confirm with callers.
    for thisTableFile in self.tableTempFiles.values():
        thisTableFile.close()
    mainFileHandleToWriteTo = open(self.nameOfMainFileToWriteTo, "a")
    for thisTableName in self.tableSpecifications:
        pathToWriteThisTableTempFiles = self.pathToSaveTemporaryResults + thisTableName
        assert (pathExists(pathToWriteThisTableTempFiles))
        thisTableFileHandle = open(pathToWriteThisTableTempFiles, "r")
        # Stream the temp file line-by-line into the main output file.
        for thisLine in thisTableFileHandle:
            mainFileHandleToWriteTo.write(thisLine)
        mainFileHandleToWriteTo.flush()
        thisTableFileHandle.close()
        remove(pathToWriteThisTableTempFiles)  # deleting the temporary file.
        assert (not pathExists(pathToWriteThisTableTempFiles))
    mainFileHandleToWriteTo.write(";\n")  # this ends the PostgresSQL insertion command. Note
    # that we do not do this in the self.writeToTable function.
    mainFileHandleToWriteTo.flush()
    mainFileHandleToWriteTo.close()
    assert (("/temp/" + self.uuidNotEscaped + "/")
            in self.pathToSaveTemporaryResults
            )  # weak check that we are deleting the right directory.
    rmdir(self.pathToSaveTemporaryResults)
    return
def __init__(self, path):
    '''
    DB table to store things as files and directories. This is
    designed to reduce ram usage when reading things from large
    databases. Specifically this is designed for caches.

    # variables #
    .path
      The path on the filesystem where the table is stored.
    .names
      Gives you a list containing the names of all stored
      values as strings.
    .namePaths
      Gives you a dict where the keys are the names and
      the value is the path of that value database file
    .length
      The length of names stored in this table
    '''
    # path of the root of the cache, this is where files
    # will be stored on the system
    self.path = path
    # create the paths if they do not exist
    if not pathExists(self.path):
        makedirs(self.path)
    debug.add('table path', self.path)
    # the path prefix is for tables stored in tables
    self.pathPrefix = ''
    # array of all the value names stored on table
    namesPath = pathJoin(self.path, 'names.table')
    # if no namepaths exist create them
    # (fixed: pathJoin() with a single argument was a no-op wrapper)
    if not pathExists(namesPath):
        # write the new value to the system
        writeFile(namesPath, pickle(dict()))
    # load the name paths
    self.namePaths = unpickle(loadFile(namesPath))
    debug.add('self.namePaths', self.namePaths)
    # materialize as a real list (a Py3 dict view would go stale and is not
    # the list the docstring promises)
    self.names = list(self.namePaths.keys())
    debug.add('self.names', self.names)
    # length of all the values stored on the table
    self.length = len(self.names)
    debug.add('self.length', self.length)
    # the protected list is an array of names that are
    # protected from limit cleaning
    protectedPath = pathJoin(self.path, 'protected.table')
    if pathExists(protectedPath):
        # load the list
        self.protectedList = unpickle(loadFile(protectedPath))
    else:
        # create a blank list
        self.protectedList = []
    # limit value to limit the number of values;
    # load the limit value from file if it exists
    limitPath = pathJoin(self.path, 'limit.table')
    if pathExists(limitPath):
        self.limit = unpickle(loadFile(limitPath))
    else:
        self.limit = None
def CheckFolders(self, directory):
    """Ensure the standard sorting sub-folders exist under directory.

    Creates any that are missing, printing a note for each one created.
    (Rewritten from eight copy-pasted branches with Python-2-only print
    statements into a single data-driven loop with portable print calls.)
    """
    folders = (
        'executables',
        'images',
        'jars',
        'misc',
        'compressed',
        'sounds-or-music',
        'Documents',
        'torrents',
    )
    for folder in folders:
        target = join(directory, folder)
        if not pathExists(target):
            # Message format (including the stray trailing quote) kept as-is.
            print('Creating "%s" folder in %s"' % (folder, directory))
            makedirs(target)
def createAuthBlob(self):
    """Build the authentication response blob from the stored challenge.

    Reads the saved challenge nonce, derives the session keys from it and
    the account password, and returns the key-stream-encoded response as a
    str of raw bytes.  Returns an empty string when no challenge is stored.
    """
    data = ""
    if pathExists("/home/user/.wazapp/challenge"):
        # Context manager ensures the handle is closed even if read() fails
        # (the original leaked the handle on error).
        with open("/home/user/.wazapp/challenge", "rb") as f:
            data = str(f.read())
    if len(data) > 0:
        # Derive both directions' key streams from password + challenge nonce.
        numArray = _bytearray(KeyStream.keyFromPasswordAndNonce(self.authObject.password, data))
        self.conn.reader.inputKey = self.inputKey = KeyStream(numArray)
        self.outputKey = KeyStream(numArray)
        # Assemble the plaintext: username, nonce, timestamp and client info.
        nums = []
        nums.extend(self.username)
        nums.extend(str(data))
        wt = WATime()
        utcNow = int(wt.utcTimestamp())
        nums.extend(str(utcNow))
        nums.extend(str(WAUtilities.UserAgent))
        nums.extend(str(" Mcc/Mnc"))
        nums.extend(str(self.mcc))
        nums.extend(str(self.mnc))
        encoded = self.outputKey.encodeMessage(nums, 0, 0, len(nums))
        encoded = "".join(map(chr, encoded))
        return encoded
    else:
        # No challenge data: reply with an empty blob (dead 'blob' list removed).
        return ""
def openPath(self, _, path):
    """Menu-item callback: open path with the desktop's default handler.

    The unused second parameter is the GTK widget passed by the signal.
    """
    LOGGER.info("Opening '%s'", path)
    if pathExists(path):
        try:
            call(['xdg-open', path])
        except OSError:
            # Narrowed from a bare except: only launch failures (missing
            # xdg-open, permission problems) are expected here.
            LOGGER.error("Start of '%s' failed", path)
def deleteValue(self, name):
    '''
    Delete a value with name name.

    Returns True when both the metadata entry and the backing file were
    removed, False when the name is unknown or the file was already gone.
    '''
    debug.add('deleting value ', name)
    # figure out the path to the named value file
    if name in self.names:
        filePath = self.namePaths[name]
        # remove the metadata entry
        del self.namePaths[name]
        # write changes to database metadata file
        writeFile(pathJoin(self.path, 'names.table'), pickle(self.namePaths))
        # update the length and names attributes
        # (materialized as a list so the attribute stays a stable snapshot)
        self.names = list(self.namePaths.keys())
        self.length = len(self.names)
    else:
        return False
    if pathExists(filePath):
        # remove the file associated with the value
        removeFile(filePath)
        return True
    else:
        return False
def createAuthBlob(self):
    """Build the authentication response blob from the stored challenge.

    Reads the saved challenge nonce, derives the session keys from it and
    the account password, and returns the key-stream-encoded response as a
    str of raw bytes.  Returns an empty string when no challenge is stored.
    """
    data = ""
    if pathExists("/home/user/.wazapp/challenge"):
        # Context manager ensures the handle is closed even if read() fails
        # (the original leaked the handle on error).
        with open("/home/user/.wazapp/challenge", "rb") as f:
            data = str(f.read())
    if len(data) > 0:
        # Derive both directions' key streams from password + challenge nonce.
        numArray = _bytearray(
            KeyStream.keyFromPasswordAndNonce(self.authObject.password, data))
        self.conn.reader.inputKey = self.inputKey = KeyStream(numArray)
        self.outputKey = KeyStream(numArray)
        # Assemble the plaintext: username, nonce, timestamp and client info.
        nums = []
        nums.extend(self.username)
        nums.extend(str(data))
        wt = WATime()
        utcNow = int(wt.utcTimestamp())
        nums.extend(str(utcNow))
        nums.extend(str(WAUtilities.UserAgent))
        nums.extend(str(" Mcc/Mnc"))
        nums.extend(str(self.mcc))
        nums.extend(str(self.mnc))
        encoded = self.outputKey.encodeMessage(nums, 0, 0, len(nums))
        encoded = "".join(map(chr, encoded))
        return encoded
    else:
        # No challenge data: reply with an empty blob (dead 'blob' list removed).
        return ""
def __init__(self, config, inputPaths, name=None):
    """Initialize from a config object and a list of input file paths.

    When name is omitted it is derived from the single input path's
    file name (flag.task.name.* convention); tasks and flags are the
    de-duplicated middle/first dot-components of all input names.
    Also instantiates every 'treeProcessor:*' section from the config.
    """
    from os.path import split as splitPath
    from os.path import exists as pathExists
    from os import makedirs
    from sys import modules
    if name is None:  # fixed: identity comparison instead of '== None'
        assert len(
            inputPaths
        ) == 1, "can only determine names for singlePaths automatically. Got '%s'" % inputPaths
        name = splitPath(inputPaths[0])[1].split(".")[2]
    self.config = config
    self.name = name
    # De-duplicated task/flag components from the input file names.
    self.tasks = list(
        set([splitPath(i)[1].split(".")[1] for i in inputPaths]))
    self.flags = list(
        set([splitPath(i)[1].split(".")[0] for i in inputPaths]))
    self.inputPaths = inputPaths
    self.counterSum = None
    self.outPath = config.get("general", "outPath")
    if not pathExists(self.outPath):
        makedirs(self.outPath)
    self.treeProcessors = {}
    for section in self.config.sections():
        if section.startswith("treeProcessor:"):
            processorName = section.split("treeProcessor:")[1]
            processorType = self.config.get(section, "type")
            # warning black magic ahead :P — look the processor class up by
            # name in this module and instantiate it.
            self.treeProcessors[processorName] = getattr(
                modules[globals()["__name__"]],
                processorType)(self.config, processorName)
def update(self, vals, yddir):
    # Refresh all indicator-menu rows from the daemon status dict `vals`.
    # NOTE(review): statement grouping below was reconstructed from
    # whitespace-mangled source; branch membership of some lines
    # (e.g. open_folder sensitivity) should be confirmed against upstream.
    self.folder = yddir
    # Update status data on first run or when status has changed
    if vals['statchg'] or vals['laststatus'] == 'unknown':
        self.status.set_label(
            _('Status: ') + self.YD_STATUS[vals['status']] +
            (vals['progress'] if vals['status'] == 'busy' else ' '.
             join((':', vals['error'], shortPath(vals['path'])))
             if vals['status'] == 'error' else ''))
    # Update pseudo-static items on first run or when daemon has stopped or started
    if 'none' in (vals['status'], vals['laststatus']
                  ) or vals['laststatus'] == 'unknown':
        started = vals['status'] != 'none'
        self.status.set_sensitive(started)
        # zero-space UTF symbols are used to detect required action without
        # need to compare translated strings
        self.daemon_ss.set_label((
            '\u2060' + _('Stop Yandex.Disk daemon')) if started else (
                '\u200B' + _('Start Yandex.Disk daemon')))
        if self.ID != '':
            # Set daemon identity row in multidaemon mode
            self.yddir.set_label(self.ID + _(' Folder: ') +
                                 (shortPath(yddir) if yddir else '< NOT CONFIGURED >'))
        self.open_folder.set_sensitive(
            yddir != '')  # Activate Open YDfolder if daemon configured
    # Update sizes data on first run or when size data has changed
    if vals['szchg'] or vals['laststatus'] == 'unknown':
        self.used.set_label(
            _('Used: ') + vals['used'] + '/' + vals['total'])
        self.free.set_label(
            _('Free: ') + vals['free'] + _(', trash: ') + vals['trash'])
    # Update last synchronized sub-menu on first run or when last data has changed
    if vals['lastchg'] or vals['laststatus'] == 'unknown':
        # Disable showing synchronized sub menu while updating it - temp fix for #197
        self.lastItems.destroy()
        self.lastItems = Gtk.Menu()  # Create new/empty Sub-menu:
        for filePath in vals['lastitems']:  # Create new sub-menu items
            # Menu label is the (shortened) file path, with underscores
            # handled so GTK menu acceleration doesn't trigger.
            widget = Gtk.MenuItem.new_with_label(shortPath(filePath))
            filePath = pathJoin(yddir, filePath)  # Make full path to file
            if pathExists(filePath):
                widget.set_sensitive(
                    True)  # If it exists then it can be opened
                widget.connect("activate", self.openPath, filePath)
            else:
                widget.set_sensitive(
                    False)  # Don't allow to open non-existing path
            self.lastItems.append(widget)
        self.last.set_submenu(self.lastItems)
        # Switch off last items menu sensitivity if no items in list
        self.last.set_sensitive(vals['lastitems'])
        LOGGER.debug("Sub-menu 'Last synchronized' has %s items",
                     str(len(vals['lastitems'])))
    self.show_all()  # Renew menu
def __init__(self, directory):
    """Load the device manifest from directory, or start with an empty table."""
    self.path = f"{directory}/{self.FILENAME}"
    # Missing manifest is not an error — begin with an empty device table.
    self.deviceTable = self._readJson() if pathExists(self.path) else {}
def _upload(self, cmd, path):
    """Run an upload task for path and, on success, record its mtime/mode."""
    rel = relpath(path, start=self.path)
    status, res = super().task(cmd, rel, path)
    # Only update bookkeeping when the task succeeded and the file still exists.
    if status and pathExists(path):
        stat = file_info(path)
        self.h_data[path] = int(stat.st_mtime)
        self._r_setMode(rel, stat.st_mode)
    return status, res
def __init__(self, user):
    """Initialize a cloud-disk client for one configured account.

    Creates/touches the local data folder, starts the event-handler and
    status-updater threads, and connects when the account is configured
    to auto-start.  On folder-access failure the object enters 'fault'
    status instead of raising.
    """
    self.user = user  # dictionary with user configuration
    path = expanduser(self.user['path'])
    dataFolder = '.yandex-disk-client'
    dataFolderPath = path_join(path, dataFolder)
    self.prevStatus = 'start'
    self.status = 'none'
    self.errorReason = ''
    if not pathExists(dataFolderPath):
        info('%s not exists' % dataFolderPath)
        try:
            makedirs(dataFolderPath, exist_ok=True)
        except OSError:  # narrowed from bare except: folder creation failures
            self.status = 'fault'
    else:
        try:
            # Touch the folder to verify we have write access.
            utime(dataFolderPath)
        except OSError:  # narrowed from bare except: access failures
            self.status = 'fault'
    self.shutdown = False  # signal for utility threads to exit
    if self.status == 'fault':
        self.errorReason = "Can't access the local folder %s" % dataFolderPath
        critical(self.errorReason)
    else:
        super().__init__(self.user['auth'], path, dataFolderPath)
        self.executor = ThreadPoolExecutor()
        self.downloads = set()  # set of currently downloading files
        # event handler thread
        self.EH = Thread(target=self._eventHandler)
        self.EH.name = 'EventHandler'
        # i-notify watcher object (data folder is always excluded)
        self.watch = self._PathWatcher(self.path, [
            path_join(self.path, e)
            for e in self.user['exclude'] + [dataFolder]
        ])
        self.EH.start()
        #self.listener = XMPPListener('\00'.join(user[login], user[auth]))
    # Status treatment staff
    self.error = False  # error flag. If it is True then fullSync is required
    self.progress = ''
    # dictionary with set of cloud status elements. Initial state
    self.cloudStatus = dict()
    self.changes = {'init'}  # set with changes flags
    # individual thread to control changes of status; queue passes status
    # changes from other threads to StatusUpdater
    self.statusQueue = Queue()
    self.SU = Thread(target=self._statusUpdater)
    self.SU.name = 'StatusUpdater'
    self.SU.start()
    if self.status != 'fault':
        # connect to cloud if it is required by configuration
        if self.user.setdefault('start', True):
            self.connect()
        else:
            self.statusQueue.put((self.status, self.prevStatus))
    else:
        # set fault status
        self.statusQueue.put((self.status, self.prevStatus))
def __init__(self, config, name):
    """Tree processor that records kept/rejected events and writes lists to listPath."""
    import os
    TreeProcessor.__init__(self, config, name)
    self.keepEvents = {}
    self.rejected = {}
    self.listPath = self.config.get(self.section, "listPath")
    # Create the output directory the first time this processor runs.
    if not os.path.exists(self.listPath):
        os.makedirs(self.listPath)
def lookup(self, path):
    """Return the first existing join of path onto the search-path stems.

    Raises FileNotFound when no stem yields an existing file.
    """
    for stem in self.pathList:
        candidate = pathJoin(stem, path)
        if pathExists(candidate):
            log('incdir: {0} -> {1}'.format(path, candidate))
            return candidate
    raise FileNotFound(path)
def test_Disk_50_DownloadNew(self):
    # Integration test: after wiping the sync history and deleting a local
    # file, reconnecting should re-download the file from the cloud.
    self.disk.disconnect()
    remove(self.disk.h_data._filePath)  # remove the persisted history file
    self.disk.h_data.clear()  # reset it in memory
    path = path_join(self.disk.path, 'word.docx')
    remove(path)
    self.disk.connect()
    # Timing-based wait for the full sync to fetch the file — may be flaky.
    sleep(30)
    self.assertTrue(pathExists(path))
    self.assertTrue(self.disk.status == 'idle')
def ignore_path_down(path):
    # Nested helper (relies on the enclosing sync method's scope for
    # `ignore`, `self.path` and `self.h_data`): mark `path` and each of its
    # parent directories, up to the sync root, as already in sync, and
    # record their current mtimes in the history.  Returns the set of
    # newly-ignored paths.
    nonlocal ignore
    ret = set()
    while path not in ignore and path != self.path:
        ret.add(path)
        if pathExists(path):
            self.h_data[path] = int(file_info(path).st_mtime)
        # step up to the containing directory
        path = path_split(path)[0]
    ignore |= ret
    return ret
def setIconTheme(self, theme):
    """ Determine paths to icons according to current theme """
    # Application setting is a boolean: truthy -> light theme, else dark.
    theme = 'light' if theme else 'dark'
    defaultPath = pathJoin(APPINSTPATH, 'icons', theme)
    userPath = pathJoin(APPCONFPATH, 'icons', theme)
    # Map every status onto a user icon when present, else the bundled one.
    self.icon = {}
    for status in ['idle', 'error', 'paused', 'none', 'no_net', 'busy']:
        if status == 'busy':
            name = 'yd-busy1.png'
        elif status in {'paused', 'none', 'no_net'}:
            name = 'yd-ind-pause.png'
        else:
            name = 'yd-ind-' + status + '.png'
        userIcon = pathJoin(userPath, name)
        self.icon[status] = userIcon if pathExists(userIcon) else pathJoin(defaultPath, name)
    # After the loop userIcon is the first busy icon; its presence decides
    # whether the user theme directory or the bundled one is active.
    self.themePath = userPath if pathExists(userIcon) else defaultPath
def itemExists(resolveList, base):
    """Search resolveList directories for base, trying the .png/.svg twin too.

    Returns the first existing full path, or the original base name when
    nothing is found.  (Fixed: the inner loop previously rebound the `base`
    parameter, so a failed search returned the last tried variant — e.g.
    "icon.svg" for a requested "icon.png" — instead of the requested name.)
    """
    baseList = [base]
    # An icon may ship in raster or vector form; try the twin extension too.
    if base.endswith(".png"):
        baseList.append("%s%s" % (base[:-3], "svg"))
    elif base.endswith(".svg"):
        baseList.append("%s%s" % (base[:-3], "png"))
    for item in resolveList:
        for candidate in baseList:
            file = pathJoin(item, candidate)
            if pathExists(file):
                return file
    return base
def _move(self, cmd, pathto, pathfrom):
    """Run a cloud move task and mirror the rename in the history data."""
    to_rel = relpath(pathto, start=self.path)
    from_rel = relpath(pathfrom, start=self.path)
    status, res = super().task(cmd, to_rel, from_rel)
    if status:
        # Carry the stored timestamp over to the new path; when the old
        # path had no history entry, fall back to the on-disk mtime.
        stamp = self.h_data.pop(pathfrom, None)
        if stamp is not None:
            self.h_data[pathto] = stamp
        elif pathExists(pathto):
            self.h_data[pathto] = int(file_info(pathto).st_mtime)
    return status, res
def checkAutoStart(path):
    """ Check that auto-start is enabled """
    # No .desktop file means autostart was never configured.
    if not pathExists(path):
        return False
    # Unity/Pantheon honour X-GNOME-Autostart-enabled (group 2);
    # everything else uses Hidden (group 1).
    i = 1 if getenv('XDG_CURRENT_DESKTOP') in ('Unity', 'Pantheon') else 0
    with open(path, 'rt') as f:
        attr = reFindall(r'\nHidden=(.+)|\nX-GNOME-Autostart-enabled=(.+)',
                         f.read())
    if not attr:
        # No explicit attribute: the entry is active by default.
        return True
    if attr[0][i] and attr[0][i] == ('true' if i else 'false'):
        return True
    return False
def plotOnePicture(self, xDats, yDats):
    """
    Plot the given x/y series into one picture.

    Labels, title, figure size and output location come from the instance
    attributes; xDats and yDats must be parallel lists of series, matched
    element-wise with self.labels.
    """
    # Output directory is timestamped so successive runs don't overwrite
    # each other.
    path = self.imagePath + self.title + datetime.now().strftime(
        '%Y%m%d_%H%M%S')
    if not pathExists(path):
        pathMkdir(path)
    plt.figure(figsize=self.figsize)
    plt.title(self.title)
    plt.xlabel(self.xlabel)
    plt.ylabel(self.ylabel)
    # One curve per (x, y, label) triple.
    for xDat, yDat, label in zip(xDats, yDats, self.labels):
        plt.plot(xDat, yDat, label=label)
    plt.legend()
    plt.savefig(path + '/' + self.title, dpi=self.figdpi)
def build():
    """Assemble the SQL script (CREATE TABLE + INSERTs) for every active table.

    Reads the build configuration from config_path and concatenates, for
    each table marked active, its CREATE statement and CSV-derived INSERT
    statements, each preceded by a newline.
    """
    assert pathExists(config_path), "build config file missing!"
    parts = []
    # Renamed the file handle: the original shadowed the builtin input().
    with open(config_path, 'r') as config_file:
        config = jsonLoad(config_file)
    for table_name in config:
        table_info = config[table_name]
        if not table_info["active"]:
            continue
        parts.append("\n" + createTable(table_name, table_info))
        parts.append("\n" + insertionsFromCSV(table_name, table_info))
    # Single join instead of repeated string concatenation.
    return "".join(parts)
def dataPlotFromTables(self, tablenames=None, dbType="postgre"):
    """
    Plot data from the named tables into one picture.

    tablenames: table names to read and plot (default: none)
    dbType: database type, default "postgre"
    """
    # Fixed: mutable default argument ([]) replaced with the None sentinel.
    if tablenames is None:
        tablenames = []
    # Output directory is timestamped so successive runs don't overwrite
    # each other.
    path = self.imagePath + self.title + datetime.now().strftime(
        '%Y%m%d_%H%M%S')
    if not pathExists(path):
        pathMkdir(path)
    plt.figure(figsize=self.figsize)
    plt.title(self.title)
    plt.xlabel(self.xlabel)
    plt.ylabel(self.ylabel)
    for tablename in tablenames:
        self.dataReadFromTable(tablename)
        # One curve per configured y column for this table.
        for yclo in self.yclos:
            plt.plot(self.dat[dataDict[self.xclo]],
                     self.dat[dataDict[yclo]],
                     label=tablename)
    plt.legend()
    plt.savefig(path + '/' + self.title, dpi=self.figdpi)
def loadValue(self, name):
    '''
    Loads a saved value and returns it.
    '''
    # Unknown names and missing backing files both report failure as False.
    if name not in self.names:
        return False
    filePath = self.namePaths[name]
    if not pathExists(filePath):
        return False
    # Values are stored pickled on disk; decode before returning.
    fileData = unpickle(loadFile(filePath))
    debug.add('loading value ' + str(name), fileData)
    return fileData
def setProtected(self, name):
    '''
    Set a name in the table to be protected from removal
    because of limits.
    '''
    filePath = pathJoin(self.path, 'protected.table')
    # Start from the stored list when one exists, otherwise a fresh one.
    if pathExists(filePath):
        currentList = unpickle(loadFile(filePath))
    else:
        currentList = []
    currentList.append(name)
    # Persist the updated list back to disk in pickled form.
    writeFile(filePath, pickle(currentList))
def loadValue(self, name):
    '''
    Loads a saved value and returns it.
    '''
    # Unknown names and missing backing files both report failure as False.
    if name not in self.names:
        return False
    filePath = self.namePaths[name]
    if not pathExists(filePath):
        return False
    # Values are stored pickled on disk; decode before returning.
    fileData = unpickle(loadFile(filePath))
    debug.add('loading value ' + str(name), fileData)
    return fileData
def setProtected(self, name):
    '''
    Set a name in the table to be protected from removal
    because of limits.
    '''
    filePath = pathJoin(self.path, 'protected.table')
    # Start from the stored list when one exists, otherwise a fresh one.
    if pathExists(filePath):
        currentList = unpickle(loadFile(filePath))
    else:
        currentList = []
    currentList.append(name)
    # Persist the updated list back to disk in pickled form.
    writeFile(filePath, pickle(currentList))
def plot(self):
    """
    Plot the loaded data based on dataDict, motoDataDict and heatDataDict.
    """
    # Output directory is timestamped so successive runs don't overwrite
    # each other.
    path = self.imagePath + self.title + datetime.now().strftime(
        '%Y%m%d_%H%M%S')
    if not pathExists(path):
        pathMkdir(path)
    plt.figure(figsize=self.figsize)
    plt.title(self.title)
    plt.xlabel(self.xlabel)
    plt.ylabel(self.ylabel)
    for yclo in self.yclos:
        xDat = self.dat[dataDict[self.xclo]]
        # Categorical columns are translated to numeric values through the
        # motoDataDict / heatDataDict mappings — presumably module-level
        # lookup tables; confirm where they are defined.
        if yclo == 'motoData':
            yDat = [motoDataDict[i] for i in self.dat[dataDict[yclo]]]
        elif yclo == 'heatData':
            yDat = [heatDataDict[i] for i in self.dat[dataDict[yclo]]]
        else:
            yDat = self.dat[dataDict[yclo]]
        plt.plot(xDat, yDat, label=yclo)
    plt.legend()
    plt.savefig(path + '/' + self.title, dpi=self.figdpi)
def deleteValue(self, name):
    '''
    Delete a value with name name.

    Returns True when both the metadata entry and the backing file were
    removed, False when the name is unknown or the file was already gone.
    '''
    debug.add('deleting value ', name)
    # figure out the path to the named value file
    if name in self.names:
        filePath = self.namePaths[name]
        # remove the metadata entry
        del self.namePaths[name]
        # write changes to database metadata file
        writeFile(pathJoin(self.path, 'names.table'), pickle(self.namePaths))
        # update the length and names attributes
        # (materialized as a list so the attribute stays a stable snapshot)
        self.names = list(self.namePaths.keys())
        self.length = len(self.names)
    else:
        return False
    if pathExists(filePath):
        # remove the file associated with the value
        removeFile(filePath)
        return True
    else:
        return False
def _fullSync(self):
    # Full bidirectional reconciliation of the cloud file list against the
    # local tree, driven by the persisted history data (self.h_data).
    # Forward pass: walk the cloud listing and download/upload/delete as
    # needed.  Backward pass: upload anything local that was not touched.

    def ignore_path_down(path):
        # Mark path and all parents up to the sync root as in-sync and
        # record their mtimes in the history.  Returns the new entries.
        nonlocal ignore
        ret = set()
        while path not in ignore and path != self.path:
            ret.add(path)
            if pathExists(path):
                self.h_data[path] = int(file_info(path).st_mtime)
            path = path_split(path)[0]
        ignore |= ret
        return ret

    ignore = set()  # set of files that shouldn't be synced or already in sync
    exclude = set(self.watch.exclude)
    # {cloud} - {local} -> download from cloud, or delete from cloud if it
    #                      exists in the history
    # ({cloud} & {local}) and hashes equal -> ignore
    # ({cloud} & {local}) and hashes not equal -> conflict/upload/download,
    #     depending on file update times and the time stored in the history
    for status, i in self.task('list', 40):
        if status:
            path = i['path']  # full file path !NOTE! getList doesn't return empty folders
            p = path_split(path)[0]  # containing folder
            if in_paths(p, exclude):
                continue
            if pathExists(path):
                if i['type'] == 'dir':  # it is existing directory
                    # Nothing to check for directories.  UGM comparison and
                    # restore could go here, but it needs last-updated data
                    # for directories in the history.
                    #### !!! Yd doesn't return empty folders in the file
                    #### list, so this branch never runs.
                    pass
                else:  # existing file
                    try:
                        with open(path, 'rb') as f:
                            hh = sha256(f.read()).hexdigest()
                    except:
                        # NOTE(review): bare except kept as-is — any read
                        # failure degrades to "hashes differ".
                        hh = ''
                    c_t = i['modified']  # cloud file modified date-time
                    l_t = int(file_info(path).st_mtime)  # local file modified date-time
                    h_t = self.h_data.get(path, l_t)  # history modified date-time
                    if hh == i['sha256']:
                        # Cloud and local hashes are equal; UGM could still
                        # be compared here (decide by c_t vs l_t).
                        # Add the file and its folders to ignore/history.
                        ignore_path_down(path)
                        continue
                    else:
                        # Cloud and local files differ.  Decide: upload,
                        # download, or conflict:
                        # - conflict if both are newer than the history,
                        # - download if the cloud file is newer than local,
                        # - upload if the local file is newer than cloud.
                        if l_t > h_t and c_t > h_t:  # conflict
                            info('conflict')
                            continue
                            ### Dead prototype below (unreachable after the
                            ### continue): not fully designed / untested.
                            # Concept: rename the older file to file.older
                            # and copy both files --> cloud and local.
                            path2 = path + '.older'
                            ignore.add(path2)
                            ignore.add(path)
                            if l_t > c_t:  # older file is in cloud
                                self.downloads.add(path2)
                                self.task('move', path2, path)  # need to do before rest
                                self._submit('down', path2)
                                self._submit('up', path)
                            else:  # local file is older than file in cloud
                                self.downloads.add(path)
                                fileMove(path, path2)  # it will be captured as move from & move to !!!???
                                self._submit('down', path)
                                self._submit('up', path2)
                            continue
                        elif l_t > c_t:  # local time greater than the cloud time
                            # upload (the file exists, so the cloud dir
                            # exists too — no need to create it)
                            self._submit('up', path)
                            ignore_path_down(path)  # ignore/history for all folders down to file
                            continue
                        else:  # download
                            # (the file exists, so the local dir exists too)
                            self.downloads.add(path)  # remember to avoid events on this path
                            self._submit('down', path)
                            ignore_path_down(path)  # ignore/history for all folders down to file
                            continue
            else:
                # The file does not exist locally: either download it, or
                # delete it from the cloud when the local copy was removed
                # while no client was watching (deletion not cached).
                if self.h_data.get(path, False):  # do we have history data for this path?
                    # History exists but the local path doesn't — delete
                    # the cloud copy.
                    if not pathExists(p):  # containing directory is also removed?
                        while True:  # go down to the shortest removed directory
                            p_ = path_split(p)[0]
                            if pathExists(p_):
                                break
                            p = p_
                        self.h_data.pop(p)  # remove history
                        ### !!! All files within this folder should be
                        ### removed from history too, but we can't walk a
                        ### deleted tree — needs a history database that
                        ### supports delete-where-path-startswith.
                        self._submit('del', p)
                        # Avoid unnecessary checks for other files under p.
                        exclude.add(p)
                    else:  # only the file was deleted
                        self._submit('del', path)
                        del self.h_data[path]  # remove history
                else:
                    # No history: the file has to be downloaded.
                    if i['type'] == 'file':
                        if not pathExists(p):
                            # Record the new dir in downloads to avoid
                            # re-uploading it, then create it.
                            self.downloads |= ignore_path_down(p)
                            makedirs(p, exist_ok=True)
                            ignore.add(p)
                        self.downloads.add(path)  # avoid upload of the downloaded file
                        self._submit('down', path)
                        ignore.add(path)
                    #else:  # directory doesn't exist — never runs (see above)
                    #    self.downloads.add(ignore_path_down(path))
                    #    makedirs(path, exist_ok=True)
    # ---- Done forward path (sync cloud to local) ------
    # (local - ignored) -> upload to cloud
    for root, dirs, files in walk(self.path):
        if in_paths(root, exclude):
            continue
        for d in dirs:
            d = path_join(root, d)
            if d not in ignore | exclude:
                # A directory must exist in the cloud before any file in it
                # is uploaded; mkdir is fast, so do it in-line.
                s, r = self.task('mkdir', d)
                info('done in-line %s %s' % (str(s), str(r)))
                ### !need to check success of folder creation and decide
                ### what to do on error!
        for f in files:
            f = path_join(root, f)
            if f not in ignore:
                self._submit('up', f)
    return 'fullSync'
def lookup(self, curDirFilePath, path):
    """Resolve path next to the including file first, else via the search path."""
    candidate = pathJoin(dirname(curDirFilePath), path)
    if not pathExists(candidate):
        # Not beside the including file: fall back to the ordinary lookup.
        return FileFinder.lookup(self, path)
    log('curdir: {0} -> {1}'.format(path, candidate))
    return candidate
# Interactive account bootstrap: build a Disk client per configured account,
# or walk the user through configuring a first account when none exist.
# NOTE(review): reconstructed from whitespace-mangled source and the chunk
# appears truncated — the inner configuration loop has no visible exit on
# success; confirm against the full original before relying on structure.
disks = []
while True:
    for user in config['disks'].values():
        disks.append(Disk(user))
    if disks:
        break
    else:
        from OAuth import getToken, getLogin
        print(_('No accounts configured'))
        if input(_('Do you want to configure new account (Y/n):')).lower(
        ) not in ('', 'y'):
            appExit(_('Exit.'))
        else:
            while True:
                path = ''
                # Keep prompting until the folder exists or is created.
                while not pathExists(path):
                    path = input(
                        _('Enter the path to local folder '
                          'which will by synchronized with cloud disk. (Default: ~/YandexDisk):'
                          ))
                    if not path:
                        path = '~/YandexDisk'
                    path = expanduser(path)
                    if not pathExists(path):
                        try:
                            makedirs(path_join(path, dataFolder), exist_ok=True)
                        except:
                            # Best-effort: report and re-prompt on any failure.
                            print(
                                'Error: Incorrect folder path specified (no access or wrong path name).'
                            )
def resolveFilename(scope, base="", path_prefix=None):
    """Resolve *base* into a concrete filesystem path for the given *scope*.

    scope -- one of the SCOPE_* keys of ``defaultPaths``.
    base -- filename to resolve, optionally carrying a ``:suffix`` part that
        is stripped before resolution and re-appended afterwards.  "" means
        "return the scope's directory itself".
    path_prefix -- directory substituted for a leading "~/" in *base*.

    Returns the resolved path (a directory result gains a trailing
    separator), or None when the scope is invalid or its PATH_CREATE
    directory cannot be created.
    """
    # You can only use the ~/ if we have a prefix directory.
    if str(base).startswith("~%s" % sep):
        if path_prefix:
            base = pathJoin(path_prefix, base[2:])
        else:
            print("[Directories] Warning: resolveFilename called with base starting with '~%s' but 'path_prefix' is None!" % sep)
    if str(base).startswith(sep):  # Don't further resolve absolute paths.
        return pathNormpath(base)
    if scope not in defaultPaths:  # If an invalid scope is specified log an error and return None.
        print("[Directories] Error: Invalid scope=%s provided to resolveFilename!" % scope)
        return None
    # NOTE(review): unpacked as `flag` here but as `flags` (unused) in the
    # branches below - harmless, but worth unifying.
    path, flag = defaultPaths[scope]
    # Ensure that the defaultPath directory that should exist for this scope does exist.
    if flag == PATH_CREATE and not pathExists(path):
        try:
            makedirs(path)
        except (IOError, OSError) as err:
            print("[Directories] Error %d: Couldn't create directory '%s'! (%s)" % (err.errno, path, err.strerror))
            return None
    suffix = None  # Remove any suffix data and restore it at the end.
    data = base.split(":", 1)
    if len(data) > 1:
        base = data[0]
        suffix = data[1]
    path = base

    def itemExists(resolveList, base):
        # Probe every directory in resolveList for base; icon names are also
        # probed under their .png/.svg twin.  Returns the first existing
        # path, otherwise base unchanged.
        # NOTE(review): the inner loop rebinds the parameter `base`, and
        # `file` shadows a builtin - confirm intent before renaming.
        baseList = [base]
        if base.endswith(".png"):
            baseList.append("%s%s" % (base[:-3], "svg"))
        elif base.endswith(".svg"):
            baseList.append("%s%s" % (base[:-3], "png"))
        for item in resolveList:
            for base in baseList:
                file = pathJoin(item, base)
                if pathExists(file):
                    return file
        return base

    if base == "":  # If base is "" then set path to the scope.  Otherwise use the scope to resolve the base filename.
        path, flags = defaultPaths[scope]
        if scope == SCOPE_GUISKIN:  # If the scope is SCOPE_GUISKIN append the current skin to the scope path.
            from Components.config import config  # This import must be here as this module finds the config file as part of the config initialisation.
            skin = pathDirname(config.skin.primary_skin.value)
            path = pathJoin(path, skin)
        elif scope in (SCOPE_PLUGIN_ABSOLUTE, SCOPE_PLUGIN_RELATIVE):
            # Derive the calling plugin's directory from the caller's frame.
            callingCode = pathNormpath(getframe(1).f_code.co_filename)
            plugins = pathNormpath(scopePlugins)
            path = None
            # NOTE(review): this branch calls comparePath() while the
            # base != "" plugin branch below calls comparePaths() - one of
            # the two names is almost certainly a typo; confirm which helper
            # actually exists in this module.
            if comparePath(plugins, callingCode):
                pluginCode = callingCode[len(plugins) + 1:].split(sep)
                if len(pluginCode) > 2:
                    relative = "%s%s%s" % (pluginCode[0], sep, pluginCode[1])
                    path = pathJoin(plugins, relative)
    elif scope == SCOPE_GUISKIN:
        global skinResolveList
        if not skinResolveList:
            # Build (and cache) the GUI-skin search path on first use.
            # This import must be here as this module finds the config file as part of the config initialisation.
            from Components.config import config
            skin = pathDirname(config.skin.primary_skin.value)
            skinResolveList = addInList(
                pathJoin(scopeConfig, skin),
                pathJoin(scopeConfig, "skin_common"),
                scopeConfig
            )
            if not "skin_default" in skin:
                skinResolveList += addInList(pathJoin(scopeGUISkin, skin))
            skinResolveList += addInList(
                pathJoin(scopeGUISkin, "skin_fallback_%d" % getDesktop(0).size().height()),
                pathJoin(scopeGUISkin, "skin_default"),
                scopeGUISkin
            )
        path = itemExists(skinResolveList, base)
    elif scope == SCOPE_LCDSKIN:
        global lcdskinResolveList
        if not lcdskinResolveList:
            # Build (and cache) the LCD/display-skin search path on first use.
            # This import must be here as this module finds the config file as part of the config initialisation.
            from Components.config import config
            if hasattr(config.skin, "display_skin"):
                skin = pathDirname(config.skin.display_skin.value)
            else:
                skin = ""
            lcdskinResolveList = addInList(
                pathJoin(scopeConfig, "display", skin),
                pathJoin(scopeConfig, "display", "skin_common"),
                scopeConfig,
                pathJoin(scopeLCDSkin, skin),
                pathJoin(scopeLCDSkin, "skin_fallback_%s" % getDesktop(1).size().height()),
                pathJoin(scopeLCDSkin, "skin_default"),
                scopeLCDSkin
            )
        path = itemExists(lcdskinResolveList, base)
    elif scope == SCOPE_FONTS:
        global fontsResolveList
        if not fontsResolveList:
            # Build (and cache) the font search path: config overrides first,
            # then the active skins, then the defaults.
            # This import must be here as this module finds the config file as part of the config initialisation.
            from Components.config import config
            skin = pathDirname(config.skin.primary_skin.value)
            display = pathDirname(config.skin.display_skin.value) if hasattr(config.skin, "display_skin") else None
            fontsResolveList = addInList(
                pathJoin(scopeConfig, "fonts"),
                pathJoin(scopeConfig, skin, "fonts"),
                pathJoin(scopeConfig, skin)
            )
            if display:
                fontsResolveList += addInList(
                    pathJoin(scopeConfig, "display", display, "fonts"),
                    pathJoin(scopeConfig, "display", display)
                )
            fontsResolveList += addInList(
                pathJoin(scopeConfig, "skin_common"),
                scopeConfig,
                pathJoin(scopeGUISkin, skin, "fonts"),
                pathJoin(scopeGUISkin, skin),
                pathJoin(scopeGUISkin, "skin_default", "fonts"),
                pathJoin(scopeGUISkin, "skin_default")
            )
            if display:
                fontsResolveList += addInList(
                    pathJoin(scopeLCDSkin, display, "fonts"),
                    pathJoin(scopeLCDSkin, display)
                )
            fontsResolveList += addInList(
                pathJoin(scopeLCDSkin, "skin_default", "fonts"),
                pathJoin(scopeLCDSkin, "skin_default"),
                scopeFonts
            )
        path = itemExists(fontsResolveList, base)
    elif scope == SCOPE_PLUGIN:
        file = pathJoin(scopePlugins, base)
        if pathExists(file):
            path = file
    elif scope in (SCOPE_PLUGIN_ABSOLUTE, SCOPE_PLUGIN_RELATIVE):
        callingCode = pathNormpath(getframe(1).f_code.co_filename)
        plugins = pathNormpath(scopePlugins)
        path = None
        if comparePaths(plugins, callingCode):
            pluginCode = callingCode[len(plugins) + 1:].split(sep)
            if len(pluginCode) > 2:
                relative = pathJoin("%s%s%s" % (pluginCode[0], sep, pluginCode[1]), base)
                path = pathJoin(plugins, relative)
    else:
        # Any other scope: simply join base onto the scope's directory.
        path, flags = defaultPaths[scope]
        path = pathJoin(path, base)
    path = pathNormpath(path)
    if pathIsdir(path) and not path.endswith(sep):  # If the path is a directory then ensure that it ends with a "/".
        path = "%s%s" % (path, sep)
    if scope == SCOPE_PLUGIN_RELATIVE:
        # NOTE(review): `plugins` is only bound when a plugin branch above
        # ran; it always is for SCOPE_PLUGIN_RELATIVE, so this works, but it
        # is fragile against reordering.
        path = path[len(plugins) + 1:]
    if suffix is not None:  # If a suffix was supplied restore it.
        path = "%s:%s" % (path, suffix)
    return path
def mkdir(path):
    """Create directory *path* (with parents) unless it already exists."""
    if pathExists(path):
        return  # nothing to do - avoid makedirs raising on an existing path
    makedirs(path)
"/usr/lib/openoffice.org3.0/program/", "/opt/lib/openoffice.org/program/", "/opt/lib/openoffice.org2.0/program/", "/opt/lib/openoffice.org2.2/program/", "/opt/lib/openoffice.org2.3/program/", "/opt/lib/openoffice.org2.4/program/", "/opt/lib/openoffice.org3.0/program/", "/opt/openoffice.org/program/", "/opt/openoffice.org2.0/program/", "/opt/openoffice.org2.2/program/", "/opt/openoffice.org2.3/program/", "/opt/openoffice.org2.4/program/", "/opt/openoffice.org3.0/program/", ] for possiblePath in possiblePaths: if pathExists(possiblePath): sys.path.append(possiblePath) try: import uno except ImportError: # unable to find Python UNO libraries, exiting sys.stderr.write("Error: Unable to find Python UNO libraries in %s. Exiting..." % sys.path) sys.exit(0) import unohelper from com.sun.star.beans import PropertyValue from com.sun.star.task import ErrorCodeIOException from com.sun.star.uno import Exception as UnoException from com.sun.star.connection import NoConnectException from com.sun.star.io import XOutputStream
def formMainFile(self, input_dictOfKeysAndValuesForPostgresRowInsert):
    """Register this experiment run in the results-database SQL dump.

    Builds one INSERT INTO "ResultUUIDRegistrationTable" statement from the
    supplied run metadata plus generated bookkeeping values (fresh UUID,
    start time, working directory, git commit hash) and writes it to a new
    <uuid>.sql file under nameOfFolderToSavePostgresSQLFile.

    input_dictOfKeysAndValuesForPostgresRowInsert -- dict holding exactly
        the ten metadata keys checked below; every value must be a
        non-empty string.  "seedNodeFilePath" must be an absolute file path
        (no trailing "/"); "datasetFilePath" must be an absolute directory
        path (trailing "/").  "queueSize"/"queryBudget" must be numeric
        strings and are inserted unescaped; all other values are escaped
        via postgres_escape_string.

    Side effects: sets self.uuidNotEscaped, self.nameOfMainFileToWriteTo
    and resets self.integerRunningNumberOfRecords; creates the .sql file.
    Returns None.

    NOTE(review): validation relies on requires()/assert, which are
    stripped under ``python -O``; raising ValueError would be sturdier.
    """
    requires(
        isinstance(input_dictOfKeysAndValuesForPostgresRowInsert, dict))
    requires(set(input_dictOfKeysAndValuesForPostgresRowInsert.keys()) ==
             {"datasetName", "algorithmName", "queueSize", "queryBudget",
              "typeOfRun", "seedNodeFilePath", "datasetFilePath",
              "terminalInput", "algorithmParameters", "seedNodesType"})
    requires(
        isinstance(
            input_dictOfKeysAndValuesForPostgresRowInsert[
                "seedNodeFilePath"], str))
    requires(
        len(input_dictOfKeysAndValuesForPostgresRowInsert[
            "seedNodeFilePath"]) > 0)
    # we require an absolute path in the seed node file provided.
    requires(
        input_dictOfKeysAndValuesForPostgresRowInsert["seedNodeFilePath"]
        [0] == "/")
    # the seed node file should NOT specify a directory.
    requires(
        input_dictOfKeysAndValuesForPostgresRowInsert["seedNodeFilePath"]
        [-1] != "/")
    # we require an absolute path in the dataset file provided.
    requires(
        input_dictOfKeysAndValuesForPostgresRowInsert["datasetFilePath"][0]
        == "/")
    # the data set file SHOULD specify a directory.
    requires(
        input_dictOfKeysAndValuesForPostgresRowInsert["datasetFilePath"]
        [-1] == "/")
    # still need to work-out how it will add content to the row listing how it has finished the run
    dictOfValuesToWriteIntoPostgresTable = {}
    self.integerRunningNumberOfRecords = 0
    categoryOfData = postgres_escape_string("experiment")
    self.uuidNotEscaped = myUUIDImplementation()
    uuidEscaped = postgres_escape_string(self.uuidNotEscaped)
    # To avoid confusion in other parts of the code, we do not store the
    # escaped form as a class variable, since self.uuidNotEscaped already is.
    startTimeDict = self.getCurrentTimeAsStringAndInt()
    directoryCodeRunAt = postgres_escape_string(getPathToThisDirectory())
    gitHashForLatestCommitRunBeingRunBuildOn = postgres_escape_string(
        getGitCommitHash(self.uuidNotEscaped))
    # The INSERT is assembled as three parts: column list, VALUES list and
    # terminator; columns and values are appended in lockstep below.
    startOfPostgresCommand = "INSERT INTO \"ResultUUIDRegistrationTable\" ( "
    middleOfPostgresCommand = " ) VALUES ( "
    endOfPostgresCommand = ");"
    # Note - below actually mutates the dictionary passed in.
    # NOTE(review): the code below only READS the input dict - the comment
    # above looks stale; confirm and drop it.
    dictOfValuesToWriteIntoPostgresTable["categoryOfData"] = categoryOfData
    dictOfValuesToWriteIntoPostgresTable["UUID"] = uuidEscaped
    dictOfValuesToWriteIntoPostgresTable[
        "timeCodeStartedRunningAsString"] = startTimeDict["stringTime"]
    dictOfValuesToWriteIntoPostgresTable[
        "timeCodeStartedRunningAsInt"] = startTimeDict["intTime"]
    dictOfValuesToWriteIntoPostgresTable[
        "directoryCodeRunAt"] = directoryCodeRunAt
    dictOfValuesToWriteIntoPostgresTable[
        "gitHashOfLatestCommitOfCodeRun"] = gitHashForLatestCommitRunBeingRunBuildOn
    dictOfValuesToWriteIntoPostgresTable[
        "timeCodeFinishedRunningAsString"] = "NULL"
    # we put a NULL value here because the column is not yet applicable. Note that
    # it is important we do NOT escape this value so that Postgresql DOES interpreate it as NULL.....
    dictOfValuesToWriteIntoPostgresTable[
        "timeCodeFinishedRunningAsInt"] = "NULL"
    # we put a NULL value here because the column is not yet applicable. Note that
    # it is important we do NOT escape this value so that Postgresql DOES interpreate it as NULL.....
    dictOfValuesToWriteIntoPostgresTable[
        "status"] = postgres_escape_string(self.getString_statusRunning())
    dictOfValuesToWriteIntoPostgresTable["errorInformation"] = "NULL"
    # we put a NULL value here because the column is not yet applicable (and
    # if the code runs successful, this should remain NULL....). Note that it is important we do NOT escape this value so that Postgresql
    # DOES interpreate it as NULL.....
    # Copy the caller-supplied metadata in, escaping everything except the
    # two numeric columns (which are validated as digit-only strings).
    for thisKey in input_dictOfKeysAndValuesForPostgresRowInsert:
        assert (isinstance(thisKey, str))
        assert (len(thisKey) > 0)
        assert (isinstance(
            input_dictOfKeysAndValuesForPostgresRowInsert[thisKey], str))
        assert (len(input_dictOfKeysAndValuesForPostgresRowInsert[thisKey])
                > 0)
        if (thisKey in {"queueSize", "queryBudget"}):
            # we check that the values passed in for the query budget and
            # queue size are numeric strings...
            assert (re.match(
                "^[0-9]*$",
                input_dictOfKeysAndValuesForPostgresRowInsert[thisKey]) !=
                    None)
            dictOfValuesToWriteIntoPostgresTable[
                thisKey] = input_dictOfKeysAndValuesForPostgresRowInsert[
                    thisKey]
        else:
            dictOfValuesToWriteIntoPostgresTable[thisKey] = postgres_escape_string(
                input_dictOfKeysAndValuesForPostgresRowInsert[thisKey])
    # Emit column names and values in matching order (dict iteration order
    # is identical for both appends, so the pairs line up).
    stringSeperator = ""
    for thisKey in dictOfValuesToWriteIntoPostgresTable:
        assert (isinstance(thisKey, str))
        assert (len(thisKey) > 0)
        assert (isinstance(dictOfValuesToWriteIntoPostgresTable[thisKey],
                           str))
        assert (len(dictOfValuesToWriteIntoPostgresTable[thisKey]) > 0)
        startOfPostgresCommand = startOfPostgresCommand + stringSeperator + "\"" + thisKey + "\""
        middleOfPostgresCommand = middleOfPostgresCommand + stringSeperator + dictOfValuesToWriteIntoPostgresTable[
            thisKey]
        stringSeperator = " , "
    self.nameOfMainFileToWriteTo = nameOfFolderToSavePostgresSQLFile + self.uuidNotEscaped + ".sql"
    assert (not pathExists(self.nameOfMainFileToWriteTo))
    #not strictly guarenteed, but highly probable given out UUIDs....
    mainFileHandleToWriteTo = open(self.nameOfMainFileToWriteTo, "w")
    mainFileHandleToWriteTo.write(startOfPostgresCommand +
                                  middleOfPostgresCommand +
                                  endOfPostgresCommand + "\n")
    mainFileHandleToWriteTo.flush()
    mainFileHandleToWriteTo.close()
    assert (pathExists(self.nameOfMainFileToWriteTo))
    #not strictly guarenteed, but highly probable given out UUIDs....
    return None
def StartSorting(self,path): #check if path exists if pathExists(path): #filter non-files files = listdir(path) print files filtered=[] for f in files: if isfile(join(path,f)): filtered.append(f) print filtered else: print 'WARNING: PATH DOES NOT EXIST PROGRAM WILL NOW EXIT' raw_input('press <enter> to continue') sysExit() self.CheckFolders(path) #define some lists self.executables=[] self.jars=[] self.images=[] self.misc=[] self.compressed=[] self.sounds=[] self.docs=[] self.torrents=[] #sorting for f in filtered: s = f.split('.') #check if the file has an extension. If not, it is probably a folder and will not be sorted if len(s) < 2: continue if f.endswith('.exe'): self.executables.append(".".join(s)) print 'sorting "%s" into executables...' % '.'.join(s) elif f.endswith('.jar'): self.jars.append(".".join(s)) print 'sorting "%s" into jars...' % '.'.join(s) elif f.endswith('.png') or f.endswith('.jpg') or f.endswith('.bmp') or f.endswith('.gif'): self.images.append(".".join(s)) print 'sorting "%s" into images...' % '.'.join(s) elif f.endswith('.zip') or f.endswith('.rar') or f.endswith('.7z') or f.endswith('.tar.gz'): self.compressed.append(".".join(s)) print 'sorting "%s" into compressed...' % '.'.join(s) elif f.endswith('.wav') or f.endswith('.mp3') or f.endswith('.mp4') or f.endswith('.ogg') or f.endswith('.it') or f.endswith('.midi'): self.sounds.append(".".join(s)) print 'sorting "%s" into compressed...' % '.'.join(s) elif f.endswith('.doc') or f.endswith('.odt') or f.endswith('.rtf') or f.endswith('.pdf') or f.endswith('docx') or f.endswith('.html') or f.endswith('.htm') or f.endswith('.xml'): self.docs.append(".".join(s)) print 'sorting "%s" into documents...' % '.'.join(s) elif f.endswith('.torrent'): self.torrents.append(".".join(s)) print 'sorting "%s" into torrent...' % '.'.join(s) else: self.misc.append(".".join(s)) print 'sorting "%s" into misc...' 
% '.'.join(s) self.Sort(path) #some Output print '===================Sorted===========================' print 'All sorting has finished' print '(If error messages are recieved. Please ignore them first, \nand check if your files were sorted)' print 'executables: ' + str(self.executables) print 'jars: ' + str(self.jars) print 'Images: ' + str(self.images) print 'compressed: ' + str(self.compressed) print 'sounds & music: ' + str(self.sounds) print 'Documents: ' + str(self.docs) print 'misc: ' + str(self.misc) print 'torrent: ' + str(self.torrents)