def rsr_update_metafiles(prog=None, volume=None, volumedb=None, opts=None):
    """
    Walk all metafiles under the program's working directory and refresh
    any that report they need updating; freshly updated metafiles are
    written back and their data fields printed.

    :param prog: program context; only ``prog.pwd`` is read here.
    :param volume: volume whose store is synced once the walk completes.
    :param volumedb: unused in the live code path (all volumedb handling
        below is commented out).
    :param opts: unused here.
    """
    log.debug("{bblack}rsr{bwhite}:update-volume{default}")
    # NOTE(review): `i` counts walked paths but is never read afterwards.
    i = 0
    for path in res.Metafile.walk(prog.pwd):
        print(path)
        i += 1
        # `new` can never become True while the volumedb code below stays
        # commented out, so `new or updated` reduces to `updated`.
        new, updated = False, False
        metafile = res.Metafile(path)
        #if options:
        #metafile.basedir = 'media/application/metalink/'
        #if metafile.key in volumedb:
        #    metafile = volumedb[metafile.key]
        #    #log.info("Found %s in volumedb", metafile.key)
        #else:
        #    new = True
        if metafile.needs_update():
            log.note("Updating metafile for %s", metafile.path)
            metafile.update()
            updated = True
        #if updated or metafile.key not in volumedb:
        #    log.note("Writing %s to volumedb", metafile.key)
        #    volumedb[metafile.key] = metafile
        #    new = True
        if new or updated:
            #if options.persist_meta:
            #if metafile.non_zero:
            #    log.note("Overwriting previous metafile at %s", metafile.path)
            # Persist the refreshed metafile, then dump its fields.
            metafile.write()
            for k in metafile.data:
                print('\t'+k+':', metafile.data[k])
            print('\tSize: ', lib.human_readable_bytesize(
                metafile.data['Content-Length'], suffix_as_separator=True))
        else:
            print('\tOK')
    # NOTE(review): reconstructed as a single sync after the walk; confirm
    # the original did not sync once per metafile inside the loop.
    volume.store.sync()
def traverse(self, root, acceptorname='', acceptor=None):
    """
    Adapt *root* to ``ITree``, resolve its ``IVisitorAcceptor`` when none
    was supplied, and delegate to ``accept``.  The result is returned
    untouched, so ``accept`` may hand back a depth-first generator of
    ``visit*`` results.
    """
    log.debug('traverser : acceptor', root, acceptor, acceptorname)
    # Only tree nodes may be traversed.
    assert iface.Node.providedBy(root), root
    adapted = res_iface.ITree(root)
    # Fall back to the acceptor adapter registered under *acceptorname*.
    visitor = acceptor or res_iface.IVisitorAcceptor(adapted, acceptorname)
    log.debug('traverser -> acceptor.accept', adapted, visitor, acceptorname)
    return visitor.accept(self)
def init(Class, path, kind):
    """
    Construct a session object for *path*: resolve its homedir, volumedir
    and workspace, then select the context named by *kind*.  The special
    kind ``'default'`` picks the most specific context that exists
    (workspace, then volume, then user homedir).
    """
    session = Class(path)
    session.user = res.Homedir.fetch(path)
    session.volume = res.Volumedir.fetch(path)
    session.workspace = res.Workspace.fetch(path)
    if kind == 'default':
        # Most specific wins: workspace beats volume beats homedir.
        session.context = session.workspace or session.volume or session.user
        for label in ('workspace', 'volume', 'user'):
            if getattr(session, label):
                kind = label
                break
    else:
        # An explicit kind names the session attribute to use directly.
        session.context = getattr(session, kind)
    log.info("Session context type is %r" % (kind,))
    log.debug("Session.init: using context %r" % (session.context,))
    session.kind = kind
    return session
def rsr_status(prog=None, volume=None, opts=None):
    """
    Walk one level below the working directory and yield a status record
    per path: ``unknown`` when no metadata exists for it, ``updated``
    when its metadata is stale.  Yields 0 once the walk finishes.
    """
    log.debug("{bblack}rsr{bwhite}:status{default}")
    # print if superdir is OK
    #Meta.index.get(dirname(prog.pwd)) # start lookign from current dir
    meta = res.Meta(volume)
    # Non-interactive, non-recursing, single-level walk.
    walk_opts = confparse.Values(res.Dir.walk_opts.copy())
    walk_opts.interactive = False
    walk_opts.recurse = True
    walk_opts.max_depth = 1
    for candidate in res.Dir.walk_tree_interactive(prog.pwd, opts=walk_opts):
        if not meta.exists(candidate):
            yield dict(status=dict(unknown=[candidate]))
        elif not meta.clean(candidate):
            yield dict(status=dict(updated=[candidate]))
    yield 0
def rsr_session(self, prog, volume, workspace, homedir, opts):
    """
    TODO: get an Metadir session; lock (and later sync) an file/db.
    Move current db setup to new txs.

    Determine context, and from there get the session/dbref to initialize an
    SQLAlchemy session.  The context depends on the current working
    directory, and defaults to the nearest workspace; perhaps a volume or
    the homedir.

    Yields ``dict(context=...)`` when a repository context is found, and
    ``dict(sa=...)`` with the SQLAlchemy session.
    """
    session = Session.init(prog.pwd, opts.session)
    log.note('Session: %s', session)
    if session.context and confparse.haspath(session.context, 'data.repository.root_dir'):
        # Context carries repository settings: publish it and take its root.
        prog.session = session
        yield dict(context=session.context)
        log.note('Context: %s', session.context)
        repo_root = session.context.settings.data.repository.root_dir
    else:
        repo_root = 'sa_migrate'
    # SA session
    repo_path = os.path.join(repo_root, opts.repo)
    if os.path.exists(repo_path):
        log.info("Reading SA migrate config for repo %r" % repo_path)
        # hard coded module name, root dir for local repos
        from sa_migrate import custom
        config = custom.read(repo_path)
        log.info("Reading SA migrate config from %r" % config)
        repo_opts = custom.migrate_opts(repo_path, config)
        # The migrate config supplies the database URL for local repos.
        dbref = repo_opts['url']
    else:
        # No local repo checkout: fall back to the dbref given on opts.
        dbref = opts.dbref
    log.note('DBRef: %s', dbref)
    if opts.init_db:
        log.debug("Initializing SQLAlchemy session for %s", dbref)
    # NOTE(review): reconstructed with get_session outside the init_db
    # branch, since init_db is also passed as an argument below — confirm
    # the original indentation.
    sa = SessionMixin.get_session(opts.session, dbref, opts.init_db,
            metadata=SqlBase.metadata)
    yield dict(sa=sa)
def current_hostname(initialize=False, interactive=False):
    """
    Return the lowercased hostname from ``get_hostname()``.

    NOTE: everything after the first ``return`` is dead code kept under
    the FIXME below — an older interactive routine that resolved and
    cached an FQDN in ``~/.cllct/host``.  The *initialize* and
    *interactive* parameters are only read by that dead code.
    """
    host = dict(
        name = get_hostname()
    )
    hostnameId = host['name'].lower()
    return hostnameId;
    # FIXME: current_hostname
    # --- dead code below: unreachable after the return above ---
    hostname = None
    hostname_file = os.path.expanduser('~/.cllct/host')
    if os.path.exists(hostname_file):
        # Use the cached hostname if one was stored previously.
        hostname = open(hostname_file).read().strip()
    elif initialize:
        hostname = socket.gethostname()
        assert not isinstance(hostname, (tuple, list)), hostname
        log.debug(hostname)
        hostnames = socket.gethostbyaddr(hostname)
        # NOTE(review): loop nesting below is reconstructed from collapsed
        # source — confirm against history before reviving this path.
        while True:
            if socket.getfqdn() != hostname:
                hostname = hostnames[0] +"."
            else:
                log.err("FQDN is same as hostname")
                # cannot figure out what host to use
                while interactive:
                    hostname = prompt_choice_with_input("Which? ", hostnames[1])
                    if hostname:
                        break
                #if not interactive:
                #    raise ValueError("")
            if hostname:
                try:
                    nameinfo((hostname, 80))
                except Exception as e:
                    print('Warning: Cannot resolve FQDN', e)
                # Cache the resolved name for later runs.
                open(hostname_file, 'w+').write(hostname)
                print("Stored %s in %s" % (hostname, hostname_file))
                break
    return hostname
def lnd_tag(opts=None, sa=None, ur=None, pwd=None):
    """
    Experiment, interactive interface.  Tagging.

    Walks the tree under ``pwd.local_path``, splits each file/dir path
    into tag tokens, and interactively prompts (``raw_input``) for a type
    for every tag not seen before.  Yields once per tag as pacing for the
    driving generator loop; yields 1 on error or interrupt.
    """
    log.debug("{bblack}lnd{bwhite}:tag{default}")
    if not pwd:
        log.err("Not initialized")
        # NOTE(review): no return after this yield — on the next next()
        # the walk below runs with pwd=None and would raise; confirm
        # whether a `return` was intended here.
        yield 1
    tags = {}
    if '' not in tags:
        tags[''] = 'Root'
    # Split filesystem paths on separators, dots, plus and comma.
    FS_Path_split = re.compile('[\/\.\+,]+').split
    log.info("{bblack}Tagging paths in {green}%s{default}",
            os.path.realpath('.') + os.sep)
    try:
        for root, dirs, files in os.walk(pwd.local_path):
            for name in files + dirs:
                log.info("{bblack}Typing tags for {green}%s{default}", name)
                path = FS_Path_split(os.path.join(root, name))
                for tag in path:
                    # Bare yield: hand control back to the caller per tag.
                    yield
                    # Ask about each new tag, TODO: or rename, fuzzy match.
                    if tag not in tags:
                        # NOTE: `type` shadows the builtin; Python 2 raw_input.
                        type = raw_input('%s%s%s:?' % (
                            log.palette['yellow'], tag, log.palette['default']) )
                        if not type:
                            type = 'Tag'
                        tags[tag] = type
                log.info(''.join(
                    [ "{bwhite} %s:{green}%s{default}" % (tag, name)
                        for tag in path if tag in tags] ))
    except KeyboardInterrupt as e:
        log.err(e)
        yield 1
def rsr_commit(self, sa):
    """Commit pending changes on the SQLAlchemy session *sa*."""
    log.note("Committing SQL changes")
    sa.commit()
    log.debug("Commit finished")
# NOTE(review): the opening `try:` (a simplejson import, per the log
# message) sits above this chunk and is not visible here.
# NOTE: `except Exception, e` is Python 2-only syntax.
except Exception, e:
    pass#log.warn("Failed loading simplejson %r", e)
# Second preference: ujson, if installed.
try:
    import ujson
    loads = ujson.loads
    dumps = ujson.dumps
    load = ujson.load
    dump = ujson.dump
except Exception, e:
    pass#log.warn("Failed loading ujson %r", e)
# Last preference: stdlib json.
# NOTE(review): this block is not guarded by `if not loads`, and the
# stdlib import always succeeds — so json_ unconditionally overwrites any
# simplejson/ujson bindings made above, inverting the intended preference
# order, and the `if not loads` branch below can never fire.
try:
    import json as json_
    log.debug("Using json")
    loads = json_.loads
    dumps = json_.dumps
    load = json_.load
    dump = json_.dump
except Exception, e:
    pass#log.warn("Failed loading json %r", e)
if not loads:
    log.err("No known json library installed. Plain Python printing.")

def require(self):
    # Hard-exit when no json implementation was bound at import time.
    if not loads:
        import sys
        sys.exit(1)