def update(self, appdir, name=None, *args, **opts):
    """
    Refresh installed vendor(s) under ``<appdir>/vendor`` by re-fetching
    from each vendor's recorded ``fetch_uri``.

    :param appdir: application directory that contains ``vendor/``
    :param name: update only this vendor; ``None`` updates all of them
    :param opts: ``force=True`` also installs vendors delivered by a
        fetch that aren't present locally yet (all-vendors mode only)
    :return: 0 on success
    :raises VendorError: unknown vendor name, or the named vendor has
        no ``fetch_uri`` in its metadata
    """
    should_force = opts.get("force", False)
    vendordir = os.path.join(appdir, "vendor")
    if not os.path.isdir(vendordir):
        os.makedirs(vendordir)
    if name is not None:
        # single-vendor update
        if name not in self.installed_vendors(vendordir):
            raise VendorError("vendor `%s` doesn't exist" % name)
        dest = os.path.join(vendordir, name)
        metaf = os.path.join(dest, "metadata.json")
        meta = util.read_json(metaf)
        uri = meta.get("fetch_uri", "")
        if not uri:
            raise VendorError("Can't update vendor `%s`: fetch_uri undefined." % name)
        new_vendors, temppath = self.fetch_vendor(uri, *args, **opts)
        for vname, vpath, vmeta in new_vendors:
            if name != vname:
                continue
            else:
                # replace the installed copy with the fresh fetch
                util.deltree(dest)
                shutil.copytree(vpath, dest)
                util.write_json(metaf, vmeta)
                logger.info("%s updated in vendors" % vname)
                break
        util.deltree(temppath)
    else:
        # update all vendors
        updated = []
        for vendor in self.installed_vendors(vendordir):
            if vendor in updated:
                # already refreshed as part of an earlier fetch
                continue
            else:
                dest = os.path.join(vendordir, vendor)
                metaf = os.path.join(dest, "metadata.json")
                meta = util.read_json(metaf)
                uri = meta.get("fetch_uri", "")
                if not uri:
                    logger.warning("Can't update vendor `%s`: fetch_uri undefined." % vendor)
                    continue
                else:
                    new_vendors, temppath = self.fetch_vendor(uri, *args, **opts)
                    # one fetch may deliver several vendors; refresh
                    # each, or install it when forced
                    for vname, vpath, vmeta in new_vendors:
                        dest1 = os.path.join(vendordir, vname)
                        metaf1 = os.path.join(dest1, "metadata.json")
                        if os.path.exists(dest1):
                            util.deltree(dest1)
                            shutil.copytree(vpath, dest1)
                            util.write_json(metaf1, vmeta)
                            logger.info("%s updated in vendors" % vname)
                            updated.append(vname)
                        elif should_force:
                            # install forced
                            shutil.copytree(vpath, dest1)
                            util.write_json(metaf1, vmeta)
                            logger.info("%s installed in vendors" % vname)
                            updated.append(vname)
                    util.deltree(temppath)
    return 0
def _encode_content(name, path):
    """
    Return the pushable representation of the file at ``path``.

    Private helper for ``dir_to_fields``: ``*.json`` files are parsed
    (invalid json is logged and yields ``''``), text files are read as
    utf-8, and content that can't be treated as utf-8 text falls back
    to a ``base64-encoded;...`` marker string.
    """
    if name.endswith('.json'):
        try:
            return util.read_json(path, raise_on_error=True)
        except ValueError:
            logger.error("Json invalid in %s", path)
            return ''
    try:
        data = util.read(path)
    except UnicodeDecodeError:
        # plan B: re-read without utf-8 decoding
        logger.warning("%s isn't encoded in utf8", path)
        data = util.read(path, utf8=False)
        try:
            data.encode('utf-8')
        except UnicodeError:
            # plan C: the payload is binary, ship it as base64
            logger.warning("plan B didn't work, %s is a binary", path)
            logger.warning("use plan C: encode to base64")
            data = "base64-encoded;%s" % base64.b64encode(data)
    return data
def rjson(mo):
    """
    Regex-substitution callback resolving a json-macro match.

    Loads every file matching the macro's glob pattern (relative to
    the enclosing ``app_dir``) and grafts its content into the
    enclosing ``included`` mapping, keyed by the file's path
    components.

    :raises MacroError: when a matched file can't be read, or when
        nothing matches the pattern at all.
    """
    if mo.group(2).startswith('_attachments'):
        # someone want to include from attachments
        path = os.path.join(app_dir, mo.group(2).strip())
        filenum = 0
        for filename in glob.iglob(path):
            logger.debug("process json macro: %s" % filename)
            library = ''
            try:
                if filename.endswith('.json'):
                    library = util.read_json(filename)
                else:
                    library = util.read(filename)
            except IOError as e:  # `as` form: valid on py2.6+ and py3
                raise MacroError(str(e))
            filenum += 1
            current_file = filename.split(app_dir)[1]
            fields = current_file.split('/')
            count = len(fields)
            include_to = included
            for i, field in enumerate(fields):
                if i + 1 < count:
                    # fix: descend into an existing level instead of
                    # assigning a fresh {} — the old code clobbered
                    # entries added by previously processed files that
                    # share the same directory
                    include_to = include_to.setdefault(field, {})
                else:
                    include_to[field] = library
        if not filenum:
            raise MacroError("Processing code: No file matching '%s'" % mo.group(2))
def fetch_vendor(self, uri, *args, **opts):
    """
    Fetch a vendor from ``uri`` into a fresh temporary directory.

    :return: ``(vendors, path)`` where ``vendors`` is a list of
        ``(name, vendor_path, meta)`` tuples and ``path`` is the
        temporary directory the caller must delete when done.
    :raises VendorError: when no fetched directory contains a
        ``metadata.json`` file.
    """
    # locate the handler able to fetch this kind of uri
    handler = self.find_handler(uri)
    tempdir = _tempdir()
    handler.fetch(uri, tempdir, *args, **opts)
    found = []
    for entry in os.listdir(tempdir):
        entry_path = os.path.join(tempdir, entry)
        meta_path = os.path.join(entry_path, "metadata.json")
        if not os.path.isfile(meta_path):
            continue
        meta = util.read_json(meta_path)
        meta["fetch_uri"] = uri
        # metadata may carry its own vendor name; the directory name
        # is only the fallback
        vendor_name = meta.get('name', entry)
        found.append((vendor_name, entry_path, meta))
        os.unlink(meta_path)
    if not found:
        util.deltree(tempdir)
        raise VendorError("Invalid vendor, metadata not found.")
    return found, tempdir
def rjson(mo):
    """
    Regex-substitution callback resolving a json-macro match.

    Loads every file matching the macro's glob pattern (relative to
    the enclosing ``app_dir``) and grafts its content into the
    enclosing ``included`` mapping, keyed by the file's path
    components.

    :raises MacroError: when a matched file can't be read, or when
        nothing matches the pattern at all.
    """
    if mo.group(2).startswith('_attachments'):
        # someone want to include from attachments
        path = os.path.join(app_dir, mo.group(2).strip())
        filenum = 0
        for filename in glob.iglob(path):
            logger.debug("process json macro: %s" % filename)
            library = ''
            try:
                if filename.endswith('.json'):
                    library = util.read_json(filename)
                else:
                    library = util.read(filename)
            except IOError as e:  # `as` form: valid on py2.6+ and py3
                raise MacroError(str(e))
            filenum += 1
            current_file = filename.split(app_dir)[1]
            fields = current_file.split('/')
            count = len(fields)
            include_to = included
            for i, field in enumerate(fields):
                if i + 1 < count:
                    # fix: descend into an existing level instead of
                    # assigning a fresh {} — the old code clobbered
                    # entries added by previously processed files that
                    # share the same directory
                    include_to = include_to.setdefault(field, {})
                else:
                    include_to[field] = library
        if not filenum:
            raise MacroError("Processing code: No file matching '%s'" % mo.group(2))
def fetch_vendor(self, uri, *args, **opts):
    """
    fetch a vendor from uri

    :return: ``(vendors, path)`` — list of ``(name, vpath, meta)``
        tuples plus the temporary directory they were fetched into;
        the caller must delete ``path`` when done.
    :raises VendorError: when no fetched directory contains a
        ``metadata.json`` file.
    """
    # get fetch cmd
    vendor_obj = self.find_handler(uri)
    # execute fetch command
    path = _tempdir()
    vendor_obj.fetch(uri, path, *args, **opts)
    vendors = []
    for name in os.listdir(path):
        vpath = os.path.join(path, name)
        metaf = os.path.join(vpath, "metadata.json")
        if not os.path.isfile(metaf):
            continue
        meta = util.read_json(metaf)
        meta["fetch_uri"] = uri
        # metadata may declare the vendor's real name
        name = meta.get('name', name)
        vendors.append((name, vpath, meta))
        os.unlink(metaf)
    if not vendors:
        util.deltree(path)
        # fix: message typo "medata" -> "metadata", now consistent
        # with the other fetch_vendor implementation in this file
        raise VendorError("Invalid vendor, metadata not found.")
    return vendors, path
def dir_to_fields(self, current_dir=None, depth=0, manifest=None):
    """
    Process a directory and get all members

    Recursively walks ``current_dir`` (the doc dir by default) and
    builds the nested fields mapping to push; dotfiles, ignored
    paths, top-level ``_*`` entries and ``_attachments`` are skipped,
    and top-level ``couchapp``/``couchapp.json`` content is merged
    via ``_meta_to_fields``.

    :param manifest: ``list``. We will have side effect on this param.
    :return: ``dict`` of field name -> content (sub-dirs recurse).
    """
    fields = {}  # return value
    manifest = manifest if manifest is not None else []
    current_dir = current_dir if current_dir else self.docdir
    for name in os.listdir(current_dir):
        current_path = os.path.join(current_dir, name)
        rel_path = _replace_backslash(
            util.relpath(current_path, self.docdir))
        if name.startswith('.'):
            continue
        elif self.check_ignore(rel_path):
            continue
        elif depth == 0 and name.startswith('_'):
            # files starting with "_" are always "special"
            continue
        elif name == '_attachments':
            continue
        elif depth == 0 and (name in ('couchapp', 'couchapp.json')):
            # we are in app_meta
            if name == "couchapp":
                manifest.append('%s/' % rel_path)
                content = self.dir_to_fields(current_path,
                                             depth=depth + 1,
                                             manifest=manifest)
            else:
                manifest.append(rel_path)
                content = util.read_json(current_path)
            # merge app metadata into fields
            fields, content = self._meta_to_fields(fields, content)
        elif os.path.isdir(current_path):
            manifest.append('%s/' % rel_path)
            fields[name] = self.dir_to_fields(current_path,
                                              depth=depth + 1,
                                              manifest=manifest)
        else:
            # handler for normal file
            logger.debug('push %s', rel_path)
            # remove extension
            _name, ext = os.path.splitext(name)
            if _name in fields:
                # first file wins; a colliding basename is only warned
                logger.warning(
                    "%(name)s is already in properties. "
                    "Can't add (%(fqn)s)", {
                        'name': _name,
                        'fqn': rel_path
                    })
            else:
                manifest.append(rel_path)
                fields[_name] = self._encode_content(name, current_path)
    return fields
def dir_to_fields(self, current_dir=None, depth=0, manifest=None):
    """
    Process a directory and get all members

    Walks ``current_dir`` (the doc dir by default) recursively,
    skipping dotfiles, ignored paths, top-level ``_*`` entries and
    ``_attachments``; top-level ``couchapp``/``couchapp.json`` is
    merged via ``_meta_to_fields``, other files are encoded with
    ``_encode_content``.

    :param manifest: ``list``. We will have side effect on this param.
    :return: ``dict`` mapping field names to content.
    """
    fields = {}  # return value
    manifest = manifest if manifest is not None else []
    current_dir = current_dir if current_dir else self.docdir
    for name in os.listdir(current_dir):
        current_path = os.path.join(current_dir, name)
        rel_path = _replace_backslash(util.relpath(current_path,
                                                   self.docdir))
        if name.startswith('.'):
            continue
        elif self.check_ignore(rel_path):
            continue
        elif depth == 0 and name.startswith('_'):
            # files starting with "_" are always "special"
            continue
        elif name == '_attachments':
            continue
        elif depth == 0 and (name in ('couchapp', 'couchapp.json')):
            # we are in app_meta
            if name == "couchapp":
                manifest.append('%s/' % rel_path)
                content = self.dir_to_fields(
                    current_path, depth=depth + 1, manifest=manifest)
            else:
                manifest.append(rel_path)
                content = util.read_json(current_path)
            # fold the app metadata into fields
            fields, content = self._meta_to_fields(fields, content)
        elif os.path.isdir(current_path):
            manifest.append('%s/' % rel_path)
            fields[name] = self.dir_to_fields(
                current_path, depth=depth + 1, manifest=manifest)
        else:
            # handler for normal file
            logger.debug('push %s', rel_path)
            # remove extension
            _name, ext = os.path.splitext(name)
            if _name in fields:
                # first file wins on a basename collision; only warn
                logger.warning("%(name)s is already in properties. "
                               "Can't add (%(fqn)s)",
                               {'name': _name, 'fqn': rel_path})
            else:
                manifest.append(rel_path)
                fields[_name] = self._encode_content(name, current_path)
    return fields
def load(self, path, default=None):
    """
    Load config from ``path`` — a single filename or a list of
    filenames — merging each existing file's json into ``default``,
    which is updated in place and returned. Missing files are
    silently skipped.
    """
    conf = default
    # accept either one path or an iterable of paths
    candidates = [path] if isinstance(path, basestring) else path
    for candidate in candidates:
        if not os.path.isfile(candidate):
            continue
        conf.update(util.read_json(candidate, use_environment=True))
    return conf
def load(self, path, default=None):
    """
    Load config from ``path`` — a single filename or a list of
    filenames — merging each existing file's json into ``default``,
    which is updated in place and returned.

    :raises AppError: when a file exists but contains invalid json.
    """
    conf = default
    # accept either one path or an iterable of paths
    candidates = [path] if isinstance(path, basestring) else path
    for candidate in candidates:
        if not os.path.isfile(candidate):
            continue
        try:
            loaded = util.read_json(candidate, use_environment=True,
                                    raise_on_error=True)
        except ValueError:
            raise AppError("Error while reading %s" % candidate)
        conf.update(loaded)
    return conf
def pushdocs(conf, source, dest, *args, **opts):
    """
    Push every document found in ``source`` to the databases named by
    ``dest``.

    Each ``*.json`` file becomes one document (``_id`` defaults to the
    file name without extension); each sub-directory is loaded as a
    couchapp document. With ``export`` the docs are serialized to json
    (to ``output`` or stdout) instead of pushed; with ``no_atomic``
    each doc is saved individually instead of in one bulk request.
    """
    export = opts.get('export', False)
    noatomic = opts.get('no_atomic', False)
    browse = opts.get('browse', False)
    dbs = conf.get_dbs(dest)
    docs = []
    for d in os.listdir(source):
        docdir = os.path.join(source, d)
        # fix: test the entry name, not the joined path — the old
        # `docdir.startswith('.')` never matched, so hidden entries
        # were not skipped (the sibling pushdocs tests `d`)
        if d.startswith('.'):
            continue
        elif os.path.isfile(docdir):
            if d.endswith(".json"):
                doc = util.read_json(docdir)
                docid, ext = os.path.splitext(d)
                doc.setdefault('_id', docid)
                doc.setdefault('couchapp', {})
                if export or not noatomic:
                    docs.append(doc)
                else:
                    for db in dbs:
                        db.save_doc(doc, force_update=True)
        else:
            doc = document(docdir, is_ddoc=False)
            if export or not noatomic:
                docs.append(doc)
            else:
                doc.push(dbs, True, browse)
    if docs:
        if export:
            docs1 = []
            for doc in docs:
                if hasattr(doc, 'doc'):
                    docs1.append(doc.doc())
                else:
                    docs1.append(doc)
            # fix: export the converted docs1, not the raw document
            # objects — docs1 was built and then never used
            jsonobj = {'docs': docs1}
            if opts.get('output') is not None:
                # fix: write_json serializes by itself; passing a
                # pre-dumped string double-encoded the payload
                util.write_json(opts.get('output'), jsonobj)
            else:
                print(util.json.dumps(jsonobj))
        else:
            for db in dbs:
                docs1 = []
                for doc in docs:
                    if hasattr(doc, 'doc'):
                        docs1.append(doc.doc(db))
                    else:
                        newdoc = doc.copy()
                        try:
                            rev = db.last_rev(doc['_id'])
                            newdoc.update({'_rev': rev})
                        except ResourceNotFound:
                            pass
                        docs1.append(newdoc)
                try:
                    db.save_docs(docs1)
                except BulkSaveError as e:
                    # resolve conflicts by refreshing each doc's _rev
                    docs1 = []
                    for doc in e.errors:
                        try:
                            doc['_rev'] = db.last_rev(doc['_id'])
                            docs1.append(doc)
                        except ResourceNotFound:
                            pass
                    if docs1:
                        db.save_docs(docs1)
def dir_to_fields(self, current_dir="", depth=0, manifest=None):
    """
    process a directory and get all members

    :param manifest: ``list``; relative paths are appended to it in
        place. Defaults to a fresh list per call — the previous
        mutable default (``manifest=[]``) was shared between calls,
        leaking entries from one push into the next. The newer
        implementations in this file already use ``manifest=None``.
    :return: ``dict`` of field name -> content (sub-dirs recurse).
    """
    if manifest is None:
        manifest = []
    fields = {}
    if not current_dir:
        current_dir = self.docdir
    for name in os.listdir(current_dir):
        current_path = os.path.join(current_dir, name)
        rel_path = _replace_backslash(util.relpath(current_path,
                                                   self.docdir))
        if name.startswith("."):
            continue
        elif self.check_ignore(name):
            continue
        elif depth == 0 and name.startswith("_"):
            # files starting with "_" are always "special"
            continue
        elif name == "_attachments":
            continue
        elif depth == 0 and (name == "couchapp" or name == "couchapp.json"):
            # we are in app_meta
            if name == "couchapp":
                manifest.append("%s/" % rel_path)
                content = self.dir_to_fields(current_path,
                                             depth=depth + 1,
                                             manifest=manifest)
            else:
                manifest.append(rel_path)
                content = util.read_json(current_path)
                if not isinstance(content, dict):
                    content = {"meta": content}
            # strip attachment bookkeeping keys from the metadata
            if "signatures" in content:
                del content["signatures"]
            if "manifest" in content:
                del content["manifest"]
            if "objects" in content:
                del content["objects"]
            if "length" in content:
                del content["length"]
            if "couchapp" in fields:
                fields["couchapp"].update(content)
            else:
                fields["couchapp"] = content
        elif os.path.isdir(current_path):
            manifest.append("%s/" % rel_path)
            fields[name] = self.dir_to_fields(current_path,
                                              depth=depth + 1,
                                              manifest=manifest)
        else:
            # plain file: parse json, else read as text, else base64
            logger.debug("push %s", rel_path)
            content = ""
            if name.endswith(".json"):
                try:
                    content = util.read_json(current_path)
                except ValueError:
                    logger.error("Json invalid in %s", current_path)
            else:
                try:
                    content = util.read(current_path).strip()
                except UnicodeDecodeError:
                    logger.warning("%s isn't encoded in utf8",
                                   current_path)
                    content = util.read(current_path, utf8=False)
                    try:
                        content.encode("utf-8")
                    except UnicodeError:
                        logger.warning("plan B didn't work, "
                                       "%s is a binary", current_path)
                        logger.warning("use plan C: encode to base64")
                        content = "base64-encoded;%s" % \
                            base64.b64encode(content)
            # remove extension
            name, ext = os.path.splitext(name)
            if name in fields:
                # first file wins on a basename collision; only warn
                logger.warning(
                    "%(name)s is already in properties. "
                    "Can't add (%(fqn)s)",
                    {"name": name, "fqn": rel_path}
                )
            else:
                manifest.append(rel_path)
                fields[name] = content
    return fields
def update(self, appdir, name=None, *args, **opts):
    """
    Refresh installed vendor(s) under ``<appdir>/vendor`` by
    re-fetching from each vendor's recorded ``fetch_uri``.

    :param appdir: application directory that contains ``vendor/``
    :param name: update only this vendor; ``None`` updates all
    :param opts: ``force=True`` also installs vendors delivered by a
        fetch that aren't present locally yet (all-vendors mode only)
    :return: 0 on success
    :raises VendorError: unknown vendor name, or the named vendor has
        no ``fetch_uri`` in its metadata
    """
    should_force = opts.get('force', False)
    vendordir = os.path.join(appdir, "vendor")
    if not os.path.isdir(vendordir):
        os.makedirs(vendordir)
    if name is not None:
        # single-vendor update
        if name not in self.installed_vendors(vendordir):
            raise VendorError("vendor `%s` doesn't exist" % name)
        dest = os.path.join(vendordir, name)
        metaf = os.path.join(dest, "metadata.json")
        meta = util.read_json(metaf)
        uri = meta.get("fetch_uri", "")
        if not uri:
            raise VendorError(
                "Can't update vendor `%s`: fetch_uri undefined." % name)
        new_vendors, temppath = self.fetch_vendor(uri, *args, **opts)
        for vname, vpath, vmeta in new_vendors:
            if name != vname:
                continue
            else:
                # replace the installed copy with the fresh fetch
                util.deltree(dest)
                shutil.copytree(vpath, dest)
                util.write_json(metaf, vmeta)
                logger.info("%s updated in vendors" % vname)
                break
        util.deltree(temppath)
    else:
        # update all vendors
        updated = []
        for vendor in self.installed_vendors(vendordir):
            if vendor in updated:
                # already refreshed as part of an earlier fetch
                continue
            else:
                dest = os.path.join(vendordir, vendor)
                metaf = os.path.join(dest, "metadata.json")
                meta = util.read_json(metaf)
                uri = meta.get("fetch_uri", "")
                if not uri:
                    logger.warning(
                        "Can't update vendor `%s`: fetch_uri undefined."
                        % vendor)
                    continue
                else:
                    new_vendors, temppath = self.fetch_vendor(
                        uri, *args, **opts)
                    # one fetch may deliver several vendors; refresh
                    # each, or install it when forced
                    for vname, vpath, vmeta in new_vendors:
                        dest1 = os.path.join(vendordir, vname)
                        metaf1 = os.path.join(dest1, "metadata.json")
                        if os.path.exists(dest1):
                            util.deltree(dest1)
                            shutil.copytree(vpath, dest1)
                            util.write_json(metaf1, vmeta)
                            logger.info("%s updated in vendors" % vname)
                            updated.append(vname)
                        elif should_force:
                            # install forced
                            shutil.copytree(vpath, dest1)
                            util.write_json(metaf1, vmeta)
                            logger.info("%s installed in vendors" % vname)
                            updated.append(vname)
                    util.deltree(temppath)
    return 0
def pushdocs(conf, source, dest, *args, **opts):
    """
    Push every document found in ``source`` to the databases named by
    ``dest``.

    Each ``*.json`` file becomes one document (``_id`` defaults to the
    file name without extension); each sub-directory is loaded as a
    couchapp document. With ``export`` the docs are serialized to json
    (to ``output`` or stdout) instead of pushed; with ``no_atomic``
    each doc is saved individually instead of in one bulk request.
    """
    export = opts.get('export', False)
    noatomic = opts.get('no_atomic', False)
    browse = opts.get('browse', False)
    dbs = conf.get_dbs(dest)
    docs = []
    for d in os.listdir(source):
        docdir = os.path.join(source, d)
        if d.startswith('.'):
            continue
        elif os.path.isfile(docdir):
            if d.endswith(".json"):
                doc = util.read_json(docdir)
                docid, ext = os.path.splitext(d)
                doc.setdefault('_id', docid)
                doc.setdefault('couchapp', {})
                if export or not noatomic:
                    docs.append(doc)
                else:
                    for db in dbs:
                        db.save_doc(doc, force_update=True)
        else:
            doc = document(docdir, is_ddoc=False)
            if export or not noatomic:
                docs.append(doc)
            else:
                doc.push(dbs, True, browse)
    if docs:
        if export:
            docs1 = []
            for doc in docs:
                if hasattr(doc, 'doc'):
                    docs1.append(doc.doc())
                else:
                    docs1.append(doc)
            # fix: export the converted docs1, not the raw document
            # objects — docs1 was built and then never used
            jsonobj = {'docs': docs1}
            if opts.get('output'):
                util.write_json(opts.get('output'), jsonobj)
            else:
                print(util.json.dumps(jsonobj))
        else:
            for db in dbs:
                docs1 = []
                for doc in docs:
                    if hasattr(doc, 'doc'):
                        docs1.append(doc.doc(db))
                    else:
                        newdoc = doc.copy()
                        try:
                            rev = db.last_rev(doc['_id'])
                            newdoc.update({'_rev': rev})
                        except ResourceNotFound:
                            pass
                        docs1.append(newdoc)
                try:
                    db.save_docs(docs1)
                except BulkSaveError as e:
                    # resolve conflicts by refreshing each doc's _rev
                    docs1 = []
                    for doc in e.errors:
                        try:
                            doc['_rev'] = db.last_rev(doc['_id'])
                            docs1.append(doc)
                        except ResourceNotFound:
                            pass
                    if docs1:
                        db.save_docs(docs1)
def dir_to_fields(self, current_dir='', depth=0, manifest=[]):
    """
    process a directory and get all members

    NOTE(review): ``manifest=[]`` is a mutable default shared across
    calls, so entries accumulate between pushes; the newer
    implementation in this file uses ``manifest=None`` — confirm
    before relying on this one.
    """
    fields = {}
    if not current_dir:
        current_dir = self.docdir
    for name in os.listdir(current_dir):
        current_path = os.path.join(current_dir, name)
        rel_path = _replace_backslash(util.relpath(current_path,
                                                   self.docdir))
        if name.startswith("."):
            continue
        elif self.check_ignore(name):
            continue
        elif depth == 0 and name.startswith('_'):
            # files starting with "_" are always "special"
            continue
        elif name == '_attachments':
            continue
        elif depth == 0 and (name == 'couchapp' or name == 'couchapp.json'):
            # we are in app_meta
            if name == "couchapp":
                manifest.append('%s/' % rel_path)
                content = self.dir_to_fields(current_path,
                                             depth=depth+1,
                                             manifest=manifest)
            else:
                manifest.append(rel_path)
                content = util.read_json(current_path)
                if not isinstance(content, dict):
                    content = { "meta": content }
            # drop attachment bookkeeping keys from the metadata
            if 'signatures' in content:
                del content['signatures']
            if 'manifest' in content:
                del content['manifest']
            if 'objects' in content:
                del content['objects']
            if 'length' in content:
                del content['length']
            if 'couchapp' in fields:
                fields['couchapp'].update(content)
            else:
                fields['couchapp'] = content
        elif os.path.isdir(current_path):
            manifest.append('%s/' % rel_path)
            fields[name] = self.dir_to_fields(current_path,
                                              depth=depth+1,
                                              manifest=manifest)
        else:
            # plain file: parse json, else read as text, else base64
            logger.debug("push %s" % rel_path)
            content = ''
            if name.endswith('.json'):
                try:
                    content = util.read_json(current_path)
                except ValueError:
                    logger.error("Json invalid in %s" % current_path)
            else:
                try:
                    content = util.read(current_path).strip()
                except UnicodeDecodeError, e:
                    logger.warning("%s isn't encoded in utf8"
                                   % current_path)
                    content = util.read(current_path, utf8=False)
                    try:
                        content.encode('utf-8')
                    except UnicodeError, e:
                        logger.warning(
                            "plan B didn't work, %s is a binary"
                            % current_path)
                        logger.warning("use plan C: encode to base64")
                        content = "base64-encoded;%s" % base64.b64encode(
                            content)
            # remove extension
            name, ext = os.path.splitext(name)
            # NOTE(review): ('.txt') is a plain string, not a tuple, so
            # `ext in ('.txt')` is a substring test; basename
            # collisions with any other extension fall through to the
            # else branch and silently overwrite fields[name]. Probably
            # meant ('.txt',) — or simply `name in fields` as in the
            # sibling implementation. Confirm intent.
            if name in fields and ext in ('.txt'):
                logger.warning(
                    "%(name)s is already in properties. Can't add (%(name)s%(ext)s)" % {
                        "name": name,
                        "ext": ext
                    })
            else:
                manifest.append(rel_path)
                fields[name] = content
    # NOTE(review): no `return fields` here — the function returns
    # None, unlike the sibling implementations; the source may be
    # truncated at this point. Confirm against the full file.
def dir_to_fields(self, current_dir='', depth=0, manifest=[]):
    """
    process a directory and get all members

    NOTE(review): ``manifest=[]`` is a mutable default shared across
    calls, so entries accumulate between pushes; the newer
    implementation in this file uses ``manifest=None`` — confirm
    before relying on this one.
    """
    fields = {}
    if not current_dir:
        current_dir = self.docdir
    for name in os.listdir(current_dir):
        current_path = os.path.join(current_dir, name)
        rel_path = _replace_backslash(util.relpath(current_path,
                                                   self.docdir))
        if name.startswith("."):
            continue
        elif self.check_ignore(name):
            continue
        elif depth == 0 and name.startswith('_'):
            # files starting with "_" are always "special"
            continue
        elif name == '_attachments':
            continue
        elif depth == 0 and (name == 'couchapp' or name == 'couchapp.json'):
            # we are in app_meta
            if name == "couchapp":
                manifest.append('%s/' % rel_path)
                content = self.dir_to_fields(current_path,
                                             depth=depth+1,
                                             manifest=manifest)
            else:
                manifest.append(rel_path)
                content = util.read_json(current_path)
                if not isinstance(content, dict):
                    content = { "meta": content }
            # drop attachment bookkeeping keys from the metadata
            if 'signatures' in content:
                del content['signatures']
            if 'manifest' in content:
                del content['manifest']
            if 'objects' in content:
                del content['objects']
            if 'length' in content:
                del content['length']
            if 'couchapp' in fields:
                fields['couchapp'].update(content)
            else:
                fields['couchapp'] = content
        elif os.path.isdir(current_path):
            manifest.append('%s/' % rel_path)
            fields[name] = self.dir_to_fields(current_path,
                                              depth=depth+1,
                                              manifest=manifest)
        else:
            # plain file: parse json, else read as text, else base64
            logger.debug("push %s" % rel_path)
            content = ''
            if name.endswith('.json'):
                try:
                    content = util.read_json(current_path)
                except ValueError:
                    logger.error("Json invalid in %s" % current_path)
            else:
                try:
                    content = util.read(current_path).strip()
                except UnicodeDecodeError, e:
                    logger.warning("%s isn't encoded in utf8"
                                   % current_path)
                    content = util.read(current_path, utf8=False)
                    try:
                        content.encode('utf-8')
                    except UnicodeError, e:
                        logger.warning(
                            "plan B didn't work, %s is a binary"
                            % current_path)
                        logger.warning("use plan C: encode to base64")
                        content = "base64-encoded;%s" % base64.b64encode(
                            content)
            # remove extension
            name, ext = os.path.splitext(name)
            if name in fields:
                # first file wins on a basename collision; only warn
                logger.warning(
                    "%(name)s is already in properties. Can't add (%(fqn)s)" % {
                        "name": name,
                        "fqn": rel_path
                    })
            else:
                manifest.append(rel_path)
                fields[name] = content
    # NOTE(review): no `return fields` here — the function returns
    # None, unlike the sibling implementations; the source may be
    # truncated at this point. Confirm against the full file.