def rjson(mo):
    if mo.group(2).startswith('_attachments'):
        # someone wants to include from attachments
        path = os.path.join(app_dir, mo.group(2).strip())
        filenum = 0
        for filename in glob.iglob(path):
            logger.debug("process json macro: %s" % filename)
            library = ''
            try:
                if filename.endswith('.json'):
                    library = read_json(filename)
                else:
                    library = read_file(filename)
            except IOError, e:
                raise MacroError(str(e))
            filenum += 1

            # mirror the file's relative path as nested dicts in `included`
            current_file = filename.split(app_dir)[1]
            fields = current_file.split('/')
            count = len(fields)
            include_to = included
            for i, field in enumerate(fields):
                if i + 1 < count:
                    include_to[field] = {}
                    include_to = include_to[field]
                else:
                    include_to[field] = library

        if not filenum:
            raise MacroError(
                "Processing code: No file matching '%s'" % mo.group(2))
def rjson(mo):
    if mo.group(2).startswith('_attachments'):
        # someone wants to include from attachments
        path = os.path.join(app_dir, mo.group(2).strip())
        filenum = 0
        for filename in glob.iglob(path):
            library = ''
            try:
                if filename.endswith('.json'):
                    library = read_json(filename)
                else:
                    library = read_file(filename)
            except IOError, e:
                print >> sys.stderr, e
                sys.exit(1)
            filenum += 1

            current_file = filename.split(app_dir)[1]
            fields = current_file.split('/')
            count = len(fields)
            include_to = included
            for i, field in enumerate(fields):
                if i + 1 < count:
                    include_to[field] = {}
                    include_to = include_to[field]
                else:
                    include_to[field] = library

        if not filenum:
            print >> sys.stderr, \
                "Processing code: No file matching '%s'" % mo.group(2)
            sys.exit(-1)
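# A minimal standalone sketch (not part of couchapp) of the nested-dict walk
# used by rjson above: each path segment becomes a dict key and the final
# segment receives the loaded content. The helper name `nest_path` is
# illustrative only, under the assumption the path uses '/' separators.
def nest_path(included, rel_path, library):
    fields = rel_path.split('/')
    count = len(fields)
    include_to = included
    for i, field in enumerate(fields):
        if i + 1 < count:
            include_to[field] = {}
            include_to = include_to[field]
        else:
            include_to[field] = library
    return included

# Example:
#   nest_path({}, '_attachments/lib/helpers.json', {"a": 1})
#   -> {'_attachments': {'lib': {'helpers.json': {'a': 1}}}}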
def pushdocs(path, dbs, atomic=True, export=False):
    """ push multiple docs in a path """
    if not isinstance(dbs, (list, tuple)):
        dbs = [dbs]

    docs = []
    for d in os.listdir(path):
        docdir = os.path.join(path, d)
        if d.startswith('.'):
            # skip hidden entries (check the entry name, not the joined path)
            continue
        elif os.path.isfile(docdir):
            if d.endswith(".json"):
                doc = utils.read_json(docdir)
                docid, ext = os.path.splitext(d)
                doc.setdefault('_id', docid)
                doc.setdefault('couchapp', {})
                if not atomic:
                    for db in dbs:
                        db.save_doc(doc, force_update=True)
                else:
                    docs.append(doc)
        else:
            # a directory is loaded as a full document
            doc = document(docdir, is_ddoc=False)
            if not atomic:
                doc.push(dbs, atomic=False)
            else:
                docs.append(doc)

    if docs:
        if export:
            docs1 = []
            for doc in docs:
                if hasattr(doc, 'doc'):
                    docs1.append(doc.doc())
                else:
                    docs1.append(doc)
            jsonobj = {'docs': docs1}
            return jsonobj
        else:
            for db in dbs:
                docs1 = []
                for doc in docs:
                    if hasattr(doc, 'doc'):
                        docs1.append(doc.doc(db))
                    else:
                        newdoc = doc.copy()
                        try:
                            rev = db.last_rev(doc['_id'])
                            newdoc.update({'_rev': rev})
                        except ResourceNotFound:
                            pass
                        docs1.append(newdoc)
                try:
                    db.save_docs(docs1)
                except BulkSaveError, e:
                    # resolve conflicts by refetching the latest revisions
                    docs1 = []
                    for doc in e.errors:
                        try:
                            doc['_rev'] = db.last_rev(doc['_id'])
                            docs1.append(doc)
                        except ResourceNotFound:
                            pass
                    if docs1:
                        db.save_docs(docs1)
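# Hedged usage sketch for pushdocs, assuming it is called from the module
# above (so `utils` and `document` are in scope). With atomic=True and
# export=True no database is contacted: the docs found under `path` are only
# collected and returned as a _bulk_docs-style payload, so an empty dbs list
# suffices. The sample directory built here is purely illustrative.
import json
import os
import tempfile

sample_dir = tempfile.mkdtemp()
with open(os.path.join(sample_dir, 'blog-post.json'), 'w') as f:
    json.dump({"title": "Hello"}, f)

payload = pushdocs(sample_dir, [], atomic=True, export=True)
# payload == {'docs': [{'title': 'Hello', '_id': 'blog-post', 'couchapp': {}}]}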
def dir_to_fields(self, current_dir='', depth=0, manifest=None):
    """ process a directory and get all members """
    # use None instead of a mutable default list so state never leaks
    # between calls; callers may still pass their own manifest list
    if manifest is None:
        manifest = []
    fields = {}
    if not current_dir:
        current_dir = self.docdir
    for name in os.listdir(current_dir):
        current_path = os.path.join(current_dir, name)
        rel_path = _replace_backslash(utils.relpath(current_path, self.docdir))
        if name.startswith("."):
            continue
        elif self.check_ignore(name):
            continue
        elif depth == 0 and name.startswith('_'):
            # files starting with "_" are always "special"
            continue
        elif name == '_attachments':
            continue
        elif depth == 0 and (name == 'couchapp' or name == 'couchapp.json'):
            # we are in app_meta
            if name == "couchapp":
                manifest.append('%s/' % rel_path)
                content = self.dir_to_fields(current_path, depth=depth+1,
                                             manifest=manifest)
            else:
                manifest.append(rel_path)
                content = utils.read_json(current_path)
                if not isinstance(content, dict):
                    content = {"meta": content}
            if 'signatures' in content:
                del content['signatures']
            if 'manifest' in content:
                del content['manifest']
            if 'objects' in content:
                del content['objects']
            if 'length' in content:
                del content['length']
            if 'couchapp' in fields:
                fields['couchapp'].update(content)
            else:
                fields['couchapp'] = content
        elif os.path.isdir(current_path):
            manifest.append('%s/' % rel_path)
            fields[name] = self.dir_to_fields(current_path, depth=depth+1,
                                              manifest=manifest)
        else:
            logger.debug("push %s" % rel_path)
            content = ''
            if name.endswith('.json'):
                try:
                    content = utils.read_json(current_path)
                except ValueError:
                    logger.error("Json invalid in %s" % current_path)
            else:
                try:
                    content = utils.read_file(current_path).strip()
                except UnicodeDecodeError:
                    logger.warning("%s isn't encoded in utf8" % current_path)
                    content = utils.read_file(current_path, utf8=False)
                    try:
                        content.encode('utf-8')
                    except UnicodeError:
                        logger.warning(
                            "plan B didn't work, %s is a binary" % current_path)
                        logger.warning("use plan C: encode to base64")
                        content = "base64-encoded;%s" % base64.b64encode(content)

            # remove extension
            name, ext = os.path.splitext(name)
            if name in fields:
                logger.warning(
                    "%(name)s is already in properties. Can't add (%(fqn)s)" % {
                        "name": name, "fqn": rel_path})
            else:
                manifest.append(rel_path)
                fields[name] = content
    return fields
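# Hedged usage sketch for dir_to_fields, assuming `document(...)` above
# returns an object exposing this method with self.docdir pointing at a
# couchapp directory. The path below is hypothetical; the call mirrors how
# the method is used when building a design doc: `fields` is the nested dict
# of file contents and `manifest` collects the relative paths (directories
# get a trailing '/') needed to rebuild the tree later.
#
#   manifest = []
#   doc = document('/path/to/myapp', is_ddoc=True)
#   fields = doc.dir_to_fields(manifest=manifest)
#   # fields['views']['recent']['map']  -> contents of views/recent/map.js
#   # manifest -> ['views/', 'views/recent/', 'views/recent/map.js', ...]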