def execute(self, quals, columns):
    """Execute a python-ldap search built from the pushed-down quals.

    Builds an LDAP filter starting from the configured objectClass and
    AND-ing one clause per supported qual ("=" and "~~" operators),
    runs a synchronous ``search_s``, and yields one dict per LDAP
    entry, mapping attribute names (case-insensitively) to PostgreSQL
    column names.
    """
    request = unicode_("(objectClass=%s)") % self.object_class
    for qual in quals:
        if isinstance(qual.operator, tuple):
            operator = qual.operator[0]
        else:
            operator = qual.operator
        if operator in ("=", "~~"):
            # Non-string qual values (e.g. lists) cannot be escaped via
            # translate(); pass them through unchanged, matching the
            # ldap3-based implementations of this method.
            if hasattr(qual.value, "translate"):
                baseval = qual.value.translate(SPECIAL_CHARS)
                # SQL LIKE's '%' wildcard becomes LDAP's '*'.
                val = (baseval.replace("%", "*")
                       if operator == "~~" else baseval)
            else:
                val = qual.value
            request = unicode_("(&%s(%s=%s))") % (
                request, qual.field_name, val)
    # python-ldap expects a byte-string filter.
    request = request.encode('utf8')
    for _, item in self.ldap.search_s(self.path, self.scope, request):
        # Case insensitive lookup for the attributes
        litem = dict()
        for key, value in item.items():
            if key.lower() in self.field_definitions:
                pgcolname = self.field_definitions[key.lower()].column_name
                if pgcolname not in self.array_columns:
                    # Scalar column: unwrap the single-element list
                    # that python-ldap returns for every attribute.
                    value = value[0]
                litem[pgcolname] = value
        yield litem
def execute(self, quals, columns):
    """Run an ldap3 search for the pushed-down quals and yield rows.

    The filter starts from the configured objectClass; each "=" or
    "~~" qual adds one AND-ed clause.  Each matching entry becomes a
    dict keyed by PostgreSQL column name.
    """
    ldap_filter = unicode_("(objectClass=%s)") % self.object_class
    for qual in quals:
        op = (qual.operator[0] if isinstance(qual.operator, tuple)
              else qual.operator)
        if op not in ("=", "~~"):
            continue
        if hasattr(qual.value, "translate"):
            escaped = qual.value.translate(SPECIAL_CHARS)
            if op == "~~":
                # SQL LIKE wildcard -> LDAP wildcard.
                escaped = escaped.replace("%", "*")
            value = escaped
        else:
            # Non-string values are used as-is.
            value = qual.value
        ldap_filter = unicode_("(&%s(%s=%s))") % (
            ldap_filter, qual.field_name, value)
    self.ldap.search(
        self.path, ldap_filter, self.scope,
        attributes=list(self.field_definitions))
    for entry in self.ldap.response:
        # Attribute names are matched case-insensitively.
        row = dict()
        for attname, attvalue in entry["attributes"].items():
            definition = self.field_definitions.get(attname.lower())
            if definition is None:
                continue
            colname = definition.column_name
            if colname not in self.array_columns:
                # Scalar column: unwrap the single-element list.
                attvalue = attvalue[0]
            row[colname] = attvalue
        yield row
def execute(self, quals, columns):
    """Execute an ldap3 search built from the pushed-down quals.

    Builds an LDAP filter from the configured objectClass plus one
    AND-ed clause per "=" or "~~" qual, runs the search, and yields
    one dict per entry, mapping attribute names (case-insensitively)
    to PostgreSQL column names.
    """
    def version_tuple(verstr):
        # Numeric prefix of each dot-separated component, so that
        # e.g. '2.0.0b1' parses as (2, 0, 0).
        parts = []
        for component in verstr.split('.'):
            digits = ''
            for char in component:
                if not char.isdigit():
                    break
                digits += char
            parts.append(int(digits or 0))
        return tuple(parts)

    # ldap3 releases after 2.0.0 return scalar attribute values
    # directly instead of single-element lists.  Compare versions
    # numerically: the previous string comparison ordered '10.0.0'
    # before '2.0.0'.  Computed once, since it is loop-invariant.
    unwrap_scalars = version_tuple(ldap3.version.__version__) <= (2, 0, 0)
    request = unicode_("(objectClass=%s)") % self.object_class
    for qual in quals:
        if isinstance(qual.operator, tuple):
            operator = qual.operator[0]
        else:
            operator = qual.operator
        if operator in ("=", "~~"):
            if hasattr(qual.value, "translate"):
                baseval = qual.value.translate(SPECIAL_CHARS)
                # SQL LIKE's '%' wildcard becomes LDAP's '*'.
                val = (baseval.replace("%", "*")
                       if operator == "~~" else baseval)
            else:
                val = qual.value
            request = unicode_("(&%s(%s=%s))") % (
                request, qual.field_name, val)
    self.ldap.search(
        self.path, request, self.scope,
        attributes=list(self.field_definitions))
    for entry in self.ldap.response:
        # Case insensitive lookup for the attributes
        litem = dict()
        for key, value in entry["attributes"].items():
            if key.lower() in self.field_definitions:
                pgcolname = self.field_definitions[key.lower()].column_name
                if unwrap_scalars and pgcolname not in self.array_columns:
                    # Old ldap3: scalar columns arrive as 1-element lists.
                    value = value[0]
                litem[pgcolname] = value
        yield litem
def import_schema(self, schema, srv_options, options, restriction_type,
                  restricts):
    """Return TableDefinitions for a fixed set of three importable tables.

    Honors IMPORT FOREIGN SCHEMA ``LIMIT TO`` / ``EXCEPT`` restrictions
    and an ``nb_col`` option controlling how many text columns each
    table receives (default 3).
    """
    log_to_postgres(
        "IMPORT %s FROM srv %s OPTIONS %s RESTRICTION: %s %s" %
        (schema, srv_options, options, restriction_type, restricts))
    tables = set([
        unicode_("imported_table_1"),
        unicode_("imported_table_2"),
        unicode_("imported_table_3")
    ])
    if restriction_type == 'limit':
        tables = tables.intersection(set(restricts))
    elif restriction_type == 'except':
        tables = tables - set(restricts)
    # FDW options arrive as strings; coerce so range() accepts a
    # user-supplied 'nb_col'.  Invariant, so computed once.
    nb_col = int(options.get('nb_col', 3))
    rv = []
    for tname in sorted(list(tables)):
        table = TableDefinition(tname)
        for col in range(nb_col):
            table.columns.append(
                ColumnDefinition("col%s" % col,
                                 type_name="text",
                                 options={"option1": "value1"}))
        rv.append(table)
    return rv
def __init__(self, root_dir, pattern, file_mode=0o700):
    """Record the directory layout pattern and precompute its parts."""
    self.root_dir = unicode_(root_dir)
    self.pattern = unicode_(pattern)
    # Cache for file descriptors.
    self.cache = {}
    self.file_mode = file_mode
    regex, properties_by_part = _parse_pattern(self.pattern)
    self._path_parts_re = regex
    self._path_parts_properties = properties_by_part
    # Flattened set of every property named anywhere in the pattern.
    self.properties = {prop
                       for part in properties_by_part
                       for prop in part}
def get_items(self, quals, columns):
    """Return items matching the quals.

    An equality qual on the filename column short-circuits to a single
    existence-checked lookup; otherwise all equality quals on known
    properties are forwarded to the structured directory.
    """
    for qual in quals:
        if (qual.field_name != self.filename_column
                or qual.operator != '='):
            continue
        candidate = self.structured_directory.from_filename(
            unicode_(qual.value))
        if candidate is not None and os.path.exists(
                candidate.full_filename):
            return [candidate]
        return []
    known_properties = self.structured_directory.properties
    return self.structured_directory.get_items(**{
        qual.field_name: unicode_(qual.value)
        for qual in quals
        if qual.operator == '=' and qual.field_name in known_properties})
def get_items(self, quals, columns):
    """Return items matching the quals, with file timestamps.

    An equality qual on the filename column short-circuits to one
    stat-checked lookup whose mtime/ctime are recorded on the item;
    otherwise all equality quals on known properties are forwarded to
    the structured directory.
    """
    for qual in quals:
        if (qual.field_name != self.filename_column
                or qual.operator != '='):
            continue
        candidate = self.structured_directory.from_filename(
            unicode_(qual.value))
        if candidate is None or not os.path.isfile(
                candidate.full_filename):
            return []
        st = os.stat(candidate.full_filename)
        candidate.set_timestamps(st[stat.ST_MTIME], st[stat.ST_CTIME])
        return [candidate]
    known_properties = self.structured_directory.properties
    return self.structured_directory.get_items(**{
        qual.field_name: unicode_(qual.value)
        for qual in quals
        if qual.operator == '=' and qual.field_name in known_properties})
def strict_unicode(value):
    """
    Make sure that value is either unicode or (on Py 2.x) an ASCII
    string, and return it in unicode.  Raise TypeError otherwise.
    """
    if isinstance(value, basestring_):
        return unicode_(value)
    raise TypeError('Filename property values must be of type '
                    'unicode, got %r.' % value)
def execute(self, quals, columns):
    """Snapshot every GC-tracked object as a row of unicode strings.

    Each row reports the (best-effort) text form of the object, its
    original type, and the id/refcount of the converted object.
    """
    gc.collect()
    rows = []
    for obj in gc.get_objects():
        # Capture the type before obj is rebound to its text form.
        original_type = type(obj)
        if isinstance(obj, bytes):
            obj = obj.decode('utf8')
        elif isinstance(obj, unicode_):
            pass
        else:
            try:
                obj = bytes(obj).decode('utf8')
            except (UnicodeEncodeError, UnicodeDecodeError):
                try:
                    obj = unicode_(obj)
                except (UnicodeEncodeError, UnicodeDecodeError):
                    obj = unicode_("<NA>")
        # id/refcount deliberately describe the converted object.
        rows.append({'object': obj,
                     'type': unicode_(original_type),
                     'id': unicode_(id(obj)),
                     'refcount': unicode_(sys.getrefcount(obj))})
    return rows
def import_schema(self, schema, srv_options, options, restriction_type,
                  restricts):
    """Return TableDefinitions for a fixed set of three importable tables.

    Honors IMPORT FOREIGN SCHEMA ``LIMIT TO`` / ``EXCEPT`` restrictions
    and an ``nb_col`` option controlling how many text columns each
    table receives (default 3).
    """
    log_to_postgres("IMPORT %s FROM srv %s OPTIONS %s RESTRICTION: %s %s" %
                    (schema, srv_options, options, restriction_type,
                     restricts))
    tables = set([unicode_("imported_table_1"),
                  unicode_("imported_table_2"),
                  unicode_("imported_table_3")])
    if restriction_type == 'limit':
        tables = tables.intersection(set(restricts))
    elif restriction_type == 'except':
        tables = tables - set(restricts)
    # FDW options arrive as strings; coerce so range() accepts a
    # user-supplied 'nb_col'.  Invariant, so computed once.
    nb_col = int(options.get('nb_col', 3))
    rv = []
    for tname in sorted(list(tables)):
        table = TableDefinition(tname)
        for col in range(nb_col):
            table.columns.append(
                ColumnDefinition("col%s" % col,
                                 type_name="text",
                                 options={"option1": "value1"}))
        rv.append(table)
    return rv
def execute(self, quals, columns):
    """Yield-free scan of the GC: one row per tracked object.

    Rows contain a best-effort unicode rendering of the object plus
    its original type and the id/refcount of the rendered object.
    """
    gc.collect()
    out = []
    for obj in gc.get_objects():
        # Remember the type before obj may be rebound below.
        kind = type(obj)
        if isinstance(obj, bytes):
            obj = obj.decode('utf8')
        elif not isinstance(obj, unicode_):
            try:
                obj = bytes(obj).decode('utf8')
            except (UnicodeEncodeError, UnicodeDecodeError):
                try:
                    obj = unicode_(obj)
                except (UnicodeEncodeError, UnicodeDecodeError):
                    obj = unicode_("<NA>")
        row = {'object': obj}
        row['type'] = unicode_(kind)
        # id/refcount describe the converted object, as before.
        row['id'] = unicode_(id(obj))
        row['refcount'] = unicode_(sys.getrefcount(obj))
        out.append(row)
    return out
def _equals_cond(self, quals):
    """Map field names to unicode values for every equality qual."""
    return {qual.field_name: unicode_(qual.value)
            for qual in quals
            if qual.operator == '='}