def test_check_replaced_and_modified_file(integration_env):
    """When a tracked file's content is overwritten AND its URL xattr is
    repointed at another record, the old record is orphaned (IGNOREd) and
    the other record adopts the path as a modified move."""
    # Two tracked records, both previously synced from their URLs.
    record = Record(last_status=Status.UPDATED, updated_at=time.time() - 10, sha=digest('original'), local_path='file', strategy=Strategy.SYNC)
    record2 = Record(last_status=Status.UPDATED, updated_at=time.time() - 10, sha=digest('original2'), local_path='file2', strategy=Strategy.SYNC)
    config['records'].update({'http://file': record, 'http://file2': record2})
    # Replace file with file2
    with open('file', 'w') as f:
        f.write('changed')
    xattr('file').set(XATTR_KEY_URL, 'http://file2'.encode())
    changes = check_all()
    # record 1 lost its path (no replacement); record 2 moved file2 -> file.
    assert set(changes) == {('http://file', 'file', None), ('http://file2', 'file2', 'file')}
    assert record.local_path == None
    assert record.strategy == Strategy.IGNORE
    assert record2.local_path == 'file'
    assert record2.strategy == Strategy.SYNC
    # content differs from record2's stored sha, so it is flagged modified
    assert record2.local_modified == True
def test_attr(self):
    """Round-trip set/get/delete of extended attributes on a temp file."""
    attrs = xattr.xattr(self.tempfile)
    self.assertEqual(attrs.keys(), [])
    self.assertEqual(dict(attrs), {})
    attrs['user.sopal'] = b'foo'
    attrs['user.sop.foo'] = b'bar'
    attrs[u'user.\N{SNOWMAN}'] = b'not a snowman'
    del attrs
    # Fresh handle: everything below must come from the filesystem,
    # not from any in-memory cache of the old object.
    attrs = xattr.xattr(self.tempfile)
    for key, expected in ((u'user.sopal', b'foo'),
                          (u'user.sop.foo', b'bar'),
                          (u'user.\N{SNOWMAN}', b'not a snowman')):
        self.assertTrue(key in attrs)
        self.assertEqual(attrs[key], expected)
    del attrs[u'user.\N{SNOWMAN}']
    del attrs['user.sop.foo']
    del attrs
    attrs = xattr.xattr(self.tempfile)
    self.assertTrue('user.sop.foo' not in attrs)
def test_attr(self):
    """Set/get/list/delete xattrs; values must persist across re-opens."""
    attrs = xattr.xattr(self.tempfile)
    self.assertEqual(attrs.keys(), [])
    self.assertEqual(attrs.list(), [])
    self.assertEqual(dict(attrs), {})
    attrs['user.sopal'] = b'foo'
    attrs['user.sop.foo'] = b'bar'
    attrs[u'user.\N{SNOWMAN}'] = b'not a snowman'
    del attrs
    # Fresh handle: everything below must come from the filesystem.
    attrs = xattr.xattr(self.tempfile)
    listed = set(attrs.list())
    for key, expected in ((u'user.sopal', b'foo'),
                          (u'user.sop.foo', b'bar'),
                          (u'user.\N{SNOWMAN}', b'not a snowman')):
        self.assertTrue(key in attrs)
        self.assertTrue(key in listed)
        self.assertEqual(attrs[key], expected)
    del attrs[u'user.\N{SNOWMAN}']
    del attrs['user.sop.foo']
    del attrs
    attrs = xattr.xattr(self.tempfile)
    self.assertTrue('user.sop.foo' not in attrs)
def remove_quarantine_from_item(some_path):
    '''Removes the com.apple.quarantine xattr from some_path, if present.

    Best-effort: any failure is logged as a warning and swallowed so that
    quarantine cleanup never aborts the caller's processing.'''
    try:
        # One xattr object instead of constructing it twice (list + remove).
        attrs = xattr.xattr(some_path)
        if "com.apple.quarantine" in attrs.list():
            attrs.remove("com.apple.quarantine")
    except BaseException as err:
        # 'except X as err' replaces the Python-2-only 'except X, err' form;
        # it parses on Python 2.6+ and Python 3 alike.
        display.display_warning(
            "Error removing com.apple.quarantine from %s: %s",
            some_path, err)
def remove_quarantine(some_path):
    '''Removes the com.apple.quarantine xattr from some_path, if present.

    Best-effort: any failure is logged as a warning and swallowed.'''
    try:
        # One xattr object instead of constructing it twice (list + remove).
        attrs = xattr.xattr(some_path)
        if "com.apple.quarantine" in attrs.list():
            attrs.remove("com.apple.quarantine")
    except BaseException as err:
        # 'except X as err' replaces the Python-2-only 'except X, err' form;
        # it parses on Python 2.6+ and Python 3 alike.
        display.display_warning(
            "Error removing com.apple.quarantine from %s: %s",
            some_path, err)
def file_follow_durable( path, min_dump_interval=10, xattr_name='user.collectd.logtail.pos', xattr_update=True, **follow_kwz ):
    '''Records log position into xattrs after reading line every min_dump_interval seconds.
        Checksum of the last line at the position is also recorded
        (so line itself don't have to fit into xattr) to make sure
        file wasn't truncated between last xattr dump and re-open.'''
    # Local imports keep this helper self-contained when dropped into a plugin.
    from xattr import xattr
    from io import open
    from hashlib import sha1
    from time import time
    import struct
    # Try to restore position
    src = open(path, mode='rb')
    src_xattr = xattr(src)
    try:
        if not xattr_name: raise KeyError
        pos = src_xattr[xattr_name]
    except KeyError: pos = None
    if pos:
        # Stored layout: =I offset, =I line length, 20-byte sha1 of that line.
        data_len = struct.calcsize('=I')
        (pos,), chksum = struct.unpack('=I', pos[:data_len]), pos[data_len:]
        (data_len,), chksum = struct.unpack('=I', chksum[:data_len]), chksum[data_len:]
        try:
            src.seek(pos - data_len)
            # Mismatch means the file was truncated/rotated since the dump.
            if sha1(src.read(data_len)).digest() != chksum:
                raise IOError('Last log line doesnt match checksum')
        except (OSError, IOError) as err:
            collectd.info('Failed to restore log position: {}'.format(err))
            src.seek(0)
    tailer = file_follow(src, yield_file=True, **follow_kwz)
    # ...and keep it updated
    pos_dump_ts_get = lambda ts=None: (ts or time()) + min_dump_interval
    pos_dump_ts = pos_dump_ts_get()
    while True:
        line, src_chk = next(tailer)
        if not line: pos_dump_ts = 0 # force-write xattr
        ts = time()
        if ts > pos_dump_ts:
            # file_follow may have re-opened the file (rotation): re-bind.
            if src is not src_chk: src, src_xattr = src_chk, xattr(src_chk)
            pos_new = src.tell()
            if pos != pos_new:
                pos = pos_new
                if xattr_update:
                    src_xattr[xattr_name] =\
                        struct.pack('=I', pos)\
                        + struct.pack('=I', len(line))\
                        + sha1(line).digest()
            pos_dump_ts = pos_dump_ts_get(ts)
        # Caller may .send() a truthy value to stop the follower cleanly.
        if (yield line.decode('utf-8', 'replace')):
            tailer.send(StopIteration)
            break
def remove_quarantine_from_item(some_path):
    '''Removes com.apple.quarantine from some_path without following symlinks.

    Best-effort: any failure is logged as a warning and swallowed.'''
    try:
        # One xattr object instead of constructing it twice (list + remove).
        attrs = xattr.xattr(some_path)
        if ("com.apple.quarantine" in
                attrs.list(options=xattr.XATTR_NOFOLLOW)):
            attrs.remove("com.apple.quarantine",
                         options=xattr.XATTR_NOFOLLOW)
    except BaseException as err:
        display.display_warning(
            "Error removing com.apple.quarantine from %s: %s",
            some_path, err)
def testSymlinkAttrs(self):
    """xattrs set on a symlink (NOFOLLOW) must not leak onto its target."""
    link_path = self.tempfilename + '.link'
    os.symlink(self.tempfilename, link_path)
    try:
        link_attrs = xattr.xattr(link_path, options=xattr.XATTR_NOFOLLOW)
        target_attrs = xattr.xattr(self.tempfilename)
        link_attrs['user.islink'] = 'true'
        # The real file stays clean; only the link itself carries the attr.
        self.assertEqual(dict(target_attrs), {})
        self.assertEqual(link_attrs['user.islink'], 'true')
    finally:
        os.remove(link_path)
def assert_identical_directories(path1, path2):
    """ Verifies two directories have identical contents, recursively.
    Checks presence in both trees, extended attributes, file type (via the
    lstat mode bits), size, mtime, and md5 of content.  It does not check
    other attributes like uid and gid, since they can be expected to change.
    """
    seen = set([])
    for file1 in os.listdir(path1):
        seen.add(file1)
        entry1 = os.path.join(path1, file1)
        entry2 = os.path.join(path2, file1)
        assert os.path.exists(entry2), "\"{f1}\" exists in \"{p1}\" but not \"{p2}\"".format(f1=file1, p1=path1, p2=path2)
        # Extended attributes
        xattr1 = xattr.xattr(entry1)
        xattr2 = xattr.xattr(entry2)
        assert set(xattr1.list()) == set(xattr2.list()), "list of extended attributes on \"{f1}\" ({l1}) differs from \"{f2}\" ({l2})".format(f1=entry1, l1=xattr1.list(), f2=entry2, l2=xattr2.list())
        for attribute in xattr1.list():
            assert xattr1.get(attribute) == xattr2.get(attribute), "extended attribute \"{a1}\" on \"{f1}\" doesn't match value from \"{f2}\"".format(a1=attribute, f1=entry1, f2=entry2)
        # Why do it this way? We want to lstat() instead of stat(), so we
        # can't use os.path.isdir() and friends (they follow symlinks).
        stat1 = os.lstat(entry1)
        stat2 = os.lstat(entry2)
        # Modes: both entries must be the same kind of filesystem object.
        mode1 = stat1.st_mode
        mode2 = stat2.st_mode
        if stat.S_ISREG(mode1):
            assert stat.S_ISREG(mode2)
        if stat.S_ISDIR(mode1):
            assert stat.S_ISDIR(mode2)
        if stat.S_ISLNK(mode1):
            assert stat.S_ISLNK(mode2)
        if stat.S_ISCHR(mode1):
            assert stat.S_ISCHR(mode2)
        if stat.S_ISBLK(mode1):
            assert stat.S_ISBLK(mode2)
        if stat.S_ISFIFO(mode1):
            assert stat.S_ISFIFO(mode2)
        if stat.S_ISSOCK(mode1):
            assert stat.S_ISSOCK(mode2)
        # Sizes and the like
        assert stat1.st_size == stat2.st_size, "size mismatch for \"{e1}\" ({s1}) and \"{e2}\" ({s2})".format(e1=entry1, s1=stat1.st_size, e2=entry2, s2=stat2.st_size)
        assert stat1.st_mtime == stat2.st_mtime, "mtime mismatch for \"{e1}\" and \"{e2}\"".format(e1=entry1, e2=entry2)
        assert _md5_path(entry1) == _md5_path(entry2), "md5 hash mismatch for \"{e1}\" and \"{e2}\"".format(e1=entry1, e2=entry2)
        if os.path.isdir(entry1):
            assert_identical_directories(entry1, entry2)
    # Anything present only on the second side is also a mismatch.
    for file2 in os.listdir(path2):
        assert file2 in seen, "\"{f2}\" exists in \"{p2}\" but not \"{p1}\"".format(f2=file2, p1=path1, p2=path2)
def copy_metadata(path_src, path_dst, symlink=False):
    """Copy ownership, mode, timestamps, BSD flags and xattrs from path_src
    to path_dst.  Returns True on success, False on any OS/shutil error.
    When symlink=True, operates on the link itself (lstat/lchown/lchmod)."""
    # print('copystat')
    # shutil.copystat(path_src, path_dst)
    try:
        tmp_flags = 0x0
        if symlink:
            file_stat = os.lstat(path_src)
            dbg_print('lstat: {}'.format(file_stat))
            dbg_print('lchown: {} : {} : {}'.format(path_dst, file_stat.st_uid, file_stat.st_gid))
            os.lchown(path_dst, file_stat.st_uid, file_stat.st_gid)
            dbg_print('lchmod: {}'.format(file_stat.st_mode))
            os.lchmod(path_dst, file_stat.st_mode)
        else:
            file_stat = os.stat(path_src)
            dbg_print('stat: {}'.format(file_stat))
            dbg_print('chown: {} : {} : {}'.format(path_dst, file_stat.st_uid, file_stat.st_gid))
            os.chown(path_dst, file_stat.st_uid, file_stat.st_gid)
            dbg_print('copymode')
            shutil.copymode(path_src, path_dst)
            # Unfortunately, os.utime() of Python 2 does not have the "follow_symlinks" option, so I have no idea to modify atime and mtime of a symlink itself.
            # https://stackoverflow.com/questions/48068739/how-can-i-change-atime-and-mtime-of-a-symbolic-link-from-python
            dbg_print('utime')
            os.utime(path_dst, (file_stat.st_atime, file_stat.st_mtime))
        # Collect the "protected" flag bits that are set on the source.
        if file_stat.st_flags & stat.SF_NOUNLINK:
            tmp_flags |= stat.SF_NOUNLINK
        if file_stat.st_flags & 0x80000:
            # 0x80000 means SF_RESTRICTED, but Python cannot recognize it.
            # https://github.com/pypa/virtualenv/issues/1173
            # https://bugs.python.org/issue32347
            tmp_flags |= 0x80000
        dbg_print('file_stat st_flags ^ tmp_flags: {} | {}'.format(hex(file_stat.st_flags), hex(tmp_flags)))
        # NOTE(review): tmp_flags only contains bits already set in st_flags,
        # so the XOR below clears exactly those bits — equivalent to
        # st_flags & ~tmp_flags.  Presumably intentional (strip protection
        # flags on the copy) — confirm before changing.
        if symlink:
            os.lchflags(path_dst, file_stat.st_flags ^ tmp_flags)
        else:
            os.chflags(path_dst, file_stat.st_flags ^ tmp_flags)
        extattr_src = xattr.xattr(path_src)
        extattr_src_items = dict(extattr_src.items())
        extattr_dst = xattr.xattr(path_dst)
        dbg_print('xattr src: {}'.format(extattr_src.items()))
        # com.apple.rootless is SIP-managed and cannot be copied.
        if 'com.apple.rootless' in extattr_src.keys():
            del extattr_src_items['com.apple.rootless']
        # dbg_print('xattr dst: {}'.format(extattr_dst.items()))
        dbg_print('xattr src: {}'.format(extattr_src_items))
        extattr_dst.update(extattr_src_items)
        return True
    except (IOError, OSError, shutil.Error) as err:
        # sys.exit('Error has been occurred in copy_metadata(): {}'.format(err))
        return False
def test_symlink_attrs(self):
    """xattrs set on a symlink (NOFOLLOW) must not leak onto its target."""
    # Solaris doesn't support extended attributes on symlinks
    if sys.platform == 'sunos5':
        return
    link_path = self.tempfilename + '.link'
    os.symlink(self.tempfilename, link_path)
    try:
        link_attrs = xattr.xattr(link_path, options=xattr.XATTR_NOFOLLOW)
        target_attrs = xattr.xattr(self.tempfilename)
        link_attrs['user.islink'] = b'true'
        self.assertEqual(dict(target_attrs), {})
        self.assertEqual(link_attrs['user.islink'], b'true')
    finally:
        os.remove(link_path)
def get(self, arg, head=False):
    """Serve a stored file, deriving Content-Type and original filename
    from its extended attributes; 404 on any I/O failure."""
    if not arg:
        self.redirect("/post.html")
        # BUG FIX: redirect() does not end the handler; without this return
        # the code below would try to open the 'files' directory itself.
        return
    path = os.path.join('files', arg)
    try:
        with open(path, "rb") as f:
            attrs = xattr.xattr(f)
            self.set_header(
                "Expires",
                datetime.datetime.utcnow() + datetime.timedelta(1000000))
            if 'user.Content-Type' in attrs:
                mimetype = attrs['user.Content-Type'].decode('utf-8')
            else:
                # Fall back to file(1) sniffing when no xattr was recorded.
                mimetype = Popen(
                    ["file", "-b", "--mime-type", path],
                    stdout=PIPE).communicate()[0].decode('utf8').strip()
            self.set_header("Content-Type", mimetype)
            try:
                orig_filename = attrs.get('user.filename').decode('utf-8')
                self.set_header(
                    'Content-Disposition',
                    ' inline; filename="{}"'.format(orig_filename))
            except IOError:
                pass  # no stored original filename; serve without disposition
            self.set_header('content-length', os.stat(f.fileno()).st_size)
            if head:
                self.finish()
                return
            self.write(f.read())
            self.finish()
    except IOError:
        raise tornado.web.HTTPError(404)
def read_tags(fname):
    """Return a list of Tags of the xatag xattr fields in fname."""
    attributes = xattr.xattr(fname)
    tags = []
    # no sense in reading the value if the key isn't going to be chosen
    for key in attributes:
        if not is_xatag_xattr_key(key):
            continue
        xatag_key = xattr_to_xatag_key(key)
        for value in xattr_value_to_list(attributes[key]):
            tags.append(tag.Tag(xatag_key, value))
    return tags
def set_label(filename, color_name):
    """Set the Finder label of filename to color_name."""
    # Label colors indexed by their on-disk color id (id == list index).
    colors = ['none', 'gray', 'green', 'purple', 'blue', 'yellow', 'red', 'orange']
    key = u'com.apple.FinderInfo'
    attrs = xattr(filename)
    # FinderInfo is a fixed 32-byte structure; default to all zeros if absent.
    current = attrs.copy().get(key, chr(0)*32)
    # Byte 9 carries the label; the color id is stored shifted left by one
    # bit (hence index * 2).  NOTE(review): chr()/str slicing assumes
    # Python 2 byte-string semantics — confirm before running on Python 3.
    changed = current[:9] + chr(colors.index(color_name)*2) + current[10:]
    attrs.set(key, changed)
def process_COLOR(self, inpath, colorname):
    """Set the Finder label color of inpath; reply 200 on success, 500 on error."""
    try:
        attrs = xattr.xattr(inpath)
        if u'com.apple.FinderInfo' in attrs:
            finder_attrs = attrs[u'com.apple.FinderInfo']
            flags = struct.unpack(32*'B', finder_attrs)
        else:
            # No FinderInfo yet: start from the zeroed 32-byte structure.
            flags = 32 * (0,)
        colorid = colorids[colorname]
        if colorid is None:  # idiom: identity comparison for None
            colorid = 0
        # NOTE(review): byte 9 holds the label bits.  The masking below
        # ('>> 1 & 7' followed by '& 0xF1') looks suspicious but is kept
        # as-is — confirm against the FinderInfo layout before changing.
        flag = flags[9] >> 1 & 7
        flag &= 0xF1
        flag |= (colorid << 1)
        flags = list(flags)
        flags[9] = flag
        flags = tuple(flags)
        finder_attrs = struct.pack(32*'B', *flags)
        xattr.setxattr(inpath, u'com.apple.FinderInfo', finder_attrs)
        self.send_response(200, 'OK')
        self.end_headers()
    except Exception:
        # print as a function call: valid on Python 2 and required on 3
        # (the original 'print expr' statement is Python-2-only syntax)
        print(traceback.format_exc())
        self.send_response(500, 'Internal Server Error')
        self.end_headers()
def doSinglePathChange(filename, attr_name, attr_value, read, write, delete, recursive):
    """Apply one read/write/delete xattr operation to filename, recursing
    into directories when requested.  Errors are reported to stderr and
    recorded in the global status flag."""
    def onError(e):
        # Report and mark failure without aborting the whole run.
        global status
        if not os.path.exists(filename):
            sys.stderr.write("xattr: No such file: %s\n" % (filename,))
        else:
            sys.stderr.write("xattr: " + str(e) + "\n")
        status = 1

    def notPrintable(s):
        # A value is "not printable" if it is not valid UTF-8 or embeds NULs.
        try:
            s.decode('utf-8')
            if s.find('\0') >= 0:
                return True
            return False
        except UnicodeError:
            return True

    if verbose or recursive or multiple_files:
        file_prefix = "%s: " % filename
    else:
        file_prefix = ""

    # Recurse first (depth-first), skipping symlinked directories.
    if recursive and os.path.isdir(filename) and not os.path.islink(filename):
        listdir = os.listdir(filename)
        for subfilename in listdir:
            doSinglePathChange(filename + '/' + subfilename, attr_name, attr_value, read, write, delete, recursive)

    try:
        attrs = xattr.xattr(filename, options)
    except (IOError, OSError) as e:
        # 'as e' replaces the Python-2-only 'except (...), e' form.
        onError(e)
        return
def upload(source, resource_id=None, **kwargs):
    """Uploads a file to a datastore table"""
    verbose = not kwargs['quiet']
    resource_id = resource_id or p.splitext(p.basename(source))[0]
    if '.' in resource_id:
        resource_id = resource_id.split('.')[0]
    ckan_kwargs = {k: v for k, v in kwargs.items() if k in api.CKAN_KEYS}
    if verbose:
        print('Uploading %s to datastore resource %s...' % (source, resource_id))
    # read encoding from extended attributes
    x = xattr(source)
    try:
        kwargs['encoding'] = x.get('com.ckanny.encoding')
    except IOError:
        pass
    # BUG FIX: kwargs['encoding'] is only set when the xattr read succeeds;
    # use .get() so a file without the attribute doesn't raise KeyError here.
    if verbose and kwargs.get('encoding'):
        print('Using encoding %s' % kwargs['encoding'])
    ckan = CKAN(**ckan_kwargs)
    if ckan.update_datastore(resource_id, source, **kwargs):
        print('Success! Resource %s uploaded.' % resource_id)
    else:
        sys.exit('ERROR: resource %s not uploaded.' % resource_id)
def test_delete_all_tags(file_with_tags):
    """delete_all_tags removes every xatag field but leaves foreign xattrs."""
    attrs = xattr.xattr(file_with_tags)
    delete_all_tags(file_with_tags)
    for suffix in (DEFAULT_TAG_KEY, 'artist', 'genre'):
        assert 'user.org.xatag.tags.' + suffix not in attrs.keys()
    # Non-xatag attributes must survive untouched.
    assert attrs['user.other.tag'] == 'something'
def doaccess(self, ruri, enable):
    """
    We have to set the xattr WebDAV:{http:%2F%2Ftwistedmatrix.com%2Fxml_namespace%2Fdav%2Fprivate%2F}access-disabled on the resource pointed to by the ruri. Strictly speaking only the server know how to map from a uri to a file path, so we have to cheat!

    Returns True when the xattr was changed, False when the server file
    path is unknown or the mapped file does not exist.
    """
    if self.manager.server_info.serverfilepath:
        # __uids__ URI path is actually hashed on disk: the server inserts
        # two 2-character shard directories derived from the uid.
        segments = ruri[1:].split('/')
        for ctr, segment in enumerate(segments):
            if segment == "__uids__":
                uid = segments[ctr + 1]
                segments.insert(ctr + 1, uid[0:2])
                segments.insert(ctr + 2, uid[2:4])
                break
        filepath = "/".join(segments)
        filename = os.path.join(self.manager.server_info.serverfilepath, filepath)
        if os.path.exists(filename):
            attrs = xattr.xattr(filename)
            # enable=True removes the disabling attribute; False sets it.
            if enable:
                del attrs["WebDAV:{http:%2F%2Ftwistedmatrix.com%2Fxml_namespace%2Fdav%2Fprivate%2F}caldav-access-disabled"]
            else:
                attrs["WebDAV:{http:%2F%2Ftwistedmatrix.com%2Fxml_namespace%2Fdav%2Fprivate%2F}caldav-access-disabled"] = "yes"
            return True
    return False
def mark_file_ok(path, mtime):
    """Record mtime in an xattr so an unchanged file can be skipped later."""
    # BUG FIX: without 'global', the rebinding in the except clause only
    # created a dead local variable, so xattr-less filesystems were retried
    # on every single call instead of disabling this function once.
    global mark_file_ok
    x = xattr.xattr(path)
    try:
        x.set('user.moin-pep8-tested-mtime', '%d' % mtime)
    except IOError:
        # probably not supported: replace ourselves with a no-op for the
        # remainder of the run
        mark_file_ok = lambda path, mtime: None
def __init__(self, mp):
    """Attach to a MattockFS instance mounted at mp."""
    self.context = carvpath.Context(longpathmap.LongPathMap())
    self.mountpoint = mp
    ctl_file = self.mountpoint + "/mattockfs.ctl"
    # The control file existing is the proof the filesystem is mounted.
    if not os.path.isfile(ctl_file):
        raise RuntimeError("File-system not mounted at "+mp)
    self.main_ctl = xattr.xattr(ctl_file)
def __init__(self, mp):
    """Attach to a MattockFS instance mounted at mp."""
    self.context = carvpath.Context(LongPathMap(mp))
    self.mountpoint = mp
    ctl_file = self.mountpoint + "/mattockfs.ctl"
    # The control file existing is the proof the filesystem is mounted.
    if not os.path.isfile(ctl_file):
        raise RuntimeError("File-system not mounted at " + mp)
    self.main_ctl = xattr.xattr(ctl_file)
def test_check_modified_file(integration_env, with_move):
    """Changed content is flagged modified, whether in place or moved."""
    record = Record(last_status=Status.UPDATED, updated_at=time.time() - 10,
                    sha=digest('original'), local_path='file',
                    strategy=Strategy.SYNC)
    config['records'].update({'http://file': record})
    # With a move, the changed content lives at a new path; the URL xattr
    # still identifies it as the same tracked file.
    target = 'file2' if with_move else 'file'
    with open(target, 'w') as fh:
        fh.write('changed')
    xattr(target).set(XATTR_KEY_URL, 'http://file'.encode())
    check_all()
    assert record.local_modified == True
def ExtendedAttributeLabelCheck(path, dataset_SCID):
    """Walk from path up to the project root looking for a label xattr whose
    value equals dataset_SCID.  Returns True on a match; False otherwise.
    The search stops at the FIRST ancestor that carries any label xattrs,
    even if none of them match (narrowest-match policy)."""
    cur_path = Path(path)
    LOG.debug(f'_project_path is: {_project_path}')
    LOG.debug(f'_project_path.parent is: {_project_path.parent}')
    while cur_path != _project_path.parent:
        LOG.debug(f'cur_path is: {cur_path}')
        path_attrs = xattr(cur_path)
        attr_key_list = [ e for e in path_attrs.list() if _xattr_label_base in e ]
        if attr_key_list:
            for attr in attr_key_list:
                LOG.debug(f'Checking xattr: {attr} for path: {cur_path}')
                if (path_attrs[attr]).decode('utf-8') == dataset_SCID:
                    LOG.debug(f'Matching SCID found for {path}')
                    return True
            # If we got here, we got to the end of the list of
            # matching extended attributes, but did not find a
            # matching SCID.
            #
            # Since we found matching extended attributes and
            # we should match as narrowly as possible, we need
            # to break out of the path search loop here.
            break
        cur_path = cur_path.parent
    LOG.debug(f'No matching SCIDs found for {path}')
    return False
def main(root):
    """Main function of this helper.

    Groups every file under root (minus ignored names) by the value of its
    quarantine and FinderInfo xattrs, then prints each group.

    Args:
        root: Path object to root directory.
    """
    results = {
        "com.apple.quarantine": {UNSET: []},
        "com.apple.FinderInfo": {UNSET: []},
    }
    for path in sorted(filter(lambda p: p.name not in IGN, root.glob("**/*"))):
        # Hoisted out of the attribute loop: one xattr handle per path,
        # instead of re-creating it for every attribute of interest.
        xpath = xattr.xattr(path)
        for attr, store in results.items():
            if attr in xpath:
                val = xpath.get(attr)
                # setdefault replaces the manual "if val not in store" dance
                store.setdefault(val, []).append(path)
            else:
                store[UNSET].append(path)
    for key, store in results.items():
        print()
        print(key)
        for val, paths in store.items():
            print()
            print(val)
            for path in paths:
                print(path.relative_to(root))
def keys(clazz, filename):
    'Return all the keys set for filename.'
    check.check_string(filename)
    clazz.check_file_is_readable(filename)
    # Decode each raw xattr key, then return them in sorted order.
    decoded = [clazz._decode_key(key) for key in xattr.xattr(filename).iterkeys()]
    return sorted(decoded)
def doquota(self, ruri, size):
    """
    We have to set the xattr WebDAV:{http:%2F%2Ftwistedmatrix.com%2Fxml_namespace%2Fdav%2Fprivate%2F}quota-root on the resource pointed to by the ruri. Strictly speaking only the server know how to map from a uri to a file path, so we have to cheat!

    size=None removes the quota-root attribute; any other value writes it
    as a small XML document.  Returns True when the xattr was changed.
    """
    if self.manager.server_info.serverfilepath:
        # __uids__ URI path is actually hashed on disk: the server inserts
        # two 2-character shard directories derived from the uid.
        segments = ruri[1:].split('/')
        for ctr, segment in enumerate(segments):
            if segment == "__uids__":
                uid = segments[ctr + 1]
                segments.insert(ctr + 1, uid[0:2])
                segments.insert(ctr + 2, uid[2:4])
                break
        filepath = "/".join(segments)
        filename = os.path.join(self.manager.server_info.serverfilepath, filepath)
        if os.path.exists(filename):
            attrs = xattr.xattr(filename)
            if size is None:
                del attrs["WebDAV:{http:%2F%2Ftwistedmatrix.com%2Fxml_namespace%2Fdav%2Fprivate%2F}quota-root"]
            else:
                attrs["WebDAV:{http:%2F%2Ftwistedmatrix.com%2Fxml_namespace%2Fdav%2Fprivate%2F}quota-root"] = \
                    "<?xml version='1.0' encoding='UTF-8'?>\n" + \
                    "<quota-root xmlns='http://twistedmatrix.com/xml_namespace/dav/private/'>" + \
                    str(size) + \
                    "</quota-root>"
            return True
    return False
def promote(self, cf):
    """Bump this entry's hierarchy level and persist it to the archive xattr."""
    self.hierarchy += 1
    store = os.path.join(cf.archive_store, self.job.name, self.id)
    archive = "%s.1.%s" % (store, find_ext(self.type.name))
    # Only persist if the archive file actually exists on disk.
    if os.path.exists(archive):
        attrs = xattr.xattr(archive)
        attrs['user.dardrive.hierarchy'] = str(self.hierarchy)
def test_set_all_tags(file_with_tags):
    """set_all_tags replaces existing tag fields with exactly the given tags."""
    attrs = xattr.xattr(file_with_tags)
    new_tags = [Tag('', 'another'), Tag('', 'zanother'), Tag('genre', 'awesome')]
    set_all_tags(file_with_tags, new_tags)
    assert attrs['user.org.xatag.tags.' + DEFAULT_TAG_KEY] == 'another;zanother'
    assert attrs['user.org.xatag.tags.genre'] == 'awesome'
    # The old 'artist' field was not in the new set, so it must be gone.
    assert 'user.org.xatag.tags.artist' not in attrs.keys()
def upload(source, resource_id=None, **kwargs):
    """Uploads a file to a datastore table"""
    verbose = not kwargs['quiet']
    resource_id = resource_id or p.splitext(p.basename(source))[0]

    if '.' in resource_id:
        resource_id = resource_id.split('.')[0]

    ckan_kwargs = {k: v for k, v in kwargs.items() if k in api.CKAN_KEYS}

    if verbose:
        print(
            'Uploading %s to datastore resource %s...' % (source, resource_id))

    # read encoding from extended attributes
    x = xattr(source)

    try:
        kwargs['encoding'] = x.get('com.ckanny.encoding')
    except IOError:
        pass

    # BUG FIX: kwargs['encoding'] is only set when the xattr read succeeds;
    # use .get() so a file without the attribute doesn't raise KeyError here.
    if verbose and kwargs.get('encoding'):
        print('Using encoding %s' % kwargs['encoding'])

    ckan = CKAN(**ckan_kwargs)

    if ckan.update_datastore(resource_id, source, **kwargs):
        print('Success! Resource %s uploaded.' % resource_id)
    else:
        sys.exit('ERROR: resource %s not uploaded.' % resource_id)
def set_finderinfo_color(filename, colorid):
    """ set tag color of filename to colorid
        filename: path to file
        colorid: ID of tag color in range 0 to 7
    """
    if not os.path.exists(filename):
        # BUG FIX: the f-string had no placeholder, so the message never
        # said which file was missing
        raise FileNotFoundError(f"filename {filename} not found")
    if not _MIN_FINDER_COLOR <= colorid <= _MAX_FINDER_COLOR:
        raise ValueError(f"colorid out of range {colorid}")
    attr = xattr.xattr(filename)
    try:
        finderinfo = attr.get("com.apple.FinderInfo")
        finderbits = bitstring.BitArray(finderinfo)
    except Exception:
        # narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate; any read failure means "no FinderInfo yet"
        finderbits = bitstring.BitArray(uint=0, length=256)
    # color is encoded as 3 binary bits
    bits = bitstring.BitArray(uint=colorid, length=3)
    # set color bits
    finderbits.overwrite(bits, _kCOLOR_OFFSET)
    attr.set("com.apple.FinderInfo", finderbits.bytes)
def test_delete_these_tags(file_with_tags):
    """delete_tags removes only the named values; a field disappears when
    emptied explicitly or when its last value is deleted."""
    x = xattr.xattr(file_with_tags)
    delete_tags(file_with_tags, [Tag('', 'tag4')])
    assert x['user.org.xatag.tags.' + DEFAULT_TAG_KEY] == 'tag1;tag2;tag3;tag5'
    assert x['user.org.xatag.tags.artist'] == 'The XX'
    assert x['user.org.xatag.tags.genre'] == 'indie;pop'
    # Deleting an already-absent value (tag4) is a harmless no-op.
    delete_tags(file_with_tags, [Tag('', t) for t in ['tag2','tag4','tag5']])
    assert x['user.org.xatag.tags.' + DEFAULT_TAG_KEY] == 'tag1;tag3'
    assert x['user.org.xatag.tags.artist'] == 'The XX'
    assert x['user.org.xatag.tags.genre'] == 'indie;pop'
    # Unknown keys are ignored.
    delete_tags(file_with_tags, Tag('notakey', 'tag'))
    delete_tags(file_with_tags, Tag('genre', 'pop'))
    assert x['user.org.xatag.tags.genre'] == 'indie'
    # xattr fields get deleted explicitly...
    assert 'user.org.xatag.tags.genre' in x.keys()
    delete_tags(file_with_tags, Tag('genre', ''))
    assert 'user.org.xatag.tags.genre' not in x.keys()
    # ...or by removing all of the tag values from the field
    assert 'user.org.xatag.tags.artist' in x.keys()
    assert 'user.org.xatag.tags.' + DEFAULT_TAG_KEY in x.keys()
    delete_tags(file_with_tags, [Tag('', t) for t in ['tag1','tag3']])
    delete_tags(file_with_tags, Tag('artist', 'The XX'))
    assert 'user.org.xatag.tags.artist' not in x.keys()
    assert 'user.org.xatag.tags.' + DEFAULT_TAG_KEY not in x.keys()
def test_symlink_attrs(self):
    """xattrs set on a symlink (NOFOLLOW) must not leak onto its target."""
    link_path = self.tempfilename + '.link'
    os.symlink(self.tempfilename, link_path)
    try:
        link_attrs = xattr.xattr(link_path, options=xattr.XATTR_NOFOLLOW)
        target_attrs = xattr.xattr(self.tempfilename)
        try:
            link_attrs['user.islink'] = b'true'
        except IOError:
            # Solaris, Linux don't support extended attributes on symlinks
            raise unittest.SkipTest("XATTRs on symlink not allowed"
                                    " on filesystem/platform")
        self.assertEqual(dict(target_attrs), {})
        self.assertEqual(link_attrs['user.islink'], b'true')
    finally:
        os.remove(link_path)
def get_xattr_url(local_path, default=None):
    """Return the tracked URL stored in local_path's xattr, or default."""
    try:
        url = xattr(local_path).get(XATTR_KEY_URL).decode()
    except OSError:
        # attribute missing, or the filesystem lacks xattr support
        return default
    return url
def set_user_metadata(path, user_metadata):
    """Write each metadata key/value pair as a 'user.' xattr on path.

    Raises StorageException when the object is missing or not writable."""
    attrs = xattr.xattr(path)
    try:
        # .items() instead of the Python-2-only .iteritems(): identical
        # results, and the code now runs on Python 2 and 3
        for key, value in user_metadata.items():
            attrs['user.%s' % key] = value
    except IOError:
        raise StorageException('object not modifiable or not found')
def get(self, arg, head=False):
    """Serve a stored file, deriving Content-Type and original filename
    from its extended attributes; 404 on any I/O failure."""
    if not arg:
        self.redirect("/post.html")
        # BUG FIX: redirect() does not end the handler; without this return
        # the code below would try to open the 'files' directory itself.
        return
    path = os.path.join('files',arg)
    try:
        with open(path,"rb") as f:
            attrs = xattr.xattr(f)
            self.set_header("Expires", datetime.datetime.utcnow() + datetime.timedelta(1000000))
            if 'user.Content-Type' in attrs:
                mimetype = attrs['user.Content-Type'].decode('utf-8')
            else:
                # Fall back to file(1) sniffing when no xattr was recorded.
                mimetype = Popen(["file","-b","--mime-type", path], stdout=PIPE).communicate()[0].decode('utf8').strip()
            self.set_header("Content-Type", mimetype)
            try:
                orig_filename = attrs.get('user.filename').decode('utf-8')
                self.set_header('Content-Disposition',' inline; filename="{}"'.format(orig_filename))
            except IOError:
                pass  # no stored original filename; serve without disposition
            self.set_header('content-length',os.stat(f.fileno()).st_size)
            if head:
                self.finish()
                return
            self.write(f.read())
            self.finish()
    except IOError:
        raise tornado.web.HTTPError(404)
def __init__(self, fpath): self.fpath = fpath # can raise exceptions self.st = os.stat(fpath) self.xattr = xattr(fpath) self._hash = None self.hash_from_cache = None
def update_manifest(self):
    """Rebuild the package manifest from the on-disk tree and per-file
    Dublin Core xattrs, pruning entries for files that no longer exist."""
    package = rdflib.URIRef(self._path)
    self._manifest += ((package, RDF.type, OXDS.Grouping),)
    seen_uris = set()
    for base, dirs, files in os.walk(self._path):
        for filename in files + dirs:
            # NOTE(review): 'not base' is never true for os.walk output
            # (base is always a non-empty path), so manifest.rdf is never
            # actually skipped — the guard probably meant the walk root;
            # confirm intent before changing.
            if not base and filename == 'manifest.rdf':
                continue
            # URIs use '/' as separators, and we don't want to trust that the OS
            # uses the same separator.
            uri = rdflib.URIRef('/'.join(os.path.split(os.path.join(base, filename))))
            filename = os.path.join(self._path, base, filename)
            seen_uris.add(uri)
            if os.path.isdir(filename):
                self._manifest.add((uri, RDF.type, FOAF.Document))
            xattr_data = dict(xattr.xattr(filename))
            self._update_field(uri, DCTERMS['title'], xattr_data.get('user.dublincore.title'))
            self._update_field(uri, DCTERMS.description, xattr_data.get('user.dublincore.description'))
    # Drop manifest entries whose files have disappeared from disk.
    for uri in self._manifest.subjects(RDF.type, FOAF.Document):
        if uri not in seen_uris:
            self._remove_cbd(uri)
    # print as a function call: parses on Python 2 and 3 alike, replacing
    # the Python-2-only 'print "Updating"' statement
    print("Updating")
    self._update_field(package, DCTERMS['title'], self.title)
    self._update_field(package, DCTERMS.description, self.description)
    self._update_field(package, DCTERMS.identifier, self.identifier)
def get_filemetadata(self, channel: str, src: str):
    """Return (size, mtime, etag) for a channel file.

    The etag is an md5 of the file content, cached in the 'user.etag'
    xattr when the filesystem supports it; otherwise (or when xattr
    writes fail) it degrades to an md5 of "mtime-size"."""
    filepath = path.abspath(path.join(self.channels_dir, channel, src))
    if not path.exists(filepath):
        raise FileNotFoundError()
    stat_res = os.stat(filepath)
    mtime = stat_res.st_mtime
    msize = stat_res.st_size
    xattr_failed = False
    if has_xattr:
        try:
            # xattr will fail here if executing on e.g. the tmp filesystem
            attrs = xattr.xattr(filepath)
            # Probe write permission/support before trusting the cache.
            attrs['user.testifpermissionok'] = b''
            try:
                etag = attrs['user.etag'].decode('ascii')
            except KeyError:
                # calculate md5 sum
                with self.fs.open(filepath, 'rb') as f:
                    etag = hashlib.md5(f.read()).hexdigest()
                attrs['user.etag'] = etag.encode('ascii')
        except OSError:
            xattr_failed = True
    if not has_xattr or xattr_failed:
        # Fallback etag: cheap, content-independent fingerprint.
        etag_base = str(mtime) + "-" + str(msize)
        etag = hashlib.md5(etag_base.encode()).hexdigest()
    return (msize, mtime, etag)
def fetch(resource_id, **kwargs):
    """Downloads a filestore resource"""
    verbose = not kwargs['quiet']
    filepath = kwargs['destination']
    name_from_id = kwargs.get('name_from_id')
    chunksize = kwargs.get('chunksize_bytes')
    ckan_kwargs = {k: v for k, v in kwargs.items() if k in api.CKAN_KEYS}
    ckan = CKAN(**ckan_kwargs)
    try:
        r = ckan.fetch_resource(resource_id)
    except api.NotAuthorized as err:
        sys.exit('ERROR: %s\n' % str(err))
    else:
        fkwargs = {
            'headers': r.headers,
            'name_from_id': name_from_id,
            'resource_id': resource_id}
        filepath = tup.make_filepath(filepath, **fkwargs)
        tio.write(filepath, r.iter_content, chunksize=chunksize)
        # save encoding to extended attributes
        attrs = xattr(filepath)
        if r.encoding:
            if verbose:
                print('saving encoding %s to extended attributes' % r.encoding)
            attrs['com.ckanny.encoding'] = r.encoding
        print(filepath)
def doSinglePathChange(filename, attr_name, attr_value, read, write, delete, recursive):
    """Apply one read/write/delete xattr operation to filename, recursing
    into directories when requested.  Errors are reported to stderr and
    recorded in the global status flag."""
    def onError(e):
        # Report and mark failure without aborting the whole run.
        global status
        if not os.path.exists(filename):
            sys.stderr.write("xattr: No such file: %s\n" % (filename,))
        else:
            sys.stderr.write("xattr: " + str(e) + "\n")
        status = 1

    def hasNulls(s):
        # True when the value embeds NUL bytes or cannot be decoded at all.
        try:
            if s.find('\0') >= 0:
                return True
            return False
        except UnicodeDecodeError:
            return True

    if verbose or recursive or multiple_files:
        file_prefix = "%s: " % filename
    else:
        file_prefix = ""

    # Recurse first (depth-first), skipping symlinked directories.
    if recursive and os.path.isdir(filename) and not os.path.islink(filename):
        listdir = os.listdir(filename)
        for subfilename in listdir:
            doSinglePathChange(filename + '/' + subfilename, attr_name, attr_value, read, write, delete, recursive)

    try:
        attrs = xattr.xattr(filename, options)
    except (IOError, OSError) as e:
        # 'as e' replaces the Python-2-only 'except (...), e' form.
        onError(e)
        return
def test_purge_lock(self):
    """Toggling X-Hpss-Purgelock-Status must flip the purgelock xattr."""
    self.test_file.write(data='test',
                         hdrs={'X-Hpss-Purgelock-Status': 'true',
                               'X-Hpss-Class-Of-Service-Id': '3'})
    test_file_name = os.path.join(self.hpss_dir, self.account.name,
                                  self.container.name, 'testfile')
    self.assertEqual(
        dict(xattr.xattr(test_file_name))['system.hpss.purgelock'], '1')
    self.test_file.post(hdrs={'X-Hpss-Purgelock-Status': 'false'})
    self.assertEqual(
        dict(xattr.xattr(test_file_name))['system.hpss.purgelock'], '0')
def __init__(self,path):
    """Load any previously-stored sr_ metadata for path, from an NTFS
    alternate data stream and/or 'user.sr_*' extended attributes."""
    # These module-level capability flags may be turned off here once we
    # learn that the feature is disabled.
    global supports_alternate_data_streams
    global supports_extended_attributes
    self.path = path
    self.x = {}
    self.dirty = False
    if xattr_disabled:
        supports_alternate_data_streams=False
        supports_extended_attributes=False
        return
    if supports_alternate_data_streams:
        self.ads = ADS(path)
        s = list(self.ads)
        if STREAM_NAME in s:
            # The whole metadata dict is stored as one JSON stream.
            self.x = json.loads( self.ads.get_stream_content(STREAM_NAME).decode('utf-8') )
    if supports_extended_attributes:
        d = xattr.xattr(path)
        for i in d:
            # Only our own namespaced attributes; strip the prefix for keys.
            if not i.startswith('user.sr_'):
                continue
            k= i.replace('user.sr_','')
            v= d[i].decode('utf-8')
            self.x[k] = v
def upgradeCalendarHome(homePath, directory):
    """Upgrade every calendar collection inside one calendar home.

    Per-calendar failures set errorOccurred and processing continues;
    any unexpected exception is logged and re-raised."""
    errorOccurred = False
    log.debug("Upgrading calendar home: %s" % (homePath,))
    try:
        for cal in os.listdir(homePath):
            calPath = os.path.join(homePath, cal)
            if not os.path.isdir(calPath):
                # Skip non-directories; these might have been uploaded by a
                # random DAV client, they can't be calendar collections.
                continue
            if cal == 'notifications':
                # Delete the old, now obsolete, notifications directory.
                rmdir(calPath)
                continue
            log.debug("Upgrading calendar: %s" % (calPath,))
            if not upgradeCalendarCollection(calPath, directory):
                errorOccurred = True
            # Change the calendar-free-busy-set xattrs of the inbox to the
            # __uids__/<guid> form
            if cal == "inbox":
                for attr, value in xattr.xattr(calPath).iteritems():
                    if attr == "WebDAV:{urn:ietf:params:xml:ns:caldav}calendar-free-busy-set":
                        value = updateFreeBusySet(value, directory)
                        if value is not None:
                            # Need to write the xattr back to disk
                            xattr.setxattr(calPath, attr, value)
    except Exception as e:
        # 'as e' replaces the Python-2-only 'except Exception, e' form.
        log.error("Failed to upgrade calendar home %s: %s" % (homePath, e))
        raise
def __init__(self, fname, tz_aware=False):
    """Create an OSXMetaData object to access file metadata
    fname: filename to operate on
    timezone_aware: bool; if True, date/time attributes will return timezone aware datetime.dateime attributes; if False (default) date/time attributes will return timezone naive objects
    """
    self._fname = pathlib.Path(fname)
    self._posix_name = self._fname.resolve().as_posix()
    self._tz_aware = tz_aware
    if not self._fname.exists():
        raise FileNotFoundError("file does not exist: ", fname)
    self._attrs = xattr.xattr(self._fname)
    # create property classes for the multi-valued attributes
    # tags get special handling due to color labels
    # ATTRIBUTES contains both long and short names, want only the short names (attribute.name)
    for name in set([attribute.name for attribute in ATTRIBUTES.values()]):
        attribute = ATTRIBUTES[name]
        if attribute.class_ not in [str, float, datetime.datetime]:
            # super().__setattr__ bypasses this class's own __setattr__
            # so these helper objects are stored without interception.
            super().__setattr__(
                name, attribute.class_(attribute, self._attrs, self))
    # Done with initialization
    self.__init = True
def compute_sumstr(self, path, fsiz):
    """Return the checksum string ('<flag>,<value>') for path, preferring
    a checksum cached in 'user.sr_sum' xattrs when it is still fresh
    relative to the message mtime; otherwise compute it from content."""
    sumstr = ''
    if supports_extended_attributes:
        try:
            attr = xattr.xattr(path)
            if 'user.sr_sum' in attr:
                if 'user.sr_mtime' in attr:
                    # Cached sum is trusted only if its recorded mtime is
                    # not older than the message's mtime.
                    if attr['user.sr_mtime'].decode(
                            "utf-8") >= self.msg.headers['mtime']:
                        self.logger.debug("sum set by xattr")
                        sumstr = attr['user.sr_sum'].decode("utf-8")
                        return sumstr
                else:
                    # No mtime recorded yet: stamp it now and trust the sum.
                    xattr.setxattr(
                        path, 'user.sr_mtime',
                        bytes(self.msg.headers['mtime'], "utf-8"))
                    self.logger.debug("sum set by xattr")
                    sumstr = attr['user.sr_sum'].decode("utf-8")
                    return sumstr
        except:
            # NOTE(review): bare except — deliberately best-effort here
            # (fall through to recomputing), but it also swallows ^C and
            # KeyError from missing headers; consider narrowing.
            pass
    self.logger.debug("sum set by compute_sumstr")
    sumflg = self.sumflg
    if sumflg[:2] == 'z,' and len(sumflg) > 2:
        sumstr = sumflg
    else:
        # Unknown flags default to 'd' (md5 of data).
        if not sumflg[0] in ['0', 'd', 'n', 's', 'z']:
            sumflg = 'd'
        self.set_sumalgo(sumflg)
        sumalgo = self.sumalgo
        sumalgo.set_path(path)
        # compute checksum
        if sumflg in ['d', 's']:
            fp = open(path, 'rb')
            i = 0
            while i < fsiz:
                buf = fp.read(self.bufsize)
                if not buf:
                    break
                sumalgo.update(buf)
                i += len(buf)
            fp.close()
        # setting sumstr
        checksum = sumalgo.get_value()
        sumstr = '%s,%s' % (sumflg, checksum)
    return sumstr
def set_xattr(path, key, value):
    """Set the value of a specified xattr.

    If xattrs aren't supported by the file-system, we skip setting the value.
    """
    entry_xattr = xattr.xattr(path)
    # Keys are namespaced before writing; values are stored as strings.
    entry_xattr.set(_make_namespaced_xattr_key(key), str(value))
def getxattr(self, path, name, foo):
    """Get extended attributes, we just try to pass shit through.

    This is rather naive but seems to work for facl (tested by getfacl).
    """
    logger.info("getxattr -- path:{} xattr:{} foo:{}".format(path, name, foo))
    if isRarDirPath(path):  # is inside a rar file
        # consistency fix: use the module 'logger' like every other call
        # here, instead of the root logger via 'logging.debug'
        logger.debug("getxattr: we need to check inside rar archive for path " + str(path))
        (rar_file, rar_path) = rarDirSplit(path)
        logger.debug("getxattr: returning xattr for path " + str(rar_file))
        xa = xattr.xattr('.' + rar_file)
        return xa.get(name)
    # normal file outside of any rar file
    logger.debug("getxattr: returning xattr for path " + str(path))
    xa = xattr.xattr('.' + path)
    return xa.get(name)
def must_read_xattr(path):
    """ Read all extended attributes of a »path«.

    NOTE: This will only work on non-tmpfs mounts.
    See create_special_fs for a workaround.
    """
    full_path = os.path.join(TESTDIR_NAME, path)
    return dict(xattr.xattr(full_path).items())
def test_change_cos(self):
    """Changing the class-of-service header must update the cos xattr."""
    self.test_file.write(data='asdfasdf',
                         hdrs={'X-Hpss-Class-Of-Service-Id': '3'})
    test_file_name = os.path.join(self.hpss_dir, self.account.name,
                                  self.container.name, 'testfile')
    # It takes a long time for HPSS to get around to it.
    time.sleep(30)
    self.assertEqual(
        dict(xattr.xattr(test_file_name))['system.hpss.cos'], '3')
    self.test_file.post(hdrs={'X-HPSS-Class-Of-Service-ID': '1'})
    time.sleep(30)
    self.assertEqual(
        dict(xattr.xattr(test_file_name))['system.hpss.cos'], '1')
def test_check_moved_file(integration_env):
    """A byte-identical copy at a new path is detected as a move, not an edit."""
    record = Record(last_status=Status.UPDATED, updated_at=time.time() - 10,
                    sha=digest('original'), local_path='file',
                    strategy=Strategy.SYNC)
    config['records'].update({'http://file': record})
    with open('file2', 'w') as fh:
        fh.write('original')
    xattr('file2').set(XATTR_KEY_URL, 'http://file'.encode())
    changes = check_all()
    assert changes == [('http://file', 'file', 'file2')]
    assert record.local_path == 'file2'
    assert record.local_modified == False
    assert record.strategy == Strategy.SYNC
def read(self, file, password):
    """Decrypt data hidden in the NTFS creation-time attribute of file."""
    attrs = xattr.xattr(file)
    try:
        # The payload lives in the last 4 bytes of the creation timestamp.
        crtime = attrs.get("system.ntfs_crtime")
        return self.decrypt(crtime[-4:], password)
    except IOError:
        print("Cannot access NTFS attributes. Are you on an NTFS volume?",
              file=sys.stderr)