def main():
    """Command-line entry point: list objects held in a Fedora 4 repository.

    Reads seed URIs from stdin (one per line) and prints each discovered
    object URI, optionally with its title (-l) and optionally traversing
    the given predicates recursively (-R).
    """
    parser = argparse.ArgumentParser(description='Object lister for Fedora 4.')
    # Path to the repo config (endpoint, relpath, credentials, and WebAC paths)
    parser.add_argument('-r', '--repo',
                        help='Path to repository configuration file.',
                        action='store',
                        required=True)
    # long mode prints more than just the URIs (name modeled after ls -l)
    parser.add_argument('-l', '--long',
                        help='Display additional information besides the URI',
                        action='store_true')
    parser.add_argument('-R', '--recursive',
                        help='List additional objects found by traversing the given predicate(s)',
                        action='store')
    args = parser.parse_args()

    # Logging setup: timestamped logfile plus console output on stderr,
    # so stdout stays clean for the listed URIs
    with open('config/logging.yml', 'r') as config_stream:
        log_config = yaml.safe_load(config_stream)
    timestamp = datetime.utcnow().strftime('%Y%m%d%H%M%S')
    log_config['handlers']['file']['filename'] = 'logs/list.py.{0}.log'.format(timestamp)
    log_config['handlers']['console']['stream'] = 'ext://sys.stderr'
    logging.config.dictConfig(log_config)

    # The repository client is driven entirely by the required config file
    with open(args.repo, 'r') as repo_stream:
        fcrepo = Repository(yaml.safe_load(repo_stream))
    logger.info('Loaded repo configuration from {0}'.format(args.repo))

    # Turn the comma-separated CURIE list into rdflib terms for traversal
    if args.recursive is None:
        args.predicates = []
    else:
        nsm = namespaces.get_manager()
        args.predicates = [from_n3(p, nsm=nsm) for p in args.recursive.split(',')]
        logger.info(
            'Listing will traverse the following predicates: {0}'.format(
                ', '.join([p.n3() for p in args.predicates])))

    # One seed URI per stdin line; each may expand to many objects
    for line in sys.stdin:
        for (uri, graph) in fcrepo.recursive_get(line.rstrip('\n'),
                                                 traverse=args.predicates):
            if args.long:
                print("{0} {1}".format(uri, get_title_string(graph)))
            else:
                print(uri)
def graph(self):
    """Return the RDF graph describing this article.

    Extends the superclass graph with title, issue membership, bibo
    typing, and (when known) the start/end page numbers.
    """
    g = super(Article, self).graph()
    g.namespace_manager = namespaces.get_manager(g)
    # Triples asserted for every article
    g.add((self.uri, dcterms.title, Literal(self.title)))
    g.add((self.uri, pcdm.ns.memberOf, self.issue.uri))
    g.add((self.uri, rdf.type, bibo.Article))
    # Page range is optional; only assert the values we actually have
    if self.start_page is not None:
        g.add((self.uri, bibo.pageStart, Literal(self.start_page)))
    if self.end_page is not None:
        g.add((self.uri, bibo.pageEnd, Literal(self.end_page)))
    return g
def graph(self):
    """Return the RDF graph describing this NDNP page.

    Extends the superclass graph with title, issue membership, NDNP
    typing, and the optional page number / frame sequence attributes.
    """
    g = super(Page, self).graph()
    g.namespace_manager = namespaces.get_manager(g)
    # Triples asserted for every page
    g.add((self.uri, dcterms.title, Literal(self.title)))
    g.add((self.uri, pcdm.ns.memberOf, self.issue.uri))
    g.add((self.uri, rdf.type, ndnp.Page))
    # Optional metadata: these attributes may simply not exist on the object
    if hasattr(self, 'number'):
        g.add((self.uri, ndnp.number, Literal(self.number)))
    if hasattr(self, 'frame'):
        g.add((self.uri, ndnp.sequence, Literal(self.frame)))
    return g
def graph(self):
    """Assemble this object's RDF graph from links, fragments, and extras."""
    g = Graph()
    g.namespace_manager = namespaces.get_manager(g)
    # One triple per outbound link to a related object
    for (predicate, target) in self.linked_objects:
        g.add((self.uri, predicate, target.uri))
    # Union in the graph of each embedded fragment object
    for fragment in self.fragments:
        g = g + fragment.graph()
    # Any caller-supplied extra triples are unioned in last
    return g + self.extra
def graph(self):
    """Return the RDF graph of required and optional issue metadata."""
    g = super(Issue, self).graph()
    g.namespace_manager = namespaces.get_manager(g)
    # Required metadata asserted for every issue
    g.add((self.uri, dcterms.title, Literal(self.title)))
    g.add((self.uri, dc.date, Literal(self.date)))
    g.add((self.uri, rdf.type, bibo.Issue))
    # Optional metadata: these attributes may simply not exist on the object
    if hasattr(self, 'volume'):
        g.add((self.uri, bibo.volume, Literal(self.volume)))
    if hasattr(self, 'issue'):
        g.add((self.uri, bibo.issue, Literal(self.issue)))
    if hasattr(self, 'edition'):
        g.add((self.uri, bibo.edition, Literal(self.edition)))
    return g
def graph(self):
    """Return the RDF graph for this file, typed by filename extension."""
    g = super(File, self).graph()
    g.namespace_manager = namespaces.get_manager(g)
    g.add((self.uri, dcterms.title, Literal(self.title)))
    g.add((self.uri, dcterms.type, dcmitype.Text))
    # Pixel dimensions are only present for files where they are known
    if self.width is not None:
        g.add((self.uri, ebucore.width, Literal(self.width)))
    if self.height is not None:
        g.add((self.uri, ebucore.height, Literal(self.height)))
    # Map the filename extension to the matching PCDM Use class;
    # only the first matching suffix is applied (same as the elif chain)
    suffix_to_type = (
        ('.tif', pcdmuse.PreservationMasterFile),
        ('.jp2', pcdmuse.IntermediateFile),
        ('.pdf', pcdmuse.ServiceFile),
        ('.xml', pcdmuse.ExtractedText),
    )
    for suffix, use_class in suffix_to_type:
        if self.basename.endswith(suffix):
            g.add((self.uri, rdf.type, use_class))
            break
    return g
def graph(self):
    """Return the superclass graph with the fabio MetadataDocument type added."""
    g = super(MetadataFile, self).graph()
    g.namespace_manager = namespaces.get_manager(g)
    g.add((self.uri, rdf.type, fabio.MetadataDocument))
    return g
def graph(self):
    """Return the superclass graph with fabio typing and a title added."""
    g = super(IssueMetadata, self).graph()
    g.namespace_manager = namespaces.get_manager(g)
    g.add((self.uri, rdf.type, fabio.Metadata))
    g.add((self.uri, dcterms.title, Literal(self.title)))
    return g
def main():
    """Parse args and handle options for the Fedora 4 delete tool.

    Deletes the URIs given on the command line and/or listed in a file,
    optionally traversing predicates (-R), simulating only (-d), or just
    pinging the repository (-p).
    """
    parser = argparse.ArgumentParser(description='Delete tool for Fedora 4.')
    # Path to the repo config (endpoint, relpath, credentials, and WebAC paths)
    parser.add_argument('-r', '--repo',
                        help='Path to repository configuration file.',
                        action='store',
                        required=True)
    # Just ping the repository to verify the connection
    parser.add_argument('-p', '--ping',
                        help='Check the connection to the repository and exit.',
                        action='store_true')
    parser.add_argument('-R', '--recursive',
                        help='Delete additional objects found by traversing the given predicate(s)',
                        action='store')
    parser.add_argument('-d', '--dryrun',
                        help='Simulate a delete without modifying the repository',
                        action='store_true')
    parser.add_argument('-f', '--file',
                        help='File containing a list of URIs to delete',
                        action='store')
    parser.add_argument('uris', nargs='*',
                        help='Zero or more repository URIs to be deleted.')
    args = parser.parse_args()

    print_header()

    # configure logging with a timestamped logfile
    with open('config/logging.yml', 'r') as configfile:
        logging_config = yaml.safe_load(configfile)
    logfile = 'logs/delete.py.{0}.log'.format(
        datetime.utcnow().strftime('%Y%m%d%H%M%S'))
    logging_config['handlers']['file']['filename'] = logfile
    logging.config.dictConfig(logging_config)

    # Load required repository config file and create repository object
    with open(args.repo, 'r') as repoconfig:
        fcrepo = Repository(yaml.safe_load(repoconfig))
    logger.info('Loaded repo configuration from {0}'.format(args.repo))

    # "--ping" tests repository connection and exits
    if args.ping:
        test_connection(fcrepo)
        sys.exit(0)

    if args.recursive is not None:
        logger.info('Recursive delete enabled')
        manager = namespaces.get_manager()
        args.predicates = [from_n3(p, nsm=manager)
                           for p in args.recursive.split(',')]
        logger.info(
            'Deletion will traverse the following predicates: {0}'.format(
                ', '.join([p.n3() for p in args.predicates])))
    else:
        # FIX: always define args.predicates (the list tool does the same);
        # previously a non-recursive run left the attribute unset, which
        # would break any downstream access to args.predicates.
        args.predicates = []

    test_connection(fcrepo)

    if args.dryrun:
        # FIX: this log message was a string literal broken across a
        # physical line (a syntax error); rejoined onto one line.
        logger.info('Dry run enabled, no actual deletions will take place')

    try:
        if args.file is not None:
            with open(args.file, 'r') as uri_list:
                delete_items(fcrepo, uri_list, args)
        elif args.uris is not None:
            delete_items(fcrepo, args.uris, args)
    except RESTAPIException as e:
        # FIX: include the underlying REST error so the failure is
        # diagnosable from the log (previously `e` was bound but unused)
        logger.error(
            'Unable to commit or rollback transaction, aborting: {0}'.format(e))
        sys.exit(1)

    print_footer()