# NOTE(review): this chunk was flattened onto a single physical line in the
# file; formatting is reconstructed here with tokens unchanged. It is the
# interior of a larger function (its `def` line is above this view) and is cut
# off mid-call at the end, so it cannot be rewritten safely.

# Suppress the reference-number lookup URL when the caller asked to skip refnos.
if skip_refno:
    refno_url = ""

# Collect every tarball/gzip to process, from up to five alternative sources.
tars_and_gzips = []
if tarball != None:
    # A single explicitly named tarball.
    tars_and_gzips.append(tarball)
if tdir != None:
    # Scan a directory for files whose type matches any of these descriptions.
    filetypes = ['gzip compressed', 'tar archive', 'Tar archive'] # FIXME
    write_message('Currently processing any tarballs in ' + tdir)
    tars_and_gzips.extend(get_list_of_all_matching_files(tdir, filetypes))
if infile != None:
    # An input list file: parse it and download the referenced tarballs to sdir.
    tars_and_gzips.extend(parse_and_download(infile, sdir))
if recids != None:
    # Record IDs: fetch the tarballs attached to those records.
    tars_and_gzips.extend(tarballs_by_recids(recids, sdir))
if arXiv != None:
    # A single arXiv identifier.
    tars_and_gzips.extend(tarballs_by_arXiv_id([arXiv], sdir))

# Nothing gathered from any source -> report and hard-exit with error status.
if tars_and_gzips == []:
    write_message('Error: no tarballs to process!')
    sys.exit(1)

if squash:
    # "Squash" mode: accumulate all output XML into one timestamped temp file;
    # write the XML prolog and the opening <collection> tag now, then close the
    # descriptor — presumably reopened in append mode later; verify downstream.
    squash_fd, squash_path = mkstemp(suffix="_" + time.strftime("%Y%m%d%H%M%S") + ".xml", \
                                     prefix="plotextractor_", dir=sdir)
    os.write(squash_fd, '<?xml version="1.0" encoding="UTF-8"?>\n<collection>\n')
    os.close(squash_fd)

# NOTE(review): squash_path is only bound above when `squash` is truthy, yet it
# is passed below unconditionally — presumably initialised earlier, outside
# this view; confirm.
for tarball in tars_and_gzips:
    # Call truncated in the visible source — the remaining keyword args are
    # lost past the trailing line continuation.
    process_single(tarball, sdir=sdir, xtract_text=xtract_text, \
                   upload_plots=upload_plots, force=force, squash=squash_path, \
# NOTE(review): this chunk was flattened onto a single physical line in the
# file; formatting is reconstructed here with tokens unchanged. It appears to
# be a later revision of an almost identical chunk earlier in the file (extra
# tarballs_by_recids arguments, double-quoted strings) and is cut off just
# inside the final loop, so it cannot be rewritten safely.

# Suppress the reference-number lookup URL when the caller asked to skip refnos.
if skip_refno:
    refno_url = ""

# Collect every tarball/gzip to process, from up to five alternative sources.
tars_and_gzips = []
if tarball != None:
    # A single explicitly named tarball.
    tars_and_gzips.append(tarball)
if tdir != None:
    # Scan a directory for files whose type matches any of these descriptions.
    filetypes = ["gzip compressed", "tar archive", "Tar archive"] # FIXME
    write_message("Currently processing any tarballs in " + tdir)
    tars_and_gzips.extend(get_list_of_all_matching_files(tdir, filetypes))
if infile != None:
    # An input list file: parse it and download the referenced tarballs to sdir.
    tars_and_gzips.extend(parse_and_download(infile, sdir))
if recids != None:
    # Record IDs: fetch tarballs attached to those records, narrowed by the
    # requested docname/doctype/docformat selectors.
    tars_and_gzips.extend(tarballs_by_recids(recids, sdir, with_docname, with_doctype, with_docformat))
if arXiv != None:
    # A single arXiv identifier.
    tars_and_gzips.extend(tarballs_by_arXiv_id([arXiv], sdir))

# Nothing gathered from any source -> report and hard-exit with error status.
if tars_and_gzips == []:
    write_message("Error: no tarballs to process!")
    sys.exit(1)

if squash:
    # "Squash" mode: accumulate all output XML into one timestamped temp file;
    # write the XML prolog and the opening <collection> tag now, then close the
    # descriptor — presumably reopened in append mode later; verify downstream.
    squash_fd, squash_path = mkstemp(
        suffix="_" + time.strftime("%Y%m%d%H%M%S") + ".xml", prefix="plotextractor_", dir=sdir
    )
    os.write(squash_fd, '<?xml version="1.0" encoding="UTF-8"?>\n<collection>\n')
    os.close(squash_fd)

for tarball in tars_and_gzips:
    # Loop body truncated in the visible source after this initialisation.
    recid = None