def __init__(self, description, epilog, version):
    """Build the argument parser for this tool.

    Drops the inherited optional ``--config-file`` and re-registers it as a
    required option, adds repeatable schema/table filters, dry-run/verbose
    flags, and a positional catalog number, and defaults ``host`` to this
    machine's network name.
    """
    BaseCLI.__init__(self, description, epilog, version)
    # The base class registers --config-file as optional; we need it required.
    self.remove_options(['--config-file'])
    parser = self.parser
    parser.add_argument('--config-file', metavar='<file>', help="Path to a configuration file.", required=True)
    # action='append': each -s / -t occurrence accumulates into a list.
    parser.add_argument('-s', '--schema', help="schema name", default=None, action='append')
    parser.add_argument('-t', '--table', help="table name", default=None, action='append')
    parser.add_argument('-n', '--dryrun', help="dryrun", action="store_true")
    parser.add_argument('-v', '--verbose', help="verbose", action="store_true")
    parser.add_argument('catalog', help="catalog number", type=int)
    # Default --host to the local hostname reported by the platform module.
    parser.set_defaults(host=platform.uname()[1])
def main():
    """Launch the DERIVA Authentication Agent Qt application.

    Configures global Qt settings, parses CLI options (including the
    ``--cookie-persistence`` switch added here), loads the optional
    configuration file, shows the auth window, and returns the Qt
    event-loop exit code.
    """
    sys.excepthook = excepthook
    # Force a uniform look independent of the desktop environment.
    QApplication.setDesktopSettingsAware(False)
    QApplication.setStyle(QStyleFactory.create("Fusion"))
    app = QApplication(sys.argv)
    app.setAttribute(QtCore.Qt.AA_UseHighDpiPixmaps)
    sys.stderr.write("\n")
    cli = BaseCLI(
        "DERIVA Authentication Agent",
        "For more information see: https://github.com/informatics-isi-edu/deriva-qt",
        VERSION)
    cli.parser.add_argument(
        "--cookie-persistence",
        action="store_true",
        help="Enable cookie and local storage persistence for QtWebEngine.")
    args = cli.parse_cli()
    # Only read a config when one was given; never auto-create a default here.
    config = read_config(args.config_file, create_default=False) if args.config_file else None
    window = AuthWindow(config, args.credential_file,
                        cookie_persistence=args.cookie_persistence)
    window.show()
    return app.exec_()
def __init__(self, description, epilog):
    """Build the argument parser for this tool.

    Removes the inherited ``--host`` and ``--config-file`` options in favor
    of positional ``host``/``config``/``path`` arguments, adds a ``--catalog``
    option, and accepts trailing ``key=value`` pairs for string interpolation
    in the configuration file.
    """
    BaseCLI.__init__(self, description, epilog, __version__)
    # host and config are positional (required) in this CLI, so drop the
    # optional variants inherited from BaseCLI.
    self.remove_options(['--host', '--config-file'])
    add = self.parser.add_argument
    add("--catalog", default=1, metavar="<1>", help="Catalog number. Default: 1")
    add('host', default='localhost', metavar='<host>', help="Fully qualified host name.")
    add('config', metavar='<config file>', help="Path to a configuration file.")
    add("path", metavar="<output dir>", help="Path to an output directory.")
    # REMAINDER + KeyValuePairArgs: swallow all trailing key=value tokens.
    add("kwargs",
        metavar="[key=value key=value ...]",
        nargs=argparse.REMAINDER,
        action=KeyValuePairArgs,
        help="Variable length of whitespace-delimited key=value pair arguments used for "
             "string interpolation in specific parts of the configuration file. "
             "For example: key1=value1 key2=value2")
def main():
    """Entry point for the annotation rollback tool.

    Parses host/catalog/snapshot options, obtains stored credentials for the
    host, and rolls the catalog's annotations back to the given snapshot.
    """
    cli = BaseCLI("annotation rollback tool", None, version=__version__, hostname_required=True)
    cli.parser.add_argument("--catalog", default=1, metavar="<1>", help="Catalog number. Default: 1")
    # Fixed: metavar was missing its closing bracket ("<snapshot ID").
    cli.parser.add_argument("--snapshot", metavar="<snapshot ID>",
                            help="Catalog snapshot ID. Example: 2QG-VWP6-0YG0")
    args = cli.parse_cli()
    credential = get_credential(args.host, args.credential_file)
    rollback_annotation(args.host, args.catalog, snaptime=args.snapshot, credential=credential)
def __init__(self, uploader, description, epilog, cookie_persistence=True, window_icon=":/images/upload.png"):
    """Initialize the upload GUI CLI wrapper.

    ``uploader`` must be a DerivaUpload subclass; its version string is used
    for the CLI version banner. ``cookie_persistence`` and ``window_icon``
    are stored for later use by the GUI.
    """
    # Guard clause: fail fast on an unusable uploader class.
    if not issubclass(uploader, DerivaUpload):
        raise TypeError("DerivaUpload subclass required")
    BaseCLI.__init__(self, description, epilog, uploader.getVersion())
    # Stash construction parameters for the GUI to consume later.
    self.window_icon = window_icon
    self.cookie_persistence = cookie_persistence
    self.uploader = uploader
def create_parser(description):
    """Return a BaseCLI configured for project-specific config-driven runs.

    Swaps the inherited optional ``--config-file`` for a positional
    ``config_file`` argument and adds ``--catalog-id`` and
    ``--scratch-directory`` options.
    """
    cli = BaseCLI(description, None, 1, hostname_required=True)
    # The config file is mandatory here, so make it positional.
    cli.remove_options(['--config-file'])
    add = cli.parser.add_argument
    add('config_file', help='project-specific config file')
    add('--catalog-id', type=int, default=1)
    add('--scratch-directory',
        help='directory for temporary storage of downloaded files')
    return cli
def __init__(self, uploader, description, epilog):
    """Initialize the upload CLI.

    ``uploader`` must be a DerivaUpload subclass. Replaces the inherited
    optional ``--host`` with a positional ``host`` argument and adds
    ``--no-cfg-update``, ``--catalog``, and a positional input ``path``.
    """
    # Fail fast on an unusable uploader class.
    if not issubclass(uploader, DerivaUpload):
        raise TypeError("DerivaUpload subclass required")
    BaseCLI.__init__(self, description, epilog, uploader.getVersion())
    # host is required for uploads, so it becomes positional.
    self.remove_options(['--host'])
    parser = self.parser
    parser.add_argument('--no-cfg-update', action="store_true", help="Do not update local config from server.")
    parser.add_argument("--catalog", default=1, metavar="<1>", help="Catalog number. Default: 1")
    parser.add_argument('host', metavar='<host>', help="Fully qualified host name.")
    parser.add_argument("path", metavar="<dir>", help="Path to an input directory.")
    self.uploader = uploader
def __init__(self, uploader, description, epilog):
    """Initialize the upload CLI (hostname required by the base class).

    ``uploader`` must be a DerivaUpload subclass. Adds options to skip the
    server-side config refresh, purge saved transfer state, select a catalog,
    and a positional input directory.
    """
    # Fail fast on an unusable uploader class.
    if not issubclass(uploader, DerivaUpload):
        raise TypeError("DerivaUpload subclass required")
    BaseCLI.__init__(self, description, epilog, uploader.getVersion(), hostname_required=True)
    add = self.parser.add_argument
    add('--no-config-update', action="store_true",
        help="Do not check for (and download) an updated configuration from the server.")
    add('--purge-state', action="store_true",
        help="Purge (delete) any existing transfer state files found in the directory "
             "hierarchy of the input path.")
    add("--catalog", default=1, metavar="<1>", help="Catalog number. Default: 1")
    add("path", metavar="<input dir>", help="Path to an input directory.")
    self.uploader = uploader
def main():
    """Build and emit a sitemap for one table or a config-file-driven set.

    Either ``--schema``/``--table`` (a single table) or ``--config-file``
    (a JSON list of ``{"schema", "table", "priority"}`` rows) must be given.
    Per-row priorities fall back to ``--priority`` when missing or invalid.
    Writes the sitemap to stdout and returns 0; exits with status 1 on
    invalid arguments.
    """
    logger = logging.getLogger(__name__)
    logger.setLevel("WARNING")
    cli = BaseCLI(__name__,
                  "Create a sitemap from a table specified on the command line or a set of tables from a config file",
                  __version__, hostname_required=True)
    # Sitemap generation is anonymous; credential options don't apply.
    cli.remove_options(["--credential-file", "--token", "--oauth2-token"])
    cli.parser.add_argument("--catalog", default=1, metavar="<1>", help="Catalog number")
    cli.parser.add_argument("-p", "--priority", type=float,
                            help="A floating-point number between 0.0 and 1.0 indicating the table's priority")
    cli.parser.add_argument("-s", "--schema", help="the name of the schema of the (single) table to include")
    cli.parser.add_argument("-t", "--table", help="the name of the (single) table to include")
    args = cli.parse_cli()
    if args.priority is not None:
        if args.priority < 0 or args.priority > 1:
            logger.error("priority should be a floating-point number between 0 and 1")
            sys.exit(1)
    if not ((args.schema and args.table) or args.config_file):
        logger.error("must specify either a schema and table or a config file")
        sys.exit(1)
    sb = SitemapBuilder("https", args.host, args.catalog)
    if args.schema and args.table:
        sb.add_table_spec(args.schema, args.table, priority=args.priority)
    if args.config_file:
        # Fixed: use a context manager so the config file handle is closed.
        with open(args.config_file) as cf:
            rows = json.load(cf)
        for row in rows:
            if row.get("schema") is None or row.get("table") is None:
                logger.warning("malformed entry in {f}: schema or table is missing. Skipping".format(f=args.config_file))
                # Fixed: was a bare `next` expression, which is a no-op;
                # `continue` is required to actually skip the malformed row.
                continue
            if row.get("priority") is None:
                priority = args.priority
            else:
                try:
                    priority = float(row.get("priority"))
                    if priority < 0 or priority > 1:
                        logger.warning("bad priority '{p}' - should be a floating-point number between 0 and 1. Ignoring".format(p=priority))
                        priority = args.priority
                except ValueError:
                    # Fixed: this message was split by a stray literal newline.
                    logger.warning("malformed priority '{p}' - should be a floating-point number between 0 and 1. Ignoring".format(p=row.get("priority")))
                    priority = args.priority
            sb.add_table_spec(row["schema"], row["table"], priority=priority)
    sb.write_sitemap(sys.stdout)
    return 0
def __init__(self, description, epilog, **kwargs):
    """Build the argument parser: ``--catalog`` option, positional output
    directory, and trailing ``key=value`` environment-variable pairs used
    for string interpolation in the configuration file."""
    BaseCLI.__init__(self, description, epilog, **kwargs)
    add = self.parser.add_argument
    add("--catalog", default=1, metavar="<1>", help="Catalog number. Default: 1")
    add("output_dir", metavar="<output dir>", help="Path to an output directory.")
    # REMAINDER + KeyValuePairArgs: swallow all trailing key=value tokens
    # into a dict (empty dict when none are supplied).
    add("envars",
        metavar="[key=value key=value ...]",
        nargs=argparse.REMAINDER,
        action=KeyValuePairArgs,
        default={},
        help="Variable length of whitespace-delimited key=value pair arguments used for "
             "string interpolation in specific parts of the configuration file. "
             "For example: key1=value1 key2=value2")
# NOTE(review): the statements below are the tail of an ACL-setting routine
# whose "def" line falls outside this view; indentation reconstructed —
# confirm against the full file.
# Apply the shared `foreign_key_acls` policy to each structure-related
# foreign key in the PDB schema.
utils.set_foreign_key_acls(catalog, 'PDB', 'ihm_hdx_restraint', 'ihm_hdx_restraint_structure_id_fkey', foreign_key_acls)
utils.set_foreign_key_acls(catalog, 'PDB', 'ihm_derived_angle_restraint', 'ihm_derived_angle_restraint_structure_id_fkey', foreign_key_acls)
utils.set_foreign_key_acls(catalog, 'PDB', 'ihm_derived_dihedral_restraint', 'ihm_derived_dihedral_restraint_structure_id_fkey', foreign_key_acls)
utils.set_foreign_key_acls(catalog, 'PDB', 'struct_ref', 'struct_ref_structure_id_fkey', foreign_key_acls)
utils.set_foreign_key_acls(catalog, 'PDB', 'struct_ref', 'struct_ref_db_name_fkey', foreign_key_acls)
utils.set_foreign_key_acls(catalog, 'PDB', 'struct_ref_seq', 'struct_ref_seq_structure_id_fkey', foreign_key_acls)
utils.set_foreign_key_acls(catalog, 'PDB', 'struct_ref_seq_dif', 'struct_ref_seq_dif_structure_id_fkey', foreign_key_acls)
utils.set_foreign_key_acls(catalog, 'PDB', 'struct_ref_seq_dif', 'struct_ref_seq_dif_details_fkey', foreign_key_acls)

if __name__ == '__main__':
    # The literal 1 here is the version argument of BaseCLI.
    args = BaseCLI("ad-hoc table creation tool", None, 1).parse_cli()
    credentials = get_credential(args.host, args.credential_file)
    # NOTE(review): catalog id 99 is hard-coded while args.host is honored —
    # confirm this is intentional.
    main(args.host, 99, credentials)
# NOTE(review): the statements below are the tail of a method whose "def"
# line falls outside this view; indentation reconstructed — confirm against
# the full file.
transform(entry["dest"], data)
# Insert transformed rows, letting the server assign RID; when the entry
# carries extra defaults, pass those through as well.
if entry.get("extra_defaults") is None:
    entry['dest'].insert(data, nondefaults=set(["RID"]))
else:
    entry['dest'].insert(data, defaults=entry.get("extra_defaults"), nondefaults=set(["RID"]))

def vocab_to_vocabulary(self, table, data):
    # Backfill missing Description values from Name (mutates rows in place).
    # NOTE(review): `table` is unused here — presumably kept for a uniform
    # transform signature; confirm against callers.
    for row in data:
        if row.get("Description") is None:
            row["Description"] = row["Name"]

if __name__ == '__main__':
    cli = BaseCLI("load tables into demo server", None, hostname_required=True)
    cli.parser.add_argument("--directory", "-d", type=str, help="Parent directory of data files", default="./data")
    cli.parser.add_argument("--all", "-a", action="store_const", const=True, help="load all tables")
    cli.parser.add_argument("catalog", type=int)
    # Repeatable: each --table occurrence appends to a list.
    cli.parser.add_argument("--table", type=str, action="append", help="table(s) to load")
# NOTE(review): continuation of a rows literal whose opening "[{" falls
# outside this view; this fragment cannot stand alone — confirm against the
# full file.
'Sequence_Trimming': True,
'Duplicate_Removal': True
}]
table.insert(rows)

# -- =================================================================================

def main(server_name, catalog_id, credentials):
    # Connect to the catalog, tag the request context for provenance, then
    # run the one-off workflow-table load.
    server = DerivaServer('https', server_name, credentials)
    catalog = server.connect_ermrest(catalog_id)
    catalog.dcctx['cid'] = "oneoff/data"
    # NOTE(review): `model` is fetched but not used below — confirm whether
    # it can be dropped.
    model = catalog.getCatalogModel()
    #load_table_RNASeq_Reference_Genome(catalog)
    load_table_RNASeq_Workflow(catalog)

# -- --------------------------------------------------------------
# arguments:
# --credential-file (optional if authenticated through deriva-auth)
# -- host
# e.g. python3 row_mutation.py --credential-file ~/.deriva/credential.json --host staging.gudmap.org
if __name__ == '__main__':
    # The literal 1 here is the version argument of BaseCLI.
    args = BaseCLI("mutate rows", None, 1).parse_cli()
    credentials = get_credential(args.host, args.credential_file)
    # NOTE(review): catalog id 2 is hard-coded — confirm this is intentional.
    main(args.host, 2, credentials)
def dump_study_collection(self):
    # Export Sequencing_Study_Collection rows reachable from mouse mRNA-Seq
    # experiments via Study, using a datapath filter/link chain.
    # NOTE(review): self.pb is presumably a datapath catalog handle and
    # self.DATA a target schema name — confirm against the class definition.
    table = self.pb.RNASeq.Experiment
    data = table.filter(table.Species=='Mus musculus')\
        .filter(table.Sequencing_Type=="mRNA-Seq")\
        .link(self.pb.RNASeq.Study)\
        .link(self.pb.RNASeq.Sequencing_Study_Collection)\
        .entities()
    self.finalize_and_write("Sequencing_Study_Collection", self.DATA, list(data))

def dump_collection(self):
    # Same traversal as dump_study_collection, extended one hop further to
    # Common.Collection, exporting the Collection rows themselves.
    table = self.pb.RNASeq.Experiment
    data = table.filter(table.Species=='Mus musculus')\
        .filter(table.Sequencing_Type=="mRNA-Seq")\
        .link(self.pb.RNASeq.Study)\
        .link(self.pb.RNASeq.Sequencing_Study_Collection)\
        .link(self.pb.Common.Collection)\
        .entities()
    self.finalize_and_write("Collection", self.DATA, list(data))

if __name__ == '__main__':
    cli = BaseCLI("dump tables to import into demo", None, hostname_required=True)
    cli.parser.add_argument("catalog", type=int, default=None)
    args = cli.parse_cli()
    dmp = RBKDump(args.host, args.catalog)
    dmp.dump_all()
# NOTE(review): continuation of an if/else whose opening branch and the start
# of this run_data literal fall outside this view; this fragment cannot stand
# alone — confirm against the full file.
"Input_Bag": args.inputBagRID,
"Notes": args.notes,
"Execution_Status": args.status,
# Literal "\n" sequences from the CLI are converted back into real newlines.
"Execution_Status_Detail": args.statusDetail.replace('\\n','\n')
}
entities = run_table.insert([run_data])
rid = entities[0]["RID"]
else:
    # Update path: caller supplied an existing RID via --update.
    run_data = {
        "RID": args.update,
        "Replicate": args.repRID,
        "Workflow": args.workflowRID,
        "Reference_Genome": args.referenceRID,
        "Input_Bag": args.inputBagRID,
        "Notes": args.notes,
        "Execution_Status": args.status,
        "Execution_Status_Detail": args.statusDetail.replace('\\n','\n')
    }
    entities = run_table.update([run_data])
    rid = args.update
# Emit the affected RID for the calling pipeline to capture.
print(rid)

if __name__ == '__main__':
    args = get_args()
    # The literal 1 here is the version argument of BaseCLI.
    cli = BaseCLI("Custom RNASeq query", None, 1)
    cli.remove_options(["--config-file"])
    host = args.host
    # Authenticates with a raw cookie rather than a stored credential file.
    credentials = {"cookie": args.cookie}
    main(host, 2, credentials)
# NOTE(review): tail of an ACL-application routine (setACLs?) whose "def"
# line falls outside this view; indentation reconstructed — confirm against
# the full file.
# Run each category of ACL/binding update; each helper reports whether it
# changed anything.
changed_catlog_acl = setCatalogACL(model_root)  # NOTE(review): "catlog" typo kept as-is
changed_tables_acl = setTablesACL(model_root)
changed_tables_acl_bindings = setTablesACLBindings(model_root)
changed_columns_acl = setColumnsACL(model_root)
changed_columns_acl_bindings = setColumnsACLBindings(model_root)
changed_foreign_key_acl = setForeignKeyACL(model_root)
changed_foreign_key_acl_bindings = setForeignKeyACLBindings(model_root)
changed = changed_catlog_acl or changed_tables_acl or changed_tables_acl_bindings or changed_columns_acl or changed_columns_acl_bindings or changed_foreign_key_acl or changed_foreign_key_acl_bindings
# Only push to the server when something actually changed.
# NOTE(review): "changed == True" would be more idiomatic as "if changed:".
if changed == True:
    print('Applying ACLs changes...')
    model_root.apply()
else:
    print('No ACLs changes detected.')

def main(server_name, catalog_id, credentials):
    # Connect to the catalog, tag the request context, and apply the ACL
    # configuration to its model.
    server = DerivaServer('https', server_name, credentials)
    catalog = server.connect_ermrest(catalog_id)
    catalog.dcctx['cid'] = 'ACL/model'
    model = catalog.getCatalogModel()
    setACLs(model)

if __name__ == '__main__':
    # The literal 1 here is the version argument of BaseCLI.
    args = BaseCLI('ACL Configuration Tool', None, 1).parse_cli()
    credentials = get_credential(args.host, args.credential_file)
    # NOTE(review): catalog id 99 is hard-coded — confirm this is intentional.
    main(args.host, 99, credentials)
# NOTE(review): continuation of a table-definition call whose opening (and
# the start of its column list) falls outside this view; this fragment cannot
# stand alone — confirm against the full file.
Column.define("Collection", builtin_types.text, nullok=False)
],
# Composite key: each (Study, Collection) pair appears at most once.
key_defs=[Key.define(["Study", "Collection"])],
fkey_defs=[
    ForeignKey.define(["Study"], self.DATA, "Study", ["RID"],
                      constraint_names=[[
                          self.DATA, "Study_Collection_Study_fkey"
                      ]]),
    ForeignKey.define(["Collection"], self.DATA, "Collection", ["RID"],
                      constraint_names=[[
                          self.DATA, "Study_Collection_Collection_fkey"
                      ]])
],
comment="Many-to-many associations between studies and collection")
self.try_create_table(schema, table_def)

if __name__ == '__main__':
    cli = BaseCLI("demo database creation tool", None, hostname_required=True)
    # Attach to an existing catalog instead of creating a new one.
    cli.parser.add_argument("--use-catalog", type=int, default=None)
    args = cli.parse_cli()
    credentials = get_credential(args.host)
    dc = DemoCatalog(args.host, credentials, catalog_number=args.use_catalog)
    dc.create_tables()
    # Emit the id of the catalog that was created/attached.
    print(dc.catalog.catalog_id)