def create_session(*, path, name: str = None):
    """
    Create an aquarium session connected to the named Aquarium instance.

    Arguments:
        path (String): the config directory path
        name (String): the instance name to connect to; when omitted or
            empty, the config's "default" entry is used

    Returns:
        Aquarium Session Object

    Raises:
        BadInstanceError: if *name* is not among the configured instances
    """
    file_path = config_file_path(path)
    config = get_config(file_path)
    if not name:
        # fall back to the default instance named in the config
        name = config["default"]
    if name not in config["instances"]:
        raise BadInstanceError(name)
    credentials = config["instances"][name]
    session = AqSession(
        credentials["login"],
        credentials["password"],
        credentials["aquarium_url"]
    )
    # keep trident request logging quiet by default
    session.set_verbose(False)
    return session
def test_push_category(cli, credentials):
    """Integration test: pushing a category updates the protocol content
    both in the local session directory and on the Aquarium server.

    Requires a live 'nursery' Aquarium instance (from the credentials
    fixture) and a fetched "ParrotFishTest" category.
    """
    cli.register(**credentials['nursery'])
    cli.fetch("ParrotFishTest")
    cli.ls()

    # just get the first operation type
    sm = cli._session_manager
    session = sm.list_dirs()[0]
    protocol = session.categories[0].list_dirs()[0]
    old_local_ot = session.read_operation_type("ParrotFishTest", protocol.name)
    aqsession = AqSession(**credentials['nursery'])
    old_loaded_ot = aqsession.OperationType.find_by_name(old_local_ot.name)

    # push the content
    new_content = str(uuid.uuid4())  # unique content so the change is observable
    protocol.protocol.write(new_content)
    cli.push_category("ParrotFishTest")

    # new operation types
    new_local_ot = session.read_operation_type("ParrotFishTest", protocol.name)
    new_loaded_ot = aqsession.OperationType.find_by_name(new_local_ot.name)

    # compare content: both server-side and local copies must carry the
    # pushed content, and it must differ from the pre-push snapshot
    assert new_loaded_ot.protocol.content == new_content
    assert new_local_ot.protocol.content == new_content
    assert old_local_ot.protocol.content != new_content
    print(
        utils.compare_content(old_local_ot.protocol.content,
                              new_local_ot.protocol.content))
def create_session(self, encryption_key):
    """
    Creates a new :class:`AqSession` instance using the login,
    aquarium_url, and encrypted password information stored. The
    encryption_key is used to decrypt the password.

    :param encryption_key: Fernet key to decrypt the password
    :type encryption_key: str
    :return: AqSession instance, or None when the key cannot decrypt
        the stored password
    :rtype: AqSession
    """
    cipher_suite = Fernet(encryption_key)
    aqsession = None
    try:
        aqsession = AqSession(
            self.login,
            cipher_suite.decrypt(self.encrypted_password).decode(),
            self.aquarium_url,
            name=self.name)
    except InvalidToken:
        logger.warning(self.encrypted_password)
        # BUG FIX: the previous message concatenated "...to generate" and
        # "a new key..." without a space ("generatea new key").
        logger.warning("Encryption key mismatch! Cannot create session. "
                       "Use 'pfish generate-encryption-key' to generate "
                       "a new key. Alternatively, use "
                       "'pfish set-encryption-key [YOURKEY]' if you have "
                       "a pre-generated key")
    # cache the result on the instance whether or not decryption succeeded
    self.aquarium_session = aqsession
    return aqsession
def test_find_query_returns_none(monkeypatch, mock_login_post):
    """A find query that the server rejects should yield ``None``.

    ``AqHTTP.post`` is replaced with a stub that raises a
    ``TridentRequestError`` carrying a 422 response — the status
    Aquarium returns when a model cannot be found.
    """

    class MockResponse:
        def __init__(self, json_data, status_code):
            self.json_data = json_data
            self.status_code = status_code

        def json(self):
            return self.json_data

    # Stand up a session against a fake endpoint
    monkeypatch.setattr(requests, "post", mock_login_post)
    session = AqSession("username", "password", "http://52.52.525.52")

    def failing_post(*args, **kwargs):
        raise TridentRequestError("There was an error", MockResponse({}, 422))

    monkeypatch.setattr(AqHTTP, "post", failing_post)

    assert session.SampleType.find(2342342) is None
def create_session(aq_instance):
    """
    Build a Session from the credentials stored in secrets.json.

    :param aq_instance: the instance of Aquarium to use
                        Corresponds to a key in the secrets.json file
    :type aq_instance: str
    :return: new Session
    """
    secrets_path = os.path.join(os.path.dirname(__file__), 'secrets.json')
    with open(secrets_path) as fp:
        credentials = json.load(fp)[aq_instance]

    session = AqSession(
        credentials["login"],
        credentials["password"],
        credentials["aquarium_url"])

    print("Connected to Aquarium at {} using pydent version {}".format(
        session.url, str(__version__)))

    me = session.User.where({'login': credentials['login']})[0]
    print('Logged in as {}\n'.format(me.name))
    return session
def session(server):
    """Return an AqSession for the Aquarium server named by *server*."""
    creds = resources['aquarium']
    return AqSession(
        creds['login'],
        creds['password'],
        creds['aquarium_url'][server],
    )
def test_instance():
    """Fetch a deployed 'Fragment Analyzing' operation type and instantiate it."""
    # NOTE(review): credentials are hard-coded in source — consider moving
    # them to a config/fixture.
    session = AqSession("vrana", "Mountain5", "http://52.27.43.242/")
    query = {"name": "Fragment Analyzing", "deployed": True}
    matches = session.OperationType.where(query)
    matches[0].instance()
def run(self, aq: AqSession, models: List[Sample], new_node_callback=None):
    """Fetch all Items belonging to the given samples into the browser cache.

    :param aq: Aquarium session used for the queries
    :param models: samples whose items should be collected
    :param new_node_callback: accepted but never used here
        # NOTE(review): confirm whether this parameter is intentionally ignored
    :return: list of every model accumulated in the caching browser
    """
    # Use a caching session so related models are de-duplicated in the browser.
    with aq.with_cache(timeout=60) as sess:
        page_size = 1000
        with Progress() as progress:
            # total is a placeholder; the infinite task context adjusts it
            # as pages stream in
            task0 = progress.add_task("getting items...", total=page_size*2)
            with infinite_task_context(progress, task_id=task0) as callback:
                items = sess.Item.where(
                    {"sample_id": [m.id for m in models]},
                    page_size=page_size,
                    page_callback=callback)
            # eager-load each item's object_type relation into the cache
            sess.browser.get(items, {'object_type'})
        return list(sess.browser.models)
def main():
    """Fetch plan 35448 and print its provenance trace as JSON."""
    creds = resources['aquarium']
    session = AqSession(creds['login'],
                        creds['password'],
                        creds['aquarium_url'])
    plan = session.Plan.find(35448)
    trace = TraceFactory.create_from(session=session,
                                     plans=[plan],
                                     experiment_id="dummy")
    print(json.dumps(trace.as_dict(), indent=4))
def main():
    """Look up the sample 'IAA1-Nat-F' and print its core attributes."""
    creds = resources['aquarium']
    session = AqSession(creds['login'],
                        creds['password'],
                        creds['aquarium_url'])
    sample = session.Sample.find_by_name('IAA1-Nat-F')
    summary = "Name: {}\nProject: {}\nDescription: {}\nSample Type: {}\nProperties: {}".format(
        sample.name, sample.project, sample.description,
        sample.sample_type.name, sample.properties)
    print(summary)
def main():
    """Build a planner canvas and submit PCR operations for a sample range."""
    creds = resources['aquarium']
    session = AqSession(creds['login'],
                        creds['password'],
                        creds['aquarium_url'])
    canvas = planner.Planner(session)
    submit_pcrs(
        sample_range=list(range(25589, 25591)),
        session=session,
        canvas=canvas,
    )
    print(canvas.plan)
def get_session(instance):
    """Connect to the Aquarium instance named *instance* using secrets.json.

    Reads credentials from 'secrets.json' in the working directory, logs
    in, reports the connection, and returns the session.
    """
    with open('secrets.json') as fp:
        credentials = json.load(fp)[instance]

    session = AqSession(credentials["login"],
                        credentials["password"],
                        credentials["aquarium_url"])

    print("Connected to Aquarium at {} using pydent version {}".format(
        session.url, str(__version__)))

    me = session.User.where({'login': credentials['login']})[0]
    print('Logged in as {}\n'.format(me.name))
    return session
def test_load_model_from_json(monkeypatch, mock_login_post):
    """Tests heirarchical loading of a JSON file into Trident Models.

    Should return a User with name and login attributes. Groups attribute
    should contain a list of Group models.
    """
    # Create a mock session
    monkeypatch.setattr(requests, "post", mock_login_post)
    aquarium_url = "http://52.52.525.52"
    session = AqSession("username", "password", aquarium_url)

    # monkey patch the "find" method
    def find_user(*args, json_data=None, **kwargs):
        # BUG FIX: the mocked login value was the masked literal "******",
        # which contradicted the assertion below that u.login == "default_login".
        user_default = {
            "name": "default_name",
            "login": "default_login",
            "groups": [
                {"id": 1, "name": "default_group1"},
                {"id": 2, "name": "defulat_group2"},
            ],
        }
        json_data.update(user_default)
        return json_data

    monkeypatch.setattr(AqHTTP, "post", find_user)

    # find a user
    u = session.User.find(1)

    # assert user properties
    assert isinstance(u, User)
    assert u.id == 1
    assert u.name == "default_name"
    assert u.login == "default_login"

    # test load groups
    assert len(u.groups) == 2
    assert isinstance(u.groups[0], Group)
    assert u.groups[1].id == 2
def test_library_controller(tmpdir, credentials):
    """This test creates an session_environment and writes an Library. It
    then reads it. Its expected the loaded Library and Library retrieved
    from the AqSession will have equivalent attributes."""
    session = AqSession(**credentials['nursery'])
    lib = session.Library.all()[-1]
    session_env = SessionEnvironment(**credentials['nursery'],
                                     encryption_key=Fernet.generate_key())
    session_env.set_dir(tmpdir)
    session_env.write_library(lib)
    loaded_lib = session_env.read_library_type(lib.category, lib.name)
    assert loaded_lib.dump() == lib.dump()
    # BUG FIX: previously this compared loaded_lib.source.dump() with
    # itself (always true); compare against the original library instead.
    assert loaded_lib.source.dump() == lib.source.dump()
def test_load_model_with_many(monkeypatch, mock_login_post): """Tests a relationship using a database connection. Should return a Sample instance with an accessible SampleType instance. """ # Create a mock session monkeypatch.setattr(requests, "post", mock_login_post) aquarium_url = "http://52.52.525.52" session = AqSession("username", "password", aquarium_url) def mock_post(*args, json_data=None, **kwargs): dummy_object = {"id": 3, "name": "Primer"} if "method" not in json_data or json_data["method"] != "where": return dummy_object samples = [ { "id": 1, "sample_type_id": 3, "name": "sample1" }, { "id": 2, "sample_type_id": 3, "name": "sample2" }, { "id": 3, "sample_type_id": 5, "name": "sample3" }, ] return [ s for s in samples if s["sample_type_id"] == json_data["arguments"]["sample_type_id"] ] monkeypatch.setattr(AqHTTP, "post", mock_post) st = session.SampleType.find(3) samples = st.samples assert len(samples) == 2 assert isinstance(samples[0], Sample)
def main():
    """Create a new Primer sample in Aquarium and print its attributes."""
    creds = resources['aquarium']
    session = AqSession(creds['login'],
                        creds['password'],
                        creds['aquarium_url'])
    primer_type = session.SampleType.find_by_name('Primer')
    sample = session.Sample.new(
        name="Example Primer",
        project="trident-demo",
        description="primer created with trident",
        sample_type_id=primer_type.id,
        properties={
            'Overhang Sequence': 'AAAAA',
            'Anneal Sequence': 'GGGGGGGG',
            'T Anneal': 70,
        })
    template = "Name: {}\nProject: {}\nDescription: {}\nSample Type: {}\nProperties: {}"
    print(template.format(sample.name, sample.project, sample.description,
                          sample.sample_type.name, sample.properties))
def test_load_model_with_database_connection(monkeypatch, mock_login_post): """Tests a relationship using a database connection. Should return a Sample instance with an accessible SampleType instance. """ # Create a mock session monkeypatch.setattr(requests, "post", mock_login_post) aquarium_url = "http://52.52.525.52" session = AqSession("username", "password", aquarium_url) # monkey patch the "find" method def find_user(*args, json_data=None, **kwargs): sample_default = {"id": 3, "name": "MyPrimer", "sample_type_id": 5} sample_type_default = {"name": "Primer", "id": 5} if json_data["model"] == "Sample": if json_data["id"] == sample_default["id"]: json_data.update(sample_default) return json_data if json_data["model"] == "SampleType": if json_data["id"] == sample_type_default["id"]: json_data.update(sample_type_default) return json_data monkeypatch.setattr(AqHTTP, "post", find_user) sample = session.Sample.find(3) sample_type = sample.sample_type # Sample properties assert isinstance(sample, Sample) assert sample.id == 3 assert sample.name == "MyPrimer" assert sample.sample_type_id == 5 # SampleType properties assert isinstance(sample_type, SampleType) assert sample_type.name == "Primer" assert sample_type.id == 5
def test_where_queries_should_return_empty_array(monkeypatch, mock_login_post):
    """Empty where queries should return empty arrays.

    ``AqHTTP.post`` is replaced by a stub that always answers with an
    empty list, simulating a query with no matches.
    """
    monkeypatch.setattr(requests, "post", mock_login_post)
    session = AqSession("username", "password", "http://52.52.525.52")

    def empty_post(*args, **kwargs):
        return []

    monkeypatch.setattr(AqHTTP, "post", empty_post)

    result = session.SampleType.where({"id": 3454345, "object_type_id": 23432})
    assert result == [], "Where should return an empty list"
def test_operation_type_controller(tmpdir, credentials):
    """Round-trip an OperationType through a SessionEnvironment.

    Writes the newest OperationType to disk, reads it back, and checks
    the reloaded copy (and each of its component files) dumps identically
    to the original.
    """
    session = AqSession(**credentials['nursery'])
    op_type = session.OperationType.all()[-1]
    env = SessionEnvironment(**credentials['nursery'],
                             encryption_key=Fernet.generate_key())
    env.set_dir(tmpdir)
    env.write_operation_type(op_type)
    reloaded = env.read_operation_type(op_type.category, op_type.name)
    assert reloaded.dump() == op_type.dump()
    for component in ('protocol', 'precondition', 'documentation', 'cost_model'):
        assert getattr(reloaded, component).dump() == getattr(op_type, component).dump()
def run(
    self,
    aq: AqSession,
    models: List[Sample],
    *,
    new_node_callback: Optional[NewNodeCallback] = None
) -> List[ModelBase]:
    """Prefetch the operation context for the given samples into the cache.

    Queries every Operation FieldValue whose child sample is one of
    *models*, pulling related plan associations, job associations,
    operation types (with field types), and field values (with sample
    and item) into the session's browser cache in one nested query.

    :param aq: Aquarium session used for the query
    :param models: samples whose operation context should be loaded
    :param new_node_callback: accepted but never referenced here
        # NOTE(review): confirm whether this parameter is intentionally unused
    :return: every model accumulated in the browser cache
    """
    with aq.with_cache(timeout=60) as sess:
        sess: AqSession
        # non_browser_sess = aq(using_cache=False)
        page_size = 1000
        browser: Browser = sess.browser
        with Progress() as progress:
            task0 = progress.add_task("collecting samples", total=page_size * 2)
            with infinite_task_context(progress, task0) as callback:
                # Nested query: FieldValue -> operation -> (plan/job
                # associations, operation_type.field_types,
                # field_values.{sample,item}) plus the field_type itself.
                query = {
                    '__model__': 'FieldValue',
                    '__query__': {
                        'parent_class': 'Operation',
                        'child_sample_id': [m.id for m in models],
                        '__return__': {
                            'operation': {
                                'plan_associations': {},
                                'job_associations': {},
                                'operation_type': 'field_types',
                                'field_values': {'sample', 'item'}
                            },
                            'field_type': {}
                        }
                    }
                }
                results = sess.query(query, use_cache=True,
                                     page_size=page_size,
                                     page_callback=callback)
                browser.update_cache(results)
        return list(browser.models)
def _create_network(
    cls,
    aq: AqSession,
    models: List[ModelBase],
    new_node_callback: NewNodeCallback = None,
    new_edge_callback: NewEdgeCallback = None,
) -> nx.DiGraph:
    """Build the relationship graph for *models*.

    :param aq: Aquarium session used for cached queries
    :param models: seed models to expand the network from
    :param new_node_callback: invoked as nodes are added to the graph
    :param new_edge_callback: invoked as edges are added to the graph
    :return: directed graph of model relationships
    """
    # Cache-heavy traversal: use a caching session and seed its browser
    # with the starting models before walking relationships.
    with aq.with_cache(timeout=120) as sess:
        browser: Browser = sess.browser
        browser.clear()
        browser.update_cache(models)
        g = relationship_network(
            sess.browser,
            models,
            reverse=True,  # NOTE(review): presumably walks edges in provenance direction — confirm
            get_models=cls.get_models,
            cache_func=cls.cache_func,
            key_func=cls.key_func,
            strict_cache=False,
            new_node_callback=new_node_callback,
            new_edge_callback=new_edge_callback,
        )
        return g
#!/usr/bin/env python
# coding: utf-8

# Caleb Ellington
# [email protected]

# In[6]:

from pydent import AqSession

session = AqSession("", "", "http://52.27.43.242")


def get_ops(name):
    """Return all operations for the operation type named *name*.

    BUG FIX: previously the ``name`` argument was ignored and
    "Transform Cells" was hard-coded.
    """
    op_type = session.OperationType.find_by_name(name)
    return op_type.operations


def get_op_ids(ops):
    """Return the ids of the given operations."""
    return [op.id for op in ops]


def get_fvs(op_ids):
    """Return the output FieldValues belonging to the given operation ids."""
    return session.FieldValue.where({'parent_class': 'Operation',
                                     'parent_id': op_ids,
                                     'role': 'output'})


def get_fv_ids(fvs):
    """Return the ids of the given field values."""
    return [fv.id for fv in fvs]


def get_wires(fvs_ids):
    """Return wires originating from the given field value ids.

    BUG FIX: previously the body referenced the global ``fv_ids`` instead
    of the ``fvs_ids`` parameter, so the argument was silently ignored.
    """
    return session.Wire.where({'from_id': fvs_ids})
def aq(config):
    """Yield an AqSession built from the aquarium section of *config*."""
    connection = AqSession(config.aquarium.user,
                           config.aquarium.password,
                           config.aquarium.host)
    yield connection
class DAMPAqData:
    """Pulls operation data from an Aquarium server, filters it by
    user-specified users/protocols/time, and writes the results to
    Google Sheets (one worksheet per protocol)."""

    # Resets all class variables with each new DAMPAqData() instance
    def __init__(self):
        self.USERS = []           # user names to include (from input yaml)
        self.PROTOCOLS = []       # protocol (operation type) names to include
        self.USER_KEYS = []       # Aquarium ids resolved from USERS
        self.PROTOCOL_KEYS = []   # Aquarium ids resolved from PROTOCOLS
        self.HANDS_OFF_TIME = []  # hands-off minutes per protocol
        self.OUTPUTS = []         # column names to collect per operation
        self.COSTS = []           # cost per job, per protocol
        self.ERRORS = []          # data-association keys treated as errors
        self.session = None       # AqSession, set by login()
        self.spreadsheet = None   # pygsheets spreadsheet, set by connectSheet()
        self.op_data = {}         # current protocol's {output: [values]} table

    # ===================================================================================================== #
    # Functions related to initial set-up with user-specified inputs
    # Includes: loadInputs(input_file) and login(user,password,IP)
    # ===================================================================================================== #

    # Parses input .yaml file for the users, protocols, hands-off time of each protocol, and desired outputs
    # Creates class lists of each for later access
    # Used in aquarium_clean_data_pull with passed in name of .yaml file
    # MUST be a .yaml file
    # MUST be in same directory
    def loadInputs(self,input_file):
        """Load the run configuration from *input_file* (YAML)."""
        with open(input_file) as stream:
            constants = yaml.load(stream)
        self.USERS = constants['users']
        self.PROTOCOLS = constants['protocols']
        self.HANDS_OFF_TIME = constants['times']
        self.OUTPUTS = constants['outputs']
        self.COSTS = constants['costs']
        self.ERRORS = constants['errors']

    # Accesses aquarium server and logs-in with supplied credentials and local Aquarium IP address
    # Times out after 60 minutes
    # Used in aquarium_clean_data_pull with passed in username, password, and IP address
    def login(self,user,password,IP):
        """Open an Aquarium session and store it on the instance."""
        self.session = AqSession(user, password, "http://" + IP)
        # NOTE(review): bare attribute access — presumably warms/validates
        # the connection; confirm it is needed
        self.session.User
        self.session.set_timeout(60)

    # ===================================================================================================== #
    # Functions related to Google Sheets
    # Includes: createSheets(), connectSheet(name,creds), findFirstEmptyRow(sheet), and writeData(protocol)
    # ===================================================================================================== #

    # Supplies necessary credentials for access and use of pygsheets
    # Opens the specified worksheet and sets the class variable 'spreadsheet'
    # Requires sharing of the spreadsheet inside Google Sheets with the email inside the credential file
    # Credential file needs to be in same directory
    # Used in aquarium_clean_data_pull with passed user inputs of spreadsheet and credential files
    def connectSheet(self,name,creds):
        """Authorize pygsheets with *creds* and open the spreadsheet *name*."""
        # NOTE(review): `scope` is assigned but never passed to authorize() — confirm intent
        scope = ['https://spreadsheets.google.com/feeds','https://www.googleapis.com/auth/drive']
        client = pygsheets.authorize(service_file=creds)
        self.spreadsheet = client.open(name)
        self.createSheets()

    # Checks if a sheet within the worksheet exists with each protocol name
    # If not, creates a sheet with the title of the protocol for later access and writing
    # Used in connectSheet(name,creds)
    def createSheets(self):
        """Ensure one worksheet exists per configured protocol."""
        for protocol in self.PROTOCOLS:
            try:
                self.spreadsheet.worksheet_by_title(protocol)
            except:
                self.spreadsheet.add_worksheet(protocol)

    # Finds the first empty row in the sheet to prevent overwriting previous data
    # Used in writeData(protocol)
    def findFirstEmptyRow(self,sheet):
        """Return the 1-based index of the first empty row in column 1."""
        column = sheet.get_col(1)
        row = 0
        if column:
            try:
                # advance past non-empty cells; stops at the first falsy cell
                while column[row]:
                    row += 1
            except:
                # ran off the end of the column: every cell is full,
                # so grow the sheet by one row
                sheet.add_rows(1)
                return(row)
        return(row+1)

    # Converts the class dictionary to a dataframe
    # Writes the dataframe to the last empty row in the protocol's spreadsheet
    # Updates the headers (row 1) to the current desired outputs
    # Used in collectData(time) for each protocol
    def writeData(self,protocol):
        """Append the collected op_data table to the protocol's worksheet."""
        df = pd.DataFrame(self.op_data)
        sheet = self.spreadsheet.worksheet("title",protocol) #Selects the sheet based on the current protocol
        sheet.update_row(1,self.OUTPUTS)
        row = self.findFirstEmptyRow(sheet)
        if not df.empty:
            sheet.set_dataframe(df,(row,1),copy_head = False)

    # ===================================================================================================== #
    # Functions related to the filtering of Aquarium data by user-specified inputs and their helper functions
    # User-specified inputs include desired users, protocols, outputs, and length of time collected
    # Includes: collectOperations(protocol_key,time), createEmptyDict(), findKeys(), and findTimeDelta(date)
    # ===================================================================================================== #

    # Collects all operations within a specified protocol
    # Filters by user and length of time
    # Used in collectData(time)
    def collectOperations(self,protocol_key,time):
        """Return a generator of this protocol's operations, filtered by user."""
        op_type = self.session.OperationType.find(protocol_key) #Finds the protocol/operation type object from the key. We need the object to find additional information.
        ops = (op for op in op_type.operations if op.user_id in self.USER_KEYS and op.jobs and op.jobs[-1].user_id) #Filters by user, elimates jobs without a technician, collects all operations in system
        #ops = (op for op in op_type.operations if op.user_id in self.USER_KEYS and op.jobs and op.jobs[-1].user_id and self.findTimeDelta(op.created_at) <= time) #Filters by user, elimates jobs without a technician, collects most recent operations
        return ops

    # Creates an empty dictionary with labels corresponding to desired outputs
    # Used to reset the collection dictionary for every protocol and reduce data storage
    # Used in collectData(time)
    def createEmptyDict(self):
        """Reset op_data to {output: []} for each configured output."""
        self.op_data = dict([(output, []) for output in self.OUTPUTS])

    # Finds the Aquarium database keys for the users and protocols for faster access
    # Used in collectData(time)
    def findKeys(self):
        """Resolve configured user and protocol names to Aquarium ids."""
        self.USER_KEYS = [self.session.User.find_by_name(user).id for user in self.USERS]
        self.PROTOCOL_KEYS = [self.session.OperationType.where({"name" : protocol, "deployed" : True})[-1].id for protocol in self.PROTOCOLS]

    # Finds the difference in time between the operation's creation date and today
    # Information is used to filter the list of operations for every protocol to the most recent (as user-specified)
    # Used in collectData(time)
    def findTimeDelta(self,date):
        """Return the age of *date* (an ISO string) in whole days."""
        tz = timezone('UTC')
        today = tz.localize(datetime.utcnow())
        op_date = parser.parse(date)
        op_date = op_date.astimezone(tz)
        dif = today - op_date
        return dif.days

    # ===================================================================================================== #
    # Functions related to collection of data from Aquarium server
    # Includes: appendData(key,value), collectData(time), findData(op,key), and findRuntime(op)
    # ===================================================================================================== #

    # Appends the passed in data to the current dictionary holding information on a particular protocol
    # Used in findData(op,key) for every data value collected
    def appendData(self,key,value):
        """Append *value* under column *key* in op_data."""
        self.op_data[key].append(value)

    # Key method for the collection of data and integration of helper methods
    # Uses: findKeys() (once), createEmptyDict() (Nx, N = number of protocols), collectOperations(protocol,time) (Nx)
    #       findData(operation,output) ((M*N)x, M = number of outputs * number of operations inside one protocol),
    #       and writeData(protocol) (Nx)
    # Used in aquarium_clean_data_pull with user specified period of past time to collect from the current date
    def collectData(self,time):
        """Collect every configured output for every protocol and write results."""
        self.findKeys()
        for p in self.PROTOCOL_KEYS:
            self.createEmptyDict()
            operations = self.collectOperations(p,time)
            for op in operations:
                for o in self.OUTPUTS:
                    self.findData(op,o)
            self.writeData(self.session.OperationType.find(p).name)

    # Finds the data value as specified by the current output ("key") and operation
    # Throws error if the current output is not a "known" output (i.e., the method of locating the value is not specified below)
    # Can pass in specific values as a 3rd parameter if desired
    # TODO: Make more programmatic/cleaner
    # Used in collectData(time)
    def findData(self,op,key,check=None):
        """Extract the value for output *key* from operation *op* and append it."""
        # NOTE(review): `check` is tested for None but its value is never
        # read — presumably the else branch was meant to append `check`; confirm
        value = ""
        if check is None: #key = known
            if key == "Date":
                value = op.created_at
            if key == "ID":
                value = int(op.id)
            if key == "Protocol":
                value = op.operation_type.name
            if key == "Technician":
                value = self.session.User.find(int(op.jobs[-1].user_id)).name
            if key == "Status":
                value = op.status
                try:
                    # refine a generic "error" status using data associations
                    if op.data_associations and value == "error":
                        for da in op.data_associations:
                            if da.key == "job_crash":
                                value = "crashed"
                            if da.key == "aborted":
                                value = "aborted"
                            if da.key == "canceled":
                                value = "canceled"
                except:
                    pass
            if key == "Error Message":
                try:
                    if op.data_associations and self.op_data["Status"][-1] != "done":
                        data = (da for da in op.data_associations if da.key in self.ERRORS)
                        for da in data:
                            value = da.key
                except:
                    pass
            if key == "Job Size":
                value = len(op.jobs[-1].operations)
            if key == "Runtime":
                if self.op_data["Status"][-1] == "done":
                    value = self.findRuntime(op)
            if key == "Hands-off Time":
                if self.op_data["Status"][-1] == "done":
                    value = self.HANDS_OFF_TIME[self.PROTOCOLS.index(op.operation_type.name)]
            if key == "Hands-on Time":
                if self.op_data["Status"][-1] == "done" and self.op_data["Runtime"][-1]:
                    value = self.op_data["Runtime"][-1] - self.op_data["Hands-off Time"][-1]
            if key == "Hands-on Time/Job":
                if self.op_data["Status"][-1] == "done" and self.op_data["Runtime"][-1]:
                    value = self.op_data["Hands-on Time"][-1]/self.op_data["Job Size"][-1]
            if key == "Cost/Job":
                value = self.COSTS[self.PROTOCOLS.index(op.operation_type.name)]
            if key == "Total Cost":
                value = self.op_data["Job Size"][-1] * self.op_data["Cost/Job"][-1]
            if key == "Cost/Minute (Total)":
                if self.op_data["Status"][-1] == "done" and self.op_data["Runtime"][-1]:
                    value = self.op_data["Total Cost"][-1]/self.op_data["Runtime"][-1]
            if key == "Cost/Minute (Hands-on)":
                if self.op_data["Status"][-1] == "done" and self.op_data["Runtime"][-1]:
                    value = self.op_data["Total Cost"][-1]/self.op_data["Hands-on Time"][-1]
            if key == "Concentration Keyword":
                if op.outputs[-1] and op.outputs[-1].item and op.outputs[-1].item.data_associations:
                    for da in op.outputs[-1].item.data_associations:
                        if da.key == "concentration_keyword":
                            value = da.value
            if key == "White Colonies" and self.op_data["Protocol"][-1] == "Check Plate":
                if op.outputs[-1] and op.outputs[-1].item and op.outputs[-1].item.data_associations:
                    for da in op.outputs[-1].item.data_associations:
                        if da.key == "white_colonies":
                            value = da.value
            if key == "Blue Colonies" and self.op_data["Protocol"][-1] == "Check Plate":
                if op.outputs[-1] and op.outputs[-1].item and op.outputs[-1].item.data_associations:
                    for da in op.outputs[-1].item.data_associations:
                        if da.key == "blue_colonies":
                            value = da.value
            if key in self.OUTPUTS:
                self.appendData(key,value)
            else:
                self.appendData(key,"")
                print(key + " is not a known data type. Must input additional collection parameter (i.e., op.id)")
        else:
            self.appendData(key,value)

    # Finds the runtime of the current operation's job based on the json provided by job.state
    # Localizes all times to UTC
    # Finds runtime as the difference between the first and last step's time stamps
    # Used in findData()
    def findRuntime(self,op):
        """Return the job runtime in minutes, or "" when timestamps are missing."""
        tz = timezone('UTC')
        time_json = json.loads(op.jobs[-1].state)
        runtime = ""
        try:
            start_time = time_json[0]['time']
            end_time = time_json[-2]['time']
        except:
            return runtime
        else:
            if type(start_time) is str:
                start = tz.localize(datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S+00:00'))
            else:
                # NOTE(review): `utc_tz` is not defined in this class — confirm
                # it exists at module level, otherwise this branch raises NameError
                start = datetime.fromtimestamp(int(start_time), utc_tz)
            if type(end_time) is str:
                end = tz.localize(datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S+00:00'))
            else:
                end = datetime.fromtimestamp(int(end_time), utc_tz)
            total_time = end - start
            runtime = total_time.days*1440 + total_time.seconds/60 #In minutes
            return runtime
def login(self,user,password,IP):
    """Open an Aquarium session for *user* at *IP* and store it on self."""
    url = "http://" + IP
    self.session = AqSession(user, password, url)
    # NOTE(review): bare attribute access — presumably warms/validates the
    # connection; confirm it is required
    self.session.User
    self.session.set_timeout(60)
def session(config):
    """Open and return a live aquarium connection from *config* credentials."""
    connection = AqSession(**config)
    return connection
op_id = 201958 #Find this from Aquarium. Open your plan, open the Job and find the list of operations on the left hand side. host_folder = '/Users/Orlando/Documents/Data/SynAgFiles' #Folder that will host a new directory with the data files and figures for this analaysis new_folder_name = 'ODL_SynAg_190116' #Name for new directory, which will be a subdirectory of 'host folder' import pandas as pd import os from FlowCytometryTools import * from pylab import * from FlowCytometryTools import FCPlate import pprint import csv import pydent from pydent import AqSession from tqdm import tqdm prod = AqSession(aq_username, aq_password, aq_url ) # Production Database #Enter a plan ID, get a list of operations. plan = prod.Plan.find(plan_id) job = prod.Job.find(job_id) # for file in job.uploads: # file.async_download(outdir=dir_path,overwrite=True) cwd = os.getcwd() dir_path= cwd + '/' + str(job.id) os.mkdir(dir_path) # uploads = job.uploads job_uploads=prod.Upload.where({'job': job.id}) # prod.Upload._download_files(job_uploads, dir_path, overwrite) for u in job_uploads: u.download(outdir=dir_path, filename = u.name, overwrite=False)
def main():
    """Open an Aquarium session using the shared resource credentials."""
    aquarium = resources['aquarium']
    session = AqSession(aquarium['login'],
                        aquarium['password'],
                        aquarium['aquarium_url'])
from pydent import AqSession
from tqdm import tqdm
import json

# Pretty-print helper for JSON-serializable objects
prettyprint = lambda x: json.dumps(x, indent=4, sort_keys=True)


def serialize_aq_obj(aq_obj):
    """Serialize an Aquarium model, including all of its relations."""
    return aq_obj.dump(all_relations=True)


if __name__ == '__main__':
    # NOTE(review): credentials are hard-coded here — move to config/env.
    session = AqSession('danyfu', 'whiteCatsSlouchFar', 'http://0.0.0.0/')
    # Each dump writes a bracketed list of records; note the trailing comma
    # before ']' makes the output invalid strict JSON.
    with open('all_allowable_field_types.json', 'w+') as f:
        f.write('[')
        for aq_obj in tqdm(session.AllowableFieldType.all()):
            f.write(prettyprint(serialize_aq_obj(aq_obj)))
            f.write(',')
        f.write(']')
    with open('all_collections.json', 'w+') as f:
        f.write('[')
        for aq_obj in tqdm(session.Collection.all()):
            f.write(prettyprint(serialize_aq_obj(aq_obj)))
            f.write(',')
        f.write(']')
    with open('all_data_associations.json', 'w+') as f:
        f.write('[')
        for aq_obj in tqdm(session.DataAssociation.all()):
            # NOTE(review): source chunk is truncated here — loop body missing
for key in args: #for every possible argument if args[key] is not None: #if it was used input_var_dict[key] = args[key] #change the dictionary value to be the inputted value #reassign the variables if they were changed through the command prompt job_id = input_var_dict["job"] username = input_var_dict["username"] password = input_var_dict["password"] image_area = input_var_dict["area"] #%% """ This module connects to Aquariumand downloads all the images """ prod = AqSession(username, password,"http://52.27.43.242/") #the URL is for the UW BIOFAB production server #Enter a plan ID, get a list of operations. job = prod.Job.find(job_id) cwd = os.getcwd() dir_path= "%s/Images_%d" % (cwd, job_id) os.mkdir(dir_path) job_uploads=prod.Upload.where({'job': job.id}) #if job_uploads is not None for u in job_uploads: u.download(outdir=dir_path, filename = u.name, overwrite=False) #%% """ Defines a dictionary with each item as key that leads to an array of the counts for each of its images in the main code. Also, there is a function which adds a new image count value to the appropriate