def _compare_structured_requests(self, document):
        """Regression-compare a generated structured request against a golden file.

        Loads the golden structured request from *document* (a JSON file path),
        re-downloads the referenced Google Doc at its pinned revision, uploads a
        fresh copy to a test Drive folder, runs the intent parser on that copy,
        and asserts the parser output matches the golden data field by field.

        Args:
            document: path to a golden structured-request JSON file; must contain
                'experiment_reference_url', 'doc_revision_id', 'name', 'lab',
                and 'runs' (and optionally 'parameters').

        Raises:
            AssertionError (via self.fail / assertEqual) on any mismatch or if
            the golden file lacks a 'doc_revision_id'.
        """
        golden_structured_request = intent_parser_utils.load_json_file(document)
        golden_doc_url = golden_structured_request['experiment_reference_url']
        doc_id = intent_parser_utils.get_google_doc_id(golden_doc_url)

        # A pinned revision is required so the test is reproducible even if the
        # source document is later edited.
        if 'doc_revision_id' not in golden_structured_request:
            self.fail('No document revision specified')

        doc_revision_id = golden_structured_request['doc_revision_id']

        # Download the pinned revision as Word, then re-upload it as a Google Doc
        # into the dedicated test folder so the parser runs on a fresh copy.
        upload_mimetype = intent_parser_constants.GOOGLE_DOC_MIMETYPE
        download_mimetype = intent_parser_constants.WORD_DOC_MIMETYPE
        response = self.google_accessor.get_file_with_revision(doc_id, doc_revision_id, download_mimetype)

        drive_folder_test_dir = '1693MJT1Up54_aDUp1s3mPH_DRw1_GS5G'  # test Drive folder id
        self.uploaded_file_id = self.google_accessor.upload_revision(golden_structured_request['name'], response.content, drive_folder_test_dir, download_mimetype, title=golden_structured_request['name'], target_format=upload_mimetype)
        print('%s upload doc %s' % (datetime.now().strftime("%d/%m/%Y %H:%M:%S"), self.uploaded_file_id))

        intent_parser = self.intentparser_factory.create_intent_parser(self.uploaded_file_id)
        intent_parser.process()
        generated_structured_request = intent_parser.get_structured_request()

        # Skip data that are modified from external resources:
        # experiment_reference, challenge_problem, doc_revision_id, and experiment_id.
        self.assertEqual('https://docs.google.com/document/d/%s' % self.uploaded_file_id, generated_structured_request['experiment_reference_url'])
        self.assertEqual(golden_structured_request['lab'], generated_structured_request['lab'])
        self.assertEqual(golden_structured_request['name'], generated_structured_request['name'])
        self._compare_runs(golden_structured_request['runs'], generated_structured_request['runs'])
        if 'parameters' in golden_structured_request:
            self.assertEqual(golden_structured_request['parameters'], generated_structured_request['parameters'])
 def update_project_metadata(self, script_id, remote_content, local_code_path, local_manifest_path):
     """
     Update a script project's remote metadata with local metadata.

     The project's server-side code entry is replaced with the contents of
     local_code_path, its manifest entry with the JSON parsed from
     local_manifest_path, and the resulting file list is pushed back to the
     Apps Script service.

     Args:
         script_id: id associated to a script project.
         remote_content: A Content object (dict) holding the project's 'files' list.
         local_code_path: path to the local server code file.
         local_manifest_path: path to the local manifest JSON file.

     Returns:
         If request is successful, a Content object is returned in the form of json
     """
     files = remote_content['files']

     # Swap in the local server code, stripped of surrounding whitespace.
     code_idx = self._get_code_file_index(files)
     files[code_idx]['source'] = str(intent_parser_utils.load_file(local_code_path)).strip()

     # Replace the manifest entry wholesale with the parsed local manifest.
     manifest_idx = self._get_manifest_file_index(files)
     files[manifest_idx] = intent_parser_utils.load_json_file(local_manifest_path)

     return self._service.projects().updateContent(
         body={'files': files},
         scriptId=script_id).execute()
    def start_analyze_controller(self):
        """Load the ignored analyze terms from disk and launch the analyze thread.

        Reads the ignore-terms JSON file under the processing lock, marks the
        controller as started, and starts the background analyze thread.

        Fix: the lock is now held via a `with` block so it is released even if
        `load_json_file` raises; the original acquire()/release() pair would
        leak the lock on error and deadlock later acquirers.
        """
        self.LOGGER.info('Fetching ignored terms from file.')

        with self._analyze_processing_lock:
            self._ignore_terms = ip_utils.load_json_file(self.ANALYZE_IGNORE_TERMS_FILE)
        self._started = True
        self._analyze_thread.start()
Ejemplo n.º 4
0
    def set_project_metadata(self, script_id, project_metadata, user_obj,
                             local_code_path, local_manifest_path,
                             code_file_name):
        """
        Set project's remote metadata with local metadata.
        This includes updating manifest file and adding to project metadata a SERVER_JS file for the server code.

        Args:
            script_id: id associated to a script project.
            project_metadata: A Content object (dict) holding the project's 'files' list.
            user_obj: user object recorded as the code file's last-modify user.
            local_code_path: path to the local JS server code file.
            local_manifest_path: path to the local manifest JSON file.
            code_file_name: name to give the new SERVER_JS file entry.

        Returns:
            If request is successful, a Content object representing the metadata is returned in the json format.
        """

        file_list = project_metadata['files']
        manifest_index = self._get_manifest_file_index(file_list)
        file_list[manifest_index] = intent_parser_utils.load_json_file(
            local_manifest_path)

        source = str(intent_parser_utils.load_file(local_code_path)).strip()
        code_functions = script_addon_utils.get_function_names_from_js_file(
            local_code_path)

        # Fix: datetime.utcnow() is deprecated (Python 3.12+) and naive; use an
        # aware UTC timestamp, then drop the tzinfo so isoformat() keeps the
        # original "...Z" format without a duplicate "+00:00" offset.
        now_utc = datetime.datetime.now(datetime.timezone.utc)
        created_time = now_utc.replace(tzinfo=None).isoformat("T") + "Z"

        code_file = {
            "name": code_file_name,
            "type": 'SERVER_JS',
            "source": source,
            "lastModifyUser": user_obj,
            "createTime": created_time,
            "updateTime": created_time,
            "functionSet": code_functions
        }

        file_list.append(code_file)
        request = {'files': file_list}

        content_obj = self._service.projects().updateContent(
            body=request, scriptId=script_id).execute()
        return content_obj
Ejemplo n.º 5
0
    def setUp(self):
        """
        Configure an instance of IntentParserServer for spellcheck testing.

        Loads SynBioHub credentials from sbh_creds.json next to this file,
        builds the SBH/dictionary/Strateos accessors, reads the spellcheck
        test document into self.doc_content, starts the server, and mocks
        out the Google accessor and response sending.
        """

        # Credentials file is expected to sit beside this test module.
        creds = intent_parser_utils.load_json_file(
            os.path.join(os.path.dirname(os.path.realpath(__file__)),
                         'sbh_creds.json'))
        sbh_collection_uri = 'https://hub-staging.sd2e.org/user/sd2e/src/intent_parser_collection/1'

        sbh = IntentParserSBH(
            sbh_collection_uri=sbh_collection_uri,
            sbh_spoofing_prefix='https://hub.sd2e.org',
            spreadsheet_id=intent_parser_constants.UNIT_TEST_SPREADSHEET_ID,
            sbh_username=creds['username'],
            sbh_password=creds['password'])

        sbol_dictionary = SBOLDictionaryAccessor(
            intent_parser_constants.UNIT_TEST_SPREADSHEET_ID, sbh)
        strateos_accessor = StrateosAccessor()
        # NOTE(review): intent_parser_server is constructed here but never used,
        # while self.ips is used below without a visible assignment in this
        # method — presumably self.ips is set elsewhere, or this line was meant
        # to be `self.ips = IntentParserServer(...)`. Confirm against the rest
        # of the test class.
        intent_parser_server = IntentParserServer(sbh,
                                                  sbol_dictionary,
                                                  strateos_accessor,
                                                  bind_ip='localhost',
                                                  bind_port=8081)

        # Load the spellcheck fixture document as parsed JSON.
        self.doc_content = None
        with open(os.path.join(self.dataDir, self.spellcheckFile), 'r') as fin:
            self.doc_content = json.loads(fin.read())

        if self.doc_content is None:
            self.fail('Failed to read in test document! Path: ' +
                      os.path.join(self.dataDir, self.spellcheckFile))

        self.ips.initialize_server()
        self.ips.start(background=True)

        # Replace network-facing pieces with mocks so the test stays offline.
        self.ips.google_accessor = Mock()
        self.ips.google_accessor.get_document = Mock(
            return_value=self.doc_content)
        self.ips.send_response = Mock()
 def _authenticate_credentials(self):
     """Connect to the catalog_staging Mongo database.

     The connection URI is read from the 'dbURI' key of the
     intent_parser_api_keys.json file located next to this module.
     """
     key_dir = os.path.dirname(os.path.realpath(__file__))
     key_path = os.path.join(key_dir, 'intent_parser_api_keys.json')
     db_uri = ip_util.load_json_file(key_path)['dbURI']
     self.database = pymongo.MongoClient(db_uri).catalog_staging
def _update_addon_project(app_script_access, script_id, current_release):
    """Push local code/manifest to an existing script project and publish a new version."""
    remote_metadata = app_script_access.get_project_metadata(script_id)
    app_script_access.update_project_metadata(
        script_id, remote_metadata,
        INTENT_PARSER_ADDON_CODE_FILE,
        INTENT_PARSER_MANIFEST_FILE)

    new_version = app_script_access.get_head_version(script_id) + 1
    publish_message = current_release + ' Release'
    app_script_access.create_version(script_id, new_version, publish_message)


def _create_addon_project(app_script_access, doc_id):
    """Create a script project bound to doc_id, push local metadata, and return its script id."""
    script_proj_title = 'IPProject Release'
    response = app_script_access.create_project(script_proj_title, doc_id)
    script_id = response['scriptId']

    remote_metadata = app_script_access.get_project_metadata(script_id)
    app_script_access.set_project_metadata(
        script_id, remote_metadata, USER_ACCOUNT,
        INTENT_PARSER_ADDON_CODE_FILE, INTENT_PARSER_MANIFEST_FILE,
        'Code')
    return script_id


def perform_automatic_run(current_release,
                          drive_id='1FYOFBaUDIS-lBn0fr76pFFLBbMeD25b3'):
    """Create or update the intent-parser add-on for every doc in a Drive folder.

    For each document id under *drive_id*: if it is already tracked in the
    local add-on file, its script project is updated (and a new version
    published) when its recorded release differs from *current_release*;
    otherwise a new script project is created for it. The local add-on file
    is rewritten after each successful doc so progress survives interruption.
    On an HttpError (API quota), the doc is re-queued and processing pauses
    for 60 seconds.

    Args:
        current_release: release version string to stamp into the tracking file.
        drive_id: Drive folder id to scan for documents.
    """
    drive_access = GoogleAccessor().get_google_drive_accessor(version=3)
    app_script_access = GoogleAccessor().get_google_app_script_accessor()

    local_docs = util.load_json_file(ADDON_FILE)
    remote_docs = drive_access.get_all_docs(drive_id)
    while remote_docs:
        doc = remote_docs.pop(0)
        r_id = doc
        logger.info('Processing doc: ' + r_id)
        if r_id in local_docs:
            try:
                metadata = local_docs[r_id]
                if metadata['releaseVersion'] != current_release:
                    logger.info(
                        'Updating script project metadata for doc: %s' % r_id)
                    script_id = metadata['scriptId']
                    _update_addon_project(app_script_access, script_id,
                                          current_release)
                    local_docs[r_id] = {
                        'scriptId': script_id,
                        'releaseVersion': current_release
                    }
                    util.write_json_to_file(local_docs, ADDON_FILE)
            except errors.HttpError:
                # Quota exhausted: re-queue this doc and back off.
                logger.info('Reached update quota limit!')
                remote_docs.append(doc)
                time.sleep(60)
        else:
            try:
                logger.info('Creating add-on for doc: %s' % r_id)
                script_id = _create_addon_project(app_script_access, r_id)
                local_docs[r_id] = {
                    'scriptId': script_id,
                    'releaseVersion': current_release
                }
                util.write_json_to_file(local_docs, ADDON_FILE)
            except errors.HttpError:
                # Quota exhausted: re-queue this doc and back off.
                logger.info('Reached create quota limit!')
                remote_docs.append(doc)
                time.sleep(60)
 def _authenticate_credentials(self):
     """Load the experiment execution/authentication tokens from the key file.

     Reads intent_parser_api_keys.json (located next to this module) and
     stores its 'experiment_execution_token' in self._status_token and its
     'experiment_authentication_token' in self._execution_token.

     Fix: the credentials file was parsed twice; it is now read once.
     """
     credential_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'intent_parser_api_keys.json')
     credentials = ip_util.load_json_file(credential_file)
     self._status_token = credentials['experiment_execution_token']
     self._execution_token = credentials['experiment_authentication_token']
Ejemplo n.º 9
0
    def generate_item_map(self, *, use_cache=True):
        """
        Use the SBOL Dictionary to generate a dictionary of common names referring to its SBH URI and store it into a local item-map.json file

        Args:
            use_cache: when True (default), seed the map from the existing
                item-map file before merging in fresh spreadsheet data.

        Returns:
            dict mapping common names and lab UIDs to SynBioHub URIs; the
            same mapping is also written to self.item_map_file.
        """
        item_map = {}
        self.logger.info('Generating item map, %d' % time.time())
        if use_cache:
            item_map = intent_parser_utils.load_json_file(self.item_map_file)
            self.logger.info('Num items in item_map: %d' % len(item_map))

        lab_uid_src_map = {}     # uid -> lab id that supplied it
        lab_uid_common_map = {}  # uid -> common name it was registered under
        sheet_data = self.fetch_spreadsheet_data()
        for tab in sheet_data:
            for row in sheet_data[tab]:
                # Merged guard clauses (was `not 'x' in row` + len()==0 pairs):
                # skip rows missing a non-empty common name or URI. Using
                # row.get also avoids crashing if a present key holds None.
                common_name = row.get('Common Name')
                uri = row.get('SynBioHub URI')
                if not common_name or not uri:
                    continue

                # Add common name to the item map
                item_map[common_name] = uri
                # There are also UIDs for each lab to add
                for lab_uid in intent_parser_constants.LAB_IDS_LIST:
                    # Ignore if the spreadsheet doesn't contain this lab
                    if lab_uid not in row or row[lab_uid] == '':
                        continue
                    # UID can be a CSV list, parse each value
                    for uid_str in row[lab_uid].split(sep=','):
                        # UID must meet the min length threshold or be in the
                        # exception list; otherwise it is filtered out.
                        if (len(uid_str) < self.UID_LENGTH_THRESHOLD
                                and uid_str not in self.UID_LENGTH_EXCEPTION):
                            self.logger.debug('Filtered %s %s for length' %
                                              (lab_uid, uid_str))
                            continue

                        if uid_str not in item_map:
                            # New UID: record it with this URI and remember
                            # which lab/common name introduced it.
                            item_map[uid_str] = uri
                            lab_uid_src_map[uid_str] = lab_uid
                            lab_uid_common_map[uid_str] = common_name
                        elif uid_str in lab_uid_src_map:
                            # UID seen before from a lab: same common name is
                            # harmless, a different one is a real conflict.
                            if lab_uid_common_map[uid_str] != common_name:
                                self.logger.error(
                                    'Trying to add %s %s for common name %s, but the item map already contains %s from %s for common name %s!'
                                    %
                                    (lab_uid, uid_str, common_name,
                                     uid_str, lab_uid_src_map[uid_str],
                                     lab_uid_common_map[uid_str]))
                        # else: UID matches an existing common-name entry;
                        # adding it again would be redundant, so do nothing.

        intent_parser_utils.write_json_to_file(item_map, self.item_map_file)

        self.logger.info('Num items in item_map: %d' % len(item_map))

        return item_map