def setUp(self):
    """Open a Remote WebDriver session against the Selenium grid hub.

    The browser name and hub address come from the BROWSER and
    NODE_HUB_ADDRESS environment variables (defaults: chrome,
    192.168.112.2).  Also attaches a logger to the test instance.
    """
    caps = {'browserName': os.getenv('BROWSER', 'chrome')}
    address = os.getenv('NODE_HUB_ADDRESS', '192.168.112.2')
    # Build the hub URL once and reuse it -- the original built it twice
    # and left this variable unused.
    grid = 'http://{0}:4444/wd/hub'.format(address)
    # NOTE(review): desired_capabilities is the Selenium 3 API; Selenium 4
    # replaces it with options objects -- confirm the pinned version.
    self.browser = webdriver.Remote(
        command_executor=grid,
        desired_capabilities=caps)
    self.logger = get_logger("sunnxt_homepage")
    # Report the browser actually requested, not a hard-coded 'chrome'
    # (also fixes the "browswer" typo in the log message).
    self.logger.info("Launching {0} in {1} browser".format(
        website, caps['browserName']))
from log_setup import get_logger

logger = get_logger()

# WOS namespace used when parsing full-record XML.
NS = {
    'rec': 'http://scientific.thomsonreuters.com/schema/wok5.4/public/FullRecord'
}

# Input records and RDF output cache locations.
RECORD_PATH = 'data/pubs/*/*.xml'
CACHE_PATH = 'data/rdf/'

# Named-graph URIs the harvest writes into.
PUB_GRAPH = "http://localhost/data/pubs"
CATEGORY_GRAPH = "http://localhost/data/wos-categories"
KEYWORDS_PLUS_GRAPH = "http://localhost/data/wos-keywords-plus"
PEOPLE_GRAPH = "http://localhost/data/people"
PEOPLE_IDENTIFIERS_GRAPH = "http://localhost/data/people-identifiers"
PEOPLE_EMAIL_GRAPH = "http://localhost/data/people-email"
PEOPLE_DTU_DAIS_GRAPH = "http://localhost/data/people-dtu-dais"
AFFILIATION_NG = "http://localhost/data/people-affiliation"
PEOPLE_AUTHORSHIP = "http://localhost/data/people-authorship"
ADDRESS_GRAPH = "http://localhost/data/address"
SUBORG_GRAPH = "http://localhost/data/suborgs"
# Fixed: the original value embedded stray single quotes inside the
# string ("'http://...'"), so the URI would never match its graph.
CLEAN_SUBORG_GRAPH = "http://localhost/data/clean-suborgs"
CATEGORY_NG = "http://localhost/data/wos-venue-categories"
COUNTRY_CODE_NG = "http://localhost/data/organization-extra"

# Incites named graphs.
INCITES_TOP_CATEGORIES = "http://localhost/data/incites-top-categories"
INCITES_PUB_YEAR_COUNTS = "http://localhost/data/incites-pub-year-counts"
import logging.handlers import sys from converis import backend from converis import client from converis.namespaces import D, VIVO, CONVERIS # local models import models import log_setup from utils import ThreadedHarvest from rdflib import Graph, Literal logger = log_setup.get_logger() if os.environ.get('HTTP_CACHE') == "1": import requests_cache requests_cache.install_cache('converis', backend='redis', allowable_methods=('GET', 'PUT')) def get_trials(trials): q = """ <data xmlns="http://converis/ns/webservice"> <query> <filter for="ClincialTrial" xmlns="http://converis/ns/filterengine" xmlns:sort="http://converis/ns/sortingengine"> <and> <and>
import logging
import os
import sys
from Queue import Queue  # Python 2 stdlib name; 'queue' on Python 3
from threading import Thread

from converis import backend
from converis import client
from converis.namespaces import D, VIVO, CONVERIS

# local models
import models
import log_setup

from rdflib import Graph, Literal

# 'logging' was referenced below without ever being imported in this
# module (NameError at import time); the 'import logging' above fixes it.
logger = log_setup.get_logger(client_level=logging.DEBUG)

# Named graph that receives the position triples.
NG = "http://localhost/data/positions"

# Optional HTTP response caching (requests-cache backed by Redis),
# enabled by setting HTTP_CACHE=1 in the environment.
if os.environ.get('HTTP_CACHE') == "1":
    import requests_cache
    requests_cache.install_cache(
        'converis',
        backend='redis',
        allowable_methods=('GET', 'PUT'))

# Number of harvest worker threads (override with THREADS).
THREADS = int(os.environ.get('THREADS', 5))


def _p(msg):
    # Lightweight progress message to stdout.
    sys.stdout.write(msg + "\n")
"level to DEBUG.") parser.add_argument("--api", action="store_true", help="Post triples " "to VIVO.") parser.add_argument("--extended", action="store_true", help="Create RDF " "for additional InCites fields. *NOTE* These fields " "may NOT be shown publicly.") return parser.parse_args(args) if __name__ == "__main__": args = parse_args(sys.argv[1:]) logger = get_logger(args.debug) filename = path.basename(__file__).replace('.py', '') pubs = get_wos_pubs() if len(pubs) == 0: print('Error getting publications from VIVO.') incites_data = [] for batch in grouper(pubs.keys(), 100): idata = get_incites(batch) incites_data += idata g = process_incites(incites_data, pubs) if args.extended: g += process_extended(incites_data, pubs) if len(g) > 0: logger.info("Writing file as {}".format(filename))