def __init__(self, **kwargs):
    """Initialize and load the RDF framework.

    kwargs:
        config: required; configuration passed to RdfConfigManager
        reset: reload the definition files even if cached json files
            exist (default False)
        server_check: when True (default) verify that the server core
            is up before loading the framework

    raises:
        EnvironmentError: if the 'config' kwarg is not supplied
    """
    log = logging.getLogger("%s.%s" % (self.log_name, inspect.stack()[0][3]))
    log.setLevel(self.log_level)
    config = kwargs.get('config')
    if config:
        # BUG FIX: removed leftover debug print("Should find value")
        CFG = RdfConfigManager(config=config)
    else:
        raise EnvironmentError("kwarg 'config' is required")
    self.cfg = CFG
    # NOTE(review): NSM is bound locally but never used below — presumably
    # RdfNsManager is a singleton configured by this call; confirm.
    NSM = RdfNsManager(config=CFG)
    self.root_file_path = CFG.RDF_DEFINITION_FILE_PATH
    self._set_datafiles()
    self.rml = DictClass()
    # if the definition files have been modified since the last json
    # files were saved reload the definition files
    reset = kwargs.get('reset', False)
    # verify that the server core is up and running
    servers_up = True
    if kwargs.get('server_check', True):
        servers_up = verify_server_core(600, 0)
    else:
        log.info("server check skipped")
    if not servers_up:
        # BUG FIX: corrected typo "Sever" -> "Server" in log message
        log.info("Server core not initialized --- Framework Not loaded")
    if servers_up:
        log.info("*** Loading Framework ***")
        self._load_data(reset)
        RdfPropertyFactory(CFG.def_tstore, reset=reset)
        RdfClassFactory(CFG.def_tstore, reset=reset)
        log.info("*** Framework Loaded ***")
def setUp(self):
    """Build a loaded RdfFramework plus a sample RdfDataset for the tests.

    BUG FIX: the original referenced the bare names ``cfg``, ``item_uri``,
    ``conn`` and ``data``, which are only ever bound as instance
    attributes here — they would raise NameError. All uses are now
    qualified with ``self.``.
    """
    self.r = rdfframework.rdfclass
    # load the framework from the module-level test config (side effects:
    # connects to the configured triplestores)
    self.rdf_framework = RdfFramework(reset=False, config=config)
    self.cfg = RdfConfigManager()
    self.item_uri = "<http://library.kean.edu/173849#Work>"
    self.conn = self.cfg.data_tstore
    self.data = get_all_item_data(self.item_uri, self.conn)
    self.x = RdfDataset(self.data, self.item_uri)
def setup_conn(**kwargs):
    """Return a triplestore connection chosen from *kwargs* or the config.

    Order of precedence:
        1. kwargs['conn']            - an established connection object
        2. kwargs['tstore_def']      - dictionary of connection parameters
           as outlined in the config file
        3. kwargs['triplestore_url'] - url to a triplestore; a blazegraph
           connection will be used
        4. kwargs['rdflib']          - definition for an rdflib connection
        5. RdfConfigManager.data_tstore
        6. RdfConfigManager.TRIPLESTORE_URL
        7. a default blazegraph connection
    """
    from rdfframework.configuration import RdfConfigManager

    # guard clauses replace the original if/elif chain; branch order and
    # the call made in each branch are unchanged
    if kwargs.get("conn"):
        return kwargs["conn"]
    if kwargs.get("tstore_def"):
        return make_tstore_conn(kwargs["tstore_def"])
    if kwargs.get("triplestore_url"):
        return RdfwConnections["triplestore"]["blazegraph"](
                kwargs["triplestore_url"])
    if kwargs.get("rdflib"):
        return RdfwConnections["triplestore"]["rdflib"](kwargs.get("rdflib"))
    if RdfConfigManager().data_tstore and \
            not isinstance(RdfConfigManager().data_tstore, EmptyDot):
        return ConnManager().datastore
    if RdfConfigManager().TRIPLESTORE_URL and \
            not isinstance(RdfConfigManager().TRIPLESTORE_URL, EmptyDot):
        return RdfwConnections["triplestore"]["blazegraph"](
                RdfConfigManager().TRIPLESTORE_URL)
    return RdfwConnections["triplestore"]["blazegraph"]()
import os import logging import inspect from rdfframework.configuration import RdfConfigManager, RdfNsManager MNAME = "%s.%s" % \ (os.path.basename(os.path.split(inspect.stack()[0][1])[0]), os.path.basename(inspect.stack()[0][1])) LOG_LEVEL = logging.DEBUG CFG = RdfConfigManager() NSM = RdfNsManager() CONVERT_BN_TO_URIS = """ DELETE { ?bn ?bn_p ?bn_o . ?f ?fp ?bn . } INSERT { ?f ?fp ?id . ?id ?bn_p ?bn_o. } where { ?bn a bf:Topic . filter(isblank(?bn)) ?bn rdfs:label ?o . ?bn ?bn_p ?bn_o . ?f ?fp ?bn . BIND (IRI(concat(REPLACE(str(?f),"#Work","#Topic"),'/',ENCODE_FOR_URI(?o))) as ?id)
# Ad-hoc exercise/profiling script for rdfframework.
# NOTE(review): this module runs side effects at import time (configures
# RdfConfigManager and opens a datastore connection); the commented-out
# sections below are worked examples kept for manual experimentation.
import validconfig as config, json, pdb, pprint
from rdfframework.configuration import RdfConfigManager
from rdfframework import rdfclass
from rdfframework.utilities import colors
from rdfframework.datatypes import Uri, XsdBoolean
from rdfframework.datasets import RdfDataset
from rdfframework import search
from rdfframework.sparql import get_all_item_data
import rdfframework, datetime
import bibcat
# pdb.set_trace()
# rdfframework.configure_logging(rdfframework.__modules__, "dummy")
import cProfile
# configure the framework from the local validconfig module; side effect:
# establishes the connections exposed via cfg.conns
cfg = RdfConfigManager(config.__dict__, turn_on_vocab=True)
from rdfframework.datatypes import XsdString
# tutt = rdfclass.schema_Organization("http://tutt.edu/")
# tutt.add_property('schema_name', XsdString("Tutt Library"))
from rdfframework.datasets import RdfDataset
# x = RdfDataset()
# x[tutt.subject] = tutt
# live connection to the configured datastore
conn = cfg.conns.datastore
import rdfframework.sparql as sp
import datetime
# # # print(colors.white.blue("       Instance Example      "))
# # # data = sp.get_all_item_data('<https://plains2peaks.org/20497be2-732c-11e7-b6f2-005056c00008>', conn)
# # # item = RdfDataset(data, "<https://plains2peaks.org/20497be2-732c-11e7-b6f2-005056c00008>")
# # # item[item.base_uri].add_property(cfg.nsm.bf.test, XsdBoolean(True))
# # # print(json.dumps(item[item.base_uri].es_json(), indent=8))
# # # print(item[item.base_uri].es_json())
# # print(colors.white.blue("       Work Example      "))
# # # # data2 = sp.get_all_item_data('<https://plains2peaks.org/1f142250-0871-11e8-ad63-005056c00008#Work>', conn)
from .forms import ProfileForm, SearchForm, ArticleForm, BookForm, BookChapterForm from github import Github from .sparql import add_qualified_generation, add_qualified_revision from .sparql import CITATION, BOOK_CITATION, BOOK_CHAPTER_CITATION, CREATIVE_WORK_CITATION, EMAIL_LOOKUP, ORG_INFO, ORG_LISTING, ORG_PEOPLE from .sparql import PERSON_HISTORY, PERSON_INFO, PERSON_LABEL, PREFIX, PROFILE from .sparql import RESEARCH_STMT, SUBJECTS, SUBJECTS_IRI from .sparql import COUNT_ARTICLES, COUNT_BOOKS, COUNT_JOURNALS, COUNT_ORGS, COUNT_PEOPLE, COUNT_CHAPTERS from .sparql import COUNT_BOOK_AUTHORS, WORK_INFO from .profiles import add_creative_work, add_profile, delete_creative_work from .profiles import edit_creative_work, generate_citation_html, update_profile from rdfframework.configuration import RdfConfigManager app = Flask(__name__, instance_relative_config=True) app.config.from_pyfile('config.py') CONFIG_MANAGER = RdfConfigManager(app.config, verify=False, delay_check=True) CONNECTION = CONFIG_MANAGER.conns BF = CONFIG_MANAGER.nsm.bf SCHEMA = CONFIG_MANAGER.nsm.schema login_manager = LoginManager(app) ldap_manager = LDAP3LoginManager(app) PROJECT_BASE = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) USERS = OrderedDict() BACKEND_THREAD = None class EmailThread(threading.Thread): def __init__(self, **kwargs):
import logging import requests import urllib import datetime import pdb from dateutil.parser import parse as date_parse from rdfframework.connections import ConnManager from rdfframework.datatypes import RdfNsManager from rdfframework.configuration import RdfConfigManager from rdfframework.utilities import make_list from .datamanager import DataFileManager __CONNS__ = ConnManager() __CFG__ = RdfConfigManager() __NSM__ = RdfNsManager() class DefManagerMeta(type): """ Metaclass ensures that there is only one instance of the RdfConnManager """ _instances = {} def __call__(cls, *args, **kwargs): if cls not in cls._instances: cls._instances[cls] = super(DefManagerMeta, cls).__call__(*args, **kwargs) else: values = None
import pdb import argparse import requests import json import pprint from elasticsearch import Elasticsearch, helpers from rdfframework.utilities import IsFirst, get2, Dot from rdfframework.search import get_es_action_item, EsMappings from rdfframework.configuration import RdfConfigManager MODULE_NAME = "%s.%s" % \ (os.path.basename(os.path.split(inspect.stack()[0][1])[0]), os.path.basename(inspect.stack()[0][1])) config = RdfConfigManager() class EsBase(): ''' Base elasticsearch rdfframework class for common es operations''' ln = "%s:EsBase" % MODULE_NAME log_level = logging.INFO def __init__(self, **kwargs): self.es_url = kwargs.get('es_url', config.ES_URL) self.es = kwargs.get("es", Elasticsearch([self.es_url])) self.op_type = kwargs.get("op_type", "index") self.es_index = kwargs.get("es_index") self.doc_type = kwargs.get("doc_type") self.reset_index = kwargs.get("reset_index", False)