Example #1
0
    # print respdata
    # Persist the response body to disk so rdflib can re-read it below.
    # 'with' guarantees the file is closed even if write() raises
    # (the original open/write/close left the handle open on error).
    with open("sample_redis_manifest.rdf", "w") as text_file:
        text_file.write(respdata)
    graph = rdflib.Graph()
    try:
        with open("sample_redis_manifest.rdf", "r") as f:
            graph.parse(f, base="sample_redis_manifest.rdf")
    except IOError:
        # Best-effort: if the manifest just written cannot be re-read,
        # continue with an empty graph rather than crashing the worker.
        # FIX: 'except IOError, e' is Python-2-only syntax; the bound
        # exception was unused anyway, so it is dropped for py2/py3 compat.
        pass
    #    graph.parse('http://10.0.1.154/'+silo_name +"/datasets/" + item_id +"/manifest.rdf")
    #    graph.parse(data=respdata, format="application/rdf+xml")
    #    graph.parse(data=respdata, format="n3")
    #    graph = respdata

    solr = SolrConnection(c.get(worker_section, "solrurl"))
    solr_doc = gather_document("DataFinder", uuid, item_id, graph)
    # solr_doc = {'identifier': ['wed1'], 'aggregatedResource': ['http://datafinder-d2v.bodleian.ox.ac.uk/DataFinder/datasets/wed1/df_manifest_wed1.rdf'], 'mediator': ['admin'], 'text': ['yes', '', 'zool0982', '', '', 'http://vocab.ox.ac.uk/projectfunding#', '', 'seeking_approval', '', ''], 'embargoedUntilDate': ['2083-05-29T07:54:46Z'], 'alternative': ['wed1title'], 'id': ['wed1'], 'subject': [''], 'rights': ['http://ora.ouls.ox.ac.uk/objects/uuid%3A1d00eebb-8fed-46ad-8e38-45dbdb4b224c'], 'publisher': ['Bodleian Libraries, University of Oxford'], 'license': ['CC0 1.0 Universal (CC0 1.0). See http://creativecommons.org/publicdomain/zero/1.0/legalcode'], 'uuid': [u'51b51cd8e78f4da2951e288078cf3821'], 'language': [''], 'title': ['wed1'], 'embargoStatus': ['False'], 'description': ['wed1desc'], 'format': [''], 'modified': ['2013-05-29 07:54:46.606822'], 'filename': ['wed1/df_manifest_wed1.rdf'], 'currentVersion': ['2'], 'created': ['2013-05-29 07:54:46.360052'], 'silo': ['DataFinder'], 'type': ['', 'http://vocab.ox.ac.uk/dataset/schema#DataSet']}
    #    print "solr_doc = gather_document('DataFinder' ,"+ str(uuid)+" , "+ str(item_id)+" , "+str(graph)+" )"
    print solr_doc
    solr_doc = {"id": ["mond_ay2"], "silo": ["DataFinder"]}
    solr.delete(id="mond_ay2")
    print "deleted'"

    solr_doc = {
        "id": ["mond_ay2"],
        "silo": ["DataFindehhhhr"],
        "uuid": [u"78755d851f9a453b84a51b1c00c68553"],
        "depositor": "zool0982"
        #               'identifier': ['fri_day1'],
        #                'aggregatedResource': ['http://datafinder-d2v.bodleian.ox.ac.uk/DataFinder/datasets/fri_day1/df_manifest.rdf'],
    os.mkdir("workers_available")
  c = Config()
  base_superv_conf = "supervisord.conf.base"
  if len(sys.argv) == 2:
    # use a different base supervisor file
    base_superv_conf = sys.argv[1]
  supervisord_config = Config(base_superv_conf)

  if 'supervisor' in c.sections():
    # Mirror the [supervisor] section into supervisord's inet_http_server
    # block, falling back to the defaults below for any option not set.
    supervisord_config.add_section('inet_http_server')
    params = {'username':'******',
              'password':'******',
              'port':'127.0.0.1:9001'}
    for key in params:
      if c.has_option('supervisor', key):
        supervisord_config.set('inet_http_server', key, c.get('supervisor', key))
      else:
        # BUG FIX: was params['key'] -- the literal string 'key' is not a
        # dict key, so every fallback raised KeyError. Index with the loop
        # variable to pick up the default for this option.
        supervisord_config.set('inet_http_server', key, params[key])

  with open("supervisord.conf", "w") as cfgfile:
    supervisord_config.write(cfgfile)
    
  # process_* for simple, single use processes that don't require additional configuration
  #     aside from the 'command' instruction
  #     eg  command = ../redis/redis-server ../redis/redis.conf  
  for worker in [x for x in c.sections() if x.startswith("process_")]:
    # Worker defaults:
    params = {'autorestart':'true',
              'numprocs':'1',
              'process_name':'%s_%%(process_num)s' % worker,
              'autostart':'true',
from LogConfigParser import Config

import sys

from time import sleep

if __name__ == "__main__":
  c = Config()
  redis_section = "redis"
  worker_number = sys.argv[1]
  worker_section = sys.argv[2]
  if len(sys.argv) == 4:
    if "redis_%s" % sys.argv[3] in c.sections():
      redis_section = "redis_%s" % sys.argv[3]

  rq = RedisQueue(c.get(worker_section, "listento"), "logger_%s" % worker_number,
                  db=c.get(redis_section, "db"), 
                  host=c.get(redis_section, "host"), 
                  port=c.get(redis_section, "port")
                  )

  with open(c.get(worker_section, "logfile"), "a+") as logfile:
    while(True):
      line = rq.pop()
      if line:
        try:
          if line.endswith("\n"):
            logfile.write(line)
            rq.task_complete()
          else:
            logfile.writelines((line, "\n"))
      """ File was modified, so read new lines, look for error keywords """
      while True:
        line = fh.readline()
        if not line: break	
        print line
        r.submit_line(line)

    watcher = os.stat(log_filename)
    this_modified = watcher.st_mtime
    time.sleep(1)

if __name__=='__main__':
  # Entry point: read worker + redis settings from the config, then hand
  # off to log_watcher, which tails the logfile and pushes lines to redis.
  c = Config()
  worker_number = sys.argv[1]
  worker_section = sys.argv[2]

  # Default redis section, optionally overridden by a third CLI argument
  # naming an alternate "redis_<name>" section that must actually exist.
  redis_section = "redis"
  if len(sys.argv) == 4:
    candidate = "redis_%s" % sys.argv[3]
    if candidate in c.sections():
      redis_section = candidate

  # Snapshot the chosen redis section as a plain dict for log_watcher.
  redis_config = dict(c.items(redis_section))
  if 'port' in redis_config:
    # ConfigParser values are strings; the redis client needs an int port.
    redis_config['port'] = int(redis_config['port'])

  service = c.get(worker_section, "servicename")
  push_queue = c.get(worker_section, "pushto")
  watched_log = c.get(worker_section, "logfile")

  log_watcher(watched_log, service, push_queue, redis_config)
        f.close()
    return document


if __name__ == "__main__":
    c = Config()
    redis_section = "redis"
    worker_section = "worker_solr"
    worker_number = sys.argv[1]
    hours_before_commit = 1
    if len(sys.argv) == 3:
        if "redis_%s" % sys.argv[2] in c.sections():
            redis_section = "redis_%s" % sys.argv[2]

    rq = RedisQueue(
        c.get(worker_section, "listento"),
        "solr_%s" % worker_number,
        db=c.get(redis_section, "db"),
        host=c.get(redis_section, "host"),
        port=c.get(redis_section, "port"),
        errorqueue=c.get(worker_section, "errorq"),
    )

    host = "databank.ora.ox.ac.uk"
    username = "******"
    password = "******"
    db = Databank(host, username, password)

    solr = SolrConnection(c.get(worker_section, "solrurl"))

    idletime = 0.1
Example #6
0
            document['text'].append(unicode(o).encode("utf-8"))
    document = dict(document)
    return document


if __name__ == "__main__":
    c = Config()
    redis_section = "redis"
    worker_section = "worker_solr"
    worker_number = sys.argv[1]
    hours_before_commit = 1 
    if len(sys.argv) == 3:
        if "redis_%s" % sys.argv[2] in c.sections():
            redis_section = "redis_%s" % sys.argv[2]

    rq = RedisQueue(c.get(worker_section, "listento"), "solr_%s" % worker_number,
                  db=c.get(redis_section, "db"), 
                  host=c.get(redis_section, "host"), 
                  port=c.get(redis_section, "port"),
                  errorqueue=c.get(worker_section, "errorq")
                 )
    #DB_ROOT = c.get(worker_section, "dbroot")
    #rdfdb_config = Config("%s/production.ini" % DB_ROOT)
    #granary_root = rdfdb_config.get("app:main", "granary.store", 0, {'here':DB_ROOT})
  
    #g = Granary(granary_root)

    solr = SolrConnection(c.get(worker_section, "solrurl"))

    idletime = 2
    commit_time = datetime.now() + timedelta(hours=hours_before_commit)
Example #7
0
from LogConfigParser import Config

import sys

from time import sleep

if __name__ == "__main__":
  c = Config()
  redis_section = "redis"
  worker_section = "worker_broker"
  worker_number = sys.argv[1]
  if len(sys.argv) == 3:
    if "redis_%s" % sys.argv[2] in c.sections():
      redis_section = "redis_%s" % sys.argv[2]

  rq = RedisQueue(c.get(worker_section, "listento"), "broker_%s" % worker_number,
                  db=c.get(redis_section, "db"), 
                  host=c.get(redis_section, "host"), 
                  port=c.get(redis_section, "port")
                  )
  if c.has_option(worker_section, "fanout_status_queue"):
    # keep a queue of messages to deliver for a given push'd item
    # better resumeability at the cost of more redis operations
    topushq = RedisQueue(c.get(worker_section, "fanout_status_queue"), "fanout_broker_%s" % worker_number,
                  db=c.get(redis_section, "db"), 
                  host=c.get(redis_section, "host"), 
                  port=c.get(redis_section, "port")
                  )
  fanout_queues = [x.strip() for x in c.get(worker_section, "fanout").split(",") if x]
  
  if c.has_option(worker_section, "idletime"):
Example #8
0
        else:
            document['text'].append(unicode(o).encode("utf-8"))
    document = dict(document)
    return document

if __name__ == "__main__":
    c = Config()
    redis_section = "redis"
    worker_section = "worker_solr"
    worker_number = sys.argv[1]
    hours_before_commit = 1
    if len(sys.argv) == 3:
        if "redis_%s" % sys.argv[2] in c.sections():
            redis_section = "redis_%s" % sys.argv[2]

    rq = RedisQueue(c.get(worker_section, "listento"), "solr_%s" % worker_number,
                  db=c.get(redis_section, "db"), 
                  host=c.get(redis_section, "host"), 
                  port=c.get(redis_section, "port"),
                  errorqueue=c.get(worker_section, "errorq")
                 )
    DB_ROOT = c.get(worker_section, "dbroot")
    rdfdb_config = Config("%s/production.ini" % DB_ROOT)
    granary_root = rdfdb_config.get("app:main", "granary.store", 0, {'here':DB_ROOT})
  
    g = Granary(granary_root)

    solr = SolrConnection(c.get(worker_section, "solrurl"))

    idletime = 0.1
    commit_time = datetime.now() + timedelta(hours=hours_before_commit)
           }

from uuid import uuid4
import hashlib

if __name__ == "__main__":
  c = Config()
  redis_section = "redis"
  store_section = "ofs"
  worker_section = "worker_%s" % PROCESS
  worker_number = sys.argv[1]
  if len(sys.argv) == 3:
    if "redis_%s" % sys.argv[2] in c.sections():
      redis_section = "redis_%s" % sys.argv[2]

  rq = RedisQueue(c.get(worker_section, "listento"), "%s_%s" % (PROCESS, worker_number),
                  db=c.get(redis_section, "db"), 
                  host=c.get(redis_section, "host"), 
                  port=c.get(redis_section, "port")
                  )
  r = Redis(db=c.get(redis_section, "db"),
                  host=c.get(redis_section, "host"),
                  port=c.get(redis_section, "port"))

  ofs_module = c.get(store_section, "ofs_client_module")
  ofs_root = c.get(store_section, "ofs_root")
  ofs_class = c.get(store_section, "ofs_class")
  try:
    OFS_mod = __import__(ofs_module)
    OFS_impl = getattr(OFS_mod, ofs_class)
  except ImportError:
Example #10
0
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
logger.addHandler(ch)

if __name__ == "__main__":
  c = Config()
  redis_section = "redis"
  worker_number = sys.argv[1]
  worker_section = "worker_pirus2"
  if len(sys.argv) == 3:
    if "redis_%s" % sys.argv[2] in c.sections():
      redis_section = "redis_%s" % sys.argv[2]

  rq = RedisQueue(c.get(worker_section, "listento"), "pirus_%s" % worker_number,
                  db=c.get(redis_section, "db"), 
                  host=c.get(redis_section, "host"), 
                  port=c.get(redis_section, "port")
                  )

  try:
    plugin_name = c.get(worker_section, "repository_plugin")
    # __import__("a.b.c") returns the top-level package 'a'; walk the
    # dotted path to reach the leaf module itself.
    plugin_module = __import__(plugin_name)
    components = plugin_name.split('.')
    for comp in components[1:]:
      plugin_module = getattr(plugin_module, comp)
  except ImportError as e:
    # FIX: 'except ImportError, e' was Python-2-only syntax; 'as' works on
    # both 2.6+ and 3.x. plugin_name is always bound here because c.get()
    # cannot raise ImportError, so reuse it instead of a second c.get call.
    # Also corrects the "Coundn't" typo in the logged message.
    logger.error("Couldn't import module: '%s' - %s" % (plugin_name, e))
    sys.exit(2)