Example #1
#!/usr/bin/python
# -*- coding: utf-8 -*-

import logging
import xml.etree.ElementTree as ET

import requests

from biblib.crosswalks import openurl_crosswalk
from biblib.services import config_service
from biblib.util import console
from biblib.util import xmletree

console.setup_console()

config_openurl = config_service.config["openurl"]

openurl_endpoint = config_openurl["endpoint"]


def request_periodical_by_issn(issn):
    # Request periodical by ISSN:
    # http://gl5sm8uv5q.openurl.xml.serialssolutions.com/openurlxml?version=1.0&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=journal&issn=0042-0980
    params = {}
    params["version"] = "1.0"
    params["rft_val_fmt"] = openurl_crosswalk.RFT_VAL_FMT_JOURNAL
    params["rft.genre"] = openurl_crosswalk.GENRE_JOURNAL_JOURNAL
    params["issn"] = issn
    return request_by_openurl_params(params)
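
The helper request_by_openurl_params is not shown in this excerpt. Below is a minimal sketch of what it might do, assuming it issues a GET against the configured openurl_endpoint and parses the XML reply with ElementTree; the timeout and error handling are illustrative assumptions, not biblib's actual implementation.

def request_by_openurl_params(params):
    # Assumed behaviour: query the configured OpenURL resolver with the KEV
    # parameters and return the parsed XML root element.
    logging.debug("OpenURL request: %s params: %s", openurl_endpoint, params)
    response = requests.get(openurl_endpoint, params=params, timeout=30)
    response.raise_for_status()
    return ET.fromstring(response.content)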

Example #2

#!/usr/bin/python
# -*- coding: utf-8 -*-

import datetime
import logging
import os

from biblib.services import corpus_service
from biblib.util import chrono
from biblib.util import console
from biblib.util import constants
from biblib.cloud import oaipmh_harvester
from biblib.metajson import Target

console.setup_console()


def harvest_by_set(corpus, target, target_set):
    logging.info("harvest_by_set: {}".format(target_set))
    date_begin = datetime.datetime.now()

    # harvest
    metajson_list = oaipmh_harvester.list_records(target, None, None, target_set)
    date_harvest = datetime.datetime.now()
    chrono.chrono_trace("harvest spire and convert to metajson", date_begin, date_harvest, len(ids))
    
    # import
    result_import = corpus_service.import_metajson_list(corpus, metajson_list, True, None)
    date_import = datetime.datetime.now()
    chrono.chrono_trace("harvest spire, convert metadata and save to MongoDB", date_harvest, date_import, len(result_import[0]))