Example #1
def StartPipeline(pipeline_script_name, marc_title, conf):
    log_file_name = util.MakeLogFileName(pipeline_script_name,
                                         util.GetLogDirectory())
    util.ExecOrDie(pipeline_script_name, [marc_title], log_file_name)
    log_file_name = util.MakeLogFileName("import_into_vufind",
                                         util.GetLogDirectory())
    ImportIntoVuFind(conf.get("FileNames", "title_marc_data"),
                     conf.get("FileNames", "authority_marc_data"),
                     log_file_name)
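A hedged usage sketch for StartPipeline; the script and MARC file names below are placeholders, and it is assumed that util.LoadConfigFile() (used in the later examples) returns a config object whose "FileNames" section contains the keys read above:

# Hypothetical invocation with placeholder file names:
conf = util.LoadConfigFile()
StartPipeline("pipeline.sh", "titles.mrc", conf)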
Example #2
def RunPipelineAndImportIntoSolr(pipeline_script_name, marc_title, conf):
    log_file_name = util.MakeLogFileName(pipeline_script_name,
                                         util.GetLogDirectory())
    util.ExecOrDie(pipeline_script_name, [marc_title], log_file_name)
    log_file_name = util.MakeLogFileName("import_into_vufind",
                                         util.GetLogDirectory())
    ImportIntoVuFind(conf.get("FileNames", "title_marc_data"),
                     conf.get("FileNames", "authority_marc_data"),
                     log_file_name)

    # Write timestamp file for last successful Solr import:
    with open(
            os.open('/usr/local/vufind/public/last_solr_import',
                    os.O_CREAT | os.O_WRONLY, 0o644), 'w') as output:
        output.write(str(datetime.datetime.now()))
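The open(os.open(...), 'w') construct above creates the timestamp file with an explicit mode of 0o644 (still subject to the process umask) and then wraps the returned file descriptor in a regular text file object. A minimal standalone sketch of the same pattern, with a placeholder path and an added O_TRUNC flag so an older, longer timestamp cannot leave stale trailing bytes:

import datetime
import os

# Create the file with mode 0o644, truncate any previous contents, and wrap
# the file descriptor for text writing; the path is a placeholder.
fd = os.open('/tmp/last_solr_import', os.O_CREAT | os.O_WRONLY | os.O_TRUNC, 0o644)
with open(fd, 'w') as output:
    output.write(str(datetime.datetime.now()))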
Example #3
def Main():
    util.default_email_sender = "*****@*****.**"
    util.default_email_recipient = "*****@*****.**"
    if len(sys.argv) != 2:
        util.SendEmail("Create Refterm File (Kickoff Failure)",
                       "This script must be called with one argument,\n" +
                       "the default email recipient\n",
                       priority=1)
        sys.exit(-1)
    util.default_email_recipient = sys.argv[1]
    # Download needed differential files
    config = util.LoadConfigFile()
    log_file_name = util.MakeLogFileName(sys.argv[0], util.GetLogDirectory())
    changelist_url = config.get("Unpaywall", "changelist_url")
    api_key = config.get("Unpaywall", "api_key")
    oadoi_download_directory = config.get("LocalConfig", "download_dir")
    oadoi_imported_directory = oadoi_download_directory + "/imported/"
    StartMongoDB()
    json_update_objects = GetChangelists(changelist_url, api_key)
    remote_update_files = GetRemoteUpdateFiles(json_update_objects)
    local_update_files = GetLocalUpdateFiles(config, oadoi_download_directory)
    download_lists = GetAllFilesStartingAtFirstMissingLocal(
        remote_update_files, local_update_files)
    DownloadUpdateFiles(download_lists['download'], json_update_objects,
                        api_key, oadoi_download_directory)

    # Update the Database
    ImportOADOIsToMongo(
        GetImportFiles(config, oadoi_download_directory,
                       oadoi_imported_directory), oadoi_download_directory,
        log_file_name)

    # Generate the files to be used by the pipeline
    share_directory = config.get("LocalConfig", "share_directory")
    ixtheo_dois_file = config.get("LocalConfig", "ixtheo_dois_file")
    ixtheo_urls_file = config.get("LocalConfig", "ixtheo_urls_file")
    ExtractOADOIURLs(share_directory, ixtheo_dois_file, ixtheo_urls_file,
                     log_file_name)
    ShareOADOIURLs(share_directory, ixtheo_urls_file)
    krimdok_dois_file = config.get("LocalConfig", "krimdok_dois_file")
    krimdok_urls_file = config.get("LocalConfig", "krimdok_urls_file")
    ExtractOADOIURLs(share_directory, krimdok_dois_file, krimdok_urls_file,
                     log_file_name)
    ShareOADOIURLs(share_directory, krimdok_urls_file)
    DumpMongoDB(config, log_file_name)
    StopMongoDB()
    util.SendEmail("Update OADOI Data",
                   "Successfully created \"" + ixtheo_urls_file + "\" and \"" +
                   krimdok_urls_file + "\" in " + share_directory,
                   priority=5)
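The config.get(section, key) calls in this example imply a ConfigParser-style configuration file. A hedged sketch of the sections and keys that Example #3 reads (all values are placeholders, not the project's actual settings):

[Unpaywall]
changelist_url = https://example.org/unpaywall/changelists
api_key = PLACEHOLDER_KEY

[LocalConfig]
download_dir = /tmp/oadoi_downloads
share_directory = /tmp/oadoi_share
ixtheo_dois_file = ixtheo_dois.txt
ixtheo_urls_file = ixtheo_urls.txt
krimdok_dois_file = krimdok_dois.txt
krimdok_urls_file = krimdok_urls.txt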
Example #4
def Main():
    util.default_email_sender = "*****@*****.**"
    util.default_email_recipient = "*****@*****.**"
    if len(sys.argv) != 2:
        util.SendEmail("Create Refterm File (Kickoff Failure)",
                       "This script must be called with one argument,\n" +
                       "the default email recipient\n",
                       priority=1)
        sys.exit(-1)
    util.default_email_recipient = sys.argv[1]
    conf = util.LoadConfigFile()
    title_data_link_name = conf.get("Misc", "link_name")
    ref_data_pattern = conf.get("Hinweisabzug", "filename_pattern")
    if ref_data_pattern != "":
        ref_data_archive = util.getMostRecentFileMatchingGlob(ref_data_pattern)
        if ref_data_archive is None:
            util.SendEmail("Create Refterm File (No Reference Data File Found)",
                           "No file matching pattern \"" + ref_data_pattern +
                           "\" found\n", priority=1)
    else:
        ref_data_archive = None

    if FoundNewBSZDataFile(title_data_link_name):
        start = datetime.datetime.now()
        log_file_name = CreateLogFile()
        title_data_file_orig = ExtractTitleDataMarcFile(title_data_link_name)
        date_string = GetDateFromFilename(title_data_file_orig)
        title_data_file = RenameTitleDataFile(title_data_file_orig, date_string)
        atexit.register(CleanUp, title_data_file, log_file_name)
        SetupTemporarySolrInstance(title_data_file, conf, log_file_name)
        create_ref_term_process = multiprocessing.Process(
            target=CreateRefTermFile, name="Create Reference Terms File",
            args=[ref_data_archive, date_string, conf, log_file_name])
        create_serial_sort_term_process = multiprocessing.Process(
            target=CreateSerialSortDate, name="Serial Sort Date",
            args=[title_data_file, date_string, log_file_name])
        create_match_db_log_file_name = util.MakeLogFileName(
            "create_match_db", util.GetLogDirectory())
        create_match_db_process = multiprocessing.Process(
            target=CreateMatchDB, name="Create Match DB",
            args=[title_data_file, create_match_db_log_file_name])
        ExecuteInParallel(create_ref_term_process, create_serial_sort_term_process,
                          create_match_db_process)
        end = datetime.datetime.now()
        duration_in_minutes = str((end - start).seconds / 60.0)
        util.SendEmail("Create Refterm File",
                       "Refterm file successfully created in " + duration_in_minutes +
                       " minutes.", priority=5)
    else:
        util.SendEmail("Create Refterm File", "No new data was found.", priority=5)
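ExecuteInParallel is referenced in Example #4 but not shown in these snippets. A plausible sketch, assuming it simply starts all of the multiprocessing.Process objects, waits for each of them, and treats a non-zero exit code as fatal:

import sys


def ExecuteInParallel(*processes):
    # Assumed behaviour: start all child processes, wait for each one to
    # finish, and abort if any of them exited with a non-zero status.
    for process in processes:
        process.start()
    for process in processes:
        process.join()
        if process.exitcode != 0:
            sys.exit("Process \"" + process.name + "\" failed with exit code " +
                     str(process.exitcode))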
Example #5
def CreateLogFile():
    return util.MakeLogFileName(os.path.basename(__file__), util.GetLogDirectory())