def Main():
    """Download all new BNB MARC-21 records via a YAZ client into a dated dump file.

    Resumes from the persisted start list number, fetches list after list until
    RetryGetNewBNBNumbers() reports that no further list exists, and persists the
    next list number after each successfully processed list so an aborted run can
    resume where it left off.
    """
    OUTPUT_FILENAME = "bnb-" + datetime.datetime.now().strftime("%y%m%d") + ".mrc"
    # Start from a clean slate; the output file legitimately may not exist yet,
    # so only OS-level removal errors are swallowed (was a bare "except:").
    try:
        os.remove(OUTPUT_FILENAME)
    except OSError:
        pass
    yaz_client = ConnectToYAZServer()
    yaz_client.sendline("format marc21")
    yaz_client.expect("\r\n")
    yaz_client.sendline("set_marcdump " + OUTPUT_FILENAME)
    yaz_client.expect("\r\n")
    list_no = LoadStartListNumber()
    total_count = 0
    while True:
        ranges = RetryGetNewBNBNumbers(list_no)
        if ranges is None:  # No (more) numbers for this list => we're done.
            break
        count = DownloadRecordsRange(yaz_client, CoalesceNumbers(ranges))
        # Fixed typo in the log message ("Dowloaded" -> "Downloaded").
        util.Info("Downloaded " + str(count) + " records for list #" + str(list_no) + ".")
        total_count += count
        list_no += 1
        # Persist progress after every list so a crash does not redo work.
        StoreStartListNumber(list_no)
    util.Info("Downloaded a total of " + str(total_count) + " new record(s).")
def UpdateAllMarcFiles(orig_deletion_list):
    """Apply the deletion list and "Diff" files to all MARC files in the current directory.

    Steps (all relative to the current working directory, which is left one level
    up at the end via os.chdir("..")):
      1. Build "augmented_deletion_list" from orig_deletion_list (may be None)
         plus the IDs of all "Diff*" MARC files.
      2. Remove the listed IDs from every non-"Diff" MARC file ("*-trimmed.mrc").
      3. Concatenate each trimmed file with its matching "Diff*" file.
      4. Rename the results to include today's date and move them up a directory.
      5. Symlink "…-current.mrc" names onto the dated files.

    :param orig_deletion_list: file name of the BSZ deletion list relative to the
           parent directory, or None if no deletion list is available.
    :return: a tuple of "current" symlink names.
             NOTE(review): this returns a 2-tuple, but the caller stores it as
             "title_superior_norm_tuple" and accesses index [2] -- confirm whether
             a third ("superior") entry is missing here.
    """
    # Create a deletion list that consists of the original list from the
    # BSZ as well as all the ID's from the files starting w/ "Diff":
    util.Remove("augmented_deletion_list")
    if orig_deletion_list is None:
        # Create empty file.
        with open("augmented_deletion_list", "a") as _:
            pass
    else:
        shutil.copyfile("../" + orig_deletion_list, "augmented_deletion_list")
        EnsureFileIsEmptyOrEndsWithNewline("augmented_deletion_list")
    extract_IDs_script_path = GetPathOrDie("extract_IDs_in_erase_format.sh")
    for marc_file_name in glob.glob("*.mrc"):
        if not marc_file_name.startswith("Diff"):
            continue
        if process_util.Exec(extract_IDs_script_path,
                             args=[marc_file_name, "augmented_deletion_list"],
                             timeout=100) != 0:
            util.Error("failed to append ID's from \"" + marc_file_name
                       + "\" to \"augmented_deletion_list\"!")
    util.Info("Created an augmented deletion list.")

    # Now delete ID's from the augmented deletion list from all MARC-21 files:
    delete_ids_path = GetPathOrDie("delete_ids")
    for marc_file_name in glob.glob("*.mrc"):
        if marc_file_name.startswith("Diff"):
            continue
        trimmed_marc_file = marc_file_name[:-4] + "-trimmed.mrc"
        if process_util.Exec(delete_ids_path,
                             args=["augmented_deletion_list", marc_file_name, trimmed_marc_file],
                             timeout=200,
                             new_stdout=util.GetLogDirectory() + "/trimmed_marc.log",
                             new_stderr=util.GetLogDirectory() + "/trimmed_marc.log") != 0:
            util.Error("failed to create \"" + trimmed_marc_file + " from \"augmented_deletion_list\" and "
                       "\"" + marc_file_name + "\"!")
        RemoveOrDie(marc_file_name)
    RemoveOrDie("augmented_deletion_list")
    util.Info("Deleted ID's from MARC files.")

    # Now concatenate the changed MARC records with the trimmed data sets:
    for marc_file_name in glob.glob("*-trimmed.mrc"):
        # Strip "-YYMMDD-trimmed.mrc" (19 characters) to recover the root name.
        root_name = marc_file_name[:-19]
        # assumes exactly one matching Diff file exists -- TODO confirm
        diff_name = glob.glob("Diff" + root_name + "*.mrc")[0]
        if not util.ConcatenateFiles([marc_file_name, diff_name], root_name + ".mrc"):
            util.Error("We failed to concatenate \"" + marc_file_name + "\" and \""
                       + diff_name + "\"!")
        RemoveOrDie(marc_file_name)
        RemoveOrDie(diff_name)
    util.Info("Created concatenated MARC files.")

    # Rename files to include the current date and move them up a directory:
    current_date_str = datetime.datetime.now().strftime("%y%m%d")
    marc_files = glob.glob("*.mrc")
    for marc_file_name in marc_files:
        RenameOrDie(marc_file_name, "../" + marc_file_name[:-4] + "-" + current_date_str + ".mrc")
    os.chdir("..")
    util.Info("Renamed and moved files.")

    # Create symlinks with "current" instead of "YYMMDD" in the original files:
    for marc_file in marc_files:
        new_name = marc_file[:-4] + "-" + current_date_str + ".mrc"
        util.SafeSymlink(new_name, re.sub("\\d\\d\\d\\d\\d\\d", "current", new_name))
    util.Info("Symlinked files.")

    return ("GesamtTiteldaten-current.mrc", "Normdaten-current.mrc")
def Main():
    """Print a single config entry.

    usage: <prog> section entry
    """
    # Guard clause: exactly two user arguments are required.
    if len(sys.argv) != 3:
        util.Info("usage: " + sys.argv[0] + " section entry", file=sys.stderr)
        sys.exit(-1)
    util.default_email_recipient = "*****@*****.**"
    section, entry = sys.argv[1], sys.argv[2]
    config = util.LoadConfigFile()
    util.Info(config.get(section, entry))
def Usage():
    """Print the usage message and terminate with a non-zero exit status."""
    prog = sys.argv[0]
    util.Info("usage: " + prog + " archive_name file_name1:member_name1 "
              + "[file_name2:member_name2 .. file_nameN:member_nameN]")
    util.Info(" The colons and member names can be left out in which case the files will be stored under")
    util.Info(" their original names.")
    sys.exit(-1)
def AugmentDeletionList(orig_list, changed_marc_data, augmented_list):
    """Create "augmented_list" from a copy of "orig_list" plus the IDs extracted
    from "changed_marc_data" by the external extract_IDs_in_erase_format.sh tool.

    Aborts via util.Error() if the extraction tool fails.
    """
    util.Remove(augmented_list)
    shutil.copyfile(orig_list, augmented_list)
    exit_code = process_util.Exec("extract_IDs_in_erase_format.sh",
                                  args=[changed_marc_data, augmented_list],
                                  timeout=100)
    if exit_code != 0:
        util.Error("failed to create \"" + augmented_list + "\" from \""
                   + changed_marc_data + "\"!")
    util.Info("Successfully created \"" + augmented_list + "\".")
def DeleteMarcRecords(original_marc_file, deletion_list, processed_marc_file):
    """Strip the records listed in "deletion_list" from "original_marc_file",
    writing the remaining records to "processed_marc_file" via the external
    "delete_ids" tool.

    Aborts via util.Error() if the tool fails.
    """
    util.Remove(processed_marc_file)
    exit_code = process_util.Exec("delete_ids",
                                  args=[deletion_list, original_marc_file, processed_marc_file],
                                  timeout=200)
    if exit_code != 0:
        util.Error("failed to create \"" + processed_marc_file + "\" from \"" + deletion_list
                   + "\" and \"" + original_marc_file + "\"!")
    util.Info("Successfully created \"" + processed_marc_file + "\".")
def update(dry_run):
    # Sync on-disk blog entries with blogger.com (Python 2 code: print
    # statements, itertools.izip).
    #
    # Walks the on-disk feed in lockstep with the remote blogger feed
    # (padded with Nones so extra disk entries become new posts): entries
    # whose title or content changed are updated, unmatched disk entries
    # are added.  When dry_run is true nothing is actually sent.
    #
    # NOTE(review): "root", "prepare" and "publish" are module-level names
    # defined outside this view.
    if dry_run:
        print "This is a dry run. Nothing will be sent to blogger.com"
        print "Use command line argument --write to disable dry run"
    else:
        print "This is not a dry run. Updates will happen"
    info = util.Info()
    bloggerfeed = get_blogger_feed(info)
    autolinks = generate_automatic_links(bloggerfeed)
    diskfeed = get_disk_feed()
    # Reversed remote entries, padded with an endless stream of Nones so that
    # izip never truncates the disk feed.
    unending_bloggerfeed = itertools.chain(reversed(bloggerfeed.entry), itertools.repeat(None))
    for d, b in itertools.izip(diskfeed, unending_bloggerfeed):
        name = os.path.join(root, d)
        title, doc = prepare.prepare(name, autolinks)
        if b:
            # An existing remote entry corresponds to this disk entry.
            print "Considering", d
            should_update = False
            if title != b.title.text:
                print "\tUpdating title '%s' != '%s'" % (title, b.title.text)
                should_update = True
            if doc != b.content.text:
                print "\tUpdating content"
                should_update = True
            if should_update and not dry_run:
                publish.update(info, b, title, doc)
        else:
            # Ran out of remote entries => this disk entry is new.
            print "Adding", d
            if not dry_run:
                publish.add(info, title, doc)
def Main():
    """Drive the BSZ MARC update: validate config and input files, apply the
    deletion list and differential data, and create a new complete tarball.

    usage: <prog> default_email_recipient
    """
    if len(sys.argv) != 2:
        util.Error("This script expects one argument: default_email_recipient")
    util.default_email_recipient = sys.argv[1]
    config = util.LoadConfigFile()
    try:
        deletion_list = config.get("Files", "loesch_liste")
        complete_data = config.get("Files", "komplett_abzug")
        differential_data = config.get("Files", "differenz_abzug")
    except Exception as e:
        util.Error("failed to read config file! (" + str(e) + ")")
    if not os.access(complete_data, os.R_OK):
        util.Error("Fehlender oder nicht lesbarer Komplettabzug. (" + complete_data + ")")
    deletion_list_is_readable = os.access(deletion_list, os.R_OK)
    if not deletion_list_is_readable:
        deletion_list = None
    differential_data_is_readable = os.access(differential_data, os.R_OK)
    # At least one of the two update inputs must be present.
    if not deletion_list_is_readable and not differential_data_is_readable:
        util.Error("Fehlende oder nicht lesbare Löschliste und Differenzabzug..")

    # Bail out if the most recent complete data set is at least as recent as the deletion list or the differential
    # data:
    complete_data_mtime = os.path.getmtime(complete_data)
    deletion_list_mtime = None
    if deletion_list_is_readable:
        deletion_list_mtime = os.path.getmtime(deletion_list)
    differential_data_mtime = None
    if differential_data_is_readable:
        differential_data_mtime = os.path.getmtime(differential_data)
    if ((deletion_list_mtime is not None and complete_data_mtime >= deletion_list_mtime)
            or (differential_data_mtime is not None and complete_data_mtime >= differential_data_mtime)):
        util.SendEmail("Nichts zu tun!",
                       "Komplettabzug ist neuer als eventuell vorhandene Differenzabzüge.\n",
                       priority=5)
        sys.exit(0)

    PrepareDataDirectory()  # After this we're in the data directory...
    util.ExtractAndRenameBSZFiles("../" + complete_data)
    # NOTE(review): this assumes the differential data file is present; verify
    # the behaviour when only the deletion list was readable.
    util.ExtractAndRenameBSZFiles("../" + differential_data, "Diff")
    title_superior_norm_tuple = UpdateAllMarcFiles(
        deletion_list)  # ...and we're back in the original directory.
    new_tarball_name = complete_data.replace(
        "current", datetime.date.today().strftime("%y%m%d"))
    CreateNewTarballAndDeletePreviousTarball(new_tarball_name, title_superior_norm_tuple, complete_data)
    # Iterate instead of hard-coding indices [0]..[2]: UpdateAllMarcFiles()
    # returns a 2-tuple, so the previous "...tuple[2]" access raised IndexError.
    for link_name in title_superior_norm_tuple:
        util.RemoveLinkTargetAndLink(link_name)
    util.Info("Successfully created updated MARC files.")
def Main():
    """Resolve a possibly-relative config file path and print the requested entry.

    usage: <prog> config_file section entry

    Relative paths are searched first in the host-specific config directory,
    then in the default config directory.
    """
    if len(sys.argv) != 4:
        util.Info("usage: " + sys.argv[0] + " config_file section entry", file=sys.stderr)
        sys.exit(-1)
    config_file = sys.argv[1]
    if not config_file.startswith("/"):
        host_specific_dir = DEFAULT_CONFIG_FILE_LOCATION + socket.gethostname() + "/"
        if os.access(host_specific_dir, os.R_OK):
            config_file = host_specific_dir + config_file
        else:
            config_file = DEFAULT_CONFIG_FILE_LOCATION + config_file
    if not os.access(config_file, os.R_OK):
        util.Info(sys.argv[0] + ": can't read \"" + config_file + "\"!", file=sys.stderr)
        sys.exit(-1)
    config = ConfigParser.ConfigParser()
    config.read(config_file)
    util.Info(config.get(sys.argv[2], sys.argv[3]))
def RetryGetNewBNBNumbers(list_no):
    """Download the BNB numbers for list #list_no, retrying with exponential backoff.

    :param list_no: the BNB list number to fetch.
    :return: a non-empty list of BNB numbers, or None if the list was empty or
             all attempts failed.
    """
    # Fixed typo in the log message ("BBN" -> "BNB").
    util.Info("Downloading BNB numbers for list #" + str(list_no))
    MAX_NO_OF_ATTEMPTS = 4
    sleep_interval = 10  # initial sleep interval after a failed attempt in seconds
    # Was range(1, MAX_NO_OF_ATTEMPTS), which performed only 3 of the 4 attempts.
    for attempt in range(1, MAX_NO_OF_ATTEMPTS + 1):
        print("Attempt #" + str(attempt))
        retval = GetNewBNBNumbers(list_no)
        if isinstance(retval, list):  # Success: a list of BNB numbers.
            print("Downloaded and extracted " + str(len(retval)) + " BNB numbers.")
            return None if len(retval) == 0 else retval
        # Failure: retval is the unexpected content type of the downloaded document.
        print("Content-type of downloaded document was " + retval)
        if attempt < MAX_NO_OF_ATTEMPTS:  # Don't sleep after the final attempt.
            time.sleep(sleep_interval)
            sleep_interval *= 2  # Exponential backoff
    return None
# Note: auto-detect does not support VPP-first program mode. # Just run the programming entry script. run_script('ProgEntryScript') run_script('ReadDevIDScript') pk.write(util.FWCMD_UPLOAD_DATA) result = pk.read(5) run_script('ProgExitScript') print('RESULT:', result) # Skip the LENGTH byte. Unpack a 4-byte integer. DeviceIDMask will # appropriately mask-off for 2-byte device IDs. device_id = ((struct.unpack('<xI', result)[0] >> family['ProgMemShift']) & family['DeviceIDMask']) print('DEVICE:', device_id) for part in info.parts: if part['DeviceID'] == device_id \ and part['Family'] == family['FamilyID']: pprint.pprint(('FOUND:', part)) return part return None if __name__ == '__main__': pk = util.PICkit() info = util.Info() # Auto-detection uses Vdd=3.0V (if we knew a part, we'd know Vdd/Vpp). detect(pk, info, 3.0)
def main(argv):
    """Publish a single hard-coded entry and return exit status 0.

    argv is accepted to match the conventional main() signature but is not used.
    """
    info_obj = util.Info()
    publish(info_obj, 'I have the answer', 'Eureka! It is 42!')
    return 0
def Exec(cmd_path: str, args: List[str] = None, timeout: int = 0, env: Dict[str, str] = None,
         new_stdout: str = None, new_stderr: str = None, append_stdout: bool = False,
         append_stderr: bool = False, setsid: bool = True) -> int:
    """Fork and exec "cmd_path" with "args", optionally with a timeout and redirected output.

    :param cmd_path: path to an executable file; must exist and be executable.
    :param args: argument list passed to the program (argv[0] is added automatically).
    :param timeout: seconds to wait for the child; 0 means wait indefinitely.
    :param env: if not None, used as the child's environment via execve().
    :param new_stdout: if not None, path the child's stdout is redirected to.
    :param new_stderr: if not None, path the child's stderr is redirected to.
    :param append_stdout: open the stdout redirect target in append mode
           (ignored when the target is a TTY).
    :param append_stderr: like append_stdout, for stderr.
    :param setsid: run the child in its own session/process group so the whole
           group can be killed on timeout.
    :return: the child's exit status, or -1 if it was killed after the timeout.
    :raises Exception: if cmd_path is not executable or the child died from a signal.
    """
    def PathIsATTY(path: str) -> bool:
        # We have to open the path to be able to ask isatty().
        if not os.path.exists(path):
            return False
        with open(path) as file:
            return file.isatty()

    if args is None:
        args = []
    if not os.access(cmd_path, os.X_OK):
        raise Exception("in process_util.Exec: command \"" + cmd_path
                        + "\" either does not exist or is not executable!")
    child_pid = os.fork()
    if child_pid != 0:  # We're the parent.
        if timeout != 0:
            # Arrange for SIGALRM to interrupt os.wait4() after "timeout" seconds.
            old_handler = signal.getsignal(signal.SIGALRM)
            signal.signal(signal.SIGALRM, _SigAlarmHandler)
            signal.alarm(timeout)
        interrupted = False
        try:
            (pid, exit_code, _) = os.wait4(child_pid, 0)
        except OSError:
            # wait4() was interrupted by the alarm => the child timed out.
            interrupted = True
        if timeout != 0:
            # Cancel any pending alarm and restore the previous handler.
            signal.alarm(0)
            signal.signal(signal.SIGALRM, old_handler)
        if interrupted:
            # Kill the child's whole process group: politely first, then by force.
            os.kill(-child_pid, signal.SIGTERM)
            time.sleep(2)  # 2 seconds
            os.kill(-child_pid, signal.SIGKILL)
            try:
                # Reap everything in the process group to avoid zombies; the
                # loop ends when wait4() raises OSError (no children left).
                while True:
                    (_, exit_code, _) = os.wait4(-child_pid, 0)
            except OSError as e:
                pass
            return -1
        if os.WIFEXITED(exit_code):
            return os.WEXITSTATUS(exit_code)
        elif os.WIFSIGNALED(exit_code):
            raise Exception("in process_util.Exec: " + cmd_path + " was killed by signal \""
                            + str(os.WTERMSIG(exit_code)) + "!")
        else:
            raise Exception("in process_util.Exec: no idea why " + cmd_path + " exited!")
        return exit_code  # NOTE(review): unreachable -- every branch above returns or raises.
    else:  # We're the child.
        if setsid == True:
            if os.setsid() == -1:
                util.Info("in process_util.Exec: os.setsid() failed!", file=sys.stderr)
                sys.exit(-1)
        if new_stdout is not None:
            # Append only when requested and the target is not a TTY.
            if append_stdout and not PathIsATTY(new_stdout):
                sys.stdout = open(new_stdout, "ab")
            else:
                sys.stdout = open(new_stdout, "wb")
            os.dup2(sys.stdout.fileno(), 1)
        if new_stderr is not None:
            if append_stderr and not PathIsATTY(new_stderr):
                sys.stderr = open(new_stderr, "ab")
            else:
                sys.stderr = open(new_stderr, "wb")
            os.dup2(sys.stderr.fileno(), 2)
        errno.errno = 0
        args = [cmd_path] + args
        if env is None:
            os.execv(cmd_path, args)
        else:
            os.execve(cmd_path, args, env)
        # exec*() only returns on failure.
        raise Exception("in process_util.Exec: we should never get here! ("
                        + os.strerror(errno.EPERM) + ")")
            # NOTE(review): this is the tail of an Exec() child branch whose
            # header is outside the visible chunk.
            os.dup2(sys.stdout.fileno(), 1)
        if new_stderr is not None:
            # Append only when requested and the target is not a TTY.
            if append_stderr and not PathIsATTY(new_stderr):
                sys.stderr = open(new_stderr, "ab")
            else:
                sys.stderr = open(new_stderr, "wb")
            os.dup2(sys.stderr.fileno(), 2)
        errno.errno = 0
        args = [cmd_path] + args
        if env is None:
            os.execv(cmd_path, args)
        else:
            os.execve(cmd_path, args, env)
        # exec*() only returns on failure.
        raise Exception("in process_util.Exec: we should never get here! ("
                        + os.strerror(errno.EPERM) + ")")


if __name__ == '__main__':
    # Ad-hoc smoke tests for Exec(); the helper scripts are expected to live
    # next to this file.
    try:
        Exec("Non-existent")
    except Exception as e:
        util.Info(str(e))
    util.Info("hello.sh returned " + str(Exec("./cpp/hello.sh")))
    util.Info("fail.sh returned " + str(Exec("./fail.sh")))
    util.Info("more_than_5_seconds.sh returned " + str(Exec("./more_than_5_seconds.sh", timeout=5)))
    util.Info("\"echo_args.sh a b c\" returned " + str(Exec("./echo_args.sh", ['a', 'b', 'c'])))