def get(param, default=None):
    """
    Get a configuration parameter.

    Despite the original example suggesting a bare native value, every
    visible return statement yields a (code, value) tuple:
      - ("ok", value)           when the cached config holds the key
      - ("ok:default", default) when the config source is missing/unreadable
    NOTE(review): confirm the intended contract with callers; the original
    doctest (`get("param1", default="value1") -> 'value1'`) disagrees with
    the code as written.
    """
    global _config_data, _last_exception
    # Fast path: configuration already loaded and cached at module level.
    if _config_data is not None:
        return ('ok', _config_data.get(param, default))
    # Resolve the config location: namespace -> path, then file -> path.
    typ, value = _maybe_if_ns_then_generate_path(which())
    typ, value = _maybe_if_file_then_resolve_path((typ, value))
    if typ == "error":
        # Unresolvable source: fall back to the caller-supplied default.
        return ("ok:default", default)
    if typ == "json" or typ == "yaml":
        _code, maybe_data = tos.file_contents(value)
        if maybe_data is None:
            return ("ok:default", default)
        if typ == "json":
            try:
                data = json.loads(maybe_data)
                # Cache the parsed document for subsequent lookups.
                global _json_data
                _json_data = data
            except Exception, e:
                # Parse failure: use an empty config and remember the error
                # in the module-level `_last_exception` for diagnostics.
                data = {}
                _last_exception = e
    # NOTE(review): no handling for the "yaml" branch and no final return is
    # visible here -- this function appears truncated in the current view.
def check_if_ok(filepath, default="ok"):
    """
    Check a file for the 'ok' marker: either the literal string "ok" or a
    JSON object carrying an "ok" code.

    >>> check_if_ok(None)
    ('ok', None)
    >>> check_if_ok("/tmp/non_existing_path")
    ('error', 'empty file')
    >>> from tools_os import quick_write, rm
    >>> p="/tmp/_jlddk_tools_misc_test"
    >>> quick_write(p, "ok")
    ('ok', '/tmp/_jlddk_tools_misc_test')
    >>> check_if_ok(p)
    ('ok', None)
    >>> quick_write(p, '''{"code": "ok"}''')
    ('ok', '/tmp/_jlddk_tools_misc_test')
    >>> check_if_ok(p)
    ('ok', None)
    >>> quick_write(p, '''{"code": "error"}''')
    ('ok', '/tmp/_jlddk_tools_misc_test')
    >>> check_if_ok(p)  # doctest: +ELLIPSIS
    ('error', ...
    >>> rm(p)
    ('ok', '/tmp/_jlddk_tools_misc_test')
    """
    # A missing path is treated as the default (success) outcome.
    if filepath is None:
        return (default, None)
    read_code, maybe_body = file_contents(filepath)
    # Delegate interpretation of the contents to the shared checker.
    return checkok(read_code, maybe_body)
def process(src_file, dst_file, enable_delete):
    """
    Relay the lines of `src_file` to stdout as JSON records, then dispose of
    the source file.

    1. read file
    2. send "begin" record
    3. send each line
    4. send "end" record
    5. move/delete source file

    Returns a (code, msg) tuple.
    Raises BrokenPipe if writing downstream fails.
    """
    code, contents = file_contents(src_file)
    if not code.startswith("ok"):
        return ("error", "file/invalid")

    # `contents` may be None (or otherwise unusable) on a bad read; only
    # those specific failures mean "invalid data" -- a bare `except` here
    # would also swallow KeyboardInterrupt/SystemExit.
    try:
        lines = contents.strip().split("\n")
    except (AttributeError, TypeError):
        return ("error", "data/invalid")

    ###############################################
    # Any failure while emitting records is surfaced as a broken pipe so
    # the caller can terminate the pipeline cleanly.
    try:
        stdout({"sp": src_file, "code": "begin"})
        for line in lines:
            stdout({"code": "line", "line": line})
        stdout({"sp": src_file, "code": "end"})
    except Exception:
        raise BrokenPipe("Broken Pipe")
    ###############################################

    if enable_delete:
        code, _msg = rm(src_file)
        if not code.startswith("ok"):
            logging.error("Can't delete '%s'", src_file)
            return ("error", "file/delete")
        return ("ok", None)

    ### well then, we need to move the source_file
    code, _ = move(src_file, dst_file)
    return (code, "file/move")
def run(enable_simulate=False, bucket_name=None, bucket_prefix=None,
        path_source=None, path_dest=None, delete_old=False, **_):
    """
    Upload `path_source` to S3 bucket `bucket_name` under `bucket_prefix`,
    optionally identifying older versions of the same file for deletion.

    :param enable_simulate: when True, only log that a simulation begins
    :param bucket_name:     target S3 bucket (created/fetched via boto)
    :param bucket_prefix:   key prefix inside the bucket
    :param path_source:     local file to upload
    :param path_dest:       filename to use in the bucket (defaults to the
                            source file's basename)
    :param delete_old:      when True, collect older keys matching the
                            file's root name
    :raises Exception: on connection, bucket, read, or key-generation failure
    """
    code, path_source = resolve_path(path_source)
    if not code.startswith("ok"):
        logging.warning("Source file '%s' can't be accessed...", path_source)

    try:
        conn = boto.connect_s3()
    except Exception:
        ## not much we can do -- but no remote calls are made at this
        ## point, so a failure here should be highly improbable
        raise Exception("Can't 'connect' to S3")

    try:
        bucket = conn.create_bucket(bucket_name)
        logging.info("Got bucket '%s'", bucket_name)
    except Exception:
        raise Exception("Can't get bucket '%s'" % bucket_name)

    base_name = os.path.basename(path_source)
    logging.info("Basename of file to upload: %s", base_name)

    root_name, version, _ext = split_path_version(base_name)
    if version is not None and len(version) > 0:
        # BUGFIX: the original logged `version` on both lines; the first
        # line is meant to report the root (basename) component.
        logging.info("Basename of file: %s", root_name)
        logging.info("Version of file: %s", version)
    else:
        version = None

    if root_name is None:
        root_name = base_name

    if path_dest is None:
        logging.info("Will be using '%s' as filename in bucket", base_name)
        path_dest = base_name

    key_names = None
    to_delete = None
    if delete_old:
        logging.info("Getting bucket keys")
        code, bkeys = get_all_keys(bucket, bucket_prefix)
        if not code.startswith("ok"):
            raise Exception("Can't get bucket keys...")
        logging.info("Got %s key(s) to filter for 'old' files", len(bkeys))
        _key_names, to_delete = filter_keys(root_name, bkeys)
        logging.info("Older files found: %s", to_delete)

    if enable_simulate:
        logging.info("! Begin simulation...")

    code, contents = file_contents(path_source)
    if not code.startswith("ok"):
        raise Exception("Can't read file '%s'" % path_source)
    logging.info("Got source file contents")

    try:
        upload_key = S3Key(bucket)
        upload_key_name = gen_key(bucket_prefix, path_dest)
        upload_key.key = upload_key_name
        logging.info("Prepared S3 key: %s", upload_key.key)
    except Exception as e:
        raise Exception("S3 key generation: %s" % str(e))
def process(src_file, dest_path, delete_fetch_error):
    """
    Fetch a remote file referenced by a local "pointer" file.

    1. read pointer file, extract URL
    2. fetch document from URL
    3. write fetched document under `dest_path`
    4. delete pointer file
    5. emit a JSON context record on stdout

    :param src_file: local file whose contents are a single URL
    :param dest_path: directory to write the fetched document into
    :param delete_fetch_error: when True, delete the pointer file even if
                               the fetch itself failed
    :raises Exception: on invalid pointer data, fetch failure, or write failure
    :raises BrokenPipe: when the final stdout write fails
    """
    code, contents = file_contents(src_file)
    if not code.startswith("ok"):
        logging.error("Can't read file contents from '%s'", src_file)
        return

    # `contents` may be None on a bad read; only those specific failures
    # mean "invalid data" -- a bare `except` would also trap SystemExit.
    try:
        url = contents.strip()
    except (AttributeError, TypeError):
        raise Exception("Invalid data in file: %s" % src_file)

    code, (http_code, headers, data) = fetch(url)
    if not code.startswith("ok"):
        if delete_fetch_error:
            # Best-effort cleanup of the pointer so we don't retry forever.
            code, _msg = rm(src_file)
            logging.warning("Attempting to delete source file '%s': %s",
                            src_file, code)
        raise Exception("Can't fetch page from url: %s" % url)

    # Normalize the HTTP status to an int when possible; leave it as-is
    # (and let the != 200 test reject it) when it isn't numeric.
    try:
        http_code = int(http_code)
    except (TypeError, ValueError):
        pass
    if http_code != 200:
        logging.error("Can't fetch url '%s', http response code: %s",
                      url, http_code)
        return

    # Derive a destination filename from the URL; fall back to a UUID.
    code, maybe_components = extract_url_filename(url)
    if not code.startswith("ok"):
        fbn = str(uuid.uuid1())
        dest_filename = os.path.join(dest_path, fbn)
    else:
        fbn, fext = maybe_components
        dest_filename = os.path.join(dest_path, fbn) + fext

    # Never clobber an existing file: switch to a UUID name on collision.
    try:
        exists = os.path.exists(dest_filename)
    except Exception:
        exists = False
    if exists:
        fbn = str(uuid.uuid1())
        dest_filename = os.path.join(dest_path, fbn)

    code, msg = atomic_write(dest_filename, data)
    if not code.startswith("ok"):
        raise Exception("Can't write to file '%s': %s" % (dest_filename, msg))

    ctx = {
        "dest_filename": dest_filename,
        "src_filename": src_file,
        "url": url,
        "http_code": http_code,
        "headers": headers,
    }

    ### no need to abort on this: log and carry on
    code, msg = rm(src_file)
    if not code.startswith("ok"):
        logging.error("Can't delete '%s' : will probably cause excessive downloads...",
                      src_file)

    try:
        sys.stdout.write(json.dumps(ctx) + "\n")
    except Exception:
        raise BrokenPipe()