def main(server_class=HTTPServer, handler_class=MyHTTPRequestHandler):
    # Setup logger
    log.init_logger()
    # Run server
    server_address = ('', 8000)
    httpd = server_class(server_address, handler_class)
    httpd.serve_forever()
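# Every snippet in this listing calls a project-specific `init_logger`; the
# signature differs from project to project. A minimal sketch of what such a
# helper might look like (an assumption for orientation, not any of these
# projects' actual code):
import logging

def init_logger(name=None, level=logging.INFO, path=None):
    # Log to a file when a path is given, otherwise to the console.
    logger = logging.getLogger(name)
    logger.setLevel(level)
    handler = logging.FileHandler(path) if path else logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        '%(asctime)s %(name)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
    return logger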
def setup_app():
    init_logger(level=Config.LOG_LEVEL, path=Config.LOG_PATH)
    app = Flask(__name__)
    app.secret_key = "d9e9d4bf-4fbb-4b0a-a600-a4773f2ca61a"
    app.config.from_object(Config)
    Config.init_app(app)
    rpc.init_app(app)
    login_manager.init_app(app)
    from api import api
    app.register_blueprint(api)
    return app
def __init__(self, api_key_file_path, **kwargs):
    self.logger = kwargs.get('logger',
                             init_logger(__name__,
                                         kwargs.get("show_debug", False),
                                         kwargs.get('log_to_file', False)))
    self.logger.info(msg='Binance API initialized!')
    self.candlestick_columns = ['Date', 'Open', 'High', 'Low', 'Close',
                                'unk1', 'unk2', 'unk3', 'unk4', 'unk5',
                                'unk6', 'unk7']
    self._api_key = self._load_api_key(api_key_file_path)
    self._client = Client(self._api_key.get('api_key'),
                          self._api_key.get('secret_key'),
                          {"verify": False, "timeout": 20})
def run(self):
    logger = init_logger(self.loglevel, self.logfile, self.consolelog)
    self.init_signal_handler(logger)
    try:
        self.sc.start()
    except Exception as e:
        logger.error("Error occurred: %s" % str(e))
        for line in traceback.format_exc().split('\n'):
            logger.error(line)
def main():
    def show(host, name, model):
        logger.info('host:{}, name:{}, model:{}'.format(host, name, model))

    import log
    import logging
    log.init_logger(logging.DEBUG)
    logger.info('start finding device')
    start_discovery(namespace=APPLE_NAMESPACE, callback=show)
    start_discovery(namespace=GOOGLE_NAMESPACE, callback=show)
    start_ssdp_discovery(ROKU_ST_NAME, callback=show)
    start_dial_discovery(model_list=FIRETV_MODEL_LIST, callback=show)
    # start_ssdp_discovery(UPNP_ST_NAME, device_type=DEVICE_TYPE_ROKU, callback=show)
    input()  # block until the user presses Enter (raw_input on Python 2)
    cancel_discovery(namespace=APPLE_NAMESPACE)
    cancel_discovery(namespace=GOOGLE_NAMESPACE)
    cancel_ssdp_discovery()
    cancel_dial_discovery()
def main():
    log.init_logger(config.LOG_FILE)
    log.set_level(config.LOG_LEVEL)
    sub = SubscribeManager(config.REDIS_MQ_HOST, config.REDIS_MQ_PORT, config.REDIS_MQ_DB)
    dbc = DBChartAgent(config.DB_HOST, config.DB_PORT, config.DB_USER,
                       config.DB_PASSWD, config.DB_NAME)
    wss = WebSocketServer(config.WSS_URI, DvClinetDispatcher(sub, dbc),
                          host=config.WSS_HOST, port=config.WSS_PORT)

    def test_push_online():
        dbc.createKey("online")
        pub = Publisher(config.REDIS_MQ_HOST, config.REDIS_MQ_PORT, config.REDIS_MQ_DB)
        import random
        while True:
            add = random.randint(-100, 100)
            dbc.incKey("online", "2003", add)
            pub.publish('online', {'server': '2003', "op": "inc", "value": add,
                                   "time": datasets.current_ms()})
            time.sleep(1)

    tester = Thread(target=test_push_online)
    tester.setDaemon(True)
    wss.setDaemon(True)
    wss.start()
    tester.start()

    exit_main = False

    def stop(signum, frame):  # signal handlers receive (signum, frame)
        nonlocal exit_main    # rebind the flag in main(), not a handler-local
        log.info("stop the io thread ...")
        exit_main = True
        # wss.stop()
        # tester.stop()

    signal.signal(signal.SIGQUIT, stop)
    while not exit_main:
        sub.poll()
    wss.join()
    tester.join()
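# The shutdown flag above needs `nonlocal` to work; a `threading.Event` avoids
# the rebinding issue entirely. A minimal runnable sketch (names assumed, not
# this project's code):
import signal
import threading
import time

stop_event = threading.Event()

def handle_stop(signum, frame):
    # Signal handlers receive (signum, frame); just flip the shared flag.
    stop_event.set()

signal.signal(signal.SIGTERM, handle_stop)
signal.signal(signal.SIGINT, handle_stop)

while not stop_event.is_set():
    time.sleep(0.1)  # stands in for the sub.poll() work loop
print("shut down cleanly")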
def main():
    log_file = os.path.dirname(os.getcwd()) + "/logs/product_foreign.log"
    log_obj = log.init_logger(log_file)
    obj = RemoteRedis()
    redis_sub = obj.subscribe()
    while True:
        msg = redis_sub.parse_response()
        if msg[2] == b'product_foreign finish':
            log_obj.info("Server has received the foreign production release message")
            obj = LocalRedis(log_obj)
            value = obj.get()
            obj2 = MySQLClient(value, log_obj)
            obj2.select()
            obj2.compare(value)
            obj2.insert()
            obj2.update()
            obj2.insert_log()
def main():
    _log = log.init_logger('suds.client', debug=True)
    # _log = logging.getLogger('suds.client').setLevel(logging.INFO)
    client = suds.client.Client(service_url)
    response = client.service.getCustomerList(user=service_user,
                                              password=service_password,
                                              customerType=None,
                                              creationDateFrom=None,
                                              creationDateTo=None,
                                              id=61983,
                                              ExportMode='C')
    print(_log)
    if response and response['wsResult']:
        customer_list = response['wsResult']['Customers']['Customer']
        print(len(customer_list))
        for customer in customer_list:
            print("Cliente: %s , Canal: %s" % (customer.General.Name,
                                               customer.General.Channel.value or ''))
def setUp(self): log.init_logger("INFO", "foo", os.environ.get("PULSAR_HOME") + "/conf/functions-logging/console_logging_config.ini")
tf.random.set_seed(51)
np.random.seed(51)

parser = argparse.ArgumentParser()
parser.add_argument("--d", dest="dataset", nargs='?')
parser.add_argument("--in", dest="input_model", nargs='?', default='model')
parser.add_argument("--ws", dest="window_size", nargs='?', type=int, default=30)
parser.add_argument("--ts", dest="trend_size", nargs='?', type=int, default=7)
parser.add_argument("--v", dest="verbose", action='store_true')
args = parser.parse_args()

logger = init_logger(__name__, show_debug=args.verbose, log_to_file=False)

DATASET = args.dataset if args.dataset is not None else 'btc_price.csv'
WINDOW_SIZE = args.window_size
TREND_SIZE = args.trend_size
logger.debug(msg=f'Window Size: {WINDOW_SIZE}')


def load_dataset(dataset=DATASET):
    closing_prices = []
    with open(dataset) as csvfile:
        reader = csv.reader(csvfile, delimiter=',')
        next(reader)  # skip the header row
        for row in reader:
            closing_prices.append(float(row[4]))
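# The snippet stops before showing how WINDOW_SIZE and TREND_SIZE are applied.
# A sketch of one plausible windowing scheme (an assumption, not this
# project's code): each sample is `window_size` past closes, and the target is
# the mean of the following `trend_size` closes.
import numpy as np

def make_windows(prices, window_size=30, trend_size=7):
    xs, ys = [], []
    for i in range(len(prices) - window_size - trend_size + 1):
        xs.append(prices[i:i + window_size])
        ys.append(np.mean(prices[i + window_size:i + window_size + trend_size]))
    return np.array(xs), np.array(ys)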
def main():
    # Setup signal handlers
    signal.signal(signal.SIGTERM, atexit_function)
    signal.signal(signal.SIGHUP, atexit_function)
    signal.signal(signal.SIGINT, atexit_function)

    parser = argparse.ArgumentParser(description='Pulsar Functions Python Instance')
    parser.add_argument('--function_details', required=True, help='Function Details Json String')
    parser.add_argument('--py', required=True, help='Full Path of Function Code File')
    parser.add_argument('--instance_id', required=True, help='Instance Id')
    parser.add_argument('--function_id', required=True, help='Function Id')
    parser.add_argument('--function_version', required=True, help='Function Version')
    parser.add_argument('--pulsar_serviceurl', required=True, help='Pulsar Service Url')
    parser.add_argument('--client_auth_plugin', required=False, help='Client authentication plugin')
    parser.add_argument('--client_auth_params', required=False, help='Client authentication params')
    parser.add_argument('--use_tls', required=False, help='Use tls')
    parser.add_argument('--tls_allow_insecure_connection', required=False, help='Tls allow insecure connection')
    parser.add_argument('--hostname_verification_enabled', required=False, help='Enable hostname verification')
    parser.add_argument('--tls_trust_cert_path', required=False, help='Tls trust cert file path')
    parser.add_argument('--port', required=True, help='Instance Port', type=int)
    parser.add_argument('--max_buffered_tuples', required=True, help='Maximum number of Buffered tuples')
    parser.add_argument('--logging_directory', required=True, help='Logging Directory')
    parser.add_argument('--logging_file', required=True, help='Log file name')
    parser.add_argument('--logging_config_file', required=True, help='Config file for logging')
    parser.add_argument('--expected_healthcheck_interval', required=True,
                        help='Expected time in seconds between health checks', type=int)
    parser.add_argument('--install_usercode_dependencies', required=False,
                        help='For packaged python like wheel files, do we need to install all dependencies',
                        type=bool)
    parser.add_argument('--dependency_repository', required=False,
                        help='For packaged python like wheel files, which repository to pull the dependencies from')
    parser.add_argument('--extra_dependency_repository', required=False,
                        help='For packaged python like wheel files, any extra repository to pull the dependencies from')
    args = parser.parse_args()

    function_details = Function_pb2.FunctionDetails()
    args.function_details = str(args.function_details)
    if args.function_details[0] == '\'':
        args.function_details = args.function_details[1:]
    if args.function_details[-1] == '\'':
        args.function_details = args.function_details[:-1]
    json_format.Parse(args.function_details, function_details)

    if os.path.splitext(str(args.py))[1] == '.whl':
        if args.install_usercode_dependencies:
            cmd = "pip install -t %s" % os.path.dirname(str(args.py))
            if args.dependency_repository:
                cmd = cmd + " -i %s" % str(args.dependency_repository)
            if args.extra_dependency_repository:
                cmd = cmd + " --extra-index-url %s" % str(args.extra_dependency_repository)
            cmd = cmd + " %s" % str(args.py)
            os.system(cmd)
        else:
            zpfile = zipfile.ZipFile(str(args.py), 'r')
            zpfile.extractall(os.path.dirname(str(args.py)))
        sys.path.insert(0, os.path.dirname(str(args.py)))

    log_file = os.path.join(
        args.logging_directory,
        util.getFullyQualifiedFunctionName(function_details.tenant,
                                           function_details.namespace,
                                           function_details.name),
        "%s-%s.log" % (args.logging_file, args.instance_id))
    log.init_logger(logging.INFO, log_file, args.logging_config_file)
    Log.info("Starting Python instance with %s" % str(args))

    authentication = None
    use_tls = False
    tls_allow_insecure_connection = False
    tls_trust_cert_path = None
    if args.client_auth_plugin and args.client_auth_params:
        authentication = pulsar.Authentication(args.client_auth_plugin, args.client_auth_params)
    if args.use_tls == "true":
        use_tls = True
    if args.tls_allow_insecure_connection == "true":
        tls_allow_insecure_connection = True
    if args.tls_trust_cert_path:
        tls_trust_cert_path = args.tls_trust_cert_path
    pulsar_client = pulsar.Client(args.pulsar_serviceurl, authentication,
                                  30, 1, 1, 50000, None, use_tls,
                                  tls_trust_cert_path, tls_allow_insecure_connection)

    pyinstance = python_instance.PythonInstance(
        str(args.instance_id), str(args.function_id), str(args.function_version),
        function_details, int(args.max_buffered_tuples),
        int(args.expected_healthcheck_interval), str(args.py), pulsar_client)
    pyinstance.run()
    server_instance = server.serve(args.port, pyinstance)

    global to_run
    while to_run:
        time.sleep(1)

    pyinstance.join()
    sys.exit(1)
def main():
    # Setup signal handlers
    signal.signal(signal.SIGTERM, atexit_function)
    signal.signal(signal.SIGHUP, atexit_function)
    signal.signal(signal.SIGINT, atexit_function)

    parser = argparse.ArgumentParser(description='Pulsar Functions Python Instance')
    parser.add_argument('--function_details', required=True, help='Function Details Json String')
    parser.add_argument('--py', required=True, help='Full Path of Function Code File')
    parser.add_argument('--instance_id', required=True, help='Instance Id')
    parser.add_argument('--function_id', required=True, help='Function Id')
    parser.add_argument('--function_version', required=True, help='Function Version')
    parser.add_argument('--pulsar_serviceurl', required=True, help='Pulsar Service Url')
    parser.add_argument('--client_auth_plugin', required=False, help='Client authentication plugin')
    parser.add_argument('--client_auth_params', required=False, help='Client authentication params')
    parser.add_argument('--use_tls', required=False, help='Use tls')
    parser.add_argument('--tls_allow_insecure_connection', required=False, help='Tls allow insecure connection')
    parser.add_argument('--hostname_verification_enabled', required=False, help='Enable hostname verification')
    parser.add_argument('--tls_trust_cert_path', required=False, help='Tls trust cert file path')
    parser.add_argument('--port', required=True, help='Instance Port', type=int)
    parser.add_argument('--metrics_port', required=True, help="Port metrics will be exposed on", type=int)
    parser.add_argument('--max_buffered_tuples', required=True, help='Maximum number of Buffered tuples')
    parser.add_argument('--logging_directory', required=True, help='Logging Directory')
    parser.add_argument('--logging_file', required=True, help='Log file name')
    parser.add_argument('--logging_config_file', required=True, help='Config file for logging')
    parser.add_argument('--expected_healthcheck_interval', required=True,
                        help='Expected time in seconds between health checks', type=int)
    parser.add_argument('--secrets_provider', required=False, help='The classname of the secrets provider')
    parser.add_argument('--secrets_provider_config', required=False,
                        help='The config that needs to be passed to secrets provider')
    parser.add_argument('--install_usercode_dependencies', required=False,
                        help='For packaged python like wheel files, do we need to install all dependencies',
                        type=bool)
    parser.add_argument('--dependency_repository', required=False,
                        help='For packaged python like wheel files, which repository to pull the dependencies from')
    parser.add_argument('--extra_dependency_repository', required=False,
                        help='For packaged python like wheel files, any extra repository to pull the dependencies from')
    parser.add_argument('--state_storage_serviceurl', required=False, help='Managed State Storage Service Url')
    parser.add_argument('--cluster_name', required=True,
                        help='The name of the cluster this instance is running on')
    args = parser.parse_args()

    function_details = Function_pb2.FunctionDetails()
    args.function_details = str(args.function_details)
    if args.function_details[0] == '\'':
        args.function_details = args.function_details[1:]
    if args.function_details[-1] == '\'':
        args.function_details = args.function_details[:-1]
    json_format.Parse(args.function_details, function_details)

    if os.path.splitext(str(args.py))[1] == '.whl':
        if args.install_usercode_dependencies:
            cmd = "pip install -t %s" % os.path.dirname(str(args.py))
            if args.dependency_repository:
                cmd = cmd + " -i %s" % str(args.dependency_repository)
            if args.extra_dependency_repository:
                cmd = cmd + " --extra-index-url %s" % str(args.extra_dependency_repository)
            cmd = cmd + " %s" % str(args.py)
            retval = os.system(cmd)
            if retval != 0:
                print("Could not install user dependencies")
                sys.exit(1)
        else:
            zpfile = zipfile.ZipFile(str(args.py), 'r')
            zpfile.extractall(os.path.dirname(str(args.py)))
        sys.path.insert(0, os.path.dirname(str(args.py)))
    elif os.path.splitext(str(args.py))[1] == '.zip':
        # Assuming zip file with format func.zip
        # extract to folder function
        # internal dir format
        # "func/src"
        # "func/requirements.txt"
        # "func/deps"
        # run pip install to target folder deps folder
        zpfile = zipfile.ZipFile(str(args.py), 'r')
        zpfile.extractall(os.path.dirname(str(args.py)))
        basename = os.path.splitext(str(args.py))[0]
        deps_dir = os.path.join(os.path.dirname(str(args.py)), basename, "deps")
        if os.path.isdir(deps_dir) and os.listdir(deps_dir):
            # get all wheel files from deps directory
            wheel_file_list = [os.path.join(deps_dir, f) for f in os.listdir(deps_dir)
                               if os.path.isfile(os.path.join(deps_dir, f))
                               and os.path.splitext(f)[1] == '.whl']
            cmd = "pip install -t %s --no-index --find-links %s %s" % (
                os.path.dirname(str(args.py)), deps_dir, " ".join(wheel_file_list))
            Log.debug("Install python dependencies via cmd: %s" % cmd)
            retval = os.system(cmd)
            if retval != 0:
                print("Could not install user dependencies specified by the zip file")
                sys.exit(1)
        # add python user src directory to path
        sys.path.insert(0, os.path.join(os.path.dirname(str(args.py)), basename, "src"))

    log_file = os.path.join(
        args.logging_directory,
        util.getFullyQualifiedFunctionName(function_details.tenant,
                                           function_details.namespace,
                                           function_details.name),
        "%s-%s.log" % (args.logging_file, args.instance_id))
    log.init_logger(logging.INFO, log_file, args.logging_config_file)
    Log.info("Starting Python instance with %s" % str(args))

    authentication = None
    use_tls = False
    tls_allow_insecure_connection = False
    tls_trust_cert_path = None
    if args.client_auth_plugin and args.client_auth_params:
        authentication = pulsar.Authentication(args.client_auth_plugin, args.client_auth_params)
    if args.use_tls == "true":
        use_tls = True
    if args.tls_allow_insecure_connection == "true":
        tls_allow_insecure_connection = True
    if args.tls_trust_cert_path:
        tls_trust_cert_path = args.tls_trust_cert_path
    pulsar_client = pulsar.Client(
        args.pulsar_serviceurl,
        authentication=authentication,
        operation_timeout_seconds=30,
        io_threads=1,
        message_listener_threads=1,
        concurrent_lookup_requests=50000,
        log_conf_file_path=None,
        use_tls=use_tls,
        tls_trust_certs_file_path=tls_trust_cert_path,
        tls_allow_insecure_connection=tls_allow_insecure_connection)

    state_storage_serviceurl = None
    if args.state_storage_serviceurl is not None:
        state_storage_serviceurl = str(args.state_storage_serviceurl)

    if args.secrets_provider is not None:
        secrets_provider = util.import_class(
            os.path.dirname(inspect.getfile(inspect.currentframe())),
            str(args.secrets_provider))
    else:
        secrets_provider = util.import_class(
            os.path.dirname(inspect.getfile(inspect.currentframe())),
            "secretsprovider.ClearTextSecretsProvider")
    secrets_provider = secrets_provider()

    secrets_provider_config = None
    if args.secrets_provider_config is not None:
        args.secrets_provider_config = str(args.secrets_provider_config)
        if args.secrets_provider_config[0] == '\'':
            args.secrets_provider_config = args.secrets_provider_config[1:]
        if args.secrets_provider_config[-1] == '\'':
            args.secrets_provider_config = args.secrets_provider_config[:-1]
        secrets_provider_config = json.loads(str(args.secrets_provider_config))
    secrets_provider.init(secrets_provider_config)

    pyinstance = python_instance.PythonInstance(
        str(args.instance_id), str(args.function_id), str(args.function_version),
        function_details, int(args.max_buffered_tuples),
        int(args.expected_healthcheck_interval), str(args.py),
        pulsar_client, secrets_provider, args.cluster_name, state_storage_serviceurl)
    pyinstance.run()
    server_instance = server.serve(args.port, pyinstance)

    # Cannot use latest version of prometheus client because of thread leak
    # prometheus_client.start_http_server(args.metrics_port)
    # Use patched version of prometheus
    # Contains fix from https://github.com/prometheus/client_python/pull/356
    # This can be removed once the fix is in an official prometheus client release
    prometheus_client_fix.start_http_server(args.metrics_port)

    global to_run
    while to_run:
        time.sleep(1)

    pyinstance.join()
    # make sure to close all non-daemon threads before this!
    sys.exit(0)
'''
import logging
import os
import re
import shutil
import subprocess
import sys
from threading import Thread
from functools import partial

import log
import tail
import xutils
from xutils import env, timethis, file_diff, print_diff

log.init_logger('table')
logger = logging.getLogger('table')
sys.stderr = log.ErrOutPutToLogger("table")


class CmdError(Exception):
    pass


@timethis(logger.info)
def run_cmd(command, desc=None, cwd=None, callback=None):
    if desc:
        logger.info('>>>>>>>> %s started', desc)
    ret = subprocess.call(command, shell=True, cwd=cwd)
    if ret != 0:
#!/usr/bin/python -W ignore::DeprecationWarning
import config
import getopt
import sys

from log import init_logger
from helpers import *
from cloudfront import *
from certbot import *
from validator import *

logger = init_logger('test.log', config.c['email_errors'], config.c['log_level'])
domain_objects = load_domain_configs(config.c['domain_config_directory'])


def certman():
    ran = False
    found_domain = False
    try:
        opts, args = getopt.getopt(sys.argv[1:], "achgrudepw", [
            "all", "check-certificates", "generate-certificates",
            "renew-certificates", "upload-certificates",
            "update-cloudfront-distributions", "prune-certificates",
            "generate-hash", "list", "help"])
    except getopt.GetoptError as err:
        print(str(err))  # will print something like "option -z not recognized"
import numpy as np
from torch.autograd import Variable
from dataset import *
import settings, log
import torch.nn as nn, torch.utils.data as data
import torch.optim as optim
from utils import *
from tqdm import tqdm
from model import resnet6

force_new_model = True
pretrained_model = None

# init the settings
settings.init_settings(force_new_model, pretrained_model)
# init the log
log.init_logger(tensorboard=False)


def show_images(img):
    show_image(img[:, :, 0])
    show_image(img[:, :, 1])


def train(model, train_data, criterion, optimizer, epoch):
    total_train_images = len(train_data)
    # print(total_train_images)
    if opt["useGPU"]:
        model = model.cuda()
    # training mode
#!/usr/bin/python -W ignore::DeprecationWarning
import getopt
import sys

from log import init_logger
from helpers import *
from cloudfront import *
from certbot import *
from validator import *

config_file = "config/certman-sample.conf"
config = load_config(config_file)
logger = init_logger('test.log', config['email_errors'], config['log_level'])
domain_objects = load_domain_configs(config['domain_config_directory'])


def certman():
    ran = False
    found_domain = False
    try:
        opts, args = getopt.getopt(sys.argv[1:], "achgrudepw", [
            "all", "check-certificates", "generate-certificates",
            "renew-certificates", "upload-certificates",
            "update-cloudfront-distributions", "prune-certificates",
            "generate-hash", "list", "help"])
    except getopt.GetoptError as err:
KLINE_INTERVAL_3DAY = '3d'
KLINE_INTERVAL_3MINUTE = '3m'
KLINE_INTERVAL_4HOUR = '4h'
KLINE_INTERVAL_5MINUTE = '5m'
KLINE_INTERVAL_6HOUR = '6h'
KLINE_INTERVAL_8HOUR = '8h'
"""
import argparse
from binance_api import BinanceAPI
from log import init_logger
from datetime import datetime

logger = init_logger(__name__, show_debug=True, log_to_file=False)

parser = argparse.ArgumentParser()
parser.add_argument("--k", dest="binance_api_key", nargs='?', default='./binance_api_key.json')
parser.add_argument("--s", dest="symbol", nargs='?', default='BTCUSDT')
parser.add_argument("--i", dest="interval", nargs='?', default='1d')
parser.add_argument("--rs", dest="range_start", nargs='?', default='1 Dec, 2017')
parser.add_argument("--re", dest="range_end", nargs='?',
""" Calculates the sentence log-prob """ if len(sentence) < 1: return -float('inf') log_probs = torch.log(F.softmax(model(sentence), dim=0)) ids = torch.Tensor(sentence[1:]).long() sentence_log_prob = torch.sum(log_probs.gather(1, ids.view(-1, 1))) return sentence_log_prob.item() if __name__ == '__main__': # initialize logger logger = init_logger() logger.info(args) # Load data logger.info('Loading {} dataset...'.format(args.dataset)) if args.dataset.lower() == 'seame' or args.dataset.lower() == 'qg': dataset = read_dataset(args.data, dataset=args.dataset) dataset = dataset[:int(len(dataset) * args.subset)] train = dataset[:int(len(dataset) * 0.8)] dev = dataset[int(len(dataset) * 0.8) + 1:-1] train_ids = None elif args.dataset.lower() == 'miami' or args.dataset.lower() == 'tagalog': train, dev, test, train_ids, dev_ids, test_ids, miami_dict = read_miami_data( args.data) elif args.dataset.lower() == 'opensub': train, dev, train_ids, dev_ids = read_opensub_data(args.data)
def main():
    # Setup signal handlers
    signal.signal(signal.SIGTERM, atexit_function)
    signal.signal(signal.SIGHUP, atexit_function)
    signal.signal(signal.SIGINT, atexit_function)

    parser = argparse.ArgumentParser(description='Pulsar Functions Python Instance')
    parser.add_argument('--function_details', required=True, help='Function Details Json String')
    parser.add_argument('--py', required=True, help='Full Path of Function Code File')
    parser.add_argument('--instance_id', required=True, help='Instance Id')
    parser.add_argument('--function_id', required=True, help='Function Id')
    parser.add_argument('--function_version', required=True, help='Function Version')
    parser.add_argument('--pulsar_serviceurl', required=True, help='Pulsar Service Url')
    parser.add_argument('--client_auth_plugin', required=False, help='Client authentication plugin')
    parser.add_argument('--client_auth_params', required=False, help='Client authentication params')
    parser.add_argument('--use_tls', required=False, help='Use tls')
    parser.add_argument('--tls_allow_insecure_connection', required=False, help='Tls allow insecure connection')
    parser.add_argument('--hostname_verification_enabled', required=False, help='Enable hostname verification')
    parser.add_argument('--tls_trust_cert_path', required=False, help='Tls trust cert file path')
    parser.add_argument('--port', required=True, help='Instance Port', type=int)
    parser.add_argument('--max_buffered_tuples', required=True, help='Maximum number of Buffered tuples')
    parser.add_argument('--logging_directory', required=True, help='Logging Directory')
    parser.add_argument('--logging_file', required=True, help='Log file name')
    parser.add_argument('--logging_config_file', required=True, help='Config file for logging')
    parser.add_argument('--expected_healthcheck_interval', required=True,
                        help='Expected time in seconds between health checks', type=int)
    parser.add_argument('--secrets_provider', required=False, help='The classname of the secrets provider')
    parser.add_argument('--secrets_provider_config', required=False,
                        help='The config that needs to be passed to secrets provider')
    parser.add_argument('--install_usercode_dependencies', required=False,
                        help='For packaged python like wheel files, do we need to install all dependencies',
                        type=bool)
    parser.add_argument('--dependency_repository', required=False,
                        help='For packaged python like wheel files, which repository to pull the dependencies from')
    parser.add_argument('--extra_dependency_repository', required=False,
                        help='For packaged python like wheel files, any extra repository to pull the dependencies from')
    args = parser.parse_args()

    function_details = Function_pb2.FunctionDetails()
    args.function_details = str(args.function_details)
    if args.function_details[0] == '\'':
        args.function_details = args.function_details[1:]
    if args.function_details[-1] == '\'':
        args.function_details = args.function_details[:-1]
    json_format.Parse(args.function_details, function_details)

    if os.path.splitext(str(args.py))[1] == '.whl':
        if args.install_usercode_dependencies:
            cmd = "pip install -t %s" % os.path.dirname(str(args.py))
            if args.dependency_repository:
                cmd = cmd + " -i %s" % str(args.dependency_repository)
            if args.extra_dependency_repository:
                cmd = cmd + " --extra-index-url %s" % str(args.extra_dependency_repository)
            cmd = cmd + " %s" % str(args.py)
            retval = os.system(cmd)
            if retval != 0:
                print("Could not install user dependencies")
                sys.exit(1)
        else:
            zpfile = zipfile.ZipFile(str(args.py), 'r')
            zpfile.extractall(os.path.dirname(str(args.py)))
        sys.path.insert(0, os.path.dirname(str(args.py)))
    elif os.path.splitext(str(args.py))[1] == '.zip':
        # Assuming zip file with format func.zip
        # extract to folder function
        # internal dir format
        # "func/src"
        # "func/requirements.txt"
        # "func/deps"
        # run pip install to target folder deps folder
        zpfile = zipfile.ZipFile(str(args.py), 'r')
        zpfile.extractall(os.path.dirname(str(args.py)))
        basename = os.path.splitext(str(args.py))[0]
        requirements_txt_file = os.path.join(os.path.dirname(str(args.py)), basename, "requirements.txt")
        deps_file = os.path.join(os.path.dirname(str(args.py)), basename, "deps")
        cmd = "pip install -t %s -r %s --no-index --find-links %s" % (
            os.path.dirname(str(args.py)), requirements_txt_file, deps_file)
        retval = os.system(cmd)
        if retval != 0:
            print("Could not install user dependencies specified by the zip file")
            sys.exit(1)
        sys.path.insert(0, os.path.join(os.path.dirname(str(args.py)), basename, "src"))

    log_file = os.path.join(
        args.logging_directory,
        util.getFullyQualifiedFunctionName(function_details.tenant,
                                           function_details.namespace,
                                           function_details.name),
        "%s-%s.log" % (args.logging_file, args.instance_id))
    log.init_logger(logging.INFO, log_file, args.logging_config_file)
    Log.info("Starting Python instance with %s" % str(args))

    authentication = None
    use_tls = False
    tls_allow_insecure_connection = False
    tls_trust_cert_path = None
    if args.client_auth_plugin and args.client_auth_params:
        authentication = pulsar.Authentication(args.client_auth_plugin, args.client_auth_params)
    if args.use_tls == "true":
        use_tls = True
    if args.tls_allow_insecure_connection == "true":
        tls_allow_insecure_connection = True
    if args.tls_trust_cert_path:
        tls_trust_cert_path = args.tls_trust_cert_path
    pulsar_client = pulsar.Client(args.pulsar_serviceurl, authentication,
                                  30, 1, 1, 50000, None, use_tls,
                                  tls_trust_cert_path, tls_allow_insecure_connection)

    if args.secrets_provider is not None:
        secrets_provider = util.import_class(
            os.path.dirname(inspect.getfile(inspect.currentframe())),
            str(args.secrets_provider))
    else:
        secrets_provider = util.import_class(
            os.path.dirname(inspect.getfile(inspect.currentframe())),
            "secretsprovider.ClearTextSecretsProvider")
    secrets_provider = secrets_provider()

    secrets_provider_config = None
    if args.secrets_provider_config is not None:
        secrets_provider_config = json.loads(str(args.secrets_provider_config))
    secrets_provider.init(secrets_provider_config)

    pyinstance = python_instance.PythonInstance(
        str(args.instance_id), str(args.function_id), str(args.function_version),
        function_details, int(args.max_buffered_tuples),
        int(args.expected_healthcheck_interval), str(args.py),
        pulsar_client, secrets_provider)
    pyinstance.run()
    server_instance = server.serve(args.port, pyinstance)

    global to_run
    while to_run:
        time.sleep(1)

    pyinstance.join()
    sys.exit(1)
    large_settled_game_round_list = step2_try_settle_bets(_curr_block_height, 3)
    viewupdator.update_view(small_settled_game_round_list,
                            big_settled_game_round_list,
                            large_settled_game_round_list,
                            _curr_block_height)


def game_loop():
    try:
        global prev_block_height
        while True:
            # try:
            curr_block_height = api.get_current_block_height()
            # log.Info("Current Block Height: {} {}".format(curr_block_height, int(time.time())))
            if curr_block_height != prev_block_height:
                prev_block_height = curr_block_height
                on_block_height_changed(curr_block_height)
            time.sleep(1)
    except Exception as e:
        # format_exc() returns the traceback as a string; print_exc() returns None
        _ex_str = traceback.format_exc()
        # print(_ex_str)
        if _ex_str is not None:
            log.Info("Exception: " + _ex_str)


log.init_logger()
model.init_addresses()
db.init_db()
game_loop()
train_data = open('./datasets/train_data.json', 'r', encoding='utf-8').readlines()
dev_data = open('./datasets/dev_data.json', 'r', encoding='utf-8').readlines()
id2predicate, predicate2id = json.load(open('./datasets/all_50_schemas_me.json', encoding='utf-8'))
id2predicate = {int(i): j for i, j in id2predicate.items()}
id2char, char2id = json.load(open('./datasets/all_chars_me.json', encoding='utf-8'))
pos = open('datasets/pos', 'r', encoding='utf-8').readlines()
pos2id = {p.strip(): i + 2 for i, p in enumerate(pos)}

char_size = 128
pos_size = 16
num_classes = len(id2predicate)

logger = log.init_logger(log_path='logs')
logger.info('POS features added to the input')


def seq_padding(X):
    # Pad every sequence in X with zeros up to the length of the longest one.
    L = [len(x) for x in X]
    ML = max(L)
    return [x + [0] * (ML - len(x)) for x in X]


class data_generator:
    def __init__(self, data, batch_size=64):
        self.data = data
        self.batch_size = batch_size
        self.steps = len(self.data) // self.batch_size
        if len(self.data) % self.batch_size != 0:
            self.steps += 1
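# A quick check of seq_padding's behavior (illustrative values only):
batch = [[5, 2, 9], [7], [1, 4]]
print(seq_padding(batch))
# -> [[5, 2, 9], [7, 0, 0], [1, 4, 0]]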
            print()  # as above
            runner.kill()
        else:
            break

    def runtests(self):
        pytest.main(['-x', '--log-level=INFO', 'tests'])
        if hasattr(pytest, "invbox_failed"):
            sys.exit(1)

    def init_test_data(self, create=True, drop=True):
        from models import create_tables, drop_tables
        if drop:
            logging.info("drop table ...")
            drop_tables()
        if create:
            logging.info("create table ...")
            create_tables()
        logging.info("init test data ...")
        from tests import init_data
        init_data.init_all()


if __name__ == "__main__":
    autodiscover.autodiscover("./service")
    init_logger(level=config.log_level, path=config.log_path)
    logging.getLogger("peewee").setLevel(getattr(logging, config.log_level.upper()))
    fire.Fire(Command)
# -*- coding: utf-8 -*-
import sys

import tornado.httpserver
import tornado.wsgi

from log import init_logger
from config import config
from controller import init_controllers
from controller.route import Route

reload(sys)
sys.setdefaultencoding("utf-8")  # Python 2 only; unavailable on Python 3

init_controllers()
urls = Route.routes()
init_logger(path=config.log_path, level=config.log_level)

app = tornado.wsgi.WSGIApplication(urls, cookie_secret="fa5012f23340edae6db5df925b345912")
application = tornado.wsgi.WSGIAdapter(app)
from torch.autograd import Variable
from dataset import *
import settings, log
import torch.nn as nn, torch.utils.data as data
import torch.optim as optim
from utils import *
from tqdm import tqdm
from model import resnet6
import scipy.io as sio

pretrained_model = "../scratch/sysu_mm01/deepzeropadding-14May2019-125214_deep-zero-padding/deep_zero_model#156.pth"

# init the settings
settings.init_settings(False, pretrained_model)
# init the log
log.init_logger(tensorboard=False, prepend_text="test_")


def get_max_test_id(test_ids):
    int_test_ids = [int(ID) for ID in test_ids]
    return np.max(int_test_ids)


def prepare_empty_matfile_config(max_test_id):
    cam_features = np.empty(max_test_id, dtype=object)
    for i in range(len(cam_features)):
        cam_features[i] = []
    return cam_features


def test(model, test_dataset, test_ids):
        return ldap_user_set - local_user_set, local_user_set - ldap_user_set, ldap_users

    def handler_data(self, log_obj):
        add_user_set, delete_user_set, ldap_users = self.compare_users()
        if delete_user_set:
            log_obj.info("Deleted users:%s", ",".join(delete_user_set))
            delete_users = "("
            for user in delete_user_set:
                delete_users += "'%s'," % user
            delete_users = delete_users.rstrip(",")
            delete_users += ");"
            sql = "delete from users where username in %s" % delete_users
            self.__cursor.execute(sql)
        if add_user_set:
            log_obj.info("Added users:%s", ",".join(add_user_set))
            sql2 = "insert into users(`username`,`givenname`,`email`) values"
            for user in add_user_set:
                sql2 += "('%s','%s','%s')," % (user,
                                               ldap_users.get(user).get("given_name"),
                                               ldap_users.get(user).get("email"))
            sql2 = sql2.rstrip(",")
            sql2 += ";"
            self.__cursor.execute(sql2)
        self.__cursor.close()
        self.__conn.close()


if __name__ == "__main__":
    obj = mysqlClient("localhost", "root", "", "cmdb")
    log_obj = log.init_logger("../logs/sync.log")
    obj.handler_data(log_obj)
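# String-built SQL like the above is injection-prone. A sketch of the same two
# statements with parameterized queries (MySQLdb/PyMySQL %s placeholders;
# `cursor` stands in for self.__cursor and is an assumption, not this
# project's code):
if delete_user_set:
    placeholders = ",".join(["%s"] * len(delete_user_set))
    cursor.execute("DELETE FROM users WHERE username IN (%s)" % placeholders,
                   list(delete_user_set))

if add_user_set:
    rows = [(u, ldap_users[u]["given_name"], ldap_users[u]["email"])
            for u in add_user_set]
    cursor.executemany(
        "INSERT INTO users(`username`,`givenname`,`email`) VALUES (%s, %s, %s)",
        rows)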
# coding:utf-8
from lxml import etree
from StringIO import StringIO  # Python 2; use io.StringIO on Python 3
import requests
from db import Song
from datetime import datetime
from log import init_logger
import threading
from db import Proxy
import random

logger = init_logger()


class AtomicInteger():
    def __init__(self, value):
        self.mutex = threading.Lock()
        self.value = value

    def incre(self):
        self.mutex.acquire()
        self.value += 1
        self.mutex.release()
        return self.value


class FetchWorker(threading.Thread):
    '''
    Fetches the actual data from the Sina API
    '''
# 3. Qingming Festival: around April 5th, same rules as New Year's Day
# 4. Labor Day: May 1st, same rules as New Year's Day
# 5. Dragon Boat Festival: 5th day of the 5th lunar month, same rules as New Year's Day
# 6. Mid-Autumn Festival: 15th day of the 8th lunar month, same rules as New Year's Day
# 7. National Day:
#    1. If its holiday is separate from Mid-Autumn, same rules as Spring Festival;
#    2. If merged with Mid-Autumn:
#       1. Eight days off; if the end date is a Sunday, only one day of the
#          weekend before National Day is a make-up workday
#       2. Eight days off ending on the 8th; one make-up workday on each
#          weekend before and after
import pandas as pd
from datetime import datetime, date, timedelta
from zhdate import ZhDate
from log import logger, init_logger

init_logger()  # log_path
logger.info('==>start')

start_time = '20200101'
end_time = '20201231'
all_time = [x.date() for x in pd.date_range(start_time, end_time)]


class Holiday():
    def __init__(self, year):
        '''
        :param year: the year to compute holidays for
        '''
        self.year = year
from pytube import YouTube
from log import init_logger
from log import logger
import re

init_logger('jwli.log')


def download_youtube_video(video_url, title):
    # Keep only CJK characters, digits, and ASCII letters in the title.
    title = re.sub(u"([^\u4e00-\u9fa5\u0030-\u0039\u0041-\u005a\u0061-\u007a])", " ", title)
    yt = YouTube(video_url)
    mp4 = yt.streams.first()
    logger.warning(f'{title} start')
    # print(f'{title}start')
    mp4.download('./youtube_video/', filename=f'{title}')
    logger.warning(f'{title} end')
    # print(f'{title}end')
import event_handler
from kodi import __player__
import kodi.builtin
import kodi.addon_settings
import time
from tcp_events import State
import signaldb as sdb
import log as log_module

log = log_module.init_logger(__name__)

__author__ = 'lars'


class KombiInstrument(object):
    DEVICE = "IBUS_DEV_IKE"
    DISPLAY_SIZE = 20
    MSG_DELAY_INIT = 3.0
    MSG_DELAY_OCCUPIED = 10.0

    def __init__(self, send):
        self.send = send
        self.welcome_msg = None
        self.last_msg = None
        self.msg_scheduled = False

    def init_events(self, bind_event):
import logging
import os
from datetime import datetime, time

from telegram.utils.request import Request
from telegram.ext.messagequeue import MessageQueue
from telegram.ext import Updater, Filters
from telegram.ext import CommandHandler, MessageHandler

import db
import tasks
from log import init_logger
from reddit import Reddit
from googleapi.timezone import TimeZoneAPI
from bot import MQBot, handlers

if __name__ == "__main__":
    init_logger(os.environ.get("LOGGER_LEVEL", "WARNING"))
    logger = logging.getLogger(__name__)
    logger.info('Starting bot...')

    token = os.environ.get("TELEGRAM_TOKEN")
    if token is None:
        raise RuntimeError("Set environ variable TELEGRAM_TOKEN")

    request_kwargs = {
        "con_pool_size": 8,
        "connect_timeout": 10,
        "read_timeout": 3
    }
    proxy = os.environ.get("TELEGRAM_PROXY")
    if proxy:
        request_kwargs["proxy_url"] = proxy