Example #1
def start_ssl_proxy(
    port: int,
    target: PortOrUrl,
    target_ssl: bool = False,
    client_cert_key: Optional[Tuple[str, str]] = None,
    asynchronous: bool = False,
    fix_encoding: bool = False,
):
    """Start a proxy server that accepts SSL requests and forwards them to a backend (either SSL or non-SSL)."""

    if client_cert_key or fix_encoding:
        # use a custom proxy listener if the user provides client certificates for authentication,
        # or if encoding fixes are requested
        if client_cert_key:
            server = _do_start_ssl_proxy_with_client_auth(
                port, target, client_cert_key=client_cert_key)
        else:
            server = _do_start_ssl_proxy_with_listener(port, target)
        if not asynchronous:
            server.join()
        return server

    def _run(*args):
        return _do_start_ssl_proxy(port, target, target_ssl=target_ssl)

    if not asynchronous:
        return _run()
    proxy = FuncThread(_run)
    TMP_THREADS.append(proxy)
    proxy.start()
    return proxy
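A minimal usage sketch for the function above; the port number and backend URL are illustrative assumptions, not values taken from this codebase:

# hypothetical values: terminate SSL on port 8443 and forward to a plain-HTTP backend
proxy = start_ssl_proxy(8443, "http://localhost:8080", target_ssl=False, asynchronous=True)
# with asynchronous=True the started FuncThread is returned and can be joined later, e.g. on shutdown
proxy.join()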
Example #2
def fire_event(event_type, payload=None):
    if config.DISABLE_EVENTS:
        return
    global SENDER_THREAD
    if not SENDER_THREAD:
        SENDER_THREAD = FuncThread(poll_and_send_messages, {})
        SENDER_THREAD.start()
    api_key = read_api_key_safe()
    if not api_key:
        # only send events if an API key has been configured
        return
    from localstack.utils.analytics import log
    from localstack.utils.testutil import (  # leave here to avoid circular dependency
        is_local_test_mode,
    )

    if payload is None:
        payload = {}
    if isinstance(payload, dict):
        if is_travis():
            payload["travis"] = True
        if is_local_test_mode():
            payload["int"] = True

    # FIXME: remove the old event-queue code below entirely before the next release
    # event = AnalyticsEvent(event_type=event_type, payload=payload, api_key=api_key)
    # EVENT_QUEUE.put_nowait(event)
    log.event("legacy", {"event": event_type, "payload": payload})
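A usage sketch for fire_event; the event type string and payload key are hypothetical:

# no-op if events are disabled or no API key is configured, as the guards above show
fire_event("infra.start", {"version": "latest"})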
Example #3
def start_api_server_locally(request):
    """Start the backend server for the given request dict ({"api": ..., "port": ...}) in a background thread."""
    if localstack_config.FORWARD_EDGE_INMEM:
        if "__started__" in API_SERVERS:
            return
        API_SERVERS["__started__"] = True

    api = request.get("api")
    port = request.get("port")
    if api in API_SERVERS:
        return API_SERVERS[api]
    result = API_SERVERS[api] = {}

    def thread_func(params):
        if localstack_config.FORWARD_EDGE_INMEM:
            return moto_server.main(
                ["-p", str(port), "-H", constants.BIND_HOST])
        return moto_server.main(
            [api, "-p", str(port), "-H", constants.BIND_HOST])

    thread = FuncThread(thread_func)
    thread.start()
    TMP_THREADS.append(thread)
    result["port"] = port
    result["thread"] = thread
    return result
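A usage sketch for the function above; the dict keys "api" and "port" are the ones read by the function, while the concrete values are illustrative:

# hypothetical values: start the moto-backed "s3" backend on port 4572
entry = start_api_server_locally({"api": "s3", "port": 4572})
# the function may return None when the in-memory edge forwarder has already started the servers
if entry:
    entry["thread"].join()  # optionally block until the backend thread exits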
Example #4
 def __init__(self, async_func_gen=None, loop=None):
     """Pass a function that receives an event loop instance and a shutdown event,
     and returns an async function."""
     FuncThread.__init__(self, self.run_func, None)
     self.async_func_gen = async_func_gen
     self.loop = loop
     self.shutdown_event = None
Example #5
 def __init__(self,
              events_file,
              callback,
              ready_mutex=None,
              fh_d_stream=None):
     FuncThread.__init__(self, self.retrieve_loop, None)
     self.events_file = events_file
     self.callback = callback
     self.ready_mutex = ready_mutex
     self.fh_d_stream = fh_d_stream
Example #6
class JobScheduler:

    _instance = None

    def __init__(self):
        # TODO: introduce RLock for mutating jobs list
        self.jobs = []
        self.thread = None

    def add_job(self, job_func, schedule, enabled=True):
        job = Job(job_func, schedule, enabled=enabled)
        self.jobs.append(job)
        return job.job_id

    def disable_job(self, job_id):
        for job in self.jobs:
            if job.job_id == job_id:
                job.is_enabled = False
                break

    def cancel_job(self, job_id):
        i = 0
        while i < len(self.jobs):
            if self.jobs[i].job_id == job_id:
                del self.jobs[i]
            else:
                i += 1

    def loop(self, *args):
        while True:
            try:
                for job in list(self.jobs):
                    job.run()
            except Exception:
                # keep the scheduler loop alive even if an individual job raises
                pass
            # simple heuristic to run the loop approximately once per minute
            # TODO: keep track of job execution times, to avoid duplicate executions
            time.sleep(59.9)

    def start_loop(self):
        self.thread = FuncThread(self.loop)
        self.thread.start()

    @classmethod
    def instance(cls):
        if not cls._instance:
            cls._instance = JobScheduler()
        return cls._instance

    @classmethod
    def start(cls):
        instance = cls.instance()
        if not instance.thread:
            instance.start_loop()
        return instance
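A usage sketch for JobScheduler; the Job class and the schedule format it expects are not shown above, so the cron-style string and the job callable signature are assumptions:

scheduler = JobScheduler.start()  # lazily creates the singleton and starts its loop thread
job_id = scheduler.add_job(lambda *args: print("tick"), "*/1 * * * *")  # assumed cron-like schedule
# ...
scheduler.cancel_job(job_id)  # remove the job from the list again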
Example #7
 def retrieve_loop(self, params):
     sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
     sock.bind(self.events_file)
     sock.listen(1)
     if self.ready_mutex:
         self.ready_mutex.release()
     while self.running:
         try:
             conn, client_addr = sock.accept()
             thread = FuncThread(self.handle_connection, conn)
             thread.start()
         except Exception as e:
             LOGGER.error("Error dispatching client request: %s %s", e,
                          traceback.format_exc())
     sock.close()
Example #8
 def __init__(self, params):
     FuncThread.__init__(self, self.start_reading, params)
     self.buffer = []
     self.params = params
     # number of lines that make up a single log entry
     self.buffer_size = 2
     # determine log level
     self.log_level = params.get("level")
     # get log subscribers
     self.log_subscribers = params.get("log_subscribers", [])
     if self.log_level is None:
         self.log_level = DEFAULT_KCL_LOG_LEVEL
     if self.log_level > 0:
         self.log_level = min(self.log_level, MAX_KCL_LOG_LEVEL)
         levels = OutputReaderThread.get_log_level_names(self.log_level)
         # regular expression to filter the printed output
         self.filter_regex = r".*(%s):.*" % ("|".join(levels))
         # create prefix and logger
         self.prefix = params.get("log_prefix") or "LOG"
         self.logger = logging.getLogger(self.prefix)
         self.logger.severe = self.logger.critical
         self.logger.fatal = self.logger.critical
         self.logger.setLevel(self.log_level)
Example #9
 def __init__(self):
     FuncThread.__init__(self, self.run_proxy, None)
     self.shutdown_event = None
     self.loop = None
Example #10
 def start_loop(self):
     self.thread = FuncThread(self.loop)
     self.thread.start()
Example #11
 def do_run(self):
     FuncThread(self.job_func).start()
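do_run launches the wrapped job function on a fresh FuncThread each call, so a long-running job does not block the scheduler loop from Example #6. The same fire-and-forget pattern with a standalone callable (the function name and params value are illustrative; FuncThread passing a single params argument to its callable follows the convention visible in the examples above):

def _background_work(params):
    # FuncThread passes the params value supplied at construction time to the callable
    print("working on", params)

worker = FuncThread(_background_work, {"task": "demo"})
worker.start()  # returns immediately; the work runs on the new thread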