Code Example #1
import time

from mercury.platform import Platform
from mercury.system.pubsub import PubSub   # PubSub import path assumed from the project layout
from mercury.system.utility import Utility


def main():
    platform = Platform()
    platform.connect_to_cloud()
    # wait until connected
    while not platform.cloud_ready():
        try:
            time.sleep(0.1)
        except KeyboardInterrupt:
            # this allows us to stop the application while waiting for cloud connection
            platform.stop()
            return

    util = Utility()
    pubsub = PubSub()
    if pubsub.feature_enabled():
        # Publish an event
        # headers = optional parameters for the event
        # body = event payload
        for x in range(10):
            print("publishing event#", x)
            pubsub.publish("hello.topic",
                           headers={
                               "some_parameter": "some_value",
                               "n": x
                           },
                           body="hello world " +
                           util.get_iso_8601(time.time()))
        # quit application
        platform.stop()
    else:
        print("Pub/Sub feature not available from the underlying event stream")
        print("Did you start the language connector with Kafka?")
        print(
            "e.g. java -Dcloud.connector=kafka -Dcloud.services=kafka.reporter -jar language-connector-1.12.31.jar"
        )
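The example above shows only the publishing side. Below is a minimal subscriber sketch; it reuses the PubSub.subscribe(topic, route, parameters) and Platform.register(route, function, instances) call patterns that appear in Code Example #10, while the route name 'hello.subscriber' and the client/group parameter list are purely illustrative.

# Subscriber sketch (assumptions: register/subscribe signatures as used in
# Code Example #10; route name and subscribe parameters are illustrative)
def subscribe_to_events():
    platform = Platform()
    pubsub = PubSub()

    # user function follows the (headers, body) style seen in Code Example #10
    def hello(headers: dict, body: any):
        print('received', headers, body)

    if pubsub.feature_enabled():
        # register a service route, then attach the topic to that route
        platform.register('hello.subscriber', hello, 1)
        pubsub.subscribe('hello.topic', 'hello.subscriber',
                         ['client-id-101', 'group-101'])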
Code Example #2
    def __init__(self, data=None):
        self.util = Utility()
        self.normalized = False
        self.dataset = dict() if data is None else data
        if not isinstance(self.dataset, dict):
            raise ValueError('Invalid input - Expect: dict, Actual: ' +
                             str(type(data)))
Code Example #3
File: logger.py Project: skofgar/mercury-python
    def __init__(self, log_dir='/tmp/log', log_file=None, log_level='INFO'):
        # automatically create log directory
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)

        # DEBUG | INFO | WARN | ERROR | FATAL
        level = logging.INFO
        if log_level.upper() == 'DEBUG':
            level = logging.DEBUG
        elif log_level.upper() == 'ERROR':
            level = logging.ERROR
        elif log_level.upper() == 'WARN':
            level = logging.WARNING
        elif log_level.upper() == 'FATAL':
            level = logging.CRITICAL
        self.logger = logging.getLogger(log_file)
        self.logger.setLevel(level)
        ch = logging.StreamHandler()
        ch.setLevel(level)
        formatter = logging.Formatter(
            fmt='%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s'
        )
        formatter.default_msec_format = '%s.%03d'
        ch.setFormatter(formatter)
        self.logger.addHandler(ch)

        if log_file is not None:
            filename = Utility().normalize_path(log_dir + '/' +
                                                log_file) + '.log'
            fh = RotatingFileHandler(filename,
                                     maxBytes=1024 * 1024,
                                     backupCount=10)
            fh.setLevel(level)
            fh.setFormatter(formatter)
            self.logger.addHandler(fh)
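A short usage sketch for the logger above. The LoggingService class name and the get_logger() accessor are taken from Code Examples #4 and #12; the directory and file names here are illustrative.

# creates /tmp/log if needed, logs to console and to /tmp/log/demo.log
log = LoggingService(log_dir='/tmp/log', log_file='demo', log_level='DEBUG').get_logger()
log.debug('rotating file handler caps each file at 1 MB with 10 backups')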
Code Example #4
    def __init__(self, config_file: str = None):
        if sys.version_info.major < 3:
            python_version = str(sys.version_info.major) + "." + str(
                sys.version_info.minor)
            raise RuntimeError("Requires python 3.6 and above. Actual: " +
                               python_version)
        self.origin = 'py' + (''.join(str(uuid.uuid4()).split('-')))
        self.config = ConfigReader(config_file)
        self.util = Utility()
        log_dir = self.config.get_property('log.directory')
        log_file = self.config.get_property('log.filename')
        log_level = self.config.get_property('log.level')
        self._max_threads = self.config.get('max.threads')
        self.work_dir = self.config.get_property('work.directory')
        self.log = LoggingService(log_dir=log_dir,
                                  log_file=log_file,
                                  log_level=log_level).get_logger()
        self._loop = asyncio.new_event_loop()
        # DO NOT CHANGE 'distributed.trace.processor' which is an optional user defined trace aggregator
        my_tracer = DistributedTrace(self, 'distributed.trace.processor')
        my_nc = self.config.get_property('network.connector')
        self._cloud = NetworkConnector(self, my_tracer, self._loop, my_nc,
                                       self.origin)
        self._function_queues = dict()
        self._executor = concurrent.futures.ThreadPoolExecutor(
            max_workers=self._max_threads)
        self.log.info("Concurrent thread pool = " + str(self._max_threads))
        #
        # Before we figure out how to solve blocking file I/O, we will regulate event output rate.
        #
        my_test_dir = self.util.normalize_path(self.work_dir + "/test")
        if not os.path.exists(my_test_dir):
            os.makedirs(my_test_dir)
        self._throttle = Throttle(self.util.normalize_path(my_test_dir +
                                                           "/to_be_deleted"),
                                  log=self.log)
        self._seq = 0
        self.util.cleanup_dir(my_test_dir)
        self.log.info("Estimated performance is " +
                      format(self._throttle.get_tps(), ',d') +
                      " events per second")
        self.running = True
        self.stopped = False
        # distributed trace sessions
        self._traces = {}

        # start event loop in a new thread to avoid blocking the main thread
        def main_event_loop():
            self.log.info("Event system started")
            self._loop.run_forever()
            self.log.info("Event system stopped")
            self._loop.close()

        threading.Thread(target=main_event_loop).start()
Code Example #5
def publish_some_events():
    util = Utility()
    ps = PubSub()
    if ps.feature_enabled():
        # Publish an event
        # headers = optional parameters for the event
        # body = event payload
        for x in range(10):
            log.info(f'publishing event#{x}')
            ps.publish('hello.topic', headers={"some_parameter": "some_value", "n": x},
                       body="hello python - " + util.get_iso_8601(time.time()))
    else:
        print('Pub/Sub feature is not available from the underlying event stream')
        print('Did you start the language connector with cloud.connector=Kafka or cloud.services=kafka.pubsub?')
        print('e.g. java -Dcloud.connector=kafka -Dcloud.services=kafka.reporter -jar language-connector.jar')

    # quit application
    platform.stop()
Code Example #6
    def __init__(self, platform, distributed_trace, loop, url_list, origin):
        self.platform = platform
        self._distributed_trace = distributed_trace
        self._loop = loop
        self.log = platform.log
        self.normal = True
        self.started = False
        self.ready = False
        self.ws = None
        self.close_code = 1000
        self.close_message = 'OK'
        self.last_active = time.time()
        self.max_ws_payload = 32768
        self.util = Utility()
        self.urls = self.util.multi_split(url_list, ', ')
        self.next_url = 1
        self.origin = origin
        self.cache = SimpleCache(loop, self.log, timeout_seconds=30)
        self.api_key = self._get_api_key()
Code Example #7
File: diskqueue.py Project: Accenture/mercury-python
    def __init__(self, queue_dir: str = None, queue_id: str = None):
        # automatically create queue directory
        if queue_dir is None or queue_id is None:
            raise ValueError('Missing queue_dir or queue_id')
        self.queue_id = queue_id
        if not os.path.exists(queue_dir):
            os.makedirs(queue_dir, exist_ok=True)
        self.util = Utility()
        self._dir = self.util.normalize_path(f'{queue_dir}/{queue_id}')
        self._empty = False
        self._create_dir = False
        self._memory = list()
        self._read_file_no = 1
        self._write_file_no = 1
        self._read_counter = 0
        self._write_counter = 0
        self._file = None
        self._peeked = None
        self.initialize()
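A construction sketch for the disk queue above, assuming the class in diskqueue.py is the ElasticQueue referenced in Code Example #9; the paths are illustrative.

queue = ElasticQueue(queue_dir='/tmp/demo/queues', queue_id='hello.world')
try:
    ElasticQueue()      # both arguments are required
except ValueError as e:
    print(e)            # Missing queue_dir or queue_id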
Code Example #8
    def __init__(self, loop, executor, manager_queue, worker_queue, route,
                 user_function, instance, singleton, interceptor):
        self.platform = Platform()
        self.util = Utility()
        self.log = self.platform.log
        self._loop = loop
        self._executor = executor
        self.manager_queue = manager_queue
        self.worker_queue = worker_queue
        self.route = route
        # trace all routes except interceptor inbox routes and ws.outgoing
        normal_service = not (interceptor and self.util.is_inbox(route))
        self.tracing = normal_service and route != 'ws.outgoing'
        self.user_function = user_function
        self.instance = instance
        self.singleton = singleton
        self.interceptor = interceptor
        self._loop.create_task(self.listen())
        self.log.debug(f'{self.route} #{self.instance} started')
Code Example #9
File: platform.py Project: dhvidding/mercury-python
    def __init__(self, loop, executor, queue, route, user_function,
                 total_instances):
        self.platform = Platform()
        self.util = Utility()
        self.log = self.platform.log
        queue_dir = self.util.normalize_path(
            self.platform.work_dir + "/queues/" + self.platform.get_origin())
        self.disk_queue = ElasticQueue(queue_dir=queue_dir, queue_id=route)
        self._loop = loop
        self._executor = executor
        self.queue = queue
        self.route = route
        self.user_function = user_function
        self.ready_queue = asyncio.Queue(loop=self._loop)
        self.worker_list = dict()
        self._peek_worker = None
        self._buffering = True
        self._interceptor = total_instances == 0
        self._singleton = True if total_instances < 1 else False
        self._loop.create_task(self.listen(total_instances))
Code Example #10
File: pubsub.py Project: skofgar/mercury-python
    def __init__(self):
        self.platform = Platform()
        self.po = PostOffice()
        self.util = Utility()
        self.subscription = dict()

        def subscription_sync(headers: dict, body: any):
            if 'type' in headers and headers['type'] == 'subscription_sync':
                if len(self.subscription) > 0:
                    for topic in self.subscription:
                        route_map = self.subscription[topic]
                        for route in route_map:
                            parameters = route_map[route]
                            self.platform.log.info('Update subscription ' +
                                                   topic + ' -> ' + route)
                            self.subscribe(topic, route, parameters)
                else:
                    self.platform.log.info('No subscription to update')

        self.platform.register('pub.sub.sync',
                               subscription_sync,
                               1,
                               is_private=True)
Code Example #11
    def __init__(self, route: str = None, expiry_seconds: int = 1800):
        self.platform = Platform()
        self.po = PostOffice()
        self.util = Utility()
        self.route = None
        self.input_stream = None
        self.output_stream = None
        self.eof = False
        self.input_closed = False
        self.output_closed = False

        if route is not None:
            # open an existing stream
            if isinstance(route, str):
                name: str = route
                if name.startswith('stream.') and '@' in name:
                    self.route = name
            if self.route is None:
                raise ValueError('Invalid stream route')
        else:
            # create a new stream
            if not isinstance(expiry_seconds, int):
                raise ValueError('expiry_seconds must be int')
            result = self.po.request(self.STREAM_IO_MANAGER,
                                     6.0,
                                     headers={
                                         'type': 'create',
                                         'expiry_seconds': expiry_seconds
                                     })
            if isinstance(result, EventEnvelope) and isinstance(result.get_body(), str) \
                    and result.get_status() == 200:
                name: str = result.get_body()
                if name.startswith('stream.') and '@' in name:
                    self.route = name
            if self.route is None:
                raise IOError('Stream manager is not responding correctly')
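The constructor above either opens an existing stream or asks the stream manager to create a new one. The sketch below exercises only the route-validation path and uses ObjectStreamIO as an assumed class name, since the excerpt does not show it; valid routes follow the 'stream.<id>@<origin>' pattern.

existing = ObjectStreamIO('stream.1a2b3c4d@py0123456789abcdef')   # accepted
try:
    ObjectStreamIO('hello.world')       # neither 'stream.' prefix nor '@'
except ValueError as e:
    print(e)                            # Invalid stream route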
Code Example #12
File: platform.py Project: dhvidding/mercury-python
    def __init__(self,
                 work_dir: str = None,
                 log_file: str = None,
                 log_level: str = None,
                 max_threads: int = None,
                 network_connector: str = None):
        if sys.version_info.major < 3:
            python_version = str(sys.version_info.major) + "." + str(
                sys.version_info.minor)
            raise RuntimeError("Requires python 3.6 and above. Actual: " +
                               python_version)

        self.util = Utility()
        self.origin = 'py' + (''.join(str(uuid.uuid4()).split('-')))
        config = AppConfig()
        my_log_file = (config.LOG_FILE if hasattr(config, 'LOG_FILE') else
                       None) if log_file is None else log_file
        my_log_level = config.LOG_LEVEL if log_level is None else log_level
        self._max_threads = config.MAX_THREADS if max_threads is None else max_threads
        self.work_dir = config.WORK_DIRECTORY if work_dir is None else work_dir
        self.log = LoggingService(
            log_dir=self.util.normalize_path(self.work_dir + "/log"),
            log_file=my_log_file,
            log_level=my_log_level).get_logger()
        self._loop = asyncio.new_event_loop()
        my_distributed_trace = DistributedTrace(
            self, config.DISTRIBUTED_TRACE_PROCESSOR)
        my_connector = config.NETWORK_CONNECTOR if network_connector is None else network_connector
        self._cloud = NetworkConnector(self, my_distributed_trace, self._loop,
                                       my_connector, self.origin)
        self._function_queues = dict()
        self._executor = concurrent.futures.ThreadPoolExecutor(
            max_workers=self._max_threads)
        self.log.info("Concurrent thread pool = " + str(self._max_threads))
        #
        # Before we figure out how to solve blocking file I/O, we will regulate event output rate.
        #
        my_test_dir = self.util.normalize_path(self.work_dir + "/test")
        if not os.path.exists(my_test_dir):
            os.makedirs(my_test_dir)
        self._throttle = Throttle(self.util.normalize_path(my_test_dir +
                                                           "/to_be_deleted"),
                                  log=self.log)
        self._seq = 0
        self.util.cleanup_dir(my_test_dir)
        self.log.debug("Estimated processing rate is " +
                       format(self._throttle.get_tps(), ',d') +
                       " events per second for this computer")
        self.running = True
        self.stopped = False
        # distributed trace sessions
        self._traces = {}

        # start event loop in a new thread to avoid blocking the main thread
        def main_event_loop():
            self.log.info("Event system started")
            self._loop.run_forever()
            self.log.info("Event system stopped")
            self._loop.close()

        threading.Thread(target=main_event_loop).start()
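A construction sketch for the keyword-argument form above; the values are illustrative, and get_logger() / get_origin() are the accessors seen in Code Examples #14 and #9.

platform = Platform(work_dir='/tmp/demo', log_level='DEBUG', max_threads=100)
log = platform.get_logger()
log.info('platform origin is ' + platform.get_origin())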
Code Example #13
File: po.py Project: fagan2888/mercury-python
    def __init__(self):
        self.platform = Platform()
        self.util = Utility()
Code Example #14

from mercury.system.singleton import Singleton
from mercury.platform import Platform
from mercury.system.po import PostOffice
from mercury.system.models import EventEnvelope, AppException
from mercury.system.utility import Utility

platform = Platform()
log = platform.get_logger()
po = PostOffice()
util = Utility()


@Singleton
class PubSub:

    def __init__(self, domain: str = 'system'):
        if not isinstance(domain, str):
            raise ValueError('Pub/sub domain must be str. e.g. system or user')
        value = domain.strip()
        self.domain = 'system' if value == '' else value
        self.subscription = dict()

    def feature_enabled(self):
        result = po.request('pub.sub.controller', 10.0, headers={'type': 'feature', 'domain': self.domain})
        return self._normalize_result(result, True)
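A short usage sketch for the domain parameter above; 'user' is the alternate domain named in the constructor's error message, and the availability check mirrors Code Examples #1 and #5.

user_ps = PubSub('user')
if user_ps.feature_enabled():
    log.info('pub/sub is available for the user domain')
else:
    log.info('the user domain needs a cloud connector such as Kafka')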