Example #1
    def __init__(self, logger=None):
        self.logger         = CustomLogger().logger if logger is None else logger
        self.certs_location = '/etc/letsencrypt/live'
        self.endpoint_url   = os.getenv('ENDPOINT_URL')
        self.aws_access_key = os.getenv('AWS_ACCESS_KEY')
        self.aws_secret_key = os.getenv('AWS_SECRET_KEY')
        self.aws_region     = os.getenv('AWS_REGION')
        self.s3_bucket_name = os.getenv('CERTS_BUCKET_NAME')
        self.client         = self._client()
Example #2
import sys


def main():
    if len(sys.argv) > 2:
        from config import init_config
        init_config(sys.argv[1])
        from config import CONFIG
        from logger import CustomLogger
        cust_logger = CustomLogger(CONFIG.web_server.logger_name)
        cust_logger.add_file("log/" + CONFIG.web_server.logger_name, False)
        import app
        if bool(int(sys.argv[2])):
            app.main()
Example #3
import sys


def main():
    if len(sys.argv) > 2:
        from config import init_config
        init_config(sys.argv[1])
        from config import CONFIG
        from logger import CustomLogger
        cust_logger = CustomLogger(CONFIG.web_server.logger_name)
        cust_logger.add_file("log/" + CONFIG.web_server.logger_name, False)
        import app
        if bool(int(sys.argv[2])):
            app.main()
Example #4
    def __init__(self, provider=None):
        """
        Automate certbot and lexicon to obtain and store
        Let's Encrypt SSL certificates in S3-compatible object storage
        """
        self.logger = CustomLogger().logger
        self.dns_provider = provider
        self.dns_provider_username = os.getenv('DNS_PROVIDER_USERNAME')
        self.dns_provider_auth_token = os.getenv('DNS_PROVIDER_AUTH_TOKEN')
        self.client_ip_address = self._getPublicIP()
        self.dns_provider_update_delay = 30
        self.config = Config(logger=self.logger)
        self.s3_store = Store(logger=self.logger)
        self.test = False
Example #5
    def __init__(self, logger=None):
        if logger is None:
            logger = CustomLogger().logger

        self.logger = logger
        self.current_path = os.path.dirname(os.path.realpath(__file__))
        self.config_file_path = os.path.join(self.current_path, 'config.json')
        self.getconfig = self._getConfig()
Example #6
import os
import random
import threading
import socket

from flask import Flask, render_template, request
from webServiceMIB import WebServiceMIB, StatusWebService
from config import Config
from logger import CustomLogger

from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop


## Initializing the app
app = Flask(__name__)
app.debug = True
config = Config()
host_conf = None

cust_logger = CustomLogger("web_server_%d"%os.getpid())

## Main pages, empty for the moment
@app.route('/')
def index():
    return render_template('index.html')

# Return a fortune: a line randomly selected from a file
@app.route('/fortune', methods=['GET', 'POST'])
def fortune():
    if request.method == 'GET':
        cust_logger.info("Received GET request")
        try:
            with open("../" + config.fortune_service.path_file_fortunes, 'r') as file_fortune:
                selected_line = random.choice(file_fortune.readlines())  # the with-block closes the file once the lines are read
            response = dict(host_conf=host_conf, result=selected_line)  # add our information to the answer
Example #7
from logger import CustomLogger

CUSTOM_LOGGER_HEADER = 'helper'

log = CustomLogger(CUSTOM_LOGGER_HEADER).log

key_set = set()
try:
    # read one key per line; strip the newline so lookups against raw keys match
    with open('secret') as secret_f:
        for line in secret_f:
            key_set.add(line.strip())
except FileNotFoundError:
    log('no secret file found, all requests will be accepted')
    key_set = None


def is_valid_key(key):
    if key_set:
        return key and key in key_set
    else:
        return True


def get_value(args, body, key, useArgs=True):
    argVal = args.get(key, False)
    bodyVal = body.get(key, False)
    if argVal and bodyVal:
        if argVal == bodyVal:
            return argVal
Example #8
import os
import datetime
from flask import Flask, render_template, redirect, flash, url_for, request, abort, session
from flask_login import LoginManager, login_required, login_user, logout_user, current_user, \
    current_app
from flask_principal import Principal, Permission, UserNeed, RoleNeed, identity_loaded, \
    identity_changed, Identity

import mongoengine

from config import CONFIG

config = CONFIG

from logger import CustomLogger

cust_logger = CustomLogger(config.web_server.logger_name)

from models import User, Roles, LoggingHandlingException
from loginForm import LoginForm, RegistrationForm

app = Flask(__name__)
app.config['TESTING'] = True

# Secret key of the app; it should come from a file to avoid invalidating existing sessions on restart
app.secret_key = os.urandom(24)
app.config['SESSION_COOKIE_HTTPONLY'] = True

# load extension permissions
principals = Principal(app)

#login manager loading
Example #9
import os
import time
import threading
import socket
from twisted.internet import protocol, reactor
from webServiceMIB import WebServiceMIB
from logger import CustomLogger
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop


monitors = []
PERIOD_BEAT = 3
PERIOD_EXCEPTION = 2

cust_logger = CustomLogger("monitor_web_server_%d" % os.getpid())




################# HEART BEAT MANAGEMENT ########################

# web_listen_port is the port the web service listens on; monitor_listen_port is the listening port of this monitor
def heartbeatDaemon(web_listen_port, monitor_listen_port, list_monitors):
    while True:
        hbSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        for monitor in list_monitors:
            hbSocket.sendto("heartbeat#%d#%d" % (int(web_listen_port), int(monitor_listen_port)),
                            (monitor['ip'], int(monitor['port_hb'])))
        time.sleep(PERIOD_BEAT)

############# EXCEPTION MANAGEMENT #################################
Example #10
import sys

sys.path.append('..')
sys.path.append('../../libs')

import os
from config import Config
from twisted.internet import protocol
from twisted.internet import reactor
from logger import CustomLogger
from mongoWebMonitor import WebServiceMonitor, StatusWebService, Logs
import mongoengine
import threading, time
import socket
import httplib
import json

cust_logger = CustomLogger("monitor_health_%d"%os.getpid())

config = Config()
SIZE_BUFFER_HB = 21
HB_DATAGRAM = "heartbeat"
TIMEOUT_HB = 10
PERIOD_CHECK_HB = 3
PERIOD_CHECK_STATUS = 5

def monitorDaemon():
    pass

################ HEART BEAT MANAGEMENT ######################

class Heartbeats(dict):
    '''
Example #11
"""
author: arun.rs
created: 26th October 2018
"""

from datetime import datetime
from functools import wraps
from logger import CustomLogger

TRACER = CustomLogger().get_logger('trace')


def message(operation, type, resource, raw_resource, execution_time, status):
    """
    :summary: Concats the supplied parameters and returns them in trace format
    :param operation: Operation (MySQL/ Mongo/ ES/ API/ etc)
    :param type: Type of the Operation (SELECT/ GET/ POST/ etc)
    :param resource: URL / Query / Function name
    :param raw_resource: URL / Query / Function name
    :param execution_time: Time taken to perform that operation
    :param status: Success or Failure
    :return: Concatinated string
    """
    return "%s|%s|%s|%s|%s|%s" % (operation, type, resource, raw_resource,
                                  execution_time, status)
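
# Usage sketch (added for illustration, not part of the original module):
# message('MySQL', 'SELECT', 'get_user', 'SELECT * FROM users WHERE id=%s', 0.012, 'Success')
# returns 'MySQL|SELECT|get_user|SELECT * FROM users WHERE id=%s|0.012|Success'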


def execution_time(start_time, end_time):
    """
    :summary: Difference of supplied time in seconds
    :param start_time: Start time
Example #12
if args.amp:
    print('==> Operate amp')
    net, optimizer = amp.initialize(net, optimizer, opt_level="O1")

if args.scheduler:
    print('==> Operate scheduler')
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.9, patience=1, min_lr=1e-10, verbose=True)


# logger
here = os.getcwd()
now = datetime.datetime.now()
args.out = now.strftime('%Y%m%d_%H%M%S.%f')
log_dir = osp.join(here, 'logs', args.out)
os.makedirs(log_dir)
logger = CustomLogger(out=log_dir)

# make dirs for the checkpoint
check_dir = osp.join(here, 'checkpoint', args.out)
os.makedirs(check_dir)

# for .yaml
args.dataset = ['CIFAR10']
args.optimizer = 'SGD'
args.model = 'ResNet18'

with open(osp.join(log_dir, 'config.yaml'), 'w') as f:
    yaml.safe_dump(args.__dict__, f, default_flow_style=False)


# Training
Example #13
import os
import json
import hashlib

import boto3

from logger import CustomLogger


class Store:
    def __init__(self, logger=None):
        self.logger         = CustomLogger().logger if logger is None else logger
        self.certs_location = '/etc/letsencrypt/live'
        self.endpoint_url   = os.getenv('ENDPOINT_URL')
        self.aws_access_key = os.getenv('AWS_ACCESS_KEY')
        self.aws_secret_key = os.getenv('AWS_SECRET_KEY')
        self.aws_region     = os.getenv('AWS_REGION')
        self.s3_bucket_name = os.getenv('CERTS_BUCKET_NAME')
        self.client         = self._client()
        
    
    def _client(self):
        try:
            return boto3.client('s3',
                                endpoint_url=self.endpoint_url,
                                aws_access_key_id=self.aws_access_key,
                                aws_secret_access_key=self.aws_secret_key,
                                region_name=self.aws_region)
        except Exception:
            self.logger.exception('Cannot create the s3 client')
            return None

    def _calcSHA256(self, filepath):
        sha256_hash = hashlib.sha256()
        with open(filepath,'rb') as f:
            # Read and update hash string value in blocks of 4K
            for byte_block in iter(lambda: f.read(4096), b''):
                sha256_hash.update(byte_block)
            return sha256_hash.hexdigest()

    def getMetaData(self, object_key):
        """Get the certificate metadata"""
        resp = self.client.head_object(Bucket=self.s3_bucket_name, Key='{0}/metadata.json'.format(object_key))
        if 'Metadata' not in resp:
            return None
        
        return resp['Metadata']
    
    def saveCerts(self):
        """ Saves the letsencrypt certificates files to a s3-compatible object storage"""
        certs_files = {}
        if self.client is None:
            self.logger.error('No s3 client initialized')
            return
        for cert in os.listdir(self.certs_location):
            cert_location = os.path.join(self.certs_location, cert)
            if os.path.isdir(cert_location):
                certs_files[cert] = {}
                cert_files = list(filter(lambda filename: all(ex_str not in filename.lower() for ex_str in ['readme', 'metadata']), os.listdir(cert_location)))
                for file in cert_files:
                    filepath   = os.path.join(cert_location, file)
                    filesha256 = self._calcSHA256(filepath)
                    cert_key = os.path.splitext(file)[0]
                    certs_files[cert][cert_key] = filesha256
                    # Save the certificates to a bucket
                    try:
                        with open(filepath, 'rb') as certdata:
                            self.client.put_object(
                                ACL='private',
                                Body=certdata,
                                Bucket=self.s3_bucket_name,
                                Key='{0}/{1}'.format(cert, file))
                    except Exception:
                        self.logger.error('Cannot save the %s certificate file' % cert)

                # create and upload a metadata file containing the certificate files' sha256 hashes
                metadata_file = os.path.join(cert_location, 'metadata.json')
                metadata_obj  = json.dumps(certs_files[cert], indent=4)
                try:
                    with open(metadata_file, 'w') as f:
                        f.write(metadata_obj)
                except Exception:
                    self.logger.error('Cannot save the metadata json file for the %s certificate' % cert)
                    return

                if os.path.isfile(metadata_file):
                    self.client.put_object(
                        ACL='private',
                        Body=metadata_obj,
                        Bucket=self.s3_bucket_name,
                        Key='{0}/{1}'.format(cert, 'metadata.json'),
                        Metadata=certs_files[cert])

        self.logger.info('certificate files saved to the %s bucket' % self.s3_bucket_name)
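
# Usage sketch (added for illustration, not part of the original module): Store reads
# its connection settings from the ENDPOINT_URL, AWS_ACCESS_KEY, AWS_SECRET_KEY,
# AWS_REGION and CERTS_BUCKET_NAME environment variables, so they are assumed to be set.
# store = Store()
# store.saveCerts()                         # upload everything under /etc/letsencrypt/live
# print(store.getMetaData('example.com'))   # 'example.com' is a hypothetical cert folder name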
Example #14
import sys

sys.path.append('..')
sys.path.append('../../libs')

import os
from config import Config
from twisted.internet import protocol
from twisted.internet import reactor
from logger import CustomLogger
from mongoWebMonitor import WebServiceMonitor, StatusWebService, Logs
import mongoengine
import threading, time
import socket
import httplib
import json

cust_logger = CustomLogger("monitor_health_%d" % os.getpid())

config = Config()
SIZE_BUFFER_HB = 21
HB_DATAGRAM = "heartbeat"
TIMEOUT_HB = 10
PERIOD_CHECK_HB = 3
PERIOD_CHECK_STATUS = 5


def monitorDaemon():
    pass


################ HEART BEAT MANAGEMENT ######################
Example #15
import json

# custom data structures
from film_record import FilmRecord
from logger import CustomLogger
# additional modules for better UX
from utils import Gauge

# Global presets
file_name = "movies.json"
kafka_brokers = ["10.40.1.142:9092", "10.40.1.141:9092"]
kafka_topic_name = "avikulin_test"
kafka_clientid = "Python test util"
kafka_value_serializer = FilmRecord.serialize

# ! Executed code
if __name__ == "__main__":
    logger_instance = CustomLogger("kafka_wtire_util")
    logger_instance.activate()

    films_store = list()

    logger_instance.get.info(f"Start reading data from file {file_name}.")
    with open(file_name, mode="r") as source_file:
        data_store = json.load(source_file)
        print("JSON loaded.")
        for i, item in enumerate(data_store):
            films_store.append(FilmRecord.decode(item))

        print(
            f"Statistics: count ={len(data_store)}, collection type = {type(data_store)}"
        )
        print(
Example #16
    def init_logger(self, sess):
        if self.logger is None:
            self.logger = CustomLogger(self.config['log_dir'], sess.graph)

        return self.logger
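
# Usage sketch (added for illustration): the logger is created lazily from the configured
# log directory and the TensorFlow session graph, e.g.
# logger = trainer.init_logger(sess)   # 'trainer' and 'sess' are assumed to exist in the caller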
Example #17
import os

from flask_login import LoginManager
from flask import Flask, render_template
from flask_sqlalchemy_session import flask_scoped_session
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from logger import CustomLogger

log = CustomLogger()

login_manager = LoginManager()

base_dir = os.getcwd()

app = Flask(__name__)
login_manager.init_app(app)
login_manager.login_view = 'stream'
app.config.from_object(os.environ['APP_SETTINGS'])
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

engine = create_engine(os.environ['DATABASE_URL'])
session_factory = sessionmaker(bind=engine)
db_session = flask_scoped_session(session_factory, app)


@app.route("/")
def index():
    return render_template("index.html")

Example #18
import os
import sys

from dotenv import load_dotenv
from flask import Flask, render_template, send_from_directory, request
from flask_cors import CORS
from flask_login import LoginManager

from logger import CustomLogger

log = CustomLogger()

login_manager = LoginManager()

base_dir = os.getcwd()

app = Flask(__name__)

# load_dotenv()

try:
    app.config.from_object(os.environ['APP_SETTINGS'])
except KeyError as e:
    print(f"\033[93mEnvironment variables ({str(e)}) not setup!\033[0m")
    sys.exit()

app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

CORS(app)

log.warning("LoginManager is not setup!")
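
# Sketch of the missing wiring the warning above refers to (added for illustration;
# compare the previous example, which does perform it):
# login_manager.init_app(app)
# login_manager.login_view = 'stream'   # 'stream' mirrors the view name used above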
Example #19
import os
import time
import threading
import socket

from flask import Flask, render_template, request
from webServiceMIB import WebServiceMIB, StatusWebService
from config import Config
from logger import CustomLogger

from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop

## Initializing the app
app = Flask(__name__)
app.debug = True
config = Config()
host_conf = None

cust_logger = CustomLogger("web_server_%d" % os.getpid())


## Main pages, empty for the moment
@app.route('/')
def index():
    return render_template('index.html')


# Return a fortune: a line randomly selected from a file
@app.route('/fortune', methods=['GET', 'POST'])
def fortune():
    if request.method == 'GET':
        cust_logger.info("Received GET request")
        try:
            file_fortune = open(
Example #20
                section_split_model=
                'section_split/models/training_unfolding_structure-2020-12-22_11-07-07_distilroberta-base'
            )

        pgr.do_convert()
        document_sections = pgr.do_split()

        return pgr.sections_to_doccano(document_sections)

    return 'NOK', 400


# curl -X POST -F data='{"pilot":"Malaga","service":"Asylum Request"}' http://easyrights.linksfoundation.com/v0.3/generate
@app.route('/v0.3/generate', methods=['POST'])
def retrieve_pathways():
    data = json.loads(request.form['data'])

    if data['pilot'].strip().lower() == 'malaga' and data['service'].strip().lower() == 'asylum request':
        return json.loads(open('api/malaga_pathway.json', 'r').read())
    if data['pilot'].strip().lower() == 'birmingham' and data['service'].strip().lower() == 'clean air zone':
        return json.loads(open('api/birmingham_pathway.json', 'r').read())

    return 'Service not available yet. Supported services: ', 400


if __name__ == '__main__':
    app.config['logger'] = CustomLogger('log/pgr.log')
    app.run(host='0.0.0.0', debug=True, port=5000)
Example #21
# -*- coding: utf-8 -*-
'''Python module for initiating and executing commands via REST API.'''

# pylint: disable=too-many-branches, too-many-statements
# pylint: disable=too-many-return-statements

import ujson as json

import requests

from constants import CREDS
from constants import API
from logger import CustomLogger

LOG = CustomLogger(__name__)


class REST(object):
    '''Rest class for invoking REST calls GET, POST, PUT, PATCH, DELETE.'''
    def __init__(self, **kwargs):
        '''This class defines methods to invoke REST calls.

        Args:
            :pcIP (str): IP address.
            :username (str, optional): Username for auth. Default: 'admin'.
            :password (str, optional): Passwd for auth. Default: 'Password'.
            :port (int, optional): Port for sending REST calls. Default: 80.
            :baseURL (str, optional): URI for REST calls. Default: .

        Returns:
            Returns REST object instance.
Example #22
from typing import Set, List, Tuple

from kafka import KafkaConsumer
from kafka.structs import TopicPartition, KafkaMessage

from logger import CustomLogger, LogLevels

from sys import exit

# Global presets
kafka_brokers = ["10.40.1.142:9092", "10.40.1.141:9092"]
kafka_topic_name = "avikulin_test"
kafka_consumer_group_id = "test_group#111"
kafka_client_id = __file__

if __name__ == "__main__":
    # Enable logging for kafka consumer
    kafka_logger = CustomLogger("kafka", log_level=LogLevels.DEBUG)
    kafka_logger.activate()

    # Enable logging for app
    app_logger = CustomLogger("kafka_read_util", log_level=LogLevels.INFO)
    app_logger.activate()

    while True:
        consumer = KafkaConsumer(
            kafka_topic_name,
            group_id=kafka_consumer_group_id,
            client_id=kafka_client_id,
            bootstrap_servers=kafka_brokers,
            request_timeout_ms=6001,
            session_timeout_ms=6000,
            heartbeat_interval_ms=2000,
Example #23
class AcmeOperation:
    load_dotenv()

    def __init__(self, provider=None):
        """
        Automate certbot and lexicon to obtain and store 
        let's encrypt ssl certificates into S3-compatible object storage
        """
        self.logger = CustomLogger().logger
        self.dns_provider = provider
        self.dns_provider_username = os.getenv('DNS_PROVIDER_USERNAME')
        self.dns_provider_auth_token = os.getenv('DNS_PROVIDER_AUTH_TOKEN')
        self.client_ip_address = self._getPublicIP()
        self.dns_provider_update_delay = 30
        self.config = Config(logger=self.logger)
        self.s3_store = Store(logger=self.logger)
        self.test = False

    def _providerCheck(self):
        if self.dns_provider in self.config.getconfig['domains']:
            if len(self.config.getconfig['domains'][self.dns_provider]) > 0:
                return True
            else:
                self.logger.error('no domains stored for {0}'.format(
                    self.dns_provider))
                return False

        self.logger.error("provider is not in the domains list")
        return False

    def _getPublicIP(self):
        return urllib.request.urlopen('https://api.ipify.org/').read().decode(
            'utf8')

    def _getToolPath(self, tool):
        if tool is None or tool not in self.config.getconfig:
            return None
        if 'path' in self.config.getconfig[tool]:
            return self.config.getconfig[tool]['path']

    def _runCmd(self, args):
        result = Result()

        process = Popen(args, stdout=PIPE, stderr=PIPE)
        stdout, stderr = process.communicate()

        result.stdout = stdout
        result.stderr = stderr

        if process.returncode:
            self.logger.error('Error executing command {0}'.format(args))
            self.logger.error('StdErr: {0}'.format(stderr))

        return result

    def obtain(self, test=False, expand=False):
        """
        Obtains the initial letsencrypt certificates for specific domain name provider 
        using manual script hooks to validate the ownership of the domains

        Certbot cli cmd generated:
        certbot certonly --manual --preferred-challenges=dns 
        --manual-auth-hook "/path/to/acmebot.py auth -p namecheap" 
        --manual-cleanup-hook "/path/to/acmebot.py cleanup -p namecheap" 
        -d example.com -d sub2.example.com -d another-example.com 
        --manual-public-ip-logging-ok --noninteractive --agree-tos --test-cert
        """
        hook_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                 'acmebot')
        if self._providerCheck():
            certbot = self._getToolPath('certbot')

            if certbot is None:
                self.logger.error(
                    "failed to run the certbot cmd, certbot path is not set")
                return

            args = [
                certbot, 'certonly', '--manual', '--preferred-challenges',
                'dns'
            ]

            args.extend([
                '--manual-auth-hook',
                quote("{0} auth -p {1}".format(hook_file, self.dns_provider))
            ])
            args.extend([
                '--manual-cleanup-hook',
                quote("{0} cleanup -p {1}".format(hook_file,
                                                  self.dns_provider))
            ])
            for domain in self.config.getconfig['domains'][self.dns_provider]:
                args.extend(['-d', domain])

            # adding certbot options to run in a non interactive mode
            args.extend([
                '--manual-public-ip-logging-ok', '--noninteractive',
                '--agree-tos', '--quiet'
            ])

            if test:
                args.append('--test-cert')

            if expand:
                args.append('--expand')

            certbot_cmd = ' '.join(args)
            # for some reason using Popen does not work with certbot,
            # so we are using os.system for now
            # TODO: figure out if we can use subprocess.Popen
            os.system(certbot_cmd)

    def hook(self, action=None):
        # the passed environment variables from certbot
        CERTBOT_DOMAIN = os.environ.get("CERTBOT_DOMAIN", None)
        CERTBOT_VALIDATION = os.environ.get("CERTBOT_VALIDATION", None)

        cmd_index = 6
        lexicon = self._getToolPath('lexicon')

        if lexicon is None:
            self.logger.error(
                "failed to run the lexicon cmd, lexicon path is not set")
            return

        args = [
            lexicon, self.dns_provider,
            '--auth-username={0}'.format(self.dns_provider_username),
            '--auth-token={0}'.format(self.dns_provider_auth_token),
            '--auth-client-ip={0}'.format(self.client_ip_address), '--ttl=100',
            CERTBOT_DOMAIN, 'TXT', '--name',
            '_acme-challenge.{0}'.format(CERTBOT_DOMAIN), '--content',
            CERTBOT_VALIDATION
        ]
        if action == 'auth':
            #   https://github.com/AnalogJ/lexicon/blob/master/examples/certbot.default.sh#L46
            #   How many seconds to wait after updating your DNS records. This may be required,
            #   depending on how slow your DNS host is to begin serving new DNS records after updating
            #   them via the API. 30 seconds is a safe default, but some providers can be very slow
            #   (e.g. Linode).
            #
            #   Defaults to 30 seconds
            args.insert(cmd_index, 'create')
            self._runCmd(args)
            time.sleep(self.dns_provider_update_delay)
            # now save the created certificates to s3-compatible object storage
            self.s3_store.saveCerts()
        elif action == 'cleanup':
            args.insert(cmd_index, 'delete')
            self._runCmd(args)

    def manual_s3_upload(self):
        """manually uploads the live certificate files into the configured s3-compatible storage"""
        try:
            self.s3_store.saveCerts()
        except Exception as e:
            self.logger.error(
                'Failed to manually upload the certificates to the s3-compatible storage, Reason: %s'
                % e)
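
# Usage sketch (added for illustration, based on the docstrings above): the same script
# acts as both the certbot driver and the manual auth/cleanup hook.
# acme = AcmeOperation(provider='namecheap')   # 'namecheap' is only an illustrative provider name
# acme.obtain(test=True)                       # runs certbot with --test-cert against the staging CA
# Certbot then re-invokes this script, which calls acme.hook(action='auth') or
# acme.hook(action='cleanup') with CERTBOT_DOMAIN / CERTBOT_VALIDATION in the environment.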
Example #24
import os
import subprocess
import sys
import logging
import StringIO
import pycurl
import socket
import base64
try:
    from urllib.parse import urlencode
except ImportError:
    from urllib import urlencode

from logger import CustomLogger
logger = CustomLogger(__name__).logger


class CustomException(Exception):
    def __init__(self, code, *args):
        self.code = code
        self.msg = Error.get_code_description(code).format(*args)

    def __str__(self):
        return repr("Error: {code}: {msg}".format(code=self.code,
                                                  msg=self.msg))


class Error(object):

    GENERIC_ERROR = 1
    NOTHING_TO_DO = 2
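
# Usage sketch (added for illustration): error codes map to messages through
# Error.get_code_description(), so a failure can be reported as
#     raise CustomException(Error.NOTHING_TO_DO)
# and the exception's string form includes both the numeric code and its description.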
Example #25
import sys
import threading
import socket
import json

sys.path.append('..')
sys.path.append('../../libs')
sys.path.append('../..')

from twisted.internet import protocol
from twisted.internet import reactor
from twisted.internet import error
from logger import CustomLogger
from config import Config
import random
#logger for this module
cust_logger = CustomLogger("loadBalancer")
servers_manager = None
monitors = []
config = Config()


###########SERVER MANAGER#################
class ServersManager:
    '''
        Manages the different lists of servers: one containing the potential servers that can be used,
        one containing the servers that the load balancer is currently using, and the last one containing
        the servers that are available, i.e. not currently being used. An index number is stored to keep
        track of the last server used in the potential servers list. This is to be removed in the future
        and replaced by a non-linear management.
    '''
    def __init__(self, possible_servers, in_use_servers):
        self.available_servers = in_use_servers[:]  #initialized with the in use servers
Example #26
import os
import random
import time
import threading
import socket
from twisted.internet import protocol, reactor
from webServiceMIB import WebServiceMIB
from logger import CustomLogger
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop

monitors = []
PERIOD_BEAT = 3
PERIOD_EXCEPTION = 2

cust_logger = CustomLogger("monitor_web_server_%d" % os.getpid())

################# HEART BEAT MANAGEMENT ########################


# web_listen_port is the port the web service listens on; monitor_listen_port is the listening port of this monitor
def heartbeatDaemon(web_listen_port, monitor_listen_port, list_monitors):
    while True:
        hbSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        for monitor in list_monitors:
            hbSocket.sendto(
                "heartbeat#%d#%d" %
                (int(web_listen_port), int(monitor_listen_port)),
                (monitor['ip'], int(monitor['port_hb'])))
        time.sleep(PERIOD_BEAT)