Example #1
    def __init__(self, logger=None):
        if logger is None:
            logger = CustomLogger().logger

        self.logger = logger
        self.current_path = os.path.dirname(os.path.realpath(__file__))
        self.config_file_path = os.path.join(self.current_path, 'config.json')
        self.getconfig = self._getConfig()
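The `_getConfig` helper called above is not part of this excerpt. A minimal sketch of what it could look like, assuming it simply loads the `config.json` file whose path is built in `__init__` (the body below is an assumption, not the original implementation, and it presumes `json` is imported at module level):

    # Hypothetical sketch, not the original code: load config.json from the
    # path computed in __init__ and return the parsed dict (requires `import json`).
    def _getConfig(self):
        with open(self.config_file_path) as config_file:
            return json.load(config_file)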
Example #2
 def __init__(self, logger=None):
     self.logger         = CustomLogger().logger if logger is None else logger
     self.certs_location = '/etc/letsencrypt/live'
     self.endpoint_url   = os.getenv('ENDPOINT_URL')
     self.aws_access_key = os.getenv('AWS_ACCESS_KEY')
     self.aws_secret_key = os.getenv('AWS_SECRET_KEY')
     self.aws_region     = os.getenv('AWS_REGION')
     self.s3_bucket_name = os.getenv('CERTS_BUCKET_NAME')
     self.client         = self._client()
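The `_client()` factory is likewise not shown. A minimal sketch under the assumption that it builds a boto3 S3 client from the attributes above (boto3 itself is an assumption; only the environment variables and attribute names come from the excerpt):

 # Hypothetical sketch, not the original code: build an S3 client for an
 # S3-compatible endpoint from the credentials read in __init__.
 def _client(self):
     import boto3  # assumed dependency; imported here to keep the sketch self-contained
     return boto3.client(
         's3',
         endpoint_url=self.endpoint_url,
         aws_access_key_id=self.aws_access_key,
         aws_secret_access_key=self.aws_secret_key,
         region_name=self.aws_region,
     )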
Example #3
def main():
    if len(sys.argv) > 2:
        from config import init_config
        init_config(sys.argv[1])
        from config import CONFIG
        from logger import CustomLogger
        cust_logger = CustomLogger(CONFIG.web_server.logger_name)
        cust_logger.add_file("log/" + CONFIG.web_server.logger_name, False)
        import app
        if bool(int(sys.argv[2])):
            app.main()
Example #4
 def __init__(self, provider=None):
     """
     Automate certbot and lexicon to obtain Let's Encrypt SSL certificates
     and store them in S3-compatible object storage.
     """
     self.logger = CustomLogger().logger
     self.dns_provider = provider
     self.dns_provider_username = os.getenv('DNS_PROVIDER_USERNAME')
     self.dns_provider_auth_token = os.getenv('DNS_PROVIDER_AUTH_TOKEN')
     self.client_ip_address = self._getPublicIP()
     self.dns_provider_update_delay = 30
     self.config = Config(logger=self.logger)
     self.s3_store = Store(logger=self.logger)
     self.test = False
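The `_getPublicIP()` helper is only referenced here. A minimal sketch, assuming it queries an external IP-echo service with the `requests` package (both the service URL and the use of `requests` are assumptions):

 # Hypothetical sketch, not the original code: look up this machine's public
 # IP address via an external echo service.
 def _getPublicIP(self):
     import requests  # assumed dependency
     return requests.get('https://api.ipify.org', timeout=10).text.strip()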
Example #5
import sys

sys.path.append('..')
sys.path.append('../../libs')

import os
from config import Config
from twisted.internet import protocol
from twisted.internet import reactor
from logger import CustomLogger
from mongoWebMonitor import WebServiceMonitor, StatusWebService, Logs
import mongoengine
import threading, time
import socket
import httplib
import json

cust_logger = CustomLogger("monitor_health_%d" % os.getpid())

config = Config()
SIZE_BUFFER_HB = 21
HB_DATAGRAM = "heartbeat"
TIMEOUT_HB = 10
PERIOD_CHECK_HB = 3
PERIOD_CHECK_STATUS = 5


def monitorDaemon():
    pass


################ HEART BEAT MANAGEMENT ######################
Example #6
# -*- coding: utf-8 -*-
'''Python module for initiating and executing commands via REST API.'''

# pylint: disable=too-many-branches, too-many-statements
# pylint: disable=too-many-return-statements

import ujson as json

import requests

from constants import CREDS
from constants import API
from logger import CustomLogger

LOG = CustomLogger(__name__)


class REST(object):
    '''Rest class for invoking REST calls GET, POST, PUT, PATCH, DELETE.'''
    def __init__(self, **kwargs):
        '''This class defines methods to invoke REST calls.

        Args:
            :pcIP (str): IP address.
            :username (str, optional): Username for auth. Default: 'admin'.
            :password (str, optional): Passwd for auth. Default: 'Password'.
            :port (int, optional): Port for sending REST calls. Default: 80.
            :baseURL (str, optional): URI for REST calls. Default: .

        Returns:
            Returns REST object instance.
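The excerpt stops inside the constructor's docstring, but the documented keyword arguments are enough for a usage sketch (the values below are placeholders, not real hosts or credentials):

# Illustrative usage sketch based only on the kwargs documented above.
rest = REST(pcIP='10.0.0.1', username='admin', password='Password', port=80)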
Example #7
import os
import time
import threading
import socket

from flask import Flask, render_template, request
from webServiceMIB import WebServiceMIB, StatusWebService
from config import Config
from logger import CustomLogger

from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop

## Initializing the app
app = Flask(__name__)
app.debug = True
config = Config()
host_conf = None

cust_logger = CustomLogger("web_server_%d" % os.getpid())


## Main pages, empty for the moment
@app.route('/')
def index():
    return render_template('index.html')


#Return a fortune : line randomly selected in a file
@app.route('/fortune', methods=['GET', 'POST'])
def fortune():
    if request.method == 'GET':
        cust_logger.info("Received GET request")
        try:
            file_fortune = open(
Example #8
from logger import CustomLogger

CUSTOM_LOGGER_HEADER = 'helper'

log = CustomLogger(CUSTOM_LOGGER_HEADER).log

key_set = set()
try:
    # strip newlines so keys from the file compare equal to keys supplied in requests
    with open('secret') as secret_f:
        for line in secret_f:
            key = line.strip()
            if key:
                key_set.add(key)
except FileNotFoundError:
    log('no secret file found, all requests will be accepted')
    key_set = None


def is_valid_key(key):
    if key_set:
        return key and key in key_set
    else:
        return True


def get_value(args, body, key, useArgs=True):
    argVal = args.get(key, False)
    bodyVal = body.get(key, False)
    if argVal and bodyVal:
        if argVal == bodyVal:
            return argVal
Example #9
import os
import datetime
from flask import Flask, render_template, redirect, flash, url_for, request, abort, session
from flask.ext.login import LoginManager, login_required, login_user, logout_user, current_user, \
           current_app
from flask.ext.principal import Principal, Permission, UserNeed, RoleNeed, identity_loaded,\
               identity_changed, Identity

import mongoengine

from config import CONFIG

config = CONFIG

from logger import CustomLogger

cust_logger = CustomLogger(config.web_server.logger_name)

from models import User, Roles, LoggingHandlingException
from loginForm import LoginForm, RegistrationForm

app = Flask(__name__)
app.config['TESTING'] = True

#Secret key of the app, must be from file to prevent invalidating existing sessions on restart
app.secret_key = os.urandom(24)
app.config['SESSION_COOKIE_HTTPONLY'] = True

# load extension permissions
principals = Principal(app)

#login manager loading
Example #10
"""
author: arun.rs
created: 26th October 2018
"""

from datetime import datetime
from functools import wraps
from logger import CustomLogger

TRACER = CustomLogger().get_logger('trace')


def message(operation, type, resource, raw_resource, execution_time, status):
    """
    :summary: Concats the supplied parameters and returns them in trace format
    :param operation: Operation (MySQL/ Mongo/ ES/ API/ etc)
    :param type: Type of the Operation (SELECT/ GET/ POST/ etc)
    :param resource: URL / Query / Function name
    :param raw_resource: URL / Query / Function name
    :param execution_time: Time taken to perform that operation
    :param status: Success or Failure
    :return: Concatenated string
    """
    return "%s|%s|%s|%s|%s|%s" % (operation, type, resource, raw_resource,
                                  execution_time, status)
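# Illustrative note, not part of the original module: message() simply joins
# its six fields with '|' separators, so
#   message('MySQL', 'SELECT', 'get_user', 'SELECT * FROM users', 0.012, 'SUCCESS')
# returns 'MySQL|SELECT|get_user|SELECT * FROM users|0.012|SUCCESS'.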


def execution_time(start_time, end_time):
    """
    :summary: Difference between the supplied times, in seconds
    :param start_time: Start time
Example #11
                section_split_model=
                'section_split/models/training_unfolding_structure-2020-12-22_11-07-07_distilroberta-base'
            )

        pgr.do_convert()
        document_sections = pgr.do_split()

        return pgr.sections_to_doccano(document_sections)

    return 'NOK', 400


# curl -X POST -F data='{"pilot":"Malaga","service":"Asylum Request"}' http://easyrights.linksfoundation.com/v0.3/generate
@app.route('/v0.3/generate', methods=['POST'])
def retrieve_pathways():
    data = json.loads(request.form['data'])

    if data['pilot'].strip().lower() == 'malaga' and data['service'].strip(
    ).lower() == 'asylum request':
        return json.loads(open('api/malaga_pathway.json', 'r').read())
    if data['pilot'].strip().lower() == 'birmingham' and data['service'].strip(
    ).lower() == 'clean air zone':
        return json.loads(open('api/birmingham_pathway.json', 'r').read())

    return 'Service not available yet. Supported services: Malaga - Asylum Request, Birmingham - Clean Air Zone', 400


if __name__ == '__main__':
    app.config['logger'] = CustomLogger('log/pgr.log')
    app.run(host='0.0.0.0', debug=True, port=5000)
Example #12
if args.amp:
    print('==> Operate amp')
    net, optimizer = amp.initialize(net, optimizer, opt_level="O1")

if args.scheduler:
    print('==> Operate scheduler')
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.9, patience=1, min_lr=1e-10, verbose=True)


# logger
here = os.getcwd()
now = datetime.datetime.now()
args.out = now.strftime('%Y%m%d_%H%M%S.%f')
log_dir = osp.join(here, 'logs', args.out)
os.makedirs(log_dir)
logger = CustomLogger(out=log_dir)

# make dirs for the checkpoint
check_dir = osp.join(here, 'checkpoint', args.out)
os.makedirs(check_dir)

# for .yaml
args.dataset = ['CIFAR10']
args.optimizer = 'SGD'
args.model = 'ResNet18'

with open(osp.join(log_dir, 'config.yaml'), 'w') as f:
    yaml.safe_dump(args.__dict__, f, default_flow_style=False)


# Training
Example #13
    def init_logger(self, sess):
        if self.logger is None:
            self.logger = CustomLogger(self.config['log_dir'], sess.graph)

        return self.logger
Example #14
import os

from flask_login import LoginManager
from flask import Flask, render_template
from flask_sqlalchemy_session import flask_scoped_session
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from logger import CustomLogger

log = CustomLogger()

login_manager = LoginManager()

base_dir = os.getcwd()

app = Flask(__name__)
login_manager.init_app(app)
login_manager.login_view = 'stream'
app.config.from_object(os.environ['APP_SETTINGS'])
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

engine = create_engine(os.environ['DATABASE_URL'])
session_factory = sessionmaker(bind=engine)
db_session = flask_scoped_session(session_factory, app)


@app.route("/")
def index():
    return render_template("index.html")
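For context, a hypothetical view showing how the scoped session above is typically used; the `User` model is an assumption and does not appear in this example:

# Illustrative sketch only: `User` is an assumed SQLAlchemy model bound to the
# engine above; it is not part of the original example.
@app.route("/users/count")
def user_count():
    # flask_scoped_session provides a request-scoped SQLAlchemy session
    return str(db_session.query(User).count())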

Example #15
import os
import subprocess
import sys
import logging
import StringIO
import pycurl
import socket
import base64
try:
    from urllib.parse import urlencode
except ImportError:
    from urllib import urlencode

from logger import CustomLogger
logger = CustomLogger(__name__).logger


class CustomException(Exception):
    def __init__(self, code, *args):
        self.code = code
        self.msg = Error.get_code_description(code).format(*args)

    def __str__(self):
        return repr("Error: {code}: {msg}".format(code=self.code,
                                                  msg=self.msg))


class Error(object):

    GENERIC_ERROR = 1
    NOTHING_TO_DO = 2
Example #16
import json

# custom data structures
from film_record import FilmRecord
from logger import CustomLogger
# additional modules for better UX
from utils import Gauge

# Global presets
file_name = "movies.json"
kafka_brockers = ["10.40.1.142:9092", "10.40.1.141:9092"]
kafka_topic_name = "avikulin_test"
kafka_clientid = "Python test util"
kafka_value_serializer = FilmRecord.serialize

# ! Executed code
if __name__ == "__main__":
    logger_instance = CustomLogger("kafka_wtire_util")
    logger_instance.activate()

    films_store = list()

    logger_instance.get.info(f"Start reading data from file {file_name}.")
    with open(file_name, mode="r") as source_file:
        data_store = json.load(source_file)
        print("JSON loaded.")
        for i, item in enumerate(data_store):
            films_store.append(FilmRecord.decode(item))

        print(
            f"Statistics: count ={len(data_store)}, collection type = {type(data_store)}"
        )
        print(
Example #17
from typing import Set, List, Tuple

from kafka import KafkaConsumer
from kafka.structs import TopicPartition, KafkaMessage

from logger import CustomLogger, LogLevels

from sys import exit

# Global presets
kafka_brokers = ["10.40.1.142:9092", "10.40.1.141:9092"]
kafka_topic_name = "avikulin_test"
kafka_consumer_group_id = "test_group#111"
kafka_client_id = __file__

if __name__ == "__main__":
    # Enable logging for kafka consumer
    kafka_logger = CustomLogger("kafka", log_level=LogLevels.DEBUG)
    kafka_logger.activate()

    # Enable logging for app
    app_logger = CustomLogger("kafka_read_util", log_level=LogLevels.INFO)
    app_logger.activate()

    while True:
        consumer = KafkaConsumer(
            kafka_topic_name,
            group_id=kafka_consumer_group_id,
            client_id=kafka_client_id,
            bootstrap_servers=kafka_brokers,
            request_timeout_ms=6001,
            session_timeout_ms=6000,
            heartbeat_interval_ms=2000,
Example #18
import sys
import threading
import socket
import json

sys.path.append('..')
sys.path.append('../../libs')
sys.path.append('../..')

from twisted.internet import protocol
from twisted.internet import reactor
from twisted.internet import error
from logger import CustomLogger
from config import Config
import random
#logger for this module
cust_logger = CustomLogger("loadBalancer")
servers_manager = None
monitors = []
config = Config()


###########SERVER MANAGER#################
class ServersManager:
    '''
        Manages the different lists of servers: one containing the potential servers that can be used,
        another containing the servers that the load balancer is currently using, and the last one containing
        the servers that are available, i.e. not currently working. An index is stored to keep track of the last
        server used in the potential list; this is to be removed in the future and replaced by a non-linear management.
    '''
    def __init__(self, possible_servers, in_use_servers):
        self.available_servers = in_use_servers[:]  #initialized with the in use servers
Example #19
import os
import random
import time
import threading
import socket
from twisted.internet import protocol, reactor
from webServiceMIB import WebServiceMIB
from logger import CustomLogger
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop

monitors = []
PERIOD_BEAT = 3
PERIOD_EXCEPTION = 2

cust_logger = CustomLogger("monitor_web_server_%d" % os.getpid())

################# HEART BEAT MANAGEMENT ########################


# web_listen_port is the port the web service listens on; monitor_listen_port is the listening port of this monitor
def heartbeatDaemon(web_listen_port, monitor_listen_port, list_monitors):
    while True:
        hbSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        for monitor in list_monitors:
            hbSocket.sendto(
                "heartbeat#%d#%d" %
                (int(web_listen_port), int(monitor_listen_port)),
                (monitor['ip'], int(monitor['port_hb'])))
        time.sleep(PERIOD_BEAT)
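For reference, a minimal sketch of a listener that would consume the datagrams sent above (the function name, port handling, and buffer size are assumptions; on Python 3 the payload arrives as bytes and must be decoded, and sendto above would likewise need an encoded string):

# Illustrative sketch only, not part of the original module: receive the
# "heartbeat#<web_port>#<monitor_port>" datagrams emitted by heartbeatDaemon.
def heartbeatListener(listen_port, buffer_size=64):
    hbSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    hbSocket.bind(("", listen_port))
    while True:
        datagram, sender = hbSocket.recvfrom(buffer_size)
        fields = datagram.decode().split("#")  # decode() is needed on Python 3
        if fields[0] == "heartbeat" and len(fields) == 3:
            cust_logger.info("heartbeat from %s: web port %s, monitor port %s"
                             % (sender[0], fields[1], fields[2]))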