Example #1
def create_app(config_name):
    """

    the following  method implement  the application
    factory design pattern !
    it's responsible for creating all the app with 2
     configurations mode test ,develpement

    """

    if config_name == "test":
        app = Flask(__name__)
        app.config.from_object(app_config[config_name])

    else:

        # Resolve template/static folders when running as a frozen bundle (e.g. PyInstaller)
        if getattr(sys, 'frozen', False):
            template_folder = os.path.join(sys._MEIPASS, 'templates')
            static_folder = os.path.join(sys._MEIPASS, 'static')
            app = Flask(__name__,
                        template_folder=template_folder,
                        static_folder=static_folder)
        else:
            app = Flask(__name__)
        app.config.from_object(app_config['development'])
    app.register_blueprint(home)
    """
    will use a kv sesssion to store session data in client side

    """
    store = FilesystemStore(SESSION_FOLDER)
    KVSessionExtension(store, app)

    return app
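A minimal sketch of how such a factory is usually driven; the module name, the chosen config and the port are illustrative assumptions, not taken from the snippet:

from app import create_app  # hypothetical package exposing the factory

app = create_app("test")          # anything other than "test" falls through to development
app.run(port=5000)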
Example #2
def configure_app(configuration):
    # Configure data sources
    mongo_client = MongoClient(host=configuration.mongodb_uri)
    # The database name is everything after the last '/' in the URI
    conn = mongo_client[
        configuration.mongodb_uri[configuration.mongodb_uri.rfind('/') + 1:]]
    global mongo_connection
    mongo_connection = conn

    if configuration.cert_store_type == 'simplekv_fs':
        kv_store = FilesystemStore(configuration.cert_store_path)
        log.info('Configured a file system certificate store with path=%s',
                 configuration.cert_store_path)
    elif configuration.cert_store_type == 'simplekv_gridfs':
        gfs = gridfs.GridFS(conn)
        kv_store = GridfsKeyValueStore(gfs)
        log.info('Configured a gridfs certificate store')

    # Configure verifier
    global cert_store, verifier
    if configuration.v1_aware:
        cert_store = V1AwareCertificateStore(kv_store, mongo_connection)
        verifier = V1AwareCertificateVerifierBridge(cert_store)
    else:
        cert_store = CertificateStore(kv_store)
        verifier = CertificateVerifierBridge(cert_store)

    # Configure intro store
    global intro_store
    intro_store = IntroStore(mongo_connection)

    # Configure views
    from cert_viewer import views
    views.add_rules(app, configuration)
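The slice on mongodb_uri simply treats everything after the last '/' as the database name. A quick illustration with a made-up URI:

uri = "mongodb://localhost:27017/cert_viewer"   # example value only
db_name = uri[uri.rfind('/') + 1:]              # -> "cert_viewer"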
Example #3
def main():
    db = FilesystemStore("/home/kiselev/db")
    poll_cooldown = 30
    locations = [Location(56.880372, 60.729744,
                          56.928178, 60.843899)]
    advertiser = PeriscopeAdvertiser(locations, db, logger)
    polling.poll(advertiser.poll, step=poll_cooldown, poll_forever=True)
Example #4
    def __init__(self, cachename):

        storage = StorageWrapper.__STORAGE__

        # Create the cache directory without shelling out to mkdir
        os.makedirs(storage, exist_ok=True)

        self.store = FilesystemStore(f"{storage}/{cachename}")
Example #5
    def __init__(self,
                 session_key=None,
                 kv_store: KeyValueStore = None,
                 default_session_cookie_age: int = SESSION_COOKIE_AGE):
        if kv_store is None:
            kv_store = FilesystemStore("./.session_data")
        self._session_key = session_key
        self.accessed = False
        self.modified = False
        self.serializer = JSONSerializer
        self.default_session_cookie_age = default_session_cookie_age
        self.kv_store = kv_store
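Assuming this __init__ belongs to the SessionStore class used in Example #6, a small sketch of the fallback behaviour (the session key is made up): when no kv_store is passed, session data ends up as files under ./.session_data.

from simplekv.fs import FilesystemStore

session = SessionStore(session_key="abc12345")   # implicit FilesystemStore("./.session_data")
session = SessionStore(session_key="abc12345",
                       kv_store=FilesystemStore("/tmp/sessions"))  # explicit backend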
Example #6
def report_session(
    self,
    session_id,
    screen_content,
    session_store_backend: KeyValueStore = FilesystemStore("./session_data"),
):
    # to avoid circular import
    from ussd.core import UssdHandlerAbstract

    logger = get_logger(__name__).bind(action="report_session_task",
                                       session_id=session_id)

    logger.info('start')

    ussd_report_session_data = screen_content['ussd_report_session']

    session = SessionStore(session_key=session_id,
                           kv_store=session_store_backend)

    if session.get('posted'):
        logger.info("session_already_reported", posted=session['posted'])
        return

    request_conf = UssdHandlerAbstract.render_request_conf(
        session, ussd_report_session_data['request_conf'])

    UssdHandlerAbstract.make_request(
        http_request_conf=request_conf,
        response_session_key_save=ussd_report_session_data['session_key'],
        session=session,
        logger=logger)

    # check whether the response had the desired effect
    for expr in ussd_report_session_data['validate_response']:
        if UssdHandlerAbstract.evaluate_jija_expression(expr['expression'],
                                                        session=session):
            session['posted'] = True
            session.save()
            return

    if ussd_report_session_data.get('retry_mechanism'):
        try:
            self.retry(
                **screen_content['ussd_report_session']['retry_mechanism'])
        except MaxRetriesExceededError as e:
            logger.warning("report_session_error", error_message=str(e))
Example #7
    def test_concurrent_mkdir(self, tmpdir, mocker):
        # Concurrent instantiation of the store in two threads could lead to
        # a situation where both threads see that the directory does not
        # exist. For one of them the call to mkdir succeeds, for the other it
        # fails. That is fine for us as long as the directory exists afterwards.
        makedirs = mocker.patch('os.makedirs')
        makedirs.side_effect = OSError("Failure")
        mocker.patch('os.path.isdir')

        store = FilesystemStore(os.path.join(tmpdir, 'test'))
        # We have mocked os.makedirs, so this won't work. But it should
        # pass beyond the OS error and simply fail on writing the file itself.
        if PY2:
            with pytest.raises(IOError):
                store.put('test', b'test')
        else:
            with pytest.raises(FileNotFoundError):
                store.put('test', b'test')
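The race the comment describes is usually handled with the tolerant-mkdir pattern sketched below (independent of the store itself): only complain when the directory is still missing after the failed makedirs.

import os


def ensure_dir(path):
    try:
        os.makedirs(path)
    except OSError:
        # Another thread or process may have created it in the meantime.
        if not os.path.isdir(path):
            raise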
Example #8
def open(db_name, backend='filesystem', **kwargs):

    path = os.path.join(os.path.expanduser('~'), '.experimentdb', db_name)

    if backend == 'filesystem':

        print("Using filesystem backend")

        if not os.path.exists(path):
            try:
                os.makedirs(path)
            except Exception as e:
                # Ignore failures (e.g. the directory was created concurrently)
                print(e)
        return DataBase(FilesystemStore(path))

    if backend == 'redis':

        print("Using redis backend")

        # find or create a new db index
        redis_config_store = RedisStore(StrictRedis(db=0, **kwargs))

        if db_name in redis_config_store:

            db_index = int(redis_config_store.get(db_name))
            print("Database", db_name, "already exists with index", db_index)

        else:

            if 'next_db_index' in redis_config_store:
                next_db_index = int(redis_config_store.get('next_db_index'))
                db_index = next_db_index
                next_db_index += 1
                redis_config_store.put('next_db_index', str(next_db_index))
            else:
                db_index = 1
                redis_config_store.put('next_db_index', '2')

            print("New database", db_name, "created with index", db_index)
            redis_config_store.put(db_name, str(db_index))

        return DataBase(RedisStore(StrictRedis(db=db_index, **kwargs)))
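A hedged usage sketch; the database names are illustrative and the extra keyword arguments are simply forwarded to StrictRedis:

db = open('my_experiments')                       # files under ~/.experimentdb/my_experiments
db = open('my_experiments', backend='redis',
          host='localhost', port=6379)            # Redis; db 0 keeps the name -> index mapping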
Example #9
def configure_app(configuration):
    logging.config.fileConfig(os.path.join(BASE_DIR, 'logging.conf'))
    global log
    log = logging.getLogger(__name__)

    mongo_client = MongoClient(host=configuration.mongodb_uri)
    conn = mongo_client[
        configuration.mongodb_uri[configuration.mongodb_uri.rfind('/') + 1:]]
    global mongo_connection
    mongo_connection = conn

    if configuration.cert_store_type == 'simplekv_fs':
        kv_store = FilesystemStore(configuration.cert_store_path)
    elif configuration.cert_store_type == 'simplekv_gridfs':
        gfs = gridfs.GridFS(conn)
        kv_store = GridfsKeyValueStore(gfs)

    global cert_store
    if configuration.v1_aware:
        cert_store = V1AwareCertificateStore(kv_store, mongo_connection)
    else:
        cert_store = CertificateStore(kv_store)
Example #10
def geoposition(text):
    """ Use Google Maps API to geocode string """
    store = FilesystemStore('./cache')
    url = "https://maps.googleapis.com/maps/api/geocode/json?address=%s&sensor=false&language=sv" % text
    cache_key = md5.new(text.encode('utf-8')).hexdigest()  # cache key: MD5 of the query text
    try:
        result = loads(store.get(cache_key))
        if result:
            return result
        else:
            raise GeoCodingError

    except KeyError:
        print "no cache for %s" % text.encode('utf-8')
        sleep(2)  # Sleep to avoid being blocked by Google Maps API
        result = requests.get(url).json()
        if len(result["results"]):
            store.put(cache_key, dumps(result["results"][0]))
            return result["results"][0]
        else:
            #            print "empty reply when geocoding %s" % text
            store.put(cache_key, dumps(None))
            raise GeoCodingError
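Stripped of the geocoding details, the look-aside caching pattern above reduces to a few simplekv calls (the key and payload below are made up; simplekv stores raw bytes and raises KeyError on a miss):

from simplekv.fs import FilesystemStore

store = FilesystemStore('./cache')
try:
    payload = store.get('some-cache-key')          # bytes, KeyError if absent
except KeyError:
    payload = b'{"computed": "result"}'            # whatever the expensive call produced
    store.put('some-cache-key', payload)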
Example #11
    def __init__(self,
                 session_id,
                 phone_number,
                 ussd_input,
                 language,
                 journey_name,
                 journey_store: JourneyStore = None,
                 journey_version=None,
                 session_store_backend: KeyValueStore = FilesystemStore(
                     "./session_data"),
                 default_language=None,
                 use_built_in_session_management=False,
                 expiry=180,
                 **kwargs):
        """
        :param session_id: Used to maintain session 
        :param phone_number: user dialing in   
        :param ussd_input: input entered by user
        :param language: language to be used
        :param default_language: language to used
        :param use_built_in_session_management: Used to enable ussd_airflow to 
            manage its own session, by default its set to False, is set to true 
        then the session_id should be None and expiry can't be None. 
        :param expiry: Its only used if use_built_in_session_management has
        been enabled. 
        :param kwargs: All other extra arguments
        """

        self.expiry = expiry
        # A bit of defensive programming to make sure built-in session
        # management is configured consistently
        if use_built_in_session_management and session_id is not None:
            raise InvalidAttribute("When built_in_session_management is "
                                   "enabled, session_id should be None")
        if use_built_in_session_management and expiry is None:
            raise InvalidAttribute("When built_in_session_management is "
                                   "enabled, expiry should not be None")
        # session id should not be None if built in session management
        # has not been enabled
        if session_id is None and not use_built_in_session_management:
            raise InvalidAttribute(
                "Session id should not be None if built in session management "
                "has not been enabled")

        if session_id is None:
            session_id = str(phone_number)

        # for support when using django session table
        if len(str(session_id)) < 8:
            session_id = 's' * (8 - len(str(session_id))) + session_id

        self.phone_number = str(phone_number)
        self.input = unquote(ussd_input)
        self.language = language
        self.default_language = default_language or 'en'
        self.session_id = session_id

        # session store config
        self.use_built_in_session_management = use_built_in_session_management
        self.session_store_backend = session_store_backend
        self.session = self.get_session()
        self.session.set_expiry(self.expiry)

        # journey config
        if journey_store is None:
            self.journey_store = YamlJourneyStore(
                "./ussd/tests/sample_screen_definition")
        else:
            self.journey_store = journey_store
        self.journey_name = journey_name
        self.journey_version = journey_version

        # screen configs
        self.menu_index_format = kwargs.get('menu_index_format', ". ")

        for key, value in kwargs.items():
            if not hasattr(self, key):
                setattr(self, key, value)
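The constraints enforced above leave two valid ways of constructing the object. Assuming, as in Example #25, that the class is UssdRequest (the argument values are illustrative):

# Explicit session id; the caller owns the session lifetime
req = UssdRequest(session_id="abc12345", phone_number=200,
                  ussd_input="1", language="en",
                  journey_name="registration")

# Built-in session management: no session id, but expiry is required
req = UssdRequest(session_id=None, phone_number=200,
                  ussd_input="1", language="en",
                  journey_name="registration",
                  use_built_in_session_management=True, expiry=180)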
Example #12

def log(message, **kwargs):
    guy = kwargs.get('guy', '')
    type = kwargs.get('type', 'info')

    print('{}: {} {}'.format(now(), message, guy))
    getattr(logger, type)('{} proc={}'.format(message, kwargs.get('proc', 0)),
                          extra=kwargs)

    if type == 'error':
        print('Error on line {}'.format(sys.exc_info()[-1].tb_lineno))


try:
    store = FilesystemStore('./data')
except Exception as e:
    log(e, type='error')
    store = None


def send_message(message):
    subprocess.Popen(['notify-send', message])
    return


class element_has_attribute(object):
    def __init__(self, locator, attribute):
        self.attribute = attribute
        self.locator = locator
Example #13
def _create_store_fs(type, params):
    if params['create_if_missing'] and not os.path.exists(params['path']):
        os.makedirs(params['path'])
    return FilesystemStore(params['path'])
Example #14
def get_db() -> FilesystemStore:
    if 'db' not in g:
        g.db = FilesystemStore(current_app.config['DATABASE'])
    return g.db
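A hedged sketch of how this helper is typically wired up; the config value and the route are assumptions:

from flask import Flask

app = Flask(__name__)
app.config['DATABASE'] = './data'      # directory handed to FilesystemStore


@app.route('/value/<key>')
def read_value(key):
    return get_db().get(key)           # one store instance per application context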
Example #15
import os
from flask import Flask
from simplekv.fs import FilesystemStore
from flask_kvsession import KVSessionExtension
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from flask_sqlalchemy import SQLAlchemy

# initialize server KV session store
if not os.path.exists('./sessiondata'):
    os.makedirs('./sessiondata')
store = FilesystemStore('./sessiondata')

# instantiate flask app
app = Flask(__name__,
            static_folder='static',
            template_folder='templates',
            static_url_path='/static')

# get configuration from a non-repo file specified
# in this envvar
app.config.from_envvar('MAPROULETTE_SETTINGS')

# set up the ORM engine and database object
engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'],
                       convert_unicode=True)
Base = declarative_base()
db = SQLAlchemy(app)

if not app.debug:
    import logging
Example #16
import redis
import uvicorn
from fastapi import FastAPI
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
from simplekv.fs import FilesystemStore
from simplekv.memory.redisstore import RedisStore

from config import REDIS_HOST, REDIS_PORT, REDIS_DB, REDIS_PASS, IS_DEBUG, PORT
from wordcookies import game

# init FastAPI
app = FastAPI()
app.mount("/public", StaticFiles(directory="./public"), name="public")

# init cache
# TODO: move to constant
text_domain = "wordcookies"
if REDIS_HOST == "":
    cache_path = "./cache"
    print("cache storage: file, inside %s" % cache_path)
    store = FilesystemStore(cache_path)
else:
    print("cache storage: Redis, host %s" % REDIS_HOST)
    store = RedisStore(
        redis.StrictRedis(REDIS_HOST, REDIS_PORT, REDIS_DB, REDIS_PASS))


@app.get("/")
def read_index():
    return FileResponse("./public/index.html")


@app.get("/healthcheck")
def read_healthcheck():
    return {"message": "ok"}
Example #17
def create_app(db_connection_string=None, testing=None):
    class CustomFlask(Flask):
        SKIP_RULES = [  # These endpoints will be removed for v3
            '/v3/ws/<workspace_name>/hosts/bulk_delete/',
            '/v3/ws/<workspace_name>/vulns/bulk_delete/',
            '/v3/ws/<workspace_id>/change_readonly/',
            '/v3/ws/<workspace_id>/deactivate/',
            '/v3/ws/<workspace_id>/activate/',
        ]

        def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
            # Flask registers views when an application starts;
            # do not add views that are listed in SKIP_RULES
            for rule_ in CustomFlask.SKIP_RULES:
                if rule_ == rule:
                    return
            return super().add_url_rule(rule, endpoint, view_func, **options)

    app = CustomFlask(__name__, static_folder=None)

    try:
        secret_key = faraday.server.config.faraday_server.secret_key
    except Exception:
        # Currently, when the config file does not exist, this branch is not
        # reached, but it could happen in the future. TODO check
        save_new_secret_key(app)
    else:
        if secret_key is None:
            # This is what happens now when the config file doesn't exist.
            # TODO check
            save_new_secret_key(app)
        else:
            app.config['SECRET_KEY'] = secret_key

    if faraday.server.config.faraday_server.agent_registration_secret is None:
        save_new_agent_creation_token_secret()

    login_failed_message = ("Invalid username or password", 'error')

    app.config.update({
        'SECURITY_BACKWARDS_COMPAT_AUTH_TOKEN':
        True,
        'SECURITY_PASSWORD_SINGLE_HASH':
        True,
        'WTF_CSRF_ENABLED':
        False,
        'SECURITY_USER_IDENTITY_ATTRIBUTES': [{
            'username': {
                'mapper': uia_username_mapper
            }
        }],
        'SECURITY_POST_LOGIN_VIEW':
        '/_api/session',
        'SECURITY_POST_CHANGE_VIEW':
        '/_api/change',
        'SECURITY_RESET_PASSWORD_TEMPLATE':
        '/security/reset.html',
        'SECURITY_POST_RESET_VIEW':
        '/',
        'SECURITY_SEND_PASSWORD_RESET_EMAIL':
        True,
        # For testing purposes
        'SECURITY_EMAIL_SENDER':
        "*****@*****.**",
        'SECURITY_CHANGEABLE':
        True,
        'SECURITY_SEND_PASSWORD_CHANGE_EMAIL':
        False,
        'SECURITY_MSG_USER_DOES_NOT_EXIST':
        login_failed_message,
        'SECURITY_TOKEN_AUTHENTICATION_HEADER':
        'Authorization',

        # The line below should not be necessary because of the
        # CustomLoginForm, but include it anyway.
        'SECURITY_MSG_INVALID_PASSWORD':
        login_failed_message,
        'SESSION_TYPE':
        'filesystem',
        'SESSION_FILE_DIR':
        faraday.server.config.FARADAY_SERVER_SESSIONS_DIR,
        'SQLALCHEMY_TRACK_MODIFICATIONS':
        False,
        'SQLALCHEMY_RECORD_QUERIES':
        True,
        # app.config['SQLALCHEMY_ECHO'] = True
        'SECURITY_PASSWORD_SCHEMES': [
            'bcrypt',  # This should be the default value
            # 'des_crypt',
            # 'pbkdf2_sha256',
            # 'pbkdf2_sha512',
            # 'sha256_crypt',
            # 'sha512_crypt',
        ],
        'PERMANENT_SESSION_LIFETIME':
        datetime.timedelta(hours=int(
            faraday.server.config.faraday_server.session_timeout or 12)),
        'SESSION_COOKIE_NAME':
        'faraday_session_2',
        'SESSION_COOKIE_SAMESITE':
        'Lax',
    })

    store = FilesystemStore(app.config['SESSION_FILE_DIR'])
    prefixed_store = PrefixDecorator('sessions_', store)
    KVSessionExtension(prefixed_store, app)
    user_logged_in.connect(user_logged_in_succesfull, app)
    user_logged_out.connect(expire_session, app)

    storage_path = faraday.server.config.storage.path
    if not storage_path:
        logger.warn(
            'No storage section or path in the .faraday/config/server.ini. Setting the default value to .faraday/storage'
        )
        storage_path = setup_storage_path()

    if not DepotManager.get('default'):
        if testing:
            DepotManager.configure(
                'default',
                {
                    'depot.storage_path': '/tmp'  # nosec
                })
        else:
            DepotManager.configure('default',
                                   {'depot.storage_path': storage_path})

    check_testing_configuration(testing, app)

    try:
        app.config[
            'SQLALCHEMY_DATABASE_URI'] = db_connection_string or faraday.server.config.database.connection_string.strip(
                "'")
    except AttributeError:
        logger.info(
            'Missing [database] section on server.ini. Please configure the database before running the server.'
        )
    except NoOptionError:
        logger.info(
            'Missing connection_string on [database] section on server.ini. Please configure the database before running the server.'
        )

    from faraday.server.models import db  # pylint:disable=import-outside-toplevel
    db.init_app(app)
    # Session(app)

    # Setup Flask-Security
    app.user_datastore = SQLAlchemyUserDatastore(
        db, user_model=User,
        role_model=None)  # We won't use flask security roles feature

    from faraday.server.api.modules.agent import agent_creation_api  # pylint: disable=import-outside-toplevel

    app.limiter = Limiter(app, key_func=get_remote_address, default_limits=[])
    if not testing:
        app.limiter.limit(faraday.server.config.limiter_config.login_limit)(
            agent_creation_api)

    app.register_blueprint(agent_creation_api)

    Security(app, app.user_datastore, login_form=CustomLoginForm)
    # Make API endpoints require a login user by default. Based on
    # https://stackoverflow.com/questions/13428708/best-way-to-make-flask-logins-login-required-the-default

    app.view_functions['security.login'].is_public = True
    app.view_functions['security.logout'].is_public = True
    app.debug = faraday.server.config.is_debug_mode()
    minify_json_output(app)

    for handler in LOGGING_HANDLERS:
        app.logger.addHandler(handler)
    app.logger.propagate = False
    register_blueprints(app)
    register_handlers(app)

    app.view_functions[
        'agent_creation_api.AgentCreationView:post'].is_public = True
    app.view_functions[
        'agent_creation_api.AgentCreationV3View:post'].is_public = True

    return app
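The session wiring buried in the function above, shown in isolation as a minimal sketch (directory and secret are placeholders): FilesystemStore persists the session payloads, PrefixDecorator namespaces the keys, and KVSessionExtension replaces Flask's cookie sessions with server-side ones.

from flask import Flask
from simplekv.fs import FilesystemStore
from simplekv.decorator import PrefixDecorator
from flask_kvsession import KVSessionExtension

app = Flask(__name__)
app.config['SECRET_KEY'] = 'change-me'   # needed to sign the session id cookie
store = PrefixDecorator('sessions_', FilesystemStore('/tmp/sessions'))
KVSessionExtension(store, app)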
Example #18
    def __init__(
        self,
        root,
        name,
        store_uuid=None,
        storetype="hfs",
        algorithm="sha1",
        alt_root=None,
    ):
        """
        Loads a base store type
        Requires a root path where the store resides
        Create a store from persisted data
        Or create a new one
        """
        self._mstore = CronusObjectStore()
        self._dstore = FilesystemStore(f"{root}")
        self._alt_dstore = None
        if alt_root is not None:
            self.__logger.info("Create alternative data store location")
            self._alt_dstore = FilesystemStore(f"{alt_root}")
        self._algorithm = algorithm
        if store_uuid is None:
            # Generate a new store
            self.__logger.info("Generating new metastore")
            self._mstore.uuid = str(uuid.uuid4())
            self._mstore.name = f"{self._mstore.uuid}.{name}.cronus.pb"
            self._mstore.address = self._dstore.url_for(self._mstore.name)
            self._mstore.info.created.GetCurrentTime()
            self.__logger.info("Metastore ID %s", self._mstore.uuid)
            self.__logger.info("Storage location %s", self._mstore.address)
            self.__logger.info("Created on %s",
                               self._mstore.info.created.ToDatetime())
        elif store_uuid is not None:
            # Note: this branch covers all remaining cases, which makes the
            # else below unreachable.
            self.__logger.info("Load metastore from path")
            self._load_from_path(name, store_uuid)
        else:
            self.__logger.error("Cannot retrieve store: %s from datastore %s",
                                store_uuid, root)
            raise KeyError

        self._name = self._mstore.name
        self._uuid = self._mstore.uuid
        self._parent_uuid = self._mstore.parent_uuid
        self._info = self._mstore.info
        self._aux = self._info.aux

        self._dups = dict()
        self._child_stores = dict()

        objects = dict()

        for item in self._info.objects:
            self.__logger.debug("Loading object %s", item.uuid)
            objects[item.uuid] = item
            if item.WhichOneof("info") == "dataset":
                for child in item.dataset.files:
                    objects[child.uuid] = child
                for child in item.dataset.hists:
                    objects[child.uuid] = child
                for child in item.dataset.tdigests:
                    objects[child.uuid] = child
                for child in item.dataset.logs:
                    objects[child.uuid] = child
                for child in item.dataset.jobs:
                    objects[child.uuid] = child
                for child in item.dataset.tables:
                    objects[child.uuid] = child

        super().__init__(objects)
Example #19
    def setUp(self):
        TestHashGen.setUp(self)
        self.tmpdir = tempfile.mkdtemp()
        self.store = HashDecorator(FilesystemStore(self.tmpdir))
Example #20
    def setUp(self):
        self.tmpdir = tempfile.mkdtemp()
        self.store = UUIDDecorator(FilesystemStore(self.tmpdir))
Example #21
from flask import Flask
from flask_bootstrap import Bootstrap
from flask_kvsession import KVSessionExtension
from flask_migrate import Migrate
# from flask_mail import Mail
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from simplekv.fs import FilesystemStore

from config import config

store = FilesystemStore('./data/session')
kvsession = KVSessionExtension()

bootstrap = Bootstrap()
# mail = Mail()
moment = Moment()
db = SQLAlchemy()
migrate = Migrate()


def create_app(config_name):
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)

    kvsession.init_app(app, session_kvstore=store)
    bootstrap.init_app(app)
    # mail.init_app(app)
    moment.init_app(app)
    db.init_app(app)
        """
        https://requests-cache.readthedocs.io/en/latest/
        
        Does not cache 40x results.
        """
        requests_cache.install_cache('iiif_cache',
                                     backend='redis',
                                     connection=redis.StrictRedis(
                                         host=redis_host, db=0),
                                     expire_after=cache_requests_timeout,
                                     allowable_codes=[200])
else:
    """ 
    Use sqlite for local requests caching.
    """
    store = FilesystemStore(settings.simplekv_path)

    if flask_cache_timeout:
        cache = Cache(app,
                      config={
                          'CACHE_TYPE': 'simple',
                          'CACHE_DEFAULT_TIMEOUT': flask_cache_timeout
                      })
    else:
        cache = Cache(app, config={'CACHE_TYPE': 'null'})

    if cache_requests:
        """
        https://requests-cache.readthedocs.io/en/latest/
        
        Does not cache 40x results.
Example #23
from email.MIMEText import MIMEText
from email.Utils import COMMASPACE, formatdate
from email import Encoders
from sqltools import *
from email_validator import validate_email, EmailNotValidError
import requests
import config

ACCOUNT_CREATION_DIFFICULTY = '0400'
LOGIN_DIFFICULTY = '0400'

data_dir_root = os.environ.get('DATADIR')

store_dir = data_dir_root + '/sessions/'
session_store = FilesystemStore(
    store_dir
)  # TODO: Need to roll this into a SessionInterface so multiple services can hit it easily

if config.DOMAIN is None:
    email_domain = socket.gethostname()
else:
    email_domain = config.DOMAIN

if config.EMAILFROM is None:
    email_from = "noreply@" + str(email_domain)
else:
    email_from = config.EMAILFROM

app = Flask(__name__)
app.debug = True
Example #24
    def store(self, tmpdir, perms):
        return FilesystemStore(tmpdir, perm=perms)
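For context, a short sketch of what this fixture parametrizes; the octal mode is an example, and the perm keyword (as used above) presumably controls the permission bits FilesystemStore gives the files it creates:

from simplekv.fs import FilesystemStore

store = FilesystemStore('/tmp/kv-data', perm=0o600)   # owner read/write only
store.put('key', b'value')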
Example #25
    class BaseUssdTestCase(TestCase):
        validate_ussd = True

        session_store = FilesystemStore("./session_data")

        def setUp(self):
            self.journey_store = YamlJourneyStore(
                "sample_screen_definition", journey_directory='ussd/tests')
            file_prefix = self.__module__.split('.')[-1].replace('test_', '')
            self.journey_name = file_prefix
            journey_version_suffix = file_prefix + "_conf"

            self.valid_version = 'valid_' + journey_version_suffix
            self.invalid_version = 'invalid_' + journey_version_suffix

            self.mermaid_file = path + '/' + file_prefix + '/' + 'valid_' + file_prefix + '_mermaid.txt'
            self.graph_file = path + '/' + file_prefix + '/' + 'valid_' + file_prefix + '_graph.json'
            self.namespace = self.__module__.split('.')[-1]
            self.maxDiff = None

            super(UssdTestCase.BaseUssdTestCase, self).setUp()

            #

        #
        # @classmethod
        # def tearDownClass(cls) -> None:
        #     if os.path.exists('.journeys'):
        #         shutil.rmtree('.journeys')

        def _test_ussd_validation(self, version_to_validate,
                                  expected_validation, expected_errors):

            if self.validate_ussd:
                ussd_screens = self.journey_store.get(self.journey_name,
                                                      version_to_validate)

                is_valid, error_message = UssdEngine.validate_ussd_journey(
                    ussd_screens)

                self.assertEqual(is_valid, expected_validation, error_message)

                for key, value in expected_errors.items():
                    args = (value, error_message[key], key)
                    if isinstance(value, dict):
                        self.assertDictEqual(*args)
                    else:
                        self.assertEqual(*args)

                self.assertDictEqual(error_message, expected_errors)

        def testing_valid_customer_journey(self):
            self._test_ussd_validation(self.valid_version, True, {})

        def testing_invalid_customer_journey(self):

            try:
                self._test_ussd_validation(
                    self.invalid_version, False,
                    getattr(self, "validation_error_message", {}))
            except Exception as e:
                if not (hasattr(self, "expected_error") and isinstance(
                        e, self.__getattribute__("expected_error"))):
                    raise e

        def test_rendering_graph_js(self):
            if os.path.exists(self.graph_file):
                ussd_screens = self.journey_store.get(self.journey_name,
                                                      self.valid_version)

                actual_graph_js = render_journey_as_graph(ussd_screens)

                expected_graph_js = json.loads(
                    self.read_file_content(self.graph_file))

                for key, value in expected_graph_js["vertices"].items():
                    if value.get('id') == 'test_explicit_dict_loop':
                        for i in ("a for apple\n", "b for boy\n",
                                  "c for cat\n"):
                            self.assertRegex(value.get('text'), i)
                    else:
                        self.assertDictEqual(value,
                                             actual_graph_js.vertices[key])
                # self.assertDictEqual(expected_graph_js["vertices"], actual_graph_js.vertices)

                for index, value in enumerate(expected_graph_js['edges']):
                    self.assertDictEqual(value,
                                         actual_graph_js.get_edges()[index])
                self.assertEqual(expected_graph_js["edges"],
                                 actual_graph_js.get_edges())

        def test_rendering_mermaid_js(self):
            if os.path.exists(self.mermaid_file):
                ussd_screens = self.journey_store.get(self.journey_name,
                                                      self.valid_version)

                mermaid_text_format = render_journey_as_mermaid_text(
                    ussd_screens)

                file_content = self.read_file_content(self.mermaid_file)

                expected_text_lines = file_content.split('\n')
                actual_text_lines = mermaid_text_format.split('\n')

                for index, line in enumerate(expected_text_lines):
                    self.assertEqual(line, actual_text_lines[index])

                self.assertEqual(mermaid_text_format, file_content)

        def read_file_content(self, file_path):
            with open(file_path) as f:
                mermaid_text = f.read()
            return mermaid_text

        def ussd_session(self, session_id):
            return SessionStore(session_id, kv_store=self.session_store)

        def ussd_client(self, generate_customer_journey=True, **kwargs):
            class UssdTestClient(object):
                def __init__(
                    self,
                    session_id=None,
                    phone_number=200,
                    language='en',
                    extra_payload=None,
                ):

                    if extra_payload is None:
                        extra_payload = {}
                    self.phone_number = phone_number
                    self.language = language
                    self.session_id = session_id \
                        if session_id is not None \
                        else str(uuid.uuid4())
                    self.extra_payload = extra_payload

                def send(self, ussd_input, raw=False):
                    payload = {
                        "session_id": self.session_id,
                        "ussd_input": ussd_input,
                        "phone_number": self.phone_number,
                        "language": self.language,
                    }
                    payload.update(self.extra_payload)

                    ussd_request = UssdRequest(**payload)

                    response = UssdEngine(ussd_request).ussd_dispatcher()

                    if raw:
                        return response
                    return str(response)

            customer_journey_conf = {
                'journey_name': self.journey_name,
                'journey_version': self.valid_version,
                "journey_store": self.journey_store
            }

            if kwargs.get('extra_payload'):
                customer_journey_conf.update(kwargs['extra_payload'])

            kwargs['extra_payload'] = customer_journey_conf

            return UssdTestClient(**kwargs)
        """
        If set, will cache requests to remote servers (using Redis)

        Useful during testing. Will expire requests after settings.cache_requests_timeout.

        https://requests-cache.readthedocs.io/en/latest/
        """
        requests_cache.install_cache(
            'iiif_cache',
            backend='redis',
            expire_after=settings_offline.cache_requests_timeout,
            allowable_codes=[200, 404])
else:
    """ 
    Simple key-value store using JSON on the filesystem.

    Plus Flask Cache using local filesystem.
    """
    store = FilesystemStore(settings_offline.simplekv_path)
    if settings_offline.cache_requests:
        """
        If set, will cache requests to remote servers.

        Useful during testing. Use sqlite backend if not using Redis.    
        """
        requests_cache.install_cache(
            'iiif_cache',
            backend='sqlite',
            expire_after=settings_offline.cache_requests_timeout,
            allowable_codes=[200, 404])
# ====================================
Example #27
def create_app(db_connection_string=None, testing=None):
    app = Flask(__name__)

    try:
        secret_key = faraday.server.config.faraday_server.secret_key
    except Exception:
        # Currently, when the config file does not exist, this branch is not
        # reached, but it could happen in the future. TODO check
        save_new_secret_key(app)
    else:
        if secret_key is None:
            # This is what happens now when the config file doesn't exist.
            # TODO check
            save_new_secret_key(app)
        else:
            app.config['SECRET_KEY'] = secret_key

    if faraday.server.config.faraday_server.agent_token is None:
        save_new_agent_creation_token()

    login_failed_message = ("Invalid username or password", 'error')

    app.config.update({
        'SECURITY_PASSWORD_SINGLE_HASH': True,
        'WTF_CSRF_ENABLED': False,
        'SECURITY_USER_IDENTITY_ATTRIBUTES': ['username'],
        'SECURITY_POST_LOGIN_VIEW': '/_api/session',
        'SECURITY_POST_LOGOUT_VIEW': '/_api/login',
        'SECURITY_POST_CHANGE_VIEW': '/_api/change',
        'SECURITY_CHANGEABLE': True,
        'SECURITY_SEND_PASSWORD_CHANGE_EMAIL': False,
        'SECURITY_MSG_USER_DOES_NOT_EXIST': login_failed_message,
        'SECURITY_TOKEN_AUTHENTICATION_HEADER': 'Authorization',

        # The line below should not be necessary because of the
        # CustomLoginForm, but include it anyway.
        'SECURITY_MSG_INVALID_PASSWORD': login_failed_message,

        'SESSION_TYPE': 'filesystem',
        'SESSION_FILE_DIR': faraday.server.config.FARADAY_SERVER_SESSIONS_DIR,

        'SQLALCHEMY_TRACK_MODIFICATIONS': False,
        'SQLALCHEMY_RECORD_QUERIES': True,
        # app.config['SQLALCHEMY_ECHO'] = True
        'SECURITY_PASSWORD_SCHEMES': [
            'bcrypt',  # This should be the default value
            # 'des_crypt',
            'pbkdf2_sha1',  # Used by CouchDB passwords
            # 'pbkdf2_sha256',
            # 'pbkdf2_sha512',
            # 'sha256_crypt',
            # 'sha512_crypt',
            'plaintext',  # TODO: remove it
        ],
        'PERMANENT_SESSION_LIFETIME': datetime.timedelta(hours=12),
        'SESSION_COOKIE_NAME': 'faraday_session_2',
        'SESSION_COOKIE_SAMESITE': 'Lax',
    })

    store = FilesystemStore(app.config['SESSION_FILE_DIR'])
    prefixed_store = PrefixDecorator('sessions_', store)
    KVSessionExtension(prefixed_store, app)
    user_logged_out.connect(expire_session, app)

    storage_path = faraday.server.config.storage.path
    if not storage_path:
        logger.warn('No storage section or path in the .faraday/config/server.ini. Setting the default value to .faraday/storage')
        storage_path = setup_storage_path()

    if not DepotManager.get('default'):
        if testing:
            DepotManager.configure('default', {
                'depot.storage_path': '/tmp'
            })
        else:
            DepotManager.configure('default', {
                'depot.storage_path': storage_path
            })

    check_testing_configuration(testing, app)

    try:
        app.config['SQLALCHEMY_DATABASE_URI'] = db_connection_string or faraday.server.config.database.connection_string.strip("'")
    except AttributeError:
        logger.info('Missing [database] section on server.ini. Please configure the database before running the server.')
    except NoOptionError:
        logger.info('Missing connection_string on [database] section on server.ini. Please configure the database before running the server.')

    from faraday.server.models import db # pylint:disable=import-outside-toplevel
    db.init_app(app)
    #Session(app)

    # Setup Flask-Security
    app.user_datastore = SQLAlchemyUserDatastore(
        db,
        user_model=User,
        role_model=None)  # We won't use flask security roles feature
    Security(app, app.user_datastore, login_form=CustomLoginForm)
    # Make API endpoints require a login user by default. Based on
    # https://stackoverflow.com/questions/13428708/best-way-to-make-flask-logins-login-required-the-default

    app.view_functions['security.login'].is_public = True
    app.view_functions['security.logout'].is_public = True

    app.debug = faraday.server.config.is_debug_mode()
    minify_json_output(app)

    for handler in LOGGING_HANDLERS:
        app.logger.addHandler(handler)

    register_blueprints(app)
    register_handlers(app)

    app.view_functions['agent_api.AgentCreationView:post'].is_public = True

    return app
Example #28
    def backend(self, session_key=None):
        return SessionStore(session_key,
                            FilesystemStore("./session_data_test"))
Example #29
 def store(self, tmpdir):
     return FilesystemStore(tmpdir)
Example #30
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email.MIMEText import MIMEText
from email.Utils import COMMASPACE, formatdate
from email import Encoders
from sqltools import *
from recaptcha.client import captcha
import config

ACCOUNT_CREATION_DIFFICULTY = '0400'
LOGIN_DIFFICULTY = '0400'

if config.LOCALDEVBYPASSDB:
    data_dir_root = config.LOCALDATADIR
    store_dir = data_dir_root + '/sessions/'
    session_store = FilesystemStore(store_dir)

if config.DOMAIN is None:
    email_domain = socket.gethostname()
else:
    email_domain = config.DOMAIN

if config.EMAILFROM is None:
    email_from = "noreply@" + str(email_domain)
else:
    email_from = config.EMAILFROM

app = Flask(__name__)
app.debug = True