def _consider_retweets(tweets):
    """
    Retweets if positive classification is above THRESHOLD.
    0 = neg, 1 = pos
    """
    logger.info('Considering retweeting...')
    retweeted_so_far = 0
    threshold = config().retweet_threshold

    # Skip tweets we must not touch: protected accounts and already-retweeted.
    eligible = []
    for tweet in tweets:
        if tweet['protected'] or tweet['retweeted']:
            continue
        eligible.append(tweet)

    texts = _get_tweet_texts(eligible)
    if not texts:
        return

    for position, probabilities in enumerate(CLS.classify(texts)):
        # Re-read the cap each pass, as the original did.
        if retweeted_so_far >= config().max_retweets:
            logger.info('Hit maximum retweet limit, stopping for now.')
            break
        positive = probabilities[1]
        if positive > threshold:
            logger.info(
                'Classified as %s retweetable, above %s threshold, retweeting...'
                % (positive, threshold))
            twitter.retweet(eligible[position]['tid'])
            retweeted_so_far += 1
        else:
            logger.info(
                'Classified as %s retweetable, below %s threshold, not retweeting...'
                % (positive, threshold))
def _consider_retweets(tweets):
    """
    Retweets if positive classification is above THRESHOLD.
    0 = neg, 1 = pos
    """
    logger.info("Considering retweeting...")
    count = 0
    threshold = config().retweet_threshold
    # Only unprotected, not-yet-retweeted tweets are candidates.
    pool = [t for t in tweets if not (t["protected"] or t["retweeted"])]
    texts = _get_tweet_texts(pool)
    if texts:
        for i, probs in enumerate(CLS.classify(texts)):
            if count >= config().max_retweets:
                logger.info("Hit maximum retweet limit, stopping for now.")
                break
            msg_args = (probs[1], threshold)
            if probs[1] > threshold:
                logger.info(
                    "Classified as %s retweetable, above %s threshold, retweeting..."
                    % msg_args
                )
                twitter.retweet(pool[i]["tid"])
                count += 1
            else:
                logger.info(
                    "Classified as %s retweetable, below %s threshold, not retweeting..."
                    % msg_args
                )
def test_production(self, production_config):
    """Verify the production environment's DB URL, secret key and log level."""
    from app.config import config

    env = os.getenv("APP_ENV")
    expected_db = "postgresql://*****:*****@prod_host:15432/prod_database"
    assert config(env).DATABASE_URL == expected_db
    assert config(env).SECRET_KEY == "prod_secretkey"
    assert config(env).LOG_LEVEL == INFO
def test_staging(self, staging_config):
    """Verify the staging environment's DB URL, secret key and log level."""
    from app.config import config

    env = os.getenv("APP_ENV")
    expected_db = "postgresql://*****:*****@staging_host:15432/staging_database"
    assert config(env).DATABASE_URL == expected_db
    expected_key = b".\xa3\x1b5\x11\x9c$d\x02zS\x87\x9a;\x94\x03"
    assert config(env).SECRET_KEY == expected_key
    assert config(env).LOG_LEVEL == DEBUG
def __init__(self) -> None:
    """Initialize application logging.

    Installs a single stdout handler with a request-aware format on the
    "app" logger, points SQLAlchemy's engine logger at the same handler so
    its lines match, and silences werkzeug's default request logging.
    """
    formatter = RequestFormatter(
        "[%(asctime)s] %(remote_addr)s %(levelname)s: %(name)s(%(process)d): %(message)s"  # noqa
    )
    handler = StreamHandler(sys.stdout)
    handler.setFormatter(formatter)

    logger = getLogger("app")
    logger.setLevel(config().LOG_LEVEL)
    logger.addHandler(handler)

    # Align SQLAlchemy's output format with ours.
    # https://github.com/sqlalchemy/sqlalchemy/blob/main/lib/sqlalchemy/engine/base.py
    sqla_logger = getLogger("sqlalchemy.engine.Engine")
    # Iterate over a copy: removing handlers while iterating the live
    # `handlers` list mutates it mid-loop and can skip entries (bug in the
    # original).
    for h in list(sqla_logger.handlers):
        sqla_logger.removeHandler(h)
    sqla_logger.addHandler(handler)

    # Werkzeug's own request log is fully disabled.
    getLogger("werkzeug").disabled = True
    logger.info("logging initialized.")
def init_app() -> Flask:
    """Build and configure the Flask app (called only at boot/reload)."""
    Logger()
    app = Flask(__name__)

    # Flask settings pulled from configuration.
    app.secret_key = config().SECRET_KEY

    # Blueprints
    app.register_blueprint(signup.bp)
    app.register_blueprint(user.bp)

    # Request lifecycle hooks.
    app.before_request(before_action)
    app.after_request(after_action)

    FlaskInjector(app=app, modules=[UsecaseDIModule(), RepositoryDIModule()])
    Swagger(app, template=template)

    logger.debug("app initialized")
    logger.debug(f"URL Map: {app.url_map}")
    logger.debug(f"app.config: {app.config}")
    logger.debug(f"config: {config().dict()}")
    return app
def auth(token: str, required_scopes=None) -> dict:
    """Check api key authentication

    Parameters
    ----------
    token : string
        Token provided via HTTP header
    required_scopes
        Scopes (not used)

    Return
    ------
    dict
        Informations about user (empty, not used)

    Raises
    ------
    connexion.exceptions.OAuthProblem
        When the token matches no configured key.
    """
    authKey = config().getKey(connexion.request.headers['Host'])
    if isinstance(authKey, str):
        # Validate against single auth key
        if token == authKey:
            return {}
    elif isinstance(authKey, dict):
        # Validate against url rule name auth key. The sentinel object()
        # never compares equal to a token, so a missing rule (or a None
        # token) cannot accidentally authenticate.
        if token == authKey.get(str(connexion.request.url_rule), object()):
            return {}
    # No valid key found
    raise connexion.exceptions.OAuthProblem('Authentication error')
def __new__(singletonClass):
    """Create (or return the already-created) singleton instance.

    The first call constructs the object and autoloads the configured
    host data files; every later call hands back the same object.
    """
    if singletonClass.__instance is None:
        print('Initialize data orm object...')
        instance = super(data, singletonClass).__new__(singletonClass)
        singletonClass.__instance = instance
        # Autoload data files
        instance.__loadFiles(config().getHostfiles())
    return singletonClass.__instance
def client():
    """Yield a Flask test client backed by a freshly initialized database,
    then tear the session and the SQLite file down afterwards."""
    app.config["TESTING"] = True
    with app.test_client() as client:
        init_db()
        yield client
        db_session.remove()
        # Remove the SQLite file created for this test run.
        os.unlink(config().DATABASE_URL.replace("sqlite:///", ""))
def consider():
    """
    Decide whether or not to act (tweet).
    """
    logger.info("Considering tweeting...")
    roll = random.random()
    chance = config().chance_to_act
    acting = roll < chance
    if acting:
        logger.info("Rolled %s, chance to act is %s, tweeting." % (roll, chance))
        twitter.tweet(MKV.generate())
    else:
        logger.info("Rolled %s, chance to act is %s, NOT tweeting." % (roll, chance))
def main():
    # Copy schema and data from a source MySQL database to a destination one
    # by shelling out to mysqldump/mysql via intermediate files in a temp dir.
    dir = os.path.realpath(config('app.directories.temp'))
    try:
        os.stat(dir)
    except:
        # NOTE(review): bare except plus a stat/mkdir race —
        # os.makedirs(dir, exist_ok=True) would be safer; confirm before changing.
        os.mkdir(dir)
    # NOTE(review): credentials are hard-coded in the command lines, and the
    # backslashes in "\schema.sql" / "\data.sql" are unescaped (Windows-style
    # paths) — verify these are intentional.
    cmd1 = 'mysqldump -uroot -paryafoole --no-data --result-file="{0}\schema.sql" {1}'.format(
        dir, config('app.databases.source.name'))
    call(cmd1)
    # Data only: no DDL, no triggers.
    cmd2 = 'mysqldump -uroot -paryafoole --no-create-info --skip-triggers --result-file="{}\data.sql" {}'.format(
        dir, config('app.databases.source.name'))
    call(cmd2)
    # Load schema, then data, into the destination database.
    cmd3 = 'mysql -hlocalhost -uroot -paryafoole {} < {}\schema.sql'.format(
        config('app.databases.destination.name'), dir)
    call(cmd3)
    cmd4 = 'mysql -hlocalhost -uroot -paryafoole {} < {}\data.sql'.format(
        config('app.databases.destination.name'), dir)
    call(cmd4)
# NOTE(review): the triple-quote below opens a string that continues past this
# chunk — presumably commenting out following code; verify it is closed later.
'''
def prompt_update_config():
    """Interactively prompt for CouchDB settings and return the updated config."""
    try:
        from app.config import config
    except ImportError:
        # Fall back to bundled defaults when no user config exists yet.
        from app.defaultconfig import config

    config = config()
    prompts = (
        ("CouchDB Server URL", "couchdb_server_url", str),
        ("CouchDB db", "couchdb_db", str),
    )
    for name, key, converter in prompts:
        config[key] = user_query(name, converter, config[key])
    return config
def create_app():
    """Create and fully configure the Flask application."""
    app = Flask(__name__)

    # Database and login manager.
    db.init_app(app)
    login.init_app(app)

    # Apply app configuration.
    Configuration.config(app)

    # Administration interface.
    from app.models import User, Post

    # Admin left side.
    admin = Admin(app, index_view=AdminView(url='/admin'),
                  template_mode='bootstrap3')
    admin.add_view(UserView(User, db.session))
    admin.add_view(PostView(Post, db.session))

    # Admin right side.
    admin.add_link(
        MenuLink(name='Torna al sito publico', endpoint='public.public'))

    # Register blueprint routes.
    from app.routes import register_blueprints_routes
    register_blueprints_routes(app)

    return app
def client_with_testdata():
    """Yield a Flask test client whose database is seeded from sample_data.sql,
    then tear the session and the SQLite file down afterwards."""
    app.config["TESTING"] = True
    with app.test_client() as client:
        init_db()
        # Use a context manager so the SQL fixture file is closed promptly
        # (the original leaked the open file handle).
        with open("./tests/api/v1/sample_data.sql", mode="r", encoding="utf-8") as file:
            engine.execute(text(file.read()))
        yield client
        db_session.remove()
        os.unlink(config().DATABASE_URL.replace("sqlite:///", ""))
def consider():
    """
    Decide whether or not to act (tweet).
    """
    logger.info('Considering tweeting...')
    roll = random.random()
    chance = config().chance_to_act
    # Guard clause: bail out early on the no-act roll.
    if roll >= chance:
        logger.info('Rolled %s, chance to act is %s, NOT tweeting.' % (roll, chance))
        return
    logger.info('Rolled %s, chance to act is %s, tweeting.' % (roll, chance))
    twitter.tweet(MKV.generate())
def connect():
    """Open a PostgreSQL connection using parameters from config().

    Runs `SELECT version()` as a sanity probe; exits the process with
    status 1 on any database error.
    """
    try:
        connection = psycopg2.connect(**config())
        cursor = connection.cursor()
        cursor.execute('SELECT version()')
        version = cursor.fetchone()[0]  # probe result; currently unused
        #print(version)
        return connection
    except psycopg2.DatabaseError as e:
        print(f'Error {e}')
        sys.exit(1)
def _loadConfig(self) -> dict:
    """Load this plugin's per-host configuration.

    Hosts without `enabled: true` are dropped; when the plugin declares
    `_configDefault`, each remaining host config is deep-merged over it.
    """
    self._config = config().getPluginConfig(self.getName())

    # Iterate a snapshot of the keys so popping is safe mid-loop.
    for host in list(self._config):
        if not self._config[host].get('enabled', False):
            self._config.pop(host)

    if hasattr(self, '_configDefault'):
        for host, hostConfig in self._config.items():
            self._config[host] = pydeepmerge.deep_merge(
                self._configDefault, hostConfig)
    return self._config
def commit(self, host: str) -> bool:
    """Persist current API information to file

    Parameters
    ----------
    host : str
        Host to persist

    Returns
    -------
    bool
    """
    filename = 'config/apidata/%s' % config().getHostfile(host)
    payload = json.dumps(self.__data[host], indent=2)
    with safer.open(filename, 'w') as f:
        f.write(payload)
    print('Saved data for %s ...' % host)
    return True
from app.config import config
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

# Engine and thread-local session shared across the application.
engine = create_engine(
    config().DATABASE_URL,
    convert_unicode=True,
    echo=config().SQL_ALCHEMY_ECHO,
)
db_session = scoped_session(
    sessionmaker(autocommit=False, autoflush=False, bind=engine)
)

Base = declarative_base()
Base.query = db_session.query_property()


def init_db() -> None:
    """Create every table defined on the declarative Base."""
    # Importing the schema module registers all models on Base.
    import app.interfaces.gateways.database.schema  # noqa
    Base.metadata.create_all(bind=engine)
from tweepy.error import TweepError
from mongoengine.errors import NotUniqueError, OperationError
from pymongo.errors import DuplicateKeyError
import random

# Logging
from app.logger import logger
logger = logger(__name__)

# Load the classifier and markov.
# Loaded here so we can keep it in memory.
# accessible via app.brain.CLS or app.brain.MKV
# NOTE(review): Classifier, Markov and config are not imported in this chunk —
# presumably imported earlier in the file; verify.
CLS = Classifier()
MKV = Markov(ramble=config().ramble,
             ngram_size=config().ngram_size,
             spasm=config().spasm)


def ponder():
    """
    Fetch tweets from the Muses and memorize them;
    i.e. train classifier or Markov on them.
    """
    logger.info('Pondering new twitter data...')

    # Each of these are just a list
    # of tweets as strings.
    pos = []
from app.config import config

# Swagger (OpenAPI 2.0) base template for the API documentation.
template = {
    "swagger": "2.0",
    "info": {
        "title": "My API",
        "description": "API for my data",
        "contact": {
            "responsibleOrganization": "ME",
            "responsibleDeveloper": "Me",
            "email": "*****@*****.**",
            "url": "example.com",
        },
        "termsOfService": "https://example.com/terms",
        "version": "1.0.0",
    },
    # Host is environment-dependent, pulled from application config.
    "host": config().SWAGGER_HOST,
    "basePath": "/api/",
    "schemes": ["http"],
    "operationId": "getmyData",
}
from flask_admin.contrib.sqla import ModelView
from flask_bootstrap import Bootstrap
from flask_admin.menu import MenuLink
from app.routes.public import public_route
from app.routes.security import security_route
from app.routes.handler import handler_route

# NOTE(review): Flask, Security, flask_sqlalchemy, Configuration, Admin,
# Test1, Test2 and User are not imported in this chunk — presumably imported
# earlier in the file; verify.
app = Flask(__name__)

# Security
security = Security()
security.init_app(app)

# Instantiate the database bound to this app.
db = flask_sqlalchemy.SQLAlchemy(app)

Configuration.config(app)

# Add Bootstrap
Bootstrap(app)

# Register route blueprints.
#app.register_blueprint(admin_route)
app.register_blueprint(public_route)
app.register_blueprint(security_route)
app.register_blueprint(handler_route)

# Administration interface
admin = Admin(app, name='admin', template_mode='bootstrap3')
admin.add_view(Test1(name='Hello 1', endpoint='test1', category='Test'))
admin.add_view(Test2(name='Hello 2', endpoint='test2', category='Test'))
admin.add_view(ModelView(User, db.session))
import importlib as lib, os ##### from flask import Flask, Cookies from app.config import config ##### from app.model import Router, SendMail, Token ##### from app.DB import SQLite, MySQL, DBJson app = Flask(os.getcwd()) config(app) class model: def __init__(self): self.mail = SendMail(app) self.token = Token(app) self.sqlite = SQLite(app) #self.mysql = MySQL ( app) self.dbJson = DBJson(app) self.cookies = Cookies def __dir__(self): return ['mail', 'sqlite', 'token', "mysql", "cookies", "dbJson"] class Controllers(): def __init__(self, App):
def test_invalid_env(self):
    """config() must reject an unknown environment name."""
    from app.config import config

    with pytest.raises(EnvironmentError):
        _ = config("hoge")
from tweepy.error import TweepError
from mongoengine.errors import NotUniqueError, OperationError
from pymongo.errors import DuplicateKeyError
import random

# Logging
from app.logger import logger
logger = logger(__name__)

# Load the classifier and markov.
# Loaded here so we can keep it in memory.
# accessible via app.brain.CLS or app.brain.MKV
# NOTE(review): Classifier, Markov and config are not imported in this chunk —
# presumably imported earlier in the file; verify.
CLS = Classifier()
MKV = Markov(ramble=config().ramble,
             ngram_size=config().ngram_size,
             spasm=config().spasm)


def ponder():
    """
    Fetch tweets from the Muses and memorize them;
    i.e. train classifier or Markov on them.
    """
    logger.info("Pondering new twitter data...")

    # Each of these are just a list
    # of tweets as strings.
    pos = []
    neg = []
#!/usr/bin/env python3 import argparse import logging from app.allstar import allstar from app.asterisk import asterisk from app.config import config import app.log controller = 'controller-1' logger = app.log.setup_custom_logger('RPT_Manager_CLI') if __name__ == "__main__": config = config() node_parser = argparse.ArgumentParser(add_help=False) node_parser.add_argument('-n', '--node', type=int, help="The Allstar Node", action='append', required=True) debug_parser = argparse.ArgumentParser(add_help=False) debug_parser.add_argument('-D', dest='debug', help="DEBUG Logging", action='store_true') parser = argparse.ArgumentParser(description='RPT Manager CLI',
def test_default(self):
    """Default configuration values should match the documented defaults.

    The original body *assigned* these attributes (``=``) instead of
    asserting them, so the test could never fail; fixed to real assertions.
    """
    from app.config import config

    assert config().APP_VERSION == "0.1.0"
    assert config().SQL_ALCHEMY_ECHO is False