def __init__(self):
    """Create an Airbrake notifier if both project credentials are configured.

    Reads the module-level AIRBRAKE_PROJECT_ID / AIRBRAKE_PROJECT_KEY
    constants; when either is missing/falsy, no notifier is created and
    the ``airbrake_notifier`` attribute is intentionally left unset.
    """
    credentials_present = bool(AIRBRAKE_PROJECT_KEY) and bool(AIRBRAKE_PROJECT_ID)
    if credentials_present:
        self.airbrake_notifier = pybrake.Notifier(
            project_id=AIRBRAKE_PROJECT_ID,
            project_key=AIRBRAKE_PROJECT_KEY,
            environment=ENV,
        )
def get_airbrake_notifier():
    """Build and return a pybrake Notifier configured from environment variables.

    Uses AIRBRAKE_PROJECT_ID, AIRBRAKE_API_KEY and AIRBRAKE_ENVIRONMENT
    (the last one defaulting to 'development' when unset).
    """
    env = os.environ
    project_id = env.get('AIRBRAKE_PROJECT_ID')
    project_key = env.get('AIRBRAKE_API_KEY')
    environment = env.get('AIRBRAKE_ENVIRONMENT', 'development')
    return pybrake.Notifier(
        project_id=project_id,
        project_key=project_key,
        environment=environment,
    )
def __init__(self, environment=None, verbose=False):
    """Initialize the production system.

    Sets up logging (rotating file log, optional console log, Airbrake.io
    error reporting), applies environment-variable overrides, validates and
    parses the book-archive directories, populates the global Config, builds
    the ranked directory structure, and defines the pipeline graph.

    :param environment: optional dict of environment variables to set
                        (mainly useful when testing)
    :param verbose: when True, also log to stdout
    """
    logger = logging.getLogger()
    # NOTE(review): handlers are given DEBUG/INFO levels below, but the root
    # logger's own level is never set (default WARNING), so lower-severity
    # records may be filtered before reaching the handlers — confirm.
    if os.environ.get("LOCATION_LOG_FILE") is not None:
        logfile = os.environ.get("LOCATION_LOG_FILE")
    else:
        logfile = "/tmp/produksjonssystem.log"
    # Weekly rotation ("D" with interval=7), keeping 5 old log files.
    handler = TimedRotatingFileHandler(logfile, when="D", interval=7, backupCount=5)
    fmt = "%(asctime)s %(levelname)-8s [%(threadName)-30s] %(message)s"
    formatter = logging.Formatter(fmt=fmt)
    handler.setFormatter(formatter)
    # DEBUG unless the DEBUG env var is explicitly set to something other than "1".
    handler.setLevel(level=logging.DEBUG if os.environ.get("DEBUG", "1") == "1" else logging.INFO)
    logger.addHandler(handler)
    if verbose:
        consoleHandler = logging.StreamHandler(sys.stdout)
        consoleHandler.setFormatter(formatter)
        consoleHandler.setLevel(level=logging.DEBUG if os.environ.get("DEBUG", "1") == "1" else logging.INFO)
        logger.addHandler(consoleHandler)

    # add airbrake.io handler
    self.airbrake_config = {
        "project_id": os.getenv("AIRBRAKE_PROJECT_ID", None),
        "project_key": os.getenv("AIRBRAKE_PROJECT_KEY", None),
        "environment": os.getenv("AIRBRAKE_ENVIRONMENT", "development")
    }
    if self.airbrake_config["project_id"] and self.airbrake_config["project_key"]:
        notifier = pybrake.Notifier(**self.airbrake_config)
        # Route ERROR-and-above log records to airbrake.io.
        airbrake_handler = pybrake.LoggingHandler(notifier=notifier, level=logging.ERROR)
        logging.getLogger().addHandler(airbrake_handler)
    else:
        # Mark Airbrake as unconfigured so later code can test for it.
        self.airbrake_config = None
        logging.warning("Airbrake.io not configured (missing AIRBRAKE_PROJECT_ID and/or AIRBRAKE_PROJECT_KEY)")

    # Set environment variables (mainly useful when testing)
    if environment:
        assert isinstance(environment, dict)
        for name in environment:
            os.environ[name] = environment[name]
        self.environment = environment
    else:
        self.environment = {}
    Pipeline.environment = self.environment  # Make environment available from pipelines

    # Check that archive dirs is defined
    assert os.environ.get("BOOK_ARCHIVE_DIRS"), (
        "The book archives must be defined as a space separated list in the environment variable BOOK_ARCHIVE_DIRS (as name=path pairs)")
    self.book_archive_dirs = {}
    # Parse "name=path" pairs; paths are normalized and given a trailing slash.
    for d in os.environ.get("BOOK_ARCHIVE_DIRS").split(" "):
        assert "=" in d, "Book archives must be specified as name=path. For instance: master=/media/archive. Note that paths can not contain spaces."
        archive_name = d.split("=")[0]
        archive_path = os.path.normpath(d.split("=")[1]) + "/"
        self.book_archive_dirs[archive_name] = archive_path

    # for convenience; both method variable and instance variable so you don't have to
    # write "self." all the time during initialization.
    book_archive_dirs = self.book_archive_dirs

    Config.set("test", os.environ.get("TEST", "false").lower() in ["true", "1"])
    Config.set("email.allowed_email_addresses_in_test", os.environ.get("ALLOWED_EMAIL_ADDRESSES_IN_TEST", "").split(","))

    # Configure email
    Config.set("email.sender.name", "NLBs Produksjonssystem")
    Config.set("email.sender.address", "*****@*****.**")
    Config.set("email.smtp.host", os.environ.get("MAIL_SERVER", None))
    Config.set("email.smtp.port", os.environ.get("MAIL_PORT", None))
    Config.set("email.smtp.user", os.environ.get("MAIL_USERNAME", None))
    Config.set("email.smtp.pass", os.environ.get("MAIL_PASSWORD", None))
    Config.set("email.formatklar.address", os.environ.get("MAIL_FORMATKLAR"))
    Config.set("email.filesize.address", os.environ.get("MAIL_FILESIZE"))
    Config.set("email.abklar.address", os.environ.get("MAIL_ABKLAR"))

    # Configure NLB API URL
    Config.set("nlb_api_url", os.environ.get("NLB_API_URL"))

    # Special directories
    Config.set("master_dir", os.path.join(book_archive_dirs["master"], "master/EPUB"))
    Config.set("newsfeed_dir", os.path.join(book_archive_dirs["master"], "innkommende/schibsted-aviser/avisfeeder"))
    Config.set("reports_dir", os.getenv("REPORTS_DIR", os.path.join(book_archive_dirs["master"], "rapporter")))
    Config.set("metadata_dir", os.getenv("METADATA_DIR", os.path.join(book_archive_dirs["master"], "metadata")))
    Config.set("nlbsamba.dir", os.environ.get("NLBSAMBA_DIR"))

    # Define directories (using OrderedDicts to preserve order when plotting)
    self.dirs_ranked = []

    self.dirs_ranked.append({
        "id": "incoming",
        "name": "Mottak",
        "dirs": OrderedDict()
    })
    # self.dirs_ranked[-1]["dirs"]["incoming_NLBPUB"] = os.path.join(book_archive_dirs["master"], "innkommende/NLBPUB")
    # self.dirs_ranked[-1]["dirs"]["nlbpub_manuell"] = os.path.join(book_archive_dirs["master"], "mottakskontroll/NLBPUB")
    self.dirs_ranked[-1]["dirs"]["incoming"] = os.path.join(book_archive_dirs["master"], "innkommende/nordisk")
    # self.dirs_ranked[-1]["dirs"]["incoming-for-approval"] = os.path.join(book_archive_dirs["master"], "innkommende/nordisk-manuell-mottakskontroll")
    self.dirs_ranked[-1]["dirs"]["old_dtbook"] = os.path.join(book_archive_dirs["master"], "grunnlagsfil/DTBook")
    self.dirs_ranked[-1]["dirs"]["incoming-statped-nlbpub"] = os.path.join(book_archive_dirs["master"], "innkommende/statped-nlbpub")

    self.dirs_ranked.append({
        "id": "source-in",
        "name": "Ubehandlet kildefil",
        "dirs": OrderedDict()
    })

    self.dirs_ranked.append({
        "id": "source-out",
        "name": "Behandlet kildefil",
        "dirs": OrderedDict()
    })

    self.dirs_ranked.append({
        "id": "master",
        "name": "Grunnlagsfil",
        "dirs": OrderedDict()
    })
    self.dirs_ranked[-1]["dirs"]["master"] = Config.get("master_dir")
    self.dirs_ranked[-1]["dirs"]["metadata"] = Config.get("metadata_dir")
    # self.dirs_ranked[-1]["dirs"]["grunnlag"] = os.path.join(book_archive_dirs["master"], "grunnlagsfil/NLBPUB")
    self.dirs_ranked[-1]["dirs"]["nlbpub"] = os.path.join(book_archive_dirs["master"], "master/NLBPUB")
    self.dirs_ranked[-1]["dirs"]["epub_from_dtbook"] = os.path.join(book_archive_dirs["master"], "grunnlagsfil/EPUB-fra-DTBook")
    self.dirs_ranked[-1]["dirs"]["news"] = Config.get("newsfeed_dir")

    self.dirs_ranked.append({
        "id": "version-control",
        "name": "Versjonskontroll",
        "dirs": OrderedDict()
    })
    self.dirs_ranked[-1]["dirs"]["nlbpub-previous"] = os.path.join(book_archive_dirs["master"], "master/NLBPUB-tidligere")

    self.dirs_ranked.append({
        "id": "publication-in",
        "name": "Format-spesifikk metadata",
        "dirs": OrderedDict()
    })
    self.dirs_ranked[-1]["dirs"]["pub-in-braille"] = os.path.join(book_archive_dirs["master"], "utgave-inn/punktskrift")
    self.dirs_ranked[-1]["dirs"]["pub-in-ebook"] = os.path.join(book_archive_dirs["master"], "utgave-inn/e-tekst")
    self.dirs_ranked[-1]["dirs"]["pub-in-audio"] = os.path.join(book_archive_dirs["master"], "utgave-inn/lydbok")

    self.dirs_ranked.append({
        "id": "publication-ready",
        "name": "Klar for produksjon",
        "dirs": OrderedDict()
    })
    self.dirs_ranked[-1]["dirs"]["pub-ready-braille"] = os.path.join(book_archive_dirs["master"], "utgave-klargjort/punktskrift")
    self.dirs_ranked[-1]["dirs"]["pub-ready-ebook"] = os.path.join(book_archive_dirs["master"], "utgave-klargjort/e-bok")
    self.dirs_ranked[-1]["dirs"]["pub-ready-docx"] = os.path.join(book_archive_dirs["master"], "utgave-klargjort/DOCX")
    self.dirs_ranked[-1]["dirs"]["pub-ready-magazine"] = os.path.join(book_archive_dirs["master"], "utgave-klargjort/tidsskrifter")
    self.dirs_ranked[-1]["dirs"]["epub_narration"] = os.path.join(book_archive_dirs["master"], "utgave-klargjort/EPUB-til-innlesing")
    self.dirs_ranked[-1]["dirs"]["dtbook_tts"] = os.path.join(book_archive_dirs["master"], "utgave-klargjort/DTBook-til-talesyntese")
    self.dirs_ranked[-1]["dirs"]["dtbook_news"] = os.path.join(book_archive_dirs["master"], "utgave-klargjort/DTBook-aviser-til-talesyntese")

    self.dirs_ranked.append({
        "id": "publication-out",
        "name": "Ferdig produsert",
        "dirs": OrderedDict()
    })
    self.dirs_ranked[-1]["dirs"]["pef"] = os.path.join(book_archive_dirs["master"], "utgave-ut/PEF")
    self.dirs_ranked[-1]["dirs"]["pef-checked"] = os.path.join(book_archive_dirs["master"], "utgave-ut/PEF-kontrollert")
    self.dirs_ranked[-1]["dirs"]["html"] = os.path.join(book_archive_dirs["master"], "utgave-ut/HTML")
    self.dirs_ranked[-1]["dirs"]["epub-ebook"] = os.path.join(book_archive_dirs["share"], "daisy202/EPUB")
    self.dirs_ranked[-1]["dirs"]["docx"] = os.path.join(book_archive_dirs["master"], "utgave-ut/DOCX")
    self.dirs_ranked[-1]["dirs"]["daisy202"] = os.path.join(book_archive_dirs["share"], "daisy202")
    self.dirs_ranked[-1]["dirs"]["abstracts"] = os.path.join(book_archive_dirs["distribution"], "www/abstracts")
    self.dirs_ranked[-1]["dirs"]["daisy202-ready"] = os.path.join(book_archive_dirs["master"], "utgave-klargjort/lydbok-til-validering")
    self.dirs_ranked[-1]["dirs"]["daisy202-dist"] = os.path.join(book_archive_dirs["share"], "daisy202")

    # Make a key/value version of dirs_ranked for convenience
    self.dirs = {
        "reports": Config.get("reports_dir")
    }
    for rank in self.dirs_ranked:
        for dir in rank["dirs"]:
            self.dirs[dir] = rank["dirs"][dir]

    # also make dirs available from static contexts
    Directory.dirs_ranked = self.dirs_ranked
    Directory.dirs_flat = self.dirs

    # by default, the inactivity timeout for all directories are 10 seconds,
    # but they can be overridden here
    # for instance: self.dirs_inactivity_timeouts["master"] = 300
    self.dirs_inactivity_timeouts = {}

    # Define pipelines and input/output/report dirs
    self.pipelines = [
        # Conversion of old DTBooks to EPUB 3
        # [NordicDTBookToEpub(retry_missing=True,
        #                     only_when_idle=True), "old_dtbook", "epub_from_dtbook"],

        # Intake, nordic guidelines 2015-1
        # [NLBPUB_incoming_validator(retry_all=True,
        #                            during_working_hours=True
        #                            ), "incoming_NLBPUB", "grunnlag"],
        # [NLBPUB_incoming_warning(retry_all=True,
        #                          during_working_hours=True
        #                          ), "incoming_NLBPUB", "nlbpub_manuell"],
        # [DummyPipeline("Manuell sjekk av NLBPUB",
        #                labels=["EPUB"]), "nlbpub_manuell", "grunnlag"],
        # [NLBPUB_validator(overwrite=False), "grunnlag", "nlbpub"],
        [IncomingNordic(retry_all=True,
                        during_working_hours=True,
                        during_night_and_weekend=True), "incoming", "master"],
        [NordicToNlbpub(retry_missing=True,
                        overwrite=False,
                        during_working_hours=True,
                        during_night_and_weekend=True), "master", "nlbpub"],
        [StatpedNlbpubToNlbpub(retry_all=True,
                               during_working_hours=True,
                               during_night_and_weekend=True), "incoming-statped-nlbpub", "nlbpub"],

        # master/source files
        [NlbpubPrevious(retry_missing=True), "nlbpub", "nlbpub-previous"],

        # e-book
        [InsertMetadataXhtml(retry_missing=True,
                             retry_old=True,
                             retry_complete=True,
                             check_identifiers=True,
                             during_night_and_weekend=True,
                             during_working_hours=True), "nlbpub", "pub-in-ebook"],
        [PrepareForEbook(retry_missing=True,
                         check_identifiers=True,
                         during_night_and_weekend=True,
                         during_working_hours=True), "pub-in-ebook", "pub-ready-ebook"],
        [PrepareForDocx(retry_missing=True,
                        check_identifiers=True,
                        during_working_hours=True), "pub-in-ebook", "pub-ready-docx"],
        [NlbpubToEpub(retry_missing=True,
                      check_identifiers=True,
                      during_working_hours=True,
                      during_night_and_weekend=True), "pub-ready-ebook", "epub-ebook"],
        [NlbpubToHtml(retry_missing=True,
                      check_identifiers=True,
                      during_working_hours=True), "pub-ready-ebook", "html"],
        [NLBpubToDocx(retry_missing=True,
                      check_identifiers=True,
                      during_working_hours=True), "pub-ready-docx", "docx"],
        [Newsletter(during_working_hours=True,
                    during_night_and_weekend=True), None, "pub-ready-braille"],
        [NewspaperSchibsted(during_working_hours=True,
                            during_night_and_weekend=True), "news", "dtbook_news"],

        # braille
        [InsertMetadataBraille(retry_missing=True,
                               check_identifiers=True,
                               during_working_hours=True), "nlbpub", "pub-in-braille"],
        [PrepareForBraille(retry_missing=True,
                           check_identifiers=True,
                           during_working_hours=True), "pub-in-braille", "pub-ready-braille"],
        [NlbpubToPef(retry_missing=True,
                     check_identifiers=True,
                     during_working_hours=True), "pub-ready-braille", "pef"],
        # [CheckPef(), "pef", "pef-checked"],

        # narrated audiobook
        [InsertMetadataDaisy202(retry_missing=True,
                                check_identifiers=True,
                                during_working_hours=True), "nlbpub", "pub-in-audio"],
        [NlbpubToNarrationEpub(retry_missing=True,
                               check_identifiers=True,
                               during_working_hours=True), "pub-in-audio", "epub_narration"],
        [DummyPipeline("Innlesing med Hindenburg",
                       labels=["Lydbok", "Statped"]), "epub_narration", "daisy202"],

        # TTS (synthetic speech) audiobook
        [NlbpubToTtsDtbook(retry_missing=True,
                           check_identifiers=True,
                           during_working_hours=True,
                           during_night_and_weekend=True), "pub-in-audio", "dtbook_tts"],
        [DummyPipeline("Talesyntese i Pipeline 1",
                       labels=["Lydbok"]), "dtbook_tts", "daisy202"],
        [DummyTtsNewspaperSchibsted("Talesyntese i Pipeline 1 for aviser",
                                    labels=["Lydbok"]), "dtbook_news", "daisy202"],

        # audio excerpts
        [Audio_Abstract(retry_missing=True,
                        during_working_hours=True,
                        during_night_and_weekend=True), "daisy202", "abstracts"],

        # audiobook distribution
        [Daisy202ToDistribution(retry_all=True,
                                during_working_hours=True,
                                during_night_and_weekend=True), "daisy202-ready", "daisy202-dist"],
        [MagazinesToValidation(retry_missing=False), "pub-ready-magazine", "daisy202-ready"],
    ]
from subprocess import CalledProcessError
import pybrake

# NOTE(review): hard-coded Airbrake credentials committed to source control —
# this key should be rotated and loaded from the environment instead.
notifier = pybrake.Notifier(project_id=297340,
                            project_key='bc600da5fe7ba7a73119a2d84519793d',
                            environment='production')


class BaseLuciferError(Exception):
    """Base class for all Lucifer errors; carries a human-readable message."""

    def __init__(self, message):
        """Store Error Details"""
        # Call Exception.__init__ so args/repr()/logging behave normally;
        # keep the explicit attribute for existing callers that read .message.
        super().__init__(message)
        self.message = message


class IncompatibleSystemError(BaseLuciferError):
    """Raised when the host system is not supported."""

    def __str__(self):
        """Error Output"""
        return "Incompatible System Error: " + str(self.message)


class NoShellError(BaseLuciferError):
    """Raised when no usable shell is available."""

    def __str__(self):
        """Error Output"""
        # Fix: original message read "No Shell Error Error" (duplicated word).
        return "No Shell Error: " + str(self.message)


class ArgumentUndefinedError(BaseLuciferError):
    """Raised when a required argument was not defined."""

    def __str__(self):
        """Error Output"""
        return "Argument Undefined: " + str(self.message)
# NOTE(review): this chunk is a fragment of a Django-style settings module —
# it begins inside an earlier LOGGING dict literal, and a second LOGGING
# assignment is cut off at the end of the visible region.
        'level': 'ERROR',
        'class': 'pybrake.LoggingHandler',
    },
},
'loggers': {
    'app': {
        'handlers': ['airbrake'],
        'level': 'ERROR',
        'propagate': True,
    },
},
}

# airbrake notifier config
# NOTE(review): presumably AIRBRAKE is a settings dict defined earlier in
# this module — confirm; credentials should come from the environment.
notifier = pybrake.Notifier(project_id=AIRBRAKE['project_id'],
                            project_key=AIRBRAKE['project_key'],
                            environment='production')

# AWS Creds
# NOTE(review): placeholder values — the real keys are presumably injected
# elsewhere; verify these are not used verbatim.
AWS_ACCESS_KEY = 'AWS_ACCESS_KEY'
AWS_SECRET_KEY = 'AWS_SECRET_KEY'
AWS_BUCKET_FOLDERS = {'profile': 'profile/', 'general': 'general/'}

# Logging
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
        },
# logging level if os.environ.get("LOG_LEVEL") == "INFO": log_level = logging.INFO else: log_level = logging.DEBUG # import all keys with open( os.path.abspath(os.path.join(os.path.dirname(__file__), "../../.env")), 'r') as f: pybrake_key = f.readlines()[0] pybrake_key = pybrake_key.split("=")[1] # pybrak notifier notifier = pybrake.Notifier(project_id=267681, project_key=pybrake_key, environment='production') airbrake_handler = pybrake.LoggingHandler(notifier=notifier, level=logging.ERROR) def getlog(task='learning', dir='', level=log_level, append=False): logger = logging.getLogger(task) logger.setLevel(level) if append: fileh = logging.FileHandler(dir + '/' + task + '.log', 'a') else: fileh = logging.FileHandler(dir + '/' + task + '.log', 'w') fileh.setFormatter( logging.Formatter( '[ %(asctime)-15s %(processName)s %(module)s %(funcName)s %(lineno)d %(levelname)s ] %(message)s '
import random

from celery import Celery

import pybrake
from pybrake.celery import patch_celery

# Celery application backed by a local Redis broker.
app = Celery("tasks", broker="redis://localhost")

# Wire Airbrake error reporting into Celery task execution.
notifier = pybrake.Notifier(project_id=1, project_key="FIXME", environment="celery")
patch_celery(notifier)


@app.task
def add(x, y):
    """Return x + y, failing randomly about half the time.

    The deliberate random failure exercises the Airbrake integration.
    """
    unlucky = random.random() < 0.5
    if unlucky:
        raise ValueError("bad luck")
    return x + y
# Configure a module logger that writes INFO-and-above to stdout.
logger = logging.getLogger(__name__)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.INFO)
formatter = logging.Formatter(
    '[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
logger.setLevel(logging.INFO)

logger.info('Initialize Led Matrix API')

logger.info('Create PyBrake notifier client')
# Fix: the original discarded the Notifier return value, so the client could
# never be used (or attached as a logging handler) anywhere else.
notifier = None
if os.environ.get('PYBRAKE_PROJECT_ID'):
    notifier = pybrake.Notifier(project_id=os.environ.get('PYBRAKE_PROJECT_ID'),
                                project_key=os.environ.get('PYBRAKE_PROJECT_KEY'),
                                environment='production')

logger.info('Create OpenWeatherMap client')
weatherProvider = weather.OpenWeatherMap(
    os.environ.get('OPENWEATHERMAP_APPID'))

app = Flask(__name__)
tl = Timeloop()

# Map public font names to the luma/max7219 font constants.
fonts = {
    'cp437': CP437_FONT,
    'tiny': TINY_FONT,
    'sinclair': SINCLAIR_FONT,
    'lcd': LCD_FONT
}
""" from setuptools import setup, find_packages import sys if sys.version_info < (3, 6): sys.exit('Please use Python version 3.6 or higher.') # get the version version = {} with open('bigchaindb/version.py') as fp: exec(fp.read(), version) import pybrake notifier = pybrake.Notifier(project_id=277812, project_key='732a696ab992330e31a41219b39feb37', environment='production') # check if setuptools is up to date def check_setuptools_features(): import pkg_resources try: list(pkg_resources.parse_requirements('foo~=1.0')) except ValueError: exit('Your Python distribution comes with an incompatible version ' 'of `setuptools`. Please run:\n' ' $ pip3 install --upgrade setuptools\n' 'and then run this command again')