def main(argv, stdin=None, stdout=None, stderr=None):
    """Entry point: parse options, run the conversion, map the outcome to an exit code."""
    from options import parse_opts

    # Symbolic outcome -> process exit code.
    status_codes = {"extracted": 0, "no-extract": 1, "error": 2, "not-set": -1}

    options = parse_opts(argv, stdin=stdin, stdout=stdout, stderr=stderr)
    if not options:
        return status_codes["error"]
    init_logging(options)

    # Do the conversion.
    try:
        ab2cb(options)
    except KeyboardInterrupt:
        writerr(options, "\nInterrupted")
    except Exception as e:
        writerr(options, "ab2cb exception", exception=e)
        options.exit_status = "error"

    # If nothing set an explicit status, derive it from whether we extracted.
    if options.exit_status == "not-set":
        options.exit_status = "extracted" if options.did_extract else "no-extract"
    return status_codes[options.exit_status]
def create_app():
    """Build and return the WSGI application, wrapped for proxy deployment."""
    application = simple_app()
    # Trust X-Forwarded-* headers set by the fronting reverse proxy.
    application.wsgi_app = ProxyFix(application.wsgi_app)
    api.init_app(application)
    init_logging(application)
    return application
def main():
    """Train an actor-critic agent on the configured environment, then demo it."""
    args = get_argparser().parse_args()
    init_logging('logs')

    training_env = make_env(args.env, args.seed,
                            num_envs=args.num_envs,
                            num_processes=args.num_processes)
    agent = ActorCritic(training_env.observation_space,
                        training_env.action_space, args)
    train(agent, training_env, args, max_reward=args.max_reward)

    # Fresh single-env, single-process environment for a rendered demo run.
    demo_env = make_env(args.env, args.seed, num_envs=1, num_processes=1)
    make_fun(agent, demo_env, render=True)
def main():
    """Collect facts from every datasource and feed them to the plugin modules."""
    init_logging(10)

    # Loading also validates that all required properties are present.
    loaded_sources = modules.load_modules('datasources')

    # Order the datasources so each one's dependencies are satisfied first.
    ordered_sources = order_dependencies(loaded_sources)

    # Build the facts object from the ordered datasources.
    facts = Facts(ordered_sources)

    # Plugins consume the data gathered by the datasources.
    modules.run_modules(modules.load_modules('plugins'), facts)
def __init__(self):
    """Bot startup: write pid file, log in, load persisted state, and create
    the queue/stream machinery plus the events the worker threads use."""
    # Honor the system locale (affects locale-dependent formatting).
    locale.setlocale(locale.LC_ALL, '')
    # Record our PID so external tooling can locate this process.
    with open("bot.pid", 'w') as f:
        f.write(str(os.getpid()))
    init_logging()
    status.init()
    self.login()
    # Persisted record of comments already replied to.
    self.replied_to = replied_to.replied_t("save/replied_to.json")
    logging.log(logger.DEBUG_ALL, self.replied_to.dict)
    self.maintain_list = maintain_list_t(self, "save/active_comments.json")
    stat_parsing.init()
    item.init()
    # '-force' on the command line marks maintained comments for re-editing.
    if '-force' in sys.argv:
        self.maintain_list.flag_for_edits(sys.argv)
    # Init backlog state. Stream threads will toggle these bools when they
    # have finished resolving their backlogging, allowing this main thread
    # to know when it's ok to status update.
    self.backlog = {
        'comments': True,
        'submissions': True,
    }
    self.reply_queue = reply_handler_t(self)
    self.stream_manager = stream_manager_t(self)
    # initialize threading event, which will let us pause execution in this
    # thread whenever we want, and allow a stream subthread to signal to
    # resume execution
    self.stream_event = threading.Event()
    # similarly, make an acm event that main thread can use to signal the
    # ACM thread to go
    self.acm_event = threading.Event()
    # Optional memory-leak tracking, enabled via config.
    if config.debug_memory:
        self.mem_track = tracker.SummaryTracker()
# # threaded example credit; https://forum.derivative.ca/t/python-threaded-tcp-socket-server-example/12002/5 ######################################################################################################################## import logging from threading import Thread import paho.mqtt.client as mqtt from pysolar.solar import get_altitude from logger import init_logging from packet_parser import PacketParser from prediction_manager import PredictionManager from plotting_predictions import PredictionPlotter from file_saver import data_dump_location, html_render_location init_logging() REQUIRED_DEVICE_ID_TO_TRACK = "icspace25_ttnv2_abp" class ThreadedMQTTLogger(Thread): def __init__(self, APPID, PSW): self.pm = PredictionManager() self.pp = PredictionPlotter() self.mqttc = mqtt.Client() # Assign event callbacks self.mqttc.on_connect = self.on_connect self.mqttc.on_message = self.on_message self.mqttc.on_subscribe = self.on_subscribe self.mqttc.on_log = self.on_log
import re

from gi.repository import GExiv2
from os import walk
from os.path import isdir, join, splitext

from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option

from storage.models import IMAGE_TYPES
from logger import init_logging

logger = init_logging(__name__)


def get_exiv2(fn):
    """Return a GExiv2.Metadata object for *fn*, or None if it cannot be read.

    GExiv2 raises GLib.Error, which cannot be imported/caught directly here,
    so any exception is treated as "no metadata", logged, and swallowed.
    """
    try:
        img_exiv2 = GExiv2.Metadata(fn)
    except Exception as e:
        msg = "GExiv2 exception on {0}, ignoring, e={1}".format(
            fn, e)
        # Logger.warn() is a deprecated alias; use warning() instead.
        logger.warning(msg)
        img_exiv2 = None
    return img_exiv2


class Command(BaseCommand):
    args = ''
# Behave test runner: configures logging, then launches the selected feature.
from behave.__main__ import main as behave_main

import logger

# Test-site credentials used by the features:
# https://acme-test.uipath.com/account/login
# [email protected]
# 123456

if __name__ == '__main__':
    # Config logger
    logger.init_logging()

    # Run example feature
    # behave_main("features/Example.feature")
    # behave_main("features/Outline.feature -f json.pretty -o test.json --no-summary")

    # Run outline feature {-n "<scenario_name>"}
    # behave_main("features/Outline.feature -f json.pretty -o test.json --no-summary")

    # Run multiple features
    # behave_main("features/Outline.feature features/Example.feature -f json.pretty -o test.json --no-summary")

    # Run selenium features (JSON results written to test.json)
    behave_main("features/Selenium.feature -f json.pretty -o test.json --no-summary")

    # Run step data scenario -- multiple scenarios run {-n "<scenario_name>" -n "<scenario_name>"}
    # behave_main("features/Outline.feature -n Example1 -n Example2 -f json.pretty -o test.json --no-summary")

    # Run feature by tags
    # behave_main("features/Outline.feature --tags=@tag -f json.pretty -o test.json --no-summary")
def user_logger(log_name='USER_LOGS', log_directory='/home/tejas/complain'):
    """Return the user-activity logger object.

    Args:
        log_name: logical name of the log; the default preserves the
            previous hard-coded behavior.
        log_directory: directory the log files are written to; previously
            hard-coded, now overridable for testing/deployment.
    """
    return init_logging(log_name=log_name, log_directory=log_directory)
def requests_logger(log_name='API_REQUESTS_LOGS',
                    log_directory='/home/tejas/complain'):
    """Return the API-requests logger object.

    Args:
        log_name: logical name of the log; the default preserves the
            previous hard-coded behavior.
        log_directory: directory the log files are written to; previously
            hard-coded, now overridable for testing/deployment.
    """
    return init_logging(log_name=log_name, log_directory=log_directory)
def main():
    """Publish a podcast episode: draw covers, upload media, post to the site,
    and optionally render/upload a YouTube video."""
    # Initialize logger
    logger.init_logging(0)
    log = logging.getLogger(__name__)
    # NOTE(review): delimiter()/topic() are not stdlib Logger methods —
    # presumably installed by logger.init_logging(); confirm.
    with open(args.description_file, "r") as f:
        description = f.read()
    podcast = Podcast(args.title, description, args.mp3, args.img, args.announce)
    # Generate podcast covers
    log.delimiter("=" * screen_width)
    log.topic("Рисуем обложки")
    podcast.create_covers()
    log.info(" Готово.")
    # Upload covers to file-hosting
    log.delimiter("-" * screen_width)
    log.topic("Загружаем изображение выпуска на fs")
    podcast.img_upload()
    log.info("\n Готово.")
    log.delimiter("-" * screen_width)
    log.topic("Загружаем обложку выпуска на fs")
    podcast.cover_upload()
    log.info("\n Готово.")
    log.delimiter("-" * screen_width)
    log.topic("Загружаем обложку выпуска для RSS на fs")
    podcast.rss_cover_upload()
    log.info("\n Готово.")
    # Announce-only runs skip the mp3 upload and site post.
    if not args.announce:
        # Upload mp3 to file-hosting
        log.delimiter("-" * screen_width)
        log.topic("Загружаем mp3 на fs")
        podcast.mp3_upload()
        log.info("\n Готово.")
        # Post podcast to site
        log.delimiter("-" * screen_width)
        log.topic("Публикуем пост на сайте")
        podcast.publish_to_site()
        log.info("\n Готово.")
    # Render video by ffmpeg
    if args.need_video:
        log.delimiter("-" * screen_width)
        log.topic("Рендерим видео")
        podcast.render()
        log.info(" Готово.")
        # Upload video to youtube in private mode, add to playlist
        log.delimiter("-" * screen_width)
        log.topic("Загружаем видео на Youtube")
        podcast.yt_upload()
        log.info(" Готово.")
    log.info(podcast.nice_view)
# NOTE(review): argp is an argparse.ArgumentParser created above this chunk.
argp.add_argument('-r', '--root', help="Root to mount at mount point")
argp.add_argument('-l', '--log_file', default=None, help="Logfile to use.")
argp.add_argument('-d', '--log_db', default=None,
                  help="Log to an sqlite DB instead")
argp.add_argument('-a', '--log_all', action="store_true",
                  help="Log everything (DO NOT USE)")
# NOTE(review): with action='append', user-supplied -c values are appended to
# this default list rather than replacing it (standard argparse behavior) —
# confirm that is intended.
argp.add_argument('-c', '--call_log', default=['write', 'read'],
                  action='append',
                  help="Use to log only these calls (read, write, etc)")
argp.add_argument('--log_hash', action="store_true",
                  help="Store a hash of read and write buffers")
argp.add_argument('--log_bytes', action="store_true",
                  help="Store the full bytes of r/w buffers")
argp.add_argument('--config', default='config.json',
                  help="Path to a config file")

args = argp.parse_args()

# Configure logging and the injector from the parsed flags, then run.
init_logging(args)
init_injector(args)
main(args)
def sendWelcomeEmail(userId):
    """Send the account-created welcome email to the given user.

    Looks up the user's email via myApp and sends through the configured
    mail object.
    """
    userEmail = myApp.getUserEmail(userId)
    msg = Message(
        'ZPriddy - Alexa Support',
        sender='*****@*****.**',
        recipients=[userEmail])
    # Typos fixed: "conformation" -> "confirmation", "Comming" -> "Coming".
    msg.body = '''
  Welcome to ZPriddy Alexa SmartThings! This is a confirmation that your account has been created and is linked to your SmartThings account!

  Coming Soon: Better Support! :)
  '''
    mail.send(msg)


def run_echopy_app():
    """Start the echopy TCP app with address reuse enabled."""
    # NOTE(review): SocketServer is the Python 2 module name; on Python 3
    # this must be "socketserver".
    import SocketServer
    # SocketServer.BaseServer.handle_error = close_stream
    SocketServer.ThreadingTCPServer.allow_reuse_address = True
    echopy_app.run(app)


if __name__ == "__main__":
    # Initialize persistence, logging, and app state before serving.
    st.smartThingsMongoDBInit()
    nest.nestDBInit()
    logger.init_logging()
    myApp.data_init()
    nestApp.data_init()
    run_echopy_app()
def initialize_logging():
    """Initialize the shared logger, then hand its logger object to cologger."""
    logger.init_logging()
    # cologger is given the logger object configured by init_logging() above.
    cologger.set_logger(logger.logger)
import socket
import os
import json
import unittest
import sys
import _thread
import logging
import time
import random

# Make the project root importable before pulling in project modules.
sys.path.append(os.path.dirname(os.path.abspath(__file__ + '/..')))
from pubsub.publisher import Publisher

# The test fixtures are expected to be resolved relative to tests/.
os.chdir(os.getcwd() + '/tests')
from logger import init_logging

log = init_logging('noc-netmode')


def wait_and_send(self, sock):
    """Accept one incoming connection on *sock* and send it test flow data.

    NOTE(review): the body continues beyond this chunk — the message dict
    below is truncated here.
    """
    log.info('Waiting for incoming connection')
    connection, client_address = sock.accept()
    try:
        # range(1, 2) yields a single iteration.
        for _ in range(1, 2):
            # rand_ip = ".".join(map(str, (random.randint(0, 255) for _ in range(4))))
            rand_ip = "147.102.13.21"
            logging.info(rand_ip)
            message = {
                'timestamp': '2016-06-08T22:29:16.250407+0300',
                'dest_port': 53,
                'flow_id': '35430576640',
                'vlan': '',