def main():
    logger.setupLogger()
    logger.info("Starting application")

    db = database.Database()
    db.createConnection()

    try:
        initGpio()
        while True:
            tagId = nfc.readNfc()
            employeeId = db.getEmployeeIdFromTagId(tagId)
            if employeeId is not None:
                logger.info("Employee authorized")
                db.addNewTimestampOfEmployeeId(employeeId)
                blinkLed(GREEN_LED)
                time.sleep(SYS_PAUSE_TIME)
            else:
                logger.info("Employee NOT authorized")
                blinkLed(RED_LED)
                time.sleep(SYS_PAUSE_TIME)
    except KeyboardInterrupt:
        logger.info("Closing application")
        GPIO.cleanup()
        db.closeConnection()
def main(): """ Setup Logging and Intialize Algo Trader """ setupLogger() console().info("Started Second30 Trader v{}".format(config.VERSION)) app = Second30Trader() try: console().info("Connecting to TWS API at {}:{}. Client ID: {}".format( config.HOST, config.PORT, config.CLIENTID)) app.connect(config.HOST, config.PORT, clientId=config.CLIENTID) if app.isConnected(): console().info("Connection Successful. Server Version: {}".format( app.serverVersion())) app.run() else: console().info("Connection Failed") except: raise
import logging

import http3
from discord import Embed, Colour
from discord.ext import commands

from auth import CHANNELID

client = http3.AsyncClient()
maxResults = "50"
part = "snippet"
ENDDICT = {"Videos": "EgIQAQ%253D%253D", "Playlists": "EgIQAw%253D%253D"}

LOGGER = logging.getLogger(__name__)
# _logger is the project-local logging helper (its import is omitted in this excerpt)
_logger.setupLogger(LOGGER)


class YoutubeCog(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        self.PARAMS = {"part": part, "maxResults": maxResults}

    @staticmethod
    async def getRequest(requestType, params):
        url = f"https://www.googleapis.com/youtube/v3/{requestType}?"
        r = await client.get(url, params=params)
        return r.json()

    @staticmethod
    async def returnShorten(url):
        r = await client.get(url=f"http://tinyurl.com/api-create.php?url={url}")
import sys
import gettext

import logger
import plugins
from feed import get_feed
from filter import Filter
from settings import Settings

# setup gettext
gettext.textdomain("feedfilter")

# parse commandline arguments and settings file
if len(sys.argv) != 2:
    print("no feed given")
    sys.exit(-1)

settings = Settings()
settings.read_argv()

# Start Logger
logger.setupLogger(settings)

# read and parse filter files
wordfilter = Filter(settings)
wordfilter.read_filterlist("./blackwordlist.txt")
wordfilter.read_filterlist(settings.sitename)

# Parse feed
feed = get_feed(settings.feedfile)

# For now we use the language without any regional variants
lang = feed.lang.split("-")[0]

loaded_plugins = []
for plugin in plugins.plugins:
    loaded_plugins.append(plugin(settings.url))
#!/usr/bin/env python
import time
import logger
import config
import server_socket
import api_server
import set_config

if __name__ == '__main__':
    logger.setupLogger()
    logger.info("========== SERVER STARTED ==========")

    # Dummy call to initialize the config object
    logger.info("Starting server with configuration:\n" + config.getFormattedString())

    logger.info("Starting Device Server")
    server = server_socket.Server(9000)
    server.daemon = True
    server.start()
    logger.info("Device Server started")

    logger.info("Starting API Server")
    api = api_server.APIServer(server, 9999)
    api.daemon = True
    api.start()

    #api_server.server = server
    #api_server.start()
    logger.info("API server stopped")
variables = global_vars.get_ansible_vars(YAMLvarFile)
logDirectory = variables['scriptHomeDir'] + '/' + variables['scriptsDir'] + '/' + variables['logsDir']

# Execute a class object to make log dir
loadLogDirectory()
print 'Created Log Directory : {}'.format(logDirectory)

# Execute a class object to make the download dir
loadLogDirectory(os.getcwd() + download_target)

# Define logging module, File Handler & Stream Handler
# Define Log file name for later use
execLogger = 'rti-upload-download' + time.strftime('-%Y-%m-%d-%Hh-%Mm-%Ss-%Z') + '.log'
execLog = logger.setupLogger('Artifactory Upload-Download', logDirectory + '/' + execLogger)
execLog.debug('Object - Successfully Loaded Ansible Vars')

# Creating class object
upload_download = UploadDownload(rti_ip_add, rti_port, rpm_local)

# Local YAML Read
data = upload_download.get_data(package_list)
# data = upload_download.parse_yaml()
if not data:
    data = upload_download.yaml_setup_update(
        package_list, setup=True, os_versions_list=os_versions_list)

# Remote JSON Read
remote_json = upload_download.read_remote_json(git_acct, repo_name, file_path, commit, token,
from setproctitle import setproctitle
from flask import Flask
from logger import setupLogger
import json
import os
from datetime import datetime, timedelta
import threading
import time

APP_NAME = "temperatureApp"
setproctitle(APP_NAME)
app = Flask(__name__)

LOG_LEVEL = "DEBUG"
log = setupLogger(LOG_LEVEL, APP_NAME)


def crossdomain(origin=None, methods=None, headers=None, max_age=21600,
                attach_to_all=True, automatic_options=True):
    if methods is not None:
        methods = ', '.join(sorted(x.upper() for x in methods))
    if headers is not None and not isinstance(headers, basestring):
        headers = ', '.join(x.upper() for x in headers)
    if not isinstance(origin, basestring):
        origin = ', '.join(origin)
    if isinstance(max_age, timedelta):
        max_age = max_age.total_seconds()

    def get_methods():
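# Hypothetical usage of the crossdomain decorator above, once its (truncated)
# definition is complete. This is only an illustration: the route name and
# payload are assumptions, not part of the original application.
@app.route("/temperature")
@crossdomain(origin="*")
def currentTemperature():
    # The decorator adds the CORS headers to this JSON response
    return json.dumps({"celsius": 21.5})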
# print(getpass.getuser())
# stream.write(getpass.getuser())
# stream.close()
from datetime import datetime
import time
import os
import shlex
from subprocess import run
from subprocess import CompletedProcess

import schedule

import jsonfiles
import rdbcommands
import logger
import task

log = logger.setupLogger("commands")


def runJobs():
    # Run any due jobs, then report how long to wait before the next one
    schedule.run_pending()
    if schedule.next_run() is None:
        return 60 * 60
    delta = schedule.next_run() - datetime.now()
    secondsUntilNextRun = min(delta.total_seconds(), 60 * 60)  # poll at least every hour
    return secondsUntilNextRun


def executeCommand(cmd):
    args = shlex.split(cmd)
    completedProcess = run(args)
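# A hypothetical driver for the helpers above (an assumption, not part of the
# original module): run any pending scheduled jobs, then sleep for however
# long runJobs() says remains until the next run.
def mainLoop():
    while True:
        time.sleep(runJobs())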
# -*- coding: cp1252 -*-
#
##################################################################################
#
#   This program is part of OSRFramework. You can redistribute it and/or modify
#   it under the terms of the GNU General Public License as published by
#   the Free Software Foundation, either version 3 of the License, or
#   (at your option) any later version.
#
#   This program is distributed in the hope that it will be useful,
#   but WITHOUT ANY WARRANTY; without even the implied warranty of
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#   GNU General Public License for more details.
#
#   You should have received a copy of the GNU General Public License
#   along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##################################################################################

import logger

# Calling the logger when being imported
logger.setupLogger(loggerName="osrframework.utils")
ansible = arguments.ansible

# Load variables from ansible vars
variables = global_vars.get_ansible_vars(YAMLvarFile)
logDirectory = variables['scriptHomeDir'] + '/' + variables['scriptsDir'] + '/' + variables['logsDir']

# Execute a class object to make log dir
loadLogDirectory()
print 'Created Log Directory : {}'.format(logDirectory)

# Define logging module, File Handler & Stream Handler
# Define Log file name for later use
execLogger = 'confgtn-changes-log' + time.strftime('-%Y-%m-%d-%Hh-%Mm-%Ss-%Z') + '.log'
execLog = logger.setupLogger('Service Restart', logDirectory + '/' + execLogger)
execLog.debug('Object - Successfully Loaded Ansible Vars')

# Creating class object
enable_services = enableServices()

for i in services:
    if start_services:
        enable_services.service_demon(initDir=variables['initDir'],
                                      systmdDir=variables['systemdDir'],
                                      RHEL=RHEL,
                                      pattern=i)
    if i == 'jenkins':
        enable_services.jenkins_url(
            variables['myPublicIP'],
            variables['repositories']['jenkins']['pwd'])
password = arguments.password

# Load variables from ansible vars
variables = global_vars.get_ansible_vars(YAMLvarFile)
logDirectory = variables['scriptHomeDir'] + '/' + variables['scriptsDir'] + '/' + variables['logsDir']

# Execute a class object to make log dir
loadLogDirectory()
print 'Created Log Directory : {}'.format(logDirectory)

# Define logging module, File Handler & Stream Handler
# Define Log file name for later use
execLogger = 'jen-plugin-inst-log' + time.strftime('-%Y-%m-%d-%Hh-%Mm-%Ss-%Z') + '.log'
execLog = logger.setupLogger('Jenkins Plugin Installation Steps', logDirectory + '/' + execLogger)
execLog.debug('Object - Successfully Loaded Ansible Vars')

# Creating class object
install_plugins = installPlugins(
    variables['myPublicIP'],
    variables['repositories']['jenkins']['user'],
    variables['repositories']['jenkins']['password'],
)

# Execution
if install:
    install_plugins.install_plugins(
        variables['repositories']['jenkins']['plugins'])
if list:
    install_plugins.list_plugins()
#!/usr/bin/python

import os
import re
import sys
import time
import errno
import xattr
import random
import string
import logger
import tarfile

datsiz = 0
timr = 0

logger = logger.setupLogger("loglost")


def os_rd(src, size):
    # Read `size` bytes from `src` and track the total amount of data read
    global datsiz
    fd = os.open(src, os.O_RDONLY)
    data = os.read(fd, size)
    os.close(fd)
    datsiz = datsiz + size
    return data


def os_wr(dest, data):
    global timr
    st = time.time()
    '''
RHEL = arguments.RHEL
YAMLvarFile = arguments.YAMLvarFile
repos = arguments.repos

# Load variables from ansible vars
variables = global_vars.get_ansible_vars(YAMLvarFile)
logDirectory = variables['scriptHomeDir'] + '/' + variables['scriptsDir'] + '/' + variables['logsDir']

# Execute a class object to make log dir
loadLogDirectory()
print 'Created Log Directory : {}'.format(logDirectory)

# Define logging module, File Handler & Stream Handler
# Define Log file name for later use
execLogger = 'cp-local-remote-log' + time.strftime('-%Y-%m-%d-%Hh-%Mm-%Ss-%Z') + '.log'
execLog = logger.setupLogger('Copy Local to Remote', logDirectory + '/' + execLogger)
execLog.debug('Object - Successfully Loaded Ansible Vars')

# Creating class object
copy_extract = CopyExtract()
copy_extract.copy_remote(
    allDirs=[variables['srcPythonDir'], variables['srcRepoDir'], variables['srcServicesDir']],
    source='../',
    destination=variables['scriptHomeDir'] + '/' + variables['scriptsDir'] + '/'
)
copy_extract.copy_destination(
    allDirs=[variables['srcPythonDir'], variables['srcRepoDir'], variables['srcServicesDir']],
    source=variables['scriptHomeDir'] + '/' + variables['scriptsDir'] + '/',
from logger import info, setupLogger
from loadData import readData
from formatData import formatData
from splitData import getTrainTestData, isSplitDataReady, loadTrainTestData, getTruthData
from trainModel import trainModel, testModel, getModelFileName, getTrainedModel, saveTrainedModel
from performance.summary import getModelPerformance
from visualization.summary import plotResults
from targetInfo import isClassification, isRegression

### Functions needed for interactive use
#import regression, classification
from models import getModels, getModel
from splitData import getTrainData

setupLogger()


def loadConfig():
    configname = setFile("/Users/tgadfort/Documents/pymva", "config.yaml")
    info("Importing [{0}]".format(configname), ind=0)
    config = get(configname)
    return config


def createData(config):
    if isSplitDataReady(config):
        ### Load split data
        X_train, X_test, X_valid, y_train, y_test, y_valid = loadTrainTestData(config)
    else:
#!/usr/bin/python

import os
import re
import sys
import time
import errno
import random
import string
import logger
import tarfile

datsiz = 0
timr = 0

logger = logger.setupLogger("loglost")


def os_rd(src, size):
    global datsiz
    fd = os.open(src, os.O_RDONLY)
    data = os.read(fd, size)
    os.close(fd)
    datsiz = datsiz + size
    return data


def os_wr(dest, data):
    global timr
    st = time.time()
    '''
    Removing the option os.O_EXCL, since it's useful in many cases, like
yum_remove = arguments.yum_remove

# Load variables from ansible vars
variables = global_vars.get_ansible_vars(YAMLvarFile)
logDirectory = variables['scriptHomeDir'] + '/' + variables['scriptsDir'] + '/' + variables['logsDir']

# Execute a class object to make log dir
loadLogDirectory()
print 'Created Log Directory : {0}'.format(logDirectory)

# Define logging module, File Handler & Stream Handler
# Define Log file name for later use
execLogger = 'yum-operations-logs' + time.strftime('-%Y-%m-%d-%Hh-%Mm-%Ss-%Z') + '.log'
execLog = logger.setupLogger('YUM Operation Steps', logDirectory + '/' + execLogger)
execLog.debug('Object - Successfully Loaded Ansible Vars')

# Creating class object
yum_operations_all = yum_operations(auto_install=auto_install,
                                    skip_broken=skip_broken,
                                    gpg_verify=gpg_verify,
                                    refresh_db=refresh_db,
                                    enable_repo=enable_repo,
                                    disable_repo=disable_repo)
# yum_operations_all = yum_operations(auto_install=True, disable_repo='jenkins')

if list_version_all:
    print json.dumps(yum_operations_all.list_pkgs(), sort_keys=True, indent=4,
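# ---------------------------------------------------------------------------
# Every example above calls a project-local setupLogger() helper, and each
# project defines its own. The sketch below is only an illustration of what
# such a helper might look like using the standard logging module; the name,
# signature, format string, and defaults are assumptions, not any particular
# project's API.
import logging


def setupLogger(name="app", logfile=None, level=logging.DEBUG):
    """Return a logger that writes to the console and, optionally, to a file."""
    log = logging.getLogger(name)
    log.setLevel(level)
    formatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s")

    # Stream handler for console output
    streamHandler = logging.StreamHandler()
    streamHandler.setFormatter(formatter)
    log.addHandler(streamHandler)

    # Optional file handler, mirroring the logDirectory + '/' + execLogger usage above
    if logfile is not None:
        fileHandler = logging.FileHandler(logfile)
        fileHandler.setFormatter(formatter)
        log.addHandler(fileHandler)

    return log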