def __init__(self, fname):
    QtGui.QWidget.__init__(self)
    self.fname = fname
    layout = QtGui.QVBoxLayout()
    label = QtGui.QLabel("Playback file: %s" % fname)
    self.start_button = QtGui.QPushButton("&Start")
    self.next_button = QtGui.QPushButton("&Next")
    self.continue_button = QtGui.QPushButton("&Continue")
    self.dump_button = QtGui.QPushButton("&Dump Screen Buffer")
    self.clear_button = QtGui.QPushButton("C&lear Screen")
    self.log_textedit = QtGui.QTextEdit()

    def get_log(modulename):
        if type(modulename) == types.ClassType:             # class object
            modulename = modulename.__name__
        elif type(modulename) == types.InstanceType:        # old-style class instance
            modulename = modulename.__class__.__name__
        elif str(type(modulename)).startswith("<class '"):  # new-style class instance
            modulename = modulename.__class__.__name__
        else:
            modulename = str(modulename)                    # don't know...
        logg = log.Log.get_log(modulename)
        logg.remove_handler(log.Log.DEFAULT_HANDLER)
        logg.add_handler(log.FileLogHandler('output.log'))  # TODO remove this...
        logg.add_handler(QWidgetLogHandler(self.log_textedit))
        return logg

    log.get_log = get_log
    self.log = log.get_log(self)
    self.trace = log.get_log("Sequence")

    layout.addWidget(label)
    layout.addWidget(self.start_button)
    layout.addWidget(self.next_button)
    layout.addWidget(self.continue_button)
    layout.addWidget(self.dump_button)
    layout.addWidget(self.clear_button)
    layout.addWidget(self.log_textedit)
    self.setLayout(layout)

    self.start_button.clicked.connect(self.start)
    self.next_button.clicked.connect(self.next_sequence_pressed)
    self.continue_button.clicked.connect(self.continue_sequence)
    self.dump_button.clicked.connect(self.dump_buffer)
    self.clear_button.clicked.connect(self.clear)

    self.next_button.setDisabled(True)
    self.continue_button.setDisabled(True)
    self.dump_button.setDisabled(True)
    self.clear_button.setDisabled(True)

    self.channel = terminal.TerminalChannel()
def evaluate_q_threshold(threshold=settings.PREDICT_THRESHOLD, validation=False):
    '''
    Runs evaluation based on generated probability outputs from file (p-files).
    '''
    log = get_log('Thresholds', echo=True)
    set = 1 if validation else 2
    files = fs.load_splits()[set]
    TP = FP = TN = FN = 0
    for file in files:
        try:
            arr = genfromtxt('p-' + file + '.csv', delimiter=',')
            yhat = [1 if val >= threshold else 0 for val in arr[1]]
            tp, fp, tn, fn = metrics.cm_overlap(arr[0], yhat, arr[2],
                                                settings.OVERLAP_SCORE, settings.SAMPLE_RATE)
            TP += tp
            FP += fp
            TN += tn
            FN += fn
        except Exception as e:
            print(e)
    results = metrics.compute_cm_score(TP, FP, TN, FN)
    log.print('\nThreshold: ' + str(threshold))
    log_results(results, validation=validation, filename='Thresholds')
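A hedged sketch of sweeping several candidate thresholds with this helper; the candidate values are assumptions, not taken from the source.

# Hypothetical threshold sweep on the validation split (values are made up).
for t in (0.1, 0.3, 0.5, 0.7, 0.9):
    evaluate_q_threshold(threshold=t, validation=True)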
def __init__(self, channel, parent=None):
    '''channel should be a TerminalChannel object.'''
    QtGui.QWidget.__init__(self, parent)
    self.log = log.get_log(self)
    self.config = TerminalConfig()
    self.scroll_bar_width = self.config.getint("Display", "scrollbarsize", 14)
    self.screen = ScreenBuffer(parent=self)
    self.scroll_bar = QtGui.QScrollBar(self)
    self.scroll_bar.setCursor(QtCore.Qt.ArrowCursor)
    self.scroll_bar.valueChanged.connect(self.scrollEvent)
    (width, height) = self.screen.get_pixel_size()
    self.resize(width + self.scroll_bar_width, height)
    self.channel = channel
    self.channel.dataReceived.connect(self.write)
    self.channel.endOfFile.connect(self.close)
    self.sequencer = TerminalEscapeSequencer(self.screen, self.channel)
    self.dirty = False
    cursor = self.screen.get_cursor()
    (self.col_size, self.row_size) = cursor.get_font_metrics()
    self.setCursor(QtCore.Qt.IBeamCursor)
    self.clipboard = QtGui.QApplication.clipboard()
    self.clipboard.dataChanged.connect(self.clipboard_changed)
    self.word_select_mode = False
    self.queue = Queue()
    self.worker_thread = SequencerWorker(self.sequencer, self.screen, self.queue)
    self.worker_thread.start()
def get_geo_for_ip():
    qs = IP.objects.filter(deleted=False, handled=False)
    log = get_log('geo-ip')
    for obj in qs:
        try:
            ip = obj.ip
            if ip not in ['127.0.0.1', 'localhost'] and not is_internal_ip(ip):
                log.info("handle: " + ip)
                resp = requests.get(url='http://ip-api.com/json/%s' % (ip))
                data = resp.json()
                if data['status'] != 'fail':
                    obj.longitude = data['lon']
                    obj.latitude = data['lat']
                    obj.country = data['country']
                    obj.city = data['city']
                    obj.isp = data['isp']
                    obj.countryCode = data['countryCode']
                    obj.region = data['region']
                    obj.regionName = data['regionName']
                else:
                    log.info(ip + " is in private net reported by ip-api")
                time.sleep(3)
            else:
                log.info(ip + " is in private net")
            obj.handled = True
            obj.save()
        except Exception as e:
            log.error(e)
def error(message):
    stack = "HappyMac Execution Stack at Error Time:\n%s\n" % "".join(traceback.format_stack()[:-1])
    exception = "HappyMac Exception:\n %s\n" % traceback.format_exc()
    error = "HappyMac Error:\n %s\n%s%s%s%s%s\n%s%sHappyMac Error:\n %s\n" % (
        message,
        get_system_info(),
        get_home_dir_info(),
        get_preferences(),
        get_versions(),
        log.get_log(),
        stack,
        exception,
        message
    )
    path = get_error_file_path()
    try:
        with open(path, "w") as output:
            output.write("HappyMac Error Report - %s\n\n" % datetime.datetime.utcnow())
        os.system("system_profiler SPHardwareDataType >> %s" % path)
        with open(path, "a") as output:
            output.write(error)
        with open(path) as input:
            print(input.read())
    except:
        pass
    log.log(error)
    rumps.notification("HappyMac", "Error: %s. For details see:" % message, path, sound=True)
def compile_epochs(files, save=True):
    '''
    Compiles a single list of all epochs from all the files given. The result can be
    stored in a file for later use in model training. Progress, errors and counts are
    logged for test and evaluation purposes.
    '''
    # initialise log
    log = get_log('Epochs', True)
    log.print('Total files: {0}'.format(len(files)))
    log.printHL()
    # run through the list with try/except in case of errors in single files
    epochs = []
    for i, filename in enumerate(files):
        try:
            X, y = fs.load_csv(filename)
            eps = epochs_from_prep(X, y)
            epochs.extend(eps)
            log.print('{0} created {1} epochs'.format(filename, len(eps)))
            # backup saves if saving is enabled
            if save and i > 0 and i % int(len(files) / 14) == 0:
                epoch.save_epochs(epochs)
                log.printHL()
                log.print('Backup save of {0} epochs'.format(len(epochs)))
                log.printHL()
        except Exception as e:
            log.print('{0} Exception: {1}'.format(filename, str(e)))
    # optionally store the epochs
    if save:
        epoch.save_epochs(epochs)
        log.printHL()
        log.print('Final save of {0} epochs'.format(len(epochs)))
        log.printHL()
    return epochs
def run(self):
    log = get_log("daemon")
    import server
    try:
        server.run()
    except:
        log.exception()
def share_log(share_name):
    share = oscar.get_share(share_name)
    if not share:
        return "Share not found", 404
    category = flask.request.args.get("category") or None
    offset = int(flask.request.args.get("offset") or "0")
    limit = int(flask.request.args.get("limit") or "20")
    return flask.jsonify(log.get_log(share.path, category, offset, limit))
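A hedged client-side sketch of calling this handler; the URL path, host, port and share name are assumptions, only the query parameters (category, offset, limit) come from the code above.

import requests

# Hypothetical request against the share-log endpoint.
resp = requests.get("http://localhost:5000/share/music/log",
                    params={"category": "error", "offset": 0, "limit": 20})
print(resp.json())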
def reliableFiles(files, ai_all5=10.0, overall5=4.0, slewake5=0.0,
                  maskThreshold_all=0.1, maskTreshhold_single=0.05):
    '''
    Given a list of files, this method returns only the reliable files, as determined
    by MESA variable criteria cuts and mask thresholds. Results are logged for testing
    and evaluation purposes.
    '''
    log = get_log('Discard', echo=True)
    datasetCsv = fs.getDataset_csv()

    # determines a list of booleans, one for each criterion
    def isReliable(filename):
        # Target file
        mesaid = int(filename[-4:])
        X, y = fs.load_csv(filename)
        criteria = []
        # MESA variables
        filter = ['ai_all5', 'overall5', 'slewake5', ]
        df = datasetCsv[datasetCsv['mesaid'] == mesaid][filter].iloc[0]
        criteria += [
            df[0] >= ai_all5,   # low ai index (events per hour)
            df[1] >= overall5,  # low overall quality
            df[2] == slewake5,  # poor EEG (no sleep stage / arousal scoring)
        ]
        # Double-check arousals
        criteria += [sum(y) > 0]
        # Mask thresholds
        X, _, _ = wake_removal_endpoints(X, None, None, settings.SAMPLE_RATE)
        masklist, mask = make_masks(X)
        criteria += [sum(m) / len(m) <= maskTreshhold_single for m in masklist]
        criteria += [sum(mask) / len(mask) <= maskThreshold_all]
        return criteria

    # Extract criteria arrays for all files, and filter the file list
    reliable = [isReliable(fn) for fn in files]
    reliableFiles = [files[i] for i, r in enumerate(reliable) if all(r)]

    # Log status
    arr = array(reliable)
    labels = ['ai_all5 ', 'overall5', 'slewake5', 'sum(y)=0', 'mask RR ',
              'mask RPA', 'mask PTT', 'mask PWA', 'mask all']
    log.print('Preprocessed files: {0}'.format(len(files)))
    log.print('Removed files: {0}'.format(len(files) - len(reliableFiles)))
    log.print('Reliable files: {0}'.format(len(reliableFiles)))
    for i, l in enumerate(labels):
        a = list(arr[:, i]).count(False)
        log.print('Removed by {0}: {1}'.format(l, a))
    log.printHL()
    for i, rl in enumerate(reliable):
        if not all(rl):
            log.print(files[i] + ' -- ' + ', '.join([labels[j] for j, r in enumerate(rl) if not r]))
    return reliableFiles
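A hedged sketch of how this filter might sit in the preprocessing pipeline; the surrounding calls mirror other snippets in this collection and the exact chaining is an assumption.

# Hypothetical pipeline: list preprocessed subjects, keep reliable ones, build epochs.
files = fs.getAllSubjectFilenames(preprocessed=True)
good_files = reliableFiles(files)
epochs = compile_epochs(good_files)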
def scan(self, sock):
    global host_name_dec, host_list, host_list_str, host_list_na
    returnedList = blescan_func.parse_events(sock, MAX_SCAN)
    for beacon in returnedList:
        try:
            arr_beacon = beacon.split(',')
            arr_beacon[4] = int(arr_beacon[4]) - 256
            arr_beacon[5] = int(arr_beacon[5]) - 256
            host_name = 'NA'
            beacon_na_dict['hostname'] = host_name
            beacon_na_dict['bssid'] = arr_beacon[0]
            beacon_na_dict['uuid'] = arr_beacon[1]
            beacon_na_dict['major'] = arr_beacon[2]
            beacon_na_dict['minor'] = arr_beacon[3]
            beacon_na_dict['tx_power'] = arr_beacon[4]
            beacon_na_dict['rssi'] = arr_beacon[5]
            host_list_na.append(dict(beacon_na_dict))
        except:
            log.get_log("ScanLogger").error("exception...")
def __init__(self, *args, **kwargs):
    self.__dict__ = self.__shared_state
    self.log = get_log(self)
    self.cfg = get_config()
    if getattr(self, "initialized", None) is None:
        self.running = True
        self.cond = threading.Condition()
        self.events = {}
        self.th = None
        self.initialized = True
def __init__(self, screen, channel):
    self.log = log.get_log(self)
    self.trace = TraceSequence(fall_through=True)  # TODO change fall_through
    self.config = TerminalConfig()
    self.screen = screen
    self.channel = channel
    for attr in self.REQUIRED_ATTRS:
        if not hasattr(self, attr):
            # parenthesise the concatenated message so the % formatting applies to
            # the whole string, not just the trailing "attribute" literal
            raise AttributeError(("Escape sequence %s missing %s "
                                  "attribute") % (self, attr))
def log_results(results, validation=True, filename=None):
    '''
    Takes a results dictionary as input and prints its contents to a log file.
    '''
    if filename is None:
        filename = 'Validation' if validation else 'Evaluation'
    log = get_log(filename, echo=True)
    for k, d in results.items():
        log.print(str(k))
        for key, val in d.items():
            log.print(str(key) + ':' + str(val))
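To make the expected input concrete, a hedged sketch of the nested results dictionary this helper iterates over; the key names are assumptions informed by the compute_cm_score usage elsewhere in this collection.

# Hypothetical results shape: outer keys name metric groups, inner keys name metrics.
example_results = {
    'score': {'sensitivity': 0.91, 'precision': 0.88},
}
log_results(example_results, validation=True)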
def share_log(share_name):
    share = samba.get_share(share_name)
    if not share:
        raise web.NotFound("Share does not exist")
    share_path = samba.share_real_path(share)
    if not os.path.isdir(share_path):
        raise web.NotFound("Directory does not exist")
    category = flask.request.args.get("category") or None
    offset = int(flask.request.args.get("offset") or "0")
    limit = int(flask.request.args.get("limit") or "20")
    return flask.jsonify(log.get_log(share_path, category, offset, limit))
def __init__(self, parent, font_name='Consolas', font_size=11):
    self.log = log.get_log(self)
    self.row = 0
    self.col = 0
    self.parent = parent
    self.set_font(font_name, font_size)
    self.reset_attributes()
    self._cursor_pos_stack = []
    self.replace_mode = True
    if self.parent is not None:
        self.widget = self.parent.get_widget()
def __init__(self, action, minute=universal, hour=universal, day=universal,
             month=universal, daysofweek=universal, args=(), kwargs={}):
    self.action = action
    self.minutes = to_set(minute)
    self.hours = to_set(hour)
    self.days = to_set(day)
    self.months = to_set(month)
    self.daysofweek = to_set(daysofweek)
    self.args = args
    self.kwargs = kwargs
    self.log = get_log(self)
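For reference, a hedged construction example; turn_on_lights and the Event.MONDAY constant mirror the 'away' module later in this collection, and the specific time is made up.

# Hypothetical event: run turn_on_lights at 18:30 on Mondays.
evt = Event(turn_on_lights, hour=18, minute=30, daysofweek=[Event.MONDAY])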
def __init__(self, screen, channel):
    self.log = log.get_log(self)
    self.trace = TraceSequence(fall_through=True)  # TODO change fall_through
    self.screen = screen
    self.channel = channel
    self.__previous_sequence = ""
    self.config = TerminalConfig()
    self.encoding = self.config.get("Sequencer", "encoding", "utf-8")
    self.__sequences = []
    sequences = EscapeSequence.__subclasses__()
    for seq in sequences:
        inst = seq(screen, channel)
        self.__sequences.append(inst)
def hours_of_sleep_files(files):
    '''
    Calculates the hours of sleep for a list of files; the results are logged
    for testing purposes.
    '''
    total = 0.0
    log = get_log("SleepHour", echo=True)
    for file in files:
        X, _ = fs.load_csv(file)
        X, _, _ = wake_removal_endpoints(X, None, None, settings.SAMPLE_RATE)
        t = count_hours_of_sleep(transpose(X)[0])
        log.print(file + ' -- {0} hours'.format(t))
        total += t
    log.printHL()
    log.print('total -- {0} hours'.format(total))
    return total
def __init__(self, width=80, height=24, parent=None):
    self.log = log.get_log(self)
    self.width = width  # in cells, not pixels
    self.height = height
    self.parent = parent
    self.config = TerminalConfig()
    self.font_name = self.config.get("Display", "font", "Consolas")
    self.font_size = self.config.getint("Display", "fontsize", 11)
    self.cursor = TerminalCursor(self, self.font_name, self.font_size)
    self.scrollback = self.config.getint("Display", "scrollback", 100)
    self.base = 0
    self.alternate_active = False
    self.create_buffer()
    self.create_alternate_buffer()
    self.setup_timer_events()
    (self.col_size, self.row_size) = self.cursor.get_font_metrics()
def index():
    my_logger = get_log()
    # return "teste"
    try:
        cursor, cnx_mysql = db.get_db()
        query = "SELECT * FROM devopstestedb.form"
        cursor.execute(query)
        return render_template('form/getcomments.html', cursor=cursor)
    except mysql.connector.Error as err:
        if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
            my_logger.error("Something is wrong with your user name or password")
        elif err.errno == errorcode.ER_BAD_DB_ERROR:
            my_logger.error("Database does not exist")
        else:
            my_logger.error(err.msg)
    finally:
        cnx_mysql.close()
def main():
    try:
        sock = bluez.hci_open_dev(dev_id)
        log.get_log("ScanLogger").info("1.ble thread started")
    except:
        log.get_log("ScanLogger").error("error accessing bluetooth device...")
        sys.exit(1)

    blescan_func.hci_le_set_scan_parameters(sock)
    blescan_func.hci_enable_le_scan(sock)
    ble = BleScanner()

    while True:
        global search_num, host_list, host_list_na, host_list_str, host_list_na_str
        search_num = search_num + 1
        str_search_start = ("-----SCAN #" + str(search_num) +
                            " [hostname: BSSID, UUID, MAJOR, MINOR, TX power, RSSI]-----")
        ble.scan(sock)
        host_list_na_sorted = sorted(
            host_list_na, key=(lambda beacon_na_data: beacon_na_data['bssid']))
        log.get_log("ScanLogger").info(str_search_start)
        for i in range(len(host_list_na)):
            # deduplicate by bssid, keeping the last entry seen for each address
            host_list_na_str = list({
                beacon['bssid']: beacon for beacon in host_list_na_sorted
            }.values())
        for i in range(len(host_list_na_str)):
            host_list_na_values = list(host_list_na_str[i].values())
            log.get_log("ScanLogger").info(host_list_na_values)
        host_list.clear()
        host_list_na.clear()
        time.sleep(1)
def create():
    my_logger = get_log()
    if request.method == 'GET':
        return render_template('form/createcomments.html')
    if request.method == 'POST':
        _nome = request.form['nome']
        _email = request.form['email']
        _comentario = request.form['comentario']
        try:
            cursor, cnx_mysql = db.get_db()
            # note: string formatting is kept from the original; parameterised
            # queries would be safer against SQL injection
            query = "INSERT INTO devopstestedb.form(nome,email,comentario) \
                VALUES('{}','{}','{}')".format(_nome, _email, _comentario)
            cursor.execute(query)
            cnx_mysql.commit()
            cursor, cnx_mysql = db.get_db()
            query = "SELECT * FROM devopstestedb.form"
            cursor.execute(query)
            return render_template('form/getcomments.html', cursor=cursor)
        except mysql.connector.Error as err:  # the error class lives on the module, not the connection
            if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
                my_logger.error("Something is wrong with your user name or password")
            elif err.errno == errorcode.ER_BAD_DB_ERROR:
                my_logger.error("Database does not exist")
            else:
                my_logger.error(err.msg)
        finally:
            cnx_mysql.close()
def validate(model, files, log_results=False, validation=True, return_probabilities=False):
    '''
    Runs performance test on all of the provided files and computes metrics with overlap.
    '''
    log = None
    if log_results:
        filename = 'Validation' if validation else 'Evaluation'
        log = get_log(filename, echo=True)
    TP = FP = TN = FN = 0
    count = len(files)
    for file in files:
        try:
            if return_probabilities:
                validate_file(file, model, settings.OVERLAP_SCORE, settings.SAMPLE_RATE,
                              return_probabilities=return_probabilities)
            else:
                tp, fp, tn, fn = validate_file(file, model, settings.OVERLAP_SCORE,
                                               settings.SAMPLE_RATE)
                TP += tp
                FP += fp
                TN += tn
                FN += fn
                file_score = metrics.compute_cm_score(tp, fp, tn, fn)
                if log is not None:  # only log per-file scores when logging was requested
                    log.print(file + ', '
                              + '{0:.2f}'.format(file_score['score']['sensitivity']) + ', '
                              + '{0:.2f}'.format(file_score['score']['precision']))
        except Exception as e:
            print(e)
    return metrics.compute_cm_score(TP, FP, TN, FN)
def prepAll(force=False):
    '''
    Preprocesses all files properly stored on the drive, either mesa or shhs.
    The amount of time and any errors are logged for testing purposes. Optionally,
    already completed files can be re-processed.
    '''
    log, clock = get_log('Preprocessing', echo=True), stopwatch()
    filenames = fs.getAllSubjectFilenames(preprocessed=False)

    # determine already completed files
    oldFiles = fs.getAllSubjectFilenames(preprocessed=True)
    if not force:
        filenames = [fn for fn in filenames if fn not in oldFiles]
        log.print('Files already completed: {0}'.format(len(oldFiles)))
        log.print('Files remaining: {0}'.format(len(filenames)))
        if (len(oldFiles) > 0):
            log.printHL()
            for fn in oldFiles:
                log.print('{0} already completed'.format(fn))
    else:
        log.print('Files re-preprocessing: {0}'.format(len(oldFiles)))
        log.print('Files remaining: {0}'.format(len(filenames)))
    log.printHL()

    # process each file with a try/except loop in case of errors in single files
    clock.round()
    for i, filename in enumerate(filenames):
        try:
            subject = fs.Subject(filename=filename)
            X, y = preprocess(subject)
            fs.write_csv(filename, X, y)
            log.print('{0} preprocessed in {1}s'.format(filename, clock.round()))
        except Exception as e:
            log.print('{0} Exception: {1}'.format(filename, str(e)))
            clock.round()
    clock.stop()
from haipproxy.client.py_cli import ProxyFetcher
from log import get_log
from requests.exceptions import ReadTimeout, \
    ProxyError, ConnectTimeout, TooManyRedirects, ConnectionError
from urllib3.exceptions import MaxRetryError
import os
from redis import StrictRedis
from db import err

log = get_log('proxy')
err_log = get_log('err_log')
# log_url = get_log('urls')

import requests

args = dict(host='111.231.92.31', port=6379, password='******', db=0)
args1 = dict(host='111.231.92.31', port=6379, password='******', db=1)
args2 = dict(host='111.231.92.31', port=6379, password='******', db=2)

usedConn = StrictRedis(**args1)
badConn = StrictRedis(**args2)
badConn.flushdb()

headers = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'zh-CN,zh;q=0.9',
    'Cache-Control':
from time import sleep
from config import files
import log

today = datetime.datetime.today().strftime("%Y%m%d")
data_folder = os.path.abspath('../data/{0}'.format(today))
if not os.path.exists(data_folder):
    os.makedirs(data_folder)
else:
    import random
    while os.path.exists(data_folder):
        data_folder += "_" + str(random.randint(0, 100))
    os.makedirs(data_folder)

logger = log.get_log(data_folder)
logger.debug('Begin process')

for name in files.keys():
    filename = '{0}.json'.format(name)
    filepath = '{0}/{1}'.format(data_folder, filename)
    logger.debug('Starting download of {0}'.format(name))

    # get first page
    logger.debug('Loading page {0}'.format(1))
    response = urllib.urlopen(files[name].format(1))
    data = json.loads(response.read())
    total_pages = data['totalPages']
import requests
from log import get_log

log = get_log('http')

headers = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'zh-CN,zh;q=0.9',
    'Cache-Control': 'max-age=0',
    # 'Connection': 'keep-alive',
    'Host': 'baike.molbase.cn',
    # 'Sec-Metadata': 'cause="forced", destination="document", target="top-level", site="same-origin"',
    # 'Upgrade-Insecure-Requests': '1',
    'User-Agent': "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36",
}

ips = ""


def get_proxy_ip():
    global ips
    # try:
    #     # resp = requests.get('http://10.9.60.13:8080/get', headers=headers, timeout=1).content.decode()
    #     resp = requests.get('http://123.207.35.36:5010/get/', headers=headers)
def auto_send_email():
    log = get_log('send-email')
    log.info('send email')
    send_email()
def check_timeout_task():
    log = get_log('celery')
    log.info('check timeout task')
    check_timeout()
def sync_congress_task():
    # sync the data of the congress contract from the chain
    log = get_log('celery')
    log.info('sync congress task')
    sync_congress()
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

from log import get_log
from signal import Signal, signal
from sender import Sender, Receiver

"""
This module defines the EventManager class for managing a dynamic group of
signals with forwarding capabilities.
"""

_log = get_log(__name__)


class EventManager(Sender, Receiver):

    quiet = False

    def __init__(self):
        super(EventManager, self).__init__()
        self._events = {}
        self.on_any_event = Signal()

    def notify(self, name, *args, **kw):
        if not self.quiet:
            if name in self._events:
                self._events[name].notify(*args, **kw)
            else:
import time

from cpc_fusion import Web3
from pymongo import MongoClient

from log import get_log
from cpchain_test.config import cfg

REFRESH_INTERVAL = 3

log = get_log('rate-update')

# chain
chain = 'http://{0}:{1}'.format(cfg['chain']['ip'], cfg['chain']['port'])

# mongodb
mongoHost = cfg['mongo']['ip']
port = int(cfg['mongo']['port'])


def update_rate():
    cf = Web3(Web3.HTTPProvider(chain))
    client = MongoClient(host=mongoHost, port=port)
    uname = cfg['mongo']['uname']
    pwd = cfg['mongo']['password']
    db = client['cpchain']
    db.authenticate(uname, pwd)
    rnode_collection = client['cpchain']['rnode']
    proposer_collection = client['cpchain']['proposer']
    block_collection = client['cpchain']['blocks']
    txs_collection = client['cpchain']['txs']
from log import get_log
import subprocess
import time
import os
import re

log = get_log("programs")

xbmc_kill_count = 0


def is_xbmc_running():
    s = subprocess.Popen(["pidof", "xbmc.bin"], stdout=subprocess.PIPE, close_fds=True)
    (out, err) = s.communicate()
    if out:
        return True
    return False


def start_xbmc():
    s = subprocess.Popen(["/usr/sbin/start-xbmc"], close_fds=True)
    s.communicate()
    return is_xbmc_running()


def stop_xbmc():
    global xbmc_kill_count
    s = subprocess.Popen(["pidof", "xbmc.bin"], stdout=subprocess.PIPE, close_fds=True)
    (pid, err) = s.communicate()
    pid = pid.strip()
    if not pid:
        return False
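A hedged usage sketch, not from the source: the helpers above lend themselves to a simple watchdog loop; the 30-second poll interval is an assumption.

# Hypothetical watchdog built on the helpers above.
while True:
    if not is_xbmc_running():
        log.debug("xbmc not running, starting it")
        start_xbmc()
    time.sleep(30)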
def __init__(self):
    QtCore.QObject.__init__(self)
    self.log = log.get_log(self)
import random

import lightbot
from log import get_log
from scheduler import Scheduler, Event
from config import get_config

cfg = get_config()
log = get_log('away')


def turn_off_lights():
    log.info("Turning off lights")
    lightbot.lights_off()


def turn_on_lights():
    log.info("Turning on lights")
    lightbot.lights_on()


def load():
    scheduler = Scheduler()
    # a little randomness...
    scheduler.register('away',
                       Event(turn_on_lights,
                             hour=random.randint(16, 19),
                             minute=random.randint(0, 60),
                             daysofweek=[Event.MONDAY, Event.WEDNESDAY, Event.SUNDAY]),
                       Event(turn_off_lights,
                             hour=random.randint(21, 23),
                             minute=random.randint(0, 60),
                             daysofweek=[Event.MONDAY, Event.WEDNESDAY, Event.SUNDAY]),
from queue import Empty
import time

from cli import get_use_data

headers = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'zh-CN,zh;q=0.9',
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36',
}

log = get_log('molbase')


def parse(msg):
    flag = True
    url = msg['url']
    category = msg['category']
    while flag:
        content = get_data(url).content.decode()
        try:
            dt = {'url': url, 'category': category}
            for func in funcs:
                res = func(content)
                if isinstance(res, dict):
                    dt[func.__name__] = res
                else:
import os
from ConfigParser import SafeConfigParser
from log import get_log

log = get_log("config")

filename = "%s/default.cfg" % os.path.realpath(os.path.dirname(__file__))


# A Borg config class so that all configuration objects are the same
# when configuration values change
class DefaultConfig(SafeConfigParser):

    __shared_state = {}

    def __init__(self):
        self.__dict__ = self.__shared_state
        SafeConfigParser.__init__(self)

    def save(self):
        f = open(filename, 'w')
        try:
            self.write(f)
        except:
            log.exception()
        finally:
            f.close()


def get_config():
    config = DefaultConfig()
    config.read(filename)
    return config
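A minimal sketch of what the Borg comment above implies: two DefaultConfig instances share one underlying state, so an option set through one is visible through the other; the section and option names here are made up.

# Hedged demonstration of the shared-state (Borg) behaviour.
a = get_config()
b = get_config()
if not a.has_section("general"):
    a.add_section("general")
a.set("general", "debug", "true")
assert b.get("general", "debug") == "true"  # both instances share __shared_state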
import bottle
from bottle import request, route, static_file, template

from scheduler import Scheduler
from log import get_log
from config import get_config
import lightbot
import programs

# load all events...
from events import *

# for keyboard events to X
DISPLAY = ":0.0"

log = get_log("server")


@route('/')
def index(action=''):
    return template('main')


@route('/lights', method=['GET', 'POST'])
def lights():
    if not request.forms:
        # GET
        return lightbot.status()
    # POST
    obj = request.forms
    func = getattr(lightbot, "lights_%s" % obj.get('action', ''), None)
    if not func:
        log.debug("action '%s' not found" % obj.get('action', ''))
import time

import lightbot
from log import get_log
from scheduler import Scheduler, Event
from config import get_config

cfg = get_config()
log = get_log('normal')

previous_value = 255


def turn_off_lights(disable_ambient=True):
    log.debug("Turning off lights")
    lightbot.lights_off()
    lightbot.AMBIENT = not disable_ambient


def ambient_lights():
    global previous_value
    count = 0
    if not cfg.getboolean('ambient', 'enabled'):
        log.debug("ambient not enabled...skipping.")
        return
    low_ambient_level = cfg.getint('ambient', 'low')
    max_ambient_level = cfg.getint('ambient', 'max')
    value = lightbot.query_sensor()
    log.debug("ambient_lights enabled=%s current=%s prev=%s" %
              (lightbot.AMBIENT, value, previous_value))
    if lightbot.AMBIENT and \
       value <= low_ambient_level and \
def __init__(self, fall_through=False):
    self.log = log.get_log("Sequence")
    self.fall_through = fall_through
import pytz
from datetime import datetime as dt

from log import get_log

import smtplib
from email.mime.text import MIMEText
from email.header import Header

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cpchain_test.settings')
django.setup()

# import models
from community.models import Email

log = get_log('send-email')

sender = '*****@*****.**'
mail_host = 'smtp-relay.gmail.com'
mail_port = 587


def send_email():
    log.info('send email')
    for item in Email.objects.filter(sent=False):
        try:
            log.info(f'send email, id: {item.id}')
            to = [item.to]
            message = MIMEText(item.content, 'html', 'utf-8')
            message['From'] = Header(sender, 'utf-8')
            message['To'] = Header(item.to, 'utf-8')
from tasks.rate_update import update_rate
from log import get_log
import time
import requests
import json
import os

os.environ['DJANGO_SETTINGS_MODULE'] = 'cpchain_test.settings'

channel_layer = get_channel_layer()

from apps.explorer.views import wshandler
from node_ip.models import IP

log = get_log('celery')

app = Celery()
app.config_from_object('tasks.config')

log.info("start celery worker/beat")


@app.task
def sync_congress_task():
    # sync the data of the congress contract from the chain
    log = get_log('celery')
    log.info('sync congress task')
    sync_congress()


@app.task
import time

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cpchain_test.settings')
django.setup()


def load_key(key):
    with open(key, 'r') as fr:
        return fr.read()


def load_password(password):
    with open(password, 'r') as fr:
        return "".join(fr.readlines()).strip()


log = get_log('check-proposal-timeout')

host = cfg["chain"]["ip"]
port = cfg["chain"]["port"]
address = cfg['community']['proposal']

log.info(f"chain rpc interface: http://{host}:{port}")
log.info(f"congress contract's address is {address}")

cf = Web3(Web3.HTTPProvider(f'http://{host}:{port}'))

key = cfg['community']['keystore']
password = cfg['community']['password_path']
owner = cf.toChecksumAddress(cfg['community']['owner'])
chainID = int(cfg['community']['chainID'])
def sync_proposals_task():
    log = get_log('celery')
    log.info('sync proposals task')
    sync_proposals()
from config import get_config
from log import get_log

# get + set app config
CONFIG = get_config()

# set log
LOG = get_log(__name__)

if __name__ == "__main__":
    LOG.info("App Running")
def chart_update_task():
    log = get_log('celery')
    log.info('update chart task')
    update_chart()
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cpchain_test.settings') django.setup() # import models from community.models import Proposal, ApprovedAddress, VotedAddress from log import get_log STATUS = { 0: 'deposited', 1: 'community congress', 2: 'decision congress', 3: 'timeout' } log = get_log('sync-proposals') host = cfg["chain"]["ip"] port = cfg["chain"]["port"] address = cfg['community']['proposal'] log.info(f"chain rpc interface: http://{host}:{port}") log.info(f'congress constract\'s address is {address}') cf = Web3(Web3.HTTPProvider(f'http://{host}:{port}')) # ProposalABI is the input ABI used to generate the binding from. # abi = "[{\"constant\":true,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"getLockedTime\",\"outputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"getStatus\",\"outputs\":[{\"name\":\"\",\"type\":\"uint8\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[],\"name\":\"enabled\",\"outputs\":[{\"name\":\"\",\"type\":\"bool\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"getLockedAmount\",\"outputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"withdraw\",\"outputs\":[],\"payable\":true,\"stateMutability\":\"payable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"getPeriod\",\"outputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"length\",\"type\":\"uint16\"}],\"name\":\"setIDLength\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"checkTimeout\",\"outputs\":[],\"payable\":true,\"stateMutability\":\"payable\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[],\"name\":\"enableContract\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"threshold\",\"type\":\"uint256\"}],\"name\":\"setAmountThreshold\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[],\"name\":\"refundAll\",\"outputs\":[],\"payable\":true,\"stateMutability\":\"payable\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"threshold\",\"type\":\"uint16\"}],\"name\":\"setVoteThreshold\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[],\"name\":\"maxPeriod\",\"outputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"getOwner\",\"outputs\":[{\"name\":\"\",\"type\":\"address\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"getApprovalCnt\",\"outputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"
constant\":false,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"approval\",\"outputs\":[],\"payable\":true,\"stateMutability\":\"payable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[],\"name\":\"voteThreshold\",\"outputs\":[{\"name\":\"\",\"type\":\"uint16\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"getVotedAddress\",\"outputs\":[{\"name\":\"\",\"type\":\"address[]\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[],\"name\":\"getCongressNum\",\"outputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"getApprovedAddress\",\"outputs\":[{\"name\":\"\",\"type\":\"address[]\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[],\"name\":\"approvalThreshold\",\"outputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"name\":\"proposalsIDList\",\"outputs\":[{\"name\":\"\",\"type\":\"string\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[],\"name\":\"congress\",\"outputs\":[{\"name\":\"\",\"type\":\"address\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[],\"name\":\"disableContract\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[],\"name\":\"amountThreshold\",\"outputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[],\"name\":\"idLength\",\"outputs\":[{\"name\":\"\",\"type\":\"uint16\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"threshold\",\"type\":\"uint256\"}],\"name\":\"setApprovalThreshold\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"period\",\"type\":\"uint256\"}],\"name\":\"setMaxPeriod\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[],\"name\":\"getProposalsCnt\",\"outputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"},{\"name\":\"period\",\"type\":\"uint256\"}],\"name\":\"submit\",\"outputs\":[],\"payable\":true,\"stateMutability\":\"payable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"getVoteCnt\",\"outputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"vote\",\"outputs\":[],\"payable\":true,\"stateMutability\":\"payable\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"id\",\"type\":\"string\"}],\"name\":\"refund\",\"outputs\":[],\"payable\":true,\"stateMutability\":\"payable\",\"type\":\"function\"},{\"inputs\":[{\"name\":\"_congressAddr\",\"type\":\"address\"}],\"payable\":false,\"stateMutability\":\"nonp
ayable\",\"type\":\"constructor\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"name\":\"who\",\"type\":\"address\"},{\"indexed\":false,\"name\":\"id\",\"type\":\"string\"},{\"indexed\":false,\"name\":\"period\",\"type\":\"uint256\"},{\"indexed\":false,\"name\":\"lockedAmount\",\"type\":\"uint256\"},{\"indexed\":false,\"name\":\"lockedTime\",\"type\":\"uint256\"}],\"name\":\"SubmitProposal\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"name\":\"who\",\"type\":\"address\"},{\"indexed\":false,\"name\":\"id\",\"type\":\"string\"}],\"name\":\"ApprovalProposal\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"name\":\"who\",\"type\":\"address\"},{\"indexed\":false,\"name\":\"id\",\"type\":\"string\"}],\"name\":\"VoteProposal\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"name\":\"who\",\"type\":\"address\"},{\"indexed\":false,\"name\":\"id\",\"type\":\"string\"}],\"name\":\"WithdrawMoney\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"name\":\"id\",\"type\":\"string\"}],\"name\":\"proposalTimeout\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"name\":\"id\",\"type\":\"string\"}],\"name\":\"ownerRefund\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[],\"name\":\"ownerRefundAll\",\"type\":\"event\"}]" abi = cfg['community']['proposalABI'][1:-1].replace('\\', '') instance = cf.cpc.contract(abi=abi, address=address)
"""Cache2 is a wrapper around a subset of Redis, it provides support for complex keys as indexes for redis lists and key groups for other Redis types""" import os import logging import util import log LOG = log.get_log(__name__, logging.INFO) LIST = 'list' HASH = 'hashset' DELIM = ':' WILDCARD = '*' PID = str(os.getpid()) redis = None # these compound (key_group + identifier) keys occupy sorted lists, and are used as indexes for other sets of data # identifier is an arbitrary list which will be separated by DELIM # NOTE: in order for complex keys to truly work as indexes, the ordered set of values owned by them need to be used where these keys are currently being used def str_clean4key(input): return util.str_clean4comp(input, DELIM, WILDCARD, '-', '_', '.') def key_name(key_group, *identifier): """get a compound key name for a given identifier and a specified record type""" keyname = DELIM.join([key_group, identifier]) if isinstance(identifier, basestring) or isinstance(identifier, unicode) \
def __init__(self, parent, sock, term):
    QtCore.QThread.__init__(self)
    self.log = log.get_log(self)
    self.parent = parent
    self.sock = sock
    self.term = term
""" db.py """ from cpchain_test.config import cfg from log import get_log from pymongo import DESCENDING, MongoClient RNODE_REWARD_META = 'rnode_reward_meta' RNODE_REWARD_TOTEL = 'rnode_reward_total' RNODE_REWARD_HISTORY = 'rnode_reward_history' log = get_log('app') mongo = cfg['mongo']['ip'] port = int(cfg['mongo']['port']) CLIENT = MongoClient(host=mongo, port=port, maxPoolSize=200) uname = cfg['mongo']['uname'] pwd = cfg['mongo']['password'] db = CLIENT['cpchain'] db.authenticate(uname, pwd) cpchain_db = CLIENT['cpchain'] block_collection = CLIENT['cpchain']['blocks'] txs_collection = CLIENT['cpchain']['txs']
import os
import threading

from serial import Serial, SerialException

from log import get_log
import config

log = get_log("lightbot")
cfg = config.get_config()

serial = None
for i in range(0, 5):
    try:
        device = '/dev/ttyUSB%s' % i
        log.debug("Trying device %s" % device)
        serial = Serial(device, timeout=2)
        log.debug("Using device %s" % device)
        break
    except SerialException:
        continue

serial_lock = threading.Lock()

AMBIENT = True
LIVING_ROOM = cfg.get('living_room', 'house')
LIGHTS = cfg.get('living_room', 'unit')

X10_ON = '|'
X10_OFF = 'O'
X10_BRIGHT = '+'
X10_DIM = '-'