Example #1
import os
import shutil

# notify and APP_FOLDER are provided by the surrounding application
def extract_files(dirname):
	for root, dirs, files in os.walk(dirname, True, None):
		remove = [".background",".Trashes",".DS_Store","Applications"]

		for idir in dirs:
			if idir in remove:
				continue
			dname, ext = os.path.splitext(idir)
			if (ext == '.app'):
				path = os.path.join(root, idir)
				try:
					shutil.copytree(path, os.path.join(APP_FOLDER, idir), symlinks=True)
					notify.notify("Success!", "%s was installed!" % dname, "Enjoy!", sound=True, delay=5)
				except Exception as e:
					print e
					notify.notify("Oooops", "Something went wrong", "%s" % e, sound=True,delay=5)
				return



		for lfile in files:
			if lfile in remove:
				continue
			fname, ext = os.path.splitext(lfile)
			if not fname.startswith('.') and ext == '.dmg':
				print 'DMG FOUND!! install dmg!'
				mount_and_install(os.path.join('tmp', lfile))
				break
			elif ext == '.pkg':
				print 'PKG file found. OMG OGM'
				break
		else:
			continue
Example #2
    def start(self):
        notify('GitBox Start', 'monitoring %s' % self.path )
        self.observer.start()
        qsize = self.queue.qsize()
        lastchange = time.time()
        while True:
            if qsize != self.queue.qsize():
                lastchange = time.time()

            if time.time()-lastchange >= 10 and self.queue.qsize() > 0:
                files = self.get_changes()
                notify('GitBox Upload', 'send new and changed files in %s' % self.path)
                cmd = 'git add .'
                process = subprocess.Popen(cmd.split(' '), cwd=self.path)
                process.communicate()
            
                cmd = 'git commit -am auto-commit'
                process = subprocess.Popen(cmd.split(' '), cwd=self.path)
                process.communicate()
                
                cmd = 'git push'
                process = subprocess.Popen(cmd.split(' '), cwd=self.path)
                process.communicate()
                
            qsize = self.queue.qsize()
            time.sleep(1)
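
The start() loop above is a quiescence debounce: the add/commit/push sequence fires only once the event queue has stopped growing for ten seconds. A minimal standalone sketch of the same pattern (the drain step and the callback name are our additions, not GitBox's actual API):

import time

def run_when_quiet(queue, action, quiet_secs=10):
    """Call `action` once `queue` has stopped growing for `quiet_secs`."""
    last_size = queue.qsize()
    last_change = time.time()
    while True:
        size = queue.qsize()
        if size != last_size:
            last_change = time.time()  # new activity resets the timer
        if size > 0 and time.time() - last_change >= quiet_secs:
            action()  # e.g. the git add / commit / push sequence above
            while not queue.empty():  # drain the batch that was just handled
                queue.get_nowait()
        last_size = size
        time.sleep(1)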
Example #3
def initialise(config_file):
    """Initialise configuration module"""

    global cfg, required_options
    try:
        cfg = ConfigParser.ConfigParser()
        cfg.readfp(open(config_file))

        #Check for required section
        if (not cfg.has_section('dbAlerter')):
            raise ConfigParser.NoSectionError('dbAlerter')

        #Initialise configuration defaults
        for option, value in default_options.items():
            if (not cfg.has_option('dbAlerter', option)):
                cfg.set('dbAlerter', option, value)

        #Check for required options
        for option in required_options:
            if (not cfg.has_option('dbAlerter', option)):
                raise ConfigParser.NoOptionError(option, 'dbAlerter')
    except IOError:
        notify.notify("Error", 'Failed to open config file: ' + config_file, 'Failed to open config file: ' + config_file)
        sys.exit(1)
    except ConfigParser.NoSectionError:
        sys.exit(1)
    except ConfigParser.NoOptionError:
        sys.exit(1)
Example #4
def loop():
  devices = []
  while(True):
    out = runner.parse(runner.execute())
    outputstr = ""

    for i in out:
      try:
        devices.index(i)
      except ValueError:
        #this means device just came online
        devices += [i]
        outputstr += str(i[0])+" came online\n"

    for i in devices:
      try:
        out.index(i)
      except ValueError:
        #device just left
        devices.remove(i)
        outputstr += str(i[0])+" left\n"

    if(outputstr!=""):
      notify.notify(summary = "Device(s) Changed:",body=outputstr,timeout=60000)
    time.sleep(SLEEPTIME)
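
A side note on loop() above: it detects arrivals and departures by probing list.index() inside try/except. The same diff reads more directly with sets, assuming the tuples produced by runner.parse() are hashable; a sketch:

def diff_devices(previous, current):
    """Return (came_online, went_offline) between two scans."""
    prev, curr = set(previous), set(current)
    return curr - prev, prev - curr

# inside the polling loop this would read:
#   online, offline = diff_devices(devices, out)
#   devices = list(out)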
Example #5
def main():
    """Main section"""
    args = arguments().parse_args()
    options = "-azxhvP --delete --force --ignore-errors --stats"
    command = "rsync {2} {3} {0} {1}/".format(
        args.source,
        args.backup,
        options,
        " ".join("--exclude={0}".format(i) for i in args.exclude.split(","))
    )
    print command

    log = Logger()
    url = "http://joedicastro.com"
    head = "Backup of {0} into {1}".format(args.source, args.backup)
    log.header(url, head)

    log.time('Start Backup')
    if args.notify:
        notify('Rsync Backup', 'Starting backup of {0} into {1}'.
                               format(args.source, args.backup), 'info')

    rsync = Popen(command.split(), stdout=PIPE, stderr=PIPE)
    out, errors = rsync.stdout.read(), rsync.stderr.read()
    log.list('Rsync output', out)
    if errors:
        log.list('Errors', errors)

    log.time('End Backup')
    if args.notify:
        notify('Rsync Backup', 'Backup ended', 'ok')

    if args.send:
        log.send('Rsync backup')
    log.write(True)
Example #7
def get (section, option):
    """Return option from config file"""

    global cfg

    try:
        return cfg.get(section, option)
    except ConfigParser.NoOptionError:
        notify.notify('Error','Missing option in dbAlerter config file: ' + option, 'Option missing from dbAlerter config: ' + option + "\n\nPlease fix and restart dbAlerter daemon.")
        time.sleep(5)
        sys.exit(1)
Example #8
def confirm(request):
    order_id = request.matchdict['order']
    if order_id in ORDERS:
        
        order = ORDERS.pop(order_id)
        
        # notify on new order
        notify(order)
        
        return render_to_response('pt/confirm.pt', { "order" : order }, request)
    else:
        return HTTPFound(location = "/")
Example #9
def check(stats=False):
    """Perform MySQL checks.

    Keyword arguments:
    stats -- produce statistics report post check

    """

    global db, statistics
    try:
        db.ping()
        #OS Checks
        if (checkos.supported):
            check_cpu_usage()
            check_disk_usage()
        #MySQL Checks
        update_variables()
        check_status()
        check_auto_increment()
        check_anonymous_accounts()
        check_empty_passwords()
        check_functionality()
        check_error_log()
        check_process_list()
        check_slow_query_log()
        check_slave_status()
        #Produce statistics report?
        if (stats):
            statistics_report()
    except MySQLdb.Error, (error,description):
        if error == 2006:
            notify.notify('Error','(' + str(error) + ') ' + description, 'Error (' + str(error) + ') - ' + description)
            retry_count=0
            while 1:
                try:
                    #Create connection to database
                    db=MySQLdb.connect(host=config.get('dbAlerter', 'mysql_hostname'), port=int(config.get('dbAlerter', 'mysql_port')), user=config.get('dbAlerter', 'mysql_username'), passwd=config.get('dbAlerter', 'mysql_password'))
                    #Update MySQL pid
                    variables['PID'] = int(file(variables['PID_FILE'],'r').read().strip())
                    notify.notify('Info', 'MySQL Server Back Online', 'MySQL Server Back Online')
                    statistics['INFO'] += 1
                    break
                except MySQLdb.Error, (error,description):
                    if (error == 2003 or error == 2002):
                        if (retry_count == 5):
                            notify.log_notify('Error', '(' + str(error) + ') ' + description)
                            retry_count=0
                        else:
                            retry_count+=1
                        time.sleep(int(config.get('dbAlerter','check_interval')))
Example #10
def main():
    '''
    Watch for changes in files that match the pattern in a directory.
    Default dir is '.' and default pattern is '*.py'.
    Whenever a change to any matched file in directory happens, peon runs
    the command specified or nosetests by default
    '''
    parser = optparse.OptionParser()
    parser.add_option('-d', '--dir', default='.', dest='directory',
                      help='the directory peon will watch for changes')
    parser.add_option('-p', '--pattern', default='*.py', dest='pattern',
                      help='the glob pattern to watch for changes. '\
                            '(default is "*.py")')
    parser.add_option('--no-reset', default=True, dest='reset', action="store_false",
                      help='do not clear the screen between runs. '\
                            '(default is True)')
    options, args = parser.parse_args()
    directory = options.directory
    pattern = options.pattern
    reset = options.reset
    command = ' '.join(args) or 'nosetests'
    is_build_broken = False

    try:
        while True:
            if something_has_changed(directory, pattern):
                if reset:
                    clear_screen()
                status = os.system(command)
                if status != 0:
                    is_build_broken = True
                    notify(APP_NAME,
                            "Broken build",
                            "Your command of '%s' returned exit"\
                            "code '%s'. Please verify the console output for"\
                            "more info." % (command, status),
                            "stop.png",
                            urgency=Urgency.critical)
                elif is_build_broken:
                    is_build_broken = False
                    notify(APP_NAME,
                           "Build fixed",
                           "Your build with command '%s' IS FIXED!" % command,
                           "tick.png")

            time.sleep(1)
    except KeyboardInterrupt:
        return
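
One caveat about the watcher above: on Unix, os.system() returns the raw wait status rather than the exit code, so a command exiting with code 1 shows up as status 256. The truthiness test for a broken build still works, but if the exact code matters, subprocess is more direct; a sketch:

import subprocess

def run(command):
    """Run `command` through the shell and return its real exit code."""
    return subprocess.call(command, shell=True)

# status = run('nosetests')  # 0 on success, the command's exit code otherwise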
Example #11
def check_functionality():
    """Check basic functionality"""

    global db, statistics
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("""CREATE DATABASE IF NOT EXISTS dbAlerter""")
    cursor.execute("""CREATE TABLE IF NOT EXISTS dbAlerter.insertcheck (ID int)""")
    cursor.execute("""INSERT INTO dbAlerter.insertcheck VALUES (112233)""")
    cursor.execute("""SELECT * FROM dbAlerter.insertcheck""")
    cursor.execute("""DROP TABLE dbAlerter.insertcheck""")
    cursor.execute("""DROP DATABASE dbAlerter""")
    cursor.execute("""SHOW ERRORS""")
    row = cursor.fetchone ()
    if row:
        notify.notify(row["Level"],  ' (' + str(row["Code"]) + ') - ' + row["Message"], 'Error (' + str(row["Code"]) + ') - ' + row["Message"])
        statistics['ERROR'] += 1
    cursor.close()
Example #12
def check_status():
    """Check server statistics"""

    global db, global_status, statistics, warning_state
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("""SHOW GLOBAL STATUS""")
    rows = cursor.fetchall ()
    for row in rows:
        #Check Open File Usage
        if (row['Variable_name'] == 'Open_files'):
            if (int(row['Value']) > statistics['MAX_OPEN_FILES']):
                statistics['MAX_OPEN_FILES'] = int(row['Value'])
            connpct = int(((100 / float(variables['OPEN_FILES_LIMIT'])) * float(row['Value'])))
            if (connpct > int(config.get('dbAlerter','mysql_open_files_threshold'))):
                notify.stateful_notify(True, warning_state, 'OPEN_FILES', 'Warning', 'Open file usage crossed ' + config.get('dbAlerter','mysql_open_files_threshold') + '% threshold', 'Open file usage crossed ' + config.get('dbAlerter','mysql_open_files_threshold') + '% threshold and is currently ' + str(connpct) + '%')
                statistics['WARNING'] += 1
            else:
                notify.stateful_notify(False, warning_state, 'OPEN_FILES', 'Info', 'Open file usage fell below ' + config.get('dbAlerter','mysql_open_files_threshold') + '% threshold', 'Open file usage fell below ' + config.get('dbAlerter','mysql_open_files_threshold') + '% threshold and is currently ' + str(connpct) + '%')

        #Check Current Connection Usage
        if (row['Variable_name'] == 'Threads_connected'):
            if (int(row['Value']) > statistics['MAXCONN']):
                statistics['MAXCONN'] = int(row['Value'])
            connpct = int(((100 / float(variables['MAX_CONNECTIONS'])) * float(row['Value'])))
            if (connpct > int(config.get('dbAlerter','mysql_connection_usage_threshold'))):
                notify.stateful_notify(True, warning_state, 'CONNECTIONS', 'Warning', 'Connection usage crossed ' + config.get('dbAlerter','mysql_connection_usage_threshold') + '% threshold', 'Connection usage crossed ' + config.get('dbAlerter','mysql_connection_usage_threshold') + '% threshold and is currently ' + str(connpct) + "%")
                statistics['WARNING'] += 1
            else:
                notify.stateful_notify(False, warning_state, 'CONNECTIONS', 'Info', 'Connection usage fell below ' + config.get('dbAlerter','mysql_connection_usage_threshold') + '% threshold', 'Connection usage fell below ' + config.get('dbAlerter','mysql_connection_usage_threshold') + '% threshold and is currently ' + str(connpct) + '%')

        #Check Slow Queries
        if (row['Variable_name'] == 'Slow_queries'):
            slowqs = (int(row['Value']) - global_status['SLOW_QUERIES'])
            if (slowqs > 5):
                notify.notify('Warning', str(slowqs) + " Slow Queries during last " + config.get('dbAlerter','check_interval') + " seconds.", str(slowqs) + " Slow Queries during last " + config.get('dbAlerter','check_interval') + " seconds.")
                statistics['WARNING'] += 1
            global_status['SLOW_QUERIES'] = int(row['Value'])
            statistics['SLOWQ'] += slowqs

        #Server uptime
        if (row['Variable_name'] == 'Uptime'):
            global_status['UPTIME'] = int(row['Value'])

    cursor.close()
Example #13
 def __init__(self):
     modesupport.__init__(self)
     self.nick = "<UNKNOWN>"
     self.name = ""
     self.channels = casedict()
     self.notifies = notify()
     #
     # the users dict. can be used to store/track user related info
     self.users = casedict() 
     self.state = server.STATE_NOTCONNECTED
Example #14
def connect():
    """Connect to MySQL server"""

    try:
        #Create connection to database
        db=MySQLdb.connect(host=config.get('dbAlerter', 'mysql_hostname'), port=int(config.get('dbAlerter', 'mysql_port')), user=config.get('dbAlerter', 'mysql_username'), passwd=config.get('dbAlerter', 'mysql_password'))
    except MySQLdb.Error, (error,description):
        #Access denied error
        if (error==1045):
            notify.notify('Error','(' + str(error) + ') - ' + description, 'Error (' + str(error) + ') - ' + description + "\n\n\nDid you remember to grant the correct privileges?\n\nGRANT PROCESS, SELECT, REPLICATION CLIENT, SHOW DATABASES, SUPER ON *.* TO  'mysqluser'@'localhost' IDENTIFIED BY 'mysqluser_password';\n\nGRANT CREATE, INSERT, DELETE, DROP ON dbAlerter.* TO 'mysqluser'@'localhost' IDENTIFIED BY 'mysqluser_password';")
            notify.log_notify('Shutdown')
            sys.exit(1)
        #No database selected error
        elif (error==1046):
            pass
        else:
            notify.notify('Error','(' + str(error) + ') - ' + description, 'Error (' + str(error) + ') - ' + description + "\n\ndbAlerter Shutdown")
            notify.log_notify('Shutdown')
            sys.exit(1)
Example #15
def check_error_log():
    """Check MySQL Error Log"""

    re_error = r'^(\d\d\d\d\d\d \d\d:\d\d:\d\d) (\[ERROR\]) (.*)$'

    global last_error, statistics, variables
    try:
        log = open(variables['LOG_ERROR'], 'r')
        for line in log:
            error_pattern = re.compile(re_error).match(line)
            if error_pattern:
                if (time.strptime(error_pattern.group(1),"%y%m%d %H:%M:%S") > last_error):
                    last_error=time.strptime(error_pattern.group(1), "%y%m%d %H:%M:%S")
                    notify.notify('Error', error_pattern.group(3), line)
                    statistics['ERROR'] += 1
        log.close()
    except IOError, ioe:
        notify.notify("Error", ioe.str + ' : ' + ioe.filename + "\n")
        statistics['ERROR'] += 1
Example #16
    def send(author, to, subject, content, do_notify=True, extra=None):
        author_key = author.key
        to_keys = set(map(lambda t: t.key, to))

        conversation = Conversation.get_or_create(to_keys.union({author_key}), subject)

        message = Message(author_key=author_key, to=conversation.key, content=content)
        mk = message.put()

        conversation.messages.append(message.key)

        if extra:
            conversation.extra = extra

        ck = conversation.put()

        logging.info('Sent message from %s to %s (subject: %s)', author_key.get().display_name,
                     ', '.join([_.display_name for _ in conversation.participants]), conversation.readable_subject)

        if do_notify:
            notify.notify(conversation, message, author)

        return mk, ck
Example #17
def check_slow_query_log():
    """Check slow query log - if enabled"""

    re_slow = r'^(# Time:) (\d\d\d\d\d\d \d\d:\d\d:\d\d)$'
  
    global db, last_slow_query, statistics, variables
    if (variables['SLOW_QUERY_LOG'] == "ON"):
        slowquery = ''
        if (variables['LOG_OUTPUT'] == "FILE"):
            #File based logging
            try:
                slowquerylog=open(variables['SLOW_QUERY_LOG_FILE'], 'r')
                for line in slowquerylog:
                    slowpattern = re.compile(re_slow).match(line)
                    if slowpattern:
                        if (time.strptime(slowpattern.group(2),"%y%m%d %H:%M:%S") > last_slow_query):
                            last_slow_query=time.strptime(slowpattern.group(2), "%y%m%d %H:%M:%S")
                            slowquery += line
                            continue
                    if (slowquery != ''):
                        slowquery += line
                if (slowquery != ''):
                    notify.notify('Info','Slow Query Encountered',slowquery)
                    statistics['INFO'] += 1
                slowquerylog.close()
            except IOError, ioe:
                notify.notify("Error", ioe.str + ' : ' + ioe.filename, ioe.str + ' : ' + ioe.filename)
                statistics['ERROR'] += 1

        #Table based logging
        if (variables['LOG_OUTPUT'] == "TABLE"):
            cursor=db.cursor(MySQLdb.cursors.DictCursor)
            cursor.execute("""SELECT * FROM mysql.slow_log WHERE start_time > '""" + time.strftime("%Y-%m-%d %H:%M:%S", last_slow_query) + "' ORDER BY start_time ASC LIMIT 100;")
            rows = cursor.fetchall ()
            for row in rows:
                slowquery += '# Time: ' + str(row['start_time']) + "\n# User@Host: " + row['user_host'] + "\n# Query_time: " + str(row['query_time']) + "\n" + row['sql_text'] + "\n\n"
                if (time.strptime(str(row['start_time']),"%Y-%m-%d %H:%M:%S") > last_slow_query):
                    last_slow_query=time.strptime(str(row['start_time']), "%Y-%m-%d %H:%M:%S")
            if (slowquery != ''):
                notify.notify('Info','Slow Query Encountered',slowquery)
                statistics['INFO'] += 1
            cursor.close()
Example #18
def processDepositAddressRequests():
    global walletrpc, accountIsLocked

    dbda = database.DepositAddresses(coinname)
    depositMasterAddress = config["deposit-master"]
    contractAddress = config["contract-address"]

    if "deposit-master-balance" in config["alert"]:
        minimumBalance = config["alert"]["deposit-master-balance"]
        if minimumBalance is not None:
            currentBalance = int(
                walletrpc.eth_getBalance(depositMasterAddress, "latest"),
                16) / 1E18
            if currentBalance < minimumBalance:
                notify.notify(reason="alarm",
                              message="Low deposit master account balance",
                              data={
                                  "coin": coinname.upper(),
                                  "address": depositMasterAddress,
                                  "minimum-balance": minimumBalance,
                                  "current-balance": currentBalance
                              })

    pendingTransactions = []
    for userid in dbda.listPendingRequests(100):
        useridAsHex = hex(userid)[2:].rjust(64, '0')

        address = walletrpc.eth_call(
            {
                "from": depositMasterAddress,
                "to": contractAddress,
                "data": "0x877806af" + useridAsHex
            }, "latest")
        if address != "0x" + 64 * "0":
            address = "0x" + address[-40:]
            dbda.storeAddress(userid, address)
        else:
            if accountIsLocked:
                walletrpc.personal_unlockAccount(depositMasterAddress,
                                                 config["password"])
                accountIsLocked = False

            txhash = walletrpc.eth_sendTransaction({
                "from": depositMasterAddress,
                "to": contractAddress,
                "data": "0x32331feb" + useridAsHex,
                "gas": "0x493e0"
            })
            pendingTransactions.append(txhash)

    for txhash in pendingTransactions:
        while 1:
            receipt = walletrpc.eth_getTransactionReceipt(txhash)
            if receipt is not None:
                break
            time.sleep(1)

        for logentry in receipt["logs"]:
            if len(logentry["topics"]) == 1 and logentry["topics"][
                    0] == "0xd3c75d3774b298f1efe8351f0635db8123b649572a5b810e96f5b97e11f43031":
                userid = int(logentry["data"][-72:-64], 16)
                address = "0x" + logentry["data"][-40:]
                dbda.storeAddress(userid, address)
                break

    for userid, address in dbda.listUnnotifiedRequests(100):
        print("Notify {0} deposit address {1} for user {2}".format(
            coinname.upper(), address, userid))
        if notify.notify(reason="address",
                         coin=coinname,
                         userid=userid,
                         address=address):
            dbda.markAsNotified(userid)
            print("> Accepted!")
        else:
            print("> Rejected!")
Example #19
import codecs
import json

import requests
from bs4 import BeautifulSoup
from notify import notify

# track url
page_url = "http://cn163.net/archives/7698/"
# output file location
file = "/Users/memosa/Desktop/process.json"

process = open('process.json', 'r', encoding = 'utf-8')
series = json.load(process)
process.close()
# get page
page = requests.get(page_url).content.decode('utf-8')
index = BeautifulSoup(page, "html.parser")

for x in series:
    # if not tracked
    if 'chinese' not in series[x]:
        link = index.find("a", text=series[x]['name'])
        if link and link['href']:
            series[x]['chinese'] = link['href']
            notify(
                title = 'NOTICE',
                subtitle = '',
                message = "%s 更新了!" %(series[x]['name'])
            )

process = codecs.open(file, 'w+', 'utf-8')
process.write(json.dumps(series, ensure_ascii=False, sort_keys=True, indent=4))
process.close()
Example #20
def main(first_exec=False):
    """Main section"""

    # The path to monitor for changes in directory sizes. By default, if
    # none is given, the home directory is used.
    args = arguments().parse_args()
    mon_pth = args.path

    # Ignore all directories whose size difference is below these percentage
    # or absolute-value thresholds. These are optional; set to zero to
    # override them.
    thld_pct = 20  # In percentage of difference in size for a directory
    thld_sz = 10.486E6  # In bytes of absolute value of directory size diff.

    # Prepare the log
    log = logger.Logger()
    url = "http://joedicastro.com"
    head = ("Changes in size of directories for {0} on {1}".format(
        mon_pth, platform.node()))
    log.header(url, head)
    log.time("START TIME")
    notify("Directory Size Monitor", "Start to check", "info")

    # Load the last dictionary of directories/sizes if exists
    try:
        with open('.dir_sizes.pkl', 'rb') as input_file:
            bfr_dir = pickle.load(input_file)
    except (EOFError, IOError, pickle.PickleError):
        bfr_dir = {}
        first_exec = True

    # Get the current dictionary of directories/sizes
    crr_dir = {}
    for path, dirs, files in os.walk(mon_pth):
        for directory in dirs:
            dir_path = os.path.join(path, directory)
            dir_size = get_size_fast(dir_path)
            crr_dir[dir_path] = dir_size

    # First, Save the current dirs/sizes
    with open(".dir_sizes.pkl", "wb") as output_file:
        pickle.dump(crr_dir, output_file)

    # Create the list depending the status of directories
    deleted = [d for d in bfr_dir if d not in crr_dir]
    added = [d for d in crr_dir if d not in bfr_dir]
    changed = [d for d in crr_dir if d in bfr_dir if crr_dir[d] != bfr_dir[d]]

    log.list("Deleted directories", list4log(bfr_dir, mon_pth, deleted))
    log.list("New directories", list4log(crr_dir, mon_pth, added))
    log.list("Changed directories",
             diff4log(bfr_dir, crr_dir, mon_pth, changed, thld_pct, thld_sz))

    # If thresholds are nonzero, then report the values
    if thld_pct or thld_sz:
        tsz = best_unit_size(thld_sz)
        log.list("Threshold Values", [
            "The directories whose size differences are less than any of"
            " these values are ignored:", "",
            "Percentage: {0:6} %".format(thld_pct),
            "Size:       {0:6.2f} {1}".format(tsz['s'], tsz['u'])
        ])

    # Show some statistics for the analyzed path
    mon_pth_sz = best_unit_size(get_size_fast(mon_pth))
    log.list("{0} Statistics".format(mon_pth), [
        "{0:8} directories".format(len(crr_dir)), "{0:8.2f} {1}".format(
            mon_pth_sz['s'], mon_pth_sz['u'])
    ])
    log.time("END TIME")
    notify("Directory Size Monitor", "Finished", "Ok")
    if not first_exec:
        log.send("Changes in size of directories")
        log.write()
Example #21
def statistics_report():
    """MySQL server statistics"""

    global auto_increment_state, db, global_status, security_state, statistics, variables
    stats_report = ''
    cursor = db.cursor(MySQLdb.cursors.DictCursor)

    #Auto increment threshold
    if (len(auto_increment_state) > 0):
        stats_report += "=== Auto Incrememnt Threshold Crossed ===\n"
        for key in auto_increment_state.keys():
            stats_report += ' ' + key + "\n"

    #Anonymous Accounts
    if (security_state['ANONACCOUNT'] == 1):
        stats_report += "=== Anonymous Accounts Detected ===\n"
        cursor.execute("""SELECT User, Host FROM mysql.user WHERE User=''""")
        rows = cursor.fetchall ()
        if (cursor.rowcount > 0):
            for row in rows:
                stats_report += " '" + row['User'] + "'@'" + row['Host'] + "'\n"

    #Empty passwords
    if (security_state['EMPTYPASS'] == 1):
        stats_report += "=== Accounts Without Passwords Detected ===\n"
        cursor.execute("""SELECT User, Host FROM mysql.user WHERE Password=''""")
        rows = cursor.fetchall ()
        if (cursor.rowcount > 0):
            for row in rows:
                stats_report += " '" + row['User'] + "'@'" + row['Host'] + "'\n"

    if (stats_report != ''):
        stats_report = "== Outstanding Issues ==\n" + stats_report + "\n"

    #Server Information
    stats_report += "== Server Information ==\n"
    #Uptime
    stats_report += 'Uptime: ' + format_uptime(global_status['UPTIME']) + "\n"
    #MySQL Version
    stats_report += 'MySQL Version: ' + variables['VERSION'] + ' (' + variables['VERSION_COMMENT'] + ")\n"
    #MySQL Server ID
    stats_report += 'Server ID: ' +  str(variables['SERVERID']) + "\n"
    #Base Directory
    stats_report += 'Basedir: ' +  variables['BASEDIR'] + "\n"
    #Data Directory
    stats_report += 'Datadir: ' + variables['DATADIR'] + "\n"
    #Plugin Directory
    stats_report += 'Plugindir: ' + variables['PLUGIN_DIR'] + "\n"
    #Tmp Directory
    stats_report += 'Tmpdir: ' + variables['TMPDIR'] + "\n"
    #Error Log
    stats_report += 'Error Log: ' +  variables['LOG_ERROR'] + "\n"
    #Slow Query Log
    stats_report += 'Slow Query Log: ' +  variables['SLOW_QUERY_LOG_FILE']
    if (variables['SLOW_QUERY_LOG'] == "OFF"):
        stats_report += " (Slow Query Logging Disabled)\n"
    else:
        stats_report += "\n"
    stats_report += "\n"

    #Server Statistics
    stats_report += "== Server Statistics ==\n"
    #Max Connections
    stats_report += 'Max Connections Encountered: ' + str(statistics['MAXCONN']) + "\n"
    #Max Connections (Since server start)
    stats_report += 'Max Connections (Since server start): ' + str(global_status['MAX_USED_CONNECTIONS']) + "\n"
    #Max Open Files
    stats_report += 'Max Open Files Encountered: ' + str(statistics['MAX_OPEN_FILES']) + "\n"
    #Total Info
    stats_report += 'Info Encountered: ' + str(statistics['INFO']) + "\n"
    #Total Warnings
    stats_report += 'Warnings Encountered: ' + str(statistics['WARNING']) + "\n"
    #Total Errors
    stats_report += 'Errors Encountered: ' + str(statistics['ERROR']) + "\n"
    #Slow Queries
    stats_report += 'Slow Queries Encountered: ' + str(statistics['SLOWQ'])
    if (variables['SLOW_QUERY_LOG'] == "OFF"):
        stats_report += " (Slow Query Logging Disabled)\n"
    else:
        stats_report += "\n"
    #Slow Queries (Since server start)
    stats_report += 'Slow Queries (Since server start): ' + str(global_status['SLOW_QUERIES'])
    if (variables['SLOW_QUERY_LOG'] == "OFF"):
        stats_report += " (Slow Query Logging Disabled)\n"
    else:
        stats_report += "\n"
    stats_report += "\n"

    stats_report += "== Metadata Information ==\n"
    #Schemata Count
    cursor.execute("""SELECT count(*) AS SCOUNT FROM information_schema.SCHEMATA""")
    schemas = cursor.fetchall ()
    for schema in schemas:
        stats_report += 'Schemata: ' + str(schema['SCOUNT']) + "\n"
    #Table Count
    cursor.execute("""SELECT count(*) AS TCOUNT FROM information_schema.TABLES""")
    tables = cursor.fetchall ()
    for table in tables:
        stats_report += 'Tables: ' + str(table['TCOUNT']) + "\n"
    #Column Count
    cursor.execute("""SELECT COUNT(*) AS CCOUNT FROM information_schema.COLUMNS""")
    columns = cursor.fetchall ()
    for column in columns:
        stats_report += 'Columns: ' + str(column['CCOUNT']) + "\n"
    #Event Count
    cursor.execute("""SELECT COUNT(*) AS ECOUNT FROM information_schema.EVENTS""")
    events = cursor.fetchall ()
    for event in events:
        stats_report += 'Events: ' + str(event['ECOUNT']) + "\n"
    #Routine Count
    cursor.execute("""SELECT COUNT(*) AS RCOUNT FROM information_schema.ROUTINES""")
    routines = cursor.fetchall ()
    for routine in routines:
        stats_report += 'Routines: ' + str(routine['RCOUNT']) + "\n"
    #Trigger Count
    cursor.execute("""SELECT COUNT(*) AS TCOUNT FROM information_schema.TRIGGERS""")
    triggers = cursor.fetchall ()
    for trigger in triggers:
        stats_report += 'Triggers: ' + str(trigger['TCOUNT']) + "\n"
    #View Count
    cursor.execute("""SELECT COUNT(*) AS VCOUNT FROM information_schema.VIEWS""")
    views = cursor.fetchall ()
    for view in views:
        stats_report += 'Views: ' + str(view['VCOUNT']) + "\n"
    #User Count
    cursor.execute("""SELECT COUNT(*) AS UCOUNT FROM mysql.user""")
    views = cursor.fetchall ()
    for view in views:
        stats_report += 'Users: ' + str(view['UCOUNT']) + "\n"

    #Recommendations
    recommendations=''
    #Fragmented Tables
    fragmented_tables=get_fragmented_tables()
    if (fragmented_tables):
        recommendations += "=== Fragmented Tables ===\nThe following tables have been identified as being fragmented:\n"
        for table, percent in sorted(fragmented_tables.iteritems()):
            recommendations += ' ' + table + ' (' + str(percent) + "%)\n"
        recommendations += "Consider running OTIMIZE TABLE to reclaim unused space.  See http://dev.mysql.com/doc/refman/5.1/en/optimize-table.html for details.\n"

    #Unused Engines
    unused_engines=get_unused_engines()
    if (unused_engines):
        recommendations += "=== Unused Storage Engines ===\nThe following storage engines are not in use and could be disabled:\n"
        for engine in unused_engines:
            recommendations += ' ' + engine + "\n"
        recommendations += "See http://dev.mysql.com/doc/refman/5.1/en/server-plugin-options.html for details on how to disable plugins.\n"

    if (recommendations != ''):
        stats_report += "\n== Recommendations ==\n" + recommendations + "\n"

    cursor.close()

    notify.notify('Stats', 'dbAlerter Statistics for ' + variables['HOSTNAME'], stats_report)
Example #22
                                  'MySQL Server Back Online')
                    statistics['INFO'] += 1
                    break
                except MySQLdb.Error, (error, description):
                    if (error == 2003 or error == 2002):
                        if (retry_count == 5):
                            notify.log_notify(
                                'Error', '(' + str(error) + ') ' + description)
                            retry_count = 0
                        else:
                            retry_count += 1
                        time.sleep(
                            int(config.get('dbAlerter', 'check_interval')))
        else:
            notify.notify(
                'Error', '(' + str(error) + ') ' + description, 'Error (' +
                str(error) + ') - ' + description + "\n\ndbAlerter Shutdown")
            notify.log_notify('Shutdown')
            time.sleep(5)
            sys.exit(1)
    except:
        notify.log_notify('Shutdown')
        raise


def connect():
    """Connect to MySQL server"""

    try:
Example #24
#! /usr/bin/python2
# -*- coding:utf-8 -*-
from notify import notify


def getline(file, num_line):
    current_line = 0

    if num_line < 1:
        return ''
    with open(file, 'r') as handle:  # close the file even on an early return
        for line in handle:
            current_line += 1
            if current_line == num_line:
                return line
    return ''


random_line = getline('word-phrases/Frases-utiles-2.txt', 105)
title = random_line.split("|")[0]
message = random_line.split("|")[1]
print repr(random_line)
notify(repr(title), repr(message))
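
getline() rescans the file from the top on every call; for repeated lookups the stdlib linecache module does the same job with caching. A drop-in sketch:

import linecache

# equivalent to getline(path, 105); returns '' for out-of-range line numbers
random_line = linecache.getline('word-phrases/Frases-utiles-2.txt', 105)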
Example #25
# Assumed imports for this fragment; speak, speakWiki and notify are helpers
# from the surrounding project.
from re import match, sub
from subprocess import getoutput
from _thread import start_new_thread

def search(Input):
    # no data received
    if Input == "":
        notify(message="Sorry! Did you say something?")
        return

    # Command for quitting
    if Input in ['quit', 'terminate']:
        speak("Bye")
        Input = 'terminate'
        return

    #Command to lock PC
    if Input in ['lock','lock my mac','lock my pc']:
        speak("See you soon")
        getoutput("/System/Library/CoreServices/Menu\ Extras/User.menu/Contents/Resources/CGSession -suspend")
        return

    #Command to reboot
    if Input in ['reboot', 'reboot my mac', 'reboot my pc']:
        speak("See you soon")
        getoutput("osascript -e 'tell application \"System Events\" to restart'")
        return

    # Command to shutdown
    if Input in ['shutdown', 'shutdown my mac', 'shutdown my pc']:
        speak("See you soon")
        getoutput("osascript -e 'tell application \"System Events\" to shut down'")
        return

    # Command for Self Introduction
    if Input in ["who are you", "introduce yourself", "describe yourself"]:
        answer = 'I am Nancy, your personal assistant.'
        notify(title=Input, subtitle='I got this:', message=answer)
        speak(answer)
        return

    # Command for Owner Information
    if Input in ["who created you", "who is your master", "who is your owner"]:
        answer = "Team Errorist created me, Although I'm open source!"
        notify(title=Input, subtitle='I got this:', message=answer)
        speak(answer)
        return

    # Command for opening maps
    if match(r"^open maps.*$", Input):
        from webHandler import openMaps
        Input = Input.replace("open maps", " ")
        openMaps(Input)
        speak("Here It is...")
        return

    # Command for downloading lyrics
    if match(r"^download lyrics.*$", Input):
        from lyrics import lyrics_down
        lyrics_down(Input)
        return

    #Command to open Applications
    if match(r"^execute.*$",Input):
        from fInderAndAppControl import openApp
        Input=Input.replace("execute ","")
        openApp(Input)
        speak('There you go')
        return

    #Command to open a file
    if match(r"^open file.*$",Input):
        Input=Input.replace("open file ","")
        from fInderAndAppControl import openFile
        openFile(Input)
        return

    #Command to open a directory
    if match(r"^open folder.*$",Input):
        Input=Input.replace("open folder ","")
        from  fInderAndAppControl import openFolder
        openFolder(Input)
        return

    #Command to play a song
    if match(r"^play song.*$",Input):
        Input=Input.replace("play song ","")
        from fInderAndAppControl import openFile
        openFile(Input,direc="Music")
        return

    #Command to play video
    if match(r"^play video.*$",Input):
        Input=Input.replace("play video ","")
        from fInderAndAppControl import openFile
        openFile(Input,direc="Movies")
        return

    # Command for browsing a website
    if match(r"^browse.*$", Input):
        from webHandler import browseUrl
        Input = Input.replace("browse ", " ")
        browseUrl(Input)
        return

    # Command to throw a dice
    if match(r"^throw a dice$", Input):
        from randomStuff import dice
        output = str(dice())
        notify(message=output)
        speak(output)
        return

    # Command to toss a coin
    if match(r"^toss a coin$", Input):
        from randomStuff import coin
        output = coin()
        notify(message=output)
        speak(output)
        return

    # Command to download mp3 song
    if match(r"^download (audio)|(song).*$", Input):
        from mp3Download import page_link
        Input = sub(r"download audio|song|mp3 ", '', Input)
        #page_link(Input)
        start_new_thread(page_link,(Input,))
        return

    # Command to download mp4 video
    if match(r"^download video.*$", Input):
        from mp4Download import youtube_link
        Input = sub(r"download video ", '', Input)
        #youtube_link(Input)
        start_new_thread(youtube_link,(Input,))
        return

    # Command to read it aloud
    if match(r"^(read out)|(speak out loud)$", Input):
        from pyperclip import paste
        speak(paste())
        return
    try:
        from settings import client
        print('Trying wolframalpha')
        result = client.query(Input)
        answer = next(result.results).text
        notify(title=Input, subtitle='I got this:', message=answer)
        speak(answer)
    except:
        try:
            print('Trying wikipedia')
            from wikipedia import summary
            answer = summary(Input, sentences=1)
            print(answer)
            notify(title=Input, subtitle='I got this:', message=answer)
            speakWiki(answer)
        except Exception as err:
            notify(message='Oops, Nothing Found', extra='-timeout 1')
Example #26
    # Serve render/ folder, not current folder
    os.chdir(os.path.join("..", "render"))

    try:
        httpd = HTTPServer((IP, PORT), CustomHandler)
        httpd.socket = ssl.wrap_socket(httpd.socket,
                                       certfile=fpem,
                                       server_side=True)
        sa = httpd.socket.getsockname()
        IP, PORT = sa[0], sa[1]
        printc(
            "<green>Serving uLogMe<white> on a HTTPS server, see it locally on '<u><black>https://{}:{}<white><U>' ..."
            .format(IP, PORT))
        notify(
            "Serving <b>uLogMe</b> on a <i>HTTPS</i> server, see it locally on 'https://{}:{}' ..."
            .format(IP, PORT),
            icon="terminal")  # DEBUG
        httpd.serve_forever()
    except socket.error as e:
        if e.errno == 98:
            printc("<red>The port {} was already used ...<white>".format(PORT))
            printc(
                "Try again in some time (about 1 minute on Ubuntu), or launch the script again with another port: '<black>$ ulogme_serve_https.py {}<white>' ..."
                .format(PORT + 1))
        else:
            printc(
                "<red>Error, ulogme_serve.py was interrupted, giving:<white>")
            printc("<red>Exception:<white> ", e)
            # print("Exception: dir(e) =", dir(e))  # DEBUG
    except KeyboardInterrupt:
        printc(
Example #27
	if not os.path.exists('tmp'):
		os.makedirs('tmp')

	zfile.extractall('tmp')

	extract_files('tmp')

	#cleaning up
	shutil.rmtree('tmp')

try:
	url = obj["url"][1:][:-1]
	name = obj["url"][1:][:-1].split('/')[-1]
	filename = os.path.join(DOWNLOADS_FOLDER,name)

	notify.notify("Downloading Started", "Downloading %s V. %s " % (name,obj["version"]), "To %s" % filename)
	previous = 0
	def prg(count, blockSize, totalSize):
		global previous
		percent = int(count * blockSize * 100 / totalSize)
		sys.stdout.write("\r" + url + "...%d%%" % percent)

		if percent != previous and (percent % 20) == 0:
			previous = percent
			notify.notify("Downloading %s" % filename, "Percentage : %s%%" % percent, "", sound=False)
		sys.stdout.flush()

	urllib.urlretrieve(url,filename,reporthook=prg)
	print "\n"

	#this is needed so lion won't collate notifications
Example #28
 def receive(self, message):
     notify('GitBox Download', 'receiving new and changed files in %s' % self.path)
     cmd = 'git pull'
     process = subprocess.Popen(cmd.split(' '), cwd=self.path)
     process.communicate()
     return True
Example #29
for site in sites:
    res = requests.get(site, headers={'User-agent': 'your bot 0.1'})

    res.raise_for_status()
    res = res.json()

    for post in res['data']['children']:
        post_data = post['data']
        post_id = post_data['id']
        post_created = post_data['created']
        if post_created < time.time() - TIME_WINDOW or post_id in cache:
            continue
        cache[post_id] = post_created

        time_str = time.strftime('%Y-%m-%d %H:%M:%S',
                                 time.localtime(post_created))

        title = post_data["title"]
        permalink = post_data["permalink"]
        reddit_link = f"https://reddit.com{permalink}"

        message = f"{time_str}: {title}\n\n{reddit_link}"

        if "url" in post_data:
            message += "\n\n" + post_data["url"]

        notify(message)

with open('./cache.json', 'w') as outfile:
    json.dump(cache, outfile)
Example #30
# see also: has_syllabus.py, which is used to filter certain courses out

# SMTP server
if debug is True:
    server = None
else:
    server = smtplib.SMTP("localhost")

# output values
data = {}

for row in reader:
    # skip bad values for courses e.g. studio courses w/o syllabi
    if has_syllabus(row):
        # skip bad faculty values like "Standby" etc.
        names = filter(lambda f: f.strip().lower() not in skipped_faculty, row["faculty"].split(", "))
        for name in names:
            # initialize if not in output dict already
            if name not in data:
                data[name] = {"courses": [], "username": usernames.get(name)}

            data[name]["courses"].append(row["section"] + " " + row["title"])

for faculty in data:
    print(time.strftime("%m/%d/%Y %H:%M:%S"), "notifying %s..." % faculty)
    notify(faculty, data[faculty]["username"], data[faculty]["courses"], server, args.template)
    # not sure if necessary but I'd rather not spew out emails so fast
    time.sleep(1)

server.quit()
Example #32
    print("Message: " + msg + " sent to phone number: " + str(pnumber))


import os
import pprint
# (notify is provided by the surrounding project)

# 	Char dictionary
charCount = {}

print("File name + extension: ")
filein = input()

# 	Find out the directory the script was called in
cwd = os.getcwd()
path = os.path.join(cwd, filein)

print("Path to file: " + path)

# 	Open and read selected file.
fileIn = open(path)
fileContent = fileIn.read()

# 	Cycle through each character and keep count,
# 	creating the necessary dictionary keys per char:
# 	"value[key]"
for char in fileContent:
    charCount.setdefault(char, 0)
    charCount[char] = charCount[char] + 1

# 	For 'pretty printing'
pprint.pprint(charCount)

notify.notify()
Example #33
    vms_to_test = [options.dex_vm] if options.dex_vm != "all" else ALL_ART_VMS
    for art_vm in vms_to_test:
        vm_kind_to_test = "_" + options.dex_vm_kind if art_vm != "default" else ""
        return_code = gradle.RunGradle(
            gradle_args + ['-Pdex_vm=%s' % (art_vm + vm_kind_to_test)],
            throw_on_failure=False)

        if options.generate_golden_files_to:
            sha1 = '%s' % utils.get_HEAD_sha1()
            with utils.ChangedWorkingDirectory(
                    options.generate_golden_files_to):
                archive = utils.create_archive(sha1)
                utils.upload_file_to_cloud_storage(
                    archive, 'gs://r8-test-results/golden-files/' + archive)

        if return_code != 0:
            if options.archive_failures and os.name != 'nt':
                archive_failures()
            return return_code

    return 0


if __name__ == '__main__':
    return_code = Main()
    if return_code != 0:
        notify.notify("Tests failed.")
    else:
        notify.notify("Tests passed.")
    sys.exit(return_code)
Beispiel #34
0
def detect(cam, color_model, grey_model, vehicle_model, config, ha,
           mqtt_client):
    threshold = config["detector"].getfloat("threshold")
    image = cam.image
    if image is None:
        return 0, "{}=[{}]".format(cam.name, cam.error)
    prediction_start = timer()
    try:
        if len(cam.resized.shape) == 3:
            predictions = color_model.predict_image(cam.resized)
        elif len(cam.resized.shape) == 2:
            predictions = grey_model.predict_image(cam.resized)
        else:
            return 0, "Unknown image shape {}".format(cam.resized.shape)
    except OSError as e:
        return 0, "{}=error:{}".format(cam.name, e)
    cam.age = cam.age + 1
    vehicle_predictions = []
    if cam.vehicle_check and vehicle_model is not None:
        vehicle_predictions = vehicle_model.predict_image(cam.resized2)
        # include all vehicle predictions for now
        predictions += vehicle_predictions
    prediction_time = timer() - prediction_start
    # filter out lower predictions
    predictions = list(
        filter(
            lambda p: p["probability"] > config["thresholds"].getfloat(
                p["tagName"], threshold) or
            (p["tagName"] in cam.objects and p["probability"] > 0.4),
            predictions,
        ))
    for p in predictions:
        p["camName"] = cam.name
    add_centers(predictions)
    # remove road
    if cam.name in ["driveway", "peach tree"]:
        for p in predictions:
            x = p["center"]["x"]
            if x < 0.651:
                road_y = 0.31 + 0.038 * x
            else:
                road_y = 0.348 + 0.131 * (x - 0.651) / (1.0 - 0.651)
            p["road_y"] = road_y
            if p["center"]["y"] < road_y and (p["tagName"] in [
                    "vehicle", "person", "package", "dog"
            ]):
                p["ignore"] = "road"
    elif cam.name == "garage-l":
        for p in predictions:
            if p["boundingBox"]["top"] + p["boundingBox"]["height"] < 0.22 and (
                    p["tagName"] in ["vehicle", "person"]):
                p["ignore"] = "neighbor"
    elif cam.name in ["front entry"]:
        for p in filter(lambda p: p["tagName"] == "package", predictions):
            if p["center"]["x"] < 0.178125:
                p["ignore"] = "in grass"
    for p in filter(lambda p: "ignore" not in p, predictions):
        if "*" in cam.excludes:
            for e in cam.excludes["*"]:
                iou = bb_intersection_over_union(e, p["boundingBox"])
                if iou > 0.5:
                    p["ignore"] = e.get("comment", "static")
                    break
        if p["tagName"] in cam.excludes:
            for i, e in enumerate(cam.excludes[p["tagName"]]):
                iou = bb_intersection_over_union(e, p["boundingBox"])
                if iou > 0.5:
                    p["ignore"] = e.get("comment", "static iou {}".format(iou))
                    break

    valid_predictions = list(
        filter(lambda p: "ignore" not in p, predictions))
    valid_objects = set(p["tagName"] for p in valid_predictions)
    departed_objects = cam.objects - valid_objects

    yyyymmdd = date.today().strftime("%Y%m%d")
    save_dir = os.path.join(config["detector"]["save-path"], yyyymmdd)
    today_dir = os.path.join(config["detector"]["save-path"], "today")
    if not os.path.exists(today_dir):
        os.symlink(yyyymmdd, today_dir)
    if os.readlink(today_dir) != yyyymmdd:
        os.unlink(today_dir)
        os.symlink(yyyymmdd, today_dir)

    os.makedirs(save_dir, exist_ok=True)
    if len(departed_objects) > 0 and cam.prior_priority > -3:
        logger.info("\n{} current={}, prior={}, departed={}".format(
            cam.name,
            ",".join(valid_objects),
            ",".join(cam.objects),
            ",".join(departed_objects),
        ))
        basename = os.path.join(
            save_dir,
            datetime.now().strftime("%H%M%S") + "-" +
            cam.name.replace(" ", "_") + "-" + "_".join(departed_objects) +
            "-departed",
        )
        if isinstance(image, Image.Image):
            image.save(basename + ".jpg")
        else:
            cv2.imwrite(basename + ".jpg", image)
        cam.objects = valid_objects
    # Always open garage door, we can call this many times
    if "cat" in valid_objects and cam.name in ["shed", "garage-l", "garage-r"]:
        logger.info("Letting cat in")
        ha.crack_garage_door()
    elif "cat" in valid_objects and cam.name in ["garage"]:
        logger.info("Letting cat out")
        ha.let_cat_out()

    colors = config["colors"]
    new_predictions = []
    for p in valid_predictions:
        this_box = p["boundingBox"]
        this_name = p["tagName"]
        prev_class = cam.prev_predictions.setdefault(p["tagName"], [])
        for prev in prev_class:
            prev_box = prev["boundingBox"]
            iou = bb_intersection_over_union(prev_box, this_box)
            logger.debug(
                f"iou {cam.name}:{this_name} = prev_box & this_box = {iou}")
            if iou > 0.5:
                p["iou"] = iou
                prev["boundingBox"] = this_box  # move the box to current
                prev["last_time"] = datetime.now()
                prev["age"] = prev["age"] + 1
                p["age"] = prev["age"]
        expired = [
            x for x in prev_class if x["last_time"] < datetime.now() -
            timedelta(minutes=2) and x["age"] > 10
        ]
        for e in expired:
            logger.info(
                f"{e['tagName']} expired from {cam.name} after being seen {e['age']} times"
            )
        prev_class[:] = [
            x for x in prev_class
            if x["last_time"] > datetime.now() - timedelta(minutes=2)
        ]
        if "iou" not in p:
            p["start_time"] = datetime.now()
            p["last_time"] = datetime.now()
            p["age"] = 0
            prev_class.append(p)
            new_predictions.append(p)

    # Note: this condition is always true, so every frame is annotated and
    # im_pil is defined for the save further below.
    if len(valid_predictions) >= 0:
        if isinstance(image, Image.Image):
            im_pil = image.copy()  # for drawing on
        else:
            im_pil = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
        for p in predictions:
            if "ignore" in p:
                width = 2
            else:
                width = 4
            color = colors.get(p["tagName"], fallback="red")
            draw_bbox(im_pil, p, color, width=width)
        if cam.name in ["peach tree", "driveway"]:
            draw_road(im_pil, [(0, 0.31), (0.651, 0.348),
                               (1.0, 0.348 + 0.131)])
        elif cam.name in ["garage-l"]:
            draw_road(im_pil, [(0, 0.22), (1.0, 0.22)])

    new_objects = set(p["tagName"] for p in new_predictions)

    # Only notify deer if not seen
    if "deer" in new_objects:
        ha.deer_alert(cam.name)
    # mosquitto_pub -h mqtt.home -t "homeassistant/sensor/deck-dog/config" -r -m '{"name": "deck dog count", "state_topic": "deck/dog/count", "state_class": "measurement", "uniq_id": "deck-dog", "availability_topic": "aicam/status"}'
    for o in ["vehicle", "dog"]:
        if o in new_objects or o in departed_objects:
            count = len(
                list(filter(
                    lambda p: p["tagName"] == o,
                    valid_predictions,
                )))
            logger.info(f"Publishing count {cam.name}/{o}/count={count}")
            mqtt_client.publish(f"{cam.name}/{o}/count", count, retain=False)

    if len(new_objects):
        if cam.name in ["driveway", "garage"]:
            message = "%s in %s" % (",".join(valid_objects), cam.name)
        elif cam.name == "shed":
            message = "%s in front of garage" % ",".join(valid_objects)
        elif cam.name == "garage-r":
            message = "%s in front of left garage" % ",".join(valid_objects)
        elif cam.name == "garage-l":
            message = "%s in front of right garage" % ",".join(valid_objects)
        else:
            message = "%s near %s" % (",".join(valid_objects), cam.name)
        if cam.age > 2:
            priority = notify(cam, message, im_pil, valid_predictions, config,
                              ha)
        else:
            logger.info("Skipping notifications until after warm up")
            priority = -4
    elif len(valid_predictions) > 0:
        priority = cam.prior_priority
    else:
        priority = -4

    # Notify may also mark objects as ignore
    valid_predictions = list(
        filter(lambda p: "ignore" not in p, predictions))
    cam.objects = set(p["tagName"] for p in valid_predictions)

    if priority > -3 and not cam.is_file:
        # don't save file if we're reading from a file
        if cam.prior_image is not None:
            priorname = (os.path.join(
                save_dir,
                cam.prior_time.strftime("%H%M%S") + "-" + cam.name.replace(
                    " ", "_") + "-" + "_".join(valid_objects) + "-prior",
            ) + ".jpg")
            if isinstance(cam.prior_image, Image.Image):
                cam.prior_image.save(priorname)
            else:
                cv2.imwrite(priorname, cam.prior_image)
            cam.prior_image = None
            utime = time.mktime(cam.prior_time.timetuple())
            os.utime(priorname, (utime, utime))
        basename = os.path.join(
            save_dir,
            datetime.now().strftime("%H%M%S") + "-" +
            cam.name.replace(" ", "_") + "-" + "_".join(valid_objects),
        )
        if isinstance(image, Image.Image):
            image.save(basename + ".jpg")
        else:
            cv2.imwrite(basename + ".jpg", image)
        with open(basename + ".txt", "w") as file:
            j = {
                "source": str(cam.source),
                "time": str(datetime.now()),
                "predictions": predictions,
            }
            file.write(json.dumps(j, indent=4, default=str))
        im_pil.save(basename + "-annotated.jpg")
    else:
        cam.prior_image = image
    cam.prior_time = datetime.now()
    cam.prior_priority = priority

    def format_prediction(p):
        o = "{}:{:.2f}".format(p["tagName"], p["probability"])
        if "iou" in p:
            o += ":iou={:.2f}".format(p["iou"])
        if "ignore" in p:
            o += ":ignore={}".format(p["ignore"])
        if "priority" in p:
            o += ":p={}".format(p["priority"])
        if "priority_type" in p:
            o += ":pt={}".format(p["priority_type"])
        if "age" in p:
            o += ":age={}".format(p["age"])
        return o

    return (
        prediction_time,
        "{}=[".format(cam.name) +
        ",".join(format_prediction(p) for p in predictions) + "]",
    )
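bb_intersection_over_union() is not shown in this example; a minimal sketch of the standard IoU computation, assuming each box is a dict with "left", "top", "width", "height" keys as the boundingBox fields above suggest:

def bb_intersection_over_union(box_a, box_b):
    """Intersection-over-union of two axis-aligned bounding boxes."""
    # Corner coordinates of each box
    ax1, ay1 = box_a["left"], box_a["top"]
    ax2, ay2 = ax1 + box_a["width"], ay1 + box_a["height"]
    bx1, by1 = box_b["left"], box_b["top"]
    bx2, by2 = bx1 + box_b["width"], by1 + box_b["height"]

    # Intersection rectangle (empty if the boxes do not overlap)
    ix1, iy1 = max(ax1, bx1), max(ay1, by1)
    ix2, iy2 = min(ax2, bx2), min(ay2, by2)
    inter = max(0.0, ix2 - ix1) * max(0.0, iy2 - iy1)

    union = (box_a["width"] * box_a["height"] +
             box_b["width"] * box_b["height"] - inter)
    return inter / union if union > 0 else 0.0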
Beispiel #35
0
def check_logs(errors):
    if errors != '':
        subject = 'ERRORS with drivesync on %s' % (COMPUTER_NAME)
        notify(CONTANCT, errors, subject=subject)
def main(first_exec=False):
    """Main section"""

    # The path to monitor changes in directories dir_size. By default, if none
    # is given, takes the home directory.
    args = arguments().parse_args()
    mon_pth = args.path

    # Ignore all directories whose size difference is below this percentage or
    # absolute value. They are optional; set them to zero to override them.
    thld_pct = 20       # In percentage of difference in size for a directory
    thld_sz = 10.486E6  # In bytes of absolute value of directory size diff.

    # Prepare the log
    log = logger.Logger()
    url = "http://joedicastro.com"
    head = ("Changes in size of directories for {0} on {1}".
            format(mon_pth, platform.node()))
    log.header(url, head)
    log.time("START TIME")
    notify("Directory Size Monitor", "Start to check", "info")

    # Load the last dictionary of directories/sizes if exists
    try:
        with open('.dir_sizes.pkl', 'rb') as input_file:
            bfr_dir = pickle.load(input_file)
    except (EOFError, IOError, pickle.PickleError):
        bfr_dir = {}
        first_exec = True

    # Get the current dictionary of directories/sizes
    crr_dir = {}
    for path, dirs, files in os.walk(mon_pth):
        for directory in dirs:
            dir_path = os.path.join(path, directory)
            dir_size = get_size_fast(dir_path)
            crr_dir[dir_path] = dir_size

    # First, save the current dirs/sizes
    with open(".dir_sizes.pkl", "wb") as output_file:
        pickle.dump(crr_dir, output_file)

    # Create the list depending the status of directories
    deleted = [d for d in bfr_dir if d not in crr_dir]
    added = [d for d in crr_dir if d not in bfr_dir]
    changed = [d for d in crr_dir if d in bfr_dir if crr_dir[d] != bfr_dir[d]]

    log.list("Deleted directories", list4log(bfr_dir, mon_pth, deleted))
    log.list("New directories", list4log(crr_dir, mon_pth, added))
    log.list("Changed directories", diff4log(bfr_dir, crr_dir, mon_pth,
                                             changed, thld_pct, thld_sz))

    # If thresholds are nonzero, then report the values
    if thld_pct or thld_sz:
        tsz = best_unit_size(thld_sz)
        log.list("Threshold Values",
                 ["The directories whose size differences are less than any of"
                  " these values are ignored:", "",
                  "Percentage: {0:6} %".format(thld_pct),
                  "Size:       {0:6.2f} {1}".format(tsz['s'], tsz['u'])])

    # Show some statistics for the analyzed path
    mon_pth_sz = best_unit_size(get_size_fast(mon_pth))
    log.list("{0} Statistics".format(mon_pth),
             ["{0:8} directories".format(len(crr_dir)),
              "{0:8.2f} {1}".format(mon_pth_sz['s'], mon_pth_sz['u'])])
    log.time("END TIME")
    notify("Directory Size Monitor", "Finished", "Ok")
    if not first_exec:
        log.send("Changes in size of directories")
        log.write()
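get_size_fast() is not shown in this excerpt; a plausible sketch, assuming it simply walks the tree and sums file sizes (the name and signature come from the calls above):

import os

def get_size_fast(path):
    """Sum the sizes of all files below path, skipping unreadable entries."""
    total = 0
    for root, dirs, files in os.walk(path):
        for name in files:
            try:
                total += os.path.getsize(os.path.join(root, name))
            except OSError:
                continue  # broken symlink, permission error, etc.
    return total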
Beispiel #37
0
    def onClick(self, e):
        global x, y, r, g, b
        x, y = e.x(), e.y()
        r, g, b = img.getpixel((x, y))
        self.setStyleSheet("background-color: rgb(%i,%i,%i);" % (r, g, b))
        print(r, g, b)

    def onExit(self):
        fullCopy = "rgb(%s,%s,%s)" % (r, g, b)
        pyperclip.copy(fullCopy)
        exit()


def changedFocusSlot(old, now):
    print("hej")


tempImgPath = "hej123.png"
img = ImageGrab.grabclipboard()
try:
    # ImageGrab.grabclipboard() returns None when the clipboard holds no
    # image, so .save() raises AttributeError in that case.
    img.save(tempImgPath)
except AttributeError:
    notify("ERROR: No image in clipboard.")
    exit()
r, g, b, x, y = 0, 0, 0, 0, 0
app = QApplication(sys.argv)
form = Form()
form.show()
sys.exit(app.exec_())
Beispiel #38
0
        target_urls = [item_url(target_id) for target_id in target_ids]
        scores += fetch_scores_for_ids(target_ids)
        max_id -= FETCH_WINDOW
    return scores

def calculate():
    front_page_scores = fetch_scores_for_ids(fetch_front_page_ids())[:STORIES_ON_PAGE]
    print("Front page scores: {}".format(front_page_scores))
    new_page_scores = fetch_newest_stories()[:STORIES_ON_PAGE]
    print("New page scores: {}".format(new_page_scores))

    highest_new_submissions = sorted(new_page_scores, reverse=True)
    lowest_front_page_submissions = sorted(front_page_scores)

    # Report the second-highest and second-lowest score
    new_score = highest_new_submissions[1]
    front_score = lowest_front_page_submissions[1]

    return {'new': new_score, 'front': front_score, 'time': int(time.time())}

if __name__ == '__main__':
    while True:
        try:
            scores = calculate()
            db.write_scores(scores)
            notify.notify(scores)
            print("Current 2nd highest/lowest scores are: {}".format(scores))
        except Exception as error:
            print("ERROR: {}".format(error))
        time.sleep(60*5)
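fetch_front_page_ids() and fetch_scores_for_ids() are not shown; a minimal sketch against the public Hacker News Firebase API (the endpoint URLs are the real API, the function bodies are an assumption about what the helpers do):

import requests

HN_API = "https://hacker-news.firebaseio.com/v0"

def fetch_front_page_ids():
    # /topstories.json returns up to 500 story ids, front page first
    return requests.get(HN_API + "/topstories.json").json()

def fetch_scores_for_ids(ids):
    scores = []
    for item_id in ids:
        item = requests.get("{}/item/{}.json".format(HN_API, item_id)).json()
        if item and "score" in item:
            scores.append(item["score"])
    return scores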
#Script to handle notifications in KDE 5 Plasma.
#The notifications show the current weather and sky conditions, as pulled from wunderground.com
#
#TODO: creates a notification bubble with too high a priority. Need to analyze the knotify event line


import sys, dbus
from lxml import html
import requests
import notify

location='80918'
url="http://mobile.wunderground.com/cgi-bin/findweather/getForecast?brand=mobile&query="+location

page = requests.get(url)
tree = html.fromstring(page.content)

temp = tree.xpath('//td[contains(.,"Temperature")]/following-sibling::td/span[@class="nowrap"]/b/text()')[0]
cond = tree.xpath('//td[contains(.,"Conditions")]/following-sibling::td/b/text()')[0]

notify.notify(summary = 'Weather - %s' %(cond), body = '%sF' % (temp))

#print('%sF - %s  ' % (temp,cond))


#knotify = dbus.SessionBus().get_object("org.kde.knotify", "/Notify")
#title = "Weather"
#text = temp
#knotify.event("info", "kde", [], title, '%sF - %s  ' % (temp,cond) , [], [], 0, 0,\
#dbus_interface="org.kde.KNotify")
            response = urllib2.urlopen(req)
            status_code = response.getcode()
    except Exception, e:
        logger.error("Error occurred while trying to fetch subtitles")

    logger.info(status_code)

    if str(status_code) == '200':
        logger.info('Subtitles Fetched')
        content = response.read()
        sub_name = open(file_name+'.srt', 'wb')
        try:
            sub_name.write(content.encode('ascii', 'ignore'))
        except Exception, e:
            logger.exception(e)
            sub_name.write(content)
        try:
            notify.notify("Sub-Downloader: Success!", "Subtitles Found", "The subtitles are stored in a file named \"" + file_name + ".srt\"", delay=1, sound=True)
        except Exception, e:
            pass
        sub_name.close()
    elif str(status_code) == '404':
        try:
            notify.notify("Sub-Downloader: Error!", "Subtitle Not Found", "The subtitles for this file are not available in the database", delay=1, sound=True)
        except Exception, e:
            pass
        logger.error('Subtitles not available in Database')
    else:
        logger.error('Bad Request!')
    logger.info('============================')
Beispiel #41
0
                    db = MySQLdb.connect(
                        host=config.get('dbAlerter', 'mysql_hostname'),
                        port=int(config.get('dbAlerter', 'mysql_port')),
                        user=config.get('dbAlerter', 'mysql_username'),
                        passwd=config.get('dbAlerter', 'mysql_password'))
                    #Update MySQL pid
                    variables['PID'] = int(file(variables['PID_FILE'],'r').read().strip())
                    notify.notify('Info', 'MySQL Server Back Online', 'MySQL Server Back Online')
                    statistics['INFO'] += 1
                    break
                except MySQLdb.Error, (error,description):
                    if (error == 2003 or error == 2002):
                        if (retry_count == 5):
                            notify.log_notify('Error', '(' + str(error) + ') ' + description)
                            retry_count=0
                        else:
                            retry_count+=1
                        time.sleep(int(config.get('dbAlerter','check_interval')))
        else:
            notify.notify('Error','(' + str(error) + ') ' + description, 'Error (' + str(error) + ') - ' + description + "\n\ndbAlerter Shutdown")
            notify.log_notify('Shutdown')
            time.sleep(5)
            sys.exit(1)
    except:
        notify.log_notify('Shutdown')
        # Re-raise; the sleep/exit that followed here were unreachable.
        raise



def connect():
    """Connect to MySQL server"""

    try:
    if arguments["--compile-only"]:
        import multiprocessing as mp

        pool = mp.Pool(16)
        pool.map(do_compile, m.getTrueFalse())
    else:
        last = time.time()

        try:
            for i, comb in enumerate(m.getTrueFalse()):
                test = tm.createTest(comb)
                print test.uid
                test.compile()
                # test.loadResults()
                test.loadOrRun(run_interface)
                r = test.getResult()
                m.addResult(i, r)

                dt = datetime.time(datetime.now())
                print "{0:02d} [{1:02d}:{2:02d}:{3:02d}] ".format(int(time.time()-last), dt.hour, dt.minute, dt.second),test.uid, r
                sys.stdout.flush()
                last = time.time()
        except:
            traceback.print_exc()
            if arguments["--notify"]:
                notify.notify("Exception occurred!",traceback.format_exc(1))
        else:
            if arguments["--notify"]:
                notify.notify("Tests complete", "{0} tests completed.\nCommand line: {1}\nBenchmark: {2}\nPlatform: {3}".format(len(m.matrix), " ".join(sys.argv), arguments['BENCHMARK'], arguments['PLATFORM']))
Beispiel #43
0
from tqdm import tqdm

# Tally counters, initialised here so this excerpt runs standalone
true_pos = true_neg = false_pos = false_neg = 0

for r_idx, row in tqdm(enumerate(y_pred)):
    for c_idx, col in enumerate(row):
        if y_pred[r_idx][c_idx] == 1 and y_test[r_idx][c_idx] == 1:
            true_pos += 1
        if y_pred[r_idx][c_idx] == 0 and y_test[r_idx][c_idx] == 1:
            false_neg += 1
        if y_pred[r_idx][c_idx] == 1 and y_test[r_idx][c_idx] == 0:
            false_pos += 1
        if y_pred[r_idx][c_idx] == 0 and y_test[r_idx][c_idx] == 0:
            true_neg += 1

if true_pos > 0:
    precision = true_pos / (true_pos + false_pos)
    recall = true_pos / (true_pos + false_neg)
    accuracy = (true_pos + true_neg) / (true_pos + true_neg + false_pos +
                                        false_neg)
    f1 = (2 * precision * recall) / (precision + recall)
else:
    f1 = 0

logger.info(
    f"F1: {f1}, trained on: {len(train_docs)} samples, weights saved as: {fp}")
logger.info(f"batch_size: {batch_size}, epochs: {epochs}")
# logger.info("used co-occ feature engineering here")

from notify import notify
notify(f"f1: {f1}")
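Since y_pred and y_test are typically NumPy arrays in this setting, the per-cell loop above can be replaced by vectorized comparisons; a sketch, assuming 2D binary arrays:

import numpy as np

def f1_from_arrays(y_pred, y_test):
    """Micro-averaged F1 over all cells of two binary arrays."""
    y_pred = np.asarray(y_pred)
    y_test = np.asarray(y_test)
    tp = np.sum((y_pred == 1) & (y_test == 1))
    fp = np.sum((y_pred == 1) & (y_test == 0))
    fn = np.sum((y_pred == 0) & (y_test == 1))
    if tp == 0:
        return 0.0
    precision = tp / (tp + fp)
    recall = tp / (tp + fn)
    return 2 * precision * recall / (precision + recall)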