Example #1
0
    async def on_ready(self):
        """discord.py event hook: runs once the bot has logged in.

        Announces the bot's behaviour in every text channel named
        ``self.channel_str`` across all joined guilds, performs an initial
        ``act()`` pass, then loops forever: polls ``monitor_memelon`` every
        ``tweet_monitor_interval`` seconds and re-runs ``act()`` (after
        refreshing the Robinhood session) every 20 minutes.
        """
        print('Logged in as')
        print(self.user.name)
        print(self.user.id)
        print('------')

        # Announce in every matching channel across all joined guilds.
        for guild in self.guilds:
            for channel in guild.text_channels:
                if channel.name == self.channel_str:
                    with channel.typing():
                        await channel.send(
                            f'Now changing update interval to 20 minutes. If percentage changes more than {self.alarm_threshold:.2f} percent within this interval, it will append 💥. \n\nThis bot will also execute a DOGE buy if memelongated muskrat tweets (anything, just in case it\'s doge-related) and sell the same amount after a minute in order to sell on the pump. It will always do a test purchase when it starts.'
                        )
        old_vals = await self.act()
        every_n_seconds = 20 * 60  # re-run act() every 20 minutes
        tweet_monitor_interval = 10  # poll for tweets every 10 seconds
        beat.set_rate(1 / tweet_monitor_interval)
        count = tweet_monitor_interval
        while True:
            # count advances one interval per iteration; when it reaches a
            # multiple of 20 minutes the portfolio check runs again.
            count += tweet_monitor_interval
            await self.monitor_memelon()
            if count % every_n_seconds == 0:
                # Refresh the Robinhood session (24h expiry, renewed every
                # 20 minutes) before acting on the portfolio.
                rh.login(self.rh_user,
                         self.rh_pw,
                         expiresIn=3600 * 24,
                         by_sms=True)
                new_vals = await self.act(old_vals=old_vals)
                old_vals = new_vals
            # NOTE(review): time.sleep() blocks the asyncio event loop for the
            # whole interval; asyncio.sleep() would be the non-blocking choice.
            # Confirm no other coroutine needs to run before changing.
            time.sleep(tweet_monitor_interval)
Example #2
0
def psutil_process(communication_queue, worker_id, worker_parameters):
    """Worker loop: periodically report every running process (pid/name).

    Ticks at the frequency derived from ``worker_parameters["tick"]`` and
    puts one JSON message per visible process on ``communication_queue``.
    A tick of zero or less disables the worker entirely.

    Parameters
    ----------
    communication_queue : object with a ``put(str)`` method
        Destination for the JSON-encoded messages.
    worker_id : unused; kept for signature parity with sibling workers.
    worker_parameters : dict
        Must contain a numeric ``"tick"`` entry (seconds between scans).
    """
    logging.basicConfig(
        filename=base_path + '/log/agent.log',
        level=logging.DEBUG,
        format=
        '%(asctime)s.%(msecs)d %(levelname)s %(module)s - %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S")

    tick = worker_parameters["tick"]
    if tick > 0:
        tick = my_frequency(tick)
        beat.set_rate(tick)
        while beat.true():
            for proc in psutil.process_iter():
                try:
                    pinfo = proc.as_dict(attrs=['pid', 'name'])
                    # BUG FIX: b64encode() requires bytes, but json.dumps()
                    # returns str (TypeError on Python 3).  Encode first, and
                    # decode the result so "value" stays JSON-serializable
                    # when the outer message is dumped below.
                    pinfo = base64.b64encode(
                        json.dumps(pinfo).encode('utf-8')).decode('ascii')
                except psutil.NoSuchProcess:
                    # Process exited between listing and inspection; skip it.
                    pass
                else:
                    message = {
                        "agent": host_name,
                        "function": "process",
                        "value": pinfo
                    }
                    communication_queue.put(json.dumps(message))

            beat.sleep()
Example #3
0
def startDetect(onoff):
    """Trackbar callback: toggle motion detection.

    A value of 1 enables detection and runs the beat at the configured
    frame rate; anything else disables detection and idles at 50 Hz.
    """
    global doDetect
    doDetect = (onoff == 1)
    beat.set_rate(conf["fps"] if doDetect else 50)
Example #4
0
File: ufh.py Project: grrehm/Pi-UFH
def ufhloop():
    beat.set_rate(.1)
    while beat.true():
        schedule.run_pending()
        xrf.receive()
#        for d in iter(W1.devices):
#            d.read()
        output.relays() # calls room.control and outputs changes.
#        xiv.send()
        state.save()
        try:
            beat.sleep()
        except:
            print "no time to sleep"
Example #5
0
def capture(directory):

    if not os.path.exists(directory):
        os.makedirs(directory)

    print "Screenshotting every %d seconds..." % screenshot_rate
    beat.set_rate(1.0/screenshot_rate)
    index = 0
    while beat.true():
        try:
            now = datetime.now()
            timestamp = datetime.strftime(now, timestamp_format)
            filename = "%08d.jpg" % index
            path = os.path.join(directory, filename)
            subprocess.call(["scrot", "-q", str(scrot_quality), path])
            annotate_image(directory, filename, timestamp)
            index += 1
            beat.sleep()
        except KeyboardInterrupt:
            print "Encoding..."
            encode(directory)
            sys.exit(0)
Example #6
0
def capture(directory):
    """Screenshot loop: capture one frame per beat until interrupted.

    Frames are saved to *directory* as zero-padded JPEGs via the external
    `scrot` tool and annotated with a timestamp.  KeyboardInterrupt encodes
    the collected frames into a video and exits.
    """

    if not os.path.exists(directory):
        os.makedirs(directory)

    print "Screenshotting every %d seconds..." % screenshot_rate
    beat.set_rate(1.0 / screenshot_rate)
    index = 0
    while beat.true():
        try:
            now = datetime.now()
            timestamp = datetime.strftime(now, timestamp_format)
            filename = "%08d.jpg" % index
            path = os.path.join(directory, filename)
            # -q sets JPEG quality; scrot writes straight to `path`.
            subprocess.call(["scrot", "-q", str(scrot_quality), path])
            annotate_image(directory, filename, timestamp)
            index += 1
            beat.sleep()
        except KeyboardInterrupt:
            # Ctrl-C: turn the captured frames into a video, then quit.
            print "Encoding..."
            encode(directory)
            sys.exit(0)
Example #7
0
# Trading-bot bootstrap: broker + database connections and screening bounds.
import mysql.connector
import alpaca_trade_api as tradeapi
import threading
import time
import datetime
from datetime import datetime as dt
import random
import string
import json
import cronus.beat as beat
from collections import deque
# Main-loop cadence: beat.true()/beat.sleep() will tick at 2 Hz.
updateRate = 2
beat.set_rate(updateRate)

#init alpaca connection
# NOTE(review): live API key/secret committed to source — these should be
# rotated immediately and loaded from the environment or a secrets store.
API_KEY = "PKOCPSVQ9AIPDXBKLNQD"
API_SECRET = "uIGUxRsxZd58rCvmn4RQtJgdPtijr8uMqORQcQLq"
APCA_API_BASE_URL = "https://paper-api.alpaca.markets"
alpaca = tradeapi.REST(API_KEY, API_SECRET, APCA_API_BASE_URL, 'v2')

#init database connection
# NOTE(review): database password hard-coded here as well — same concern.
db = mysql.connector.connect(host="localhost",
                             user="******",
                             passwd="Lobstero1ogy",
                             database="stockdata",
                             auth_plugin='mysql_native_password')
cursor = db.cursor()

# We only consider stocks with per-share prices inside this range
min_share_price = 1.0
max_share_price = 10.0
    # Setup logging.
    logging.basicConfig(level = opts.loglevel, datefmt='%H:%M:%S', format='%(asctime)s %(levelname)s:%(name)s:%(funcName)s:%(message)s')
    logger = logging.getLogger(__name__)

    logger.debug(opts)

    # Data storage
    sqlcon = sqlite3.connect('/var/lib/temperatur/temperatur.db')

    i2c = SMBus(1)

    thermo_luft = thermometer(i2caddr=0x19, name='Luft vorne')
    thermo_vorlauf = thermometer(i2caddr=0x18, name='Vorlauf')
    thermo_ruecklauf = thermometer(i2caddr=0x1e, name='Ruecklauf')

    beat.set_rate(1.0/60)

    while beat.true():
        temp_luft = thermo_luft.get_temp()
        temp_vorlauf = thermo_vorlauf.get_temp()
        temp_ruecklauf = thermo_ruecklauf.get_temp()

        temps = [1, temp_vorlauf, temp_ruecklauf, temp_luft, None, None, None]
        temps[0] = datetime.datetime.now()

	logger.debug("Write into db: {0}".format(temps))

        # convert (back) to tuple for sqlite3
        ttemps = tuple(temps)

        # Table:
Example #9
0
 def set_interval(self, interval):
     """Set the beat rate so one beat fires every *interval* minutes.

     The interval is clamped to at least one minute, then converted to a
     frequency in Hz (beats per second) for cronus.beat.
     """
     interval = max(1, interval)  # clamp: make sure interval is >= 1 minute
     hz = 1/float(interval*60)    # minutes-per-beat -> beats-per-second (Hz)
     beat.set_rate(hz)
Example #10
0
		doDetect = False


# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-c", "--conf", required=True,
	help="path to the JSON configuration file")
args = vars(ap.parse_args())

# filter warnings, load the configuration and initialize the Dropbox
# client
warnings.filterwarnings("ignore")
conf = json.load(open(args["conf"]))
client = None
# NOTE(review): fps is hard-coded to 15 here, overriding conf["fps"].
fps = 15 #conf["fps"]
beat.set_rate(fps)

# check to see if the Dropbox should be used
# (client stays None when use_dropbox is false)
if conf["use_dropbox"]:
	# connect to dropbox and start the session authorization process
	flow = DropboxOAuth2FlowNoRedirect(conf["dropbox_key"], conf["dropbox_secret"])
	print( "[INFO] Authorize this application: {}".format(flow.start()))
	authCode = input("Enter auth code here: ").strip()

	# finish the authorization and grab the Dropbox client
	(accessToken, userID) = flow.finish(authCode)
	client = DropboxClient(accessToken)
	print( "[SUCCESS] dropbox account linked")

# initialize the camera and grab a reference to the raw camera capture
#camera = PiCamera()
Example #11
0
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-c",
                "--conf",
                required=True,
                help="path to the JSON configuration file")
args = vars(ap.parse_args())

# filter warnings, load the configuration and initialize the Dropbox
# client
warnings.filterwarnings("ignore")
conf = json.load(open(args["conf"]))
client = None
# NOTE(review): fps is hard-coded to 28 here, overriding conf["fps"].
fps = 28  # conf["fps"]
beat.set_rate(fps)

# capture resolution (half-size values kept in the trailing comments)
width = 640  # 320
height = 480  # 240

# initialize the camera and grab a reference to the raw camera capture
# vs = PiVideoStream((width, height), fps, conf["rotation"]).start()
cap = cv2.VideoCapture(0)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, width)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, height)

# allow the camera to warmup, then initialize the average frame, last
# uploaded timestamp, and frame motion counter
print("[INFO] warming up...")
time.sleep(conf["camera_warmup_time"])
def downloader():

        print("\n\n\n= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = ")
        print(" Option 1: Download data from UN ComTrade ")
        print("= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = ")

        print("\nFor this option, you will have to:\n\
        Specify the level of aggregation wanted for the commodity data\n\
        Specify the first and last year to be downloaded")
        print("\n= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = ")
        
        # Choose the aggregate level: 2-digit/4-digit/6-digit
        valid_choices = ["AG2", "AG4", "AG6"]
        AG = None
        while AG not in valid_choices:
                AG = raw_input("Choose the levels of aggregation for commodities [AG2, AG4, AG6]: ")

        # Choose the year:
        valid_years = range(1962,2014) # gives [1962-2013]
        year_s = None
        while year_s not in valid_years:
                year_s = int(raw_input("Choose the first year to be downloaded [1962-2013]: "))
        year_e = None
        while year_e not in valid_years:
                year_e = int(raw_input("Choose the last year to be downloaded [1962-2013]: "))

        for year in range(year_s, year_e+1):                
                dl_path = os.path.join('data','dl',AG) # where the data from ComTrade will be downloaded
                if not os.path.exists(dl_path):
                        os.makedirs(dl_path)
                add_res = os.path.join('data','add_res') # where the additional files needed are stored

                ctrycodes = pd.read_excel(os.path.join(add_res,'country_iso_codes.xls'))
                ctryrx = pd.read_csv(os.path.join(add_res,'country_list_redux.csv'), sep='\t')

                ctryrx = pd.merge(ctryrx, ctrycodes, how='left', left_on='iso3', right_on='ISO3-digit Alpha')

                ctrys = ctryrx.loc[ctryrx['End Valid Year'] > 2009]
                ctrys = ctrys[['country', 'iso3', 'ctyCode']].drop_duplicates()

                error_list = []

                i = 0
                beat.set_rate(0.027) # 100req/hour = 0.027req/s * 3600s/h

                while beat.true():
                        try:
                                ctry = ctrys.iloc[i]
                        except:
                                print '\nDownload of %d files completed' % i
                                break

                        print '\ndownloading', year, ctry['country'], '...'
                        myfn = os.path.join(dl_path,"comtrade_EXtoWorld_%s_%s.csv" % (str(ctry['iso3']), str(year)))
                        if (os.path.exists(myfn) == True):
                                i += 1
                                continue

                        print 'Saving file in', myfn, '...'
                        ctry_code = ctry['ctyCode']
                        file_url = 'http://comtrade.un.org/api/get?max=50000&type=C&freq=A&cc=%s&px=HS&ps=%s&r=%s&p=0&rg=2&fmt=csv' % (str(AG), year, str(ctry_code))
                        
                        try:
                                file_name = wget.download(file_url, out = myfn)
                        except:
                                print 'error for ', ctry['country']
                                error_list[ctry_code]

                        i += 1
                        beat.sleep()


                # Redownload instantly the files with errors

                print 'Check for errors', '...'
                i = 0
                j = 0
                beat.set_rate(0.027) # 100req/hour = 0.027req/s * 3600s/h

                while beat.true():
                        try:
                                ctry = ctrys.iloc[i]
                        except:
                                print '\nRedownload of %d files completed' % i
                                break

                        myfn = os.path.join(dl_path,"comtrade_EXtoWorld_%s_%s.csv" % (str(ctry['iso3']), str(year)))
                        size = os.path.getsize(myfn)
                        if not (size == 36):
                                i += 1
                                j = i - 1
                                continue
                        print '\nReplacing', year, ctry['country'], '...'
                        os.remove(myfn)
                        print 'Saving file in', myfn, '...'
                        ctry_code = ctry['ctyCode']
                        file_url = 'http://comtrade.un.org/api/get?max=50000&type=C&freq=A&cc=%s&px=HS&ps=%s&r=%s&p=0&rg=2&fmt=csv' % (str(AG), year, str(ctry_code))
                        
                        try:
                                file_name = wget.download(file_url, out = myfn)
                        except:
                                print 'error for ', ctry['country']
                        size = os.path.getsize(myfn)
                        if (size == 36):
                                i -= 1   
                        i += 1
                        beat.sleep()


                #Cleaning the downloads
                        
                # Get all files.
                list = os.listdir(dl_path)

                redo_list = []
                filename = []
                cat1 = []
                cat2 = []
                cat3 = []
                cat4 = []

                for file in list:
                        location = os.path.join(dl_path, file)
                        size = os.path.getsize(location)
                        if size < 1000:
                                if pd.read_csv(location).iloc[0,0] == 'No data matches your query or your query is too complex. Request JSON or XML format for more information.':
                                        redo_list.append(file)
                                        filename.append(file[:-4])
                                        cat1.append(file[:-4].split('_')[0])
                                        cat2.append(file[:-4].split('_')[1])
                                        cat3.append(file[:-4].split('_')[2])
                                        cat4.append(file[:-4].split('_')[3])
                                        os.remove(location)
                                
                deleted = pd.DataFrame(filename, columns=['filename'])
                deleted['source'] = cat1
                deleted['type'] = cat2
                deleted['country'] = cat3
                deleted['year'] = cat4

                # Save a report of the deleted files
                print("\nThere were %s empty files. They have been deleted automatically" % len(redo_list))
                fname = 'DeletedFiles_%s_%s.csv' % (str(AG),str(cat4[0]))
                # Check the folder exists
                dest = os.path.join('data','dl','dl_reports')
                if not os.path.exists(dest):
                        os.makedirs(dest)
                # Save the file
                fdest = os.path.join(dest,fname)
                deleted.to_csv(fdest, sep='\t', index=False)
                print '\nSaving files in', fdest, '...'
                print("\nThe report DeletedFiles_%s.csv contains the information on the files that were empty and have been deleted.") % str(cat4[0])

        print '\nOperation complete.'
        raw_input("\nPress Enter to continue...")
Example #13
0
def psutil_cpu_count(communication_queue, worker_id, worker_parameters):
    """Worker loop: watch physical/logical CPU counts and alert on drops.

    Ticks at the frequency derived from ``worker_parameters["tick"]``.  When
    a count falls below the configured minimum it sends an alert message on
    ``communication_queue`` and switches to the faster ``tick_error`` rate;
    when the count recovers it sends a "solved" message once.  A tick of
    zero or less disables the worker.

    Parameters: communication_queue has a ``put(str)`` method; worker_id is
    unused (kept for signature parity); worker_parameters needs "tick",
    "tick_error", "min_cpu" and "min_logical_cpu".
    """
    logging.basicConfig(
        filename=base_path + '/log/agent.log',
        level=logging.DEBUG,
        format=
        '%(asctime)s.%(msecs)d %(levelname)s %(module)s - %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S")

    tick = worker_parameters["tick"]
    # Per-check alert state: 1 while alerting, reset to 0 once "solved".
    warning = {}
    warning["cpu_count"] = 0
    warning["cpu_count_logical"] = 0
    if tick > 0:
        beat.set_rate(my_frequency(tick))

        while beat.true():
            #Physical CPU
            try:
                pinfo = psutil.cpu_count(logical=False)
                # NOTE(review): int() wraps the *comparison*, so this is
                # truthy exactly when pinfo < min_cpu; `int(pinfo) < ...`
                # was probably intended.  Also cpu_count(logical=False) can
                # return None, making the comparison raise (caught by the
                # broad except below) — confirm before changing.
                if int(pinfo < worker_parameters["min_cpu"]):
                    message = {
                        "agent": host_name,
                        "function": "cpu_count",
                        "value": pinfo
                    }
                    communication_queue.put(json.dumps(message))

                    warning["cpu_count"] = 1
                    # Alert condition: poll faster until it clears.
                    beat.set_rate(my_frequency(
                        worker_parameters["tick_error"]))
                else:
                    if warning["cpu_count"] == 1:
                        logging.debug("in solved cpu_count")
                        warning["cpu_count"] = 0
                        message = {
                            "agent": host_name,
                            "function": "cpu_count",
                            "value": "solved"
                        }
                        communication_queue.put(json.dumps(message))
            except:
                logging.error(
                    "Unexpected error in psutil_cpu_count - cpu_count: %s" %
                    (sys.exc_info()[0]))
                pass

            #Logical CPU
            try:
                pinfo = psutil.cpu_count(logical=True)
                # NOTE(review): same int()-around-comparison pattern as above.
                if int(pinfo < worker_parameters["min_logical_cpu"]):
                    message = {
                        "agent": host_name,
                        "function": "cpu_count_logical",
                        "value": pinfo
                    }
                    communication_queue.put(json.dumps(message))

                    warning["cpu_count_logical"] = 1
                    beat.set_rate(my_frequency(
                        worker_parameters["tick_error"]))
                else:
                    if warning["cpu_count_logical"] == 1:
                        logging.debug("in solved cpu_count_logical")
                        warning["cpu_count_logical"] = 0
                        message = {
                            "agent": host_name,
                            "function": "cpu_count_logical",
                            "value": "solved"
                        }
                        communication_queue.put(json.dumps(message))
            except:
                logging.error(
                    "Unexpected error in psutil_cpu_count - cpu_count_logical: %s"
                    % (sys.exc_info()[0]))
                pass

            beat.sleep()
    
    """

    def __init__(self):
        """Connect to the Kafka broker and the Sense HAT sensors."""
        self.producer = KafkaProducer(bootstrap_servers='hdp01.woolford.io:6667')
        self.sense = SenseHat()
        # Latest reading; refreshed by read_values_from_sensor().
        self.sensor_record = dict()

    def read_values_from_sensor(self):
        """Refresh self.sensor_record with the hostname, the current epoch
        timestamp, and temperature/humidity/pressure from the Sense HAT."""
        self.sensor_record['host'] = socket.gethostname()
        self.sensor_record['timestamp'] = int(time.time())
        self.sensor_record['temperature'] = self.sense.get_temperature()
        self.sensor_record['humidity'] = self.sense.get_humidity()
        self.sensor_record['pressure'] = self.sense.get_pressure()

    def send_record_to_kafka(self):
        """Publish the current sensor record as JSON to the
        temperature_humidity_json topic.

        NOTE(review): kafka-python's send() expects a bytes value; passing
        the str from json.dumps() may need .encode() — confirm against the
        installed client version.
        """
        sensor_record_json = json.dumps(self.sensor_record)
        self.producer.send("temperature_humidity_json", sensor_record_json)

    def run(self):
        """Take one reading from the Sense HAT and publish it to Kafka."""
        self.read_values_from_sensor()
        self.send_record_to_kafka()


if __name__ == "__main__":
    # Publish one sensor reading every 10 seconds (0.1 Hz), forever.
    sensor_hat_logger = SensorHatLogger()
    beat.set_rate(0.1)
    while beat.true():
        sensor_hat_logger.run()
        beat.sleep()
Example #15
0
    matriz=map(int,matriz)
    return sum(matriz)/8


if __name__ == "__main__":

    # Configure logging
    logger = logging.getLogger("controlPump")
    logger.setLevel(logging.DEBUG)
    streamHandler = logging.StreamHandler()
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    streamHandler.setFormatter(formatter)
    logger.addHandler(streamHandler)
    logger.debug("Logger initialized")

    arduinoIP = getSecret("arduino1")
    api_key = getSecret("THINGSPEAK_API_KEY")
    channel_id = getSecret("THINGSPEAK_CHANNEL_ID")
    channel = thingspeak.Channel(channel_id,'api_key={api}'.format(api=api_key))

    # One update per minute (~0.0167 Hz).
    beat.set_rate(0.016666667)

    # Publish to the same topic in a loop forever
    while beat.true():
        # BUG FIX: the original loop body had an `else:` with no matching
        # `try`/`if` (a syntax error, apparently a bad merge).  Reconstructed
        # as try/except/else following the parallel humidityReader loop
        # elsewhere in this file — verify getAverage() is the intended
        # source and that it can raise TimeoutError.
        try:
            humidity = getAverage(channel)
        except TimeoutError:
            logger.error("Timeout error reading humidity")
        else:
            logger.debug("Trying to send humidity to ThingSpeak")
            channel.update(formatMessage(humidity,api_key))
        beat.sleep()

Example #16
0
from nvapi import nvapi
from ipmiapi import ipmiapi

# Registry mapping metric namespace -> collector callable; each collector
# returns a dict of gauge name -> value (see report_metrics below).
metrics = {
    'gpu': nvapi,
    'ipmi': ipmiapi,
}

@timeout(60)
def report_metrics():
    """Collect every registered metric source and push each reading to
    statsd as a gauge named "<namespace>.<key>".  Aborts via the timeout
    decorator if collection hangs for more than 60 seconds."""
    for namespace, collect in metrics.items():
        readings = collect()
        for key, value in readings.items():
            statsd.gauge(namespace + '.' + key, value)

if __name__ == "__main__":
    report_metrics()
    beat.set_rate(1.0 / 10)
    while beat.true():
        try:
            report_metrics()
        except TimeoutError:
            print "timeout"
        try:
            beat.sleep()
        except:
            pass
        print datetime.datetime.now()



Example #17
0
# Motion-detection app state and OpenCV UI setup.
avg = None  # running background-average frame (initialized lazily)
lastUploaded = datetime.datetime.now()
motionCounter = 0

cv2.namedWindow("Security Feed")
cv2.namedWindow("ctrl", cv2.WINDOW_NORMAL)
cv2.setMouseCallback("Security Feed", moveMask)
# Trackbars double as buttons: exit, mask size, detection on/off.
cv2.createTrackbar('1:Exit app', "ctrl", 0, 1, quit)
cv2.createTrackbar('Mask size', "ctrl", maskw, 255, setMaskSize)
cv2.createTrackbar('0:Off\n1:On', "ctrl", 0, 1, startDetect)
cv2.resizeWindow("ctrl", 300, 100)
cv2.moveWindow("ctrl", 500, 35)
cv2.moveWindow("Security Feed", 0, 0)

# Idle frame rate; startDetect() switches to conf["fps"] when enabled.
beat.set_rate(50)

loopT = 1  # presumably last loop duration in seconds, updated in the main loop — TODO confirm
while beat.true():
    loopstarttime = datetime.datetime.now()
    # grab the raw NumPy array representing the image and initialize
    # the timestamp and occupied/unoccupied text
    #frame = f.array
    frame = vs.read()
    timestamp = datetime.datetime.now()
    text = "Unoccupied"

    # resize the frame, convert it to grayscale, and blur it
    frame = imutils.resize(frame, width=500)
    #START OF DETECT
    if doDetect:
    
    # Configure logging
    logger = logging.getLogger("humidityReader")
    logger.setLevel(logging.DEBUG)
    streamHandler = logging.StreamHandler()
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    streamHandler.setFormatter(formatter)
    logger.addHandler(streamHandler)
    logger.debug("Logger initialized")

    arduinoIP = getSecret("arduino1")
    api_key = getSecret("THINGSPEAK_API_KEY")
    channel_id = getSecret("THINGSPEAK_CHANNEL_ID")
    channel = thingspeak.Channel(channel_id,'api_key={api}'.format(api=api_key))    

    beat.set_rate(0.016666667)
    
    # Publish to the same topic in a loop forever
    while beat.true():
#    while True:
        try:
            humidity = getSensorValue(arduinoIP)
        except TimeoutError:
            pass
            logger.error("Timeout error reading arduino humidity sensor")
        else:
            logger.debug("Trying to send humidity to ThingSpeak")
            channel.update(formatMessage(humidity,api_key))
        beat.sleep()

        datefmt='%H:%M:%S',
        format='%(asctime)s %(levelname)s:%(name)s:%(funcName)s:%(message)s')
    logger = logging.getLogger(__name__)

    logger.debug(opts)

    # Data storage
    sqlcon = sqlite3.connect('/var/lib/temperatur/temperatur.db')

    i2c = SMBus(1)

    thermo_luft = thermometer(i2caddr=0x19, name='Luft vorne')
    thermo_vorlauf = thermometer(i2caddr=0x18, name='Vorlauf')
    thermo_ruecklauf = thermometer(i2caddr=0x1e, name='Ruecklauf')

    beat.set_rate(1.0 / 60)

    while beat.true():
        temp_luft = thermo_luft.get_temp()
        temp_vorlauf = thermo_vorlauf.get_temp()
        temp_ruecklauf = thermo_ruecklauf.get_temp()

        temps = [1, temp_vorlauf, temp_ruecklauf, temp_luft, None, None, None]
        temps[0] = datetime.datetime.now()

        logger.debug("Write into db: {0}".format(temps))

        # convert (back) to tuple for sqlite3
        ttemps = tuple(temps)

        # Table: