Example #1
File: tweet.py Project: choro3/miya
 def __call__(self, api):
     try:
         text = self.generator.generate()
         api.update_status(text)
         logging.debug('sent tweet. text=%s' % text)
     except Exception:
         logging.exception('failed to send tweet.')
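`logging.exception` logs at ERROR level and automatically appends the active traceback; unlike `logging.error`, it should only be called from inside an `except` block. A minimal self-contained sketch:

import logging

logging.basicConfig(level=logging.DEBUG)

try:
    1 / 0
except ZeroDivisionError:
    # exception() logs at ERROR level and appends the current traceback
    logging.exception('division failed')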
Example #2
def post_heartbeat(name):
    """
    Recursive function that posts a heartbeat to the database every 10 seconds.
    Started once when you run your script.
    See stackoverflow.com/questions/3393612
    """
    try:
        conn = pool.getconn()
    except psycopg2.Error as e:
        #if the database is down we just print the error
        logging.exception(str(e))
    else:
        #we have a connection
        try:
            with conn:
                with conn.cursor() as curs:
                    curs.execute("SELECT * FROM post_heartbeat('%s')" % name)
        except psycopg2.Error as e:
            #who knows what went wrong; just print the error
            logging.exception(str(e))
            #close the connection since it's possible the database
            #is down, so we don't want to reuse this connection
            conn.close()
        pool.putconn(conn)

    t = threading.Timer(10, post_heartbeat, [name])
    t.daemon = True
    t.start()
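The snippet assumes a module-level `pool`; with psycopg2 that is typically a `SimpleConnectionPool`, and the heartbeat only needs to be kicked off once. A minimal sketch, assuming connection parameters (`dbname`, `user`, `host`) that are not shown in the original:

import psycopg2.pool

# hypothetical connection parameters; the original does not show them
pool = psycopg2.pool.SimpleConnectionPool(
    minconn=1, maxconn=5,
    dbname='monitoring', user='monitor', host='localhost')

post_heartbeat('sensor-01')  # the first call schedules every later one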
Example #3
def loop():
    try:
        while True:
            sync(TOKEN)
            time.sleep(TWENTY_MINUTES)
    except Exception as e:
        logging.exception(e)
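Because the `try` wraps the entire `while True`, the first exception from `sync` ends the loop for good. If the intent is to keep syncing through transient failures, the handler belongs inside the loop; a sketch using the same `sync`, `TOKEN`, and `TWENTY_MINUTES` names from the example:

def loop():
    while True:
        try:
            sync(TOKEN)
        except Exception:
            # log the traceback but keep the loop alive for the next attempt
            logging.exception('sync failed; retrying after the usual delay')
        time.sleep(TWENTY_MINUTES)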
Example #4
def post_alarm(alarm_id):
    """
    Posts an alarm to the database for an alarm with a given id.

    Returns None if there was an error or the alarm was already active.
    If the alarm is successfully posted it returns the unique id of the
    alarm that was posted.
    """

    result = None

    try:
        conn = pool.getconn()
    except psycopg2.Error as e:
        #if the database is down we just print the error
        logging.exception(str(e))
        print(str(e))
    else:
        #we have a connection
        try:
            with conn:
                with conn.cursor() as curs:
                    curs.execute("SELECT * FROM post_alarm(%i)" % alarm_id)
                    result = curs.fetchone()[0]
        except psycopg2.Error as e:
            #who knows what went wrong?  Just print the error
            logging.exception(str(e))
            print(str(e))
            #close the connection since it's possible the database
            #is down, so we don't want to use this connection again
            conn.close()

        pool.putconn(conn)
    return result
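Interpolating `alarm_id` into the SQL text with `%i` invites injection the moment the id comes from outside the program. psycopg2 can bind the value server-side when it is passed as a separate tuple to `execute` (the placeholder is `%s` even for integers); a sketch of the same call with a bound parameter:

with conn:
    with conn.cursor() as curs:
        # psycopg2 quotes and escapes the bound value itself
        curs.execute("SELECT * FROM post_alarm(%s)", (alarm_id,))
        result = curs.fetchone()[0]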
Example #5
def main():
    BASEDIR = os.path.dirname(os.path.abspath(__file__))
    askscan_path = os.path.join(BASEDIR, "./AKscan/")
    url_jsonPath = os.path.join(BASEDIR, "urls.json")

    if os.path.exists(url_jsonPath):
        os.remove(url_jsonPath)
    parser = create_cmd_parser()
    args = parser.parse_args()
    try:
        # run AKscan
        AKscan_run(args, runpath=askscan_path, json_out_file=url_jsonPath)
        sqlmapapi = args.sqlmapapi
        # maximum number of threads the pool will hold at once
        runthreadNum = 40
        # run sqlmapapi
        threadPool = []
        for url in get_all_urls(url_jsonPath):
            if len(threadPool) >= runthreadNum:
                for t in threadPool:
                    t.start()
                for t in threadPool:
                    t.join()
                threadPool = []
            threadPool.append(
                threading.Thread(target=run_sqlmapapi, args=(sqlmapapi, url)))
        else:
            for t in threadPool:
                t.start()
            for t in threadPool:
                t.join()
    except Exception:
        logging.exception("AKscan/sqlmapapi run failed")
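The `for ... else` above is easy to misread: the `else` block runs whenever the loop finishes without a `break`, which here is always, so it simply flushes whatever threads are still queued after the last full batch. A stripped-down sketch of that control flow:

batch = []
for item in range(10):
    if len(batch) >= 4:
        print('flushing full batch', batch)
        batch = []
    batch.append(item)
else:
    # reached because the loop never breaks: flush the remainder
    print('flushing remainder', batch)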
Example #6
def pvb_writeObject(data, file_name, mode):

    try:
        # default mode is append; pickle requires a binary file object
        if mode is None:
            mode = "ab"

        with open(file_name, mode) as f_handle:
            pickle.dump(data, f_handle)
    except Exception:
        logging.exception("pvb_utils: Error in writing Pyobject to file")
        return -1
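Since the writer appends one pickle per call, reading the file back means looping `pickle.load` until `EOFError`. A sketch of a matching reader (the name `pvb_read_objects` is hypothetical, not part of the original module):

import pickle

def pvb_read_objects(file_name):
    # collect every object appended by pvb_writeObject, in write order
    objects = []
    with open(file_name, 'rb') as f_handle:
        while True:
            try:
                objects.append(pickle.load(f_handle))
            except EOFError:
                break
    return objects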
Example #7
    def get_save_path(self):
        """ Returns the current model root path."""
        fileName = self.modelUuid
        path = utils.get_data_path() + fileName + "/"

        try:
            utils.create_dir_if_necessary(path)
        except Exception:
            logging.exception(
                "Could not create dir to save classifier in. Saving in {0} instead."
                .format(utils.get_data_path()))
            path = utils.get_data_path()
        return path
Example #8
    def new_stream(self, stream):
        try:
            res = self.win.update(stream)
            new, old = next(res)

            self.update_state(new, old)

            if self.warmup is True:
                self.send("warm_up", (self.A, self.c))
                if self.E_global is not None:
                    self.warmup = False
            else:
                self.update_drift()
                self.subround_process()

        except StopIteration:
            log.exception("Window has failed.")
Example #9
def get_dates(request):
    """
    Returns a tuple (date_from, date_to) or None
    """
    try:
        if request is not None and 'date_from' in request.form and request.form['date_from']:
            date_from = request.form['date_from']
        else:
            date_from = str(datetime.now() - timedelta(hours=12))
        if request is not None and 'date_to' in request.form and request.form['date_to']:
            date_to = request.form['date_to']
        else:
            date_to = str(datetime.now() + timedelta(hours=1))
        return (date_from, date_to)
    except Exception as ex:
        logging.exeption("Błąd przy parsowaniu daty do statystyk", ex)
        return None
Example #10
File: jobs.py Project: lyft/Airflow
    def _execute(self):
        dag_id = self.dag_id

        def signal_handler(signum, frame):
            logging.error("SIGINT (ctrl-c) received")
            sys.exit(1)

        signal.signal(signal.SIGINT, signal_handler)

        utils.pessimistic_connection_handling()

        # Sleep time (seconds) between master runs

        logging.basicConfig(level=logging.DEBUG)
        logging.info("Starting a master scheduler")

        # This should get new code
        dagbag = models.DagBag(self.subdir)
        executor = dagbag.executor
        executor.start()
        i = 0
        while (not self.test_mode) or i < 1:
            i += 1
            if i % self.refresh_dags_every == 0:
                dagbag.collect_dags(only_if_updated=False)
            else:
                dagbag.collect_dags(only_if_updated=True)
            if dag_id:
                dags = [dagbag.dags[dag_id]]
            else:
                dags = [
                    dag for dag in dagbag.dags.values() if not dag.parent_dag
                ]
            paused_dag_ids = dagbag.paused_dags()
            for dag in dags:
                if dag.dag_id in paused_dag_ids:
                    continue
                try:
                    self.process_dag(dag, executor)
                except Exception as e:
                    logging.exception(e)
            self.heartbeat()
        executor.end()
Example #11
    def pushTodB(self, subs):
        if not subs:
            logging.info("Received blank submission. Skipping MySQL push")
            return
        num = 1  # retry counter; must be initialized outside the loop or it resets every pass
        while True:
            try:
                cnx = connector.connect(user=MYSQL_USER,
                                        password=MYSQL_PASS,
                                        host=MYSQL_HOST,
                                        port=MYSQL_PORT,
                                        database=MYSQL_DB)
                logging.debug("Database connection established")
                break
            except Exception as e:
                if num == 5:
                    logging.exception(
                        "Attempt %s: Could not connect to MySQL server after 5 attempts. Stopping"
                        % str(num))
                    raise
                logging.exception(
                    "Attempt %s: Could not connect to MySQL server. Waiting 60 seconds"
                    % (str(num)))
                time.sleep(60)
                num += 1

        cursor = cnx.cursor()
        for num in subs:
            query = """INSERT INTO `%s` (Date, Name, Memo, Time, Amount) VALUES
						(CURRENT_DATE(), '%s', '%s', CURRENT_TIME(), '%.2f')""" % (
                MYSQL_TABLE, num[0], "None", float(num[1]))
            cursor.execute(query.replace("\n", ""))

        try:
            cnx.commit()
            logging.info("Successfully committed: %s " %
                         query.replace("\n", ""))
        except Exception as e:
            logging.exeption("Could not commit MySQL query to database")
        cursor.close()
        cnx.close()
        logging.debug("MySQL Connection closd")
Example #12
    def __init__(self):
        self.handlers = {}

        p = os.path.dirname(os.path.abspath(__file__))
        files = os.listdir(p)
        for f in files:
            if f.startswith("__"): continue
            if not f.endswith(".py"): continue
            m = f.replace('.py', '')
            try:
                module = importlib.import_module('.' + m, 'energenie.Handlers')
            except Exception:
                logging.exception("Module import failed: " + m)
                continue

            for name, obj in inspect.getmembers(module):
                try:
                    if not inspect.isclass(obj):
                        continue

                    if not issubclass(obj, Handler):
                        continue

                    if name == "Handler":
                        continue

                    plugin = getattr(module, name)

                    if name in self.handlers.keys():
                        logging.debug("Plugin already registered %s" % name)
                        continue

                    self.handlers[name] = plugin
                    logging.info("Plugin loaded \"%s\"" % name)
                except:
                    logging.exception("Plugin failed to load: \"%s\"" % name)
Example #13
from __future__ import print_function

from lib.rfid_mfrc522 import read_rfid
from lib import info
import requests
import logging
import time
import os

api = 'http://codeme.krdai.info/api/checkin/'

print(info.MACHINE_ID, info.SD_ID)

for index, uid in enumerate(read_rfid()):
    print(index, uid)

    try:
        r = requests.post(api, {
            'rfid': uid,
            'mid': info.MACHINE_ID,
            'data': "{}"
        })
        assert r.ok, r.text
    except Exception as e:
        logging.exception(e)

    try:
        os.system('sudo python /home/pi/pyconapac-rpi/lib/buzzer.py')
    except Exception as e:
        logging.exception(e)
Example #14
                num_rows = queryresults.total_rows
            except socket.error as exc:
                logging.exception("IN getPastAlarms: Failed to view database" + \
                    "for past alarms.  Re-try connection.")
                counter += 1
                time.sleep(1)
                dbStatus, db = connectToDB("slowcontrol-alarms")
                continue
            break
        if num_rows > 0:
            alarms_in_db = queryresults.rows[0].value
        else:
            print("Could not get most recent alarms in DB.  Continuing..")
            alarms_in_db = {}
    else:
        logging.exeption("IN getPastAlarms(): could not connect to" + \
            "couchDB slowcontrol-alarms/pi_db database.")
        print("could not connect to couchDB alarm database.")
        alarms_in_db = {}
    return alarms_in_db


#Saves alarms_dict to alarms db
def saveAlarms(alarms_dict, alarms_last, channeldb):
    dbStatus, db = connectToDB("slowcontrol-alarms")
    if dbStatus == "ok":
        counter = 0
        while counter < 3:
            try:
                queryresults = db.view("slowcontrol-alarms/pi_db",
                                       descending=True,
                                       limit=1)
Example #15
def fillGmeDb(time='recent'):
    """ An overall function to make connections to source databases
    and populate a local database

    Parameters
    ----------
    time : Optional[str]
        Either 'recent' or other.  If not recent, the database will be
        populated with data available back to 1995.  Default is 'recent'

    Returns
    -------
    Nothing

    Example
    -------

    """
    from davitpy import gme
    from davitpy import rcParams
    import os
    import davitpy.pydarn.sdio.dbUtils as dbu
    from multiprocessing import Process
    import datetime as dt
    now = dt.datetime.now()

    if (time == 'recent'):
        sYear = dt.datetime.now().year - 5
        #fill the omni database
        p0 = Process(target=gme.ind.omni.mapOmniMongo,
                     args=(sYear, now.year, 1))
        #fill the omni database
        p1 = Process(target=gme.ind.omni.mapOmniMongo, args=(sYear, now.year))
        #fill the poes database
        p2 = Process(target=gme.sat.poes.mapPoesMongo, args=(sYear, now.year))
        #fill the kp database
        p3 = Process(target=gme.ind.kp.mapKpMongo, args=(sYear, now.year))
        #fill the dst database
        p4 = Process(target=gme.ind.dst.mapDstMongo, args=(sYear, now.year))
        #fill the ae database
        p5 = Process(target=gme.ind.ae.mapAeMongo, args=(sYear, now.year))
        #fill the sym/asy database
        p6 = Process(target=gme.ind.symasy.mapSymAsyMongo,
                     args=(sYear, now.year))
    else:
        db = dbu.getDbConn(username=rcParams['DBWRITEUSER'],
                           password=rcParams['DBWRITEPASS'],
                           dbName='gme')
        db.command('repairDatabase')
        #fill the omni database
        p0 = Process(target=gme.ind.omni.mapOmniMongo,
                     args=(1995, now.year, 1))
        #fill the omni database
        p1 = Process(target=gme.ind.omni.mapOmniMongo, args=(1995, now.year))
        #fill the poes database
        p2 = Process(target=gme.sat.poes.mapPoesMongo, args=(1998, now.year))
        #fill the kp database
        p3 = Process(target=gme.ind.kp.mapKpMongo, args=(1980, now.year))
        #fill the dst database
        p4 = Process(target=gme.ind.dst.mapDstMongo, args=(1980, now.year))
        #fill the ae database
        p5 = Process(target=gme.ind.ae.mapAeMongo, args=(1980, now.year))
        #fill the sym/asy database
        p6 = Process(target=gme.ind.symasy.mapSymAsyMongo,
                     args=(1980, now.year))

    try:
        p0.start()
    except Exception as e:
        logging.exception('problem filling Omni db: %s', e)
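The snippet ends after starting `p0`; presumably the remaining `Process` objects are started the same way, which generalizes to a loop over all seven. A sketch under that assumption:

processes = [p0, p1, p2, p3, p4, p5, p6]
for proc in processes[1:]:  # p0 was already started above
    try:
        proc.start()
    except Exception as e:
        logging.exception('problem starting a gme fill process: %s', e)
for proc in processes:
    proc.join()  # wait for every database fill to finish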