    def get(self):
        
        authenticateUser = str(users.get_current_user()) 
        featureList = database.gaeSessionNavBuilder()
        client = memcache.Client()
        case_key = client.get('case_key')
        tabindex = 4
        conn = config.get_connection()
        cursor = conn.cursor()    
        
        cursor.execute("SELECT proc_run.proc_run_id, proc_run.emp_id, proc_run.instance_key, proc_case.case_nm, process.proc_nm, process_step.proc_step_nm, "
                       "proc_run.proc_output_conf, proc_req.proc_req_seq, proc_req.proc_req_nm, proc_req.proc_req_desc, proc_run.proc_notes, "
                       "proc_run.proc_conseq, proc_run.proc_innovation "
                       "FROM proc_run "
                       "INNER JOIN proc_case on (proc_run.case_id = proc_case.case_id) "
                       "INNER JOIN process on (proc_run.proc_id = process.proc_id) "
                       "INNER JOIN process_step on (proc_run.proc_step_id = process_step.proc_step_id) "
                       "INNER JOIN proc_req on (proc_run.proc_req_id = proc_req.proc_req_id) "
                       "WHERE proc_run.instance_key = %s", (case_key)) #rename this -- bad name!!
        
        assessinstance = cursor.fetchall()  
        
        conn.close()

        template_values = {'authenticateUser': authenticateUser, 'featureList': featureList, 'tabindex': tabindex,
                           'assessinstance': assessinstance, 'case_key': case_key }
        template = jinja2_env.get_template('operateprocess.html')
        self.response.out.write(template.render(template_values))
    def post(self):
        authenticateUser = str(users.get_current_user()) 
        featureList = database.gaeSessionNavBuilder()
        processmenu = database.gaeSessionProcessMenu()

        conn = config.get_connection()
        cursor = conn.cursor()  
        
        cursor.execute('INSERT INTO proc_case (case_nm, emp_id, status) ' # status = 1 = ACTIVE
                       'VALUES (%s, %s, 1)',
                       (
                       self.request.get('case_nm'),
                       (authenticateUser),
                       ))   
        
        conn.commit() 
        
        cursor.execute("SELECT case_id, case_nm FROM proc_case WHERE status = 1 AND emp_id =%s", (authenticateUser))
        ddb_active_case = cursor.fetchall()
        
        client = memcache.Client() 
        client.set('ddb_active_case', ddb_active_case, 120) 
      
        cursor.execute("SELECT * FROM capability.vw_proc_run_sum WHERE proc_step_conf is null AND emp_id = %s", (authenticateUser))
        openoperations = cursor.fetchall()        
        
        conn.close()
        
        tabindex = 2

        template_values = {'ddb_active_case': ddb_active_case, 'processmenu': processmenu, 'openoperations': openoperations, 
                           'authenticateUser': authenticateUser, 'tabindex': tabindex, 'featureList': featureList }
        template = jinja2_env.get_template('operateprocess.html')
        self.response.out.write(template.render(template_values))
    def get(self):
        
        conn = config.get_connection()
        cursor = conn.cursor()
        
        authenticateUser = str(users.get_current_user())
        featureList = database.gaeSessionNavBuilder()
        
        cursor.execute("SELECT DISTINCT proc_nm, proc_step_seq, proc_step_nm, proc_step_desc, proc_step_owner, proc_step_status, proc_step_ponc, "
                       "proc_step_poc, proc_step_efc "
                       "FROM vw_processes "
                       "WHERE proc_step_status = 'active' OR (proc_step_status = 'local' AND proc_step_owner = %s) "
                       "ORDER BY proc_id, proc_step_seq", (authenticateUser))
        processcost = cursor.fetchall()
                
        cursor.execute("SELECT proc_run_start_tm, proc_nm, proc_seq, proc_step_nm, case_nm, instance_key, emp_id, "
               "COUNT(proc_step_conf) AS tot_ops, SUM(proc_step_conf) AS tot_success, " #7, #9
               "(COUNT(proc_step_conf) - SUM(proc_step_conf)) AS failure, " #10
               "SUM(proc_ponc) AS sum_ponc, " #11
               "SUM(proc_poc) AS sum_poc, " #12
               "SUM(proc_efc) AS sum_efc, "  #13
               "(SUM(proc_poc) + SUM(proc_ponc)) AS tot_cost " #14
               "FROM vw_proc_run_sum "
               "WHERE emp_id = %s "
               "GROUP BY proc_step_id "
               "ORDER BY proc_id", (authenticateUser))
        capability = cursor.fetchall()
        conn.close()
               
        template_values = {'capability': capability, 'authenticateUser': authenticateUser, 'processcost': processcost, 'featureList': featureList}
        template = jinja2_env.get_template('ponccalculator.html')
        self.response.out.write(template.render(template_values))
 
        
            
Example #4
def mysql_update_result_and_pay_money():
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            sql = '''UPDATE bets, clients, matches
                    SET clients.tokens = CASE
                        WHEN bets.match_bet_result = matches.match_result
                            THEN clients.tokens + (bets.tokens * bets.coefficient)
                            ELSE clients.tokens
                        END, 
                    bets.result_of_bet = CASE
                        WHEN bets.match_bet_result = matches.match_result
                            THEN  2
                        WHEN bets.match_bet_result != matches.match_result
                            THEN  3
                            ELSE bets.result_of_bet
                        END
                    WHERE matches.match_status = 2
                    AND bets.result_of_bet = 1
                    AND bet_status = 2
                    AND bets.telegram_id = clients.telegram_id
                    '''
            cursor.execute(sql)
        connection.commit()
    finally:
        connection.close()
    def remove_old_profiler():
        con = config.get_connection()

        while 1:
            # wait
            time.sleep(30)

            # Get oldest profilers (10)
            profilers = database.read_query(
                "SELECT username, timestamp FROM profiler ORDER BY timestamp ASC LIMIT 10;",
                (),
                con=con,
                close_con=False)

            for username, timestamp in profilers:
                timestamp = datetime.strptime(timestamp, "%d.%m.%YT%H:%M:%S")

                if timestamp < (datetime.utcnow() -
                                timedelta(hours=config.PROFILER_DELETE_TIME)):
                    # Too old --> remove
                    database.commit_query("SET SQL_SAFE_UPDATES = 0;", (),
                                          con=con,
                                          close_con=False)
                    database.commit_query(
                        "DELETE FROM profiler WHERE username=%s;",
                        (username, ),
                        con=con,
                        close_con=False)

                    # Remove interesting Posts
                    database.commit_query(
                        "DELETE FROM interesting_posts WHERE username=%s;",
                        (username, ),
                        con=con,
                        close_con=False)
    def post(self): 
        now = config.UTCTime()
        authenticateUser = str(users.get_current_user())
        featureList = database.gaeSessionNavBuilder()
        client = memcache.Client()
        case_key = client.get('case_key')
        proc_output_conf = self.request.get('proc_output_conf')
        proc_notes = self.request.get('proc_notes')
        proc_conseq = self.request.get('proc_conseq')
        proc_innovation = self.request.get('proc_innovation')
        proc_run_id = self.request.get('proc_run_id')
        proc_run_status = self.request.get('proc_run_status')
        
        conn = config.get_connection()
        cursor = conn.cursor()
        
        cursor.execute("UPDATE proc_run SET "
                       "proc_run_start_tm =%s, proc_output_conf = %s, proc_notes = %s, proc_conseq = %s, proc_innovation = %s, proc_run_status = %s "
                       "WHERE proc_run_id = %s",
                       (now, proc_output_conf, proc_notes, proc_conseq, proc_innovation, proc_run_status, proc_run_id ))

        conn.commit()
        
        cursor.execute("SELECT proc_run.proc_run_id, proc_run.case_id, proc_run.emp_id, proc_run.instance_key, proc_run.proc_req_id, proc_run.proc_step_id, "
                       "process.proc_id, proc_case.case_nm, process.proc_nm, process_step.proc_step_nm, process_step.proc_step_sop, proc_run.proc_output_conf, "
                       "proc_req.proc_req_seq, proc_req.proc_req_nm, proc_req.proc_req_desc "
                       "FROM proc_run "
                       "INNER JOIN proc_case on (proc_run.case_id = proc_case.case_id) "
                       "INNER JOIN process on (proc_run.proc_id = process.proc_id) "
                       "INNER JOIN process_step on (proc_run.proc_step_id = process_step.proc_step_id) "
                       "INNER JOIN proc_req on (proc_run.proc_req_id = proc_req.proc_req_id)"
                       "WHERE proc_run.proc_output_conf IS NULL AND proc_run.instance_key = %s", (case_key)) #rename this -- bad name!!
        
        casecount = cursor.rowcount
        case = cursor.fetchall()  
        
        cursor.execute("SELECT * FROM capability.vw_proc_run_sum WHERE proc_step_conf is null AND emp_id = %s", (authenticateUser))
        openoperations = cursor.fetchall()
        
        cursor.execute("SELECT case_id, case_nm FROM proc_case WHERE status = 1 AND emp_id =%s", (authenticateUser))
        ddb_active_case = cursor.fetchall()

        cursor.execute("SELECT DISTINCT proc_id, proc_nm, proc_step_id, proc_step_seq, proc_step_nm "
               "FROM vw_processes "
               "WHERE proc_step_status = 'active' OR proc_step_owner = %s "
               "ORDER BY proc_id, proc_step_seq", (authenticateUser))
        processmenu = cursor.fetchall()

        conn.close()
        
        if casecount > 0:
            tabindex = 3
            template_values = {'processmenu': processmenu, 'authenticateUser': authenticateUser, 'case': case, 'case_key': case_key, 
                           'openoperations': openoperations, 'ddb_active_case': ddb_active_case, 'featureList': featureList,
                           'tabindex': tabindex, 'casecount':casecount}
            template = jinja2_env.get_template('operateprocess.html')
            self.response.out.write(template.render(template_values))
        else:
            self.redirect("/AssessPerformance")
def get_visit_count():
    connection = get_connection()
    cursor = connection.cursor()
    cursor.execute(f"select count(*) from visitors;")
    rows = cursor.fetchall()
    connection.commit()
    connection.close()
    return rows[0][0]
 def on_save(self):
     connection = get_connection()
     cursor = connection.cursor()
     cursor.execute(
         "INSERT INTO visitors (ip_address, user_agent, referrer, full_path, visit_time) "
         "VALUES (%s, %s, %s, %s, %s);",
         (self.ip_address, self.user_agent, self.referrer, self.full_path, self.visit_time)
     )
     connection.commit()
     connection.close()
     return 0
Example #9
 def __init__(self, outdir, folders):
     path = os.getcwd() + '/' + outdir + '/export/exporter/*'
     log.info("Loading model from: " + path)
     export_dir = glob.glob(path)[-1]
     self.predict_fn = predictor.from_saved_model(export_dir)
     self.porter = PorterStemmer()
     con = get_connection()
     ok, _ = con.list('INBOX')
     assert ok == "OK"
     # all_folders = [f.split()[-1] for f in folders]
     self.con = con
     self.folders = folders
    def post(self): 
        now = config.UTCTime()
        authenticateUser = str(users.get_current_user())
        featureList = database.gaeSessionNavBuilder()
        processmenu = database.gaeSessionProcessMenu()
        ddb_active_case = database.gaeSessionActiveCase()
        
        perf_stnd_1 = self.request.get('perf_stnd_1')
        perf_stnd_2 = self.request.get('perf_stnd_2')
        perf_stnd_3 = self.request.get('perf_stnd_3')
        perf_stnd_notes_1 = self.request.get('perf_stnd_notes_1')
        perf_stnd_notes_2 = self.request.get('perf_stnd_notes_2')
        perf_stnd_notes_3 = self.request.get('perf_stnd_notes_3')
        client = memcache.Client()
        case_key = client.get('case_key')
        
        if perf_stnd_1 == '':
            perf_stnd_1 = 0
        else:
            perf_stnd_1 = 1
        if perf_stnd_2 == '':
            perf_stnd_2 = 0
        else:
            perf_stnd_2 = 1
        if perf_stnd_3 == '':
            perf_stnd_3 = 0
        else:
            perf_stnd_3 = 1
            
        if case_key is None:
            pass # query for last entry for expired memcache
        else:
            pass 
        
        conn = config.get_connection()
        cursor = conn.cursor()
        
        #perf_stnd_1 =%s, perf_stnd_2 = %s, perf_stnd_3 = %s, // perf_stnd_1, perf_stnd_2, perf_stnd_3, //perf_stnd_notes_ts
        cursor.execute("UPDATE instance SET "
                       "perf_stnd_1 = %s, perf_stnd_2 = %s,perf_stnd_3 = %s, perf_stnd_notes_1 = %s, perf_stnd_notes_2 = %s, perf_stnd_notes_3 = %s, perf_stnd_notes_ts = %s "
                       "WHERE instance_key = %s ",
                       (perf_stnd_1, perf_stnd_2, perf_stnd_3, perf_stnd_notes_1, perf_stnd_notes_2, perf_stnd_notes_3, now, case_key ))

        conn.commit()
        conn.close()       

        tabindex = 2
        
        template_values = {'processmenu': processmenu, 'authenticateUser': authenticateUser, 'ddb_active_case': ddb_active_case, 'featureList': featureList,
                           'tabindex': tabindex, 'case_key': case_key}
        template = jinja2_env.get_template('operateprocess.html')
        self.response.out.write(template.render(template_values))
Example #11
def mysql_update_bets(main_id, tokens):
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            sql = '''UPDATE bets
                SET tokens = %s,
                bet_status = %s
                WHERE main_id = %s
                '''
            cursor.execute(sql, (tokens, 2, main_id))
        connection.commit()
    finally:
        connection.close()
Example #12
def mysql_bets_update_match_status():
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            sql = '''UPDATE bets, matches
                    SET bets.match_status = matches.match_status
                    WHERE bets.match_id = matches.match_id
                    AND bets.match_status != matches.match_status
                '''
            cursor.execute(sql)
        connection.commit()
    finally:
        connection.close()
def read_query(query, values, con = None, close_con = True):
    if con is None:
        con = config.get_connection()
        if con is None:
            # Error occurred
            return []

    cursor = con.cursor()
    cursor.execute(query, values)
    results = cursor.fetchall()

    if close_con:
        con.close()
    return results
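# Minimal usage sketch of the read_query helper defined above; the analytics
# table and the "requests" row mirror queries issued elsewhere in this file.
rows = read_query("SELECT value_one FROM analytics WHERE name=%s;", ("requests",))
for (value_one,) in rows:
    print(value_one)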
    def __init__(self, username: str, start_get_post_thread=True):
        self.username = username
        self.account = Account(username, blockchain_instance=Hive())

        mysql_con = config.get_connection()
        if mysql_con is None:
            print(
                "[INFO] Can't start Latest Post Manager because of an mysql database error!"
            )
            return

        result = database.read_query(
            "SELECT * FROM profiler WHERE username=%s;", (username, ),
            con=mysql_con,
            close_con=False)
        if len(result) == 0:
            # No profiler exists, create one
            self.category = [0 for i in config.CATEGORIES]
            self.data_length = 0
            self.finished = False
            result = database.commit_query(
                "INSERT INTO profiler(username, category, length, timestamp, finished) VALUES (%s, %s, %s, %s, %s);",
                (username, ' '.join(map(str, self.category)), self.data_length,
                 datetime.utcnow().strftime("%d.%m.%YT%H:%M:%S"), False),
                con=mysql_con,
                close_con=False)
            if result <= 0:
                # Error
                print("[WARNING] Can't add Profiler for " + username)
            else:
                # Start the analyze thread; if the profiler already existed, this thread is already running.
                self.analyze_thread = Thread(target=self.analyze_activity)
                self.analyze_thread.name = "Analyze Activities from " + username
                self.analyze_thread.daemon = True
                self.analyze_thread.start()
        else:
            # Load existing profiler
            self.update_timestamp()

            self.category = [float(x) for x in result[0][1].split(' ')]
            self.data_length = result[0][2]
            self.finished = "1" in result[0][4]

        mysql_con.close()
        # Start finder thread
        self.find_posts_thread = Thread(target=self.find_interestings)
        self.find_posts_thread.name = "Find interesting Posts for " + username
        self.find_posts_thread.daemon = True
        if start_get_post_thread:
            self.find_posts_thread.start()
def commit_query(query, values, con = None, close_con = True):
    # For DELETE queries, run "SET SQL_SAFE_UPDATES = 0;" first
    if con is None:
        con = config.get_connection()
        if con is None:
            # Error occurred
            return -1

    cursor = con.cursor()
    cursor.execute(query, values)
    con.commit()

    if close_con:
        con.close()

    return cursor.rowcount
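# Minimal usage sketch of the commit_query helper defined above; it mirrors
# the analytics updates made elsewhere in this file.
affected = commit_query("UPDATE analytics SET value_one=%s WHERE name=%s;", (0, "requests"))
if affected <= 0:
    print("[WARNING] Nothing was updated")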
Example #16
def mysql_check_status_and_result():
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            sql = '''SELECT *
                    FROM bets
                    WHERE match_status = 2 
                    AND result_of_bet = 1
                    '''
            return cursor.execute(sql) != 0
    finally:
        connection.commit()
        connection.close()
def get_interesting_posts(request):
    username = request.GET.get('username', None)
    if username is None:
        # Error (No Username is given) --> return Error.js
        return render(
            request,
            'error.js',
            context={"info": "Please enter a 'username' and use GET"},
            content_type="application/x-javascript")

    con = config.get_connection()
    posts = database.read_query(
        "SELECT * FROM interesting_posts WHERE username=%s;", (username, ),
        con=con,
        close_con=False)
    database.commit_query(
        "INSERT INTO tasks(name, timestamp, parameter_one, parameter_two) VALUES (%s, %s, %s, %s);",
        ("profiler", datetime.utcnow().strftime("%d.%m.%YT%H:%M:%S"), username,
         ""),
        con=con,
        close_con=False)

    database.commit_query("SET SQL_SAFE_UPDATES = 0;", (),
                          con=con,
                          close_con=False)

    obj = ""
    length = 0
    for _, author, permlink in posts:
        obj += f"{author}/{permlink};"

        database.commit_query(
            "DELETE FROM interesting_posts WHERE username=%s AND author=%s AND permlink=%s;",
            (username, author, permlink),
            con=con,
            close_con=False)

        length += 1
        if length >= 3:
            # Return only 3
            break

    return render(request,
                  'get_interesting_posts.js',
                  context={"posts": obj[:-1]},
                  content_type="application/x-javascript")
    def __init__(self):
        self.mysql_con = config.get_connection()
        if self.mysql_con is None:
            print(
                "[INFO] Can't start Latest Post Manager because of an mysql database error!"
            )
            return
        self.mysql_cursor = self.mysql_con.cursor()
        self.query = "INSERT INTO latest_posts (author, permlink, category, timestamp) VALUES (%s, %s, %s, %s);"

        self.chain = Blockchain(
            blockchain_instance=Hive())  #node=conf.HIVE_NODES[5]

        self.run_thread = Thread(target=self.run)
        self.run_thread.name = 'Get & Categorize Posts'
        self.run_thread.daemon = True
        self.run_thread.start()
Example #19
def run():
    con = config.get_connection()
    while 1:
        start_time = time.time()
        request_count = len(REMOTE_ADDRS)
        connection_count = len(list(dict.fromkeys(REMOTE_ADDRS)))
        REMOTE_ADDRS.clear()


        # Update
        database.commit_query("UPDATE analytics SET value_one=%s WHERE name=%s", 
                                (request_count, "requests"), con=con, close_con=False)
        database.commit_query("UPDATE analytics SET value_one=%s WHERE name=%s", 
                                (connection_count, "connections"), con=con, close_con=False)

        # sleep for the remainder of one second, accounting for the time spent updating
        time.sleep(max(0.0, 1.0 - (time.time() - start_time)))
def latest_post_count_manager():
    con = config.get_connection()
    cursor = con.cursor()
    while 1:            
        cursor.execute("SELECT COUNT(*) FROM latest_posts")
        result = cursor.fetchall() # return [(COUNT,)]

        if len(result) == 0:
            # Error occurred
            print("[WARNING] Error while trying to get count of latest_posts")
            time.sleep(20)
            continue

        # update the cached count of latest_posts
        count = result[0][0]
        config.statics.LATEST_POSTS_START_LIMIT = count

        # wait
        time.sleep(30)
def get_interesting_posts():
    ''' Ajax Call: get 3 interesting posts '''
    analytics.REMOTE_ADDRS.append(request.remote_addr)
    if "username" not in request.json:
        # Return error json, if no username is given
        return jsonify({
            "status": "failed",
            "code": 1,
            "message": "No username is given"
        })
    username = request.json["username"]

    # Get 3 posts
    LIMIT = 3
    con = config.get_connection()
    posts = database.read_query(
        "SELECT * FROM interesting_posts WHERE username=%s LIMIT %s;",
        (username, LIMIT),
        con=con,
        close_con=False)

    # Prepare and Delete them
    database.commit_query("SET SQL_SAFE_UPDATES = 0;", (),
                          con=con,
                          close_con=False)
    for index, (_, author, permlink) in enumerate(posts):
        posts[index] = {"author": author, "permlink": permlink}

        database.commit_query(
            "DELETE FROM interesting_posts WHERE username=%s AND author=%s AND permlink=%s;",
            (username, author, permlink),
            con=con,
            close_con=False)

    # Start profiler
    database.commit_query(
        "INSERT INTO tasks(name, timestamp, parameter_one, parameter_two) VALUES (%s, %s, %s, %s);",
        ("profiler", datetime.utcnow().strftime("%d.%m.%YT%H:%M:%S"), username,
         ""),
        con=con,
        close_con=False)

    return jsonify({"status": "succes", "posts": posts})
Example #22
def mysql_get_main_id(telegram_id, date):
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            temp = []
            sql = '''SELECT main_id
                            FROM bets
                            WHERE bet_date = %s AND
                            telegram_id = %s AND
                            tokens = %s AND
                            bet_status = %s AND
                            match_status = %s
                            '''
            cursor.execute(sql, (date, telegram_id, -1, 1, 1))
            for row in cursor:
                temp.append(row["main_id"])
            return temp
    finally:
        connection.commit()
        connection.close()
Example #23
def mysql_check_initial_bets(match_id, match_result, date):
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            sql = '''SELECT *
                FROM bets
                WHERE telegram_id = %s AND
                match_id = %s AND
                match_bet_result = %s AND
                tokens = %s AND
                bet_date = %s
                '''
            return cursor.execute(sql, (398000427, match_id, match_result, 50, date)) != 0
    finally:
        connection.commit()
        connection.close()
Example #24
def mysql_insert_initial_bets(match_id, result, date):
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            sql = '''INSERT INTO bets(
                telegram_id,
                match_id,
                match_bet_result,
                coefficient,
                tokens,
                match_status,
                bet_status,
                bet_date)
                VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
                '''
            cursor.execute(sql,
                           (398000427, match_id, result, 1.5, 50, 1, 2, date))
        connection.commit()
    finally:
        connection.close()
Example #25
def mysql_get_tokens(match_id, match_bet_result, date):
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            temp = 0
            sql = '''SELECT tokens
                        FROM bets
                        WHERE bet_date = %s AND
                        match_id = %s AND
                        match_bet_result = %s AND
                        bet_status = %s
                        '''
            cursor.execute(sql, (date, match_id, match_bet_result, 2))
            for row in cursor:
                temp += row['tokens']
            return temp
    finally:
        connection.commit()
        connection.close()
def get_analytics():
    ''' AJAX CALL: get analytics'''
    con = config.get_connection()
    context = {}
    context["status"] = "succes"

    # Get tasks_running
    element = database.read_query(
        "SELECT value_one FROM analytics WHERE name=%s;", ("tasks_running", ),
        con=con,
        close_con=False)[0]  # [(value, )]
    context["tasks_running"] = int(element[0])

    # Get connections
    element = database.read_query(
        "SELECT value_one FROM analytics WHERE name=%s;", ("connections", ),
        con=con,
        close_con=False)[0]  # [(value, )]
    context["connections"] = int(element[0])

    # Get requests
    element = database.read_query(
        "SELECT value_one FROM analytics WHERE name=%s;", ("requests", ),
        con=con,
        close_con=False)[0]  # [(value, )]
    context["requests"] = int(element[0])

    # Get latest_post count
    result = database.read_query("SELECT COUNT(*) FROM latest_posts", (),
                                 con=con,
                                 close_con=False)  # return [(COUNT,)]
    context["count_latest_posts"] = result[0][0]

    # Get profiler count
    result = database.read_query("SELECT COUNT(*) FROM profiler", (),
                                 con=con,
                                 close_con=False)  # return [(COUNT,)]
    context["count_accounts"] = result[0][0]

    con.close()
    return jsonify(context)
Example #27
def reset_analytics():
    con = config.get_connection()

    # Delete all
    database.commit_query("SET SQL_SAFE_UPDATES = 0;", (), con=con, close_con=False)
    database.commit_query("DELETE FROM analytics", (), con=con, close_con=False)

    # tasks_running (currently)
    database.commit_query("INSERT INTO analytics(name, value_one, value_two, value_three) VALUES (%s, %s, %s, %s)",
                             ("tasks_running", 0, None, None), con=con, close_con=False)
    
    # connections (per second)
    database.commit_query("INSERT INTO analytics(name, value_one, value_two, value_three) VALUES (%s, %s, %s, %s)",
                             ("connections", 0, None, None), con=con, close_con=False)

    # requests (per second)
    database.commit_query("INSERT INTO analytics(name, value_one, value_two, value_three) VALUES (%s, %s, %s, %s)",
                             ("requests", 0, None, None), con=con, close_con=False)

    con.close()
    print("[INFO] Analysing is running")
Example #28
def main():

    # 2. mongodb authenticate
    conn = config.get_connection()
    db = conn["admin"]
    db.authenticate("root","password")

    # 3. make condition to get filtered logs
    now = datetime.datetime.utcnow()
    start = now - datetime.timedelta(days=0, hours=2, minutes=30)
    condition = {"time":{"$gte":start}}

    # 4. define keywords plugins to parse logs
    keywords = ['nginx']

    # 5. parse collections and report
    collections = []
    collections.append(conn["host1"]["nginx_error"])
    collections.append(conn["host2"]["nginx_error"])
    for coll in collections:
        parse(coll, condition, keywords)
    report(keywords)
def main():

    # 2. mongodb authenticate
    conn = config.get_connection()
    # authenticate
    #db = conn["admin"]
    #db.authenticate(config.MONGO_CONFIG['user'], config.MONGO_CONFIG['pswd'])

    # 3. make condition to get filtered logs
    now = datetime.datetime.utcnow()
    start = now - datetime.timedelta(days=0, hours=2, minutes=30)
    condition = {"time":{"$gte":start}}

    # 4. define keywords plugins to parse logs
    keywords = ['new']

    # 5. parse collections and report
    collections = []
    collections.append(conn["host1"]["nginx_access"])
    collections.append(conn["host2"]["nginx_access"])
    parse(collections, condition, keywords)
    report(keywords)
    def get(self):

        conn = config.get_connection()
        cursor = conn.cursor()
        
        authenticateUser = str(users.get_current_user()) 
        
        cursor.execute("SELECT * FROM person WHERE google_user_id = %s", (authenticateUser))        
        yourprofile = cursor.fetchall()     
        
        cursor.execute("SELECT * FROM person")   
                      
        yourteam = cursor.fetchall()
             
        conn.close()
               
        template_values = {'yourprofile': yourprofile, 'yourteam': yourteam}
        template = jinja2_env.get_template('collaborate.html')
        self.response.out.write(template.render(template_values))
 
        
            
    def get(self):
        
        authenticateUser = str(users.get_current_user()) 
        featureList = database.gaeSessionNavBuilder()
        processmenu = database.gaeSessionProcessMenu()
        ddb_active_case = database.gaeSessionActiveCase()
        
        conn = config.get_connection()
        cursor = conn.cursor()    
        '''
        cursor.execute("SELECT case_id, case_nm FROM proc_case WHERE status = 1 AND emp_id =%s", (authenticateUser))
        ddb_active_case = cursor.fetchall()
        '''
        cursor.execute("SELECT * FROM capability.vw_proc_run_sum WHERE proc_step_conf is null AND emp_id = %s", (authenticateUser))
        openoperations = cursor.fetchall()
        
        conn.close()
        tabindex = 2

        template_values = {'ddb_active_case': ddb_active_case, 'processmenu': processmenu, 'authenticateUser': authenticateUser, 
                           'openoperations': openoperations, 'featureList': featureList, 'tabindex': tabindex}
        template = jinja2_env.get_template('operateprocess.html')
        self.response.out.write(template.render(template_values))
    def post(self): # post to DB
        authenticateUser = str(users.get_current_user())
        idGenerator = config.IDGenerator()  # generates a unique key
        case_key = str(idGenerator) + authenticateUser
        now = config.UTCTime()
        featureList = database.gaeSessionNavBuilder()
        processmenu = database.gaeSessionProcessMenu()
        ddb_active_case = database.gaeSessionActiveCase()

        client = memcache.Client()
        client.set('case_key', case_key, 6000)
        
        conn = config.get_connection()
        cursor = conn.cursor()

        #create an unique instance key
        cursor.execute('INSERT INTO instance (case_id, proc_step_id, instance_key) '
                       'VALUES (%s, %s, %s)',
                       (
                        self.request.get('case_id'),
                        self.request.get('proc_step_id'),
                        (case_key)
                       ))
        
        conn.commit()
        
        cursor.execute("SELECT proc_case.case_id, proc_case.emp_id, instance.instance_key, proc_req.proc_req_id, process_step.proc_step_id, process.proc_id "
                       "FROM proc_case "
                       "INNER JOIN instance on (proc_case.case_id = instance.case_id) "
                       "INNER JOIN process_step on (instance.proc_step_id = process_step.proc_step_id) "
                       "INNER JOIN proc_req on (process_step.proc_step_id = proc_req.proc_step_id) "
                       "INNER JOIN process on (process_step.proc_id = process.proc_id)"
                       "WHERE instance.instance_key = %s", (case_key))
        caseMake = cursor.fetchall()


        for row in caseMake:
            cursor.execute("INSERT INTO proc_run (case_id, emp_id, instance_key, proc_req_id, proc_step_id, proc_id) VALUES (%s, %s, %s, %s, %s, %s)", row)
        conn.commit()

        cursor.execute("SELECT proc_run.proc_run_id, proc_run.case_id, proc_run.emp_id, proc_run.instance_key, proc_run.proc_req_id, proc_run.proc_step_id, "
                       "process.proc_id, proc_case.case_nm, process.proc_nm, process_step.proc_step_nm, process_step.proc_step_sop, proc_run.proc_output_conf, "
                       "proc_req.proc_req_seq, proc_req.proc_req_nm, proc_req.proc_req_desc, process_step.proc_model_link "
                       "FROM proc_run "
                       "INNER JOIN proc_case on (proc_run.case_id = proc_case.case_id) "
                       "INNER JOIN process on (proc_run.proc_id = process.proc_id) "
                       "INNER JOIN process_step on (proc_run.proc_step_id = process_step.proc_step_id) "
                       "INNER JOIN proc_req on (proc_run.proc_req_id = proc_req.proc_req_id) "
                       "INNER JOIN instance on (proc_run.instance_key = instance.instance_key) "
                       "WHERE instance.instance_key = %s", (case_key))
                        
        tabindex = 3                
        case = cursor.fetchall()
        
        cursor.execute("SELECT * FROM capability.vw_proc_run_sum WHERE proc_step_conf is null AND emp_id = %s", (authenticateUser))
        openoperations = cursor.fetchall()
        
        conn.close()

        template_values = {'authenticateUser': authenticateUser, 'case': case, 'case_key': case_key, 'processmenu': processmenu, 'featureList': featureList,
                           'ddb_active_case': ddb_active_case, 'tabindex': tabindex, 'openoperations': openoperations }
        template = jinja2_env.get_template('operateprocess.html')
        self.response.out.write(template.render(template_values))
        self.response.out.write(case_key)
Example #33
 sql2a = """
 SELECT RECORD_ID, LAST_CORP_HISTORY_ID, SYSTEM_TYPE_CD, LAST_EVENT_DATE, CORP_NUM, CORP_STATE, CORP_TYPE, ENTRY_DATE
 FROM CORP_AUDIT_LOG WHERE CORP_NUM = %s;
 """
 corp_recs = get_db_sql("event_processor", sql2a,
                        (inbound_rec['corp_num'], ))
 if 0 == len(corp_recs):
     # if not, add it
     sql2b = """
     INSERT INTO CORP_AUDIT_LOG 
     (LAST_CORP_HISTORY_ID, SYSTEM_TYPE_CD, LAST_EVENT_DATE, CORP_NUM, CORP_STATE, CORP_TYPE, ENTRY_DATE)
     VALUES (%s, %s, %s, %s, %s, %s, %s) RETURNING RECORD_ID;
     """
     cur = None
     try:
         conn = get_connection("event_processor")
         cur = conn.cursor()
         cur.execute(sql2b, (
             inbound_rec['record_id'],
             inbound_rec['system_type_cd'],
             inbound_rec['last_event_date'],
             inbound_rec['corp_num'],
             inbound_rec['corp_state'],
             inbound_rec['corp_typ_cd'],
             datetime.datetime.now(),
         ))
         _record_id = cur.fetchone()[0]
         conn.commit()
         cur.close()
         cur = None
     except (Exception, psycopg2.DatabaseError) as error:
Example #34
is_BP = False
BP_time = datetime.today()
cur_BP = ''
while True:
    data = ser.readline().decode('utf-8')
    if is_BP and datetime.today() - BP_time > timedelta(seconds=30):
        is_BP = False
        route = cur_BP[30:36].strip()
        airline = cur_BP[36:39].strip()
        flight = cur_BP[39:44].strip()
        for char in flight:
            if char == '0':
                flight = flight[1:]
            else:
                break
        connection = config.get_connection()
        # the table name cannot be a bound parameter, so only the values are parameterized
        sql_query = 'INSERT INTO {} (route, airline, flight, shop) VALUES (%s, %s, %s, %s);'.format(config.table)
        try:
            cursor = connection.cursor()
            cursor.execute(sql_query, (route, airline, flight, config.shop))
            connection.commit()
        except Exception:
            print('Error:\n', traceback.format_exc())
        connection.close()
    if data and data.find('&n') != -1 and data.find('&fp') != -1:
        if is_BP:
            index_start = data.find('&fp')
            index_end = data.find('&n')
            fn = data[index_start+4:index_end]
            route = cur_BP[30:36].strip()
Example #35
        'INBOX/train/not-important',
        'INBOX/test/not-important',
    ]
    for folder in folders:
        con.select(folder)
        for (uid, msg, flags) in get_messages(con, folder, 'ALL'):
            result = con.store(uid, "+FLAGS", '\\Deleted')
            log.info('{} {}'.format(uid, result))
    con.expunge()


if __name__ == "__main__":
    folders = ['INBOX']
    unknown_words = []

    con = get_connection()
    clean(con)

    for folder in folders:
        log.info(folder)
        con.select(folder, readonly=True)
        for (uid, msg, flags) in get_messages(con, folder, 'ALL'):
            log.info("%s %s", uid, flags)
            label = get_label(flags)
            if label is None:
                continue
            mode = 'test' if (int(uid) % 10) >= 8 else 'train'

            result = con.copy(
                uid, 'INBOX/{0}/{1}'.format(
                    mode, 'important' if label else 'not-important'))
    def get(self):

        conn = config.get_connection()
        cursor = conn.cursor()
        
        authenticateUser = str(users.get_current_user()) 
        featureList = database.gaeSessionNavBuilder()
        
        cursor.execute("SELECT proc_id, proc_nm, proc_step_id, proc_step_nm, proc_seq, case_id, case_nm, instance_key, emp_id, "
                       "ROUND(SUM(proc_step_conf)/COUNT(proc_step_id)*100) AS conf_summary, SUM(proc_step_conf) AS proc_success, COUNT(proc_step_id) AS proc_step_total "
                       "FROM vw_proc_run_sum "
                       "WHERE emp_id = %s "
                       "GROUP BY proc_step_id, proc_id, case_id "
                       "ORDER BY proc_nm, case_nm, proc_seq", (authenticateUser))        

        summary = cursor.fetchall()     
        summary1 = summary[1:1]  #2:3 specifies the row, not the column
        summary1 = summary1[1:4]
        
        cursor.execute("SELECT proc_id, proc_nm, proc_step_id, proc_step_nm, proc_seq, case_id, case_nm, instance_key, emp_id, "
                       "ROUND(SUM(proc_step_conf)/COUNT(proc_step_id)*100) AS conf_summary, SUM(proc_step_conf) AS proc_success, COUNT(proc_step_id) AS proc_step_total "
                       "FROM vw_proc_run_sum "
                       "WHERE emp_id = %s "
                       "GROUP BY proc_step_id "
                       "ORDER BY proc_nm, proc_seq, case_nm", (authenticateUser))     
                      
        sqlMeasurebyPerson = cursor.fetchall()
        
        cursor.execute("SELECT process.proc_nm, process_step.proc_step_nm, proc_case.case_nm, proc_run_start_tm, "
                       "proc_conseq, proc_innovation, proc_run.emp_id "
                       "FROM proc_run "
                       "INNER JOIN proc_case ON (proc_run.case_id = proc_case.case_id) "      
                       "INNER JOIN process_step ON (proc_run.proc_step_id = process_step.proc_step_id) "
                       "INNER JOIN process ON (proc_run.proc_id = process.proc_id) "
                       "WHERE (proc_conseq != ' ' OR not null OR proc_innovation != ' ' OR not null) AND proc_run.emp_id = %s "
                       "ORDER BY process.proc_id, process_step.proc_step_id", (authenticateUser))                     
        innovations = cursor.fetchall()
        

        cursor = conn.cursor()
        cursor.execute("SELECT process.proc_nm, process_step.proc_step_nm, proc_req.proc_req_nm, SUM(proc_run.proc_output_conf)/COUNT(*), COUNT(proc_run.proc_output_conf) "
                       "FROM proc_run "
                       "INNER JOIN proc_req ON (proc_run.proc_req_id = proc_req.proc_req_id) "
                       "INNER JOIN process_step ON (proc_req.proc_step_id = process_step.proc_step_id) "
                       "INNER JOIN process ON (process_step.proc_id = process.proc_id) "
                       "GROUP BY process_step.proc_step_id, proc_req.proc_req_nm "
                       "ORDER BY process.proc_id, process_step.proc_seq") 
        processSummary = cursor.fetchall()
        
        cursor.execute("SELECT process.proc_nm, process_step.proc_step_nm, proc_case.case_nm, proc_run_start_tm, "
                       "proc_notes, proc_conseq, proc_innovation, proc_run.emp_id "
                       "FROM proc_run "
                       "INNER JOIN proc_case ON (proc_run.case_id = proc_case.case_id) "      
                       "INNER JOIN process_step ON (proc_run.proc_step_id = process_step.proc_step_id) "
                       "INNER JOIN process ON (proc_run.proc_id = process.proc_id) "
                       "WHERE proc_run.emp_id = %s AND (proc_notes != ' ' OR not null) "
                       "ORDER BY process.proc_id, process_step.proc_step_id", (authenticateUser))                     
        notes = cursor.fetchall()
             
        conn.close()
        #query = ("SELECT * from person WHERE google_user_id ='" + str(authenticateUser) + "'")
        query = "SELECT * from person WHERE google_user_id = "
        condition1 = authenticateUser
        summary8 = database.query(query, condition1)
               
        template_values = {'summary': summary, 'sqlMeasurebyPerson' : sqlMeasurebyPerson, 'summary1': summary1, 'innovations': innovations, 
                           'authenticateUser': authenticateUser, 'processSummary': processSummary, 'notes': notes, 
                           'summary8': summary8, 'featureList': featureList}
        template = jinja2_env.get_template('measureperformance.html')
        self.response.out.write(template.render(template_values))
    def find_interestings(self):
        mysql_con = config.get_connection()

        while self.data_length < config.PROFILER_MIN_DATA:
            # Wait until enough data is available
            time.sleep(0.2)

        percentages, top_cats = [], []
        last_data_len = -1
        while 1:
            # 0 Step: Check if post limit is reached
            interesting_posts = database.read_query(
                "SELECT * FROM interesting_posts WHERE username=%s",
                (self.username, ),
                con=mysql_con,
                close_con=False)
            if len(interesting_posts) >= config.MAX_INTERSTING_POSTS:
                break

            # 1 Step: Calc percentages and top cats
            if len(percentages) == 0 or last_data_len != self.data_length:
                # If no percentages are calculated yet or data_length increased
                last_data_len = self.data_length

                percentages = helper.calc_percentages(self.category)
                top_cats = helper.get_top_elements(percentages, 10)

            # 2 Step: get random Posts
            offset = random.randint(0,
                                    config.statics.LATEST_POSTS_START_LIMIT -
                                    50)  # random offset
            posts = database.read_query("SELECT * FROM latest_posts LIMIT " +
                                        str(offset) + ", 50;", (),
                                        con=mysql_con,
                                        close_con=False)

            # 3 Step: prepare posts to compare
            post_percentages = []  # ([top_percentages], author, permlink)
            for author, permlink, category, timestamp in posts:
                category = [float(x) for x in category.split(' ')]

                # Calc percentages and top ones
                percs = helper.calc_percentages(category)
                top_pers = helper.get_top_elements(percs, 10)

                post_percentages.append((top_pers, author, permlink))

            # 4 Step: Compare
            for top_pers, author, permlink in post_percentages:
                score = 0
                diff = 0
                for value, index in top_cats:
                    for p_value, p_index in top_pers:
                        diff += abs(value - p_value)
                        if p_index == index:
                            # Same top Category
                            score += 1

                #if score >= 7:
                if diff <= 2:
                    exists = database.read_query(
                        "SELECT author FROM interesting_posts WHERE username=%s AND author=%s AND permlink=%s;",
                        (self.username, author, permlink),
                        con=mysql_con,
                        close_con=False)

                    if len(exists) > 0:
                        # Already listed --> Next one
                        continue

                    result = database.commit_query(
                        "INSERT INTO interesting_posts(username, author, permlink) VALUES (%s, %s, %s);",
                        (self.username, author, permlink),
                        con=mysql_con,
                        close_con=False)
                    if result < 1:
                        # Error
                        print("[WARNING] Can't insert an interesting post!")
                        time.sleep(5)

            self.update_timestamp()
Example #38
    """
    Defines the expected variable types for the body of the post(generate_clue) request
    """
    clue_word: str
    clue_count: int


class generate_guesses_body(BaseModel):
    """
    Defines the expected variable types for the body of the post(generate_guesses) request
    """
    guesses: list


root = "/"
db = config.get_connection()

#When starting the server, create the clientlist and gamelist objects for keeping track of the clients and the games
clientlist = TWIML_codenames_API_Server.Clientlist(db)
gamelist = TWIML_codenames_API_Server.Gamelist(clientlist)

app = FastAPI()  # called by uvicorn server_run:app


@app.get(root)
def get_player_status(player_id: int, player_key: int):
    """
    @params player_id, player_key : used for validating player identity

    @returns (bytes): the current status for the player containing info about active and ended games:
        info for active games includes:
Example #39
def task_manager():
    con = config.get_connection()
    while 1:
        # Update analytics
        database.commit_query("UPDATE analytics SET value_one=%s WHERE name=%s", 
                                (len(config.statics.task_list), "tasks_running"), con=con, close_con=False)

        while len(config.statics.task_list) >= config.MAX_TASK_THREADS:
            # Thread limit is reached --> Wait for completing
            time.sleep(0.5)

        # Get first available task
        tasks = database.read_query("SELECT * FROM tasks LIMIT 1;", (), con=con, close_con=False)

        if len(tasks) == 0:
            # If nothing is to do
            time.sleep(0.5)
            continue

        # Get first element and test if it is running
        name, timestamp, p_one, p_two = tasks[0]
        already_running = False
        for _name, _, _p_one, _p_two in config.statics.task_list:
            if name in _name and p_one in _p_one and p_two in _p_two:
                # already running
                already_running = True
                break

        # Delete element
        database.commit_query("SET SQL_SAFE_UPDATES = 0;", (), con=con, close_con=False)
        database.commit_query("DELETE FROM tasks WHERE name=%s AND parameter_one=%s AND parameter_two=%s;",
                                 (name, p_one, p_two), con=con, close_con=False)

        if already_running:
            # abort
            continue

        # Insert in list and Run
        config.statics.task_list.append(tasks[0])
        def run(task):
            name, timestamp, p_one, p_two = task

            if 'profiler' in name:
                p = Profiler(p_one, start_get_post_thread=False)
                p.find_interestings()

            if 'adjust' in name:
                p = Profiler(p_one, start_get_post_thread=False)
                p.adjust(categories_as_strings=p_two.split(','))
            
            if 'set_to_zero' in name:
                p = Profiler(p_one, start_get_post_thread=False)
                p.set_zero(category=p_two)  

            if 'delete_user' in name:
                database.commit_query("DELETE FROM profiler WHERE username=%s;",
                                 (p_one, ))  
                database.commit_query("DELETE FROM interesting_posts WHERE username=%s;",
                                 (p_one, ))           
                

            # delete task
            config.statics.task_list.remove(task)

        # Start thread
        t = Thread(target=run, args=(tasks[0], ))
        t.name = f"T - {name} ({str(p_one)};{str(p_two)})"
        t.daemon = True
        t.start()
Example #40
            ordered_items = sorted(preprocessed_text.items(),
                                   key=lambda i: i[1],
                                   reverse=True)[:100]
            self.finish({'data': ordered_items})
        except HTTPError as err:
            self.write_error(err.code)


def create_app(manager, crypto, client):
    return tornado.web.Application([
        (r"/api/words", WordsHandler),
    ],
                                   debug=True,
                                   manager=manager,
                                   crypto=crypto,
                                   client=client)


if __name__ == "__main__":
    enable_pretty_logging()
    loop = tornado.ioloop.IOLoop.current()
    words_manager = loop.run_sync(lambda: WordsManager.create(
        loop=loop.asyncio_loop, **get_connection()))
    application = create_app(words_manager, Crypto(), AsyncHTTPClient())
    application.listen(8888)
    try:
        loop.start()
    except KeyboardInterrupt:
        print('Stop engine')
        loop.run_sync(lambda: application.settings['manager'].stop())
Example #41
import numpy as np
import config
# import database
import pandas as pd
view_conn = config.get_connection(config.db_config)
date_1, date_2 = config.get_last_month_dates()


def get_report(*args):
    SqlStr = f"SELECT * FROM V_TROUBLE WHERE TROUBLE_DATE BETWEEN"\
             f"'{(args[1])}' AND '{(args[2])}'"\
             "AND PROCESS_DESC= 'SPUTTER' AND TROUBLE_DESC in "\
             f"{(args[3])}"\
             "ORDER BY TROUBLE_DATE"
    # HEADER CALL
    results = args[0].execute(SqlStr)
    df = args[0].result_to_dataframe(results)
    return df


def get_musconvo(*args):
    # SQL STR
    SqlStr = f"SELECT * FROM V_MU_RESULT WHERE PROD_DATE BETWEEN "\
             f"'{(args[1])}' AND '{(args[2])}'"\
             "AND ERROR_DESC = 'REGEN PM'"\
             "ORDER BY PROD_DATE"

    # HEADER CALL
    results = args[0].execute(SqlStr)
    df = args[0].result_to_dataframe(results)
    return df
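# Hypothetical call showing the positional-argument convention used by get_report
# above: args[0] is the view connection, args[1]/args[2] the date range, and
# args[3] a tuple of TROUBLE_DESC values (the values here are placeholders).
trouble_df = get_report(view_conn, date_1, date_2, ("ARC", "PARTICLE"))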