Example #1
0
def remove_cache():
    """Wipe the report cache and reset schedule flags in the controller DB.

    Backs up the proxy and power-meter databases first, recreates an empty
    cache directory, clears the Passed/Ready columns of every schedule row,
    then backs up the test-controller database. Returns "ok".
    """
    backup_db_proxy()
    backup_db_pm()

    # Recreate an empty cache directory.
    shutil.rmtree(CACHE_FOLDER)
    os.mkdir(CACHE_FOLDER)

    # The connection context manager commits the UPDATE on exit.
    with sqlite3.connect(DB_FILE_CONTROLLER) as conn:
        conn.execute("UPDATE schedule SET Passed = null, Ready = 0")
    conn.close()

    backup_db_tc()

    return "ok"
Example #2
0
def process_pc_msg(byte_data, addr, pc):
    """Dispatch a single UDP control message to the process controller.

    The first byte of ``byte_data`` is a command code (see ``PROC_MSG``);
    the remainder, when present, is a UTF-8/JSON payload. Replies are sent
    back to ``addr`` via ``pc.sock``. Mutates ``pc`` state (test_list,
    state, timers, subprocess handle) as a side effect.

    :param byte_data: raw datagram; byte_data[0] is the command code
    :param addr: sender address tuple, used as the reply destination
    :param pc: process-controller object holding sock/state/test_list/etc.
    """
    logging.debug("received data are: {}".format(byte_data))
    cmd = byte_data[0]
    if cmd == PROC_MSG["WB_POST_SEQUENCE"]:
        # Workbench posts a JSON list of schedules to append to the queue.
        try:
            schedules = json.loads(byte_data[1:].decode())
            add_time = time.time()
            devices = get_all_devices()
            error_msg = ""

            for schedule in schedules:
                logging.debug("schedule: {}".format(schedule))

                # Validate that the mandatory keys are present.
                for key in ['DevEui', 'Cat', 'SubCat', 'Criteria', 'Parameter']:
                    if key not in schedule:
                        error_msg += ", {} is not in the sequence {}".format(key, schedule)
                # NOTE(review): once error_msg is non-empty, every later
                # schedule is skipped too (the first failure poisons the
                # rest of the batch) — confirm this is intended.
                if error_msg != "":
                    continue

                if not is_deveui_under_test(schedule['DevEui'].lower(), devices):
                    error_msg += ", DevEui {} is not in the device list".format(schedule['DevEui'])
                    continue

                schedule["AddTime"] = add_time
                # NOTE(review): json.dumps only runs when "Config" was
                # missing, so a caller-supplied Config stays a dict while a
                # defaulted one becomes the string "{}" — looks like the
                # dumps was meant to be outside the if; confirm.
                if "Config" not in schedule:
                    schedule["Config"] = {}
                    schedule["Config"] = json.dumps(schedule["Config"])
                pc.test_list.append(schedule)
            pc.dump()
            if error_msg != "":
                error_msg = "Part of the sequence error" + error_msg
                pc.sock.sendto(error_msg.encode("utf-8"), addr)
            else:
                pc.sock.sendto("ok".encode("utf-8"), addr)
        except:
            # Bare except is the catch-all reply path: any parse/IO failure
            # answers "error" instead of crashing the message loop.
            pc.sock.sendto("error".encode("utf-8"), addr)
    elif cmd == PROC_MSG["WB_GET_SEQUENCE"]:
        # Workbench queries the current test queue.
        logging.debug("sending test_list")
        pc.sock.sendto(json.dumps(pc.test_list).encode(), addr)
    elif cmd == PROC_MSG["WB_DEL_SEQUENCE"]:
        # Payload is either "all" or a JSON list of {"rowid": n} entries.
        tmp = byte_data[1:].decode()
        logging.debug("delete config, tmp is {}".format(tmp))
        if tmp == "all":
            if len(pc.test_list) > 0:
                pc.test_list = []
                if pc.state != PC_STATE['IDLE']:
                    # A test is in flight: kill its subprocess before
                    # returning to the wait state.
                    if pc.process:
                        pc.process.terminate()
                        logging.debug("sending termination")
                        pc.process = None
                    pc.set_state_wait()
                pc.cancel_test_timeout_timer()
                pc.dump()
            pc.sock.sendto("ok".encode("utf-8"), addr)
        else:
            try:
                rows_js = json.loads(tmp)
                rows = [row['rowid'] for row in rows_js]
                # Pop from the highest index first so earlier pops do not
                # shift the remaining target indices.
                rows.sort(reverse=True)
                for row in rows:
                    if row < len(pc.test_list):
                        pc.test_list.pop(row)
                # Row 0 is the running test: deleting it also terminates
                # the active subprocess.
                if 0 in rows and pc.state != PC_STATE['IDLE']:
                    if pc.process:
                        pc.process.terminate()
                        pc.process = None
                    pc.cancel_test_timeout_timer()
                    pc.set_state_wait()
                pc.dump()
                pc.sock.sendto("ok".encode("utf-8"), addr)
            except:
                # Best-effort: malformed delete payloads answer "error".
                pc.sock.sendto("error".encode("utf-8"), addr)
    elif cmd == PROC_MSG["WB_QUERY_TEST_STATE"]:
        if not pc.test_list:
            pc.sock.sendto("No test running".encode("utf-8"), addr)
        else:
            pc.sock.sendto("Test is running".encode("utf-8"), addr)
    elif cmd == PROC_MSG["TC_SETUP_TEST"]:
        # Test controller announces a test start; payload is the schedule.
        schedule = json.loads(byte_data[1:].decode())
        if pc.state == PC_STATE['WAIT_RSP']:
            # Expected start: just record the actual start time.
            pc.test_list[0]["StartTime"] = schedule["StartTime"]
        else:
            # Test launched externally (pytest): adopt it at queue head.
            pc.start_logger(schedule['Cat']+"_"+schedule['SubCat'])
            logging.debug("Test started from pytest {}".format(schedule))
            pc.test_list.insert(0, schedule)
        pc.start_backup_pm_timer(schedule['TestInstID'])
        pc.state = PC_STATE['RUNNING']
        pc.dump()
        logging.debug("[controller] setup test received, schedule is {}".format(schedule))
    elif cmd == PROC_MSG["TC_TEARDOWN_TEST"]:
        # Test controller announces a test end: stop timers, pop the queue
        # head, back up the PM database and regenerate caches off-thread.
        logging.debug("[controller] teardown test received")
        schedule = json.loads(byte_data[1:].decode())
        pc.cancel_test_timeout_timer()
        if pc.state == PC_STATE['RUNNING']:
            pc.state = PC_STATE['IDLE']
            pc.test_list.pop(0)
            pc.dump()
            pc.process = None
        elif pc.state == PC_STATE['WAIT_RSP']:
            pc.state = PC_STATE['IDLE']
        # NOTE(review): backup_timer is cancelled unconditionally — if no
        # setup preceded this teardown it would be None and raise; confirm
        # the protocol guarantees pairing.
        pc.backup_timer.cancel()
        pc.backup_timer = None
        backup_db_pm(schedule['TestInstID'])
        pc.cache_thread = threading.Thread(target=generate_cache, args = (schedule, ))
        pc.cache_thread.start()
        if not pc.test_list:
            pc.start_logger() # switch to default log
Example #3
0
 def backup_handler(self, test_inst_id):
     """Snapshot the PM database for one test run, then re-arm the timer.

     Re-schedules itself every POWER_TB_BACKUP_INTERVAL seconds so the
     backup keeps running for the lifetime of the test instance.
     """
     timer = threading.Timer(
         POWER_TB_BACKUP_INTERVAL, self.backup_handler, args=(test_inst_id,)
     )
     self.backup_timer = timer
     timer.start()
     backup_db_pm(test_inst_id)
Example #4
0
def backup():
    """Back up the proxy, test-controller and power-meter databases."""
    for do_backup in (backup_db_proxy, backup_db_tc, backup_db_pm):
        do_backup()
    return "ok"
Example #5
0
def generate_power(start, duration, detail = True):
    """Build (or fetch from cache) an HTML current-consumption report.

    :param start: window start, UNIX timestamp (str or float)
    :param duration: window length in seconds (str or float)
    :param detail: True -> plot raw 1 Hz sample blobs ("value" column,
        packed little-endian int32 micro-amps); False -> plot per-interval
        average and max currents
    :returns: the HTML report (also written lzma-compressed to the cache),
        or a plain error / "No Data <br>" message

    Fixes over the previous version: the sqlite connection no longer leaks
    on the "No Data" early return (try/finally), max(time) is queried once
    instead of twice, and leftover debug prints / dead copies are removed.
    """
    fileName = CACHE_FOLDER + "/" + "current_" + str(start) + "_" + str(duration) + "_" + str(detail) + ".html_xz"

    # Serve a previously rendered report straight from the cache.
    if os.path.exists(fileName):
        with lzma.open(fileName, "r") as f:
            return f.read().decode()

    start = float(start)
    duration = float(duration)

    if detail and duration > 120:
        return "Duration too long for high sampling rate current profile"

    conn = sqlite3.connect(DB_FILE_PM, timeout=60)
    conn.row_factory = sqlite3.Row
    try:
        # If the DB has no data, or its newest sample does not cover the
        # requested window, refresh it from backup first.
        max_time = conn.execute('SELECT max(time) FROM power').fetchone()["max(time)"]
        if not max_time or float(max_time) <= start + duration + 1:
            backup_db_pm()

        # +/- 1 s padding so boundary samples are not dropped by the query.
        if detail:
            currents = conn.execute('SELECT time, value FROM power WHERE duration = 1 AND time >= (?) '
                                    'AND time < (?) ORDER BY time',
                                     (start-1, start+duration+1)).fetchall()
        elif duration > 86400:
            # Long windows use the coarser 60 s aggregation rows.
            currents = conn.execute('SELECT time, average, max FROM power WHERE duration = 60 '
                                    'AND time >= (?) AND time < (?) ORDER BY time',
                                     (start-1, start+duration+1)).fetchall()
        else:
            currents = conn.execute('SELECT time, average, max FROM power WHERE duration = 1 '
                                    'AND time >= (?) AND time < (?) ORDER BY time',
                                     (start-1, start+duration+1)).fetchall()
    finally:
        conn.close()

    x = []
    y = []
    y1 = []

    for current in currents:
        if detail:
            # "value" is a blob of little-endian signed int32 samples (uA);
            # convert to mA and spread them evenly across the 1 s row.
            l_y = [int.from_bytes(current["value"][i:i+4], byteorder='little', signed = True)/1000000.0
                   for i in range(0, len(current["value"]), 4)]
            l_x = np.linspace(current["time"], current["time"]+1, len(l_y))

            x += list(l_x)
            y += list(l_y)
        else:
            x.append(current["time"])
            y.append(current["average"]/1000000)
            y1.append(current["max"]/1000000)

    # Keep only the samples inside the requested window for plotting.
    plot_x = []
    plot_y = []
    plot_y1 = []
    for i in range(len(x)):
        if start <= x[i] <= start+duration:
            plot_x.append(datetime.fromtimestamp(x[i]))
            plot_y.append(y[i])
            if not detail:
                plot_y1.append(y1[i])

    if not plot_x:
        return "No Data <br>"

    html = ""

    # Time-series plot (average + max traces in aggregate mode).
    fig = plt.figure(figsize=(8.5, 3))
    ax = fig.add_subplot(111)
    ax.plot(plot_x, plot_y)
    if not detail:
        ax.plot(plot_x, plot_y1)
    ax.grid()
    ax.set_xlabel("time")
    ax.set_ylabel("current")
    plt.tight_layout()
    html += mpld3.fig_to_html(fig)
    plt.close(fig)

    # Histogram and CDF of the current samples.
    fig = plt.figure(figsize=(8.5, 3))
    ax = fig.add_subplot(121)
    ax.hist(plot_y, bins=32)
    ax.grid()
    ax.set_title("Histogram")
    ax.set_xlabel("current")

    ax = fig.add_subplot(122)
    ax.plot(sorted(plot_y), np.linspace(0, 1, len(plot_y)))
    if not detail:
        ax.plot(sorted(plot_y1), np.linspace(0, 1, len(plot_y1)))
    ax.grid()
    ax.set_title("CDF")
    ax.set_xlabel("current")
    ax.set_ylabel("Probability")
    plt.tight_layout()
    html += mpld3.fig_to_html(fig)
    plt.close(fig)

    # In detail mode the raw samples double as the peak source.
    peaks = plot_y if detail else plot_y1
    html += "Average Current: %f mA<br>" % np.mean(plot_y)
    html += "Peak Current: %f mA<br>" % np.max(peaks)
    # NOTE(review): the time span deliberately uses the unfiltered x
    # (includes the +/-1 s padding), matching the original computation.
    html += "Power Consumption: %f mAh<br>" % (np.mean(plot_y)*(np.max(x)-np.min(x))/3600.0)

    with lzma.open(fileName, "w") as f:
        f.write(html.encode())

    return html
Example #6
0
def run_backup():
    """Snapshot the power-meter, proxy and test-controller databases."""
    for backup_fn in (lib_db.backup_db_pm, lib_db.backup_db_proxy, lib_db.backup_db_tc):
        backup_fn()