def UserOperation():

    global errorResult, errType
    if roleObject is None:
        errorResult = "Application Error Occurred"
        errType="Error"
        return redirect(url_for('Error'))
    canRole = processRole(7)
    if not canRole:
        errorResult = "You Don't Have Permission to Access User"
        errType="Error"
        return redirect(url_for('Error'))
    
    
    initialize()
    operation = request.args.get('operation')
    unqid = ""
    row = UserModel(0, "", "", 0)
    rolesDDList = []
    
    conn4 = pypyodbc.connect(connString, autocommit=True)
    cursor4 = conn4.cursor()
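    # Fetch every Role so the edit form can populate its role dropdown (rolesDDList)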
    sqlcmd4 = "SELECT * FROM Role"
    cursor4.execute(sqlcmd4)
    while True:
        roleDDrow = cursor4.fetchone()
        if not roleDDrow:
            break
        
        roleDDObj = RoleModel(roleDDrow[0], roleDDrow[1])
        rolesDDList.append(roleDDObj)
    
    if operation != "Create" :
        
        unqid = request.args.get('unqid').strip()
        conn2 = pypyodbc.connect(connString, autocommit=True)
        cursor = conn2.cursor()
        sqlcmd1 = "SELECT userID, userName, emailID, roleID FROM Users WHERE userID = '"+unqid+"'"
        cursor.execute(sqlcmd1)
        while True:
            dbrow = cursor.fetchone()
            if not dbrow:
                break
            conn3 = pypyodbc.connect(connString, autocommit=True)
            cursor3 = conn3.cursor()
            print("dbrow[3]", dbrow[3])
            temp = str(dbrow[3])
            sqlcmd3 = "SELECT * FROM Role WHERE roleID = '"+temp+"'"
            print("sqlcmd3", sqlcmd3)
            cursor3.execute(sqlcmd3)
            rolerow = cursor3.fetchone()
        
            roleObj = None
            if rolerow:
                roleObj = RoleModel(rolerow[0], rolerow[1])
           
            row = UserModel(dbrow[0], dbrow[1], dbrow[2], roleObj)
            
        
    return render_template('UserOperation.html', row = row, operation=operation, rolesDDList=rolesDDList )
Example #2
    def get_current_user(self):
        user_id = self.get_secure_cookie("tornado_blog_user")

        if not user_id: return None

        userModel = UserModel()
        user = userModel.getById(user_id)

        return user
Example #3
def add_account(stu_id, stu_password, unit_length, lessons, status=1):
    data = read_list()
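    # Refuse to create the account if this stu_id is already registered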
    for user in data["users"]:
        if user["stu_id"] == stu_id:
            print("error1")
            return False
    if not str(unit_length).isdigit() or int(unit_length) < -1:
        print("error2")
        return False
    stu = dict()
    stu["stu_id"] = stu_id
    stu["stu_password"] = stu_password
    stu["status"] = status
    stu["unit_length"] = int(unit_length) * 60
    stu["lessons"] = lessons
    info = get_account_info(
        UserModel(stu_id, stu_password, unit_length, lessons))
    if info == (-1, -1, -1):
        print("error3")
        return False
    stu["realname"] = info[0]
    stu["major"] = info[1]
    stu["class"] = info[2]
    data["users"].append(stu)
    backup()
    write_list(data)
    return True
Example #4
    def get(self):
        username = self.request.cookies.get('username')
        if Validate.is_user_logged_in(username):
            user = UserModel.get_by_id(long(username.split("|")[0]))
            self.render("myprofile.html", user=user)
        else:
            self.redirect("/login")
Example #5
    def format(palette):
        if palette is None or palette.timestamp is None:
            return {}

        like = LikeModel.query(ndb.AND(
            LikeModel.palette_id == str(palette.key.id()), 
            LikeModel.added_by == users.get_current_user()
        )).get()

        return {
            # 'id': palette.key.urlsafe(),
            'id': palette.key.id(),
            'image_id': palette.image_id,
            'color_primary': palette.color_primary,
            'color_secondary': palette.color_secondary,
            'color_accent': palette.color_accent,

            'user_like': LikeModel.format(like),
            
            'title': palette.title,
            'description': palette.description,
            'like_count': palette.like_count,
            'added_by': UserModel.format(palette.added_by),
            'timestamp': palette.timestamp.isoformat(),
            'updated': palette.updated.isoformat()
        }
Example #6
def index():
    if request.method == 'POST':
        user = request.form
        name = user['name']
        cur = mysql.connection.cursor()
        h = UserModel.post()
        mysql.connection.commit()
        return redirect('/class3')
    return render_template('class2.html')
Example #7
    def format(image):
        if image is None:
            return {}

        return {
            'id': image.key.id(),
            'added_by': UserModel.format(image.added_by),
            'timestamp': image.timestamp.isoformat()
        }
Example #8
    def post(self):
        email = self.get_argument('email')
        password = self.get_argument('password')
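        # Look up the user record by email, then verify the password with bcrypt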
        userModel = UserModel()
        user = userModel.getByEmail(email)
        print(user)

        if not user:
            self.render("login.html", data=None, error='Invalid email')
            return

        hashed_password = yield executor.submit(
            bcrypt.hashpw, tornado.escape.utf8(password),
            tornado.escape.utf8(user['password']))

        if user['password'] == hashed_password:
            self.set_secure_cookie("tornado_blog_user", str(user['id']))
            self.redirect("/dashboard")
        else:
            self.render("login.html", data=None, error='Incorrect password')
Example #9
    def format(like):
        if like is None or like.timestamp is None:
            return False

        return {
            'id': like.key.id(),
            'palette_id': like.palette_id,
            'added_by': UserModel.format(like.added_by),
            'timestamp': like.timestamp.isoformat()
        }
Example #10
    def post(self):
        username = self.request.get('username')
        password = self.request.get('password')
        query = "WHERE username = '%s'" % username
        user = UserModel.gql(query).get()
        if user and hashing_utils.is_same_password(password, user.password):
            self.set_cookies(user)
            self.redirect("/myprofile")
        else:
            self.render("login.html",
                        errors=["Wrong Username/Password combination."],
                        user=self.get_logged_in_user())
Example #11
    def post(self):
        errors = []
        username = self.request.get("username")
        password = self.request.get("password")
        verifypassword = self.request.get("verify")
        email = self.request.get("email")

        if not username or not Validate.valid_username(username):
            errors.append("Please enter a valid username.")

        if not password or not Validate.valid_password(password):
            errors.append("Please enter a valid Password.")

        if not verifypassword or not password or (password != verifypassword):
            errors.append("The entered passwords don't match.")

        if email and (not Validate.valid_email(email)):
            errors.append("Please enter a valid email.")

        # Try to add user to database
        if len(errors) == 0:
            query = "WHERE username = '******'" % username
            users_with_username = UserModel.gql(query).get()
            if users_with_username is not None:
                errors.append("Username Already Exists")
            else:
                user = UserModel(
                    username=username,
                    password=hashing_utils.hash_password(password),
                    email=email)
                user.put()
                self.set_cookies(user)
                self.redirect("/myprofile")

        self.render("signup.html",
                    errors=errors,
                    user=self.get_logged_in_user())
Example #12
    def search_user_by_id(self, id):
        try:
            sql = "SELECT * FROM usuarios WHERE usua_id = " + str(id)
            conn = Connection()
            cursor = conn.execute_sql(sql)
            data = cursor.fetchone()

            if data is not None:
                return UserModel(id=data[0], name=data[1], email=data[2], permission=data[4])
            return False
        except Exception as e:
            print(e)
            return 'ERRO'
        finally:
            conn.close_connection()
Example #13
    def verify_token(self, token):
        try:
            conn = Connection()
            cursor = conn.execute_sql("SELECT * FROM usuarios WHERE usua_token = '" + token + "'")
            if cursor.rowcount == 0:
                return False
            else:
                data = cursor.fetchone()
                return UserModel(id=str(data[0]), name=str(data[1]), email=str(data[2]), permission=str(data[4]),
                                 token=str(data[5]))
        except Exception as e:
            print(e)
            return 'ERRO'
        finally:
            conn.close_connection()
Example #14
def get_process_info(stu_id):
    data = read_list()
    for user in data["users"]:
        if user["stu_id"] == stu_id:
            user_obj = get_account_object(
                UserModel(user["stu_id"], user["stu_password"],
                          user["unit_length"], user["lessons"]))
            # Only query status information once the account object was created successfully
            if user_obj:
                statuses = user_obj.get_status_info()
                for status in statuses.values():
                    print("Lesson: %s" % status["lesson"])
                    print("Unit   Time logged   Time required")
                    for unit in status["units"]:
                        print(unit["id"], unit["start_time"],
                              max(user["unit_length"], unit["end_time"]))
                return True
    return False
Example #15
    def autenticate(self, email, password):
        conn = Connection()
        try:
            cursor = conn.execute_sql(
                "SELECT * FROM usuarios WHERE usua_email ='" + email + "' AND usua_senha = '" + password + "'")
            if cursor.rowcount == 0:
                return False
            else:
                data = cursor.fetchone()
                return UserModel(id=str(data[0]), name=str(data[1]), email=str(data[2]), permission=str(data[4]),
                                 token=str(data[5]))
                # return cursor.fetchone()[0]
        except Exception as e:
            print(e)
            return 'ERRO'
        finally:
            conn.close_connection()
Example #16
def listen(queue):
    vis = dict()
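    # 'vis' remembers which stu_ids have already been handed to the worker queue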
    while True:
        datas = read_list()
        for user in datas["users"]:
            if vis.get(user["stu_id"], None) and user["status"] == -2:
                user["status"] = -1
                backup()
                write_list(datas)
                queue.put(int(user["stu_id"]))

            if not vis.get(user["stu_id"], None) and user["status"] == 1:
                # print(user)
                vis[user["stu_id"]] = True
                user = UserModel(user["stu_id"], user["stu_password"],
                                 user["unit_length"], user["lessons"])
                queue.put(user)
        time.sleep(5)
Example #17
    def search_all_users(self):
        try:
            sql = "SELECT * FROM usuarios ORDER BY usua_nome"
            conn = Connection()
            cursor = conn.execute_sql(sql)

            if (cursor.rowcount == 0):
                return False

            listUsers = []
            for data in cursor.fetchall():
                userModel = UserModel(id=data[0], name=data[1], email=data[2], permission=data[4])
                listUsers.append(userModel)
            return listUsers
        except Exception as e:
            print(e)
            return 'ERRO'
        finally:
            conn.close_connection()
Example #18
def UserListing():

    global errorResult, errType
    if roleObject is None:
        errorResult = "Application Error Occurred"
        errType="Error"
        return redirect(url_for('Error'))
    canRole = processRole(7)
    if not canRole:
        errorResult = "You Don't Have Permission to Access User"
        errType="Error"
        return redirect(url_for('Error'))
    
    
    if timeControlObject.canUserA != "":
        todayA = datetime.today()
        
        
        ftime = time.strptime(timeControlObject.canUserATime["FromTime"], '%H:%M')
        ttime = time.strptime(timeControlObject.canUserATime["ToTime"], '%H:%M')
        
        todate = datetime(todayA.year, todayA.month, todayA.day, ttime.tm_hour, ttime.tm_min, 0)
        if ftime.tm_hour > ttime.tm_hour :
            todate +=  timedelta(days = 1)
            todate = todate.replace(hour=ttime.tm_hour)
            #todate = datetime.datetime(todate.year, todate.month, todate.day+1, ttime.tm_hour, ttime.tm_minute, 0)
        
        fromdate = datetime(todate.year, todate.month, todayA.day, ftime.tm_hour, ftime.tm_min, 0)
        if fromdate < todayA and todate > todayA:
            errorResult = "You Can't Access Between "+str(fromdate)+" and "+str(todate)
            errType="Error"
            return redirect(url_for('Error'))
        
        
    initialize()
    searchData = request.args.get('searchData')
    print(searchData)
    if searchData is None:
        searchData = ""
    conn = pypyodbc.connect(connString, autocommit=True)
    cursor = conn.cursor()
    # Parameterized LIKE query: avoids SQL injection via the 'searchData' query-string value
    sqlcmd1 = "SELECT userID, userName, emailID, roleID FROM Users WHERE UserName LIKE ?"
    cursor.execute(sqlcmd1, [searchData + "%"])
    records = []
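    # For every matching user, resolve its RoleModel and collect a UserModel row for the template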
    
    while True:
        dbrow = cursor.fetchone()
        if not dbrow:
            break
        
        conn3 = pypyodbc.connect(connString, autocommit=True)
        cursor3 = conn3.cursor()
        sqlcmd3 = "SELECT * FROM Role WHERE roleID = ?"
        cursor3.execute(sqlcmd3, [str(dbrow[3])])
        rolerow = cursor3.fetchone()
        
        roleObj = None
        if rolerow:
           roleObj = RoleModel(rolerow[0], rolerow[1])
        else:
           print("Role Row is Not Available")
        
        row = UserModel(dbrow[0],dbrow[1],dbrow[2],roleObj)
        
        records.append(row)
    cursor.close()
    conn.close()
    return render_template('UserListing.html', records=records, searchData=searchData)
Example #19
    def get_logged_in_user(self):
        username_cookie = self.request.cookies.get('username')
        user = None
        if Validate.is_user_logged_in(username_cookie):
            user = UserModel.get_by_id(long(username_cookie.split("|")[0]))
        return user
Example #20
from flask import Flask, render_template
from flask_mysqldb import MySQL
import MySQLdb.cursors
import re
from UserModel import UserModel
from util import MailClient
from analyzer import Predictor
app = Flask(__name__)
app.secret_key = 'a'

  
app.config['MYSQL_HOST'] = "remotemysql.com" #"localhost"#
app.config['MYSQL_USER'] = "******"#"username for remote"
app.config['MYSQL_PASSWORD'] = "******"#"password of the remote for remote"
app.config['MYSQL_DB'] = "lxtJiysGzR"
mysql = MySQL(app)
user_model = UserModel(mysql)
mailobj = MailClient()
predictor=Predictor(user_model)
@app.route('/')#app.route(rule,options)
def homer():
    
    predictor.loadData()
    predictor.buildModel()
    predictor.saveModel()
    #y=predictor.predict(10)
    #print("predicted value =",y)
    
    return render_template('home.html')
    
@app.route('/login',methods =['GET', 'POST'])
def login():
Example #21
def main():
    init_progname_from_code(4)  # work out what this tool is called

    # Set verbose logging if requested
    if args.verbose:
        init_logger(progname, logging.DEBUG)
    else:
        init_logger(progname)
    logger.info("{} started using '{}' on {}".format(
        progname, os.path.basename(args.configfile),
        strftime("%d-%m-%Y at %H:%M:%S"), gmtime()))

    # Validate and interpret the configuration file
    config = configparser.ConfigParser()
    try:
        config.optionxform = str  # preserve the capitalization of option names
        config.read(args.configfile)
        assert all(_ in config for _ in ['GENERAL', 'users'])
        assert 'name' in config['GENERAL']
    except AssertionError:
        terminate("Invalid configuration file invalid. Terminating...".format(
            args.configfile))
    # if we get here, the configuration file seems to be valid

    logger.debug("*" * 80)

    # Where to store the output?
    output_directory = os.path.join('.', 'output')
    if args.output:
        if os.path.isfile(args.output.rstrip(os.path.sep)):
            terminate(
                "Cannot create output directory '{}' because a file of the same name exists"
                .format(args.output))
        else:
            output_directory = args.output
    os.makedirs(output_directory, exist_ok=True)
    logger.info("Output files will be stored in {}".format(output_directory))

    # Check if we are supposed to use a particular random seed
    seed = int(round(time() * 1000))
    if 'seed' in config['GENERAL']:
        seed = config.getint('GENERAL', 'seed')
    elif args.seed is not None and int(args.seed) >= 0:
        seed = int(args.seed)
    random.seed(seed)
    logger.info(
        "Initializing the random number generator with seed {}".format(seed))

    # Get number of users and days (for activities where needed)
    days = config.getint('GENERAL',
                         'days') if 'days' in config['GENERAL'] else 1
    if args.days is not None and int(args.days) > 0:
        days = int(args.days)
    logger.info("Preparing to output {} day{} of synthetic data...".format(
        days, ('s' if days != 1 else '')))

    first_day = random.randint(0, 6)
    logger.info("Starting synthetic trace output on a {}...".format(
        Tools.weekdays[first_day]))

    logger.debug("*" * 80)

    # Read all required user models to process this configuration
    users = {}
    for u in config['users']:
        try:
            users[u] = UserModel(os.path.join('users', config.get('users', u)),
                                 days, first_day)
        except ValueError as v:
            terminate(str(v))
    # user models initialized

    logger.info("*" * 80)

    # Read all required input models to process this configuration
    appliances = {}
    mapping = None

    # Load basic appliance mapping file and overwrite individual entries if specified
    if args.mapping is not None:
        logger.info("Loading appliance-to-model mapping from file {}".format(
            args.mapping))
        try:
            config2 = configparser.ConfigParser()
            config2.optionxform = str  # preserve the capitalization of option names
            config2.read(args.mapping)
            assert 'devices' in config2.sections()
            mapping = config2['devices']
        except AssertionError:
            terminate("Mapping file {} invalid. Terminating...".format(
                args.mapping))

    if 'devices' in config.sections():
        if mapping is None:
            mapping = config['devices']
        else:
            for entry in config['devices']:
                logger.debug("Overriding model file for {}".format(entry))
                mapping[entry] = config['devices'].get(entry)

    if mapping is None:
        terminate(
            "No appliance-to-model mapping! Add [devices] to the configuration or use the -m option. Terminating..."
        )

    for appliance_key in mapping:
        try:
            am = ApplianceModel(
                appliance_key,
                os.path.join('appliances', mapping.get(appliance_key)), days)
        except ValueError as v:
            terminate(str(v))
        appliances[am.appliance_type] = am
        logger.debug(
            "ID {}: Appliance model for {} created successfully with {} components"
            .format(appliance_key, am.appliance_type, len(am.comps)))
    # required appliance models loaded

    logger.info("*" * 80)

    # Validate that required appliance models for all activities are available
    logger.info("Binding appliance models to activities...")
    for user in users.values():
        required_devices = user.get_required_devices()
        if len(required_devices) == 0:
            logger.warning("User '{}' does not operate any appliances!".format(
                user.name))
        for rd in required_devices:
            logger.debug(
                "Looking up a binding for appliance type '{}'...".format(rd))
            if rd in appliances.keys():
                user.bind_appliance_model(rd, appliances[rd])
            else:
                terminate(
                    "Cannot bind appliance type {} - no model available!".
                    format(rd))
    # Done checking if all required appliance models are present and binding them to activities

    logger.info("*" * 80)

    # Synthesize users, activities, and appliances together!
    start_time = time()
    duration = days * Tools.secs_per_day
    all_events = []
    all_powers = {'total': np.zeros([1, duration])}
    vary_runs = args.alternate is True
    logger.info("Synthesizing '{}' for {} samples".format(
        config.get('GENERAL', 'name'), duration))
    for uid, user in users.items():
        logger.info("Synthesizing data for user ID '{}' ({})".format(
            uid, user.name))
        user.synthesize(all_powers, all_events, first_day, vary_runs)
    logger.info("Synthesis completed in {:0.3f} seconds".format(time() -
                                                                start_time))
    # That's it. Easy, huh?

    logger.info("*" * 80)

    # Generate fake starting date
    start_day = str(20010101 + first_day)
    date_range = pd.date_range(
        start=start_day, freq='S',
        periods=days * Tools.secs_per_day).strftime('%Y-%m-%d %H:%M:%S')

    # create event log, (and dump it when verbose, just in case)
    df = pd.DataFrame()
    for line in all_events:
        entry = line.split(';')
        if len(entry) != 4:
            logger.warning("Invalid log entry: {}".format(line))
            continue
        tstp = date_range[int(entry[0])]
        tp = entry[1]
        src = entry[2]
        evt = entry[3]
        d = pd.Series([tstp, tp, src, evt],
                      index=['Time', 'Type', 'Source', 'Event'])
        df = df.append(d, ignore_index=True)
    df = df.sort_values(['Time', 'Event'],
                        ascending=(True,
                                   False))[['Time', 'Type', 'Source',
                                            'Event']].reset_index(drop=True)
    for idx, row in df.iterrows():
        logger.debug("Event {:3d}/{:3d}: {:>8s} [{:3s}] {:>30s} {:>5s}".format(
            1 + idx, len(all_events), row['Time'], row['Type'], row['Source'],
            row['Event']))

    for an, ap in appliances.items():
        all_powers[an] = ap.total_power

    # Add noise if configured
    ncfg = re.compile('[A-Z][0-9]+')
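    # Noise spec: one letter plus an amplitude in watts, e.g. "G50" (Gaussian) or "C10" (constant)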
    if args.noise is not None and ncfg.match(args.noise):
        if args.noise.startswith("G"):
            amplitude = abs(int(args.noise[1:]))
            all_powers['total'] += np.random.normal(
                amplitude, amplitude / 10, len(all_powers['total'][0]))
            threshold_indices = all_powers['total'][0] < 0
            all_powers['total'][0][threshold_indices] = 0
            noise_config = "Gaussian {}W".format(amplitude)
        elif args.noise.startswith("C"):
            amplitude = abs(int(args.noise[1:]))
            all_powers['total'] += amplitude
            noise_config = "Constant {}W".format(amplitude)
        else:
            noise_config = "invalid"
    else:
        noise_config = "none"

    logger.debug("*" * 80)

    # Write CSV data to files
    logger.info("Writing the resulting data to CSV via DataFrame...")
    file_counter = 0
    for k in all_powers.keys():
        outname = os.path.join(output_directory,
                               '{}.csv'.format(k.replace(' ', '_')))
        if os.path.isfile(outname) and args.overwrite is not True:
            logger.warning(
                "Output file {} exists! To overwrite it, use the -w flag.".
                format(outname))
        else:
            logger.info("Writing load signature of '{}' to {}".format(
                k, outname))
            dfr = pd.DataFrame(all_powers[k].T, date_range)
            dfr.to_csv(outname, header=None, float_format='%.1f', sep=";")
            file_counter += 1

    # write event log
    outname = os.path.join(output_directory, 'events.csv')
    if os.path.isfile(outname) and args.overwrite is not True:
        logger.warning(
            "Output file {} exists! To overwrite it, use the -w flag.".format(
                outname))
    else:
        logger.info("Writing event log to {}".format(outname))
        df.to_csv(outname,
                  columns=['Time', 'Source', 'Event'],
                  sep=";",
                  index=False)
        file_counter += 1
    logger.info("=> {:d} out of {:d} output files written!".format(
        file_counter, 1 + len(all_powers)))

    # Plot the data
    if args.plot is True:
        try:
            mpl_logger = logging.getLogger(
                'matplotlib')  # silence plot-related logging
            mpl_logger.setLevel(logging.WARNING)
            import matplotlib.pyplot as plt  # added only here to allow running ANTgen on headless machines

            logger.info("Plotting aggregate data...")
            plt.rcParams['toolbar'] = 'None'
            global fig
            fig = plt.figure(num=None,
                             figsize=(16, 9),
                             dpi=80,
                             facecolor='w',
                             edgecolor='k')
            plt.subplots_adjust(bottom=0.05,
                                top=0.96,
                                left=0.05,
                                right=0.99,
                                hspace=0.5)
            plt.tight_layout()

            ax = {}
            global lined
            lined = dict()
            for i in range(4):
                ax[i] = plt.subplot(411 + i)
                ax[i].set_xlim([0, days * Tools.secs_per_day])
                ax[i].set_xlabel("Time [hrs]")
                ax[i].set_ylabel("Power [W]")
                ax[i].title.set_size(9)
                tix = range(0, 1 + days * Tools.secs_per_day, 3600 * days)
                ax[i].set_xticks(tix)
                ax[i].set_xticklabels([int(t / 3600) for t in tix])
                lines = []

                if i == 0:
                    ax[i].set_title("Aggregate consumption")
                    p = all_powers['total']
                    line, = ax[i].plot(p.reshape(
                        (days * Tools.secs_per_day, )),
                                       label="total")

                elif i == 1:
                    ax[i].set_title("Per-user consumption")
                    for n in users.values():
                        p = all_powers[n.name]
                        p = p.reshape((days * Tools.secs_per_day, ))
                        if max(p) > 0:
                            line, = ax[i].plot(p, label=n.name)
                            lines.append(line)

                elif i == 2:
                    ax[i].set_title("Per-activity consumption")
                    acits = dict()
                    for u in users.values():
                        for a in u.activities.values():
                            if a.activity_type in acits.keys():
                                logger.debug(
                                    "NOTE: Multiple users execute activity '{}'!"
                                    .format(a.activity_type))
                            else:
                                acits[a.activity_type] = a

                    for a in acits.values():
                        p = all_powers[a.activity_type]
                        p = p.reshape((days * Tools.secs_per_day, ))
                        if max(p) > 0:
                            line, = ax[i].plot(p, label=a.activity_type)
                            lines.append(line)

                elif i == 3:
                    ax[i].set_title("Per-appliance consumption")
                    for n, a in appliances.items():
                        p = a.total_power
                        p = p.reshape((days * Tools.secs_per_day, ))
                        if max(p) > 0:
                            line, = ax[i].plot(p, label=a.appliance_type)
                            lines.append(line)

                leg = ax[i].legend(loc=2,
                                   fontsize=10 - int(len(ax[i].lines) / 4),
                                   ncol=1 + int(len(ax[i].lines) / 5))

                # add handles to diagram lines to 'lined' dict (for toggling)
                for legline, origline in zip(leg.get_lines(), lines):
                    legline.set_picker(5)  # 5 pts tolerance
                    lined[legline] = origline

            # allow toggling individual curves by clicking on the legend entry
            cid = fig.canvas.mpl_connect('pick_event', on_legend_click)

            logger.info("Done. Close plot window to terminate...")
            plt.show()
            # Waiting for the user to close the plot
        except ImportError as e:
            logger.warning(
                "Plotting failed. Are you sure you have matplotlib installed?")
            print(e)
    # (optional) plotting completed

    logger.info("*" * 80)

    concurrency = 0
    maximum_concurrency = 0
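    # Walk the ON/OFF event log to find the peak number of appliances running at the same time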
    for row in df['Event']:
        if 'ON' in row:
            concurrency = concurrency + 1
            if concurrency > maximum_concurrency:
                maximum_concurrency = concurrency
        elif 'OFF' in row:
            concurrency = concurrency - 1

    logger.info("Trace duration (days)  : {:14d}".format(days))
    logger.info("First weekday          : {:>14s}".format(
        Tools.weekdays[first_day]))
    logger.info("# active devices       : {:14d}".format(
        len(df['Source'].value_counts())))
    logger.info("# appliance operations : {:14d}".format(
        int(sum(df['Source'].value_counts()) / 2)))
    logger.info("-" * 39)
    for n, _ in appliances.items():
        logger.info("{:>16s} #runs : {:14d}".format(
            n, int(len(df[df.Source.eq(n)]) / 2)))
    logger.info("-" * 39)
    logger.info("Max. appl. concurrency : {:14d}".format(maximum_concurrency))
    logger.info("Random seed            : {:14d}".format(seed))
    logger.info("Added noise            : {:>14s}".format(noise_config))

    logger.info("*" * 80)

    logger.info("{} completed using '{}' on {}".format(
        progname, os.path.basename(args.configfile),
        strftime("%d-%m-%Y at %H:%M:%S"), gmtime()))
    logging.shutdown()