Example #1
def filter_simple_wifi(params):
    params['subject'] = "Filter WIFI Graphic "
    email_response = utils.show_info(params) + "\n"
    path_real, database_name = utils.download_database(params['filename'], full_path = False)
    only_name = database_name[:database_name.rfind(".")]
    params["all_time"] = utils.get_datetime() #TIME
    params["a_func"] = utils.get_datetime() #TIME
    try:
        email_response += "Exporting data (WIFI) ..."
        export_csv ="filter_wifi%s" % (only_name+".csv")
        wifi_list_mac, wifi_list_name = utils.parse_wifi_list(params['wifi_list'])
        ExportWifi(wifi_list_mac,wifi_list_name,params['is_blacklist']).run(path_real+database_name,path_real+export_csv)
    except Exception as e:
        return utils.get_error(e, params)

    time_txt = utils.end_func("CSV conversion",params["a_func"])
    email_response += "OK\n"
    email_response += "\n CSV file = "+utils.create_link(params['KEY_IML'],str(path_real),str(export_csv))+"\n\n\n"
    email_response += time_txt

    try:
        email_response += "Building Graphic (Bluetooth) ..."
        path_graphic ="filter_wifi_%s" % (only_name+".pdf")
        WifiGraphic().run(path_real+export_csv,path_real+path_graphic)
    except Exception as e:
        return utils.get_error(e, params)

    time_txt = utils.end_func("Graphic creation",params["a_func"])
    email_response += "OK\n"
    email_response += "\n Graphic = "+utils.create_link(params['KEY_IML'],str(path_real),str(path_graphic))+"\n\n\n"
    email_response += time_txt

    email_response += utils.end_func("All process",params["all_time"])
    return utils.response_email(params['email'],params['subject'], email_response)
Example #2
def simple_sensorHub(params):
    params['subject'] = "SensorHub Graphic "
    email_response = ""
    path_real, database_name = utils.download_database(params['filename'], full_path = False)
    only_name = database_name[:database_name.rfind(".")]
    params["all_time"] = utils.get_datetime() #TIME
    params["a_func"] = utils.get_datetime() #TIME
    try:
        email_response += "Exporting data (Sensor Hub) ..."
        export_csv ="SensorHub_%s" % (only_name+".csv")
        ExportSensorHub().run(path_real+database_name, path_real+export_csv)
    except Exception as e:
        return utils.get_error(e, params)

    time_txt = utils.end_func("CSV conversion",params["a_func"])
    email_response += "OK\n"
    email_response += "\n CSV file = "+utils.create_link(params['KEY_IML'],str(path_real),str(export_csv))+"\n\n\n"
    email_response += time_txt

    try:
        email_response += "Building Graphic (Sensor Hub) ..."
        path_graphic ="sensor_hub_%s" % (only_name+".pdf")
        SensorHubGraphic().run(path_real+export_csv,path_real+path_graphic)
    except Exception as e:
        return utils.get_error(e, params)

    time_txt = utils.end_func("Graphic creation",params["a_func"])
    email_response += "OK\n"
    email_response += "\n Graphic = "+utils.create_link(params['KEY_IML'],str(path_real),str(path_graphic))+"\n\n\n"
    email_response += time_txt

    email_response += utils.end_func("All process",params["all_time"])
    return utils.response_email(params['email'],params['subject'], email_response)
Example #3
def continue_ML(params):
    params["email_response"] = utils.show_info(params) + "\n"
    params["only_database_name"] = params['filename'][:params['filename'].rfind(".")]
    params["subject"] = "Machine Learning - Results (Continue)"

    params["all_time"] = utils.get_datetime() #TIME
    if params['level_html'] == 1:

        params['csvfile'] = params['filename']
        df = E_pre_processing(params)
        E_clustering(df,params)
        E_graphic(params)

    elif params['level_html'] == 2:

        params['csvpreprocessing'] = params['filename']
        df = pd.read_csv(open(params['path_real']+params["csvpreprocessing"],"r"), sep=',', header=0, index_col=0)
        E_clustering(df,params)
        E_graphic(params)

    elif params['level_html'] == 3:

        params['csvcluster'] = params['filename']
        E_graphic(params)

    params["email_response"] += utils.end_func("All process",params["all_time"])
    print(params["email_response"])
    return utils.response_email(params['email'],params["subject"], params["email_response"])
Example #4
def edit_meeting(meeting_uuid: str):
    validate_meeting_uuid(meeting_uuid)

    check_json_data()
    owner_uuid = get_owner_uuid()
    name = get_meeting_name()
    description = get_meeting_description()
    stop_name = get_stop_name()
    dt = get_datetime()

    meeting = find_meeting(meeting_uuid)
    user = find_user(owner_uuid)
    must_be_meeting_owner(user, meeting)

    if name is not None:
        meeting.name = name
    if description is not None:
        meeting.description = description
    if dt is not None:
        meeting.datetime = dt
    if stop_name is not None:
        meeting.stop_name = stop_name
    db.session.commit()

    return '', 204
Example #5
def register():
    global back
    db = get_db()
    current_date = get_datetime()
    have_error = False
    username = request.form['username']
    password = request.form['password']
    verify = request.form['verify']
    user = query_db("SELECT * FROM users WHERE username = ?", [username], one=True)
    params = dict(username=username)

    if user:
        params['error_username'] = "******"
        have_error = True

    if not valid_username(username):
        params['error_username'] = "******"
        have_error = True

    if not valid_password(password):
        params['error_password'] = "******"
        have_error = True
    elif password != verify:
        params['error_verify'] = "Your passwords didn't match."
        have_error = True

    if have_error:
        return render_template('signup.html', **params)
    else:
        hpw = bcrypt.generate_password_hash(password)
        db.execute('INSERT INTO users (username, hpw, my_date) VALUES (?, ?, ?)',
                 [username, hpw, current_date])
        db.commit()
        session['username'] = username
        return redirect(back)
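
A login counterpart is not part of this listing; a minimal sketch, assuming the same query_db, bcrypt and session helpers used by register() (the login.html template name is hypothetical):

def login():
    username = request.form['username']
    password = request.form['password']
    user = query_db("SELECT * FROM users WHERE username = ?", [username], one=True)
    # check_password_hash re-hashes the candidate against the stored bcrypt hash.
    if user and bcrypt.check_password_hash(user['hpw'], password):
        session['username'] = username
        return redirect(back)
    return render_template('login.html', error="Invalid username or password.")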
Example #6
        def make_numeric_input_variable(historys, h,id_, now_price):

            def get_movement(historys, h, id_, now_price, arg, prev):
                
                now = get_datetime(id_)
                for p in range(h, min(h+6+arg*25, len(historys))):
                    t = historys[p][0]
                    if t <= prev:
                        return (float(now_price) - float(historys[p][6]))/float(historys[p][6])
                return 0

            week_move, month_move, quater_move, year_move =[],[],[],[]
            now_date = get_datetime(id_)
            now_date = datetime.datetime.strptime(now_date, '%Y-%m-%d')

            week_move = get_movement(historys, h, id_, now_price,0, \
                datetime.date.isoformat(now_date - datetime.timedelta(weeks=1)))
            month_move = get_movement(historys, h, id_, now_price, 1, \
                datetime.date.isoformat(now_date - relativedelta(months=+1)))
            quater_move =get_movement(historys, h, id_, now_price, 3, \
                datetime.date.isoformat(now_date - relativedelta(months=+3)))
            year_move = get_movement(historys, h, id_, now_price, 12, \
                datetime.date.isoformat(now_date - relativedelta(years=+1)))

            return week_move, month_move, quater_move, year_move
Example #7
def index():
    if request.method == "POST" and request.is_json:
        now = get_datetime(app.config["APP_TZ"])
        active = doorbell_is_active(
            now,
            suppress_dates=os.environ.get("SUPPRESS_DATES"),
            force_active=os.environ.get("FORCE_ACTIVE",
                                        "false").lower() == "true",
        )
        if request.get_json().get("action") == "status":
            status = "active" if active else "inactive"
            return jsonify({"status": status})
        elif active and request.get_json().get("action") == "ring":
            message = f"{now.strftime('%H:%M:%S')} Ding Dong"
            response = topic.publish(Message=message)
            if response["ResponseMetadata"]["HTTPStatusCode"] == 200:
                return jsonify(
                    {"message": "Someone will open the door shortly."})
            else:
                return jsonify(
                    {"message": "Something went wrong. Please try again."})
        else:
            return jsonify({"message": "Doorbell is inactive at this time."})
    else:
        return jsonify({"message": "Incorrect request type."})
Example #8
 async def add_new_delete_old(self):
     for k, v in self.src_paths.items():
         for file in os.listdir(v):
             if file.endswith('.mp4'):
                 created_dt = get_datetime(name=file,
                                           time_fmt=self.time_format,
                                           path=v)
                 ten_min_ago = datetime.today() - timedelta(minutes=10)
                 if created_dt > ten_min_ago:
                     tmp_uuid = uuid.uuid1().hex
                     tmp_link = os.path.join(self.out_path, tmp_uuid)
                     os.symlink(os.path.join(v, file), tmp_link)
                     md5 = self.get_md5(os.path.join(v, file))
                     async with self.conn.transaction():
                         q = (
                             f"INSERT INTO {self.psql_table_name} (type, filename, uuid, md5, created) "
                             "VALUES ($1, $2, $3, $4, $5) ON CONFLICT (md5) DO UPDATE SET filename=$2"
                         )
                         await self.conn.execute(q, k, file[:-4], tmp_uuid,
                                                 md5, created_dt)
                     self.logger.info("New %s %s added.", k.upper(),
                                      file[:-4])
                     self.uuid_dict[k].append({
                         "filename": file[:-4],
                         "uuid": tmp_uuid,
                         "created": created_dt,
                         "md5": md5
                     })
     # Delete entries which do not exist anymore.
     await self.delete_old_links()
Example #9
def print_msg():
    msgFromWebApp = request.form['textToPrint']
    print(msgFromWebApp)
    msg_response = {
        'message': 'Message printed on flask console!',
        'datetime': utils.get_datetime()
    }
    return jsonify(msg_response)
Example #10
 def get_movement(historys, h, id_, now_price, arg, prev):
     
     now = get_datetime(id_)
     for p in range(h, min(h+6+arg*25, len(historys))):
         t = historys[p][0]
         if t <= prev:
             return (float(now_price) - float(historys[p][6]))/float(historys[p][6])
     return 0
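
get_movement returns the fractional change between now_price and the first history row dated at or before the cutoff prev; a quick numeric check of the formula:

# (now - past) / past: a rise from 100 to 110 is a +0.10 movement.
now_price, past_price = 110.0, 100.0
assert abs((now_price - past_price) / past_price - 0.10) < 1e-9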
Example #11
File: paper.py Project: wdggat/agau
    def __init__(self, kind, bank_buy, bank_sell, price, high, low, time):
        self.kind = kind
        self.bank_buy = float(bank_buy)
        self.bank_sell = float(bank_sell)
        self.price = float(price)
        self.high = float(high)
        self.low = float(low)
        self.time = time
        self.dt = utils.get_datetime(time)
        self.day = utils.get_paper_day(self.dt)
Example #12
 def post(self, run_at):
     '''Schedules a task to a specified date/time'''
     due_date = get_datetime(run_at, tz)
     count.apply_async([counter_until], eta=localtime.localize(due_date))
     response = jsonify({
         'status':
         f'Your task is scheduled to be processed at {due_date.strftime("%Y-%m-%d %H:%M:%S")}'
     })
     response.status_code = 202
     return response
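
A hedged client sketch (the /schedule/<run_at> route path is an assumption; run_at is whatever string get_datetime accepts):

import requests  # hypothetical client

resp = requests.post('http://localhost:5000/schedule/2020-01-01 12:00')
print(resp.status_code)       # 202 once the Celery task is queued with its eta
print(resp.json()['status'])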
Example #13
def check_grammar():
    textToCheck = request.form['textToCheck']
    grammarCheckResult = gram.capitalize(textToCheck)
    checkgrammar_response = {
        'message': 'Message printed on flask console!',
        # 'results': grammarCheckResult,
        # injecting dummy data
        'results': dummyData,
        'datetime': utils.get_datetime()
    }
    return jsonify(checkgrammar_response)
Example #14
File: agau.py Project: wdggat/agau
    def __init__(self, kind, price, increase, dealed, open_price, closed_price, high, low, time):
        self.kind = kind
        self.price = float(price)
        self.increase = float(increase.strip('%')) / 100
        self.dealed = int(dealed)
        self.open_price = float(open_price)
        self.closed_price = float(closed_price)
        self.high = float(high)
        self.low = float(low)
        self.time = time
        self.dt = utils.get_datetime(time)
        self.day = utils.get_agau_day(self.dt)
Example #15
def E_get_database(params):
    try:
        params["email_response"] += "download database ..."
        utils.download_database_full(params, full_path = False)
    except Exception as e:
        utils.get_error(e, params)
        raise

    params["email_response"] += "OK\n"
    params["email_response"] += "\n Database = "+utils.create_link(params['KEY_IML'],str(params['path_real']),str(params['database_name']))+"\n\n\n"

    params["only_database_name"] = params['database_name'][:params['database_name'].rfind(".")]
    params["all_time"] = utils.get_datetime() #TIME
Example #16
def make_text_report_from_output(outputs):
    '''
    Using dependency checker output, generate a text report to write to a file
    '''
    text_report = ""

    text_report += "Canvass for dependency checker\n\n"
    text_report += "Date and time:\t{}\n\n\n".format(
        get_datetime(outputs[0].get("start_time")))

    text_report += "Table of Contents:\n"
    text_report += "1. Description of input\n"
    text_report += "2. Package manager used to find dependencies (direct or transitive)\n"
    text_report += "3. Warnings or errors encountered\n"
    text_report += "4. Summary of dependencies (direct or transitive) and their vulnerabilities\n"
    text_report += "5. Details of dependencies (direct or transitive) with vulnerabilities\n"
    text_report += "6. Details of vulnerabilities found in dependencies (direct or transitive)\n"
    text_report += "7. Details of dependencies (direct or transitive)\n"
    text_report += "\n\n"

    text_report += "1. Description of input:\n\n"
    text_report += "1.1. input directory path:\t{}\n".format(
        outputs[0].get("package_path"))
    text_report += "\n\n"

    text_report += "2. Package manager used to find dependencies (direct or transitive):\t{}\n".format(
        ",\t".join([
            output.get("package_manager") for output in outputs
            if output.get('package_manager') is not None
        ]))
    text_report += "\n\n"

    text_report += "3. Warnings or errors encountered\n\n"
    text_report += get_warnings_from_outputs(outputs)
    text_report += "\n"

    text_report += "4. Summary of dependencies (direct or transitive) and their vulnerabilities:\n"
    text_report += make_summary_report_for_text_report(outputs)
    text_report += "\n"

    text_report += "5. Details of dependencies (direct or transitive) with vulnerabilities\n"
    text_report += make_vulnerable_libraries_and_versions_text_report(outputs)
    text_report += "\n"

    text_report += "6. Details of vulnerabilities found in dependencies (direct or transitive)\n"
    text_report += make_detailed_vuln_report_text_report(outputs)
    text_report += "\n"

    text_report += "7. Details of dependencies (direct or transitive)\n"
    text_report += make_libraries_found_text_report(outputs)
    return text_report
Example #17
    def get_close_index_from_total_index(use_index, id_, error_filename_total_index):

        with open('%s/%s.csv' % (DIR_PRICE, use_index)) as csvfile:
            historys = list(csv.reader(csvfile, delimiter= ','))

        date = get_datetime(id_)
        price = 0
        for history in historys:
            if history[0]==date:
                price = history[6]
        if price == 0:
            price = '0'
            with open(error_filename_total_index, 'a') as ef:
                ef.write('%s\t%s\n' % (use_index, id_))
        return price
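
The lookup assumes each price CSV row keeps the date in column 0 and the closing price in column 6; a minimal sketch of that layout (the other column names are assumptions):

# date, open, high, low, ?, ?, close -- only columns 0 and 6 are read here.
historys = [
    ['2020-01-02', '10.0', '10.5', '9.8', '0', '0', '10.2'],
    ['2020-01-03', '10.2', '10.9', '10.1', '0', '0', '10.7'],
]
price = next((row[6] for row in historys if row[0] == '2020-01-03'), '0')
assert price == '10.7'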
Example #18
def E_graphic(params):
    params["a_func"] = utils.get_datetime() #TIME
    try:
        params["email_response"] += "Creating graphic..."
        params['pdfgraphic'] = (params["only_database_name"]+".pdf").replace("(","").replace(")","")
        print "Rscript machine_learning/pdf/pdf_lines.R \""+params['path_real']+params['csvcluster']+"\" \""+params['path_real']+params['pdfgraphic']+"\""
        os.system("Rscript machine_learning/pdf/pdf_lines.R \""+params['path_real']+params['csvcluster']+"\" \""+params['path_real']+params['pdfgraphic']+"\"")
    except Exception as e:
        utils.get_error(e, params)
        raise

    time_txt = utils.end_func("Graphic creation",params["a_func"])
    params["email_response"] += "OK\n"
    params["email_response"] += "\n Graphic = "+utils.create_link(params['KEY_IML'],str(params['path_real']),str(params['pdfgraphic']))+"\n\n"
    params["email_response"] += time_txt
Example #19
def add_entry(title):
    if 'username' in session:
        global versions
        versions[title] = 1
        current_date = get_datetime()
        db = get_db()
        db.execute('INSERT INTO entries (title, text, my_date, version, current) VALUES (?, ?, ?, ?, ?)',
                     [title, request.form['content'], current_date, versions[title], True])
        db.commit()
        #flash('New entry was successfully posted')
        if title == "|":
            return redirect(url_for('homepage'))
        return redirect(url_for('viewpage', title=title))
    else:
        return redirect(url_for('login'))
Example #20
def update_entry(title):
    global versions
    version = versions[title]
    versions[title] += 1
    current_date = get_datetime()
    db = get_db()
    # create a version
    entry = query_db("SELECT * FROM entries WHERE title = ?", [title], one=True)
    db.execute('INSERT INTO entries (title, text, my_date, version, current) VALUES (?, ?, ?, ?, ?)',
                 [title + "v%s" % version, entry['text'], entry['my_date'], version, False])
    db.commit()
    # update entry
    db.execute('UPDATE entries SET text=?, my_date=?, version=?, current=? WHERE title=?', (request.form['content'], current_date, versions[title], True, title))
    db.commit()
    if title == "|":
        return redirect(url_for('homepage'))
    return redirect(url_for('viewpage', title=title))
Example #21
 def update(self) -> None:
     """Updates pending"""
     names = self.get_files()
     self.pending.clear()
     for n in names:
         if n in self.blacklist:
             continue
         try:
             file_dt = get_datetime(n, self.time_format, self.check_path)
             if not isinstance(file_dt, datetime):
                 raise ValueError(f"Expected datetime, got {type(file_dt)}: {file_dt}")
             self.logger.info(f"{n} will be deleted at {file_dt}")
         except Exception:
             self.blacklist.append(n)
             self.logger.exception(f"Cannot determine datetime for {n}")
             continue
         self.pending[n] = file_dt + timedelta(days=self.clean_days)
Example #22
    def get_close_price_from_price_history(company_code, id_, error_filename):

        def make_numeric_input_variable(historys, h,id_, now_price):

            def get_movement(historys, h, id_, now_price, arg, prev):
                
                now = get_datetime(id_)
                for p in range(h, min(h+6+arg*25, len(historys))):
                    t = historys[p][0]
                    if t <= prev:
                        return (float(now_price) - float(historys[p][6]))/float(historys[p][6])
                return 0

            week_move, month_move, quater_move, year_move =[],[],[],[]
            now_date = get_datetime(id_)
            now_date = datetime.datetime.strptime(now_date, '%Y-%m-%d')

            week_move = get_movement(historys, h, id_, now_price,0, \
                datetime.date.isoformat(now_date - datetime.timedelta(weeks=1)))
            month_move = get_movement(historys, h, id_, now_price, 1, \
                datetime.date.isoformat(now_date - relativedelta(months=+1)))
            quater_move =get_movement(historys, h, id_, now_price, 3, \
                datetime.date.isoformat(now_date - relativedelta(months=+3)))
            year_move = get_movement(historys, h, id_, now_price, 12, \
                datetime.date.isoformat(now_date - relativedelta(years=+1)))

            return week_move, month_move, quater_move, year_move

        with open('%s/%s.csv' % (DIR_PRICE, company_code)) as csvfile:
            historys = list(csv.reader(csvfile, delimiter= ','))

        date = get_datetime(id_)
        price = 0
        for h in range(len(historys)):
            if historys[h][0]==date:
                price = historys[h][6]
                week_move, month_move, quater_move, year_move = make_numeric_input_variable(historys,h, id_, price)
        
        if price == 0:
            price, week_move, month_move, quater_move, year_move = '0', 0,0,0,0
            with open(error_filename, 'a') as ef:
                ef.write('%s\n' % id_)

        return price, week_move, month_move, quater_move, year_move
Example #23
def E_clustering(df,params):
    params["a_func"] = utils.get_datetime() #TIME
    try:
        params["email_response"] += "Clustering ..."
        labels, n_clusters = Clustering(df, mode="fixed_k", n_clusters=int(params['number'])).clusterize()
        timestamp = list(df.index)
        params['csvcluster'] = str(params['number'])+"clusters"+params["only_database_name"]+".csv"
        with open(params['path_real']+params['csvcluster'], 'w') as f:
            f.write("timestamp,clusters\n")
            for i in range(len(labels)):
                f.write("{},{}\n".format(timestamp[i],labels[i]))
    except Exception as e:
        utils.get_error(e, params)
        raise

    time_txt = utils.end_func("Clustering",params["a_func"])
    params["email_response"] += "OK\n"
    params["email_response"] += "\n CSV MachineLearning = "+utils.create_link(params['KEY_IML'],str(params['path_real']),str(params['csvcluster']),str("3"),{})+"\n\n"
    params["email_response"] += time_txt
Example #24
def add_entry(title):
    if 'username' in session:
        global versions
        versions[title] = 1
        current_date = get_datetime()
        db = get_db()
        db.execute(
            'INSERT INTO entries (title, text, my_date, version, current) VALUES (?, ?, ?, ?, ?)',
            [
                title, request.form['content'], current_date, versions[title],
                True
            ])
        db.commit()
        #flash('New entry was successfully posted')
        if title == "|":
            return redirect(url_for('homepage'))
        return redirect(url_for('viewpage', title=title))
    else:
        return redirect(url_for('login'))
Example #25
def E_pre_processing(params):
    params["a_func"] = utils.get_datetime() #TIME
    try:
        params["email_response"] += "PreProcessing ..."
        params['csvpreprocessing'] = "pre_processing"+params["csvfile"]
        df = pd.read_csv(open(params['path_real']+params["csvfile"],"r"), sep=',', header=0, index_col=0)
        pre_processing = pre.PreProcessing(df,norm=params['optimzation_sensor_hub'])
        df = pre_processing.build()
        df.to_csv(params['path_real']+params['csvpreprocessing'], sep=',', encoding='utf-8', header=True)
    except Exception as e:
        utils.get_error(e, params)
        raise


    time_txt = utils.end_func("PreProcessing",params["a_func"])
    params["email_response"] += "OK\n"
    params["email_response"] += "\n CSV PreProcessing = "+utils.create_link(params['KEY_IML'],str(params['path_real']),str(params['csvpreprocessing']),str("2"),{'optimzation_sensor_hub':params["optimzation_sensor_hub"]})+"\n\n"
    params["email_response"] += time_txt

    return df
Example #26
def make_summary_report(output):
    '''
    Generates a summary report for console output
    '''
    package_path = output.get("package_path")
    duration = output.get("duration")
    package_manager = output.get("package_manager")
    start_time = output.get("start_time")

    rows = ""
    title = "Dependency checker summary report:\n"
    rows += title
    rows += "{}\t{}\n".format("Date & Time:", get_datetime(start_time))
    rows += "{}\t{}\n".format("Duration:", duration)
    rows += "{}\t{}\n".format("Input path:", package_path)
    rows += "{}\t{}\n".format("Package manager used to find dependencies:",
                              package_manager)

    rows += "\n"
    return rows
Example #27
def submit():
    """
    handles user submits
    :return: json object of submit confirmation
    """
    conn = create_connection()
    data = request.form
    user_id, session_id, words_input, content_id, click_type = data["user_id"], data["session_id"], data["words_input"], \
                                                               data["content_id"], data["click_type"]
    email = get_email(user_id)
    words = ast.literal_eval(str(words_input))
    for word in words:
        query = "INSERT into user_content(words,time,session_id,content_id,email) VALUES(%s,%s,%s,%s,%s)"
        run_query(
            conn, query,
            [word[0],
             get_datetime(word[1]), session_id, content_id, email])
    conn.commit()
    conn.close()
    return jsonify({"code": 1, "message": "user response saved"})
Example #28
def create_meeting():
    check_json_data()
    owner_uuid = get_owner_uuid()
    name = get_meeting_name()
    description = get_meeting_description()
    dt = get_datetime()
    nickname = get_nickname()

    owner = find_user(owner_uuid)

    meeting = Meeting(name=name,
                      description=description,
                      datetime=dt,
                      owner=owner)
    membership = Membership(meeting=meeting, user=owner, nickname=nickname)
    db.session.add(membership)
    db.session.commit()

    return make_response({
        'uuid': meeting.uuid,
    }, 201, {'Location': f'/api/v1/meetings/{meeting.uuid}'})
Example #29
    async def replace_all(self):
        # Clear existing links
        for file in os.listdir(self.out_path):
            os.unlink(os.path.join(self.out_path, file))

        self.logger.info("Old links deleted")
        # Create links
        for k, v in self.src_paths.items():
            self.uuid_dict[k] = []
            for file in os.listdir(v):
                if file.endswith('.mp4'):
                    tmp_uuid = uuid.uuid1()
                    tmp_link = os.path.join(self.out_path, tmp_uuid.hex)
                    os.symlink(os.path.join(v, file), tmp_link)
                    created_dt = get_datetime(name=file,
                                              time_fmt=self.time_format,
                                              path=v)
                    md5 = self.get_md5(os.path.join(v, file))
                    self.uuid_dict[k].append({
                        "filename": file[:-4],
                        "uuid": tmp_uuid,
                        "created": created_dt,
                        "md5": md5
                    })
            self.logger.info("%s links created", k.upper())
        # Insert/update table.
        async with self.conn.transaction():
            for vid_type, dict_list in self.uuid_dict.items():
                for item in dict_list:
                    q = (
                        f"INSERT INTO {self.psql_table_name} (type, filename, uuid, md5, created) "
                        "VALUES ($1, $2, $3, $4, $5) ON CONFLICT (md5) DO UPDATE SET filename=$2, uuid=$3"
                    )
                    await self.conn.execute(q, vid_type, item['filename'],
                                            item['uuid'], item['md5'],
                                            item['created'])
        self.logger.info("Links added to SQL table")
        # Delete entries which do not exist anymore.
        await self.delete_old_links()
Example #30
def register():
    global back
    db = get_db()
    current_date = get_datetime()
    have_error = False
    username = request.form['username']
    password = request.form['password']
    verify = request.form['verify']
    user = query_db("SELECT * FROM users WHERE username = ?", [username],
                    one=True)
    params = dict(username=username)

    if user:
        params['error_username'] = "******"
        have_error = True

    if not valid_username(username):
        params['error_username'] = "******"
        have_error = True

    if not valid_password(password):
        params['error_password'] = "******"
        have_error = True
    elif password != verify:
        params['error_verify'] = "Your passwords didn't match."
        have_error = True

    if have_error:
        return render_template('signup.html', **params)
    else:
        hpw = bcrypt.generate_password_hash(password)
        db.execute(
            'INSERT INTO users (username, hpw, my_date) VALUES (?, ?, ?)',
            [username, hpw, current_date])
        db.commit()
        session['username'] = username
        return redirect(back)
Example #31
def run_fold(data, i, results):
    """
    Used in main function to run each fold
    :param data: the dictionary with all the configurations
    :param i: the fold number
    :param results: the dictionary containing the results for each classifier
    :return: the results dictionary
    """
    print("Running fold: ", i)
    start_time = utils.get_datetime()
    fold = "fold" + str(i)
    print("Reading and converting arff files")
    # use a converter for arff files to get pandas DataFrames
    train_df, test_df = dfc.convert_arff_to_dataframe(data, fold)
    print("Got train and test dataframes")
    print("Creating numpy arrays")
    # separate labels from features and replace labels with numbers
    train_labels, train_features, train_labels_dict = dfc.get_features_labels_arrays(
        train_df)
    test_labels, test_features, test_labels_dict = dfc.get_features_labels_arrays(
        test_df)
    num_classes = len(train_labels_dict)
    print("Got labels and features for train and test datasets")
    print("Classifying")
    for classifier in data["classifiers"]:
        # for each classifier specified in the configuration file, execute the classification task
        # and return the confusion matrix
        confusion_matrix = Classifier.classify(data, classifier, num_classes,
                                               train_labels, train_features,
                                               test_labels, test_features)
        # get micro/macro precision, recall and F-Measure for current fold
        results = write_results_to_file(data, fold, classifier,
                                        confusion_matrix, test_labels_dict,
                                        results)
    time_needed = utils.elapsed_str(start_time, up_to=None)
    print("Time needed to run fold ", str(i), " is ", time_needed)
    return results
Example #32
def update_entry(title):
    global versions
    version = versions[title]
    versions[title] += 1
    current_date = get_datetime()
    db = get_db()
    # create a version
    entry = query_db("SELECT * FROM entries WHERE title = ?", [title],
                     one=True)
    db.execute(
        'INSERT INTO entries (title, text, my_date, version, current) VALUES (?, ?, ?, ?, ?)',
        [
            title + "v%s" % version, entry['text'], entry['my_date'], version,
            False
        ])
    db.commit()
    # update entry
    db.execute(
        'UPDATE entries SET text=?, my_date=?, version=?, current=? WHERE title=?',
        (request.form['content'], current_date, versions[title], True, title))
    db.commit()
    if title == "|":
        return redirect(url_for('homepage'))
    return redirect(url_for('viewpage', title=title))
Example #33
def reducer(max_type):
    max_prices, max_times, min_times, min_prices = {}, {}, {}, {}
    days = set()
    for line in sys.stdin:
        items = line.strip().split('\t')
        price, dt = float(items[1]), utils.get_datetime(items[-1])
        if utils.at_bid_time(dt):
            continue
        if max_type == NIGHT_ONLY and not utils.at_night(dt):
            continue

        day = utils.get_agau_day(dt)
        if price > max_prices.get(day, 0):
            max_prices[day] = price
            max_times[day] = items[-1]
        if price < min_prices.get(day, sys.maxint):
            min_prices[day] = price
            min_times[day] = items[-1]
        days.add(day)

    print '%s\t\t%s\t%s\t\t%s\t%s\t\t%s' % ('DAY', 'MAX_PRICE', 'MAX_AT', 'MIN_PRICE', 'MIN_AT', 'DELTA')
    for day in sorted(days):
        print '%s\t%s\t%s\t%s\t%s\t%s' % (day, max_prices[day], max_times[day], min_prices[day], min_times[day], max_prices[day] - min_prices[day])
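
The reducer expects tab-separated mapper output with the price in field 1 and the timestamp in the last field; a sample line under that assumed layout:

line = 'XAG\t4.25\t2016-03-01 22:15:00'  # kind, price, timestamp (assumed field order)
items = line.strip().split('\t')
price, ts = float(items[1]), items[-1]   # mirrors the reducer's field access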
Example #34
def get_first_report_id(start_date):
    date_struct = utils.get_datetime(start_date)
    term = ('l', 'e', 'm')[(date_struct.tm_mon - 1) // 4]
    year = str(date_struct.tm_year)[-2:]
    return ''.join((term, year, '000'))
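
The term letter buckets the month into thirds of the year (1-4 -> 'l', 5-8 -> 'e', 9-12 -> 'm'); a quick check of the floor-division arithmetic:

# (month - 1) // 4 maps 1-4 -> 0, 5-8 -> 1, 9-12 -> 2.
for month, term in [(1, 'l'), (4, 'l'), (5, 'e'), (8, 'e'), (9, 'm'), (12, 'm')]:
    assert ('l', 'e', 'm')[(month - 1) // 4] == term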
Example #35
def main():
    ad_dict_list = []

    products = get_list_of_dicts_from_csv_file('Товары.csv')
    current_date = get_datetime(START_TIME)

    for product in products:
        if len(product['Заголовок']) < 20:
            title = f'Уплотнитель двери холодильника {product["Заголовок"]}'
        elif len(product['Заголовок']) < 26:
            title = f'Уплотнитель холодильника {product["Заголовок"]}'
        else:
            title = f'Уплотнитель для {product["Заголовок"]}'

        images = [
            ''.join([PHOTO_STORAGE, x])
            for x in product['Ссылки на картинки'].split(', ')
        ]

        ad_dict_list.append({
            'Id': f'{datetime.now().strftime("%Y-%m")}-{slugify(title)}',
            'DateBegin': get_repr_world_time(current_date),
            'ListingFee': 'Package',
            'AdStatus': 'Free',
            'ManagerName': MANAGER_NAME,
            'ContactPhone': CONTACT_PHONE,
            'Address': ADDRESS,
            'Category': CATEGORY,
            'GoodsType': GOODS_TYPE,
            'AdType': AD_TYPE,
            'Title': title,
            'Description': get_description(product['Заголовок']),
            'Price': product['Цена'],
            'Condition': CONDITION,
            'Images': images,
        })

        current_date += timedelta(minutes=45)
        if current_date.hour >= 20 and current_date.minute > 0:
            # Advance with timedelta so month ends are handled, unlike replace(day=day + 1).
            current_date = (current_date + timedelta(days=1)).replace(hour=8, minute=0)

    now = datetime.now().strftime('%d-%m-%Y')
    file_name = ''.join([slugify(COMPANY_NAME), '.', now, '.xml'])
    file_path = Path('out_xml') / file_name
    save_root_xml(file_path, ad_dict_list)
Example #36
def create_or_update_install_job(db_session,
                                 host_id,
                                 install_action,
                                 scheduled_time,
                                 software_packages=[],
                                 server_id=-1,
                                 server_directory='',
                                 custom_command_profile_ids=[],
                                 dependency=0,
                                 pending_downloads=[],
                                 created_by=None,
                                 install_job=None):

    if not type(software_packages) is list:
        raise ValueError('software_packages must be a list type')

    if not type(custom_command_profile_ids) is list:
        raise ValueError('custom_command_profile_ids must be a list type')

    if not type(pending_downloads) is list:
        raise ValueError('pending_downloads must be a list type')

    # This is a new install_job
    if install_job is None:
        install_job = InstallJob()
        install_job.host_id = host_id
        db_session.add(install_job)

    install_job.install_action = install_action

    if install_job.install_action == InstallAction.INSTALL_ADD and not is_empty(
            pending_downloads):
        install_job.pending_downloads = ','.join(pending_downloads)
    else:
        install_job.pending_downloads = ''

    install_job.scheduled_time = get_datetime(scheduled_time)

    # Only Install Add and Pre-Migrate should have server_id and server_directory
    if install_action == InstallAction.INSTALL_ADD or install_action == InstallAction.PRE_MIGRATE:
        install_job.server_id = int(server_id) if int(server_id) > 0 else None
        install_job.server_directory = server_directory
    else:
        install_job.server_id = None
        install_job.server_directory = ''

    install_job_packages = []

    # Only the following install actions should have software packages
    if install_action == InstallAction.INSTALL_ADD or \
        install_action == InstallAction.INSTALL_ACTIVATE or \
        install_action == InstallAction.INSTALL_REMOVE or \
        install_action == InstallAction.INSTALL_DEACTIVATE or \
        install_action == InstallAction.PRE_MIGRATE:

        for software_package in software_packages:
            if install_action == InstallAction.INSTALL_ADD:
                if is_file_acceptable_for_install_add(software_package):
                    install_job_packages.append(software_package)
            else:
                # Install Activate can have external or internal package names
                install_job_packages.append(software_package)

    install_job.packages = ','.join(install_job_packages)
    install_job.dependency = dependency if dependency > 0 else None

    user = get_user(db_session, created_by)
    install_job.created_by = created_by
    install_job.user_id = None if user is None else user.id

    if install_action == InstallAction.PRE_UPGRADE or install_action == InstallAction.POST_UPGRADE or \
       install_action == InstallAction.PRE_MIGRATE or install_action == InstallAction.MIGRATE_SYSTEM or \
       install_action == InstallAction.POST_MIGRATE:
        install_job.custom_command_profile_ids = ','.join(
            custom_command_profile_ids) if custom_command_profile_ids else None

    # Resets the following fields
    install_job.status = None
    install_job.status_time = None
    install_job.session_log = None
    install_job.trace = None

    if install_job.install_action != InstallAction.UNKNOWN:
        db_session.commit()

    # Creates download jobs if needed
    if install_job.install_action == InstallAction.INSTALL_ADD and \
        len(install_job.packages) > 0 and \
        len(install_job.pending_downloads) > 0:

        # Use the SMU name to derive the platform and release strings
        smu_list = install_job.packages.split(',')
        pending_downloads = install_job.pending_downloads.split(',')

        # Derives the platform and release using the first SMU name.
        platform, release = SMUInfoLoader.get_platform_and_release(smu_list)

        create_download_jobs(db_session, platform, release, pending_downloads,
                             install_job.server_id,
                             install_job.server_directory, created_by)

    return install_job
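
is_file_acceptable_for_install_add is not shown in this listing; Example #40 below inlines an equivalent suffix check, so a sketch consistent with both versions might be (the exact rule is an assumption):

def is_file_acceptable_for_install_add(filename):
    # Install Add only accepts external package names with these suffixes,
    # mirroring the inline '.pie'/'.tar'/'.rpm' check in Example #40.
    return any(suffix in filename for suffix in ('.pie', '.tar', '.rpm'))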
Example #37
                        default='train',
                        choices=['train', 'test'])
    parser.add_argument('--use_tensorboard', type=bool, default=True)

    # Directories.
    parser.add_argument('--cluster_npz_path',
                        type=str,
                        default='data/celeba/generated/clusters.npz')
    parser.add_argument('--dataset_path', type=str, default='./data/celeba/')
    parser.add_argument('--attr_path',
                        type=str,
                        default='data/celeba/list_attr_celeba.txt')
    parser.add_argument('--rafd_image_dir',
                        type=str,
                        default='data/RaFD/train')
    parser.add_argument('--exp_dir', type=str, default='experiments')
    parser.add_argument('--exp_id',
                        type=str,
                        default=get_datetime(),
                        help="experiment id")

    # Step size.
    parser.add_argument('--log_step', type=int, default=10)
    parser.add_argument('--sample_step', type=int, default=1000)
    parser.add_argument('--model_save_step', type=int, default=10000)
    parser.add_argument('--lr_update_step', type=int, default=1000)

    cfg = parser.parse_args()
    print(cfg)
    main(cfg)
Example #38
def create_or_update_install_job(db_session, host_id, install_action, scheduled_time, software_packages=[],
                                 server_id=-1, server_directory='', custom_command_profile_ids=[], dependency=0,
                                 pending_downloads=[], created_by=None, install_job=None):

    if not type(software_packages) is list:
        raise ValueError('software_packages must be a list type')

    if not type(custom_command_profile_ids) is list:
        raise ValueError('custom_command_profile_ids must be a list type')

    if not type(pending_downloads) is list:
        raise ValueError('pending_downloads must be a list type')

    # This is a new install_job
    if install_job is None:
        install_job = InstallJob()
        install_job.host_id = host_id
        db_session.add(install_job)

    install_job.install_action = install_action

    if install_job.install_action == InstallAction.INSTALL_ADD and not is_empty(pending_downloads):
        install_job.pending_downloads = ','.join(pending_downloads)
    else:
        install_job.pending_downloads = ''

    install_job.scheduled_time = get_datetime(scheduled_time)

    # Only Install Add and Pre-Migrate should have server_id and server_directory
    if install_action == InstallAction.INSTALL_ADD or install_action == InstallAction.PRE_MIGRATE:
        install_job.server_id = int(server_id) if int(server_id) > 0 else None
        install_job.server_directory = server_directory
    else:
        install_job.server_id = None
        install_job.server_directory = ''

    install_job_packages = []

    # Only the following install actions should have software packages
    if install_action == InstallAction.INSTALL_ADD or \
        install_action == InstallAction.INSTALL_ACTIVATE or \
        install_action == InstallAction.INSTALL_REMOVE or \
        install_action == InstallAction.INSTALL_DEACTIVATE or \
        install_action == InstallAction.PRE_MIGRATE:

        for software_package in software_packages:
            if install_action == InstallAction.INSTALL_ADD:
                if is_file_acceptable_for_install_add(software_package):
                    install_job_packages.append(software_package)
            else:
                # Install Activate can have external or internal package names
                install_job_packages.append(software_package)

    install_job.packages = ','.join(install_job_packages)
    install_job.dependency = dependency if dependency > 0 else None

    user = get_user(db_session, created_by)
    install_job.created_by = created_by
    install_job.user_id = None if user is None else user.id

    if install_action == InstallAction.PRE_UPGRADE or install_action == InstallAction.POST_UPGRADE or \
       install_action == InstallAction.PRE_MIGRATE or install_action == InstallAction.MIGRATE_SYSTEM or \
       install_action == InstallAction.POST_MIGRATE:
        install_job.custom_command_profile_ids = ','.join(custom_command_profile_ids) if custom_command_profile_ids else None

    # Resets the following fields
    install_job.status = None
    install_job.status_time = None
    install_job.session_log = None
    install_job.trace = None

    if install_job.install_action != InstallAction.UNKNOWN:
        db_session.commit()

    # Creates download jobs if needed
    if install_job.install_action == InstallAction.INSTALL_ADD and \
        len(install_job.packages) > 0 and \
        len(install_job.pending_downloads) > 0:

        # Use the SMU name to derive the platform and release strings
        smu_list = install_job.packages.split(',')
        pending_downloads = install_job.pending_downloads.split(',')

        # Derives the platform and release using the first SMU name.
        platform, release = SMUInfoLoader.get_platform_and_release(smu_list)

        create_download_jobs(db_session, platform, release, pending_downloads,
                             install_job.server_id, install_job.server_directory, created_by)

    return install_job
Example #39
def parse_args(args, contexts):
	"""Parse the args dictionary returned by docopt.

	Strings are converted into proper objects. For example, datetime related
	strings are converted into datetime objects, hexadecimal task's
	identifiers are converted to integer and context names are converted into
	context objects. The `contexts` argument is the contexts table used to
	retrieve contexts thanks to their names

	If one of the conversion fails, a report is written about the fail. This
	report is None if no failure has been encountered. The report is returned
	by the function"""
	# The command-line interface is ambiguous. There is `todo [<context>]` to
	# only show the tasks of a specific context. There's also all other
	# commands such as `todo history`. The desired behavior is that an
	# existing command always wins over a context's name. If a user has a
	# context which happens to have the name of a command, he can still do
	# `todo ctx history` for example.

	# docopt has no problem making a command win over a context's name *if
	# there are parameters or options accompanying the command*. This means
	# that for parameters-free commands such as `todo history`, docopt thinks
	# that it's `todo <context>` with <context> = 'history'. To make up for
	# such behavior, what I do is that I look if there's a <context> value
	# given for a command which doesn't accept context, this context value
	# being the name of a command. In such case, I set the corresponding
	# command flag to True and the context value to None. Actually, the only
	# command accepting a context value is `ctx`.
	if not args['ctx'] and args['<context>'] in {'contexts', 'history',
	'purge'}:
		args[args['<context>']] = True
		args['<context>'] = None
	report = None
	if args['--priority'] is not None:
		try:
			args['--priority'] = int(args['--priority'])
		except ValueError:
			report = 'PRIORITY must be an integer'
	if args['--visibility'] is not None:
		if args['--visibility'] not in ['discreet', 'wide', 'hidden']:
			report = "VISIBILITY must be one of the following: discreet, "+\
			"wide or hidden"
	for arg in ['--deadline', '--start']:
		if args[arg] is not None:
			dt = utils.get_datetime(args[arg], NOW)
			if dt is None:
				report = "MOMENT must be in the YYYY-MM-DD format, or the "+\
				"YYYY-MM-DDTHH:MM:SS format, or a delay in the "+\
				"([0-9]+)([wdhms]) format"
			else:
				args[arg] = dt
	if args['<id>'] is not None:
		try:
			args['<id>'] = int(args['<id>'], 16)
		except ValueError:
			report = "Invalid task ID"
	for arg in ['--context', '<context>']:
		if args[arg] is not None:
			ctx = contexts.get(args[arg])
			if ctx is None:
				ctx = Context(args[arg])
				contexts[args[arg]] = ctx
			args[arg] = ctx
	return report
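
The error message documents a ([0-9]+)([wdhms]) delay format; a minimal sketch of turning such a delay into a datetime (an assumption about utils.get_datetime, not its actual code):

import re
from datetime import datetime, timedelta

def parse_delay(text, now):
    # '2w' = two weeks from now, '30m' = thirty minutes, etc. Hypothetical helper.
    m = re.fullmatch(r'([0-9]+)([wdhms])', text)
    if m is None:
        return None
    seconds = {'w': 604800, 'd': 86400, 'h': 3600, 'm': 60, 's': 1}[m.group(2)]
    return now + timedelta(seconds=int(m.group(1)) * seconds)

assert parse_delay('2d', datetime(2020, 1, 1)) == datetime(2020, 1, 3)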
Example #40
def create_or_update_install_job(
    db_session, host_id, install_action, scheduled_time, software_packages=None,
    server=-1, server_directory='', custom_command_profile=-1, dependency=0, pending_downloads=None, install_job=None):

    # This is a new install_job
    if install_job is None:
        install_job = InstallJob()
        install_job.host_id = host_id
        db_session.add(install_job)

    install_job.install_action = install_action

    if install_job.install_action == InstallAction.INSTALL_ADD and \
        not is_empty(pending_downloads):
        install_job.pending_downloads = ','.join(pending_downloads.split())
    else:
        install_job.pending_downloads = ''

    install_job.scheduled_time = get_datetime(scheduled_time, "%m/%d/%Y %I:%M %p")

    # Only Install Add should have server_id and server_directory
    if install_action == InstallAction.INSTALL_ADD:
        install_job.server_id = int(server) if int(server) > 0 else None
        install_job.server_directory = server_directory
    else:
        install_job.server_id = None
        install_job.server_directory = ''

    install_job_packages = []

    # Only the following install actions should have software packages
    if install_action == InstallAction.INSTALL_ADD or \
        install_action == InstallAction.INSTALL_ACTIVATE or \
        install_action == InstallAction.INSTALL_REMOVE or \
        install_action == InstallAction.INSTALL_DEACTIVATE:

        software_packages = software_packages.split() if software_packages is not None else []
        for software_package in software_packages:
            if install_action == InstallAction.INSTALL_ADD:
                # Install Add only accepts external package names with the following suffix
                if '.pie' in software_package or \
                    '.tar' in software_package or \
                    '.rpm' in software_package:
                    install_job_packages.append(software_package)
            else:
                # Install Activate can have external or internal package names
                install_job_packages.append(software_package)

    install_job.packages = ','.join(install_job_packages)
    install_job.dependency = dependency if dependency > 0 else None
    install_job.created_by = current_user.username
    install_job.user_id = current_user.id

    if install_action == InstallAction.PRE_UPGRADE or install_action == InstallAction.POST_UPGRADE:
        install_job.custom_command_profile_id = custom_command_profile if custom_command_profile else None

    # Resets the following fields
    install_job.status = None
    install_job.status_time = None
    install_job.session_log = None
    install_job.trace = None

    if install_job.install_action != InstallAction.UNKNOWN:
        db_session.commit()

    # Creates download jobs if needed
    if install_job.install_action == InstallAction.INSTALL_ADD and \
        len(install_job.packages) > 0 and \
        len(install_job.pending_downloads) > 0:

        # Use the SMU name to derive the platform and release strings
        smu_list = install_job.packages.split(',')
        pending_downloads = install_job.pending_downloads.split(',')

        # Derives the platform and release using the first SMU name.
        platform = get_platform(smu_list[0])
        release = get_release(smu_list[0])

        create_download_jobs(db_session, platform, release, pending_downloads,
                             install_job.server_id, install_job.server_directory)

    return install_job
Example #41
    def test_check_for_warnings(self):
        test_dt = datetime(year=2020, month=4, day=1, hour=0, minute=0)
        input_files = {
            '200330-2100_DEL.flv',  # Already deleted
            '200401-0001_TOO SOON.flv',  # 1 min
            '200401-0100_TOO SOON.flv',  # 1 hour
            '200401-0101_WARN.flv',  # 1 hour, 1 minute
            '200401-0600_WARN.flv',  # 6 hours
            '200401-1201_WARN.flv',  # 12 hours, 1 minute
            '200401-1600_WARN.flv',  # 16 hours
            '200401-2300_WARN.flv',  # 23 hours
            '200402-0000_WARN.flv',  # 24 hours
            '200402-1200_WARN.flv',  # 1 day, 12 hours
            '200403-0000_WARN.flv',  # 2 days
            '200403-1200_WARN.flv',  # 2 days, 12 hours
            '200404-0000_WARN.flv',  # 3 days
            '200405-0000_WARN.flv',  # 4 days
            '200406-0000_WARN.flv',  # 5 days
        }
        self.cleaner.pending.clear()
        for n in input_files:
            self.cleaner.pending[n] = utils.get_datetime(n, TIME_FMT)

        expected_warn = [
            {
                "add_hours":
                0,
                "warned": {
                    '200401-0001_TOO SOON.flv': 12,  # 1 min
                    '200401-0100_TOO SOON.flv': 12,  # 1 hour
                    '200401-0101_WARN.flv': 12,  # 1 hour, 1 minute
                    '200401-0600_WARN.flv': 12,  # 6 hours
                    '200401-1201_WARN.flv': 24,  # 12 hours, 1 minute
                    '200401-1600_WARN.flv': 24,  # 16 hours
                    '200401-2300_WARN.flv': 24,  # 23 hours
                    '200402-0000_WARN.flv': 24,  # 24 hours
                    '200402-1200_WARN.flv': 48,  # 1 day, 12 hours
                    '200403-0000_WARN.flv': 48,  # 2 days
                },
                "warn_str": [
                    '200401-0001_TOO SOON',
                    '200401-0100_TOO SOON',
                    '200401-0101_WARN',
                    '200401-0600_WARN',
                    '200401-1201_WARN',
                    '200401-1600_WARN',
                    '200401-2300_WARN',
                    '200402-0000_WARN',
                    '200402-1200_WARN',
                    '200403-0000_WARN',
                ],
            },
            {
                "add_hours": 0,
                "warned": {
                    '200401-0001_TOO SOON.flv': 12,  # 1 min
                    '200401-0100_TOO SOON.flv': 12,  # 1 hour
                    '200401-0101_WARN.flv': 12,  # 1 hour, 1 minute
                    '200401-0600_WARN.flv': 12,  # 6 hours
                    '200401-1201_WARN.flv': 24,  # 12 hours, 1 minute
                    '200401-1600_WARN.flv': 24,  # 16 hours
                    '200401-2300_WARN.flv': 24,  # 23 hours
                    '200402-0000_WARN.flv': 24,  # 24 hours
                    '200402-1200_WARN.flv': 48,  # 1 day, 12 hours
                    '200403-0000_WARN.flv': 48,  # 2 days
                },
                "warn_str": [],
            },
            {
                "add_hours": 3,  # Reference time 2020-04-01 03:00
                "warned": {
                    '200401-0600_WARN.flv': 12,  # 3 hours
                    '200401-1201_WARN.flv': 12,  # 9 hours, 1 minute
                    '200401-1600_WARN.flv': 24,  # 13 hours
                    '200401-2300_WARN.flv': 24,  # 20 hours
                    '200402-0000_WARN.flv': 24,  # 21 hours
                    '200402-1200_WARN.flv': 48,  # 1 day, 9 hours
                    '200403-0000_WARN.flv': 48,  # 1 day, 21 hours
                },
                "warn_str": [
                    '200401-1201_WARN',
                ],
            },
            {
                "add_hours": 6,  # Reference time 2020-04-01 09:00
                "warned": {
                    '200401-1201_WARN.flv': 12,  # 3 hours, 1 minute
                    '200401-1600_WARN.flv': 12,  # 7 hours
                    '200401-2300_WARN.flv': 24,  # 14 hours
                    '200402-0000_WARN.flv': 24,  # 15 hours
                    '200402-1200_WARN.flv': 48,  # 1 day, 3 hours
                    '200403-0000_WARN.flv': 48,  # 1 day, 15 hours
                },
                "warn_str": [
                    '200401-1600_WARN',
                ],
            },
            {
                "add_hours":
                17,  # Reference time 2020-04-02 02:00
                "warned": {
                    '200402-1200_WARN.flv': 12,  # 10 hours
                    '200403-0000_WARN.flv': 24,  # 22 hours
                    '200403-1200_WARN.flv': 48,  # 1 day, 10 hours
                    '200404-0000_WARN.flv': 48,  # 1 day, 22 hours
                },
                "warn_str": [
                    '200402-1200_WARN',
                    '200403-0000_WARN',
                    '200403-1200_WARN',
                    '200404-0000_WARN',
                ],
            },
            {
                "add_hours":
                22,  # Reference time 2020-04-03 00:00
                "warned": {
                    '200403-1200_WARN.flv': 12,  # 12 hours
                    '200404-0000_WARN.flv': 24,  # 1 day
                    '200405-0000_WARN.flv': 48,  # 2 days
                },
                "warn_str": [
                    '200403-1200_WARN',
                    '200404-0000_WARN',
                    '200405-0000_WARN',
                ],
            },
            {
                "add_hours": 36,  # Reference time 2020-04-04 12:00
                "warned": {
                    '200405-0000_WARN.flv': 12,  # 12 hours
                    '200406-0000_WARN.flv': 48,  # 1 day, 12 hours
                },
                "warn_str": [
                    '200405-0000_WARN',
                    '200406-0000_WARN',
                ],
            },
            {
                "add_hours": 48,  # Reference time 2020-04-06 12:00
                "warned": {},
                "warn_str": [],
            },
        ]
        is_in_err = []
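        # check_for_warnings() reports each file on a line like: - "name" ...,
        # so capture the quoted clip name.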
        re_deleted = re.compile(r'-\s\"(.*)\".*')
        ref_dt = test_dt
        for expected in expected_warn:
            ref_dt += timedelta(hours=expected['add_hours'])
            warn_str = self.cleaner.check_for_warnings(ref_dt=ref_dt)
            print(f"\n--- Current time: {str(ref_dt)} ---\n{warn_str}")
            for actual_name, actual_th in self.cleaner.warned.items():
                check.equal(expected['warned'].get(actual_name), actual_th,
                            f"{actual_name} - {ref_dt}")

            actual_str = []
            for m in re_deleted.finditer(warn_str):
                actual_str.append(m.group(1))
            # Make sure output string contains what it should
            for name in expected["warn_str"]:
                check.is_in(name, actual_str, f"MISSING {name} - {ref_dt}")
                if name not in actual_str:
                    is_in_err.append(f"MISSING {name} - {ref_dt}")
            # Make sure there are no extras
            for name in actual_str:
                check.is_in(name, expected["warn_str"],
                            f"EXTRA {name} - {ref_dt}")
                if name not in expected["warn_str"]:
                    is_in_err.append(f"EXTRA {name} - {ref_dt}")
        if is_in_err:
            print("\nwarn_str mismatches (missing or extra):\n" +
                  '\n'.join(is_in_err))
Exemplo n.º 42
0
import re
import xml.etree.ElementTree as ET

import classes  # project-local
import utils    # project-local

def parse_programs_from_feed(data):
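    """Parse an ABC TV MRSS feed into a list of classes.Program objects."""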

    xml = ET.fromstring(data)

    programs_list = []
    for item in xml.iter('item'):
        p = classes.Program()

        title = item.find('title').text
        p.title = title

        subtitle = item.find('subtitle').text

        title_match = None
        title_parts = None

        if subtitle:
            # Series 2 Episode 25 Home Is Where The Hatch Is
            title_match = re.search(r'^[Ss]eries\s?(?P<series>\w+)\s[Ee]p(isode)?\s?(?P<episode>\d+)\s(?P<episode_title>.*)$', subtitle)
            if not title_match:
                # Series 8 Episode 13
                title_match = re.search(r'^[Ss]eries\s?(?P<series>\w+)\s[Ee]p(isode)?\s?(?P<episode>\d+)$', subtitle)
            if not title_match:
                # Episode 34 Shape Shifter
                title_match = re.search(r'^[Ee]p(isode)?\s?(?P<episode>\d+)\s(?P<episode_title>.*)$', subtitle)
            if not title_match:
                # Series 10 Rylan Clark, Joanna Lumley, Ant And Dec, The Vaccines
                title_match = re.search(r'^[Ss]eries\s?(?P<series>\d+)\s(?P<episode_title>.*)$', subtitle)
            if not title_match:
                # Episode 5
                title_match = re.search(r'^[Ee]p(isode)?\s?(?P<episode>\d+)$', subtitle)
            if not title_match:
                p.episode_title = subtitle

            if title_match:
                title_parts = title_match.groupdict()
                p.episode_title = title_parts.get('episode_title')

        try:
            # If we have actual series/episode fields given
            p.series = item.find('series').text
            p.episode = item.find('episode').text
        except AttributeError:
            try:
                # If we only get series/episode in the subtitle
                p.series = title_parts.get('series')
                p.episode = title_parts.get('episode')
            except AttributeError:
                pass

        p.description   = item.find('description').text
        p.url           = item.find('{http://www.abc.net.au/tv/mrss}videoAsset').text
        p.thumbnail     = item.find('{http://search.yahoo.com/mrss/}thumbnail').attrib['url']

        try:
            p.rating = item.find('{http://www.abc.net.au/tv/mrss}rating').text
        except AttributeError:
            # Rating not given for all programs
            pass

        duration = None
        try:
            duration = item.find('{http://search.yahoo.com/mrss/}content').attrib['duration']
            p.duration = int(duration)
        except (AttributeError, KeyError, ValueError):
            # 'duration' stays None if the <media:content> element is missing
            utils.log("Couldn't parse program duration: %s" % duration)

        try:
            p.link = item.find('link').text
        except AttributeError:
            pass

        p.date = utils.get_datetime(item.find('pubDate').text)
        p.expire = utils.get_datetime(item.find('{http://www.abc.net.au/tv/mrss}expireDate').text)

        programs_list.append(p)

    return programs_list
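
For reference, here is a minimal, self-contained sketch of how the subtitle patterns above behave; the sample subtitles are the ones quoted in the function's comments:

import re

SUBTITLE_PATTERNS = [
    r'^[Ss]eries\s?(?P<series>\w+)\s[Ee]p(isode)?\s?(?P<episode>\d+)\s(?P<episode_title>.*)$',
    r'^[Ss]eries\s?(?P<series>\w+)\s[Ee]p(isode)?\s?(?P<episode>\d+)$',
    r'^[Ee]p(isode)?\s?(?P<episode>\d+)\s(?P<episode_title>.*)$',
    r'^[Ss]eries\s?(?P<series>\d+)\s(?P<episode_title>.*)$',
    r'^[Ee]p(isode)?\s?(?P<episode>\d+)$',
]

for subtitle in ('Series 2 Episode 25 Home Is Where The Hatch Is', 'Episode 5'):
    for pattern in SUBTITLE_PATTERNS:
        match = re.search(pattern, subtitle)
        if match:
            # First pattern that matches wins, as in parse_programs_from_feed()
            print(subtitle, '->', match.groupdict())
            break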
Exemplo n.º 43
0
    class HTTPTestV2(HTTPTest):
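        """One HTTP check: send a single request and evaluate one assertion
        from the test definition."""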
        def __init__(self, outer_self, id, test_name, test_assert, **kwargs):
            # Calls the grandparent initialiser, skipping HTTPTest.__init__.
            super(HTTPTest, self).__init__()
            self.outer_self = outer_self
            self.assert_key, self.assert_value = test_assert
            self.response = None
            self.kwargs = kwargs
            self.url = kwargs['url']
            self.env_name = kwargs['env_name']
            self.test_name = test_name
            self.method = kwargs.get('method', "GET")
            self.data = kwargs.get('data', None)
            self.auth = None
            self.id = id
            if kwargs.get('username', None):
                self.auth = (kwargs['username'], kwargs['password'])

            self.ssl_info = None

        def runTest(self):
            """ 
            timeout
            skip
            ssl_verify
            headers

            - assert_status_code_is
            - assert_status_code_is_not
            - assert_header_is_set
            - assert_header_is_not_set
            - assert_header_value_contains
            - assert_body_contains
            - assert_is_json
            - assert_is_not_json
            """
            if self.kwargs.get('skip', None):
                raise unittest.SkipTest("test marked as skip (%s)" % self.url)
            self.verify = self.kwargs.get('ssl_verify', True)
            try:
                from urllib.parse import urlparse
                url_parsed = urlparse(self.url)
                if url_parsed.scheme == "https":
                    host = url_parsed.netloc.split(":")[0]
                    port = url_parsed.port if url_parsed.port else 443
                    try:
                        self.ssl_info = utils.get_ssl_info(self.outer_self, host, port)
                    except Exception:
                        self.ssl_info = None
                else:
                    self.ssl_info = None
            except Exception as e:
                print(e)
                import traceback
                traceback.print_exc()

                self.outer_self.error(self.outer_self.rid, str(e))
                if "handshake failure" in str(e):
                    self.ssl_info = str(e)
                else:
                    self.ssl_info = "Unexpected SSL Error (%s)" % str(e)
            try:
                self.starttime = utils.get_datetime()
                self._send_request()
                self.endtime = utils.get_datetime()
                delta = self.endtime - self.starttime
                self.duration = int(delta.total_seconds() * 1000)

                self.response_text = self.response.text
                if self.assert_key == "assert_status_code_is":
                    assert int(self.assert_value) == self.response.status_code, "%s is expected, but was %s" % (self.assert_value, self.response.status_code)
                if self.assert_key == "assert_status_code_is_not":
                    assert int(self.assert_value) != self.response.status_code, "%s is not expected, but was %s" % (self.assert_value, self.response.status_code)
                if self.assert_key == "assert_body_contains":
                    assert self.assert_value in self.response.text, "assert_body_contains failed, string %s not found in response body" % (self.assert_value)

                if self.assert_key == "assert_is_json":
                    try:
                        try:
                            self.response.json()
                        except AttributeError, e:
                            # accept null as valid JSON
                            if self.response_text == "null":
                                pass
                            else:
                                raise e
                    except:
                        raise AssertionError("assert_is_json evaluated to false, should be true")

                if self.assert_key == "assert_is_not_json":
                    try:
                        self.response.json()
                        raise AssertionError("assert_is_json evaluated to true, should be false")
                    except:
                        pass

                if self.assert_key == "assert_header_is_set":
                    header = self.assert_value
                    assert header in self.response.headers.keys(), "assert_header_is_set failed, header '%s' is missing" % header

                if self.assert_key == "assert_header_is_not_set":
                    header = self.assert_value
                    assert header not in self.response.headers.keys(), "assert_header_is_not_set failed, header '%s' is set" % header

                if self.assert_key == "assert_header_value_contains":
                    headers = self.assert_value
                    try:
                        for k, v in headers.items():
                            if self.response.headers.get(k, None):
                                assert v in self.response.headers.get(k), "assert_header_value_contains failed, header '%s' is: %s" % (k, self.response.headers.get(k))
                            else:
                                raise AssertionError("Header '%s' is not set" % k)
                    except AttributeError as e:
                        warn(self.outer_self.rid, "Headers is: " + str(headers))
                        warn(self.outer_self.rid, "Headers type is: " + str(type(headers)))

            except AssertionError as e:
                self.error = str(e)
                raise AssertionError("%s: %s" % (self.url, str(e)))
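
`_send_request` itself is not part of this listing; a minimal sketch of what it plausibly does, assuming the `requests` library (the timeout default is illustrative):

import requests

def _send_request(self):  # sketch of the HTTPTestV2 method
    # Issue the configured request; runTest() inspects self.response.
    self.response = requests.request(
        self.method,
        self.url,
        data=self.data,
        auth=self.auth,
        verify=self.verify,
        timeout=self.kwargs.get('timeout', 30),  # assumed default
    )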
Exemplo n.º 44
0
        if body.startswith("{"):
            # JSON
            config = json.loads(body)
        elif body.startswith(tuple(ALPHA)):
            # YAML
            import yaml
            config = yaml.load(body)
        else:
            raise Exception("Missing data, body was: "+body )

        # save config_data as dict
        if "yes" in save_data:
            data.data['config_data_dict'] = config

        results, ssl_info, total_counter, success = httptest.func(self, config, version, True)
        mydatetime = utils.get_datetime()
        runs = {
            'result': results,
            'ssl_info': ssl_info,
            'total': total_counter,
            'datetime': str(mydatetime)
        }
        data.data['last'] = {
            'success': success,
            'datetime': str(mydatetime)
        }
        if "runs" in data.data:
            data.data['runs'].append(runs)
            if len(data.data['runs']) > 30:
                to_cleanup = len(data.data['runs']) - 20
                for num in range(0, to_cleanup):
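                    # (listing truncated here; presumably the oldest entries
                    # are dropped, e.g. data.data['runs'].pop(0))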
Exemplo n.º 45
0
    def test_delete_pending(self):
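        """Advance a synthetic clock and check which pending files are
        deleted, and reported as deleted, at each step."""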
        test_dt = datetime(year=2020, month=4, day=1, hour=0, minute=0)
        input_files = {
            '200328-0000_DEL.flv',  # Already deleted
            '200329-1900_DEL.flv',  # Already deleted
            '200330-2100_DEL.flv',  # Already deleted
            '200401-0001_SOON.flv',  # 1 min
            '200401-0100_SOON.flv',  # 1 hour
            '200401-0600_WARN.flv',  # 6 hours
            '200401-1600_WARN.flv',  # 16 hours
            '200403-0000_WARN.flv',  # 2 days
        }
        self.cleaner.pending.clear()
        for n in input_files:
            self.cleaner.pending[n] = utils.get_datetime(n, TIME_FMT)
        expected_def = [
            {
                "add_hours":
                0,  # Reference time 2020-04-01 00:00
                "pending": [
                    '200401-0001_SOON.flv',
                    '200401-0100_SOON.flv',
                    '200401-0600_WARN.flv',
                    '200401-1600_WARN.flv',
                    '200403-0000_WARN.flv',
                ],
                "del_str": [
                    '200328-0000_DEL',
                    '200329-1900_DEL',
                    '200330-2100_DEL',
                ],
            },
            {
                "add_hours":
                0,  # Reference time 2020-04-01 00:00
                "pending": [
                    '200401-0001_SOON.flv',
                    '200401-0100_SOON.flv',
                    '200401-0600_WARN.flv',
                    '200401-1600_WARN.flv',
                    '200403-0000_WARN.flv',
                ],
                "del_str": [],
            },
            {
                "add_hours":
                4,  # Reference time 2020-04-01 04:00
                "pending": [
                    '200401-0600_WARN.flv',
                    '200401-1600_WARN.flv',
                    '200403-0000_WARN.flv',
                ],
                "del_str": [
                    '200401-0001_SOON',
                    '200401-0100_SOON',
                ],
            },
            {
                "add_hours": 24,  # Reference time 2020-04-02 04:00
                "pending": [
                    '200403-0000_WARN.flv',
                ],
                "del_str": [
                    '200401-0600_WARN',
                    '200401-1600_WARN',
                ],
            },
            {
                "add_hours": 24,  # Reference time 2020-04-03 04:00
                "pending": [],
                "del_str": [
                    '200403-0000_WARN',
                ],
            },
        ]
        is_in_err = []
        re_deleted = re.compile(r'-\s\"(.*)\".*')
        ref_dt = test_dt
        for expected in expected_def:
            ref_dt += timedelta(hours=expected['add_hours'])
            del_str = self.cleaner.delete_pending(ref_dt=ref_dt)
            print(f"\n--- Current time: {str(ref_dt)} ---\n{del_str}")
            check.equal(sorted(expected['pending']),
                        sorted(self.cleaner.pending.keys()), str(ref_dt))

            actual_str = []
            for m in re_deleted.finditer(del_str):
                actual_str.append(m.group(1))
            # Make sure output string contains what it should
            for name in expected["del_str"]:
                check.is_in(name, actual_str, f"MISSING {name} - {ref_dt}")
                if name not in actual_str:
                    is_in_err.append(f"MISSING {name} - {ref_dt}")
            # Make sure there are no extras
            for name in actual_str:
                check.is_in(name, expected["del_str"],
                            f"EXTRA {name} - {ref_dt}")
                if name not in expected["del_str"]:
                    is_in_err.append(f"EXTRA {name} - {ref_dt}")
        if is_in_err:
            print("\ndel_str mismatches (missing or extra):\n" +
                  '\n'.join(is_in_err))
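
Both tests recover file names from the cleaner's report with the same regex; a small sketch of the report-line format the tests assume (the surrounding wording is illustrative):

import re

re_deleted = re.compile(r'-\s\"(.*)\".*')

report_line = '- "200401-0600_WARN" will be deleted'  # illustrative wording
match = re_deleted.search(report_line)
if match:
    print(match.group(1))  # -> 200401-0600_WARN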
Exemplo n.º 46
0
def E_export_csv(params):
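    """Build BLE/WiFi filter lists and an optional time window from params,
    then export the database to CSV and append the result link to the
    e-mail body."""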
    init_time = None
    finish_time = None

    params["whitelist_ble"]=[]
    params["whitelist_wifi"]=[]
    params["blacklist_ble"]=[]
    params["blacklist_wifi"]=[]

    if params['bluetooth']:
        if 'is_only_hardcode' in params:
            if params['is_only_hardcode']:
                # Hardcode
                print("entered the wrong branch")
            else:
                print("entered the right branch")
                if params.get('ble_list') and 'is_blacklist_ble' in params:
                    if params['is_blacklist_ble']:
                        params["blacklist_ble"] = params['ble_list']
                    else:
                        params["whitelist_ble"] = params['ble_list']

    if params['wifi']:
        if params.get('wifi_list') and 'is_blacklist_wifi' in params:
            if params['is_blacklist_wifi']:
                params["blacklist_wifi"] = utils.parse_wifi_list(params['wifi_list'])[0]
            else:
                params["whitelist_wifi"] = utils.parse_wifi_list(params['wifi_list'])[0]


    if params.get('set_period'):
        init_time = params['init_time']
        finish_time = params['finish_time']


    params["a_func"] = utils.get_datetime() #TIME
    try:
        params["email_response"] += "Converting csv ..."
        params["csvfile"] = params["only_database_name"]+".csv"
        export_csv.run(
            inputfile=params['path_real']+params['database_name'],
            outputfile=params['path_real']+params["csvfile"],
            bluetooth=params['bluetooth'],
            wifi=params['wifi'],
            sensorhub=params['optimzation_sensor_hub'],
            battery=bool(params['optimzation_sensor_hub']),
            optimize=params['optimzation_sensor_hub'],
            whitelist_ble=params["whitelist_ble"],
            whitelist_wifi=params["whitelist_wifi"],
            blacklist_ble=params["blacklist_ble"],
            blacklist_wifi=params["blacklist_wifi"],
            init_time=init_time,
            finish_time=finish_time,
        )
    except Exception as e:
        utils.get_error(e, params)
        raise

    time_txt = utils.end_func("CSV conversion",params["a_func"])
    params["email_response"] += "OK\n"
    params["email_response"] += "\n CSV file = "+utils.create_link(params['KEY_IML'],str(params['path_real']),str(params["csvfile"]),str("1"),{'optimzation_sensor_hub':params["optimzation_sensor_hub"]})+"\n\n"
    params["email_response"] += time_txt