Example #1
def web_shorten(url):

    url = url.strip()

    if len(url) < 2 or not utils.check_url(url):
        return no_url()

    conn = utils.create_connection("test.db")

    check = utils.check_entry(url, conn)

    db_url = check[1] if check else False

    if db_url and db_url == url:
        conn.close()
        return already_used()

    shortcode = utils.make_key(6)

    _date = utils.get_date()

    utils.new_entry(url, shortcode, _date, _date, conn)
    conn.close()

    return shortcode
Example #2
def use_fresh_certs_only(single_project_row, web_df):
    """if True, `last_cert_id_check` will be read for assembling only fresh
    web certificates for given project and databse will be updated afterwards as well.
    
    Parameters:
     - `single_project_row` (pd.Series): row of company project to match.
     - `web_df` (pd.DataFrame): dataframe of CSP certificates to match to the
     company project.

    Returns:
     - a Pandas DataFrame containing fresh certificates for given project. This will 
     be a subset of initial `web_df` input.
    
    """
    try:
        possible_matches_scored = web_df[
            web_df.cert_id > int(single_project_row.last_cert_id_check)]
    except (TypeError, ValueError):  # last_cert_id_check was `NULL`
        possible_matches_scored = web_df
    update_query = """ 
        UPDATE company_projects 
        SET last_cert_id_check=%s
        WHERE project_id=%s
    """
    if len(possible_matches_scored):
        with create_connection() as conn:
            conn.cursor().execute(update_query, [
                max(possible_matches_scored.cert_id),
                single_project_row.project_id
            ])
            conn.commit()
    return possible_matches_scored
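A minimal usage sketch (an assumption, not part of the original snippet): `single_project_row` is a pandas Series carrying `project_id` and `last_cert_id_check`, `web_df` has a `cert_id` column, and `create_connection` points at the project database, which the call updates as a side effect.

import pandas as pd

# Hypothetical data; column names follow the docstring above, values are made up.
web_df = pd.DataFrame({"cert_id": [101, 102, 103],
                       "title": ["Cert A", "Cert B", "Cert C"]})
single_project_row = pd.Series({"project_id": 7,
                                "last_cert_id_check": 101})  # 102 and 103 are "fresh"

# NOTE: requires the project database behind create_connection() to exist,
# since the call also bumps last_cert_id_check for this project.
fresh_certs = use_fresh_certs_only(single_project_row, web_df)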
Example #3
def index(*args, **kwargs):
    db = create_connection()
    cursor = db.cursor()

    cursor.execute(select_all_categories)
    result = cursor.fetchall()
    cursor.close()

    box_string = ""

    for item in result:
        box_string += "<input id=\"{0}\" type=\"checkbox\" name=\"checkbox\" " \
                      "value=\"{1}\"/> <label for=\"{2}\">{3}</label><br />".format(item[0], item[1], item[0], item[1])

    sidebar_form = "\n    <form name=\"search\" action=\"\" method=\"POST\">\n" \
                   "    Search query\n    <input type=\"text\" name=\"search_query\"></br>\n" \
                   "    Initial category\n    <div style=\"height: 6em; width: 12em; overflow: auto;\">\n" \
                   "    {0}\n    </div>\n        <input type=\"submit\" value=\"Submit\"/>\n        " \
                   "</form>\n        ".format(box_string)

    template = load_template('index') + sidebar_form + load_template('search_result')

    if 'search_result' in kwargs:
        content_string = "<ul class=\"search_result\">"
        for item in kwargs['search_result']:
            content_string = content_string + "<li>" + item[0] + "  " + str(item[1]) + "</li>"
        content_string += "</ul>"
        template += "<p>Search result for :" + kwargs['query'] + "</p>" + content_string + load_template('footer')

    elif 'message' in kwargs:
        template += kwargs['message'] + load_template('footer')
    else:
        template += load_template('footer')

    return template
Example #4
def view_products_cat(*args, **kwargs):
    product_name = kwargs['query_string'].get('name')[0]
    db = create_connection()
    cursor = db.cursor()
    cursor.execute(sql_categories_for_product(product_name))
    result = cursor.fetchall()
    cursor.close()

    cat_for_prod = ""
    excluded_cats = []
    for item in result:
        cat_for_prod += "<option selected=\"selected\">" + item[0] + "</option>"
        excluded_cats.append(item[0])

    cursor = db.cursor()
    cursor.execute(sql_rest_of_cats(excluded_cats))
    result = cursor.fetchall()
    cursor.close()

    other_cats = ""
    for item in result:
        other_cats += "<option>" + item[0] + "</option>"

    form = "<form name=\"add_prod\" action=\"/admin/add_categories_to_product\" method=\"POST\">" \
           "<table style=\"width: 100%\" cellpadding=\"3\" cellspacing=\"0\"><tr><td style=\"width:33%\">" \
           "Categories <select multiple size=\"8\" name=\"init_cat\">{0}</select> </td>" \
           "<td align=\"center\" style=\"width:33%\"><input type=\"Button\" value=\">>\" onClick=\"SelectMoveRows(document.add_prod.init_cat, document.add_prod.other_cat)\"><br>" \
           "<input type=\"Button\" value=\"<<\" onClick=\"SelectMoveRows(document.add_prod.other_cat, document.add_prod.init_cat)\"></td>" \
           "<td style=\"width:33%\">Other categories <select  size=\"8\" multiple name=\"other_cat\">{1}</select></td></tr></table>" \
           "<input type=\"submit\" value=\"Save\">" \
           "<input type=\"hidden\" value=\"{2}\" name=\"prod_name\"\></form>".format(cat_for_prod, other_cats,
                                                                                     product_name)
    template = load_template('admin_header') + "Product :" + product_name + form + load_template('footer')
    return template
Example #5
def save_product(*args, **kwargs):
    try:
        prod_name = kwargs['query_string'].get('product_name')[0]
        prod_price = kwargs['query_string'].get('product_price')[0]
        init_cat = kwargs['query_string'].get('init_cat')[0]
    except TypeError:
        return load_template('admin_header') + "Please enter a valid name and price." + load_template('footer')

    db = create_connection()
    cursor = db.cursor()
    try:
        cursor.execute(insert_product(str(prod_name), float(prod_price), str(init_cat)))
        db.commit()
    except ValueError:
        db.rollback()
        template = load_template('admin_header') + "VALUE ERROR" + load_template('footer')
        return template

    except db.Error as e:
        print "Error code %s" % e.errno
        db.rollback()

    cursor.close()
    db.close()

    template = load_template('admin_header') + load_template('footer')
    return template
Example #6
def shorten():

    shortcode = ""

    if request.method == 'POST':
        received = request.get_json(force=True)

        url = received["url"] if received["url"] else ""

        if len(url) < 2 or not utils.check_url(url):
            return no_url()

        conn = utils.create_connection("test.db")

        check = utils.check_entry(url, conn)
        db_url = check[1] if check else False

        if db_url and db_url == url:
            conn.close()
            return already_used()

        try:
            shortcode = received["shortcode"]
        except KeyError:
            logging.warn("No shortcode provided, generating one...")
            shortcode = utils.make_key(6)

        if not utils.check_shortcode(shortcode):
            conn.close()
            return invalid_code()

        _date = utils.get_date()
        utils.new_entry(url, shortcode, _date, _date, conn)
        conn.close()
        return flask.make_response(shortcode, 201)
Example #7
def main():
    parsed_args = parser.parse_args()

    conn = create_connection(parsed_args.db_path, check_same_thread=False)

    feature_type = parsed_args.feature_type
    saved = parsed_args.saved
    variant = parsed_args.variant
    superboro_id = parsed_args.superboro_id
    is_sparse = parsed_args.sparse
    num_epochs = parsed_args.num_epochs
    batch_size = parsed_args.batch_size
    model_path = parsed_args.model_path

    feature_vec_size = FEATURE_TYPES[feature_type]['size']
    super_boro = SUPER_BOROS[superboro_id]

    if model_path is not None:
        model = tf.keras.models.load_model(model_path)
        start_epoch = int(model_path.split("_")[-1])
    else:
        model = create_boro_model([200, 50], feature_vec_size)
        start_epoch = 0

    dt_now = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    model_dir = os.path.join("models",
                             f'{dt_now}_{super_boro[0]}_f{feature_type}')
    os.mkdir(model_dir)

    sql_batch_size = 1e6
    sql_block_size = 1e5

    data_generator = extract_features
    data_generator_arguments = {
        "conn": conn,
        "table_name": "rides",
        "variant": 'all',
        "size": sql_batch_size,
        "block_size": sql_block_size,
        "datetime_onehot": FEATURE_TYPES[feature_type]['datetime_onehot'],
        "weekdays_onehot": FEATURE_TYPES[feature_type]['weekdays_onehot'],
        "include_loc_ids": FEATURE_TYPES[feature_type]['include_loc_ids'],
        "start_super_boro": super_boro,
        "end_super_boro": super_boro
    }

    features_file = os.path.join(
        'data', f'features_{super_boro[0]}_{feature_type}.npy')
    values_file = os.path.join('data',
                               f'values_{super_boro[0]}_{feature_type}.npy')

    if variant == 'all':
        train(model, data_generator, data_generator_arguments, saved,
              features_file, values_file, model_dir, is_sparse, num_epochs,
              batch_size, start_epoch)
    elif variant == 'batch':
        train_on_batches(model, data_generator, data_generator_arguments,
                         saved, features_file, values_file, model_dir,
                         is_sparse, num_epochs, batch_size, start_epoch)
Example #8
def main():

    DB_URL = 'mysql://{user}:{passwd}@{host}/{db}'.format(host=args.host, user=args.username,\
        passwd=args.password,db=args.db_name)
    logger.debug('DB_URL: {url}'.format(url=DB_URL))

    # Error handling for possible sql path location errors.
    if not (os.path.exists(args.path_to_sql) or os.path.isdir(args.path_to_sql)):
        logger.error('Invalid path to sql scripts: {path}'.format(path=args.path_to_sql))
        sys.exit(1)

    if args.updateVersion:
        utils.update_db_version(DB_URL, args.updateVersion)
        sys.exit(0)

    # get scripts
    scripts = utils.get_scripts(args.path_to_sql)
    highest_value = utils.get_max_script(scripts)

    try:
        session = utils.create_connection(DB_URL)
    except CreateConnectionException as e:
        logger.error('There was an Error when creating the DB session')
        sys.exit(1)
    try:
        version = utils.get_db_version(session)
    except LoginDbException as e:
        logger.error(e)
        sys.exit(4)

    if utils.do_upgrade(version, highest_value):
        # each script whose number is higher than version must be executed:
        # lower to higher => version must be updated for each script

        logger.info(
            "Highest value on sql scripts: {max}".format(max=highest_value))
        logger.info("Doing DB upgrade")

        ordered_scripts, scripts_dict = utils.get_ordered_scripts(scripts)

        for root, dirs, files in os.walk(args.path_to_sql):
            for f in files:
                if scripts_dict[f] > version:
                    # execute script
                    utils.run_sql_script(DB_URL,
                                         os.path.join(args.path_to_sql, f))
                    # update version
                    version = utils.update_db_version(DB_URL, scripts_dict[f])

        logger.info('Upgrade completed')
        logger.info('New version: {v}'.format(v=version))

    else:
        logger.info('Highest value on sql scripts: {max} is equal to or lower than version: {version}'\
                .format(version=version, max=highest_value))
        logger.info('Nothing to do')
Example #9
def batch_running(
    ip,
    notebook_root='../notebook',
):
    in_result = []
    cursor, db = create_connection()
    sql = 'SELECT notebook_id from result'
    cursor.execute(sql)
    sql_res = cursor.fetchall()
    for row in sql_res:
        in_result.append(int(row[0]))

    sql = 'SELECT id from notebook where add_run=1 and server_ip=\'' + ip + "'"
    cursor.execute(sql)
    sql_res = cursor.fetchall()
    all = 0
    can_use = 0
    can_use_1 = 0
    for row in sql_res:
        notebook_id = int(row[0])
        if notebook_id not in in_result:
            continue
        try:
            origin_code = get_code_txt(notebook_root + '/' + str(notebook_id) +
                                       '.ipynb')
        except Exception as e:
            print(e)
            return "read fail"
        origin_code, add, result = add_result(notebook_id, origin_code)
        # print(type(result))
        # print(result)
        if len(result) == 0:
            can_use += 1
            update_db("notebook", "add_model", '1', 'id', "=", notebook_id)
            update_db("result", "model_type", "'unknown'", 'notebook_id', "=",
                      notebook_id)
        if len(result) == 1:
            can_use += 1
            update_db("notebook", "add_model", '1', 'id', "=", notebook_id)
            sql = 'UPDATE result SET model_type = \'' + list(
                result)[0] + "' WHERE notebook_id=" + str(notebook_id)
            cursor.execute(sql)
            # print('delete id:' + str(notebook_id))
        if len(result) > 1:
            print(result)
            update_db("notebook", "add_model", '2', 'id', "=", notebook_id)
            sql = 'delete from result where notebook_id=' + str(notebook_id)
            cursor.execute(sql)
            db.commit()
            print('delete id:' + str(notebook_id))

            can_use_1 += 1
        all += 1
    print('1:', can_use)
    print('2:', can_use_1)
    print('all:', all)
    print('rate:', can_use / all)
Example #10
def update_rules():
    global gSetupRules
    for bgp_router in Configuration.bgp_routers.keys():
        tn = utils.create_connection(bgp_router)
        deploy_rule(tn, "route-map Path-End-Validation permit 1")
        for rule_id in gSetupRules:
            deploy_rule(tn, "match ip as-path " + rule_id)
        deploy_rule(tn, "match ip as-path allow-all")
        deploy_rule(tn, "exit")
        utils.close_connection(tn)
Example #11
def update_rules():
    global gSetupRules
    for bgp_router in Configuration.bgp_routers.keys():
        tn = utils.create_connection(bgp_router)
        deploy_rule(tn, "route-map Path-End-Validation permit 1")
        for rule_id in gSetupRules:
            deploy_rule(tn, "match ip as-path " + rule_id)
        deploy_rule(tn, "match ip as-path allow-all")
        deploy_rule(tn, "exit")
        utils.close_connection(tn)
Example #12
async def routine():
    with create_connection() as conn:
        search_df = pd.read_sql("SELECT * FROM search", conn)
    page_searches = {}
    for _, row in search_df.iterrows():
        page_searches.update({row.career_page: [[str(row.id), row.keywords]]})
    results = await main(page_searches)
    for result in results:
        search_id = result[0]
        for posting in result[1]:
            print(f"{result[1][posting]}: {posting}")
            with create_connection() as conn:
                conn.cursor().execute(
                    f"""
                    INSERT INTO found (search_id, link, keywords, date_found) VALUES (?, ?, ?, ?)
                """, [
                        search_id, posting, result[1][posting],
                        datetime.datetime.now().date()
                    ])
Example #13
def deploy_record(raw_record):
    record = raw_record.get()
    rule_id = "as" + str(record.asn)
    record_rule_ids[rule_id] = time.time()
    for bgp_router in Configuration.bgp_routers.keys():
        tn = utils.create_connection(bgp_router)
        deploy_rule(tn, "ip as-path access-list " + rule_id + " deny " + "_[^" + encode_neighbors(record.links) + "]_" + str(record.asn) + "_")
        if not record.transient_flag:
            deploy_rule(tn, "ip as-path access-list " + rule_id + " deny _" + str(record.asn) + "_[0-9]+_")
        deploy_rule(tn, "exit")
        utils.close_connection(tn)
Example #14
def create_table(pageNum):
    sql = """CREATE TABLE IF NOT EXISTS page{0}(
                imgId text PRIMARY KEY,
                score integer NOT NULL);""".format(pageNum)
    try:
        conn = create_connection(database)
        with conn:
            cur = conn.cursor()
            cur.execute(sql)
    except Exception as e:
        print(e)
Example #15
def getImgUrl(sign):
    try:
        conn = create_connection(database)
        pageNum = randomTable(conn)
        with conn:
            sql = '''SELECT imgID FROM page{0} WHERE SCORE {1} 10000 ORDER BY RANDOM() LIMIT 1;'''.format(pageNum, sign)
            cur = conn.cursor()
            filename = str(cur.execute(sql).fetchall()[0][0])
            filename = urllib.quote(filename)
            return destination + filename + ".png"
    except Exception as e:
        print(e)
Example #16
def read_configuration(bgp_router):
    if (Configuration.DEBUG):
        raw_cfg = file("BGP_config_example.txt").read()
        ip = json.loads(urllib.urlopen("http://ip.jsontest.com/").read())["ip"]
        raw_cfg = raw_cfg.replace("1.1.1.1", ip)
        return raw_cfg
    tn = utils.create_connection(bgp_router)
    tn.write("show running-config\n")
    config = tn.read(1024 * 1024)
    tn.write("exit\n")
    utils.close_connection(tn)
    return config
Example #17
def view_category(*args, **kwargs):
    string = "<ul>"
    db = create_connection()
    connection = db.cursor()
    connection.execute(select_all_categories)
    result = connection.fetchall()
    connection.close()
    for item in result:
        string += "<li><a href=\"/admin/view_category?id={0}\">{1}</a></li>".format(item[0], item[1])
    string += "</ul> \n"
    template = load_template('admin_header') + string + load_template('footer')
    return template
Example #18
def read_configuration(bgp_router):
    if (Configuration.DEBUG):
        raw_cfg = file("BGP_config_example.txt").read()
        ip = json.loads(urllib.urlopen("http://ip.jsontest.com/").read())["ip"]
        raw_cfg = raw_cfg.replace("1.1.1.1",ip)
        return raw_cfg
    tn = utils.create_connection(bgp_router)
    tn.write("show running-config\n")
    config = tn.read(1024 * 1024)
    tn.write("exit\n")
    utils.close_connection(tn)
    return config
Example #19
def save_message():

    # get the message
    request_data = request.get_json()

    # insert a record
    conn = utils.create_connection('/home/pi/projects/ledticker-pi/db/led_messages.db')
    m_id = utils.create_message(conn, request_data)

    # return a value
    return jsonify(
        message_id=m_id,
        message="message created"
    )
Example #20
def deploy_record(raw_record):
    record = raw_record.get()
    rule_id = "as" + str(record.asn)
    record_rule_ids[rule_id] = time.time()
    for bgp_router in Configuration.bgp_routers.keys():
        tn = utils.create_connection(bgp_router)
        deploy_rule(
            tn, "ip as-path access-list " + rule_id + " deny " + "_[^" +
            encode_neighbors(record.links) + "]_" + str(record.asn) + "_")
        if not record.transient_flag:
            deploy_rule(
                tn, "ip as-path access-list " + rule_id + " deny _" +
                str(record.asn) + "_[0-9]+_")
        deploy_rule(tn, "exit")
        utils.close_connection(tn)
Example #21
def update_fts():
    with create_connection() as conn:
        try:
            conn.cursor().execute(
                drop_fts)  # drop fts table if existing so it can be rewritten
            conn.commit()
        except sqlite3.OperationalError:
            pass
        for column in ['title', 'owner', 'contractor', 'city', 'engineer']:
            conn.cursor().execute(clean_string_column.format(column, column))
            conn.commit()
        conn.cursor().execute(create_fts)
        conn.commit()
        conn.cursor().execute(populate_fts)
        conn.commit()
Example #22
def send_email(found_id):
    with create_connection() as conn:
        found = pd.read_sql("""
            SELECT
                users.name as user_name,
                users.email,
                search.career_page,
                search.company,
                search.keywords,
                found.title,
                found.link 
            FROM found
            JOIN search 
            ON search.id = found.search_id
            JOIN users 
            ON users.id = search.user_id
            WHERE found.id = ?
        """, conn, params=[found_id]).iloc[0]
    print(found.user_name)
    message = Mail(
        to_emails=found.email,
        subject=f'New Job Posting for {found.title}',
        html_content=f"""
            <body>
                Hi {found.user_name},
                <br><br>
                Looks like one of your target career pages (<a href='{found.career_page}'>{found.company}</a>)
                recently posted a new job for <a href='{get_valid_link(found.career_page, found.link)}'>{found.title}</a>.
                <br><br>
                This matches your search for keyword "{found.keywords}".
                <br><br>
                Good luck!<br>
                <a href='www.joblert.me'>joblert.me</a>
            </body>
        """)
    message.from_email = From('*****@*****.**', 'joblert.me')
    print(found.user_name)
    message.to_email = To(found.email, found.user_name)
    try:
        with open(".secret.json") as f:
            api_key = json.load(f)["sendgrid_key"]
        sg = SendGridAPIClient(api_key)
        response = sg.send(message)
        print(response.status_code)
        print(response.body)
        print(response.headers)
    except Exception as e:
        print(e)
Example #23
def add_product_to_category(*args, **kwargs):
    prod_list = kwargs['query_string'].get('init_prod')
    category_id = kwargs['query_string'].get('cat_id')[0]
    db = create_connection()

    cursor = db.cursor()
    cursor.execute(sql_add_product_to_category(category_id, prod_list))
    db.commit()
    cursor.close()

    cursor = db.cursor()
    cursor.execute(sql_del_product_from_category(category_id, prod_list))
    db.commit()
    cursor.close()
    db.close()
    return view_category()
Example #24
def shortcodeStats(shortcode):
    """
    Receives a shortcode, check whether it's in the db and if so returns its stats in json
    """

    try:
        conn = utils.create_connection("test.db")
        entry, url = utils.check_entry(shortcode, conn)
        if entry:
            stats = utils.get_stats(shortcode, conn)
            stats = flask.jsonify(stats)
            conn.close()
            return flask.make_response(stats, 200)
    except:
        conn.close()
        return not_found()
Example #25
def get_term_sim(term_sublist, i):
    print "Starting thread %d\n" % i
    sim_fpath = config.folder + "resnik_sim%d.csv" % i
    ofile = open(sim_fpath, "w")
    conn = utils.create_connection(config.db_fpath)
    for term in term_sublist:
        cur = conn.cursor()
        t = (term,)
        for record in cur.execute('select node2,lca from lca where node1=?', t):
            term2 = record[0]
            lca_str = record[1]
            arr = lca_str.split(",")
            (max_sim, max_lca) = resnik_sim(term, term2, arr)
            ofile.write( term + "," + term2 + "," + str(max_sim) + "," + max_lca + "\n" )
    ofile.close()
    conn.close()
    print "get_term_sim %d finished!" % i
Example #26
def add_categories_to_product(*args, **kwargs):
    cat_list = kwargs['query_string'].get('init_cat')
    product_name = kwargs['query_string'].get('prod_name')
    db = create_connection()
    cursor = db.cursor()
    try:
        cursor.execute(sql_assign_product_to_category(product_name, cat_list))
        db.commit()
    except db.OperationalError:
        return load_template('admin_header') + "Product must have at least one category! " + load_template('footer')
    cursor.close()
    cursor = db.cursor()
    cursor.execute(sql_del_category_from_product(product_name, cat_list))
    db.commit()
    cursor.close()
    db.close()
    return view_product()
Example #27
def getImageForNow(pageNum, night):
    sql_night = '''SELECT url FROM page{0} WHERE score <= {1} ORDER BY score ASC'''.format(
        pageNum, set_score)
    sql_day = '''SELECT url FROM page{0} WHERE score > {1} ORDER BY score DESC'''.format(
        pageNum, set_score)
    try:
        conn = create_connection(database)
        with conn:
            cur = conn.cursor()
            if night:
                cur.execute(sql_night)
            else:
                cur.execute(sql_day)
            data = cur.fetchall()
            return selectImage(data)
    except Exception as e:
        print(e)
Example #28
def showShortcode(shortcode):
    """
    Receives a shortcode, check whether it's in the db and if so returns the corresponding url
    """
    try:
        conn = utils.create_connection("test.db")
        entry, url = utils.check_entry(shortcode, conn)
        if entry:
            newdate = utils.get_date()
            utils.update_entry(shortcode, conn)
            conn.close()
            resp = flask.make_response(url, 302)
            resp.headers["Location"] = url
            return resp
    except:
        conn.close()
        return not_found()
Example #29
def save_category(*args, **kwargs):
    cat = kwargs['query_string'].get('category')
    db = create_connection()
    cursor = db.cursor()
    try:
        cursor.execute(insert_category(str(cat[0])))
        db.commit()
    except db.Error:
        db.rollback()
    except TypeError:
        return load_template('admin_header') + "Enter a vaild name." + load_template('footer')

    cursor.close()
    db.close()

    template = load_template('admin_header') + load_template('footer')
    return template
Example #30
def create_notebook_pool():
    notebook_pool = []
    in_result = []
    cursor, db = create_connection()
    sql = 'select distinct notebook_id from result'
    cursor.execute(sql)
    sql_res = cursor.fetchall()
    for row in sql_res:
        in_result.append(int(row[0]))
    sql = 'select pair.nid from pair,dataset where pair.did=dataset.id and dataset.server_ip = \'' + ip + '\''
    cursor.execute(sql)
    sql_res = cursor.fetchall()
    for row in sql_res:
        if int(row[0]) not in in_result:
            continue
        if int(row[0]) not in notebook_pool:
            notebook_pool.append(int(row[0]))
    return notebook_pool
Example #31
def storeRating(prev_click, next_click, rating, joke_history):
    session_cookie = flask.request.cookies.get('custom-auth-session')

    if rating is not None and joke_history is not None:
        jokes, index, _, _ = json.loads(joke_history)

        joke_number = jokes[index].split(
            os.sep)[-1].split('init')[1].split('.')[0]
        joke_column = 'joke{}'.format(joke_number)
        j_dict = dict()
        j_dict['joke'] = [joke_column, rating]
        j_dict['user'] = session_cookie
        conn = create_connection(sqlite3_db)
        update_rating(conn, j_dict)
        conn.commit()
        conn.close()

    return None
Example #32
def analyze(imgUrl, title, pageNum):
    imgData = requests.get(imgUrl).content
    bin_data = io.BytesIO(imgData)
    file_bytes = np.asarray(bytearray(bin_data.read()), dtype=np.uint8)
    img = cv2.imdecode(file_bytes, cv2.IMREAD_COLOR)
    param = [0.114, 0.587, 0.299]
    result = 0
    for i in range(3):
        histr = cv2.calcHist([img], [i], None, [256], [0, 256])
        result += np.argmax(histr)**2 * param[i]
    try:
        conn = create_connection(database)
        with conn:
            # create_score(conn, pageNum, (title,result,imgUrl))
            create_or_update_score(conn, pageNum, imgUrl, title, result)
            clean_table(conn, pageNum)
    except Exception as e:
        print(e)
Example #33
def get_connect(db_file):
    """Get connect to the database."""
    db_schema = """
        CREATE TABLE IF NOT EXISTS auctions (
            auct_num    integer not NULL,
            date_in     text not NULL,
            date_out    text not NULL,
            money       real not NULL,
            percent     real not NULL,
            val_code    text not NULL,
            stock_code  text not NULL,
            PRIMARY KEY (auct_num, date_in)
        );
    """
    if not os.path.exists(db_file):
        result = init_db(db_file, db_schema)
        if result is not True:
            show_report("Database initialization error: {}".format(result))
    return create_connection(db_file)
Example #34
def generate_wordcloud(term_field):
    field = term_field.split('_')[-1]
    term = '_'.join(term_field.split('_')[:-1])
    query = """
        SELECT {}
        FROM web_certificates
        WHERE cert_id in (
            SELECT cert_id 
            FROM cert_search 
            WHERE text MATCH %s
        )
    """
    with create_connection() as conn:
        df = pd.read_sql(query.format(field), conn, params=[term])
    df['contractor_clean'] = df[field].apply(lambda x: cleanco(x).clean_name())
    relevant_words = [
        word.lower().lstrip().rstrip().replace('.', '')
        for word in df['contractor_clean']
    ]
    relevant_text = " ".join(relevant_words)
    stopwords = set(STOPWORDS)
    stopwords.update(general_terms + dvision_terms + term.split(' '))
    if field != 'owner':
        stopwords.update(geographic_locations)
    try:
        wordcloud = WordCloud(
            stopwords=stopwords,
            background_color=None,
            mode='RGBA',
            width=1000,
            height=400,
            color_func=lambda *args, **kwargs: "black").generate(
                relevant_text.upper())
        if len(wordcloud.words_):
            wordcloud.recolor(color_func=grey_color_func, random_state=3)
            wordcloud.to_file(
                f"static/wordcloud_{term.replace(' ', '_')}_{field}.png")
        return len(df), len(wordcloud.words_) / len(df)
    except ValueError:
        # search term did not generate enough words
        return len(df), 0
Example #35
def add_product(*args, **kwargs):
    db = create_connection()
    cursor = db.cursor()
    cursor.execute(select_all_categories)
    result = cursor.fetchall()
    cursor.close()
    db.close()
    string = ""
    for item in result:
        string += "<option>" + item[1] + "</option>"

    add_product_form = "<form name=\"add_prod\" action=\"/admin/add_product\" method=\"POST\">" \
                       "Product name : <input type=\"text\" name=\"product_name\"></br>" \
                       "Product price : <input type=\"text\" name=\"product_price\"></br>" \
                       "Initial category : <select name=\"init_cat\">{0}</select>" \
                       "<input type=\"submit\" value=\"Submit\"></form>".format(string)

    template = load_template('admin_header') + \
               add_product_form + \
               load_template('footer')
    return template
Example #36
def single_add_model(notebook_root, notebook_id, ip):
    origin_code = get_code_txt(notebook_root + '/' + str(notebook_id) +
                               '.ipynb')
    origin_code, add, result = add_result(notebook_id, origin_code)
    cursor, db = create_connection()
    sql = "SELECT pair.nid, dataset.dataSourceUrl, notebook.server_ip " \
          "FROM pair, notebook, dataset " \
          "WHERE notebook.id=pair.nid " \
          "and dataset.id=pair.did " \
          "and (notebook.add_sequence=1 or notebook.add_sequence=0 and (notebook.cant_sequence=2 or notebook.cant_sequence=3)) " \
          "and dataset.isdownload=1 " \
          "and dataset.server_ip='" + ip + "' " \
          "and notebook.id="+str(notebook_id)
    cursor.execute(sql)
    sql_res = cursor.fetchall()
    for row in sql_res:
        dataset_name = row[1].split('/')[-1].strip()
        break
    dataset_path_root = '../spider/unzip_dataset/' + dataset_name + '.zip'

    code_list = origin_code.split('\n')
    for index, line in enumerate(code_list):
        print(index, line)
Example #37
def search(*args, **kwargs):
    db = create_connection()
    cursor = db.cursor()
    try:
        query = kwargs['query_string'].get('search_query')[0]
    except TypeError:
        query = ""

    cats = (kwargs['query_string'].get('checkbox'))
    try:
        cursor.execute(sql_search_query(query, cats))
        result = cursor.fetchall()
        cursor.close()
        db.close()
    except db.Error:
        error_message = "Oops! Something went wrong!"
        template = index(message=error_message)
        return template
    template = index(search_result=result, query=query)
    if result == ():
        empty_result_message = "God hate us all!!!!!!!!!"
        template = index(message=empty_result_message)

    return template
Example #38
def process_as_feedback(feedback):
    """Takes in user feedback from the web app or a clicked email link and updates the database accordingly.

    Parameters:
    feedback (dict): request.args coming from the url clicked by the user to submit feedback
    regarding the quality of the match. The click comes either from the web app or from a
    potential-match email that would have been sent out.

    """
    project_id = feedback["project_id"]
    job_number = feedback["job_number"]
    response = int(feedback["response"])
    source = feedback["source"]
    cert_id = feedback["cert_id"]
    logger.info(f"got feedback `{response}` for job #`{job_number}`")
    with create_connection() as conn:
        try:
            was_prev_closed = (pd.read_sql(
                "SELECT * FROM company_projects WHERE project_id=%s",
                conn,
                params=[project_id],
            ).iloc[0].closed)
        except IndexError:
            logger.info(
                "job must have been deleted from company_projects at some point... skipping."
            )
            return "deleted"
    if was_prev_closed:
        logger.info(
            "job was already matched successfully and logged as `closed`... skipping."
        )
        return "already_closed"
    if response == 1:
        logger.info(
            f"got feeback that cert_id {cert_id} from {source} was correct")
        update_status_query = (
            "UPDATE company_projects SET closed = 1 WHERE project_id = %s")
        with create_connection() as conn:
            conn.cursor().execute(update_status_query, [project_id])
            conn.commit()
        logger.info(
            f"updated company_projects to show `closed` status for job #{job_number}"
        )
    with create_connection() as conn:
        conn.cursor().execute(
            f"""
            INSERT INTO attempted_matches (project_id, cert_id, ground_truth, multi_phase, log_date, validate) 
            VALUES (%s, %s, %s, %s, %s, 0)
        """, [
                project_id, cert_id, 1 if response == 1 else 0,
                1 if response == 2 else 0,
                str(datetime.datetime.now().date())
            ])
        conn.commit()
        df = pd.read_sql("SELECT * FROM attempted_matches", conn)
        delete_record = df[df.duplicated(subset=["project_id", "cert_id"],
                                         keep='last')]
        if len(delete_record):
            conn.cursor().execute(
                f"""
                DELETE FROM attempted_matches
                WHERE idx = %s
            """, [delete_record.idx])
            conn.commit()
        logger.info(
            f"cert_id`{cert_id}` from {source} was a "
            f"{'successful match' if response == 1 else 'mis-match'} for job "
            f"{project_id} (#{job_number})")
Example #39
def communicate(single_web_cert, single_project, test=False):
    """Constructs an email message with info pertaining to the match of a company project and a web
    CSP certificate, and sends the email accordingly.

    Parameters:
     - `single_web_cert` (pd.DataFrame): single-row dataframe containing info of the successfully
     matched web CSP certificate.
     - `single_project` (pd.Series): series containing info of the successfully matched
     company_project.
     - `test`: if set to `True`, short-circuits out of the function without doing anything.

    """
    if len(single_web_cert) > 1:
        raise ValueError(
            f"dataframe passed was supposed to contain only a single row - "
            f"it contained {len(single_web_cert)} rows instead.")
    if not isinstance(single_project, pd.Series):
        raise TypeError(
            "`single_project` was supposed to be a pandas Series object."
        )
    receiver_emails_dump = single_project.receiver_emails_dump
    receiver_email = ast.literal_eval(receiver_emails_dump)
    source = single_web_cert.iloc[0].source
    source_base_url_query = "SELECT base_url FROM base_urls WHERE source=%s"
    with create_connection() as conn:
        base_url = pd.read_sql(source_base_url_query, conn,
                               params=[source]).iloc[0].base_url
    url_key = single_web_cert.iloc[0].url_key
    pub_date = datetime.datetime(*[
        int(single_web_cert.iloc[0].pub_date.split("-")[x]) for x in range(3)
    ]).date()
    due_date = lambda delay: pub_date + datetime.timedelta(days=delay)
    with create_connection() as conn:
        project_title = pd.read_sql(
            "SELECT * FROM company_projects WHERE project_id=%s",
            conn,
            params=[single_project.project_id]).iloc[0].title
    intro_msg = (
        f"Hi {', '.join(receiver_email.keys())},"
        f"<br><br>"
        f"It looks like your project #{single_project.job_number} "
        f"({project_title}) might be almost ready for holdback release!"
        f"<br>")
    cert_msg = (
        f"Please <a href='{base_url}{url_key}'>click here</a> to make sure "
        f"HBR-Bot correctly matched your project. <b>Don't forget to come back"
        f"when you're done!</b>")
    timing_msg = (
        f"If it's the right project, then the certificate was just published "
        f"on {datetime.datetime.strftime(pub_date,'%B %e, %Y')}. This means a "
        f"valid holdback release invoice could be submitted as of:<br>"
        f"A)&#9;{datetime.datetime.strftime(due_date(45),'%B %e, %Y')} "
        f"if the contract was signed before October 1, 2019 or;<br>"
        f"B)&#9;{datetime.datetime.strftime(due_date(60),'%B %e, %Y')} "
        f"if the contract was signed since then."
        f"<br>")
    link_constructor = "https://www.hbr-bot.ca/process_feedback?project_id={}&job_number={}&response={}&source={}&cert_id={}"
    feedback_msg = (
        f"Your feedback will be required so that HBR Bot can properly "
        f"handle this ticket, whether that means closing it out or keep "
        f"searching for new matches. It will also help improve the "
        f"matching algorithm for future projects.<br>"
        f"<br>"
        f"<a href='{link_constructor.format(single_project.project_id, single_project.job_number, 0, source, single_web_cert.iloc[0].cert_id)}'>link does not relate to my project</a>"
        f"<br>"
        f"<a href='{link_constructor.format(single_project.project_id, single_project.job_number, 1, source, single_web_cert.iloc[0].cert_id)}'>link is accurate match for my project</a>"
        f"<br>"
        f"<a href='{link_constructor.format(single_project.project_id, single_project.job_number, 2, source, single_web_cert.iloc[0].cert_id)}'>link is close but seems to relate to a different phase or stage</a>"
        f"<br><br>")
    disclaimer_msg = (
        "Fianlly, please be aware this is a fully automated message. "
        "The info presented above could be erroneous."
        "<br>")
    closeout_msg = "Thanks,<br>" "HBR Bot<br>"
    message = "<br>".join([
        intro_msg, cert_msg, timing_msg, feedback_msg, disclaimer_msg,
        closeout_msg
    ])
    send_email(receiver_email, message, single_project.job_number, test=test)
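A minimal call sketch (an assumption: both arguments come from the matching pipeline upstream, and `matched_certs_df` / `company_projects_df` are hypothetical names, not part of the original):

# single_web_cert must be a one-row DataFrame, single_project a Series.
communicate(
    single_web_cert=matched_certs_df.iloc[[0]],
    single_project=company_projects_df.iloc[0],
    test=True,  # forwarded to send_email so no real email goes out
)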
Example #40
def deploy_allow_all_rule():
    for bgp_router in Configuration.bgp_routers.keys():
        tn = utils.create_connection(bgp_router)
        deploy_rule(tn, "ip as-path access-list allow-all permit")
Example #41
from utils import create_connection
from thread import MyThread

connection = create_connection()
profiles_collection = connection["profiles"]
script_path = '/var/InstagramBot/bot/get_followers/get_followers.php'
log_path = '/var/log/InstagramBot/get_followers/'

profiles = profiles_collection.find({'verifyData.verify': True, 'config.profile.numberOfDays': {'$gt': 0}},
                                    {'_id': 1, 'instagramData': 1, 'config': 1})

for profile in profiles:
    MyThread(profile, script_path, log_path).start()
Example #42
def create_test_db():
    abs_dir_path = os.path.abspath(__file__).replace("test/test_setup.py", "")
    os.chdir(abs_dir_path + "test/")
    csv_file_names = [x for x in os.listdir() if x.endswith(".csv")]
    for csv_table_name in csv_file_names:
        with create_connection(db_name="test_cert_db.sqlite3") as conn:
            pd.read_csv(csv_table_name).to_sql(csv_table_name[:-4],
                                               conn,
                                               index=False)
    # Execute migration scripts below to modify on newly populated tables
    with create_connection(db_name="test_cert_db.sqlite3") as conn:
        conn.cursor().executescript("""
            PRAGMA foreign_keys=off;
            ALTER TABLE company_projects RENAME TO old_table;
            CREATE TABLE company_projects (
                project_id INTEGER PRIMARY KEY,
                job_number TEXT,
                city TEXT,
                address TEXT,
                title TEXT,
                contractor TEXT,
                owner TEXT,
                engineer TEXT,
                closed INTEGER,
                receiver_emails_dump TEXT,
                address_lat REAL,
                address_lng REAL, 
                city_lat REAL,
                city_lng REAL,
                city_size REAL,
                company_id TEXT NOT NULL,
                last_cert_id_check INTEGER
            );
            INSERT INTO company_projects SELECT * FROM old_table;
            DROP TABLE old_table;
            PRAGMA foreign_keys=on;
        """)
        conn.commit()
        conn.cursor().executescript("""
            PRAGMA foreign_keys=off;
            ALTER TABLE web_certificates RENAME TO old_table;
            CREATE TABLE web_certificates (
                cert_id INT PRIMARY KEY NOT NULL,
                pub_date TEXT,
                city TEXT,
                address TEXT,
                title TEXT,
                owner TEXT,
                contractor TEXT,
                engineer TEXT,
                url_key TEXT,
                source VARCHAR DEFAULT "dcn",
                cert_type VARCHAR DEFAULT "csp",
                address_lat REAL,
                address_lng REAL,
                city_lat REAL,
                city_lng REAL,
                city_size REAL
            );
            INSERT INTO web_certificates SELECT * FROM old_table;
            DROP TABLE old_table;
            PRAGMA foreign_keys=on;
        """)
        conn.commit()
        conn.cursor().executescript("""
            PRAGMA foreign_keys=off;
            ALTER TABLE contacts RENAME TO old_table;
            CREATE TABLE contacts (
                id INTEGER PRIMARY KEY,
                company_id TEXT NOT NULL,
                name TEXT,
                email_address TEXT
            );
            INSERT INTO contacts SELECT * FROM old_table;
            DROP TABLE old_table;
            PRAGMA foreign_keys=on;
        """)
        conn.commit()
        conn.cursor().executescript("""
            PRAGMA foreign_keys=off;
            ALTER TABLE users RENAME TO old_table;
            CREATE TABLE users (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                email TEXT UNIQUE,
                profile_pic TEXT,
                account_type TEXT,
                date_added TEXT
            );
            INSERT INTO users SELECT * FROM old_table;
            DROP TABLE old_table;
            PRAGMA foreign_keys=on;
        """)
        conn.commit()
    os.rename(
        abs_dir_path + "test/test_cert_db.sqlite3",
        abs_dir_path + "test_cert_db.sqlite3",
    )
    shutil.copy(
        abs_dir_path + "test/results.json",
        abs_dir_path + "results.json",
    )
    os.chdir(abs_dir_path)
Example #43
def main(argv):
    print("starting db init")
    conn = utils.create_connection(db_file)
    utils.create_table(conn)
    get_messages(conn)
Example #44
            'name': category.find('h2').find('b').get_text(),
            'url': category.find('a').get('href')
        }
        categories.append(art)

    if soup.find("a", text="Next"):
        next_link = soup.find("a", text="Next").get('href')
        page = requests.get(url + next_link)
        soup = BeautifulSoup(page.content, 'html.parser')
        print(next_link)
    else:
        break

database = 'mp3database{}.db'.format(BASE)

create_connection(
    "C:/Users/val31/Desktop/Projects/mp3scrape/{}".format(database))

Base = declarative_base()


class Artist(Base):
    __tablename__ = 'artist'

    id = Column(Integer, primary_key=True)
    name = Column(String(250), nullable=False)
    created_date = Column(DateTime, default=datetime_berlin)
    updated_date = Column(DateTime, default=datetime_berlin)
    albums = relationship('Album', back_populates="artist")

    #     tracks = relationship('Track', back_populates="artist")