Example #1
def craigslist_searcher(locations, site=None):
    for location in locations:
        url = "https://{}.craigslist.org/search/{}/".format(
            location, (site or CRAIGSLIST_SITE))
        response = requests.get(url)
        soup = BeautifulSoup(response.content, 'lxml')
        db.insert_into_db(
            build_craigslist_data_object(soup, url, location,
                                         'gigs, computer'))
Example #2
def read_csv_and_store_in_db():
    with open('developer_score_5_criteria.csv', 'r') as fin:
        dr = csv.DictReader(fin)
        to_db = [(i['name'], i['LinkedIn content'],
                  i['Public coding activities'], i['Github Analysis'],
                  i['Stackoverflow Analysis'], i['Tech Keys involved'])
                 for i in dr]
        for person in to_db:
            dbs.insert_into_db(*person)
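
The dbs.insert_into_db helper is not shown in the source. A minimal sketch of what it might look like, assuming a sqlite3 database whose table mirrors the six CSV columns (the db path and table name are assumptions):

import sqlite3

def insert_into_db(name, linkedin, coding, github, stackoverflow, tech_keys):
    # hypothetical helper: 'developers.db' and 'developer_scores' are assumed names
    conn = sqlite3.connect('developers.db')
    with conn:  # commits on success, rolls back on error
        conn.execute(
            'INSERT INTO developer_scores VALUES (?, ?, ?, ?, ?, ?)',
            (name, linkedin, coding, github, stackoverflow, tech_keys))
    conn.close()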
Example #3
def post_uuids():
    UUID_LENGTH = 16
    print(request.content_type)
    assert request.content_type == 'application/octet-stream'
    data = request.get_data()
    assert len(data) % UUID_LENGTH == 0

    # split the raw body into consecutive 16-byte UUIDs
    uuids = (data[i:i + UUID_LENGTH] for i in range(0, len(data), UUID_LENGTH))
    db.insert_into_db(get_db_connection(), uuids)

    return ""
Example #4
def get(self, key):
    # handle the case where we have a db
    if self.conn:
        value = lookup_table(self.conn, key)
        if not value:
            # the additional tuple is a bit dirty…
            # but the fetchone method called in lookup_table
            # returns a tuple, and this way it works in both cases
            value = (self.f(key), )
            insert_into_db(self.conn, key, tuple(value[0]))
        return value[0]
    else:
        # handle the case where we don't have a db (for instance, for tests)
        if key not in self.cache:
            value = self.f(key)
            self.cache[key] = value
        return self.cache[key]
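
Only the get method appears in the source. A minimal sketch of the wrapper class it implies, reconstructed from the attributes it uses (the class name and constructor are assumptions):

class CachedFunction:
    # hypothetical enclosing class, inferred from self.f / self.conn / self.cache
    def __init__(self, f, conn=None):
        self.f = f          # the expensive function being memoized
        self.conn = conn    # optional db connection for persistent caching
        self.cache = {}     # in-memory fallback used when conn is None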
Example #5
def align_images(path_originals, path_scanned, output_path, phone=None, conn=None):
    scanned = os.listdir(path_scanned)
    total_mse = 0
    total_ssim = 0
    count = 0
    for image in scanned:
        if conn and check_if_image_exists(image, 2, phone, conn):
            print("Image {} for phase {} and phone {} already exists in db".format(image, 2, phone))
            continue
        original_image_name = image.split('.')[0] + '.png'
        result = align_image(os.path.join(path_originals, original_image_name),
                             os.path.join(path_scanned, image),
                             os.path.join(output_path, image))
        try:
            status, ssim, mse = result
        except (TypeError, ValueError):
            # align_image failed; nothing to record for this image
            continue
        total_mse += mse
        total_ssim += ssim
        count += 1
        if conn:
            insert_into_db(image, status, 2, ssim, None, phone, conn)
    if count:
        print('Average ssim: {}; average mse: {}'.format(total_ssim / count, total_mse / count))
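
Note that align_image is not shown in the source; from the unpacking above it can be inferred to return a (status, ssim, mse) tuple on success and a falsy value on failure.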
Example #6
def insert_callback(session, response):
    soup = BeautifulSoup(response.content, 'lxml')
    location = urllib.parse.urlparse(response.url).hostname.split('.')[0]
    print("Executing callback for:" + location)
    data = build_craigslist_data_object(soup, response.url, location, 'gigs, computer')
    db.insert_into_db(data)
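
The (session, response) signature matches the background_callback hook from pre-1.0 versions of requests-futures, which is an assumption about where this callback came from. A hedged sketch of the wiring, with an example Craigslist URL:

from requests_futures.sessions import FuturesSession

session = FuturesSession()
future = session.get('https://newyork.craigslist.org/search/cpg/',
                     background_callback=insert_callback)  # pre-1.0 requests-futures API
future.result()  # block until both the request and the callback complete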
Example #8
def login():
    if request.method == 'POST':
        # the raw request body is expected to be "username,password"
        data = request.data.decode("utf-8").split(",")
        insert_into_db(data[0], data[1])
    return render_template('login.html')
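
A hedged client-side sketch of what this handler expects; the host, route, and credentials are placeholders, while the comma-separated body format is read directly off the code above:

import requests

# raw body, not form-encoded: the handler splits request.data on ","
requests.post('http://localhost:5000/login', data='alice,hunter2')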
Example #9
def insert_callback(session, response):
    soup = BeautifulSoup(response.content, 'lxml')
    location = urllib.parse.urlparse(response.url).hostname.split('.')[0]
    data = build_craigslist_data_object(soup, response.url, location,
                                        'gigs, computer')
    db.insert_into_db(data)