Example #1
import logging

from database import mydatabase

log = logging.getLogger(__name__)  # assumed: the original script configures its own logger


def main():
    dbms = mydatabase.MyDatabase(mydatabase.SQLITE,
                                 dbname='mydb.sqlite',
                                 log=log)
    # dbms.create_db_tables()

    dbms.print_all_data(mydatabase.USERS)
Example #2
from database import mydatabase


def main():
    dbms = mydatabase.MyDatabase(mydatabase.SQLITE, dbname='mydb.sqlite')
    # Create Tables
    dbms.create_db_tables()
    # dbms.insert_single_data()
    dbms.print_all_data(mydatabase.USERS)
    dbms.print_all_data(mydatabase.ADDRESSES)
    dbms.sample_query()  # simple query
    dbms.sample_delete()  # delete data
    dbms.sample_insert()  # insert data
    dbms.sample_update()  # update data
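The database.mydatabase module itself is not shown in these examples. As a rough, hedged sketch of what such a wrapper might look like (assuming SQLAlchemy underneath; only the names that appear in the calls above are taken from the examples, everything else is illustrative):

from sqlalchemy import create_engine, text

# Constants referenced by the examples (illustrative values).
SQLITE = 'sqlite'
USERS = 'users'
ADDRESSES = 'addresses'


class MyDatabase:
    # Maps a dbtype constant to a SQLAlchemy connection-string template.
    DB_ENGINE = {SQLITE: 'sqlite:///{DB}'}

    def __init__(self, dbtype, dbname='', log=None):
        self.log = log
        self.db_engine = create_engine(self.DB_ENGINE[dbtype].format(DB=dbname))

    def print_all_data(self, table):
        # Dump every row of the given table to stdout.
        with self.db_engine.connect() as conn:
            for row in conn.execute(text('SELECT * FROM {}'.format(table))):
                print(row)

The remaining methods used above (create_db_tables, sample_query, sample_insert, sample_delete, sample_update, insertmany_sqlite3) would follow the same pattern of executing SQL through self.db_engine.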
Example #3
from PIL import Image
import face_recognition
import glob
from database import mydatabase

dbms = mydatabase.MyDatabase(mydatabase.SQLITE, dbname='mydb.sqlite')
# dbms.create_db_tables()


def findFace(filenames):
    # Load the jpg file into a numpy array

    all_data = []
    for file in filenames:
        data = []
        image = face_recognition.load_image_file(file)
        face_locations = face_recognition.face_locations(image)
        # print("I found {} face(s) in this photograph.".format(len(face_locations)))
        if face_locations:
            data = [
                "'" + file + "'", "'" + file + "'",
                str(len(face_locations))
            ]
            data_string = ",".join(data)
            all_data.append(data_string)
        # for face_location in face_locations:
        #     top, right, bottom, left = face_location
        #     print("A face is located at pixel location Top: {}, Left: {}, Bottom: {}, Right: {}".format(top, left, bottom, right))
    dbms.insertmany_sqlite3("imagelist", "imagename,imagepath,faceCount",
                            all_data)
    print("--------- completed")
Example #4
import numpy as np
import sklearn
import pickle
from face_recognition import face_locations
from PIL import Image, ImageDraw, ImageFont
from tqdm import tqdm
import cv2
import pandas as pd
# we are only going to use these 3 attributes
COLS = ['Asian', 'White', 'Black']
N_UPSCLAE = 1
# ------- Ethnicity Prediction
from database import mydatabase
dbms = mydatabase.MyDatabase(
    mydatabase.SQLITE,
    dbname='/Users/divyachandana/Documents/NJIT/work/summertasks/may25-may30/Park_face/mydb.sqlite'
)

# images_path = '/Users/divyachandana/Documents/NJIT/work/summertasks/jun1-jun5/atlanta'

# images_path = '/Users/divyachandana/Documents/NJIT/work/summertasks/jun1-jun5/nyc'


def main():
    with open('face_model.pkl', 'rb') as f:
        clf, labels = pickle.load(f, encoding="latin1")
    # db_table = 'face_attributes_atlanta'
    db_table = 'face_attributes_nyc'

    # files = glob.glob(r'/Users/divyachandana/Documents/NJIT/work/summertasks/jun1-jun5/atlanta/*.jpg')
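The Example #4 snippet ends before the pickled classifier is applied. In a typical face_recognition workflow each detected face is turned into a 128-dimensional encoding and passed to the scikit-learn model; a hedged sketch of that step follows (the function name and the choice of the 'cnn' detector are assumptions, not taken from the original script):

import face_recognition
import pandas as pd
from face_recognition import face_locations


def predict_face_attributes(img_path, clf, labels, n_upscale=1):
    img = face_recognition.load_image_file(img_path)
    locs = face_locations(img, number_of_times_to_upsample=n_upscale, model='cnn')
    if not locs:
        return None
    # One 128-d encoding per detected face, one probability row per face.
    encodings = face_recognition.face_encodings(img, locs)
    return pd.DataFrame(clf.predict_proba(encodings), columns=labels)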
Example #5
import numpy
from PIL import Image
import mxnet as mx
import gluoncv
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive

ctx = mx.cpu(0)
model = gluoncv.model_zoo.get_model('psp_resnet101_ade', pretrained=True, ctx=ctx)
import csv
import glob
from datetime import datetime
from timeit import default_timer as timer

# ------- gluon ----
from database import mydatabase
dbms = mydatabase.MyDatabase(mydatabase.SQLITE, dbname='/Users/divyachandana/Documents/NJIT/work/summertasks/june15-june20/semantic-segmentation-pixel/semanticdb.sqlite')


def main():
    start = timer()
    print('Processing Start time: %.1f' % (start))
    print("current time", datetime.now())
    gauth = GoogleAuth()
    gauth.LocalWebserverAuth()

    drive = GoogleDrive(gauth)

    # Auto-iterate through all files that matches this query
    file_list = drive.ListFile({'q': "'root' in parents"}).GetList()
    for file in file_list:
        print('title: {}, id: {}'.format(file['title'], file['id']))
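The Example #5 snippet instantiates the PSPNet model and lists Google Drive files, but stops before any segmentation. A minimal sketch of how a single image would typically be run through a GluonCV segmentation model (the helper name is made up; the transform and predict calls follow GluonCV's segmentation demo):

import mxnet as mx
from mxnet import image
from gluoncv.data.transforms.presets.segmentation import test_transform


def segment_image(model, path, ctx):
    img = image.imread(path)        # HWC uint8 NDArray
    img = test_transform(img, ctx)  # normalize and add a batch dimension
    output = model.predict(img)     # per-class scores for every pixel
    # Class index per pixel; numpy.bincount on the flattened result gives
    # per-class pixel counts to store in the database.
    return mx.nd.squeeze(mx.nd.argmax(output, 1)).asnumpy()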