# ── Exemplo n.º 1 ──
import autogluon as ag
from autogluon import ObjectDetection as task
from console_logging.console import Console
console = Console()

# Download and unpack the tiny_motorbike sample dataset into the current
# directory. (Log message is Portuguese: "Downloading dataset...".)
console.log("Baixando Dataset...")
root = './'
filename_zip = ag.download(
    'https://autogluon.s3.amazonaws.com/datasets/tiny_motorbike.zip',
    path=root)
filename = ag.unzip(filename_zip, root=root)

# Build the training dataset for a single-class ('motorbike') detector.
# (Log message is Portuguese: "Creating TRAIN task".)
console.log("Criando TASK TRAIN ")
import os
data_root = os.path.join(root, filename)
dataset_train = task.Dataset(data_root, classes=('motorbike', ))

console.info("TRAINING DATA MODEL...")
time_limits = 5 * 60 * 60  # 5 hours, wall-clock budget for the whole search
epochs = 30
# Two trials, one per candidate learning rate, one GPU each.
detector = task.fit(dataset_train,
                    num_trials=2,
                    epochs=epochs,
                    lr=ag.Categorical(5e-4, 1e-4),
                    ngpus_per_trial=1,
                    time_limits=time_limits)
console.success("TRAINING DONE !")
console.log("START TEST MODEL ")
# Test split of the same data root, selected via index_file_name='test'.
dataset_test = task.Dataset(data_root,
                            index_file_name='test',
                            classes=('motorbike', ))
# ── Exemplo n.º 2 ──
# Crawl each career-page URL listed in jobs.json, keep internship postings
# that the local prediction service classifies as IT/engineering, and write
# the enriched entries back to jobs.json.
#
# Fixes vs. original: the input file is now closed before the (long) crawl
# starts instead of staying open for its whole duration; the redundant
# .lstrip() after .strip() is dropped (strip() already trims both ends);
# the hand-rolled page counter is replaced by enumerate.
with open('jobs.json') as f:
    job_data = json.load(f)

console.info("Crawling %d career pages." % len(job_data))
for i, job_entry in enumerate(job_data, start=1):
    try:
        url = job_entry['link']
        page = requests.get(url)
        tree = html.fromstring(page.content)
        job_postings = []
        for link in tree.xpath('//a'):
            job_title = link.text_content().strip()
            if 'intern' in job_title:  # only classify internship positions
                res = requests.post(
                    'http://127.0.0.1:8000/predict', json={'title': job_title})
                prediction = res.text.strip()
                if prediction in ['IT/Software Development', 'Engineering']:
                    job_postings.append(job_title)
        job_entry['positions'] = job_postings
    except Exception as e:
        # Best-effort crawl: log the failure and move on to the next page.
        console.error(e)
    if i % 20 == 0:
        console.log("Processed %d pages." % i)
console.success("Finished crawling.")

with open('jobs.json', 'w') as f:
    json.dump(job_data, f)

console.success("Dumped data.")
# ── Exemplo n.º 3 ──
import dataset
from voiceit2 import VoiceIt2

from console_logging.console import Console
import os
console = Console()

console.log("Stating....")

# NOTE(review): credentials are blank placeholders — must be filled in
# (preferably from the environment) before the VoiceIt2 client is usable.
apiKey = "   "  #
apiToken = "  "

my_voiceit = VoiceIt2(apiKey, apiToken)

try:
    # Intended future path: connect to an environment-configured endpoint.
    #ENDPOINT_DB = os.getenv('ENDPOINT_DB')
    #db = dataset.connect(ENDPOINT_DB)
    db = dataset.connect('sqlite:///tovivo.db')
except Exception:
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are not
    # swallowed. Fallback is currently identical to the primary connect —
    # it only matters once the env-based endpoint above is re-enabled.
    db = dataset.connect('sqlite:///tovivo.db')


class CRUD:
    """Persistence helpers backed by the module-level `db` and `my_voiceit`."""

    @staticmethod
    def cadastrar(data):
        """Create a VoiceIt user, attach its id to *data*, and store the row.

        Mutates *data* in place by adding the 'userId' key before insertion
        into the 'user' table.
        """
        user = my_voiceit.create_user()
        print(user)
        data['userId'] = user['userId']
        db['user'].insert(data)
# ── Exemplo n.º 4 ──
# Console verbosity presets — uncomment the one matching the environment.
# Debugging
console.setVerbosity(4)
# Training
# console.setVerbosity(3)
# Staging
# console.setVerbosity(2)
# Production
# console.mute()
# Neater logging inside VS Code
console.timeless()
console.monotone()

# Path to the labeled emotion CSV, resolved against the working directory.
DATASET_FILEPATH = 'data/text_emotion.csv'
dataset_path = os.path.join(os.getcwd(), DATASET_FILEPATH)
console.log("Loading data from %s" % dataset_path)


def _clean(sentence):
    regex_letters = "a-zA-Z"
    regex_spaces = " "
    regex_symbols = "!?@&;.,"
    regex_pattern = regex_letters + regex_spaces + regex_symbols
    new_sentence = re.sub('[^%s]' % regex_pattern, '', sentence)
    regex_special_characters = "&.*?;"
    regex_punctuation = "[.,]"
    return re.sub(regex_punctuation, '',
                  re.sub(regex_special_characters, '', new_sentence))


def load_data():
# ── Exemplo n.º 5 ──
class LISTA_USUARIO(Resource):
    """POST endpoint that lists users matching an optional search term."""

    def post(self):
        console.info("LISTANDO USUARIOS ")
        parser = reqparse.RequestParser()
        parser.add_argument("busca")
        args = parser.parse_args()
        encontrados = db.lista(args['busca'])
        # "mesage" key kept as-is: it is part of the existing API contract.
        return {"mesage": encontrados}, 200


class DELETA_USUARIO(Resource):
    """POST endpoint that deletes the user identified by a CPF number."""

    def post(self):
        console.error("DELETANDO USUARIO ")
        parser = reqparse.RequestParser()
        parser.add_argument("cpf")
        args = parser.parse_args()
        print("DELETANDO USUARIO")
        removido = db.deleta(args['cpf'])
        # "mesage" key kept as-is: it is part of the existing API contract.
        return {"mesage": removido}, 200


# Route registration for the user CRUD API (Flask-RESTful style).
api.add_resource(about, "/")
api.add_resource(ATUALIZA_USUARIO, "/suinox/api/v1/update")
api.add_resource(LISTA_USUARIO, "/suinox/api/v1/lista")
api.add_resource(DELETA_USUARIO, "/suinox/api/v1/delete")
api.add_resource(CADASTRAR_USUARIO, "/suinox/api/v1/cadastro")

if __name__ == '__main__':
    console.log("START APP ")
    # NOTE(review): debug=True on 0.0.0.0 is unsafe outside development —
    # confirm the deployment target before shipping.
    app.run(host="0.0.0.0", debug=True)
# ── Exemplo n.º 6 ──
# Interactively collect the applicant's GPA (float) and test score (int).
print("\nPlease provide a GPA and test score to chance.")
cur_gpa = float(input('GPA: '))
print("Given " + str(cur_gpa))
test_score = int(input('Test Score: '))


def new_samples():
    """Build the (3, 2) float32 feature matrix fed to the classifier.

    Rows: an all-zero baseline, the user-supplied (GPA, score) pair, and the
    (maxgpa, maxtest) maximum — all read from module-level globals.
    """
    rows = [[0.0, 0], [cur_gpa, test_score], [maxgpa, maxtest]]
    return np.array(rows, dtype=np.float32)


# Run the estimator over the three rows produced by new_samples;
# materialize the prediction generator into a list.
predictions = list(classifier.predict(input_fn=new_samples))
console.success("Made predictions:")


def returnChance(chance):
    """Translate a binary class label into an outcome word.

    Returns "rejection" for 0 and "admission" for 1; any other value falls
    through and yields the implicit None, matching the original contract.
    """
    if chance == 1:
        return "admission"
    if chance == 0:
        return "rejection"


# Sanity-check the two extreme rows (all-zero -> rejection, maxima ->
# admission) and then report the prediction for the user-supplied row.
console.log(
    "Testing:\nGPA: 0\nTest Score: 0\nPrediction: %s\nExpected: rejection" %
    returnChance(predictions[0]))
console.log(
    "Testing:\nGPA: %0.1f\nTest Score: %d\nPrediction: %s\nExpected: admission"
    % (maxgpa, maxtest, returnChance(predictions[2])))
console.success("Predicting:\nGPA: %d\nTest Score: %d\nPrediction:%s" %
                (cur_gpa, test_score, returnChance(predictions[1])))