Example 1
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from time import sleep
import numpy as np
from console_logging.console import Console

console = Console()
import json
import os

curated_lists = []

browser = webdriver.Chrome()
console.info("Initialized Chrome Webdriver.")


def get_repos(pages=10):
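    """Scrape GitHub search results for 'curated list' repositories, sorted by stars."""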

    console.log("Now entering signup process.")

    # Page 1 of Signup

    browser.get('https://github.com/')

    input('Log in, then press ENTER.')

    browser.get(
        'https://github.com/search?o=desc&p=1&q=curated+list&s=stars&type=Repositories&utf8=%E2%9C%93'
    )
Example 2
from console_logging.console import Console

# Assumes the pre-1.0 AutoGluon object-detection API (provides ag.download, ag.unzip, task.fit):
import autogluon as ag
from autogluon import ObjectDetection as task

console = Console()

console.log("Baixando Dataset...")
root = './'
filename_zip = ag.download(
    'https://autogluon.s3.amazonaws.com/datasets/tiny_motorbike.zip',
    path=root)
filename = ag.unzip(filename_zip, root=root)

console.log("Criando TASK TRAIN ")
import os
data_root = os.path.join(root, filename)
dataset_train = task.Dataset(data_root, classes=('motorbike', ))
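# tiny_motorbike is distributed in Pascal VOC layout; 'motorbike' is the only class label.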

console.info("TRAINING DATA MODEL...")
time_limits = 5 * 60 * 60  # 5 hours
epochs = 30
detector = task.fit(dataset_train,
                    num_trials=2,
                    epochs=epochs,
                    lr=ag.Categorical(5e-4, 1e-4),
                    ngpus_per_trial=1,
                    time_limits=time_limits)
console.success("TRAINING DONE !")
console.log("START TEST MODEL ")
dataset_test = task.Dataset(data_root,
                            index_file_name='test',
                            classes=('motorbike', ))

test_map = detector.evaluate(dataset_test)
Example 3
import json
import os
from lxml import html
import requests
import unicodedata
from console_logging.console import Console
console = Console()

job_data = None

with open('jobs.json') as f:
    job_data = json.load(f)
    console.info("Crawling %d career pages." % len(job_data))
    i = 0
    for job_entry in job_data:
        try:
            url = job_entry['link']
            page = requests.get(url)
            tree = html.fromstring(page.content)
            links = tree.xpath('//a')
            job_postings = []
            for link in links:
                job_title = link.text_content().strip()
                if 'intern' in job_title:  # only classify internship postings
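                    # Ask the local classifier service (the Sanic app in Example 8) which category this title belongs to.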
                    res = requests.post(
                        'http://127.0.0.1:8000/predict', json={'title': job_title})
                    prediction = res.text.strip()
                    if prediction in ['IT/Software Development', 'Engineering']:
                        job_postings.append(job_title)
            job_entry['positions'] = job_postings
        except Exception as e:
            console.error(e)
Example 4
# Create the output directory if it doesn't already exist.
os.makedirs('./blog', exist_ok=True)

for file in os.listdir('./blogs'):
    ext = file.split('.')[-1]
    if ext == 'blog':
        blog_id = file.split('.')[0]
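        # Source posts are named <blog_id>.blog and render to ./blog/<blog_id>.html.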
        with open('./blogs/%s' % file, 'r') as blog_def:
            parsed_blog = parse_blog(blog_def.readlines())
            blog_html = render_blog(parsed_blog)
            with open('./blog/%s.html' % blog_id, 'w') as blog:
                blog.write(blog_html)
            blog_posts[-1]['url'] = '/blog/%s.html' % blog_id
        console.info("Wrote blog id::%s" % blog_id)
    # Chronological key: year, month, day, hour, and minute folded into one sortable number.
    blog_posts.sort(key=lambda post: (
        int(post['date']['y']) * 367 * 3600 +
        int(post['date']['m']) * 32 * 3600 +
        int(post['date']['d']) * 3600 +
        int(post['time']['h']) * 60 +
        int(post['time']['m'][:2])))
    blog_posts.reverse()
    index = []
    for blog_post in blog_posts:
        index.append('''<div class="col-md-6 item">
                   <div class="item-in">
                       <h4>{title}</h4>
                       <div class="seperator"></div>
                       <p>{date}: {excerpt}</p>
                        <a href="{url}">Read More<i class="fa fa-long-arrow-right"></i></a>
                    </div>
               </div>'''.format(
Example 5
        exit(1)

    from streaming_event_compliance.objects.variable.globalvar import gVars
    from streaming_event_compliance.services import setup
    from streaming_event_compliance.services.build_automata import build_automata
    from streaming_event_compliance.database import dbtools

    dbtools.empty_tables()
    setup.init_automata()
    if gVars.auto_status == 0:
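        # auto_status == 0 appears to indicate that no automata have been built yet.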
        start = time.perf_counter()  # time.clock() was removed in Python 3.8
        console.secure("Start time: ", start)
        try:
            ServerLogging().log_info(func_name, "Building automata...")
            build_automata.build_automata()
        except ReadFileException as ec:
            console.error(ec.message)
            ServerLogging().log_error(func_name, "Training file cannot be read")
        except ThreadException as ec:
            ServerLogging().log_error(func_name, "Error with threads")
        except Exception as ec:
            ServerLogging().log_error(func_name, "Error")
        ends = time.perf_counter()
        console.secure("[ Total time for training automata ]", str(ends - start) + " seconds.")
    else:
        console.info("Automata have been created in database and read out! You can use it do compliance checking!")
        ServerLogging().log_info(func_name, "Automata have been created in database and read out")

    app.debug = False
    app.run(host="0.0.0.0", port=5000, debug=False, use_reloader=False, threaded=True)
Example 6
    } for line in lines]
    return data


data_save_path = os.path.join(os.getcwd(), 'data/data.sav')
if os.path.exists(data_save_path):
    console.log("Reading from save file...")
    with open(data_save_path, 'rb') as f:
        data = pkl.load(f)
    console.success("Finished reading data from save.")
else:
    console.log("Did not find a save file.")
    data = load_data()
    with open(data_save_path, 'wb') as f:
        pkl.dump(data, f)
    console.success("Created save file.")

console.info("First data is sentence \"%s\" with emotion \'%s\'" %
             (data[0]['raw'], data[0]['emotion']))


def make_wordlists(data):
    wordlist = set()
    mentions = set()
    uppercase = set()
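    # Split each raw sentence on whitespace, then bucket tokens: @-mentions, all-uppercase words, and the rest.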
    for datapoint in data:
        words = re.sub('[ ]{1,10}', ',', datapoint['raw'])
        words = re.sub('[?!]', '', words).split(',')
        for word in words:
            if len(word) > 0:
                if word[0] == '@':
                    mentions.add(word[1:])
                else:
                    if word.isupper():
Example 7
from voiceit2 import VoiceIt2
from console_logging.console import Console

console = Console()

my_voiceit = VoiceIt2(apiKey, apiToken)

id_user = '******'

cadastro_img = "https://observatoriodocinema.uol.com.br/wp-content/uploads/2021/01/Renato-Aragao-1.jpg"

verifica_img = "https://stcotvfoco.com.br/2021/01/renato-aragao-didi-carreira-trapalhoes-filmes-1.jpg"

image_fake = "https://conexao.segurosunimed.com.br/wp-content/uploads/2021/01/Capa-idoso-2.0.jpg"

voz_url = "https://to-vivo-app.s3.amazonaws.com/users/usr_54fbb7f880214222958ce92aef0f22f2/output+(2).flac"
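
# Flow: enroll a face from a URL (commented out below), then verify a genuine photo, a fake photo, and a voice sample.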
#print(my_voiceit.check_user_exists(id_user))

#print(my_voiceit.create_face_enrollment_by_url(id_user, cadastro_img))

console.info("Verifica...do......")

r = my_voiceit.face_verification_by_url(id_user, verifica_img)
console.info(r['faceConfidence'])

console.info("Verificando image fake...")

fake = my_voiceit.face_verification_by_url(id_user, image_fake)
console.info(fake['faceConfidence'])

console.info("Verificando voz......")
my_voiceit.voice_verification_by_url(id_user, "pt-BR", "Juan Manoel Marinho Nascimento", voz_url)

# -------------------------------------------------

Example 8

@app.route('/predict', methods=['POST'])
async def predict(request):
    try:
        return text(str(pipe.predict([request.json['title']])[0]))
    except Exception as e:
        console.error(e)
        return text(str(e), status=500)


@app.route('/predict_many', methods=['POST'])
async def predict_many(request):
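    # Batch variant of /predict: classify a whole list of titles in one request.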
    try:
        return json(list(pipe.predict(request.json['titles'])))
    except Exception as e:
        console.error(e)
        return text(str(e), status=500)


@app.route('/log')
async def log(request):
    try:
        return text(str(train_jobtitle.get_analytics()))
    except Exception as e:
        console.error(e)
        return text(str(e), status=500)


console.info("Starting server...")
app.run()
Example 9
import utils
from classifiers import JobTitle
from console_logging.console import Console

console = Console()

train = utils.load_dataset('features')
console.info("Loaded training dataset.")
test = utils.load_dataset('test')
console.info("Loaded testing dataset.")
pipe = JobTitle.pipe(train)
console.success("Finished training pipe.")

t = [_['title'] for _ in test]
e = [_['categories'][0] for _ in test]
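# t: test job titles; e: expected (first) category label for each test record.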

accuracy = utils.evaluate(pipe, t, e)
console.success("%f accuracy" % accuracy)


def get_analytics():
    analytics = utils.analyze(pipe, t, e, utils.categories(test))
    # console.log('\n'+str(analytics))
    return analytics
Example 10
    def __init__(self, connection, queues):
        self.connection = connection
        self.queues = queues

    def on_message(self, body, message):
        data = pickle.loads(body)
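        # Each message body is a pickled dict describing one camera event; insert it, then ack.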
        print(data['id_camera'])
        tabela.insert(
            dict(id_camera=data['id_camera'],
                 data_=data['data_'],
                 dt=data['dt'],
                 evento=data['evento']))
        message.ack()

    def get_consumers(self, Consumer, channel):
        return [Consumer(queues=self.queues, callbacks=[self.on_message])]


def run():
    console.info("[ CONSUMER - WORKER ]  QUEUE: %s " % queue)
    queues = [Queue(queue, exchange_, routing_key=routing_key)]
    with Connection(rabbit_url, heartbeat=80) as conn:
        worker = Worker(conn, queues)
        console.info("[ CONSUMER - WORKER ]  WORKER RUNNING ")
        worker.run()


if __name__ == "__main__":
    console.info("[ CONSUMER - WORKER ] ....STARTED.... ")
    run()
Example 11
from sanic import Sanic
from sanic.response import json
import json as j
from console_logging.console import Console

app = Sanic()
console = Console()

routing_table = dict()
with open('paths.json') as f:
    for d in j.load(f):
        routing_table[d["passkey"]] = d["url"]

console.info("Compiled routing table of %d routes." %
             len(routing_table.keys()))


@app.middleware('response')
async def all_cors(r, s):
    s.headers['Access-Control-Allow-Origin'] = '*'
    s.headers['Access-Control-Allow-Headers'] = '*'


@app.route("/knock", methods=['POST', 'OPTIONS'])
async def whos_there(r):
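    # "Knock" protocol: POST a name; if it matches a known passkey, return its routed URL.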
    if r.method == 'OPTIONS':
        return json({}, status=200)
    if 'name' not in r.json:
        return json({}, status=400)  # a missing field is a client error, not a server error
    console.log("%s@%s is knocking." % (r.json['name'], r.ip))
    if r.json['name'] in routing_table:
        p = routing_table[r.json['name']]
        console.log("%s is answering." % p)
        return json({"url": p}, status=200)