Example #1
    def __init__(self, env_str):
        if env_str != "test":
            env_str = "local"
        print('Using environment - %s' % env_str)
        dotenv_path = join(dirname(__file__), 'environments/.env-' + env_str)

        # setup db info
        self.DB_HOST = get_variable(dotenv_path, "DB_HOST")
        self.DB_NAME = get_variable(dotenv_path, "DB_NAME")
        self.DB_USERNAME = get_variable(dotenv_path, "DB_USERNAME")
        self.DB_PASSWORD = get_variable(dotenv_path, "DB_PASSWORD")

        # setup twitter oauth stuff
        self.APP_KEY = get_variable(dotenv_path, "APP_KEY")
        self.APP_SECRET = get_variable(dotenv_path, "APP_SECRET")
        self.TOKEN = get_variable(dotenv_path, "TOKEN")
        self.TOKEN_SECRET = get_variable(dotenv_path, "TOKEN_SECRET")

        # setup logging
        root = logging.getLogger()
        root.setLevel(logging.DEBUG)

        ch = logging.StreamHandler(sys.stdout)
        ch.setLevel(logging.DEBUG)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        ch.setFormatter(formatter)
        root.addHandler(ch)

    def open_spider(self, spider):
        self.counter = 0

        password = get_variable(dotenv_path, "Password")
        username = get_variable(dotenv_path, "Username")
        url = get_variable(dotenv_path, "URL")
        database = get_variable(dotenv_path, "Database")

        try:
            self.engine, self.controller = connect(username, password,
                                                   database, url)
        except ConnectionError:
            print("error connecting")

        self.Session = sessionmaker(bind=self.engine)
Example #3
from os.path import join, dirname
import dotenv

dotenv_path = join(dirname(__file__), '.env')

token = dotenv.get_variable(dotenv_path, "SECRET_TOKEN")
nome_fermata = dotenv.get_variable(dotenv_path, "NOME_FERMATA")
client_ticket = dotenv.get_variable(dotenv_path, "CLIENT_TICKET")

firenze = {
	"llLat": 43,
	"llLon": 10,
	"urLat": 44,
	"urLon": 12
}
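
For reference, the .env file this snippet reads sits next to the script and holds one KEY=value pair per line for the three keys used above; the values below are placeholders, not real credentials:

SECRET_TOKEN=placeholder-token
NOME_FERMATA=placeholder-stop-name
CLIENT_TICKET=placeholder-ticket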
Example #4
    def test_get_variable(self):
        result = get_variable(self.file_path, 'baz')

        self.assertEqual('1234', result)
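
A minimal sketch of the fixture such a test assumes: self.file_path points at a throwaway .env file containing baz=1234. The class name and temporary-file setup are illustrative, not the project's actual test harness:

import os
import tempfile
import unittest

from dotenv import get_variable


class GetVariableTest(unittest.TestCase):
    def setUp(self):
        # Write a disposable .env file with the key the test reads.
        fd, self.file_path = tempfile.mkstemp(suffix='.env')
        with os.fdopen(fd, 'w') as f:
            f.write('baz=1234\n')

    def tearDown(self):
        os.remove(self.file_path)

    def test_get_variable(self):
        result = get_variable(self.file_path, 'baz')
        self.assertEqual('1234', result)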
Example #6
import os
import sys
import click
import logging
import nltk
import numpy as np
import pandas as pd

import boto3

from dotenv import get_variable
env_file = '/home/ubuntu/science/quora_question_pairs/.env'
from ipyparallel import Client
from ast import literal_eval

S3_BUCKET = get_variable(env_file, 'S3_BUCKET')
S3_DATA_PATH = get_variable(env_file, 'S3_DATA_PATH')
PROJECT_DIR = get_variable(env_file, 'PROJECT_DIR')
CHUNKSIZE = int(get_variable(env_file, 'CHUNKSIZE'))
Q_WORD_TOKENIZED = literal_eval(get_variable(env_file, 'Q_WORD_TOKENIZED'))
Q_TAGGED = literal_eval(get_variable(env_file, 'Q_TAGGED'))


def lit_pos_tag(lst_str):
    '''
    Part-of-speech (POS) tag a list of tokenized words.
    The list is provided as a string literal (from a pandas DataFrame).
    '''
    return nltk.pos_tag(literal_eval(lst_str))
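
To make the string-literal detail concrete, here is a hedged sketch of tagging a column of stringified token lists; the column names follow the convention used elsewhere in these examples, the sample row is made up, and NLTK's averaged_perceptron_tagger data must already be downloaded:

import nltk
import pandas as pd
from ast import literal_eval

# One made-up row in the same "token list serialized as a string" form.
df = pd.DataFrame({'question1_word_tokenized': ["['How', 'do', 'magnets', 'work']"]})
df['question1_pos_tag'] = df['question1_word_tokenized'].apply(
    lambda s: nltk.pos_tag(literal_eval(s)))
print(df['question1_pos_tag'].iloc[0])  # e.g. [('How', 'WRB'), ('do', 'VBP'), ...]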

Example #7
import time
import boto3
import click

from dotenv import get_variable
from ipyparallel import Client
from ast import literal_eval

env_file = '/home/ubuntu/science/quora_question_pairs/.env'

from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.decomposition import NMF
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import log_loss
from sklearn.model_selection import RandomizedSearchCV
from xgboost import XGBClassifier

S3_BUCKET = get_variable(env_file, 'S3_BUCKET')
S3_DATA_PATH = get_variable(env_file, 'S3_DATA_PATH')
PROJECT_DIR = get_variable(env_file, 'PROJECT_DIR')
CHUNKSIZE = int(get_variable(env_file, 'CHUNKSIZE'))
MASI_DISTANCE = get_variable(env_file, 'MASI_DISTANCE')
EDIT_DISTANCE = get_variable(env_file, 'EDIT_DISTANCE')
JACCARD_DISTANCE = get_variable(env_file, 'JACCARD_DISTANCE')
Q_WORD_TOKENIZED = literal_eval(get_variable(env_file, 'Q_WORD_TOKENIZED'))
Q_TYPE1 = ['question1_type1', 'question2_type1']

test_rows = 1000

@click.command()
@click.argument('test', type=click.Path(), default='False')
def main(test):
    if test == 'True':
Example #8
import os
import sys
import click
import logging
import nltk
import numpy as np
import pandas as pd

import boto3

from dotenv import get_variable
from ipyparallel import Client
from ast import literal_eval

env_file = '/home/ubuntu/science/quora_question_pairs/.env'

S3_BUCKET = get_variable(env_file, 'S3_BUCKET')
S3_DATA_PATH = get_variable(env_file, 'S3_DATA_PATH')
PROJECT_DIR = get_variable(env_file, 'PROJECT_DIR')
CHUNKSIZE = int(get_variable(env_file, 'CHUNKSIZE'))

def FUNCTION(D):
    '''Write a description'''
    if len(D) > 0:
        # DO SOME STUFF
        pass
    return D

@click.command()
@click.argument('test', type=click.Path(), default='False')
@click.argument('i_max', type=click.Path(), default=0)
def main(test, i_max):
    i_max = int(i_max)
    if test == 'True': #Don't chunk
Example #9
# -*- coding: utf-8 -*-
import os
import sys
import click
import logging
import nltk
import boto3

import numpy as np
import pandas as pd

from dotenv import get_variable
env_file = '/home/ubuntu/science/quora_question_pairs/.env'
from ipyparallel import Client

S3_BUCKET = get_variable(env_file, 'S3_BUCKET')
S3_DATA_PATH = get_variable(env_file, 'S3_DATA_PATH')
PROJECT_DIR = get_variable(env_file, 'PROJECT_DIR')
CHUNKSIZE = 1024

Q = ['question1', 'question2']
Q_word_tokenized = ['question1_word_tokenized', 'question2_word_tokenized']
Q_tag = ['question1_pos_tag', 'question2_pos_tag']


def wtokenize_ptag_chunk(Di):
    '''Word-tokenize and part-of-speech (POS) tag one chunk.'''
    if len(Di) > 0:
        Di.loc[:, Q] = Di.loc[:, Q].applymap(str)  # Ensure we have strings
        Di[Q_word_tokenized] = Di.loc[:, Q].applymap(nltk.word_tokenize)
        Di[Q_tag] = Di.loc[:, Q_word_tokenized].applymap(nltk.pos_tag)
Example #10
import argparse

from dotenv import get_variable, set_variable, get_variables, __version__


parser = argparse.ArgumentParser()

parser.add_argument("key", nargs='?')
parser.add_argument("value", nargs='?')

parser.add_argument('--file', default='.env')

parser.add_argument('--version', action='version', version=__version__)

parser.add_argument('--shell', action='store_true', default=False)

args = parser.parse_args()


if args.shell:
    PRINT_FORMAT = '%s=%s'
else:
    PRINT_FORMAT = '%s: %s'

if args.key is None:
    for key, value in get_variables(args.file).items():
        print(PRINT_FORMAT % (key, value))
elif args.value is None:
    print(PRINT_FORMAT % (args.key, get_variable(args.file, args.key)))
else:
    set_variable(args.file, args.key, args.value)
    print(PRINT_FORMAT % (args.key, args.value))
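
For comparison, the round trip this CLI wraps can also be done directly with the imported functions; the file name and key below are illustrative:

from dotenv import get_variable, get_variables, set_variable

env_file = '.env'

set_variable(env_file, 'SECRET_TOKEN', 'abc123')   # write (or overwrite) one key
print(get_variable(env_file, 'SECRET_TOKEN'))      # read a single key back
print(get_variables(env_file))                     # read every key/value pair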
Example #11
import os
import sys
import click
import logging
import nltk
import numpy as np
import pandas as pd

import boto3

from dotenv import get_variable
env_file = '/home/ubuntu/science/quora_question_pairs/.env'
from ipyparallel import Client
from ast import literal_eval

S3_BUCKET = get_variable(env_file, 'S3_BUCKET')
S3_DATA_PATH = get_variable(env_file, 'S3_DATA_PATH')
PROJECT_DIR = get_variable(env_file, 'PROJECT_DIR')
CHUNKSIZE = int(get_variable(env_file, 'CHUNKSIZE'))
Q = literal_eval(get_variable(env_file, 'Q'))
Q_TYPE1 = literal_eval(get_variable(env_file, 'Q_TYPE1'))

# First-order question types
n_types = 25
question_types1 = {
    'who': 1,
    'whos': 2,
    'whose': 3,
    'what': 4,
    'whats': 5,
    'where': 6,
Example #12
from SubscriberThread import SubscriberThread

import os
import requests
from os.path import join

from dotenv import get_variable

from ping_thread import PingThread

from statistics import mean

interface = "wlan0"
path_env = '/home/pi/smart-directions-slave/.env'

base_path_scanner = get_variable(path_env, 'BASE_PATH_SCANNER')
assets_path_scanner = join(base_path_scanner,
                           get_variable(path_env, 'RELATIVE_PATH_ASSETS'))
face_id = get_variable(path_env, 'FACE_ID')
BROKER_IP = get_variable(path_env, 'BROKER_IP')

if __name__ == '__main__':
    # execute command to get our own mac address (wlan)
    command = "cat /sys/class/net/wlan0/address"

    own_mac = os.popen(command).read()
    own_mac = own_mac[:-1]  # drop the trailing newline

    # execute hcitool: required to make btmon work
    hcitools_command = ["hcitool", "lescan", "--duplicates"]
    FNULL = open(os.devnull, 'w')
Example #13
import argparse
from dotenv import get_variable, set_variable, get_variables, __version__


parser = argparse.ArgumentParser()

parser.add_argument("key", nargs='?')
parser.add_argument("value", nargs='?')

parser.add_argument('--file', default='.env')

parser.add_argument('--version', action='version', version=__version__)

args = parser.parse_args()

if args.key is None:
	for key, value in get_variables(args.file).items():
		print("%s: %s" % (key, value))
elif args.value is None:
	print("%s: %s" % (args.key, get_variable(args.file, args.key)))
else:
	set_variable(args.file, args.key, args.value)
	print("%s: %s" % (args.key, args.value))
import time

import requests
from dotenv import get_variable

from log_thread import LogThread
from sniffer_thread import SnifferThread

path_env = '/home/pi/smart-directions-anchor-init/.env'

FLASK_URL = get_variable(path_env, 'FLASK_URL')
PERIOD_CHECK = int(get_variable(path_env, 'PERIOD_CHECK'))
'''
Every PERIOD_CHECK seconds, a ping is sent to the server to confirm it is still alive;
if not, the anchor waits for reconnection.
'''


class PingThread(LogThread):
    def __init__(self, name, mac):
        LogThread.__init__(self, name)
        self.mac = mac
        self.sniffer_thread = SnifferThread('Sniffer')

    def run(self):
        while True:
            try:
                r = requests.post("{}{}/ping".format(FLASK_URL, self.mac))
                code = r.status_code
                if code < 300:
                    if not self.sniffer_thread.is_alive():
import subprocess
import threading
from os.path import join

from dotenv import get_variable

from log_thread import LogThread

import os

path_env = '/home/pi/smart-directions-slave/.env'

abs_path_arrow = "{}/../assets/".format(
    os.path.dirname(os.path.realpath(__file__)))
print("PATH: {}".format(abs_path_arrow))
PATH_ASSETS = get_variable(path_env, 'PATH_ASSETS')


class LedThread(LogThread):
    def __init__(self, name, color, direction, execution_time, connection):
        LogThread.__init__(self, name)
        self.process = None
        self.connection = connection  # socket towards c++
        self.direction, self.color, self.execution_time = direction, color, execution_time

    def run(self):
        msg = str.encode("{}${}${}".format(self.direction, self.color,
                                           self.execution_time))
        if self.connection is not None:
            print("SEND MSG: {}".format(msg))
            self.connection.sendall(msg)
    def run(self):
        base_path_scanner = get_variable(path_env, 'BASE_PATH_SCANNER')
        command = join(base_path_scanner, "cmake-build-rasp1/ble_scanner")
        # running the C++ code for sniffing
        self.process = subprocess.Popen([command])
        self.process.wait()