Example #1
0
def clear_db():
    '''
    Handle a click on "Clear info about user": after a yes/no confirmation
    dialog, erase all stored information about the user's vocabulary.

    (The previous docstring described the "Estimate vocabulary size"
    handler and did not match this function.)

    :return: None
    '''
    ans = messagebox.askyesno(
        "", "Are you sure you want to clear info about user?")
    if ans:
        DATABASE.clear()
Example #2
0
def easy_clicked():
    '''
    Handle a click on the "Easy" button: record the current word as known
    in the user database, then advance to the next word.

    :return: None
    '''
    index_valid = 0 <= INDEX < LEN
    if index_valid:
        current = WORDS[INDEX]
        DATABASE.modify_word(current, FREQLIST.rank(current), True, True)
    next_word()
Example #3
0
def hard_clicked():
    '''
    Handle a click on the "Hard" button: mark the current word as unknown
    in the user database and, when learning mode is active and the
    dictionary recognises the word, queue it for export.

    :return: None
    '''
    if not (0 <= INDEX < LEN):
        next_word()
        return
    current = WORDS[INDEX]
    DATABASE.modify_word(current, FREQLIST.rank(current), False, False)
    if TO_LEARN and DICT.correct(current) is not None:
        WORDS_TO_LEARN.append(current)
    next_word()
Example #4
0
def create_app():
    """Build and configure the Flask application (app-factory pattern).

    :return: the configured Flask application with the database and the
             login manager initialised and the schema created
    """
    app = Flask(__name__)
    # NOTE(review): the secret key is regenerated on every start, which
    # invalidates existing sessions after a restart — confirm intended.
    app.config['SECRET_KEY'] = secrets.token_urlsafe(64)
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///.database/trello.db'
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

    DATABASE.app = app
    DATABASE.init_app(app)
    login_manager.init_app(app)

    # create_all() needs an application context; a test request context
    # provides one here.
    with app.test_request_context():
        DATABASE.create_all()
    return app
Example #5
0
def main():
	"""Single watering pass driven by the stored humidity reading.

	Sets up the GPIO pins, reads the most recent humidity value from the
	database and, when the bucket is not empty and the humidity is below
	HUMIDITY_LIMIT, switches the pump relay on for WATERING_TIME seconds.
	"""
	GPIO.cleanup() 
	GPIO.setmode(GPIO.BCM)
	# Configure the relay pin as an output
	GPIO.setup(RELAIS, GPIO.OUT)
	# Configure the bucket sensor pin as an input
	GPIO.setup(SENSOR_BUCKET_PIN,GPIO.IN)

	# Initialise the database
	db = DATABASE()
	# Fetch the most recent humidity value
	humidity = db.getLatestHumidity()
	
	# Check whether there is water in the bucket
	if isBucketEmpty():
		db.saveBucketEmpty()
	else:
		db.saveStatus("OK")
		# Water when the humidity is below the limit
		if humidity < HUMIDITY_LIMIT:
			db.saveWatering(WATERING_TIME)
			GPIO.output(RELAIS, GPIO.HIGH)
			time.sleep(WATERING_TIME)
			GPIO.output(RELAIS, GPIO.LOW)
		else:
			GPIO.output(RELAIS, GPIO.LOW)		
Example #6
0
def next_word():
    '''
    Show the next word to learn and refresh the GUI state.

    When the word list is exhausted, re-enables the menu entries, disables
    the Easy/Hard buttons and offers to export the queued "hard" words to
    an Anki .apkg deck.

    :return: None
    '''
    global UTILITY_FUNC, PR, INDEX, LEN, WORDS, TO_LEARN, WORDS_TO_LEARN

    INDEX += 1

    if INDEX < LEN:
        word = WORDS[INDEX]
        # Prefer the dictionary's corrected spelling when available.
        corr = DICT.correct(word)
        if corr is None:
            corr = word
        WORD_STR.set(corr)
        STAT_STR.set("%d/%d" % (INDEX + 1, LEN))
        WORDINFO_STR.set("rank = %d, p = %f, frequency = %f, value = %f" %
                         (FREQLIST.rank(word), PR(word), FREQLIST.freq(word),
                          UTILITY_FUNC(word)))
    else:
        # No more words: clear the labels and restore the menus/buttons.
        WORD_STR.set('')
        STAT_STR.set('')
        WORDINFO_STR.set('')

        filemenu.entryconfig("Open", state="normal")
        filemenu.entryconfig("Test vocabulary", state="normal")
        filemenu.entryconfig("Clear info about user", state="normal")
        filemenu.entryconfig("Estimate vocabulary size", state="normal")
        btn_easy['state'] = 'disabled'
        btn_hard['state'] = 'disabled'

        if WORDS_TO_LEARN:
            # Export the queued words to an Anki package; the default file
            # name is the current date/time on the user's Desktop.
            homedir = expanduser("~")
            initialfile = datetime.datetime.now().strftime(
                "%d-%m-%y %H-%M") + '.apkg'
            filename = asksaveasfilename(
                initialfile=initialfile,
                initialdir=homedir + "/Desktop",
                title="Save APKG file",
                filetypes=(("Anki files", "*.apkg"), ),
                defaultextension='.apkg',
            )
            if filename:
                card.write_apkg(WORDS_TO_LEARN, filename)
                # Record the exported words as "being learned".
                for word in WORDS_TO_LEARN:
                    DATABASE.modify_word(word, FREQLIST.rank(word), False,
                                         True)
        elif TO_LEARN:
            messagebox.showinfo("", "All words are known.")
Example #7
0
def save_card(bot, update, user_data):
    """Telegram handler: validate and persist the card number the user sent.

    Replies with a status message and returns the result of ``cancel`` so
    the conversation state is reset.

    :param bot: Telegram bot instance (unused, kept for the handler API)
    :param update: incoming Telegram update carrying the message
    :param user_data: per-user conversation state (unused, kept for the API)
    """
    card_number = update.message.text
    from_user = update.message.from_user
    user_id = str(from_user['id'])
    # Bug fix: the original used re.match (accepts trailing garbage such as
    # "1234567890x") and, worse, still wrote the card to the database even
    # when validation failed.  Validate strictly and bail out early.
    if re.fullmatch(r'\d{10,20}', card_number) is None:
        update.message.reply_text('Введен неверный номер карты 😞')
        return cancel(None, update, None)
    message = 'Ок, карта {} сохранена 👌'.format(card_number)
    try:
        with DATABASE.atomic():
            user = User.get(User.user_id == user_id)
            check_sum = user_id + card_number
            card = Card.create(card_number=card_number,
                               user=user,
                               check_sum=check_sum)
            card.save()
    except IntegrityError:
        message = 'Пользователь c такой картой уже существует 😞'
    except DoesNotExist:
        # First interaction: the user row does not exist yet — create it.
        # NOTE(review): the card itself is not saved in this branch although
        # a success message is sent; confirm whether the card should also be
        # created here.
        user = User.create(user_id=user_id,
                           first_name=from_user['first_name'],
                           last_name=from_user['last_name'])
        user.save()
    update.message.reply_text(message)
    return cancel(None, update, None)
Example #8
0
class Members(db.Model):
    '''Association model linking users to the tables they can access.'''

    __tablename__ = "members"
    id = db.Column(db.Integer, primary_key=True)
    # Table this membership refers to.
    table_id = db.Column(db.Integer,
                         db.ForeignKey('tables.id'),
                         nullable=False)
    # Member's e-mail address (primary key of the users table).
    user_id = db.Column(db.Text, db.ForeignKey('users.email'), nullable=False)

    # Can be either 'creator', 'admin', 'editor' or 'visitor'
    role = db.Column(db.String(20), nullable=False)

    def get_table_id(self):
        '''Return the id of the table this membership refers to.'''
        return self.table_id

    def get_member_id(self):
        '''Return the member's e-mail address.'''
        return self.user_id

    def get_member_role(self):
        '''Return the member's role within the table.'''
        return self.role

    def set_member_role(self, role):
        '''Set the member's role within the table (not committed here).'''
        self.role = role

    def to_dict(self):
        '''Return a JSON-serialisable dict describing this membership.'''
        return {
            'id': self.id,
            'table_id': self.table_id,
            'member_email': self.user_id,
            'member_role': self.role,
        }
Example #9
0
    def __init__(self,
                 reduced_database_source="bar",
                 reduced_collection_source="raw_vector01_redu",
                 folder_contains_imgs="/data/bar03/screenshot01/",
                 traind_ipca_model_path="/data/bar03/ipcav08.pkl",
                 index_to_name_file="/data/bar03/moive_name_list_new.txt"):
        """Load the reduced-vector collection, the trained IPCA model and
        the index->movie-name mapping.

        :param reduced_database_source: database holding the reduced vectors
        :param reduced_collection_source: collection with the reduced vectors
        :param folder_contains_imgs: folder with the screenshot images
        :param traind_ipca_model_path: pickled, pre-trained IPCA model
        :param index_to_name_file: whitespace-separated index->name list
        """
        reduced_database = DATABASE()
        reduced_database.database_chose(reduced_database_source)
        reduced_database.collection_chose(reduced_collection_source)
        data_from_database = reduced_database.get_data().astype("float32")

        # Last two columns are the target (movie name, second); the rest
        # form the feature vector.
        self.compare_data = data_from_database[:, :-2]
        self.compare_target = data_from_database[:, -2:]

        self.folder_contains_imgs = folder_contains_imgs
        self.img_path_many = self._Img_List()
        self.model_path_ipca = traind_ipca_model_path
        with open(self.model_path_ipca, 'rb') as file_id:
            self._Ipca_loaded = pickle.load(file_id)

        # Fix: read the index->name file with a context manager instead of
        # a manual open/close pair (no leaked handle if read() raises).
        with open(index_to_name_file, "r") as name_file:
            self.index_to_name = name_file.read().split()
Example #10
0
class Tasks(db.Model):
    '''A single task card belonging to a board column.'''

    __tablename__ = "tasks"
    id = db.Column(db.Integer, primary_key=True)
    description = db.Column(db.Text, nullable=False)
    # Column this task belongs to (nullable: a task may be detached).
    column_id = db.Column(db.Integer, db.ForeignKey('columns.id'))

    def get_id(self):
        '''Return the task's id.'''
        return self.id

    def get_description(self):
        '''Return the task's description.'''
        return self.description

    def to_dict(self):
        '''Return a JSON-serialisable dict describing this task.'''
        return {'id': self.id,
                'description': self.description,
                'column_id': self.column_id}
Example #11
0
def main():
	"""Endless irrigation loop driven by the soil-moisture sensor.

	Configures the GPIO pins and then, forever: powers the soil sensor,
	reads an averaged moisture value via the MCP3008 ADC, stores it, and
	waters for WATERING_TIME seconds when the reading lies inside the
	configured watering window and the bucket is not empty.
	"""
	GPIO.cleanup() 
	GPIO.setmode(GPIO.BCM)
	# Configure the relay pin as an output
	GPIO.setup(RELAIS, GPIO.OUT)
	# Configure the bucket sensor pin as an input
	GPIO.setup(SENSOR_BUCKET_PIN,GPIO.IN)
	# Configure the soil-moisture sensor power pin as an output
	GPIO.setup(SOIL_MOSITURE_SENSOR,GPIO.OUT)
	
	# Initialise the MCP3008 ADC chip
	mcp3008 = MCP3008()	
	# Initialise the database
	db = DATABASE()
	ch = 0

	#db.saveSoilMoisture(3)
	#db.saveBucketEmpty()
	#db.saveStatus("OK1")
	#sys.exit()
	while True:
		# Power the soil sensor on for the measurement
		GPIO.output(SOIL_MOSITURE_SENSOR,GPIO.HIGH)
		time.sleep(3)
		# Read the (averaged) soil sensor value
		#value = mcp3008.readAnalogData(ch)
		value = getAverangeMositure(mcp3008,ch)
		time.sleep(1)
		# Power the soil sensor off again
		GPIO.output(SOIL_MOSITURE_SENSOR,GPIO.LOW)
		# Store the soil-moisture reading
		db.saveSoilMoisture(value)
		
		# Check whether there is water in the bucket
		if isBucketEmpty():
			db.saveBucketEmpty()
		else:
			db.saveStatus("OK")
			# Water when the reading falls inside the watering window
			if value > SOIL_MOISTURE_WATERING_VALUE_MIN and value < SOIL_MOISTURE_WATERING_VALUE_MAX:
				db.saveWatering(WATERING_TIME)
				GPIO.output(RELAIS, GPIO.HIGH)
				time.sleep(WATERING_TIME)
				GPIO.output(RELAIS, GPIO.LOW)
			else:
				GPIO.output(RELAIS, GPIO.LOW)		
#		time.sleep(1)
		time.sleep(TIMER_SOIL_MOISTURE)
Example #12
0
def delete_card(bot, update, user_data):
    """Telegram handler: delete the card whose number the user sent.

    Replies with a confirmation (or a "nothing to delete" notice) and
    resets the conversation via ``cancel``.
    """
    number = update.message.text
    try:
        with DATABASE.atomic():
            found = Card.get(Card.card_number == number)
            found.delete_instance()
            message = 'Ок, карта {} удалена 👌'.format(found.card_number)
    except DoesNotExist:
        message = 'То, что мертво, умереть не может.'
    update.message.reply_text(message)
    return cancel(None, update, None)
Example #13
0
def connectdb(thread=False):
    # Initialise the global data handles: when thread=True a DUMMY data
    # instance is used; otherwise the InfluxDB-backed DATABASE is populated
    # and read.  (The original comment had this condition inverted.)
    global db
    global cp
    global ue_data
    if thread:
        db = DUMMY()
    else:
        ins.populatedb(
        )  # temporary method to populate db, it will be removed when data will be coming through KPIMON to influxDB

        db = DATABASE('UEData')
        db.read_data("liveUE")
        ue_data = db.data.values.tolist(
        )  # needs to be updated in future when live feed will be coming through KPIMON to influxDB
    cp = CAUSE(db)
Example #14
0
def get_saved_cards(update):
    """Build a reply keyboard listing the user's saved cards.

    :param update: incoming Telegram update
    :return: a ReplyKeyboardMarkup with one row per card plus a cancel row,
             or None when the user is unknown or has no saved cards
    """
    cards_keyboard = []
    from_user = update.message.from_user
    user_id = str(from_user['id'])
    try:
        with DATABASE.atomic():
            user = User.get(User.user_id == user_id)
            # Idiom fix: test the collection's truthiness instead of
            # comparing len() with 0.
            if not user.cards:
                return None
            for card in user.cards:
                cards_keyboard.append([card.card_number])
            cards_keyboard.append(['Отмена'])
            return ReplyKeyboardMarkup(cards_keyboard,
                                       one_time_keyboard=True,
                                       resize_keyboard=True)
    except DoesNotExist:
        return None
Example #15
0
def random_unknown_word(x, y, max_a):
    '''
    Return a random word from the frequency-list rank interval [x, y) that
    is unknown to the user and present in the dictionary.

    :param x: left end of the rank interval (inclusive)
    :param y: right end of the rank interval (exclusive)
    :param max_a: maximum number of draw attempts; None means keep trying
    :return: a random unknown word from [x, y), or None when no suitable
             word was drawn within max_a attempts (the original docstring
             claimed a random fallback word; the code returns None)
    '''
    a = 0
    word = None
    while max_a is None or a < max_a:
        rank = random.randint(x, y - 1)
        word = FREQLIST.word(rank)
        if not DATABASE.exists(word) and DICT.exists(word):
            break
        else:
            word = None
        a += 1
    return word
Example #16
0
def select(stream, db=DATABASE):
    '''
    Return the words of a character stream sorted by decreasing utility.

    :param stream: character stream to analyse
    :param db: database with information about the user's vocabulary
    :return: list of words sorted by decreasing utility-function value
    '''
    words = myparser.parse(stream)
    temp_fl = freqlist.FreqList()
    temp_fl.load(stream=stream)
    # Mix the global frequency list with one built from the stream itself.
    fl = freqlist.DynMixedFreqList([(freqlist.FREQLIST, 0.5), (temp_fl, 0.5)])
    utility_fun = get_utility_func(db=db, fl=fl)
    pr = get_pr_func(db=db)
    scored = []
    for word in words:
        # Consistency fix: consult the `db` argument (the global DATABASE is
        # only its default value) and test the boolean idiomatically rather
        # than comparing with `== False`.
        if DICT.correct(word) is not None and not db.known_now(word):
            scored.append((utility_fun(word), word, pr(word)))
    scored.sort(reverse=True)
    return [word for (_, word, _) in scored]
Example #17
0
def train(thread=False):
    """
     Main function to perform training on input data.

     Sweeps the isolation-forest contamination over [0.01, 0.40) in steps
     of 0.01, then re-fits and pushes the model with the best-scoring
     contamination value.

     :param thread: use the DUMMY data source instead of the real database
    """
    if thread:
        db = DUMMY()
    else:
        db = DATABASE('UEData')
    db.read_data('train')
    ps = PREPROCESS(db.data)
    ps.process()
    df = ps.data

    mod = modelling(df)
    mod.read_test(db)

    scores = []
    for of in np.arange(0.01, 0.4, 0.01):
        scores.append(mod.isoforest(outliers_fraction=of))
    # Index i of the best score maps back to a contamination of (i+1)*0.01.
    opt_f1 = scores.index(max(scores)) + 1
    mod.isoforest(outliers_fraction=opt_f1 * 0.01, push_model=True)
    print("Optimum value of contamination : {}".format(opt_f1 * 0.01))
    print('Training Ends : ')
Example #18
0
class JsonFy:
    """Serialise sensor rows read from the database into per-host dicts."""

    def __init__(self):
        self.__read = DATABASE()

    def json_data(self, tempo_coleta):
        """Group the readings of the last *tempo_coleta* hours by hostname.

        Each row is (hostname, datetime, angle, temperature); the result
        maps hostname -> list of per-reading dicts.
        """
        rows = self.__read.select_DB(horas_de_coleta=tempo_coleta)
        grouped = defaultdict(list)
        for hostname, data_hora, angulo, temperatura in rows:
            grouped[hostname].append({
                "timestamp": datetime.timestamp(data_hora),
                "hostname": hostname,
                "temperatura_ar": temperatura,
                "angulo": angulo,
            })
        return grouped
Example #19
0
from database import DATABASE

# Module-level handle to the project database, shared by this script.
db = DATABASE()

Example #20
0
#         if numpy_data:
#             return np.array(data_list_from_db)
#         else:
#             return data_list_from_db
#
#     def insert_data(self, d2_arrary_data, d2_target):
#         dimension_of_data = d2_arrary_data.shape[1]
#         df_data = pd.DataFrame(data=d2_arrary_data, columns=range(dimension_of_data))
#         df_target = pd.DataFrame(data=d2_target, columns=["movie_name", "second"])
#         data_target = df_data.join(df_target)
#         insert_result = self.collection.insert_many(json.loads(data_target.to_json(orient="records")))
#         return insert_result

# from sklearn.decomposition import PCA, IncrementalPCA

# Open the "bar" database and point at the raw vector collection.
# NOTE(review): the bare attribute access on the next line has no used
# result — it looks like a leftover inspection line; confirm before removal.
data_base = DATABASE()
data_base.collections_of_eachdatabase

data_base.database_chose("bar")
data_base.collection_chose("raw_vector01")
print("data_base.collection =", data_base.collection)


def Explain_Ratio(numpy_array, percentage=0.95, first=50):
    numpy_array = np.array(numpy_array).flatten()
    sum_ = 0
    for index in range(len(numpy_array)):
        sum_ += numpy_array[index]
        if sum_ >= percentage:
            return index + 1, (index + 1) / len(numpy_array), len(
                numpy_array), numpy_array[:first].sum(
Example #21
0
def atualizabanco():
    """Insert the JSON payload of the current request into the database.

    NOTE(review): presumably a Flask view (uses `request`); the route
    decorator is not visible in this chunk — confirm against the app setup.

    :return: the received payload, echoed back to the caller
    """
    dados = request.get_json()
    DATABASE().insert_DB(**dados)
    return dados
Example #22
0
from database import DATABASE
from model3 import MODEL_JPG_VECTOR


if __name__ =="__main__":
    print(" begining ".center(60,"="))
    folder_path = "/data/bar04/output"
    data_in_out = DATABASE()
    data_in_out.database_chose("bar")
    data_in_out.collection_chose("raw_vector02")

    data_to_mongod = MODEL_JPG_VECTOR(chunk=400,folder_path = folder_path,database=data_in_out,)
    data_to_mongod.Jpg_To_Vector_DataBase(to_database=True)
    # print(data_to_mongod.img_path_many.__len__())
    # data = data_in_out.get_data()
Example #23
0
from database import DATABASE
import time

# List every collection of every database, then open the reduced-vector
# collection and report how many documents it currently holds.
print(DATABASE().collections_of_eachdatabase)

bar = DATABASE()
bar.database_chose("bar")
bar.collection_chose("raw_vector01_redu")

print(bar.collection.count())

print(DATABASE)

# while True:
#     print(  bar.collection.count() )
#     time.sleep(60)
Example #24
0
class Users(db.Model, flask_login.UserMixin):
    '''A registered user: owns tables and may be a member of shared ones.'''

    __tablename__ = "users"
    email = db.Column(db.Text, primary_key=True)
    username = db.Column(db.Text, nullable=False)
    password_hash = db.Column(db.Text, nullable=False)
    # Tables created by this user; deleted together with the user.
    tables = db.relationship('Tables',
                             backref='owner',
                             cascade='all,delete',
                             lazy=True)
    # Memberships in tables shared with this user.
    membership = db.relationship('Members', backref='membership', lazy=True)

    def set_password(self, password):
        '''Set the user's password.'''
        self.password_hash = generate_password_hash(password)

    def check_password(self, password):
        '''Check if the provided password and the user's password match.'''
        return check_password_hash(self.password_hash, password)

    def get_id(self):
        '''Return the user's email.'''
        return self.email

    def get_name(self):
        '''Return the user's name.'''
        return self.username

    def get_table_by_id(self, id):
        ''' Return corresponding table, mostly used when retrieveing a
            table shared with the user'''
        if self.membership:
            table, _ = self.get_tables_shared_with_me()

            for t in table:
                if t.id == id:
                    return (t, 'Found')

            return (None, 'Not found')
        return (None, 'Not found')

    def get_table_by_name(self, name):
        '''Return the table that matches the name provided.'''
        # Own tables first, then tables shared with this user.
        table = Tables.query.filter_by(name=name, creator=self.email).first()

        if table:
            return (table, "Table '{}' found".format(name))

        table = Tables.query.filter_by(name=name).first()

        if table and table.get_member_by_email(self.email):
            return (table, "Table '{}' found".format(name))

        return (None, "Table '{}' not found".format(name))

    def get_private_tables(self):
        '''Return a list of the user's private tables.'''
        if self.tables:
            table = [t for t in self.tables if not t.shared]

            if table:
                return (table, "Found")
            return (None, "You don't have any private table yet")
        return (None, "You don't have any private table yet")

    def get_tables_shared_with_others(self):
        '''Return a list of the user's shared tables.'''
        if self.tables:
            table = [t for t in self.tables if t.shared]

            if table:
                return (table, "Found")
            return (None, "You did not share any table yet")
        return (None, "You did not share any table yet")

    def get_tables_shared_with_me(self):
        '''Return a list of the tables shared with the user.'''
        table = []

        if self.membership:
            for m in self.membership:
                t = Tables.query.filter_by(id=m.get_table_id()).first()

                # Skip tables this user created: they are "own", not shared.
                if t and t.creator != self.email:
                    table.append(t)
        if table:
            return (table, "Found")
        return (None, "No table shared with you")

    def add_table(self, name):
        '''Create a new table if it doesn't already exists.'''
        if name:

            table = Tables(name=name, creator=self.email, shared=False)

            if Tables.query.filter_by(name=name).first() is None:
                db.session.add(table)
                db.session.commit()
                return (table, "Table '{}' successfully created".format(name))
            return (None,
                    "A table with name: '{}' already exists".format(name))
        return (None, "Looks like you did not give us a table name")

    def remove_table_by_name(self, name):
        '''Remove the table from the list of the user's tables.'''
        table = db.session.query(Tables).filter(
            Tables.name == name, Tables.creator == self.email).first()

        if table:
            db.session.delete(table)
            db.session.commit()
            return True
        return False

    def to_dict(self):
        '''Return a JSON-serialisable dict describing this user.'''
        return {'email': self.email, 'username': self.username}
Example #25
0
import sys
sys.path.insert(0, '..')
import constants as c

from database import DATABASE

# Open the robot database and dump its contents.
db = DATABASE()

db.Print()

Example #26
0
import numpy as np
import subprocess
import os
# np.set_printoptions(precision=20)

from database import DATABASE

# Default database handle: the "bar3" database / "bar3" collection is used
# when MODEL_JPG_VECTOR is constructed without an explicit `database`.
bar3 = DATABASE()
bar3.database_chose("bar3")
bar3.collection_chose("bar3")


class MODEL_JPG_VECTOR(object):
    """Turn JPG images into feature vectors with a pre-trained Xception
    model and store them in a database collection."""

    def __init__(self,
                 chunk=1500,
                 img_path_many=None,
                 folder_path=None,
                 database=bar3):
        # NOTE(review): the default `database` is the module-level `bar3`
        # handle, so all default-constructed instances share one connection.
        self.database = database

        # Keras is imported lazily here so merely importing this module
        # does not pull in the heavy keras/tensorflow stack.
        from keras.models import Model
        self._Model = Model
        from keras.preprocessing import image
        self._image = image
        from keras.applications.xception import Xception as key_model
        self._key_model = key_model
        from keras.applications.xception import preprocess_input, decode_predictions
        self._preprocess_input = preprocess_input
        self._decode_predictions = decode_predictions
        # Feature extractor: Xception without the classification head.
        base_model_4 = key_model(weights='imagenet', include_top=False)
        self._base_model_4 = base_model_4
Example #27
0
                     (answer.id, len(users)))
    sys.stdout.flush()


def create_directory(directory):
    """Create *directory* with mode 0o700.

    An already-existing directory is not an error; any other OSError
    propagates to the caller.
    """
    try:
        os.mkdir(directory, 0o700)
    except OSError as err:
        if err.errno == errno.EEXIST:
            return
        raise


if __name__ == '__main__':

    # Configuring Database
    DATABASE.connect()
    create_tables()

    parser = argparse.ArgumentParser(description='Quora Crawling Bot')
    parser.add_argument('-n',
                        '--max_crawl',
                        nargs='?',
                        default=1000,
                        type=int,
                        help='Number of maximum requests to make')
    parser.add_argument('--no_profile',
                        action='store_true',
                        help='Do not Crawl Profiles')
    parser.add_argument('--no_answer',
                        action='store_true',
                        help='Do not Crawl Answers')
    fstream.write(resp)

  sys.stdout.write('\rDone Parsing Answer id %d (%d)' % (answer.id, len(users)))
  sys.stdout.flush()

def create_directory(directory):
  """Create *directory* with mode 0o700, ignoring an existing directory;
  any other OSError propagates."""
  try:
    os.mkdir(directory, 0o700)
  except OSError as error:
    if error.errno != errno.EEXIST:
      raise

if __name__ == '__main__':

  # Configuring Database
  DATABASE.connect()
  create_tables()

  parser = argparse.ArgumentParser(description = 'Quora Crawling Bot')
  parser.add_argument('-n', '--max_crawl', nargs='?', default=1000, type=int,
                      help='Number of maximum requests to make')
  parser.add_argument('--no_profile', action='store_true',
                      help='Do not Crawl Profiles')
  parser.add_argument('--no_answer', action='store_true',
                      help='Do not Crawl Answers')
  args = parser.parse_args()

  # Filling Database with Top Writers 2016
  with open('top_writers_2016.json', 'r') as fstream:
    writer_list = json.load(fstream)
  with open('other_writers.json', 'r') as fstream:
from database import DATABASE as db
from flask import Flask, g
from movie.models import Movie, Genre
from movie.views import movies_bp

# Tables the movie app needs; creation is best-effort because they may
# already exist from a previous run.
tables = [Movie, Genre]

db.connect()

for table in tables:
    print(table)
    try:
        db.create_table(table)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; an existing table (or other DB error) is skipped.
        continue

db.close()

server = Flask(__name__)
server.register_blueprint(movies_bp)


@server.before_request
def before_request():
    # Open a database connection for the lifetime of this request; the
    # matching close happens in the after_request hook.
    g.db = db
    g.db.connect()


@server.after_request
def after_request(response):
    """Close the per-request database connection and pass the response on.

    Bug fix: Flask requires after_request handlers to return the response
    object; as written this returned None, which makes every request fail.
    """
    g.db.close()
    return response
Example #30
0
 def __init__(self):
     """Open the database handle used by the JSON helper methods."""
     self.__read = DATABASE()
Example #31
0
import sys

sys.path.insert(0, '../database')
from database import DATABASE

sys.path.insert(0, '..')
import constants as c

import pygraphviz as pgv
import networkx as nx

db = DATABASE()

# Fetch all robot records (the commented slice limits them to the last 500).
robots = db.Get_Robots()
#robots = robots[-500:]

phyloTree = pgv.AGraph()  # bgcolor='green')

#phyloTree.add_node(-1,label="", fixedsize=False, width=1,height=1)

for robot in robots:

    # Parent/child ids and the vote counters for each robot record.
    parentID = db.From_Robot_Record_Get_Parent_ID(robot)

    childID = db.From_Robot_Record_Get_ID(robot)

    numYeses = db.Get_Robot_Num_Yeses(childID)

    numNos = db.Get_Robot_Num_Nos(childID)

    numEvals = db.Get_Robot_Num_Evaluations(childID)
Example #32
0
sys.path.insert(0, '../database')
from database import DATABASE

sys.path.insert(0, '../pyrosim')
import pyrosim

sys.path.insert(0, '../TPR_3')

sys.path.insert(0, '../environments')
from environment0 import ENVIRONMENT0

import constants as c

import pickle

database = DATABASE()

# Pickled robot to replay in the simulator.
filename = '../data/robot0.p'

s = pyrosim.Simulator(debug=False,
                      play_paused=False,
                      eval_time=c.evaluationTime)

# Empty environment placed at the origin.
e = ENVIRONMENT0(s, [0, 0, 0], [0, 0, 0], c.noFade)

e.Send_To_Simulator()

r = pickle.load(open(filename, 'rb'))

command = c.defaultCommand
class JsonFy:
    """Serialise weather/power-station rows from the database into
    per-host dicts suitable for JSON output."""

    def __init__(self):
        self.__read = DATABASE()

    def json_data(self, tempo_coleta):
        """Group the raw readings of the last *tempo_coleta* hours by
        hostname; each reading becomes one dict."""
        pcdData = []
        data = self.__read.select_DB(horas_de_coleta=tempo_coleta)
        host_geradores = defaultdict(list)
        for row_db in data:
            hostname_db, data_hora_db, temperatura_ar_db, temperatura_orvalho_db, umidade_db, pressao_local_db, correnteFaseA_db, correnteFaseB_db, correnteFaseC_db, correnteNeutro_db, statusEnergia_db = row_db

            host_geradores[hostname_db].append({
                "timestamp":
                datetime.timestamp(data_hora_db),
                "hostname":
                hostname_db,
                "temperatura_ar":
                temperatura_ar_db,
                "temperatura_orvalho":
                temperatura_orvalho_db,
                "umidade":
                umidade_db,
                "pressao_local":
                pressao_local_db,
                "corrente_Fase_A":
                correnteFaseA_db,
                "corrente_Fase_B":
                correnteFaseB_db,
                "corrente_Fase_C":
                correnteFaseC_db,
                "corrente_Neutro":
                correnteNeutro_db,
                "status_Energia":
                statusEnergia_db
            })

        return host_geradores

    def json_minMaxMed_data(self, dias):
        """Group the per-day min/max/mean aggregates of the last *dias*
        days by hostname.

        NOTE(review): unlike the hourly variant below, this dict has no
        "min_corrente_fase_A" entry although min_corrente_fase_A is
        unpacked from the row — confirm whether the key was dropped by
        mistake.
        """
        pcdData = []
        data = self.__read.min_max_med_DB(dias)
        host_geradores2 = defaultdict(list)
        for row_db in data:
            hostname, data_hora_db, min_temperatura, max_temperatura, med_temperatura, min_temperatura_orvalho, max_temperatura_orvalho, med_temperatura_orvalho, min_umidade, max_umidade, med_umidade, min_pressao_local, max_pressao_local, med_pressao_local, min_corrente_fase_A, max_corrente_fase_A, med_corrente_fase_A, min_corrente_fase_B, max_corrente_fase_B, med_corrente_fase_B, min_corrente_fase_C, max_corrente_fase_C, med_corrente_fase_C, min_corrente_fase_Neutro, max_corrente_fase_Neutro, med_corrente_fase_Neutro, min_status_energia, max_status_energia, med_status_energia = row_db

            host_geradores2[hostname].append({
                "timestamp":
                datetime.timestamp(data_hora_db),
                "hostname":
                hostname,
                "temperatura_ar":
                float(med_temperatura),
                "temperatura_ar_min":
                float(min_temperatura),
                "temperatura_ar_max":
                float(max_temperatura),
                "temperatura_orvalho":
                float(med_temperatura_orvalho),
                "temperatura_orvalho_min":
                float(min_temperatura_orvalho),
                "temperatura_orvalho_max":
                float(max_temperatura_orvalho),
                "umidade":
                float(med_umidade),
                "umidade_min":
                float(min_umidade),
                "umidade_max":
                float(max_umidade),
                "pressao_local":
                float(med_pressao_local),
                "pressao_local_min":
                float(min_pressao_local),
                "pressao_local_max":
                float(max_pressao_local),
                "max_corrente_fase_A":
                float(max_corrente_fase_A),
                "med_corrente_fase_A":
                float(med_corrente_fase_A),
                "min_corrente_fase_B":
                float(min_corrente_fase_B),
                "max_corrente_fase_B":
                float(max_corrente_fase_B),
                "med_corrente_fase_B":
                float(med_corrente_fase_B),
                "min_corrente_fase_C":
                float(min_corrente_fase_C),
                "max_corrente_fase_C":
                float(max_corrente_fase_C),
                "med_corrente_fase_C":
                float(med_corrente_fase_C),
                "min_corrente_fase_Neutro":
                float(min_corrente_fase_Neutro),
                "max_corrente_fase_Neutro":
                float(max_corrente_fase_Neutro),
                "med_corrente_fase_Neutro":
                float(med_corrente_fase_Neutro),
                "min_status_energia":
                float(min_status_energia),
                "max_status_energia":
                float(max_status_energia),
                "med_status_energia":
                float(med_status_energia),
            })
        return host_geradores2

    def json_minMaxMedHora_data(self, horas):
        """Group the per-hour min/max/mean aggregates of the last *horas*
        hours by hostname; the timestamp is rebuilt from the separate
        date and hour columns."""
        pcdData = []
        data = self.__read.min_max_med_hora_DB(horas)
        host_geradores3 = defaultdict(list)

        for row_db in data:
            hostname, datadia, hora, min_temperatura, max_temperatura, med_temperatura, min_temperatura_orvalho, max_temperatura_orvalho, med_temperatura_orvalho, min_umidade, max_umidade, med_umidade, min_pressao_local, max_pressao_local, med_pressao_local, min_corrente_fase_A, max_corrente_fase_A, med_corrente_fase_A, min_corrente_fase_B, max_corrente_fase_B, med_corrente_fase_B, min_corrente_fase_C, max_corrente_fase_C, med_corrente_fase_C, min_corrente_fase_Neutro, max_corrente_fase_Neutro, med_corrente_fase_Neutro, min_status_energia, max_status_energia, med_status_energia = row_db
            data_hora = f'{datadia} {hora}:00:00'

            host_geradores3[hostname].append({
                "timestamp":
                datetime.timestamp(
                    datetime.strptime(data_hora, '%Y-%m-%d %H:%M:%S')),
                "hostname":
                hostname,
                "temperatura_ar":
                float(med_temperatura),
                "temperatura_ar_min":
                float(min_temperatura),
                "temperatura_ar_max":
                float(max_temperatura),
                "temperatura_orvalho":
                float(med_temperatura_orvalho),
                "temperatura_orvalho_min":
                float(min_temperatura_orvalho),
                "temperatura_orvalho_max":
                float(max_temperatura_orvalho),
                "umidade":
                float(med_umidade),
                "umidade_min":
                float(min_umidade),
                "umidade_max":
                float(max_umidade),
                "pressao_local":
                float(med_pressao_local),
                "pressao_local_min":
                float(min_pressao_local),
                "min_corrente_fase_A":
                float(min_corrente_fase_A),
                "pressao_local_max":
                float(max_pressao_local),
                "max_corrente_fase_A":
                float(max_corrente_fase_A),
                "med_corrente_fase_A":
                float(med_corrente_fase_A),
                "min_corrente_fase_B":
                float(min_corrente_fase_B),
                "max_corrente_fase_B":
                float(max_corrente_fase_B),
                "med_corrente_fase_B":
                float(med_corrente_fase_B),
                "min_corrente_fase_C":
                float(min_corrente_fase_C),
                "max_corrente_fase_C":
                float(max_corrente_fase_C),
                "med_corrente_fase_C":
                float(med_corrente_fase_C),
                "min_corrente_fase_Neutro":
                float(min_corrente_fase_Neutro),
                "max_corrente_fase_Neutro":
                float(max_corrente_fase_Neutro),
                "med_corrente_fase_Neutro":
                float(med_corrente_fase_Neutro),
                "min_status_energia":
                float(min_status_energia),
                "max_status_energia":
                float(max_status_energia),
                "med_status_energia":
                float(med_status_energia),
            })
        return host_geradores3
Example #34
0
from database import DATABASE
import pickle
import faiss

import numpy as np
from keras.models import Model
from keras.preprocessing import image
from keras.applications.xception import Xception as key_model
from keras.applications.xception import preprocess_input, decode_predictions

# Xception feature extractor: drop the classification head and tap the
# output three layers from the end.
base_model_4 = key_model(weights='imagenet', include_top=False)
model = Model(inputs=base_model_4.input,
              outputs=base_model_4.get_layer(index=-3).output)

reduced_database = DATABASE()
reduced_database.database_chose("bar")
reduced_database.collection_chose("raw_vector01_redu")

# Split the stored matrix: all but the last two columns are the feature
# vector; the last two columns are the target (movie name, second).
data_from_database = reduced_database.get_data(movie_name=0).astype("float32")
compare_data = data_from_database[:, :-2]
compare_target = data_from_database[:, -2:]

img_path = "/data/bar03/screenshot01/0_160.jpg"


def Jpg_To_Vector(img_path):
    if isinstance(img_path, list):
        img_path = img_path[0]

    img = image.load_img(img_path, target_size=(299, 299))
    x = image.img_to_array(img)