Example No. 1
def _getDbManager():
    "按需获取数据库对象"
    db = getattr(g, "__dataManager", None);
    if db is None:
        db = DataManager()
        g.__dataManager = db
    return db
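A minimal usage sketch showing how this request-scoped cache behaves inside a Flask app; the app setup and route below are illustrative and not taken from the original project:

from flask import Flask, g

app = Flask(__name__)

@app.route("/status")
def status():
    db = _getDbManager()        # first call in this request creates the DataManager
    db_again = _getDbManager()  # later calls in the same request reuse the cached instance
    return {"same_instance": db is db_again}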
Example No. 2
    def setUp(self):
        self.d1 = DataManager()

        self.wd1 = Word('software', '소프트웨어', (True, self.d1))
        self.wd2 = Word('project', '프로젝트', (True, self.d1))

        self.d1.words = [self.wd1, self.wd2]
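A test method that could follow this fixture, assuming the class is a unittest.TestCase; the method name and assertions are illustrative:

    def test_words_are_registered(self):
        # The fixture attaches both Word objects to the DataManager.
        self.assertEqual(len(self.d1.words), 2)
        self.assertIs(self.d1.words[0], self.wd1)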
Example No. 3
    def __init__(self, root):
        Frame.__init__(self, root)
        self.__root = root
        self.__data_manager = DataManager()
        self.__check_button_type = namedtuple('CheckButtonType', 'widget var')

        self.__natures = [
            "Single Carriageway", "Traffic Island Link", "Dual Carriageway",
            "Roundabout", "Traffic Island Link At Junction", "Slip Road"
        ]

        self.__roads = [
            "M3", "M40", "M4", "A1(M)", "M11", "M23", "M20", "M25", "M1",
            "HIGH STREET", "LONDON ROAD", "HIGH ROAD", "UXBRIDGE ROAD",
            "STATION ROAD", "BRIGHTON ROAD", "GREEN LANES", "FINCHLEY ROAD",
            "HARROW ROAD", "NORTH CIRCULAR ROAD", "KINGSTON ROAD",
            "PORTSMOUTH ROAD", "HERTFORD ROAD", "STAINES ROAD", "CROYDON ROAD",
            "MAIN ROAD", "CHURCH ROAD", "PARK ROAD"
        ]

        self.__motorways = [
            "M3", "M40", "M4", "A1(M)", "M11", "M23", "M20", "M25", "M1"
        ]

        self.__init_grid()
        self.__draw_grid()
Example No. 4
    def __init__(self, ArduinoHoistingData, ArduinoRotationData, ArduinoCirculationData, Hoistingsystem, CirculationSystem, RotaitonSystem):
        Thread.__init__(self,daemon=True)
        self.ArduinoHoistingData = ArduinoHoistingData
        self.ArduinoRotationData = ArduinoRotationData
        self.ArduinoCirculationData = ArduinoCirculationData
        self.HoistingSystem = Hoistingsystem
        self.CirculationSystem = CirculationSystem
        self.RotaitonSystem  = RotaitonSystem
        self.dataManager = DataManager()

        
        self.dataBaseBuffer = {
            "RPM": 0,
            "Top_Drive_Torque": 0,
            "Pressure": 0,
            "Loadcell_z1": 0,
            "Loadcell_z2": 0,
            "Loadcell_z3": 0,
            "ROP_15s_avg": 0,
            "ROP_3m_avg": 0,
            "Flow_Rate": 0,
            "MSE": 0,
            "UCS": 0,
            "TVD": 0,
            "Bit_Torque": 0,
            "WOB": 0,
            "d_exponent": 0,
            "act1": 0,
            "act2": 0,
            "act3": 0,
            "Velocity": 0,
            "Height_Sensor": 0
        }
Example No. 5
    def saveOrUpdateMotion(self, motion):
        dm = DataManager()

        # Check whether the motion already exists:
        # it exists if there is a motion file with a
        # matching name
        path = None
        if motion.getName() in self.getAllMotionFiles():
            path = self.TEMPLATES_PATH + self.getMotionFile(motion.getName())
            #print(path)
        else:
            i = 0
            plainPath = self.TEMPLATES_PATH + 'template'
            # Find the first unused template<N>.txt file name
            while os.path.exists(plainPath + "%s.txt" % i):
                i = i + 1
            plainPath = plainPath + "%s.txt" % i

            if not os.path.exists(os.path.dirname(self.TEMPLATES_PATH)):
                os.makedirs(os.path.dirname(self.TEMPLATES_PATH))

            path = plainPath

        dm.saveMotion(motion, path)
        if motion.getName() not in self.__motions:
            self.__motions[motion.getName()] = motion
            self.__motionFiles[motion.getName()] = path

        print('Motion Saved or Updated')
Example No. 6
def beginScan(aPaths, aUsers, aDM=None, aMergeRootPath=True):
    if aDM is None:
        aDM = DataManager()
    users = makeUsers(aDM, aUsers)
    sd = ScanDisk(users, aPaths, aDM)
    if aMergeRootPath is not None:
        sd.mergeRootPaths = aMergeRootPath
    sd.startScan()
    aDM.save()
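A hedged usage sketch: with no aDM argument, beginScan builds its own DataManager and saves it after scanning; the paths and user names below are placeholders:

if __name__ == "__main__":
    # Scan two directories for two users, sharing one DataManager across the call.
    shared_dm = DataManager()
    beginScan(["/srv/photos", "/srv/docs"], ["alice", "bob"], aDM=shared_dm)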
Example No. 7
    def saveMotion(self, template):
        dm = DataManager()

        i = 0
        plainPath = self.TEMPLATES_PATH + 'template'
        # Find the first unused template<N>.txt file name
        while os.path.exists(plainPath + "%s.txt" % i):
            i = i + 1
        plainPath = plainPath + "%s.txt" % i

        if not os.path.exists(os.path.dirname(self.TEMPLATES_PATH)):
            os.makedirs(os.path.dirname(self.TEMPLATES_PATH))

        dm.saveMotion(template, plainPath)
        print('Motion Saved')
Example No. 8
def run():
    # data crawler
    factory_spider = Spiser()
    # data parser
    json_parser = dataParser.DataParser()
    data_manager = DataManager()
    # total number of pages: 250
    total_page = 251
    # fetch the data
    for i in range(total_page):
        print i
        pagedata = factory_spider.get_pageData(i)
        if pagedata is not None:
            factory_list = json_parser.json_parser(pagedata)
            data_manager.save_local(factory_list)
        time.sleep(1.5)
Example No. 9
def run():
    # data crawler
    factory_spider = Spiser()
    # data parser
    json_parser = dataParser.DataParser()
    data_manager = DataManager()
    # # total number of pages: 250
    # total_page = 251
    # # fetch the data
    # for i in range(total_page):
    #     print i
    #     pagedata = factory_spider.get_pageData(i)
    #     if pagedata is not None:
    #         factory_list = json_parser.json_parser(pagedata)
    #         data_manager.save_local(factory_list)
    #     time.sleep(1.5)
    memberId_list = data_manager.read_local()
    for i in memberId_list:
        print i
Example No. 10
def test1():
    data = DataManager().query_e2019('%', '7,1')
    for d in data:
        print(d)
    data = [{'content': d[1]} for d in data]
    # data=[{'content':'在华盛顿期间,习近平还先后会见了前来参加本届核安全峰会的丹麦首相拉斯穆森、韩国总统朴槿惠和阿根廷总统马克里,并出席了伊核问题六国机制领导人会议。'}]
    data = [{
        'content':
        '谈及美国以所谓“安全威胁”为由打压中国高科技企业,马尔科表示,美国必须提供有说服力的证据,否则指控只是猜测而已。马尔科希望,中美两国能以建设性的方式解决贸易分歧,“贸易战没有赢家,应尽快找到和平解决方案”。\
      摩尔多瓦共产党人党意识形态秘书康斯坦丁·斯塔里什说,近几十年,全球经济体系已被证实行之有效,并推动了各国经济和贸易发展。如今,美国借保护自身经济利益为由,破坏这一体系基础,这好比“大象闯进瓷器店”,破坏了现有模式,却又不提供替代方案。\
      斯塔里什认为,美国盲目挑起经贸摩擦,是为保持自身“世界经济霸主地位”,此举严重违背市场规律,表明美国不愿对世界经济发展负责,同时也将影响美国自身经济发展。\
      巴勒斯坦法塔赫革命委员会委员巴萨姆说,美国挑起对华贸易摩擦的行为是“霸凌逻辑”,贸易战对中美双方都会造成损失。\
      巴勒斯坦人民党总书记萨利希指出,美国此举违背市场规律和国际贸易规则,不仅对美国和中国,乃至对世界经济都造成威胁。此外,美国对华为等中国企业进行打压,是因为相关企业具有强劲的全球竞争力。(执笔记者:马湛;参与记者:张修智、林惠芬、陈进、杨媛媛、赵悦、周天翮)'
    }]
    data = extract_all(data) + extra_info
    for d in data:
        print(d)
    db_manager = NeoManager()
    db_manager.clear()
    db_manager.write2db(data)
Example No. 11
def scanByJsonFile(aJsonFileName):
    "根据配置文件扫描数据"
    try:
        with open(aJsonFileName) as f:
            config = json.load(f)
    except Exception as e:
        print(e)
        return

    if not isinstance(config, list):
        print("文件格式有误")
        return

    dm = DataManager()
    for item in config:
        try:
            beginScan(item.get("paths"), item.get("users"), dm,
                      item.get("merge"))
        except Exception as e:
            print(e)
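scanByJsonFile expects the JSON file to hold a list of objects with "paths", "users" and "merge" keys; a sketch that writes such a config and runs the scan (file name and values are placeholders):

import json

config = [
    {"paths": ["/srv/photos"], "users": ["alice"], "merge": True},
    {"paths": ["/srv/docs"], "users": ["alice", "bob"], "merge": False},
]
with open("scan_config.json", "w") as f:
    json.dump(config, f)

scanByJsonFile("scan_config.json")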
Example No. 12
    def __init__(self, pushsendingstart):
        self.dataManager = DataManager()
        topic, intervalStart, intervalEnd = self.dataManager.readAllSettings()

        self.webCrawler = WebCrawler()
        self.pushSender = PushSender(self.dataManager, "default")

        if pushsendingstart:
            self.pushSender.pushSendingThreadStart()

        self.nowListedWords = self.getAllWords()

        self.mainWindow = MainWindow(self)
        self.memorizeWindow = MemorizeWindow(self)
        self.settingWindow = SettingWindow(self)
        self.settingWindow.settingLoad(topic, intervalStart, intervalEnd)

        self.mainWindow.showWindow()
        self.memorizeWindow.hideWindow()
        self.settingWindow.hide()
Example No. 13
    def __init__(self, pushsendingstart):
        self.dataManager = DataManager()
        sts = self.dataManager.realAllSettings()

        self.webCrawler = WebCrawler()
        self.pushSender = PushSender(self.dataManager, "wowawesome")

        if pushsendingstart:
            self.pushSender.pushSendingThreadStart()

        self.nowListedWords = self.getAllWords()

        self.mainWindow = MainWindow(self)
        self.memorizeWindow = MemorizeWindow(self)
        self.settingWindow = SettingWindow(self)
        self.settingWindow.settingLoad(sts)

        self.mainWindow.showWindow()
        self.memorizeWindow.hideWindow()
        self.settingWindow.hide()
Example No. 14
    def initMotions(self):
        dm = DataManager()
        # Initialise all devices here
        oldPath = os.getcwd()
        try:
            os.chdir(self.TEMPLATES_PATH)
        except FileNotFoundError:
            print("Could not load Motions")
            return

        # For each gesture
        for file in sorted(os.listdir()):
            filePath = os.getcwd() + "/" + file
            if os.path.isdir(filePath):
                continue

            motion = dm.getMotion(filePath)
            self.__motions[motion.getName()] = motion
            self.__motionFiles[motion.getName()] = file

        os.chdir(oldPath)
Example No. 15
 def __init__(self):
     self.tp=TextProcess()
     # database manager; loads the political-figure data
     self.dataManager=DataManager()
     # self.political_person_dict=list()
     # switched to Aho-Corasick storage, which makes multi-pattern matching easy
     self.aho_policical_person=ahocorasick.Automaton()
     try:
         # load_file = open('./mod/political_person_dict.bin', 'rb')
         # self.political_person_dict = pickle.load(load_file)
         # logging.info('political_person_dict count %d' % (len(self.political_person_dict)))
         file = open('./mod/aho_policical_person.aho', 'rb')
         self.aho_policical_person = pickle.load(file)
         logging.info('aho_policical_person count %d' % (len(self.aho_policical_person)))
     except:
         pass
     self.detector=MultiSenDetect()
     # load the place-name index, used to decide whether tokens tagged 'hs' are place names
     load_file = open('./mod/place_dict.bin', 'rb')
     self.place_dict = pickle.load(load_file)
     logging.info('place_dict count %d' % (len(self.place_dict)))
     return
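A sketch of how the loaded automaton could be queried with pyahocorasick; the method name is hypothetical, and it assumes the pickled automaton was finalised with make_automaton() before being saved:

 def find_political_persons(self, text):
     # Multi-pattern search: iter() yields (end_index, stored_value) for every dictionary hit.
     return [value for _end, value in self.aho_policical_person.iter(text)]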
Example No. 16
def run():
    # data crawler
    factory_spider = Spiser()
    # data parser
    data_parser = dataParser.DataParser()
    data_manager = DataManager()
    url_manager = UrlManager()
    # get three url lists, all of which still need to be crawled
    url_list, shopurl1_list, shopurl2_list = url_manager.tel_url()
    total_num = len(url_list)
    crawred_url = url_manager.crawred_url()
    company_dataList = []

    for i in range(total_num):
        url = url_list[i]
        shopurl1 = shopurl1_list[i]
        shopurl2 = shopurl2_list[i]

        if url not in crawred_url:
            page_data = factory_spider.get_urlpage(url)
            page_shop1 = factory_spider.get_urlpage(shopurl1)
            page_shop2 = factory_spider.get_urlpage(shopurl2)
            # use the parser to parse the three pages of data
            companydata = data_parser.get_company_data(page_data, page_shop1,
                                                       page_shop2, url)
            # append the parsed data tuple to the list
            company_dataList.append(companydata)
            time.sleep(1.1)
        # elif url in crawred_url:
        #     print 'already crawled', url

        # save the crawl results to a local csv file every 10 urls
        print '=========', i, '=========='
        if i % 10 == 0 and len(company_dataList) > 0:
            data_manager.save_local_tel(company_dataList)
            company_dataList = []
            time.sleep(10)
Example No. 17
cur.execute(
    "select distinct classification from itn_link where description = 'Motorway';"
)
motorways = [("classification", m[0].replace("'", "''"))
             for m in cur.fetchall()]

cur.execute('select distinct street from itn_link;')
streets = [("street", s[0].replace("'", "''")) for s in cur.fetchall()]

natures = [
    "Single Carriageway", "Traffic Island Link", "Dual Carriageway",
    "Roundabout", "Traffic Island Link At Junction", "Slip Road"
]

dM = DataManager()


def days_to_binary(day):
    if day == 0 or day == 6:
        return 0
    return 1


def func0(params, speed, rainfall_depth, day, hour):
    p0, e0, p1, e1, p2, e2, c = params
    return p0 * rainfall_depth**e0 + p1 * day**e1 + p2 * hour**e2 + c - speed


def plot_func0(params, rainfall_depth, day, hour):
    p0, e0, p1, e1, p2, e2, c = params
Example No. 18
import traceback
from flask import Flask, render_template, request, redirect, url_for
from dataManager import DataManager
import datetime
from dbManager import DashboardInfo

OPTIONS = [
    "ALL_TEST_CASES", "FAILED_IN_CURRENT_RUN", "FAILING_FROM_LAST_10_RUNS",
    "FAILING_FROM_LAST_3_RUNS", "UNSTABLE_TEST_CASES", "PASS_STATUS_SWITCHED"
]

VIEWS = ['TABLE_VIEW', 'GRAPH_VIEW']

dataManager = DataManager()

####################### HOME ######################
app = Flask(__name__)


@app.errorhandler(Exception)
def default_error_handler(error):
    '''Default error handler'''
    original = getattr(error, "original_exception", error)
    traceback.print_tb(error.__traceback__)
    print("ERROR occurded during handling message:", original)
    return render_template("view_error.html")


@app.route('/', methods=["GET", "POST"])
def hello():
    all_dashboards = dataManager.get_all_dashboards()
Example No. 19
    def getFeaturesFavorAgainst(self, mode, listOfFeats):
        #only tweets with favor or against
        X, y = self.getFeaturesMatrix(mode, listOfFeats, 'stance')
        # print X,y
        nonerows = np.where(y == self.labelenc.transform('NONE'))[0]
        # print y
        # print nonerows
        X = np.delete(X, nonerows, axis=0)

        y = np.delete(y, nonerows)
        return X, y

    def getFeaturesStanceNone(self, mode, listOfFeats):
        X, y = self.getFeaturesMatrix(mode, listOfFeats, 'stance')
        y[y == self.labelenc.transform('FAVOR')] = 3
        y[y == self.labelenc.transform('AGAINST')] = 3
        return X, y


if __name__ == '__main__':
    dp = DataManager('../data/train.csv', '../data/test.csv')
    fe = FeatureExtractor(dp)
    # fe.getYStanceNone('train')
    # fe.getFeaturesFavorAgainst('train',['words2vec'])
    # fe.getFeaturesStanceNone('train',['words2vec'])
    # X,y = fe.getFeaturesFavorAgainst('train',['words2vec'])

    # print fe.getFeaturesMatrix('train',['words'],'topic','Hillary Clinton')[0].shape
    # print fe.getFeaturesTopicNontopic('train',['words'],'topic', 'Hillary Clinton')[0].shape
    # print fe.getX('train',fe.data.trainTweets, ['words2vec']).shape
Example No. 20
def active_learning(network: str, dataset: str, pool_length: int, method: str,
                    k: str, num_trainings: int, batch_size: int,
                    num_epochs: int, learning_rate: float, use_cuda: bool):
    """
    Function that executes an active-learning procedure
    controlled by several arguments
    Args:
        network: the model that will be trained and tested
        dataset: the data used for the active learning
        method: method used to select the k samples to add
                to the training set at each active learning loop
        k: number of samples to add
        num_trainings: number of active learning loops
        batch_size: number of samples in a batch
        num_epochs: number of loops during one training
        learning_rate: learning rate of the optimizer
        use_cuda: boolean to use the gpu for training
    Returns:
        The list of accuracies of each test phase
    """

    print("Starting active learning with"
          "\n\tmodel: " + network + "\n\tdataset: " + dataset +
          "\n\tselection method: " + method + "\n\tk: " + k +
          "\n\tnum trainings: " + str(num_trainings) + "\n\tbatch size: " +
          str(batch_size) + "\n\tnum epochs: " + str(num_epochs) +
          "\n\tlearning rate: " + str(learning_rate) + "\n\tuse cuda: " +
          str(use_cuda))

    model = getModel(network)
    data = getData(dataset, pool_length)
    selection_method = getSelectionMethod(method)
    k = int(k)

    if len(data.get_pool_data()[0]) < k * num_trainings:
        raise ValueError(
            "'k' or 'num-trainings' is too big, "
            "the program will not be able to extract the training "
            "samples from the pool at some point")

    # Set the optimizer factory function
    optimizer = optimizer_setup(SGD, lr=learning_rate, momentum=0.9)

    # Create the network depending on the number of classes
    model = model(num_classes=len(data.get_label_names()))

    # First index samples to train
    train_idx = np.arange(k)

    # List that will contain the test accuracy of each training
    accuracies = []

    for num_training in range(num_trainings):
        print("\nActive learning loop " + str(num_training + 1) + "/" +
              str(num_trainings))

        # Set data loaders depending on training samples
        dataManager = DataManager(data=data,
                                  train_idx=train_idx,
                                  batch_size=batch_size)

        # Set the network trainer and launch the training
        netTrainer = NetTrainer(model=model,
                                data_manager=dataManager,
                                selection_method=selection_method,
                                loss_fn=nn.CrossEntropyLoss(),
                                optimizer_factory=optimizer,
                                use_cuda=use_cuda)
        netTrainer.train(num_epochs)

        # Select k samples depending on the selection method
        # and add them to the training samples
        add_to_train_idx = netTrainer.evaluate_on_validation_set(k)
        train_idx = np.concatenate((train_idx, add_to_train_idx))
        print("Selected next training set indexes")

        # Compute the accuracy on the test set and save it
        accuracy = netTrainer.evaluate_on_test_set()
        accuracies.append(accuracy)

    return accuracies
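A hypothetical invocation; the network, dataset and method names depend on what getModel, getData and getSelectionMethod accept in the original project, so treat them as placeholders:

accuracies = active_learning(network="vgg16", dataset="cifar10",
                             pool_length=10000, method="random", k="500",
                             num_trainings=5, batch_size=64, num_epochs=10,
                             learning_rate=0.01, use_cuda=True)
print(accuracies)

Note that k is passed as a string to match the signature; it is converted with int(k) inside the function.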
Example No. 21
from __future__ import absolute_import, unicode_literals
from .celery import app
from configs import configs, locations
from dataManager import DataManager
import time
from datetime import datetime
import requests
import json
# import redis

dm = DataManager()


def get_utc_time(date_t: datetime):
    """获取指定的北京时间的utc时间"""
    t = date_t.timestamp()  # 目标时间的unix时间戳
    return datetime.utcfromtimestamp(t)


def send(url, method='get', data=None) -> dict:
    if method == 'get' or method == 'GET':
        headers = {'Authorization': configs['Authorization']}
        r = requests.get(url, headers=headers)  # request

    elif method == 'post' or method == 'POST':
        headers = {
            'Authorization': configs['Authorization'],
            'Content-Type': 'application/json;charset=UTF-8'
        }
        if data:
            r = requests.post(url, data=data, headers=headers)
Example No. 22
# -*- coding: utf-8 -*-
"""
Created on Mon May 14 17:12:19 2018

@author: greg
"""

import requests
from bs4 import BeautifulSoup
import codecs
import time
import pandas as pd
import numpy as np
from competition import Competition
from competitor import Competitor
from dataManager import DataManager

competition_manager = Competition()
competitor_manager = Competitor()
data_manager = DataManager()
Example No. 23
 def updateMotion(self, motion):
     dm = DataManager()
     path = self.TEMPLATES_PATH + self.getMotionFile(motion.getName())
     dm.saveMotion(motion, path)
     print('Motion Updated')
Example No. 24
 def __init__(self):
     self.data_manager = DataManager()
Example No. 25
def extractSPDHandSave(modelPath='', inputScheme='cropped'):

    num_classes = 6
    batchSize = 32

    # placeholder for input image data

    saver = tf.train.import_meta_graph(modelPath + '.meta')

    if (inputScheme == 'cropped'):
        trainImg_path = '../dataNpyFiles/bbox_cropped_train_img.npy'
        trainLabel_path = '../dataNpyFiles/bbox_cropped_train_label.npy'
        testImg_path = '../dataNpyFiles/bbox_cropped_test_img.npy'
        testLabel_path = '../dataNpyFiles/bbox_cropped_test_label.npy'
    elif (inputScheme == 'activated'):
        trainImg_path = '../dataNpyFiles/bbox_activated_train_img.npy'
        trainLabel_path = '../dataNpyFiles/bbox_activated_train_label.npy'
        testImg_path = '../dataNpyFiles/bbox_activated_test_img.npy'
        testLabel_path = '../dataNpyFiles/bbox_activated_test_label.npy'
    elif (inputScheme == 'origin'):
        trainImg_path = '../dataNpyFiles/origin_train_img.npy'
        trainLabel_path = '../dataNpyFiles/origin_train_label.npy'
        testImg_path = '../dataNpyFiles/origin_test_img.npy'
        testLabel_path = '../dataNpyFiles/origin_test_label.npy'
    else:
        print('Invalid inputScheme parameter...')
        return

    print('load train data...')
    trainData = DataManager(imgNpyPath=trainImg_path,
                            labelNpyPath=trainLabel_path,
                            batchSize=batchSize,
                            classNumber=num_classes)
    print('load test data...')
    testData = DataManager(imgNpyPath=testImg_path,
                           labelNpyPath=testLabel_path,
                           batchSize=batchSize,
                           classNumber=num_classes)

    trainDataSize = np.shape(trainData.label_data)[0]
    testDataSize = np.shape(testData.label_data)[0]

    train_batches_per_epoch = int(np.floor(trainDataSize / batchSize))
    val_batches_per_epoch = int(np.floor(testDataSize / batchSize))

    train_bit_representation = []
    train_label_representation = []

    test_bit_representation = []
    test_label_representation = []

    graph = tf.get_default_graph()

    # load for all operation restored

    latent_op = graph.get_tensor_by_name('latentLayer/latentLayer_sig:0')

    # x = tf.placeholder(tf.float32, [32, 227, 227, 3], name='Placeholder_4')

    for op in graph.get_operations():
        print(op.name)

    with tf.Session() as sess:
        saver.restore(sess, modelPath)

        print('bit generation for train...')

        # for extract bit of training data
        print('trainData initializer on GPU')
        q = trainData.getTrueData()
        sess.run(trainData.getInitializer(),
                 feed_dict={
                     trainData.x: q[0],
                     trainData.y: q[1]
                 })

        for step in range(train_batches_per_epoch):
            if (step % 20 == 0):
                print('For ' + str(step))
            img_batch, label_batch = sess.run(
                trainData.getNextBatchPlaceholder())

            # bitRepresentation = sess.run('latentLayer/Sigmoid:0', feed_dict={'Placeholder_4:0': img_batch, 'Placeholder_6:0': 1.0})
            bitRepresentation = sess.run(latent_op,
                                         feed_dict={
                                             'input_network:0': img_batch,
                                             'keep_prob:0': 1.0
                                         })

            train_label_representation.append(label_batch)
            train_bit_representation.append(bitRepresentation)

        # for extract bit of testing data
        print('bit generation for test...')

        print('testData initializer on GPU')
        p = testData.getTrueData()
        sess.run(testData.getInitializer(),
                 feed_dict={
                     testData.x: p[0],
                     testData.y: p[1]
                 })

        for step in range(val_batches_per_epoch):
            if (step % 20 == 0):
                print('For ' + str(step))
            # Is it right??
            img_batch, label_batch = sess.run(
                testData.getNextBatchPlaceholder())

            bitRepresentation = sess.run(latent_op,
                                         feed_dict={
                                             'input_network:0': img_batch,
                                             'keep_prob:0': 1.0
                                         })

            test_label_representation.append(label_batch)
            test_bit_representation.append(bitRepresentation)

        train_bit_representation_npy = np.asarray(train_bit_representation)
        test_bit_representation_npy = np.asarray(test_bit_representation)

        train_label_representation_npy = np.asarray(train_label_representation)
        test_label_representation_npy = np.asarray(test_label_representation)

        train_save_dic = {
            'bit': train_bit_representation_npy,
            'label': train_label_representation_npy
        }
        test_save_dic = {
            'bit': test_bit_representation_npy,
            'label': test_label_representation_npy
        }

        print('save train bit...')
        np.save(modelPath.split('/')[-1] + '_train_bit.npy', train_save_dic)
        print('save test bit...')
        np.save(modelPath.split('/')[-1] + '_test_bit.npy', test_save_dic)

        print('done')
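A hypothetical call; the checkpoint path is a placeholder and must point at a saved model whose .meta graph file sits next to it:

extractSPDHandSave(modelPath='./checkpoints/spdh_model', inputScheme='cropped')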
Example No. 26
import numpy as np
from event import Event
from fiducialCuts import FiducialCuts
import time
import matplotlib.pyplot as plt
from dataManager import DataManager
import math

# Read in data here

start = time.time()

# data_file = '/media/tylerviducic/Elements/aidapt/data/synthetic/clasfilter2_5M780.npy' # change to your path, obviously
data_file = '/media/tylerviducic/Elements/aidapt/data/recon/twopi_ppip.10.zzz'

data_manager = DataManager(data_file)

input_array = data_manager.get_numpy_array()

output_list = []
phi_list = []
num_bins = 180

num_rows, num_columns = input_array.shape

for n in range(num_rows):

    row = input_array[n]
    event = Event(row)
    phi = math.degrees(abs(event.get_proton_phi()))
    phi_list.append(phi)
Example No. 27
	def __init__(self, n):
		self.data = DataManager('../data/train.csv','../data/test.csv', n)
		self.fe = FeatureExtractor(self.data)
		self.eval = Evaluate()