Example #1
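 # Exercises retry handling against the mocked endpoint: get_data returns None when
 # limited to one retry, succeeds without the limit, and three log records are captured.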
 def test_retries(self, mock_get):
     with self.assertLogs() as cm:
         self.assertIsNone(
             get_data("http://www.mocky.io/v2/5e81ea2b2f00000d002fb6d2",
                      max_retries=1))
         self.assertIsNotNone(
             get_data("http://www.mocky.io/v2/5e81ea2b2f00000d002fb6d2"))
     self.assertEqual(len(cm.output), 3)
Example #2
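# Intent handler: make sure the data is loaded, ask main.q2 for the top-k results
# associated with the given entity, and return them as a spoken statement.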
def question_two(entity, k=3):
    main.get_data()
    result = main.q2(entity, k=k)
    if result is None:
        return statement("I couldn't find anything.")
    top_results = [i[0] for i in result]
    results = ", ".join(top_results)
    msg = "Most associated with " + entity + ": " + results
    return statement(msg)
Example #3
 def populate_api(self):
     try:
         school_data = main.get_data(self.url)
         main.populate_db(self.cursor, school_data)
         main.commit_changes(self.conn)
     except FileNotFoundError:
         print('File Not Found')
Example #4
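# Trains a Keras classifier (1000 input features, 5 star-rating classes) on the
# review data, then reports held-out accuracy and plots a confusion matrix.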
def main():
    rev_by_star = get_data()
    X_train, Y_test, X_target, Y_target = split_data(rev_by_star)
    X_train = X_train.toarray()
    Y_test = Y_test.toarray()
    #Y_target = trans_target(Y_target)
    #X_target = trans_target(X_target)
    input_num = 1000
    output_num = 5
    X_target = to_categorical(X_target, 5)
    #Y_target = to_categorical(Y_target, 5)
    #data = np.random.random((2148051, input_num))
    #labels = np.random.randint(output_num, size=(2148051, 1))
    #print(X_target[0])
    #print(X_train.dtype, X_target.dtype)
    #labels = to_categorical(labels, 5)
    #print("data shape", data.dtype)
    #print("label shape", labels.dtype)
    print(type(X_train))
    model = build_model(input_num, output_num)
    model.fit(X_train,
              X_target,
              batch_size=128,
              nb_epoch=5,
              validation_split=0.25)
    #model.fit(data, labels, batch_size=32, nb_epoch=10)
    Y_pred = model.predict(Y_test)
    Y_pred = categorical_probas_to_classes(Y_pred)
    print("Accuracy is : %.2f" % ((Y_target == Y_pred).sum() * 1.0 /
                                  (1.0 * Y_test.shape[0])))
    plot_confusion_matrix(Y_pred, Y_target, "neural_network")
Example #5
 def test_get_data(self):
     """ Test that compares the retrieved data with the expected data that's stored locally """
     remote_data = get_data(
         'http://www.mocky.io/v2/5e539b332e00007c002dacbe')
     with open('response.json') as fin:
         local_data = json.load(fin)
     self.assertDictEqual(remote_data, local_data)
Example #6
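# Scores the trained Bayes models on the test split, treating class 3 as the
# positive label, and returns the confusion matrix [[tn, fp], [fn, tp]].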
def result(train_fraction, test_fraction):
    all_data = main.get_data(train_fraction,
                             test_fraction)  # 0 = train 1 = test
    testing_data = all_data[1]
    all_models = train_bayes.get_models(all_data[0])
    tp = 0
    fp = 0
    tn = 0
    fn = 0
    #p = high
    #n = low

    for i in testing_data:
        if train_bayes.predict(all_models, i[0:len(i) - 1]) == i[-1]:
            if i[-1] == 3:
                tp += 1
            else:
                tn += 1
        else:
            if i[-1] == 3:
                fn += 1
            else:
                fp += 1

    # tpr = high_correct/(high_correct+high_wrong)
    # far =
    confusion_matrix = [[tn, fp], [fn, tp]]
    return confusion_matrix
Example #7
 def test_false(self):
     self.assertEqual(
         get_data(
             "https://collectionapi.metmuseum.org/public/collection/v2/departments"
         ),
         {'message': 'Not Found'},
     )
Example #8
def main_function():
    today_date = datetime.today().strftime('%Y-%m-%d')
    data = get_data()
    # print(data.head(3))
    data.to_csv('results/NSE_'+today_date+'.csv')
    # return this data file
    return Response(print('Download Complete for', today_date, 'NSE Option Chain data'))
Example #9
 def setUpClass(cls):
     logging.info('setUpClass')
     # define in-memory mock data for the weather files
     cls.mock_weather_df = pd.DataFrame({
         'ForecastSiteCode': ['', '', '', '', ''],
         'ObservationTime': ['', '', '', '', ''],
         'ObservationDate': ['2016-02-01', '2016-02-01', '2016-02-01',
                             '2016-02-01', '2016-02-01'],
         'WindDirection': ['', '', '', '', ''],
         'WindSpeed': ['', '', '', '', ''],
         'WindGust': ['', '', '', '', ''],
         'Visibility': ['', '', '', '', ''],
         'ScreenTemperature': ['2.1', '0.1', '2.8', '1.6', '9.8'],
         'Pressure': ['', '', '', '', ''],
         'SignificantWeatherCode': ['', '', '', '', ''],
         'SiteName': ['', '', '', '', ''],
         'Latitude': ['60.7490', '40.7490', '30.7490', '20.7490', ''],
         'Longitude': ['-1.8540', '-2.8540', '-0.8540', '-0.1854', ''],
         'Region': ['Orkney & Shetland', 'Orkney & Shetland', 'Orkney & Shetland',
                    'Orkney & Shetland', 'Highland & Eilean Siar'],
         'Country': ['SCOTLAND', 'SCOTLAND', 'SCOTLAND', 'SCOTLAND', '']})
     cls.mock_weather_df['ObservationDate'] = pd.to_datetime(cls.mock_weather_df['ObservationDate'])
     cls.mock_weather_df['ScreenTemperature'] = cls.mock_weather_df['ScreenTemperature'].astype('float64')
     # create utils helper object
     helper = Helper()
     cls.configs ={'file1': 'weather.20160201.csv', 'file2': 'weather.20160301.csv', 'path': 'raw_data/', 'separator': ','}
     cls.weather_data_df = main.get_data(helper, cls.configs)
Example #10
 def test_get_data(self):
     """ tests whether get_data method in main.py reads data from file 
         and return the contents in string array fromat.
         use unit_test_file.txt data for test purpose.
     """
     self.assertEqual(main.get_data("./test/unit_test_file.txt"),
                      ['check', 'data', 'is', 'right'])
Example #11
def adddata():
    # Retrieve the date from the "date" url parameter
    date_time_str = request.args.get("date", None)
    date_time_obj = datetime.strptime(date_time_str, '%Y-%m-%d')
    table_rows = get_data(date_time_obj.strftime('%m/%d/%Y'))
    update_db(connect_to_db(), table_rows)
    return "<h1>Thanks!</h1>"
Example #12
    def test_row_data(self):

        link = 'pagina_test/lei.html'
        soup = BeautifulSoup(open(link), 'html.parser')
        row = soup.findAll('table')[0].findAll('tr')[3::]
        alerj = get_data(row[0])

        self.assertEqual(alerj['data'], '09/10/2019')
Example #13
def test_getschooldata():
    url = "https://api.data.gov/ed/collegescorecard/v1/schools.json?school.degrees_awarded.predominant=2," \
          "3&fields=id,school.state,school.name,school.city,2018.student.size," \
          "2017.student.size,2017.earnings.3_yrs_after_completion.overall_count_over_poverty_line," \
          "2016.repayment.3_yr_repayment.overall"
    resulting_array = main.get_data(url)
    num_schools = len(resulting_array)
    assert num_schools > 1000
Example #14
    def __init__(self, driver_obj, messages_obj, client_id):
        from main import get_data

        self.driver = driver_obj
        self.messages = messages_obj
        self.client = client_id
        self.data = get_data(self.client)
        threading.Thread.__init__(self)
        self.daemon = True
Example #15
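# With the value already cached in Redis, get_data() should return it directly
# and never call the underlying query function.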
def test_get_data_present(query_fn, testbed):
    try:
        main.client.set(KEY_PREFIX + 'key', 'data', 9000)
    except Exception:
        pytest.skip('Redis is unavailable')

    data = main.get_data()
    query_fn.assert_not_called()
    assert data == 'data'
    main.client.delete(KEY_PREFIX + 'key')
Example #16
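# With only the counter cached, get_data() should fall back to the query function,
# return its result, and store it in Redis under the requested key.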
def test_get_data_not_present(query_fn, testbed):
    try:
        main.client.set(KEY_PREFIX + 'counter', '0', 9000)
    except redis.RedisError:
        pytest.skip('Redis is unavailable')

    data = main.get_data(KEY_PREFIX + 'key')
    query_fn.assert_called_once_with()
    assert data == 'data'
    assert 'data' == main.client.get(KEY_PREFIX + 'key')
    main.client.delete(KEY_PREFIX + 'key')
Example #17
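# Runs search_clusters over every experiment directory found under
# path_to_experiments and prints the resulting entries per directory.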
def process_dirs(path_to_experiments):
    if not os.path.isdir('results'):
        os.mkdir('results')
    entries = os.listdir(path_to_experiments)
    dirs = [dir for dir in entries if os.path.isdir(os.path.join(path_to_experiments, dir))]

    pipe = lambda dir: search_clusters(dir, *get_data(os.path.join(path_to_experiments, dir)))
    result = list(map(pipe, dirs))
    for i, dir in enumerate(dirs):
        print('%s:' % dir)
        for entry in result[i]:
            print('  {0}:{1}'.format(*entry))
Example #18
def main():
    args = Parser().parse()

    use_cuda = not args.cpu and torch.cuda.is_available()
    device = torch.device(f"cuda:{args.gpu}" if use_cuda else "cpu")

    torch.manual_seed(args.seed)

    train_loader, val_loader, test_loader = get_data(args)

    # labels should be a whole number from [0, num_classes - 1]
    num_labels = 10  #int(max(max(train_data.targets), max(test_data.targets))) + 1
    output_size = num_labels
    setup_logging(args)

    if 'resnet' in args.model:
        constructor = getattr(resnet, args.model)
        model_stoch = constructor(True, num_labels, device).to(device)
        model_det = constructor(False, num_labels, device).to(device)

    elif 'vgg' in args.model:
        constructor = getattr(vgg, args.model)
        model_stoch = constructor(True, num_labels, device,
                                  args.orthogonal).to(device)
        model_det = constructor(False, num_labels, device,
                                args.orthogonal).to(device)

    else:
        stoch_args = [True, True, device]
        det_args = [False, False, device]
        model_stoch = lenet5.LeNet5(*stoch_args).to(device)
        model_det = lenet5.LeNet5(*det_args).to(device)

    # load saved parameters
    saved_models = glob(f'/scratch/bsm92/{args.model}_{args.dataset}*.pt')
    saved_det = saved_models[0] if 'det' in saved_models[0] else saved_models[1]
    saved_stoch = saved_models[1 - saved_models.index(saved_det)]
    it = zip([model_stoch, model_det], [saved_stoch, saved_det])
    for model, param_path in it:
        saved_state = torch.load(param_path, map_location=device)
        if param_path[-4:] == '.tar':
            saved_state = saved_state['model_state_dict']
        model.load_state_dict(saved_state)

    loss = torch.nn.CrossEntropyLoss()
    test(args, model_det, device, test_loader, loss, 10)
    test(args, model_stoch, device, test_loader, loss, 10)
Example #19
def index():
    if request.method == 'POST':
        print("POST")
        address = request.form['jobnumber']
        elections = get_data(address)
        html = "<div>"
        html += "Address: %s </br>" % address
        html += "Elections: </br>"
        html += "<ul>"
        for name, day, division in elections:
            html += "<li>" + name + "</li>"
            html += "<ul>"
            html += "<li>Election day: " + day + "</li>"
            html += "<li>Division: " + division + "</li>"
            html += "</ul>"
        html += "</ul>"
        html += "</div>"
        return html
    return render_template('index.html')
Example #20
def process_data():
    page = int(request.args.get("page", 1))
    per_page = PER_PAGE
    offset = (page - 1) * per_page
    data = get_data(
        org_name=session["org_name"],
        repo_count=session["repo_count"],
        committees=session["committees"],
        per_page=per_page,
        offset=offset,
    )
    total = data["count"]
    pagination = Pagination(
        page=page,
        per_page=per_page,
        total=total,
        css_framework="bootstrap4",
        error_out=False,
    )

    return (data, page, per_page, pagination)
Example #21
        return statement("I couldn't find anything.")
    return statement("I found this: " + results)


@ask.intent("GetEntity")
def question_two(entity, k=3):
    main.get_data()
    result = main.q2(entity, k=k)
    if result is None:
        return statement("I couldn't find anything.")
    top_results = [i[0] for i in result]
    results = ", ".join(top_results)
    msg = "Most associated with " + entity + ": " + results
    return statement(msg)


@ask.intent("ThirdIntent")
def question_three(query, k=3):
    result = main.q3(query, k=k)
    if result is None:
        return statement("I couldn't find anything.")
    top_results = [i[0] for i in result]
    results = ", ".join(top_results)
    msg = "Most associated with " + query + ": " + results
    return statement(msg)


if __name__ == '__main__':
    # main.initialize()
    main.get_data()
    app.run()
Example #22
def main():
    args = Parser().parse()

    use_cuda = not args.cpu and torch.cuda.is_available()
    device = torch.device(f"cuda:{args.gpu}" if use_cuda else "cpu")

    torch.manual_seed(args.seed)

    train_loader, val_loader, test_loader = get_data(args)

    # labels should be a whole number from [0, num_classes - 1]
    num_labels = 10  #int(max(max(train_data.targets), max(test_data.targets))) + 1
    output_size = num_labels
    setup_logging(args)

    if 'resnet' in args.model:
        constructor = getattr(resnet, args.model)
        model_stoch = constructor(True, device).to(device)
        model_det = constructor(False, device).to(device)

    elif 'vgg' in args.model:
        constructor = getattr(vgg, args.model)
        model_stoch = constructor(True, device, args.orthogonal).to(device)
        model_det = constructor(False, device, args.orthogonal).to(device)

    else:
        stoch_args = [True, True, device]
        det_args = [False, False, device]
        model_stoch = lenet5.LeNet5(*stoch_args).to(device)
        model_det = lenet5.LeNet5(*det_args).to(device)

    # load saved parameters
    saved_models = glob(f'experimental_models/{args.model}*')
    saved_det = saved_models[0] if 'det' in saved_models[0] else saved_models[1]
    saved_stoch = saved_models[1 - saved_models.index(saved_det)]
    it = zip([model_stoch, model_det], [saved_stoch, saved_det])
    for model, param_path in it:
        saved_state = torch.load(param_path, map_location=device)
        if param_path[-4:] == '.tar':
            saved_state = saved_state['model_state_dict']
        model.load_state_dict(saved_state)

    rows = []
    det_row_prefix = [args.model, args.dataset, saved_det, False, 1]
    for _ in range(args.inference_passes):
        cal_results = [
            *calc_calibration(args, model_det, device, test_loader,
                              args.batch_size, num_labels, 1)
        ]
        rows.append(det_row_prefix + cal_results)
        _log_calibration(*cal_results)
    get_temp_scheduler(model_temps(model_stoch, val_only=False), args).step()
    stoch_row_prefix = [args.model, args.dataset, saved_stoch, True, 1]
    for num_passes in range(1, 51):
        for _ in range(args.inference_passes):
            cal_results = [
                *calc_calibration(args, model_stoch, device, test_loader,
                                  args.batch_size, num_labels, num_passes)
            ]
            stoch_row_prefix[-1] = num_passes
            rows.append(stoch_row_prefix + cal_results)
            _log_calibration(*cal_results, prefix=f'NUM PASSES: {num_passes}')

    _write_results(args, rows)
Example #23
import main
import numpy as np
from scipy import stats
from matplotlib import pyplot as plt
from astroML.plotting import setup_text_plots
from astroML.stats import mean_sigma, median_sigmaG

setup_text_plots(fontsize=8, usetex=False)

if __name__ == '__main__':
    quasars = main.get_data('data/I_Paris_plate_mjd_fiber.dat')
    qsos, wavelengths = main.build_rest_frame(quasars)
    qsos_m = main.list_to_matrix(qsos)

    for i in range(0, 4000, 500):
        fig = plt.figure(figsize=(5, 7))
        fig.subplots_adjust(left=0.13, right=0.95,
                            bottom=0.06, top=0.95,
                            hspace=0.1)
        ax = fig.add_subplot(1, 1, 1)

        avg = np.mean(qsos_m[:, i])
        std = np.std(qsos_m[:, i])
        data = (qsos_m[:, i] - avg) / std

        x = np.linspace(-5, 5, 1000)
        pdf = stats.norm(0, 1).pdf(x)

        A2, sig, crit = stats.anderson(data)
        D, pD = stats.kstest(data, "norm")
        W, pW = stats.shapiro(data)
Example #24
import numpy as np
import seaborn as sns
from sklearn.metrics import accuracy_score
from sklearn.metrics import confusion_matrix
from sklearn.semi_supervised import (
    LabelPropagation,
    LabelSpreading,
    SelfTrainingClassifier
)
from sklearn.svm import SVC

from main import plot_decision_boundary, get_data


if __name__ == '__main__':
    X_train, X_test, y_train, y_test = get_data()

    X = np.concatenate([X_train, X_test], axis=0)
    y = np.concatenate([y_train, -1 * np.ones_like(y_test)], axis=0)

    models = (
        LabelPropagation(max_iter=10000),
        LabelSpreading(),
        SelfTrainingClassifier(base_estimator=SVC(probability=True, gamma="auto"))
    )
    color_maps = ('Blues', 'Greens', 'Reds')

    for model, cmap in zip(models, color_maps):
        model.fit(X, y)
        y_pred = model.predict(X[y == -1])
Example #25
def main(cfg: DictConfig) -> None:
    x, y = get_data(cfg)
    assert_binary_data(x, y)

    x_tr, x_te, y_tr, y_te = train_test_split(
        x,
        y,
        test_size=cfg.evaluation.test_ratio,
        random_state=_seed(cfg.seed))

    mlflow.set_tracking_uri(hydra.utils.get_original_cwd() + '/mlruns')

    experiment_id = get_mlflow_experiment_id(cfg.name)

    with mlflow.start_run(experiment_id=experiment_id):
        loss = get_loss_class(cfg)
        stopping_criterion = EarlyStopping(
            monitor='train_loss',
            lower_is_better=True,
            patience=min(10, cfg.training.max_epochs),
            threshold=cfg.training.tol,
            threshold_mode='rel',
            sink=logger.info,
        )
        clf = Classifier(
            module=LinearClassifier,
            module__n_features=x.shape[1],
            max_epochs=cfg.training.max_epochs,
            criterion=loss,
            predict_nonlinearity=get_loss_link(cfg),
            optimizer=torch.optim.Adam,
            iterator_train__batch_size=cfg.training.batch_size,
            iterator_train__shuffle=True,
            train_split=False,
            callbacks=[('stopping_criterion', stopping_criterion)],
            verbose=cfg.verbose,
        )

        if check_if_validation(cfg):
            params = get_validation_params(cfg)
            clf = GridSearchCV(
                clf,
                params,
                refit=True,
                cv=cfg.evaluation.n_cv,
                scoring=negative_brier_score,
                n_jobs=-1,
            )
        else:
            clf.set_params(
                lr=cfg.training.lr,
                optimizer__weight_decay=cfg.training.regularization,
            )
            if check_if_gev_loss(loss):
                clf.set_params(criterion__xi=cfg.loss.xi)

        mlflow.log_param('dataset', cfg.dataset)
        mlflow.log_param('loss', cfg.loss.name)
        mlflow.log_param('lr', cfg.training.lr)
        mlflow.log_param('max_epochs', cfg.training.max_epochs)
        mlflow.log_param('tol', cfg.training.tol)
        mlflow.log_param('regularization', cfg.training.regularization)
        mlflow.log_param('seed', _seed(cfg.seed))
        if check_if_gev_loss(loss):
            mlflow.log_param('xi', cfg.loss.xi)

        # Step 1: fit the base CPE model
        x_tr, x_vl, y_tr, y_vl = train_test_split(x_tr, y_tr, test_size=0.3)
        clf.fit(x_tr, y_tr)

        # Step 2: search for the best threshold
        threshold_candidates = np.arange(0.05, 1., 0.05)
        validation_scores = [
            f1_score(y_vl,
                     (clf.predict_proba(x_vl)[:, 1] > th).astype(np.uint8))
            for th in threshold_candidates
        ]
        threshold = threshold_candidates[np.argmax(validation_scores)]

        y_prob = clf.predict_proba(x_te)[:, 1]
        y_pred = (y_prob > threshold).astype(np.uint8)
        log_metric('f1_score', f1_score(y_te, y_pred))
        log_metric('roc_auc_score', roc_auc_score(y_te, y_pred))
        log_metric('best_threshold', threshold)
Example #26
import main as lib

#parse all the info from csv
lib.get_data()

#here we get the rating of results
rating = lib.generate_rating(lib.students)
#now form a scholarship list
scholarship_list = lib.get_scholarship(lib.all_students, rating)

#print the list to console
lib.form_output(scholarship_list)
#and to a csv
lib.form_csv(scholarship_list)
Example #27
    sys.exit(1)

config = configuration()
config.readconfig()

if len(args) < 1:
    print _("Specify at least one video.")
    sys.exit(1)

for i in args:

    print "----"
    print _("Trying to download the video...")
    print _("URL: %(url)s") % {"url" : i}
    try:
        data = get_data(i)
        data.start()
        while data.status == -1:
            sleep(0.02)
        if data.status == 0:
            if options.saveAs != None:
                saveAs = options.saveAs
            else:
                saveAs = os.path.join(
                                      options.destination,
                                      data.data[2]
                                     )
            print 'Saving file as "%(file)s"...' % {"file" : saveAs}
            down = fdownload(
                             data.data[0],
                             saveAs
Example #28
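# Builds a dict of playable dragons from the 'Dragons' table: parses the stat
# columns, folds ability data into HP/STR/resistance bonuses, saves the result
# and downloads images for any newly named entries.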
def set_dragon():
    table = 'Dragons'
    fields = 'BaseId,Name,NameJP,Rarity,ElementalType,' + \
        'MinHp,MaxHp,MinAtk,MaxAtk,' + \
        'Abilities11,Abilities12,Abilities21,Abilities22,' + \
        'IsPlayable'
    group = 'BaseId'

    parse_int = ['MinHp', 'MaxHp', 'MinAtk', 'MaxAtk']

    raw_data = main.get_data(table, fields, group)

    names = main.load_name(FILE_NAME)

    data_new = []
    # data_list = []
    data_dict = {}
    data_updates = [False]

    for i in raw_data:
        item = i['title']
        if item['BaseId'] and item[
                'IsPlayable'] == '1' and item['BaseId'] != '210091':
            uid = '{}_01'.format(item['BaseId'])
            name = main.set_name(names, item, data_new, data_updates)

            new_item = {
                'id': uid,
                'name': name,
                'element': item['ElementalType'],
                'rarity': item['Rarity'],
            }

            for k in parse_int:
                new_item[k] = int(item[k])

            HP_V = {'incHP1': 0, 'incHP2': 0}
            STR_V = {'incSTR1': 0, 'incSTR2': 0}
            res_V = {}

            for a in [
                    'Abilities11', 'Abilities12', 'Abilities21', 'Abilities22'
            ]:
                ability = abilities.get(item[a], '')

                if ability:
                    new_item[a.lower()] = ability['Might']
                    level = a[-1]

                    if 'reqEle' in ability:
                        new_item['reqEle'] = ability['reqEle']

                    if 'HP' in ability:
                        HP_V['incHP' + level] = ability['HP']
                    elif 'STR' in ability:
                        STR_V['incSTR' + level] = ability['STR']
                    elif 'Hybrid' in ability:
                        HP_V['incHP' + level] = ability['Hybrid']
                        STR_V['incSTR' + level] = ability['Hybrid']

                    if 'res' in ability:
                        res_V['resEle'] = ability['resEle']
                        res_V['incRes'] = ability['res']
                else:
                    new_item[a.lower()] = 0

            new_item.update(HP_V)
            new_item.update(STR_V)
            new_item.update(res_V)

            # data_list.append(new_item)
            data_dict[uid] = new_item

    # main.save_file('list', FILE_NAME, data_list)
    main.save_file('dict', FILE_NAME, data_dict)

    if data_updates[0]:
        main.save_file('locales', FILE_NAME, names)

    if data_new:
        print(data_new)
        main.download_images(FILE_NAME, data_new)
Example #29
        rm_lemm = [[wordnet.lemmatize(word) for word in words]
                   for words in rm_stop]
    else:
        rm_lemm = rm_stop
    if stem == porter:
        rm_stem = [[porter.stem(word) for word in words] for words in rm_lemm]
    elif stem == snowball:
        rm_stem = [[snowball.stem(word) for word in words]
                   for words in rm_lemm]
    else:
        rm_stem = rm_lemm
    clean_lyrics = [' '.join([word for word in words]) for words in rm_stem]
    return pd.Series(clean_lyrics)


full_dataset = get_data()


# CV
def confusion_matrix_plot_cv(tn,
                             fp,
                             fn,
                             tp,
                             save_fig=False,
                             cmap=plt.cm.Blues):
    '''
    This function prints and plots the confusion matrix.
    '''
    cm = np.array([[tp, fn], [fp, tn]])
    classes = ['True', 'False']
    plt.figure(figsize=(4, 4))
Example #30
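# Builds list and dict records for playable adventurers: parses the numeric stat
# columns, derives STR/Def ability bonuses, saves both files and downloads
# images when new names were added.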
def set_adventurer():
    table = 'Adventurers'
    fields = 'Id,VariationId,Name,NameJP,WeaponType,Rarity,ElementalType,' + \
        'MinHp3,MinHp4,MinHp5,MaxHp,PlusHp0,PlusHp1,PlusHp2,PlusHp3,PlusHp4,McFullBonusHp5,' + \
        'MinAtk3,MinAtk4,MinAtk5,MaxAtk,PlusAtk0,PlusAtk1,PlusAtk2,PlusAtk3,PlusAtk4,McFullBonusAtk5,' + \
        'Abilities11,Abilities12,Abilities21,Abilities22,Abilities31,Abilities32,' + \
        'DefCoef,IsPlayable'
    group = 'Id,VariationId'
    parse_int = [
        'MinHp3', 'MinHp4', 'MinHp5', 'MaxHp', 'PlusHp0', 'PlusHp1', 'PlusHp2',
        'PlusHp3', 'PlusHp4', 'McFullBonusHp5', 'MinAtk3', 'MinAtk4',
        'MinAtk5', 'MaxAtk', 'PlusAtk0', 'PlusAtk1', 'PlusAtk2', 'PlusAtk3',
        'PlusAtk4', 'McFullBonusAtk5', 'DefCoef'
    ]

    raw_data = main.get_data(table, fields, group)

    names = main.load_name(FILE_NAME)
    o_len = len(names)
    data_list = []
    data_dict = {}
    data_new = []

    for i in raw_data:
        item = i['title']
        if item['IsPlayable'] == '1':
            uid = '{}_0{}'.format(item['Id'], item['VariationId'])
            name = main.set_name(names, item, data_new)
            weapon = item['WeaponType']
            rarity = item['Rarity']
            new_item = {
                'id': uid,
                'name': name,
                'weapon': weapon,
                'element': item['ElementalType'],
                'rarity': rarity,
            }

            for k in parse_int:
                new_item[k] = int(item[k])

            inc_LV = {}
            inc_Value = {}
            for a in [
                    'Abilities11', 'Abilities12', 'Abilities21', 'Abilities22',
                    'Abilities31', 'Abilities32'
            ]:
                ability = abilities.get(item[a], '')
                if ability:
                    new_item[a.lower()] = ability['Might']

                    level = a[-1]

                    if 'STR' in ability:
                        inc_LV['STRLV' + level] = ABILITY_LEVEL.get(
                            rarity, ABILITY_LEVEL['res'])[a]
                        inc_Value['incSTR' + level] = ability['STR']

                    if 'def' in ability:
                        inc_LV['defLV' + level] = ABILITY_LEVEL.get(
                            rarity, ABILITY_LEVEL['res'])[a]
                        inc_Value['incDef' + level] = ability['def']

            new_item.update(inc_LV)
            new_item.update(inc_Value)

            data_list.append(new_item)
            data_dict[uid] = new_item

    main.save_file('list', FILE_NAME, data_list)
    main.save_file('dict', FILE_NAME, data_dict)

    if len(names) != o_len:
        print(data_new)
        main.save_file('locales', FILE_NAME, names)
        main.download_images(FILE_NAME, data_new)
Example #31
 def download_list(self): # Clicked on button 'open' in filechooserdialog
     self.listdownloading = True
     fc = self.wTree.get_widget("filechooserdialog")
     pb = self.wTree.get_widget("dprogressbar")
     mainDownload_button = self.wTree.get_widget("mainDownload_button")
     mainClose_button = self.wTree.get_widget("mainClose_button")
     os.chdir(
              self.config.get(
                              "general",
                              "save_videos_in"
                             )
             )
     fc.hide()
     pb.set_fraction(0)
     chosen_list = fc.get_filename()
     print "%(list)s selected" % {"list" : chosen_list}
     print "-----"
     mainDownload_button.set_sensitive(False)
     mainClose_button.set_sensitive(False)
     pb.set_text(_("Downloading a list..."))
     file = open(
                 chosen_list,
                 "r"
                )
     i = 0
     successful = 0 # videos downloaded successfully
     while True:
         pb.set_fraction(0)
         pb.set_text(_("Fetching video information..."))
         line = file.readline()
         if not line:
             break
         if line[-1] == "\n":
             line = line[:-1]
         print "----"
         print _("Trying to download %(link)s") % {"link" : line}
         v_no = i+1 # example: You're downloading the v_no. video && You're downloading the 5. video
         data = get_data(line)
         data.start()
         while data.status == -1:
             gtk.main_iteration_do(False)
             sleep(0.01)
         gtk.main_iteration_do(True)
         if data.status == 0:
             saveAs = os.path.join(
                                   self.config.get(
                                                   "general",
                                                   "save_videos_in"
                                                  ),
                                   data.data[2]
                                  )
             print _("Saving file as \"%(filename)s\"...") % {"filename" : saveAs}
             down = fdownload(
                              data.data[0], # http://*.*/*
                              data.data[2]  # .flv
                             )
             down.start()
             pb.set_text(_("Downloading video %(number)s") % {"number" : v_no})
             progress = down.downloaded()/100
             while down.get_filesize() == 0:
                 gtk.main_iteration_do(False)
                 sleep(0.01)
             gtk.main_iteration_do(True)
             filesize = down.get_filesize()
             print _("Filesize: %(filesize)s KB") % {"filesize" : filesize}
             while progress < 1:
                 gtk.main_iteration_do(False)
                 sleep(0.005)
                 pb.set_fraction(progress)
                 progress = down.downloaded()/100
             gtk.main_iteration_do(True)
             pb.set_fraction(1)
             pb.set_text(_("Finished download #%(number)s") % {"number" : v_no})
             successful += 1
             if self.config.getboolean(
                                       "general",
                                       "convert"
                                      ) and data.data[3]:
                 pb.set_text(_("Converting file"))
                 output = convert(
                                  saveAs,
                                  self.config.get(
                                                  "general",
                                                  "convert_filename_extension"
                                                 ),
                                  self.config.get(
                                                  "general",
                                                  "convertcmd"
                                                 )
                                 )
                 output.start()
                 while output.status == -1:
                     gtk.main_iteration_do(False)
                     sleep(0.01)
                 gtk.main_iteration_do(True)
                 pb.set_text(_("Converted file."))
                 if self.deletesourcefile:
                     os.remove(saveAs)
                     print _("Deleted input (.flv) file")
         else:
             print _("Wrong URL / unsupported video portal")
             pb.set_text(_("Download #%(number)s failed") % {"number" : v_no})
         i += 1
         print "----"
     pb.set_text(_("%(successful)s of %(all)s successful") % {"successful" : successful, "all" : i})
     mainDownload_button.set_sensitive(True)
     mainClose_button.set_sensitive(True)
     self.listdownloading = False
     print "-----"
Example #32
 def test_OK(self):
     self.assertIsNotNone(
         get_data("http://www.mocky.io/v2/5e539b332e00007c002dacbe"))
Example #33
from main import get_data
import csv
import numpy as np
pos_path = "./review_polarity/txt_sentoken/pos/*.txt"
neg_path = "./review_polarity/txt_sentoken/neg/*.txt"
pos_data_train, pos_data_test = get_data(pos_path)
neg_data_train, neg_data_test = get_data(neg_path)
print("shape of pos data train")
print(pos_data_train.shape)
print("shape of pos data test")
print(pos_data_test.shape)
print("shape of neg data train")
print(neg_data_train.shape)
print("shape of neg data test")
print(neg_data_test.shape)
with open('training_data_pos.csv', 'w') as file:
    writer = csv.writer(file)
    for review in pos_data_train:
        for word in review:
            writer.writerow(word)
with open('testing_data_pos.csv', 'w') as file:
    writer = csv.writer(file)
    for review in pos_data_test:
        for word in review:
            writer.writerow(word)
Example #34
from main import data_name_1
from main import data_name_2
from main import data_name_3
from main import get_data

lines = get_data(data_name_1)
Example #35
 def download_single(
                     self,
                     widget,
                     event = None
                    ):
     entry_url = self.wTree.get_widget("entry_url")
     pb = self.wTree.get_widget("dprogressbar")
     mainDownload_button = self.wTree.get_widget("mainDownload_button")
     mainClose_button = self.wTree.get_widget("mainClose_button")
     url = entry_url.get_text()
     os.chdir(
              self.config.get(
                              "general",
                              "save_videos_in"
                             )
             )
     pb.set_fraction(0)
     pb.set_text(_("Fetching video information..."))
     if url == "":
         pb.set_text(_("No URL specified"))
     else:
         print "----"
         print _("Trying to download %(url)s") % {"url" : url}
         pb.set_fraction(0)
         mainDownload_button.set_sensitive(False)
         mainClose_button.set_sensitive(False)
         data = get_data(url)
         data.start()
         while data.status == -1:
             gtk.main_iteration_do(False)
             sleep(0.01)
         gtk.main_iteration_do(True)
         if data.status == 0:
             saveAs = os.path.join(
                                   self.config.get(
                                                   "general",
                                                   "save_videos_in"
                                                  ),
                                   data.data[2]
                                  )
             print "Saving file as \"%(filename)s\"..." % {"filename" : saveAs}
             down = fdownload(
                              data.data[0],
                              data.data[2]
                             )
             down.start()
             pb.set_fraction(0)
             pb.set_text(_("Downloading video..."))
             progress = down.downloaded()/100
             while down.get_filesize() == 0:
                 gtk.main_iteration_do(False)
                 sleep(0.01)
             gtk.main_iteration_do(True)
             filesize = down.get_filesize()
             print "Filesize: %(filesize)s KB" % {"filesize" : filesize}
             while progress < 1:
                 gtk.main_iteration_do(False)
                 sleep(0.01)
                 pb.set_fraction(progress)
                 progress = down.downloaded()/100
             gtk.main_iteration_do(True)
             pb.set_fraction(1)
             pb.set_text(_("Download finished."))
             if self.config.getboolean(
                                       "general",
                                       "convert"
                                      ) and data.data[3]:
                 pb.set_text(_("Converting file"))
                 output = convert(
                                  saveAs,
                                  self.config.get(
                                                  "general",
                                                  "convert_filename_extension"
                                                 ),
                                  self.config.get(
                                                  "general",
                                                  "convertcmd"
                                                 )
                                 )
                 output.start()
                 while output.status == -1:
                     gtk.main_iteration_do(False)
                     sleep(0.01)
                 gtk.main_iteration_do(True)
                 pb.set_text(_("Converted file."))
                 if self.config.getboolean(
                                           "general",
                                           "delete_source_file_after_converting"
                                          ):
                     os.remove(saveAs)
                     print _("Deleted input (.flv) file")
         else:
             pb.set_text(_("Wrong URL / unsupported video portal"))
         mainDownload_button.set_sensitive(True)
         mainClose_button.set_sensitive(True)
         print "----"
Example #36
def set_adventurer():
    table = "Adventurers"
    fields = (
        "Id,VariationId,Name,NameJP,WeaponType,Rarity,ElementalType,"
        "MinHp3,MinHp4,MinHp5,MaxHp,PlusHp0,PlusHp1,PlusHp2,PlusHp3,PlusHp4,McFullBonusHp5,"
        "MinAtk3,MinAtk4,MinAtk5,MaxAtk,PlusAtk0,PlusAtk1,PlusAtk2,PlusAtk3,PlusAtk4,McFullBonusAtk5,"
        "Skill1Name,Skill2Name,Abilities11,Abilities12,Abilities21,Abilities22,Abilities31,Abilities32,"
        "DefCoef,IsPlayable")
    group = "Id,VariationId"
    parse_int = [
        "MinHp3",
        "MinHp4",
        "MinHp5",
        "MaxHp",
        "PlusHp0",
        "PlusHp1",
        "PlusHp2",
        "PlusHp3",
        "PlusHp4",
        "McFullBonusHp5",
        "MinAtk3",
        "MinAtk4",
        "MinAtk5",
        "MaxAtk",
        "PlusAtk0",
        "PlusAtk1",
        "PlusAtk2",
        "PlusAtk3",
        "PlusAtk4",
        "McFullBonusAtk5",
        "DefCoef",
    ]

    raw_data = main.get_data(table, fields, group)

    abilities = main.set_abilities()
    skills = main.set_skills()

    names = main.load_name(FILE_NAME)
    data_list = []
    data_dict = {}
    data_new = []
    data_updates = [False]

    for i in raw_data:
        item = i['title']
        if item['IsPlayable'] == '1':
            uid = '{}_0{}'.format(item['Id'], item['VariationId'])
            name = main.set_name(names, item, data_new, data_updates)
            weapon = item['WeaponType']
            rarity = item['Rarity']
            new_item = {
                "id": uid,
                "name": name,
                "weapon": weapon,
                "element": item["ElementalType"],
                "rarity": rarity,
            }

            for k in parse_int:
                if item[k].isdigit():
                    new_item[k] = int(item[k])

            inc_LV = {}
            inc_Value = {}

            for a in [
                    "Abilities11",
                    "Abilities12",
                    "Abilities21",
                    "Abilities22",
                    "Abilities31",
                    "Abilities32",
            ]:
                ability = abilities.get(item[a])
                if ability:
                    # new_item[a.lower()] = ability['Might']
                    new_item[a.lower()] = ability

                    level = a[-1]

                    if "STR" in ability:
                        inc_LV["STRLV" + level] = ABILITY_LEVEL.get(
                            rarity, ABILITY_LEVEL["res"])[a]
                        inc_Value["incSTR" + level] = ability["STR"]

                    if "def" in ability:
                        inc_LV["defLV" + level] = ABILITY_LEVEL.get(
                            rarity, ABILITY_LEVEL["res"])[a]
                        inc_Value["incDef" + level] = ability["def"]

            for s in ["Skill1Name", "Skill2Name"]:
                skill = skills.get(item[s], {})
                if skill:
                    new_item[s[0:6].lower()] = skill

            new_item.update(inc_LV)
            new_item.update(inc_Value)

            data_list.append(new_item)
            data_dict[uid] = new_item

    main.save_file("list", FILE_NAME, data_list)
    main.save_file("dict", FILE_NAME, data_dict)

    if data_updates[0]:
        main.save_file('locales', FILE_NAME, names)

    if data_new:
        print(data_new)
        main.download_images(FILE_NAME, data_new)
Example #37
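# Returns a JSON export of the actors, data, goals and disclosures stored for the
# requested app_id.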
def export():
    app_id = int(request.args['app_id'])
    format = request.args['format']
    return jsonify(
        actors=main.get_actors(app_id),
        data=main.get_data(app_id),
        goals=main.get_goals(app_id),
        disclosures=main.get_disclosures(app_id),
    )