Example #1
    def get_summary(self, tickers, threads=True, format="default"):
        tickers = utils.format_tickers(tickers)

        data = {}
        if tickers:
            data = self.get_summary_data(tickers, threads)
        # Process result data and build json object to append in list.
        resultConfig = utils.getConfig("Results", "SUMMARY")
        resultFormats = utils.getConfig("Results", "DEFAULT_valueFormat")
        resultExclusionConfig = utils.getConfig("Results", "DEFAULT_exclude").replace(" ", "")
        resultExclusionObj = resultExclusionConfig.split(",")
        resultObj = json.loads(resultConfig)
        formatsObj = json.loads(resultFormats)
        summaryResult = []
        if bool(data):
            for tkr in tickers:
                resultObjCopy = copy.copy(resultObj)
                tkrValues = data[tkr]["quoteSummary"]["result"] if tkr in data else False
                if tkrValues:
                    try:
                        for (key, value) in resultObj.items():
                            if key not in resultExclusionObj:
                                valueObj = tkrValues[0]["summaryDetail"][value] if value in tkrValues[0]["summaryDetail"] else []
                                if bool(valueObj):
                                    if hasattr(valueObj, "keys"):
                                        # Evaluate keys and determine what key to use
                                        # Majority of cases:
                                        #     raw - values as is,
                                        #     fmt - string value,
                                        #     longFmt - formatted string value
                                        if format in formatsObj and formatsObj[format] in valueObj:
                                            resultObjCopy[key] = valueObj[formatsObj[format]]
                                        else:
                                            resultObjCopy[key] = valueObj["raw"]
                                    else:
                                        resultObjCopy[key] = valueObj
                                else:
                                    resultObjCopy[key] = 0
                        resultObjCopy["errors"] = "Ok"
                        if "symbol" in resultObjCopy:
                            resultObjCopy["symbol"] = tkr
                    except Exception as exc:
                        resultObjCopy["errors"] = "Exception getting prices: {}".format(exc)
                    summaryResult.append(resultObjCopy)
        return (summaryResult)
Example #2
 def run(self):
     """
         Az adatok ismétlődő lekérdezése. 
         Az adatokat a self.refreshed signallal kerülnek átadásra.
     """
     config = utils.getConfig()
     checking_u = True 
     while not self.stopped:
         try:
             src = urllib2.urlopen(config["netwatcher"]["src"]).read()
             self.data = json.loads(src)
             if self.data['error'] == "false":
                 self.data['error'] = False
                 self.data['window1day']['current'] = int(self.data['window1day']['current'])
                 self.data['window4day']['current'] = int(self.data['window4day']['current'])
                 self.checking_transfer()
                 self.refreshed.emit(self.data)
                 # Test the connection on every 2nd iteration.
                 if checking_u: 
                     self.checking_connect()
                     checking_u = False
                 else:
                     checking_u = True
             else:
                 raise Exception()
         except Exception:
             self.refreshed.emit({'error': True})
         time.sleep(config["netwatcher"]["refresh_time"])
Example #3
def checkMovieClip():
    startRecode("../tank/res/tank/action")
    global config
    config = getConfig("gameConfig.tank_ani")
    for action_id in config:
        checkTankAction(action_id)
    print "checkMovieClip complete......"
Example #4
def run_multiple_best_beta():
    opt = utils.getConfig()
    lr, weightDecay, beta = (0.0025, 0.0002, 1)
    sessionName = "ImageNet"
    opt.main_tag = "bigger-beta"

    j = 2
    opt.tag2 = "beta_{}".format(j)
    for i in range(3):
        hparams = (lr, weightDecay, (1.0 / 2**j) * beta)
        opt_clean = get_clean(opt, hparams)
        opt_ssl = get_ssl(opt, hparams)
        opt_pretrained_clean = get_pretrained_clean(opt, hparams)
        opt_pretrained_ssl = get_pretrained_ssl(opt, hparams)

        # opt_clean.sessionName = "{}_id:{}-{}".format(sessionName, j, i)
        # run_train(opt_clean)

        opt_ssl.sessionName = "{}+Cifar_id:{}-{}".format(sessionName, j, i)
        run_train(opt_ssl)

        # opt_pretrained_clean.sessionName = "{}->Cifar_id:{}-{}".format(sessionName, j, i)
        # opt_pretrained_clean.net = "./"+opt_clean.sessionName+".pth"
        # run_train(opt_pretrained_clean)

        opt_pretrained_ssl.sessionName = "{}+Cifar->Cifar_id:{}-{}".format(
            sessionName, j, i)
        opt_pretrained_ssl.net = "./" + opt_ssl.sessionName + ".pth"
        run_train(opt_pretrained_ssl)
Example #5
def test():
    ##
    config = getConfig()
    # define transform image
    transform_test = transforms.Compose([
        transforms.Resize((config.image_size, config.image_size)),
        # transforms.CenterCrop(config.input_size),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
    ])

    net = resnet101(pretrained=True, use_bap=False)
    in_features = net.fc_new.in_features
    new_linear = torch.nn.Linear(in_features=in_features, out_features=25)
    net.fc_new = new_linear

    # load checkpoint
    checkpoint_path = os.path.join(config.checkpoint_path,
                                   'model_best.pth.tar')
    load_state_dict = torch.load(checkpoint_path,
                                 map_location='cpu')['state_dict']
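    # strip any 'module.' prefix (added when the checkpoint was saved from a DataParallel-wrapped model)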
    new_state_dict = {}
    for key, value in load_state_dict.items():
        new_key = key.replace('module.', '')
        new_state_dict[new_key] = value
    net.load_state_dict(new_state_dict)
    img_dir = config.image
    image = Image.open(img_dir).convert('RGB')
    image = transform_test(image)
    preds, _, _ = net(image.unsqueeze(0))
    print(torch.sigmoid(preds))
Example #6
def run_multiple_with_pretrain():
    opt = utils.getConfig()
    lr, weightDecay, beta = (25 * 0.0005, 25 * 0.00004, 0.005)
    sessionName = "ImageNet"
    for i in range(1):
        for j in range(1):
            hparams = ((0.2**i) * lr, (0.2**j) * weightDecay, beta)
            opt_clean = get_clean(opt, hparams)
            opt_ssl = get_ssl(opt, hparams)
            opt_pretrained_clean = get_pretrained_clean(opt, hparams)
            opt_pretrained_ssl = get_pretrained_ssl(opt, hparams)

            opt_clean.sessionName = "{}_id:{}-{}".format(sessionName, i, j)
            run_train(opt_clean)

            opt_ssl.sessionName = "{}+Cifar_id:{}-{}".format(sessionName, i, j)
            run_train(opt_ssl)

            opt_pretrained_clean.sessionName = "{}->Cifar_id:{}-{}".format(
                sessionName, i, j)
            opt_pretrained_clean.net = "./" + opt_clean.sessionName + ".pth"
            run_train(opt_pretrained_clean)

            opt_pretrained_ssl.sessionName = "{}+Cifar->Cifar_id:{}-{}".format(
                sessionName, i, j)
            opt_pretrained_ssl.net = "./" + opt_ssl.sessionName + ".pth"
            run_train(opt_pretrained_ssl)
Example #7
def get_products():
    conf = ut.getConfig()
    da = DataAccess(conf)

    query = "select * from products"
    result = da.select_row(query)
    return result
Example #8
def main():
   """
   Main function, call searchTorrentSite
   """
   signal.signal(signal.SIGINT, signal_handler)

   arguments = docopt(__doc__, version=__version__)

   if arguments['--debug']:
      logger.level = logging.DEBUG
   elif arguments['--warning']:
      logger.level = logging.WARNING
   elif arguments['--error']:
      logger.level = logging.ERROR

   logger.info('Torrent Searcher')
   logger.debug(arguments)

   # Fetch config
   config = getConfig()

   if arguments['-s'] in config['DEFAULT']['SITE_OPTIONS'].split(','):
      site = arguments['-s']
      logger.debug('site selected: {}'.format(site))
   else:
      logger.error('"{}" is a invalid site. Select from: {}'.format(arguments['-s'], config['DEFAULT']['SITE_OPTIONS']))
      sys.exit()

   searchTorrentSite(arguments['<query>'], site, arguments['-f'], arguments['--print'], config)
Example #9
 def checking_transfer(self):
     """ A forgalomkorlátok, státuszok ellenőrzése."""
     config = utils.getConfig()
     if not self.forced:
         if (self.data['window1day']['current'] >= config["netwatcher"]["one_day_limit"]*1024**2) or (self.data['window4day']['current'] >= config["netwatcher"]["four_day_limit"]*1024**2):
             try:
                 self.api.deactivate()
                 self.status = 0
             except uwebapi.ConnectionError:
                 self.uconnected.emit(False)
         elif (self.data['window1day']['current'] <= config["netwatcher"]["reactivate_limit1"]*1024**2) or (self.data['window4day']['current'] <= config["netwatcher"]["reactivate_limit4"]*1024**2):
             try:
                 self.api.reactivate()
                 self.status = 1
             except uwebapi.ConnectionError:
                 self.uconnected.emit(False)
     else:
         if self.status == 1 or self.status == 2:
             try:
                 self.api.deactivate()
                 self.status = 2
             except uwebapi.ConnectionError:
                 self.uconnected.emit(False)
         if self.status == 0:
             try:
                 self.api.reactivate()
                 self.status = 3
             except uwebapi.ConnectionError:
                 self.uconnected.emit(False)
     self.statusSig.emit(self.status)
Example #10
def temp(csvPath, col):
  config = utils.getConfig()
  reader = csv.reader(open(csvPath, 'rU'), delimiter=',')
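  # Fields: 0:LN, 1:FN, 2:id, >2:scores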
  data = [{'name': "{0[0]}, {0[1]}".format(row), 'score': row[col], 'pid': row[2]} for row in reader if row[col] not in [0, '0', '-', '']]
  utils.check("Data: ", data)

  g = Gradesource(config['gradesourceLogin'], config['gradesourcePasswd'])
  g.importScoresBy(data, 'pid')
Example #11
 def deactivate(self):
     """ Minden aktív torrent megállítása. """
     config = utils.getConfig()
     for i in self.get_torrents():
         if i[1]&1 == 1:
             config["torrents"].append(i[0])
             self.action("stop", i[0])
     utils.setConfig(config)
Example #12
def checkMovieClip():
    startRecode("/Users/tanzuoliang/Documents/projects/tank/res/tank/action")
    #	readConfig("/Users/tanzuoliang/Documents/projects/tank/src/gameConfig/gameConfig.js")
    global config
    config = getConfig("gameConfig.tank_ani")
    for action_id in config:
        checkTankAction(action_id)
    print "checkMovieClip complete......"
Example #13
def validate():
    ##
    engine = Engine()
    config = getConfig()
    device = torch.device("cuda:" + str(config.device))
    # define dataset
    transform_test = transforms.Compose([
        transforms.Resize((config.image_size, config.image_size)),
        # transforms.CenterCrop(config.input_size),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
    ])
    val_dataset = CustomDataset('data/movie_val.csv',
                                'data/movie/images',
                                transform=transform_test)
    val_loader = DataLoader(val_dataset,
                            batch_size=config.batch_size,
                            shuffle=False,
                            num_workers=config.workers,
                            pin_memory=True)

    net = resnet101(pretrained=True, use_bap=False)
    in_features = net.fc_new.in_features
    new_linear = torch.nn.Linear(in_features=in_features, out_features=25)
    net.fc_new = new_linear

    # load checkpoint
    use_gpu = torch.cuda.is_available() and config.use_gpu
    if use_gpu:
        net = net.to(device)
    gpu_ids = [int(r) for r in config.gpu_ids.split(',')]
    if use_gpu and config.multi_gpu:
        net = torch.nn.DataParallel(net, device_ids=gpu_ids)
    checkpoint_path = os.path.join(config.checkpoint_path,
                                   'model_best.pth.tar')
    load_state_dict = torch.load(checkpoint_path,
                                 map_location=device)['state_dict']
    new_state_dict = {}
    for key, value in load_state_dict.items():
        new_key = key.replace('module.', '')
        new_state_dict[new_key] = value
    net.load_state_dict(new_state_dict)

    # define loss
    criterion = torch.nn.BCEWithLogitsLoss()
    if use_gpu:
        criterion = criterion.cuda()
    state = {
        'model': net,
        'val_loader': val_loader,
        'criterion': criterion,
        'config': config,
        'device': device,
        'step': 0,
        'lr': config.lr
    }
    prec1, fprec, val_loss = engine.validate(state)
    print(prec1)
Example #14
    def __init__(self):
        self.effectConfig = getConfig("gameConfig.effect")
        self.soundConfig = getConfig("gameConfig.sound")

        self.skillConfig = getConfig("gameConfig.skill")
        self.skillPropConfig = getConfig("gameConfig.skill_prop")
        self.buffConfig = getConfig("gameConfig.buff")
        self.skillItemConfig = getConfig("gameConfig.skill_item")

        self.itemConfig = getConfig("gameConfig.item")
        self.tankConfig = getConfig("gameConfig.tank")
        self.tankAniConfig = getConfig("gameConfig.tank_ani")

        self.rootActionPath = "../res/tank/action"
Example #15
def checkSkillAction():
	tankConfig = getConfig("gameConfig.tank")
	for tank_id in tankConfig:
		attack_id = "%d"%tankConfig[tank_id]["attack_id"]
		skill_id = "%d"%tankConfig[tank_id]["skill_id"]
		model_id = "%d"%tankConfig[tank_id]["model"]
		checkSkill(attack_id, model_id,False)
		checkSkill(skill_id, model_id,True)
	
	print "checkSkill complete......"
Example #16
    def download_summary(self, ticker):
        url = utils.getConfig("Yahoo-api", "SUMMARYQUERY").format(ticker, "summaryDetail")
        data = requests.get(url).json()
        return data

# stock = Stock()
# print(stock.get_summary("kmi"))
# print(stock.download_summary("KMI"))
# print(stock.get_summary_data("AAPL"))
# print(utils.getConfig("Yahoo-api", "QUERY").format("KMI", "price"))
Example #17
def uploadClickerScores(csvPath, col):
  config = utils.getConfig()

  reader = csv.reader(open(csvPath, 'rU'), delimiter=',')
  # Fields: 0:LN, 1:FN, 2:id, >2:scores
  data = [{'name': '%s, %s' % (row[0], row[1]), 'score': row[col], 'pid': row[2]} for row in reader if row[col] not in [0, '0', '-', '']]
  utils.check("Clicker data: ", data)

  g = Gradesource(config['gradesourceLogin'], config['gradesourcePasswd'])
  g.importScoresBy(data, 'pid')
Example #18
def run_multiple_beta():
    opt = utils.getConfig()
    lr, weightDecay, beta = (0.0025, 0.0002, 0.005)
    sessionName = "ImageNet"
    opt.main_tag = "multi-task-learning"
    for i in range(1):
        hparams = (lr, weightDecay, (0.5**i) * beta)
        opt_ssl = get_clean(opt, hparams)

        opt_ssl.sessionName = "{}+Cifar_beta_id:{}".format(sessionName, i)
        run_train(opt_ssl)
Example #19
 def getSymbolList(self, query):
     markets = (utils.getSection("Markets", "USA")).split(", ")
     if isinstance(query, list):
         query = query[0]
     url = utils.getConfig("Yahoo-api", "URL").format(query)
     listsymbols = []
     result = requests.get(url).json()
     # print(result['ResultSet']['Result'])
     for x in result['ResultSet']['Result']:
         if x['exchDisp'] in markets:
             listsymbols.append({"symbol": x['symbol'], "name": x['name']})
     return json.dumps(listsymbols)
Example #20
def uploadMoodleQuizScores():
  config = utils.getConfig()

  m = Moodle(config['moodleLogin'], config['moodlePasswd'])
  _, scores = m.getScores(config['moodleCourseId'])
  # Fields: 0:FN, 1:LN, 2:pid, 3:inst, 4:dpt, 5:email, 6:total, 7:score
  data = [{'name': '%s, %s' % (row[1], row[0]), 'score': row[7], 'pid': row[2], 'email': row[5]} for row in scores if row[7] not in [0, '0', '-', '']]
  
  utils.check("Moodle data: ", data)

  g = Gradesource(config['gradesourceLogin'], config['gradesourcePasswd'])
  g.importScoresBy(data, 'pid')
Example #21
 def __init__(self):
     config = getConfig()
     self.username = config['username']
     self.password = config['password']
     IGBot.browser = webdriver.Firefox(
         executable_path=os.path.join(THIS_FOLDER, 'geckodriver.exe'))
     IGBot.browser.set_window_position(0, 0)
     IGBot.browser.set_window_size(768, 1024)
     IGBot.browser.get("https://instagram.com")
     self.loginBot()
     self.closeSaveLogin()
     self.closeNotif()
Example #22
def download(opt):
    print("{:-<50}{:-^100}{:-<50}".format('-', 'download', '-'))
    print("参数: %s" % opt)
    print("-" * 200)
    if opt[1].startswith("http"):
        utils.download(opt[1])
    else:
        try:
            flag = re.match(r'\d+', opt[1])
            detail = getDetail(flag.string)
            config = utils.getConfig()
            # print()
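            # m3u8Map: episode title -> .m3u8 URL, parsed from vod_play_url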
            m3u8Map = dict()
            plays = detail['list'][0]['vod_play_url']
            index = str(str(plays)).find('#')
            if index > 0:
                ls = str(plays).split("#")
            else:
                ls = str(plays).split("$$$")
            for s in ls:
                if (s.endswith(".m3u8")):
                    ss = s.split('$')
                    m3u8Map[ss[0]] = ss[1]
            if len(m3u8Map.keys()) == 1:
                print(config['savePath'])
                m3u8.main([m3u8Map.popitem()[1]], config['savePath'],
                          detail['list'][0]['vod_name'])
            else:
                urlList = list()
                dirList = list()
                titleList = list()
                index = str(opt[2]).find('-')
                if index > 0:
                    aa = opt[2].split('-')
                    for k in range(int(aa[0]), int(aa[1]) + 1):
                        titleList.append(
                            '第{}集'.format("0" * (len(aa) - len(str(k))) +
                                          str(k)))
                else:
                    aa = str(opt[2]).split(',')
                    for k in aa:
                        titleList.append('第%s集' % k)
                print("下载视频")
                print(titleList)
                for k in titleList:
                    # print(k)
                    dirList.append(config['savePath'] + "/" +
                                   detail['list'][0]['vod_name'])
                    urlList.append(m3u8Map[k])
                m3u8.main(urlList, dirList, titleList)
        except:
            print("参数有误")
Example #23
 def getSymbol(self, symbol):
     url = utils.getConfig("Yahoo-api", "URL").format(symbol)
     listsymbols = []
     result = requests.get(url).json()
     print(result)
     for x in result['ResultSet']['Result']:
         if x['symbol'] == symbol:
             del listsymbols[:]
             listsymbols.append({"symbol": x['symbol'], "name": x['name']})
             return json.dumps(listsymbols)
         else:
             listsymbols.append({"symbol": x['symbol'], "name": x['name']})
     return json.dumps(listsymbols)
Example #24
def importGradesourceNamesInMySQL(mysqlClassId):
  config = utils.getConfig()
  g = Gradesource(config['gradesourceLogin'], config['gradesourcePasswd'])
  infos = g.studentsInfo()
  
  db = torndb.Connection("localhost", "ta", user = "******", password = "******")
  for name, info in infos.items():
    sql = "SELECT id FROM students WHERE gs_name = %s AND pid = %s AND email = %s"
    if not db.get(sql, name, info['pid'], info['email']):
      sql = "INSERT INTO students (gs_name, pid, email, class_id) VALUES (%s, %s, %s, %s)"
      db.execute(sql, name, info['pid'], info['email'], mysqlClassId)
      cprint("%s not found, inserting" % name, 'green')
    else:
      print "%s found, doing nothing" % name
Example #25
    def __init__(self, tk):
        self.goodId = getConfig()["maxId"]
        sw = (tk.winfo_screenwidth() - 400) / 2 + 320
        # get the screen width
        sh = (tk.winfo_screenheight() - 700) / 2

        self.top = Toplevel(tk)
        self.top.geometry("%dx%d+%d+%d" % (400, 700, sw, sh))
        # prevent resizing the window
        self.top.resizable(0, 0)
        # self.top.attributes("-toolwindow", True)
        # self.top.wm_attributes("-topmost", True)
        self.top.title("抓图工具")
        self.initViews()
Example #26
 def download_events(self,
                     ticker,
                     start=0,
                     end=9999999999,
                     interval="1d",
                     event="div"):
     url = utils.getConfig("Yahoo-api",
                           "EVENTSMODULE").format(symbol=ticker,
                                                  start=start,
                                                  end=end,
                                                  interval=interval,
                                                  event=event)
     data = requests.get(url).json()
     return data
Example #27
 def get_settings(self):
     config = utils.getConfig()
     self.odl_sb.setValue(config["netwatcher"]["one_day_limit"])
     self.fdl_sb.setValue(config["netwatcher"]["four_day_limit"])
     self.odr_sb.setValue(config["netwatcher"]["reactivate_limit1"])
     self.fdr_sb.setValue(config["netwatcher"]["reactivate_limit4"])
     self.ref_sb.setValue(config["netwatcher"]["refresh_time"])
     self.email_chk.setChecked(config["general"]["send_email"])
     self.email_lt.setText(config["general"]["email_address"])
     self.de_chk.setChecked(config["general"]["deactive"])
     self.re_chk.setChecked(config["general"]["reactive"])
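     # the stored WebUI auth value is base64 of "username:password"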
     auth = base64.decodestring(config["webui"]["auth"]).split(":")
     self.usr_lt.setText(auth[0])
     self.psw_lt.setText(auth[1])
     self.url_lt.setText(config["webui"]["url"])
     self.prt_lt.setText(str(config["webui"]["port"]))
Example #28
 def set(self):
     config = utils.getConfig()
     config["netwatcher"]["one_day_limit"] = self.odl_sb.value()
     config["netwatcher"]["four_day_limit"] = self.fdl_sb.value()
     config["netwatcher"]["reactivate_limit1"] = self.odr_sb.value()
     config["netwatcher"]["reactivate_limit4"] = self.fdr_sb.value()
     config["netwatcher"]["refresh_time"] = self.ref_sb.value()
     config["webui"]["auth"] = base64.encodestring("%s:%s" % (self.usr_lt.text(), self.psw_lt.text()))
     config["webui"]["url"] = self.url_lt.text()
     config["webui"]["port"] = int(self.prt_lt.text())
     config["general"]["send_email"] = self.email_chk.isChecked()
     config["general"]["email_address"] = self.email_lt.text()
     config["general"]["deactive"] = self.de_chk.isChecked()
     config["general"]["reactive"] = self.re_chk.isChecked()
     utils.setConfig(config)
     self.close()
Example #29
def test():
    ##
    engine = Engine()
    config = getConfig()
    data_config = getDatasetConfig(config.dataset)
    # define dataset
    transform_test = transforms.Compose([
        transforms.Resize((config.image_size, config.image_size)),
        transforms.CenterCrop(config.input_size),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
    ])
    val_dataset = CustomDataset(data_config['val'],
                                data_config['val_root'],
                                transform=transform_test)
    val_loader = DataLoader(val_dataset,
                            batch_size=config.batch_size,
                            shuffle=False,
                            num_workers=config.workers,
                            pin_memory=True)
    # define model
    if config.model_name == 'inception':
        net = inception_v3_bap(pretrained=True, aux_logits=False)
    elif config.model_name == 'resnet50':
        net = resnet50(pretrained=True)

    in_features = net.fc_new.in_features
    new_linear = torch.nn.Linear(in_features=in_features,
                                 out_features=val_dataset.num_classes)
    net.fc_new = new_linear

    # load checkpoint
    use_gpu = torch.cuda.is_available() and config.use_gpu
    if use_gpu:
        net = net.cuda()
    gpu_ids = [int(r) for r in config.gpu_ids.split(',')]
    if use_gpu and len(gpu_ids) > 1:
        net = torch.nn.DataParallel(net, device_ids=gpu_ids)
    #checkpoint_path = os.path.join(config.checkpoint_path,'model_best.pth.tar')
    net.load_state_dict(torch.load(config.checkpoint_path)['state_dict'])

    # define loss
    criterion = torch.nn.CrossEntropyLoss()
    if use_gpu:
        criterion = criterion.cuda()
    prec1, prec5 = engine.test(val_loader, net, criterion)
Example #30
    def download_prices(self, ticker, start=0, end=9999999999, interval="1d"):
        url = utils.getConfig("Yahoo-api",
                              "PRICERANGE").format(symbol=ticker,
                                                   start=start,
                                                   end=end,
                                                   interval=interval)
        data = requests.get(url).json()
        return data


# historical = Historical()
# print(historical.get_events("aapl", start="2020-11-01", end="2020-11-02", event="dividend"))
# print(historical.get_prices("aapl", start="2020-11-01", end="2020-11-02"))
# start = "2020-11-01"
# end = "2020-11-02"
# startdt = utils.format_date(start, True)
# enddt = utils.format_date(end, True)
# print(historical.validate_date_range(start=startdt, end=enddt))
Example #31
def main():
    # global db
    db = getConfig()
    data = execSQL(db, "SELECT VERSION()")
    print(time.strftime('[%H:%M:%S]') + "The version of database: %s " % data)
    time.sleep(1)
    data = execSQL(db, "show variables like '%general_log%';")[1]
    print(time.strftime('[%H:%M:%S]') + 'The status of log:' + data)
    if data == "OFF":
        try:
            print(time.strftime('[%H:%M:%S]') + 'Starting log mode...')
            time.sleep(1)
            try:
                # logPath = r'D:\\github\\MySQL_Monitor\\'
                logPath = os.getcwd()
                #print(logPath)
                global log
                logName = str(time.strftime('%Y_%m_%d')) + "_log.txt"
                log = logPath + "/" + logName
                # Windows backslashes are not accepted in the log file path, so normalize to forward slashes
                log = log.replace("\\", "/")
                data = execSQL(db,
                               "set global general_log_file='" + log + "';")
            except:
                pass

            data = execSQL(db, "set global general_log=on;")
            data = execSQL(db, "show variables like '%general_log%';")[1]
            if data == "ON":
                print(time.strftime('[%H:%M:%S]') + 'Log is started.')
                print(time.strftime('[%H:%M:%S]') + 'Log monitor running...')
                log = str(
                    execSQL(db, "show variables like 'general_log_file';")[-1])
                logMonitor(log, db)
        except:
            print(time.strftime('[%H:%M:%S]') + 'Log starting failed.')
            exit()
    else:
        print(time.strftime('[%H:%M:%S]') + 'Log monitor running...')
    log = str(execSQL(db, "show variables like 'general_log_file';")[-1])
    db.close()
    logMonitor(log, db)
Example #32
def integrationTest():
    
    # TEST RESULT
    success = True
    
    # CONFIG 
    dbConfig, BATCH_SIZE = utils.getConfig()
    
    # Run the test cases
    success = runTestCase(happyFlow, dbConfig, 100) and success
    success = runTestCase(catchNonMatchingSpecAndDataFile, dbConfig, 100) and success
    success = runTestCase(rollbackWhenDataIsCorrupted, dbConfig, 100) and success
    
    if success :
        print("TEST CASE PASS")
        print("Yay! all test case passed")
    else:
        print("TEST CASE FAIL")
        print("Noooo! you broke something")
Example #33
def _readConfig():

    nameConfig = utils.getConfig()

    fileOut = 'configurations/{0}.yaml'.format(nameConfig)

    yaml.add_implicit_resolver("!pathex", pattern)
    yaml.add_constructor('!pathex', pathex_constructor)

    with open('configurations/default.yaml') as stream:
       R = yaml.load(stream)

    with open(fileOut, 'r') as stream:
       Rb = yaml.load(stream)

    R = dict(mergedicts(R,Rb))


    return R
Example #34
def searchTorrentSite(query,
                      site='jackett',
                      filter=None,
                      print_result=False,
                      config=None):
    """
   Selects site based on input and finds torrents for that site based on query

   :param configparser.ConfigParser config: integer of size of torrent filest
   :param str query: query to search search torrents for
   :param str site: the site we want to index/scrape
   :param boolean print_result: if the in results should be printed to terminal
   :return: json list with results
   :rtype: str
   """
    if config is None:
        config = getConfig()
    logger.debug('Searching for query {} at {}'.format(query, site))

    if site == 'piratebay':
        pirate = Piratebay(config['PIRATEBAY']['HOST'],
                           config['PIRATEBAY']['PATH'],
                           config['PIRATEBAY']['LIMIT'],
                           config['PIRATEBAY']['SSL'])
        torrents_found = pirate.search(query)
    elif site == 'jackett':
        jackett = Jackett(config['JACKETT']['APIKEY'],
                          config['JACKETT']['HOST'], config['JACKETT']['PATH'],
                          config['JACKETT']['LIMIT'],
                          config.getboolean('JACKETT', 'SSL'))
        torrents_found = jackett.search(query)

    if (filter):
        torrents_found = chooseCandidate(torrents_found)

    jsonList = createJSONList(torrents_found)

    if (print_result):
        print(jsonList)

    return jsonList
Example #35
def main():
    # global db
    db = getConfig()

    data = execSQL(db, "SELECT VERSION()")
    print(time.strftime('[%H:%M:%S]') + "The version of database: %s " % data)
    time.sleep(1)

    st_m = statusMonitor(db)

    while True:
        print('TABLES')
        print('-' * 20)
        st_m.show_open_table()
        print('\n\nTHREADS')
        print('-' * 20)
        st_m.show_thread()
        print('\n\nCONNECTIONS')
        st_m.show_all_connections()
        time.sleep(1)
        os.system('cls')
Example #36
def createProduct():
    try:
        p_name = request.json['name']
        p_desc = request.json['description']
        p_price = request.json['price']

        p_int_price = float(p_price)

        conf = ut.getConfig()
        da = DataAccess(conf)

        query = "INSERT INTO products (`name`, `description`, `price`) VALUES ('{0}', '{1}', {2})".format(
            p_name, p_desc, str(p_price))
        print query
        da.execute_query(query)

        p_obj = dict(success=True, error="")
    except Exception as ex:
        p_obj = dict(success=False, error=str(ex))

    return json.dumps(p_obj)
Example #37
def main():
    config = getConfig()
    host = config['SSH']['host']
    user = config['SSH']['user']
    remotePath = config['FILES']['remote']
    localPath = config['FILES']['local']
    delugeScript = config['DELUGE']['script']

    remoteFiles = getFiles(remotePath, host, user)
    if len(remoteFiles) > 0:
        logger.info('Remote files found: {}'.format(remoteFiles),
                    es={'files': remoteFiles})
    else:
        logger.info('No remote files found')
    # print('Remote found: {}'.format(remoteFiles))

    localFiles = getFiles(localPath)
    # print('Local files: {}'.format(localFiles))
    if len(localFiles) > 0:
        logger.info('Local files found: {}'.format(localFiles),
                    es={'files': localFiles})
    else:
        logger.info('No local files found')

    newFiles = filesNotShared(localFiles, remoteFiles)
    if (newFiles):
        logger.info('New files: {}'.format(newFiles), es={'files': newFiles})
        exisitingFiles = list(set(remoteFiles).intersection(localFiles))
        logger.info('Existing files: {}'.format(exisitingFiles),
                    es={'files': exisitingFiles})

        transferedFiles = transferFiles(newFiles, localPath, remotePath, host,
                                        user)
        removeFromDeluge(delugeScript, transferedFiles)

    else:
        # print('No new files found to travel on the great transatlantic express')
        logger.info(
            'No new files found to travel on the great transatlantic express')
Example #38
def get_builds(proj):
    if not os.path.isfile(getProjPath(proj) + "/.ci.json"):
        return "Not found", 404

    if os.path.exists(getBuildPath(proj)):
        dirs = [
            entry for entry in os.listdir(getBuildPath(proj))
            if entry != "latest" and os.path.isdir(getBuildPath(proj, entry))
        ]

        data = []
        for ref in dirs:
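            # pair each build directory with its commit details and current build status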
            toAdd = {
                "commit": git.getCommitDetails(proj, ref),
                "build": compile.getStatus(proj, ref)
            }

            if ((proj, ref) in compile.q):
                toAdd["build"]["status"] = "queued"

            data.append(toAdd)

        return json.dumps({
            "list": data,
            "language": getConfig(proj).get("language", None),
            "id": git.repos[proj]["github"],
            "latest": parseRef(proj, "latest")
        }), {
            "Content-Type": "application/json"
        }
    else:
        return json.dumps({
            "list": [],
            "language": None,
            "latest": ""
        }), {
            "Content-Type": "application/json"
        }
Example #39
def test():
    ##
    config = getConfig()
    device = torch.device("cuda:" + str(config.device))
    # define transform image
    transform_test = transforms.Compose([
        transforms.Resize((config.image_size, config.image_size)),
        # transforms.CenterCrop(config.input_size),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
    ])

    net = resnet101(pretrained=True, use_bap=False)
    in_features = net.fc_new.in_features
    new_linear = torch.nn.Linear(in_features=in_features, out_features=25)
    net.fc_new = new_linear

    # load checkpoint
    use_gpu = torch.cuda.is_available() and config.use_gpu
    if use_gpu:
        net = net.to(device)
    gpu_ids = [int(r) for r in config.gpu_ids.split(',')]
    if use_gpu and config.multi_gpu:
        net = torch.nn.DataParallel(net, device_ids=gpu_ids)
    checkpoint_path = os.path.join(config.checkpoint_path,
                                   'model_best.pth.tar')
    load_state_dict = torch.load(checkpoint_path,
                                 map_location=device)['state_dict']
    new_state_dict = {}
    for key, value in load_state_dict.items():
        new_key = key.replace('module.', '')
        new_state_dict[new_key] = value
    net.load_state_dict(new_state_dict)
    img_dir = config.image
    image = Image.open(img_dir).convert('RGB')
    image = transform_test(image)
    preds, _, _ = net(image.unsqueeze(0).to(device))
    print(torch.sigmoid(preds))
Example #40
def main():

    # CONFIG
    dbConfig, BATCH_SIZE = utils.getConfig()

    # Connect to Database
    db, cursor, fail = utils.connectToDatabaseAndGetCursor(
        dbConfig, dbConfig["dbName"])

    # End program run when it cannot connect to DB
    if not fail:
        problematicFile = dataLoader.dataImports(db, cursor, BATCH_SIZE, "")
        db.close()

        # Show all ignored files
        if len(problematicFile) == 0:
            print("All file loaded successfully")
        else:
            print(
                "Some files/specs were not loaded; here is the list of unprocessed data:"
            )
            for file in problematicFile:
                print(file)
Example #41
 def reactivate(self):
     """ A megállított torrentek elindítása. """
     config = utils.getConfig()
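     # start every torrent recorded by deactivate() and remove it from the stored list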
     while len(config["torrents"]):
         self.action("start", config["torrents"].pop())
     utils.setConfig(config)
Example #42
import logging
import os
import urllib.parse
from tkinter import Tk
from tkinter.messagebox import askokcancel, CANCEL

from utils import getHome, getConfig, checkUpdate, downloadFixPack, applyFixPack, \
    check_exsit


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG,
                format='%(asctime)s [%(levelname)s] %(filename)s[line:%(lineno)d] %(message)s',
                datefmt='%a, %d %b %Y %H:%M:%S')
    if check_exsit("fanUpdater.exe") > 1:
        logging.debug("another fanUpdater.exe exists")
        os._exit(2)
    
    config = getConfig("updater.ini")
    check_version_service = config.get("default", "check_version_service")
    fixpack_service = config.get("default", "fixpack_service")
    
    APP_HOME = getHome() + "/../"
    absolute_version_file      = APP_HOME + "properties/app.version"
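    # the fix pack is saved under APP_HOME, named after the last path segment of fixpack_service (query string stripped)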
    absolute_fixpack_file      = APP_HOME + urllib.parse.unquote(fixpack_service).split('/')[-1].split("?")[0]
    TEMP                       = getHome() + "/TEMP/"

    while True:
        version, md5 = checkUpdate(check_version_service, absolute_version_file)
        if version:
            root = Tk()
            root.withdraw()
            result = askokcancel("fanUpdater", "发现新版本,是否更新?",
                         default=CANCEL)
Example #43
	LEFT OUTER JOIN places_common tc2 ON (tc2.id = h2.id)
ORDER BY ParentPlace, Place""" % (tenant, tenant)

    objects.execute(gethierarchy)
    #return objects.fetchall()
    return [list(item) for item in objects.fetchall()]



if __name__ == "__main__":

    from utils import getConfig

    form = {'webapp': 'barcodeprintDev'}

    config = getConfig(form)
    print getobjinfo('1-504', config)

    print '\nkeyinfo\n'
    # Kroeber, 20A, X  1,  1
    # Kroeber, 20AMez, 128 A
    for i, loc in enumerate(getlocations('Kroeber, 20A, X  1,  3', '', 1, config, 'keyinfo','pahma')):
        print 'location', i + 1, loc[0:12]

    sys.exit()


    config = getConfig('sysinvProd.cfg')
    print '\nrefnames\n'
    print getrefname('concepts_common', 'zzz', config)
    print getrefname('concepts_common', '', config)
Example #44
# -*- coding: utf-8 -*-

import re, sys, csv, pickle
from termcolor import colored, cprint
from gradesource import Gradesource
import utils

# Read the CSV files for grades
csvPath = sys.argv[1]
lines = list(csv.reader(open(csvPath, 'rU'), delimiter = ','))
first_line = lines[0]
# ignore 1st line and lines starting by '#' (unregistered clickers)
lines = [l for l in lines[1:] if l[0][0] != '#']

# Ask which columns to pick from the file
for col, title in enumerate(first_line):
  print colored(col, 'green') + ' ' + title
pid_col   = int(raw_input(colored('PID column ? ', 'green')))
score_col = int(raw_input(colored('Score column ? ', 'green')))

# Keep only lines with non-zero score
lines = [l for l in lines if l[score_col] not in [0, '0', '-', '']]
data  = [{'score': l[score_col], 'pid': l[pid_col]} for l in lines]

# Ask for the assessment to upload the grade
config = utils.getConfig()
g = Gradesource(config['gradesourceLogin'], config['gradesourcePasswd'])
g.importScoresBy(data, 'pid')
Example #45
 def set_authentication(self):
     """ Az azonosításhoz szükséges fejléc, és a cookie tárolásához szükséges tároló beállítása."""
     config = utils.getConfig()
     self.headers =  {"Authorization" : "Basic %s" % config["webui"]["auth"]}
     self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookielib.CookieJar()))
     self.url = "%s:%d/gui" % (config["webui"]["url"], config["webui"]["port"])