コード例 #1
0
ファイル: Network.py プロジェクト: dakinfemiwa/Messaging
 def listen(self):
     """Main server accept/receive loop (runs forever).

     Blocks on select() over self.LIST. A readable listening socket
     (self.gameSocket) means a new client: accept it and start tracking it.
     Any other readable socket delivers a ';'-separated position update,
     which is mirrored onto self.otherPlayer.
     """
     while True:
         try:
             # Wait until at least one tracked socket is readable.
             read_sockets, write_sockets, error_sockets = select.select(
                 self.LIST, [], [])
             for sock in read_sockets:
                 if sock == self.gameSocket:
                     # New inbound connection on the listening socket.
                     sockfd, address = self.gameSocket.accept()
                     self.LIST.append(sockfd)
                     Logger.log(
                         f'Client [{address[0]}:{address[1]}] connected to the server.',
                         'CONNECT')
                 else:
                     try:
                         receivedData = sock.recv(40, ).decode()
                     except:
                         # recv/decode failed -> treat as a disconnect.
                         # NOTE(review): `address` below is whatever the most
                         # recent accept() produced, not necessarily the peer
                         # of `sock`, and it is unbound if no accept happened
                         # yet in this method -- confirm intent.
                         try:
                             try:
                                 disconnected_user = self.connectedUsers[
                                     address]
                                 del self.connectedUsers[address]
                             except:
                                 # Unknown peer: mask the username in the log.
                                 disconnected_user = '******'
                             Logger.log(
                                 f'Client [{address[0]}:{address[1]}] ({disconnected_user}) disconnected from the server.',
                                 'DISCONNECT')
                         except Exception as error:
                             Logger.error(error)
                         sock.close()
                         self.LIST.remove(sock)
                         continue
                     if receivedData:
                         # Payload format: senderID;page;x;y
                         arguments = receivedData.split(';')
                         if arguments[0] != str(self.randomID):
                             # Mirror the other player only when they are on
                             # the same page as this game instance.
                             if arguments[1] == str(
                                     self.gameInstance.getPage()):
                                 self.otherPlayer.setLocation(
                                     round(float(arguments[2]), 2),
                                     round(float(arguments[3]), 2))
                             else:
                                 self.otherPlayer.hide()
         except Exception as error:
             Logger.error(error)
コード例 #2
0
 def __init__(self, session, js_url, login2ctrip):
     """Set up the Ctrip click-booking worker.

     Args:
         session: HTTP session object reused for requests.
         js_url: URL of the JS resource used by this flow.
         login2ctrip: login helper/flag forwarded from the caller.
     """
     super(CtripClickBooking, self).__init__()
     # Collaborators handed in by the caller.
     self.session = session
     self.js_url = js_url
     self.login2ctrip = login2ctrip
     # Internal bookkeeping.
     self.logger = Logger.getlogger()
     self.tasklist = 'PromoteVerify'
     # Upstream proxy endpoint plus its selection parameters.
     proxy_params = {
         "method": "c_hotel",
         "group": "cadsl",
         "mode": 1,
         "url": 1,
         "level": "HTTPS",
         "expire": -1
     }
     self.proxy = {'url': 'http://proxy.my.com/proxy', 'params': proxy_params}
     # Landing page for creating a new international hotel order.
     self.url_inputneworder = ('http://hotels.ctrip.com/internationalbook/'
                               'inputneworder.aspx?ctm_ref=hi_0_0_0_0_lst_sr_1_df_ls_1_n_hi_bk_1_0')
コード例 #3
0
class TestGovernLogin(unittest.TestCase):
    """UI tests for the govern login flow and the person pages.

    NOTE: the webdriver, config values and page objects below are created at
    class-definition time, so importing this module launches a Chrome browser.
    """

    warnings.simplefilter("ignore", ResourceWarning)

    # Shared logger for every test in this class.
    log = Logger()

    # Host and credentials read from the YAML test configuration.
    read_yaml = ReadYaml()
    govern_host = read_yaml.get_default_value("govern", "host")
    user_name = read_yaml.get_default_value("govern", "account")
    pass_word = read_yaml.get_default_value("govern", "password")
    # Browser session shared by all test methods.
    driver = webdriver.Chrome()
    driver.maximize_window()
    # Page objects bound to the shared driver.
    govern_login = GovernLogin(driver=driver)
    govern_yang_lao = GovernYangLao(driver=driver)

    @classmethod
    def setUpClass(cls):
        """Open the govern site and log in once for the whole class."""
        cls.log.info(
            "---------------------------setUpClass--STA-----------------------------"
        )
        cls.govern_login = GovernLogin(driver=cls.driver)
        cls.driver.get(cls.govern_host + "/Gover")
        cls.govern_login.login_info(username=cls.user_name,
                                    password=cls.pass_word)

    def test_01_enter_person(self):
        """Navigate to the person page and verify the page title."""
        self.log.info("--------enter_person-STA-------")
        self.govern_yang_lao.person_click()
        self.assertEqual("人员信息", self.driver.title)
        self.log.info("--------enter_person-END-------")

    def test_02_add_person(self):
        """Open the add-person dialog (smoke test; no assertion)."""
        self.log.info("--------add_person-STA-------")
        self.govern_yang_lao.add_person_click()
        self.log.info("--------add_person-END-------")

    @classmethod
    def tearDownClass(cls):
        """Log the end of the class run.

        NOTE(review): quit() is commented out, so the browser stays open
        after the run -- confirm this is intentional.
        """
        # self.driver.quit()
        cls.log.info(
            "---------------------------tearDownClass--END-----------------------------"
        )
コード例 #4
0
def process(cf):
    """Run a full experiment described by the config object *cf*.

    Redirects stdout into the experiment log, builds the data generators,
    optimizer, model and callbacks, then optionally trains, tests and
    predicts depending on cf.train_model / cf.test_model / cf.pred_model.
    """
    # Enable log file: everything printed also goes to cf.log_file.
    sys.stdout = Logger(cf.log_file)
    print(' ---> Init experiment: ' + cf.exp_name + ' <---')

    # Create the data generators
    train_gen, valid_gen, test_gen = Dataset_Generators().make(cf)

    # Create the optimizer
    print('\n > Creating optimizer...')
    optimizer = Optimizer_Factory().make(cf)

    # Build model
    print('\n > Building model...')
    model = Model_Factory().make(cf, optimizer)

    # Create the callbacks
    print('\n > Creating callbacks...')
    cb = Callbacks_Factory().make(cf, valid_gen)

    if cf.train_model:
        # Train the model and report wall-clock training time.
        startt = time.time()
        model.train(train_gen, valid_gen, cb)
        print('   Training time: {}. seconds'.format(time.time() - startt))

    if cf.test_model:
        # Compute validation metrics only when a validation set exists
        # (PEP 8: identity check against None, not equality).
        if valid_gen is not None:
            model.test(valid_gen)
        # Compute test metrics
        model.test(test_gen)

    if cf.pred_model:
        # Compute validation predictions only when a validation set exists.
        if valid_gen is not None:
            model.predict(valid_gen, tag='pred')
        # Compute test predictions
        model.predict(test_gen, tag='pred')

    # Finish
    print(' ---> Finish experiment: ' + cf.exp_name + ' <---')
コード例 #5
0
ファイル: process.py プロジェクト: caixuwuwu/MachineLearning
def main():
    """Command-line entry point.

    Parses the training/prediction arguments, runs ``process`` and converts
    the expected failure modes into log messages instead of tracebacks.
    """
    logger = Logger.get_instance(ConfManage.getString("LOG_CRON_NAME"))
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--date', help='日期', type=str)
    parser.add_argument('-p', '--pickle', type=str, help='数据集', default='data')
    parser.add_argument('estimator', help='算法选择', nargs='?', type=str, default='xgb')
    parser.add_argument('predict_target', help='目标值', nargs='?', type=str, default='accept')
    parser.add_argument('-f', '--feature-selected', help='特征值选择', action='store_true')
    parser.add_argument('-w', '--withhold', help='是否保存数据到bi数据库', action='store_true')
    parser.add_argument("-s", "--shift_days", help="The last few days", type=int, default=-1)
    args = parser.parse_args()
    # Lazy %-style logger arguments: interpolation happens only if emitted.
    logger.info('Arguments: estimator=%s, predict-target=%s, feature-selected=%r, withhold-bi-insertion=%r',
                args.estimator, args.predict_target, args.feature_selected, args.withhold)
    try:
        process(logger, args.pickle, args.estimator, args.predict_target, args.withhold, args.date, args.shift_days)
    except TestDataEmpty:
        logger.error('Test Data Empty!')
    except (AttributeError, ValueError) as err:
        logger.error(err)
        logger.error('Trace: {}'.format(traceback.format_exc()))
    except KeyboardInterrupt:
        # Fixed typo in the original message ("interupted").
        # NOTE(review): `loggable` is not defined in this excerpt; presumably
        # a module-level arrow format string -- confirm.
        logger.info('Process manually interrupted at %s',
                    arrow.now(tz=ConfManage.getString("ARROW_TIMEZONE")).format(loggable))
    logger.info('Releasing Logger...')
コード例 #6
0
ファイル: test_bagging.py プロジェクト: idoiaruiz/mcv-m5
def process(cf):
    """Run the ensemble (bagging) test phase for the experiment in *cf*."""
    # Mirror stdout into the experiment log file.
    sys.stdout = Logger(cf.log_file)
    print(' ---> Init experiment: ' + cf.exp_name + ' <---')

    # Build the generators; the training generator is created but unused here.
    train_gen, valid_gen, test_gen = Dataset_Generators().make(cf)

    # The optimizer is still required to build/compile the models.
    optimizer = Optimizer_Factory().make(cf)

    # Each ensemble member must load its own pretrained weights.
    cf.load_pretrained = True
    print('\n > Building models...')
    ensemble = Model_Factory().make(cf, optimizer)

    if cf.test_model:
        test_ensemble(ensemble, valid_gen, test_gen, cf)

    print(' ---> Finish experiment: ' + cf.exp_name + ' <---')
コード例 #7
0
ファイル: test.py プロジェクト: dakinfemiwa/Messaging
    def lobby(self, host=True):
        """Switch the UI into the lobby screen.

        When *host* is true a new lobby is created and this player joins the
        player list; otherwise an existing lobby is joined. Widget placement
        is identical to the original layout.
        """

        def on_return(event):
            # Send the entry contents as a chat line, then clear the entry.
            text = self.lobbyEntry.get()
            if text == '':
                return
            username = (testData['information']['username']).upper()
            self.send(f'CHAT^{username} ({self.gameState}): {text}\n')
            self.lobbyEntry.delete(0, END)

        if host:
            Logger.log('Created new lobby successfully.')
            self.chat('HOST: Created new lobby\n')
            self.gameState = 'HOST-LOBBY'
            self.gamePlayers.append(testData['information']['username'])
            self.refresh()
        else:
            Logger.log('Joined a lobby successfully.')
            self.gameState = 'JOIN-LOBBY'

        Logger.log(f'Set game state to \'{self.gameState}\'.')

        self.clear()
        self.lobbyChat.place(relx=.052, rely=.26)
        self.lobbyEntry.bind('<Return>', on_return)
        self.lobbyEntry.place(relx=.052, rely=.73)
        self.lobbyEntry.focus_force()
        self.lobbyPlayers.place(relx=.61, rely=.25)

        # Player name labels, one row per slot.
        for widget, rely in ((self.playerOne, .35), (self.playerTwo, .45),
                             (self.playerThree, .55), (self.playerFour, .65)):
            widget.place(relx=.61, rely=rely)

        # Ready indicators, aligned with the player rows.
        for widget, rely in ((self.readyOne, .35), (self.readyTwo, .45),
                             (self.readyThree, .55), (self.readyFour, .65)):
            widget.place(relx=.92, rely=rely)

        self.lobbyReady.place(relx=.6075, rely=.75)
        self.lobbyLeave.place(relx=.8, rely=.75)
コード例 #8
0
ファイル: menu.py プロジェクト: morestart/auto_deploy
 def install(self, command: str):
     """Dispatch an installation/maintenance *command* for the current OS.

     Only Ubuntu 18.04 is handled; every other system gets a warning.
     Behaviour matches the original if/elif chain exactly (an unknown
     command on Ubuntu 18.04 is silently ignored).
     """
     if self.os_name == 'Windows':
         Logger.warn('暂不支持此系统')
     elif self.os_name == "Linux":
         raw = subprocess.check_output("cat /etc/os-release", shell=True)
         release_lines = raw.decode("utf-8").split('\n')
         # Recognise Ubuntu 18.04 by its os-release fields.
         if "VERSION_ID=\"18.04\"" in release_lines and "NAME=\"Ubuntu\"" in release_lines:
             # Map each menu label onto the UbuntuService method to run.
             actions = {
                 "更新Python3": UbuntuService.upgrade_python3,
                 "更新软件包列表": UbuntuService.update_source_list,
                 "更新系统软件包": UbuntuService.upgrade_software,
                 "更换系统apt源": UbuntuService.change_apt_source,
                 "设置时区": UbuntuService.set_timezone,
                 "安装JDK8": UbuntuService.install_java,
                 "安装emqx": UbuntuService.install_emqx,
                 "安装mosquitto": UbuntuService.install_mosquitto,
                 "安装ssh": UbuntuService.install_ssh,
                 "安装docker": UbuntuService.install_docker,
                 "安装redis": UbuntuService.install_redis,
                 "安装nginx": UbuntuService.install_nginx,
             }
             action = actions.get(command)
             if action is not None:
                 # Call the unbound method on a fresh service instance,
                 # mirroring the original per-branch UbuntuService() calls.
                 action(UbuntuService())
         # TODO: support other systems
         else:
             Logger.warn('暂不支持当前系统')
     else:
         Logger.warn('暂不支持当前系统')
コード例 #9
0
ファイル: gameserver.py プロジェクト: dakinfemiwa/Messaging
                    challengedSocket.send(str.encode('CONNECT4<>CHALLENGED<>' + name))
                else:
                    sender.send(b'GAME_ERROR<>MEMBER_NOT_FOUND')
            elif args[1] == 'ACCEPTED':
                time.sleep(0.1)
                challengedSocket = self.extendedUsers[args[2]]
                challengedSocket.send(str.encode('<>'.join(args)))

    def broadcast(self, message):
        """Send *message* to every connected client socket.

        The server's own listening socket is skipped. The message is logged
        (tag BROADCAST) only when it differs from the previously broadcast
        one, suppressing duplicate log lines.
        """
        global tempMsg
        try:
            # Encode once; the same bytes go to every client.
            payload = str.encode(message)
            for client in self.LIST:
                if client != self.serverSocket:
                    client.send(payload)
            if message != tempMsg:
                Logger.log(message.strip(), 'BROADCAST')
                tempMsg = message
        except Exception as error:
            Logger.error(error)


# Most recently broadcast message; lets broadcast() skip duplicate log lines.
tempMsg = ''


if __name__ == '__main__':
    # Start the game server; log (rather than crash on) any fatal error.
    try:
        game_server = GameServer()
        game_server.run()
    except Exception as e:
        Logger.error(e)
コード例 #10
0
    # Directory this script lives in (resolved through any symlinks).
    app_startup_path = os.path.dirname(os.path.realpath(__file__))

    # params
    # Basename of the invoking script, recorded for child processes.
    MAIN_PROCESS = sys.argv[0][sys.argv[0].rfind(r'/') + 1:]
    settings = {'parent_process': MAIN_PROCESS}
    params = sys.argv[0:]
    # Parse name=value CLI overrides for pool sizes and log path.
    for p in params[1:]:
        name = p.split('=')[0].strip(' ')
        if name == 'num_jspool':
            set_jspool(int(p.split('=')[1].strip(' ')))
        elif name == 'num_proxy_ip':
            set_proxy_ip(int(p.split('=')[1].strip(' ')))
        elif name == 'log_path':
            LOG_PATH = p.split('=')[1].strip(' ')

    # NOTE(review): LOG_NAME, thread_name and NUM_THREADPOOL are defined in
    # the enclosing scope, which is not visible in this excerpt.
    logger = Logger.getlogger(level=logging.DEBUG, name=LOG_NAME, path=LOG_PATH)
    logger.info('<<<========================= PhantomJS Service This log starting at %s =========================>>>', time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))

    logger.info('Thread: %s, num_threadpool=%s', thread_name, NUM_THREADPOOL)
    logger.info('Thread: %s, num_phantomjspool=%s', thread_name, get_jspool())
    # logger.info('Thread: %s, num_proxy_ip=%s', thread_name, get_proxy_ip())

    # start threading to update network PROXY
    # thread_proxy_daemon = FixProxy()
    # thread_proxy_daemon.name = 'thread_proxy_daemon'
    # thread_proxy_daemon.setDaemon(True)
    # thread_proxy_daemon.start()
    # logger.info('Thread: %s, thread_proxy_daemon start... please waiting for few minutes...', thread_name)

    # create phantomjs pool
    JS_PID = PhantomJSPool.init(num=get_jspool())
コード例 #11
0
ファイル: job_scheduler.py プロジェクト: Nepherius/Mangopie
 def __init__(self):
     """Create a scheduler with no jobs and a fresh id counter."""
     self.jobs = []           # registered jobs
     self.job_id_index = 0    # next job id to hand out
     # Logger channel name kept identical to the original.
     self.logger = Logger("setting_manager")
コード例 #12
0
# Constants resolved from the `setting` mapping (loaded elsewhere in this
# module -- not visible in this excerpt). The CL_* names are presumably
# collection/table identifiers -- confirm against the settings file.
DATA_BEGIN_DATE = setting['DATA_BEGIN_DATE']
CL_INDEXS = setting['CL_INDEXS']
CL_TOP10_HOLDER = setting['CL_TOP10_HOLDER']
CL_TOP10_FLOADHOLDER = setting['CL_TOP10_FLOADHOLDER']
CL_PLEDGE_STAT = setting['CL_PLEDGE_STAT']
CL_REPURCHASE = setting['CL_REPURCHASE']
CL_STK_HOLDERNUMBER = setting['CL_STK_HOLDERNUMBER']
CL_STK_HOLDERTRADE = setting['CL_STK_HOLDERTRADE']
CL_STK_POOL_DAILY = setting['CL_STK_POOL_DAILY']
CL_STK_POOL_CUR = setting['CL_STK_POOL_CUR']
CL_STK_TOP_LIST = setting['CL_STK_TOP_LIST']
CL_STK_POOL_ZZ500 = setting['CL_STK_POOL_ZZ500']
CL_INDEX_ZZ500 = setting['CL_INDEX_ZZ500']
CL_SHORT_8_PCT = setting['CL_SHORT_8_PCT']

# Shared module-level logger.
LOG = Logger().getlog()

class IndexCode:
    """Ticker codes for the tracked market indexes.

    `_VALUES_TO_NAMES` maps each code string back to its constant name.
    """
    INDEX_SH = '000001_SH'
    INDEX_SZ = '399001_SZ'
    INDEX_ZX = '399005_SZ'
    INDEX_CY = '399006_SZ'
    INDEX_ZZ = '000905_SH'

    # Reverse lookup derived from the constants above (same order/content
    # as the original literal dict).
    _VALUES_TO_NAMES = {
        INDEX_SH: "INDEX_SH",
        INDEX_SZ: "INDEX_SZ",
        INDEX_ZX: "INDEX_ZX",
        INDEX_CY: "INDEX_CY",
        INDEX_ZZ: "INDEX_ZZ",
    }
コード例 #13
0
ファイル: encoder.py プロジェクト: DavidLopezSaez/Clipper-v2
    def __init__(self, error_messages, packager):
        """Keep the collaborators handed in by the caller and create this
        encoder's private logger."""
        self.__logger = Logger()
        # Error-message table and output packager, stored name-mangled.
        self.__packager = packager
        self.__error_messages = error_messages
コード例 #14
0
ファイル: emqx.py プロジェクト: morestart/auto_deploy
 def get_emq_status(self):
     """Run `sudo emqx_ctl status` and log a failure instead of raising."""
     status_cmd = "sudo emqx_ctl status"
     try:
         subprocess.run(status_cmd, shell=True, check=True)
     except subprocess.CalledProcessError:
         # Best-effort behaviour: report the failure, do not propagate.
         Logger.error('查询状态失败')
コード例 #15
0
class Request:
    """Thin wrapper around `requests` that logs every exchange.

    TLS verification is disabled (verify=False) and the related urllib3
    warnings are silenced, matching the original behaviour. The duplicated
    per-method logging has been factored into ``_log_exchange``.
    """

    def __init__(self):
        self.log = Logger()
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        warnings.simplefilter("ignore", ResourceWarning)

        # Also disable the insecure-request warning raised by requests'
        # vendored urllib3.
        requests.packages.urllib3.disable_warnings(
            urllib3.exceptions.InsecureRequestWarning)

    def _log_exchange(self, case_name, url, payload, response):
        """Log request URL, payload and response status for *case_name*."""
        self.log.info("【%s - 请求地址】:%s" % (case_name, url))
        self.log.info("【%s - 请求参数】: %s" % (case_name, payload))
        self.log.info("【%s - 响应码】: %d" % (case_name, response.status_code))

    def post_request_data(self, _url, _data, _headers, case_name=None):
        """POST form data; returns the `requests` response object."""
        response = requests.post(url=_url,
                                 data=_data,
                                 headers=_headers,
                                 verify=False)
        self._log_exchange(case_name, _url, _data, response)
        return response

    def post_request_json(self, _url, _json, _headers, case_name=None):
        """POST a JSON body; returns the `requests` response object."""
        response = requests.post(url=_url,
                                 json=_json,
                                 headers=_headers,
                                 verify=False)
        self._log_exchange(case_name, _url, _json, response)
        return response

    def post_request_files(self, _url, _files, _headers, case_name=None):
        """POST multipart files; returns the `requests` response object."""
        response = requests.post(url=_url,
                                 files=_files,
                                 headers=_headers,
                                 verify=False)
        self._log_exchange(case_name, _url, _files, response)
        return response

    def get_request(self, _url, _headers, _data=None, case_name=None):
        """GET with optional query params; returns the `requests` response."""
        response = requests.get(url=_url,
                                params=_data,
                                headers=_headers,
                                verify=False)
        self._log_exchange(case_name, _url, _data, response)
        return response
コード例 #16
0
# Run everything relative to the project base directory.
os.chdir(BASE_DIR)

import signal
from multiprocessing import Process
from dataB import auto_begin as abg
from proxy.clear_proxies import clear_proxies_db
from proxy.get_proxy import GetProxies
from tools.send_mail import SendMail
from tools.auto_analyse_data import AnalyseData
from tools.common import *
from tools.auto_check_articles_list import CheckArticlesList as CAL
from tools.logger import Logger
from dataP.auto_begin import ExpertDataBegin as EB

# Module-level logger named after this module.
logger = Logger(logger=__name__).getlog()

# NOTE(review): semantics of process_flag are inferred from its name; it is
# only set elsewhere -- confirm usage against the full file.
process_flag = 0
# Seconds between main-process cycles in normal vs abnormal operation.
normal_main_process_time_delta = 1800
abnormal_main_process_time_delta = 300
# Short hostname of this machine (trailing newline stripped).
hostname = os.popen('hostname').read()[:-1]

def get_work_times():
    """Assemble the action-time configuration from SETUP_FILE.

    Chooses production ("real") or test action times depending on the REAL
    flag, then merges the start/end times into that mapping in place.

    NOTE(review): no return statement is visible in this excerpt; the
    function may be truncated here -- confirm against the full file.
    """
    content = get_json_content(SETUP_FILE)
    # Individual sections of the schedule configuration.
    common_times = content['common_actions_time']
    real_times = content['real_actions_time']
    test_times = content['test_actions_time']
    se_times = content['start_end_times']
    # REAL == 1 selects production times; anything else uses the test times.
    actions_times = real_times if REAL == 1 else test_times
    actions_times.update(se_times)
コード例 #17
0
class CommandManager:
    """Registers chat commands and dispatches incoming messages to them.

    Handlers are stored per "command key" (command, optionally qualified by
    sub-command) and matched against incoming text via per-handler regexes.
    Command/channel configuration is persisted in the `command_config` table.
    """

    # Channel identifiers used as keys in command_config rows.
    PRIVATE_CHANNEL = "priv"
    ORG_CHANNEL = "org"
    PRIVATE_MESSAGE = "msg"

    def __init__(self):
        # command_key -> list of handler dicts (regex, callback, help, ...).
        self.handlers = collections.defaultdict(list)
        self.logger = Logger("command_manager")
        # channel value -> human-readable label (see register_command_channel).
        self.channels = {}
        # Messages matching any of these are bot chatter and are ignored.
        self.ignore_regexes = [
            re.compile(" is AFK \(Away from keyboard\) since ", re.IGNORECASE),
            re.compile("I am away from my keyboard right now", re.IGNORECASE),
            re.compile("Unknown command or access denied!", re.IGNORECASE),
            re.compile("I am responding", re.IGNORECASE),
            re.compile("I only listen", re.IGNORECASE),
            re.compile("Error!", re.IGNORECASE),
            re.compile("Unknown command input", re.IGNORECASE),
            re.compile("You have been auto invited", re.IGNORECASE),
        ]

    def inject(self, registry):
        """Resolve collaborator instances from the registry."""
        self.db = registry.get_instance("db")
        self.util: Util = registry.get_instance("util")
        self.access_manager: AccessManager = registry.get_instance("access_manager")
        self.bot: Mangopie = registry.get_instance("mangopie")
        self.character_manager: CharacterManager = registry.get_instance("character_manager")
        self.setting_manager: SettingManager = registry.get_instance("setting_manager")
        self.command_alias_manager = registry.get_instance("command_alias_manager")

    def pre_start(self):
        """Hook packet handlers and declare the built-in command channels."""
        self.bot.add_packet_handler(server_packets.PrivateMessage.id, self.handle_private_message)
        self.bot.add_packet_handler(server_packets.PrivateChannelMessage.id, self.handle_private_channel_message)
        self.register_command_channel("Private Message", self.PRIVATE_MESSAGE)
        self.register_command_channel("Org Channel", self.ORG_CHANNEL)
        self.register_command_channel("Private Channel", self.PRIVATE_CHANNEL)

    def start(self):
        """Register every decorated command method found in the registry."""
        # process decorators
        for _, inst in Registry.get_all_instances().items():
            for name, method in get_attrs(inst).items():
                if hasattr(method, "command"):
                    cmd_name, params, access_level, description, help_file, sub_command = getattr(method, "command")
                    handler = getattr(inst, name)
                    module = self.util.get_module_name(handler)
                    help_text = self.get_help_file(module, help_file)
                    self.register(handler, cmd_name, params, access_level, description, module, help_text, sub_command)

    def register(self, handler, command, params, access_level, description, module, help_text=None, sub_command=None):
        """Register *handler* for *command* (and optional *sub_command*).

        Persists/verifies the command's per-channel configuration in the
        `command_config` table, then stores the handler with the regex built
        from *params*. Registration is rejected when *access_level* is not a
        known access-level label.
        """
        command = command.lower()
        if sub_command:
            sub_command = sub_command.lower()
        else:
            sub_command = ""
        access_level = access_level.lower()
        module = module.lower()
        command_key = self.get_command_key(command, sub_command)

        if help_text is None:
            help_text = self.generate_help(command, description, params)

        if not self.access_manager.get_access_level_by_label(access_level):
            self.logger.error("Could not add command '%s': could not find access level '%s'" % (command, access_level))
            return

        for channel, label in self.channels.items():
            row = self.db.find('command_config', {"command": command, "sub_command": sub_command, 'channel': channel})

            if row is None:
                # add new command commands
                self.db.insert('command_config',
                               {'command': command, 'sub_command': sub_command, 'access_level': access_level,
                                'channel': channel, 'module': module, 'verified': 1, 'enabled': 1})

            elif 'verified' in row and row['verified']:
                if row['module'] != module:
                    self.logger.warning("module different for different forms of command '%s' and sub_command '%s'" % (
                        command, sub_command))
            else:
                # mark command as verified
                self.db.update('command_config',
                               {'module': module, 'command': command, 'channel': channel, 'access_level': access_level,
                                'sub_command': sub_command},
                               {'verified': 1})

        # save reference to command handler
        r = re.compile(self.get_regex_from_params(params), re.IGNORECASE)
        self.handlers[command_key].append(
            {"regex": r, "callback": handler, "help": help_text, "description": description, "params": params})

    def handle_private_message(self, packet: server_packets.PrivateMessage):
        """Route an incoming private message to command processing."""
        # since the command symbol is not required for private messages,
        # the command_str must have length of at least 1 in order to be valid,
        # otherwise it is ignored
        if len(packet.message) < 1 or not self.bot.is_ready():
            return

        for regex in self.ignore_regexes:
            if regex.search(packet.message):
                return

        if packet.message[:1] == '!':
            command_str = packet.message[1:]
        else:
            command_str = packet.message

        self.process_command(
            command_str,
            "msg",
            packet.char_id,
            lambda msg: self.bot.send_private_message(packet.char_id, msg))

    def handle_private_channel_message(self, packet: server_packets.PrivateChannelMessage):
        """Route an incoming private-channel message to command processing."""
        # since the command symbol is required in the private channel,
        # the command_str must have length of at least 2 in order to be valid,
        # otherwise it is ignored
        if len(packet.message) < 2:
            return

        symbol = packet.message[:1]
        command_str = packet.message[1:]
        if symbol == self.setting_manager.get(
                "symbol").get_value() and packet.private_channel_id == self.bot.char_id:
            self.process_command(
                command_str,
                "priv",
                packet.char_id,
                lambda msg: self.bot.send_private_channel_message(msg))

    def process_command(self, message: str, channel: str, char_id, reply):
        """Parse *message*, resolve aliases, check access and run the handler.

        *reply* is a one-argument callable used for every response, so this
        method works identically for all channels. Errors are logged and
        reported back to the sender rather than raised.
        """
        try:
            command_str, command_args = self.get_command_parts(message)

            # check for command alias
            command_alias = self.command_alias_manager.check_for_alias(command_str)

            if command_alias:
                command_str, command_args = self.get_command_parts(
                    command_alias + " " + command_args if command_args else command_alias)

            cmd_configs = self.get_command_configs(command_str, channel, 1)
            cmd_configs = list(cmd_configs)
            if cmd_configs:
                # given a list of cmd_configs that are enabled, see if one has regex that matches incoming command_str
                cmd_config, matches, handler = self.get_matches(cmd_configs, command_args)
                if matches:
                    if self.access_manager.check_access(char_id, cmd_config['access_level']):
                        sender = MapObject(
                            {"name": self.character_manager.resolve_char_to_name(char_id), "char_id": char_id})
                        handler["callback"](channel, sender, reply, self.process_matches(matches, handler["params"]))
                    else:
                        self.access_denied_response(char_id, cmd_config, reply)
                else:
                    # handlers were found, but no handler regex matched
                    help_text = self.get_help_text(char_id, command_str, channel)
                    if help_text:
                        reply(self.format_help_text(command_str, help_text))
                    else:
                        reply("Error! Invalid syntax.")
            else:
                reply("Error! Unknown command.")
        except Exception as e:
            self.logger.error("error processing command: %s" % message, e)
            reply("There was an error processing your request.")

    def get_help_text(self, char, command_str, channel):
        """Collect help text for *command_str* that *char* is allowed to see."""
        data = self.db.find_all('command_config', {'command': command_str, 'channel': channel, 'enabled': 1})
        # filter out commands that character does not have access level for
        data = filter(lambda row: self.access_manager.check_access(char, row['access_level']), data)

        def read_help_text(row):
            # Non-None help strings of every handler for this command key.
            command_key = self.get_command_key(row['command'], row['sub_command'])
            return filter(lambda x: x is not None, map(lambda handler: handler["help"], self.handlers[command_key]))

        content = "\n\n".join(flatmap(read_help_text, data))
        return content if content else None

    def get_help_file(self, module, help_file):
        """Read *help_file* relative to *module*'s directory; None if missing."""
        if help_file:
            try:
                help_file = "./" + module.replace(".", "/") + "/" + help_file
                with open(help_file) as f:
                    return f.read().strip()
            except FileNotFoundError as e:
                self.logger.error("Error reading help file", e)
        return None

    def format_help_text(self, topic, help_text):
        """Wrap *help_text* in a ChatBlob titled with *topic*."""
        return ChatBlob("Help (" + topic + ")", help_text)

    def generate_help(self, command, description, params):
        """Build a default one-line usage string from the param definitions."""
        return description + ":\n" + "<tab><symbol>" + command + " " + " ".join(map(lambda x: x.get_name(), params))

    def access_denied_response(self, char_id, cmd_config, reply):
        """Reply with the standard access-denied message."""
        reply("Error! Access denied.")

    def get_command_key(self, command, sub_command):
        """Return 'command:sub_command', or just 'command' without one."""
        if sub_command:
            return command + ":" + sub_command
        else:
            return command

    def get_command_key_parts(self, command_str):
        """Split a command key back into (command, sub_command)."""
        parts = command_str.split(":", 1)
        if len(parts) == 2:
            return parts[0], parts[1]
        else:
            return parts[0], ""

    def get_matches(self, cmd_configs, command_args):
        """Find the first handler whose regex matches *command_args*.

        Returns (config_row, match, handler) or (None, None, None).
        """
        if command_args:
            command_args = " " + command_args

        for row in cmd_configs:
            command_key = self.get_command_key(row['command'], row['sub_command'])
            handlers = self.handlers[command_key]
            for handler in handlers:
                matches = handler["regex"].match(command_args)
                if matches:
                    return row, matches, handler
        return None, None, None

    def process_matches(self, matches, params):
        """Convert regex groups into typed values via each param definition."""
        groups = list(matches.groups())

        processed = []
        for param in params:
            processed.append(param.process_matches(groups))
        return processed

    def format_matches(self, command_args, matches):
        """Return [command_args, *groups] with leading spaces stripped."""
        # convert matches to list
        m = list(matches.groups())
        m.insert(0, command_args)

        # strip leading spaces for each group, if they group exists
        return list(map(lambda x: x[1:] if x else x, m))

    def get_command_parts(self, message):
        """Split *message* into (lower-cased command, remaining args)."""
        parts = message.split(" ", 1)
        if len(parts) == 2:
            return parts[0].lower(), parts[1]
        else:
            return parts[0].lower(), ""

    def get_command_configs(self, command, channel=None, enabled=1, sub_command=None):
        """Query command_config rows matching the given filters."""
        query = {"command": command}
        if channel:
            query['channel'] = channel
        if enabled:
            query['enabled'] = enabled
        if sub_command:
            query['sub_command'] = sub_command
        return self.db.find_all('command_config', query)

    def get_handlers(self, command_key):
        """Return handlers registered under *command_key*, or None."""
        return self.handlers.get(command_key, None)

    def register_command_channel(self, label, value):
        """Register a command channel *value* with a display *label*."""
        if value in self.channels:
            self.logger.error("Could not register command channel '%s': command channel already registered" % value)
            return

        self.logger.debug("Registering command channel '%s'" % value)
        self.channels[value] = label

    def is_command_channel(self, channel):
        """True when *channel* has been registered as a command channel."""
        return channel in self.channels

    def get_regex_from_params(self, params):
        """Concatenate the params' regexes, anchored at both ends."""
        # params must be wrapped with line-beginning and line-ending anchors in order to match
        # when no params are specified (eg. "^$")
        return "^" + "".join(map(lambda x: x.get_regex(), params)) + "$"
コード例 #18
0
ファイル: train.py プロジェクト: phamchien98/deeppolyp
def main():
    """Parse CLI arguments, resolve user-specific paths, and run training.

    Side effects: may copy the dataset to a local scratch directory,
    creates the save directory, redirects stdout to a log file, and copies
    results to a final (shared) location when training finishes.
    """
    # Get parameters from file parser
    parser = argparse.ArgumentParser(description='DeepPolyp model training')
    parser.add_argument('-dataset', default='polyps', help='Dataset')
    parser.add_argument('-model_name', default='fcn8', help='Model')
    parser.add_argument('-model_file', default='weights.hdf5',
                        help='Model file')
    parser.add_argument('-load_pretrained', default=False,
                        help='Load pretrained model from model file')
    parser.add_argument('-learning_rate', default=0.0001, help='Learning Rate')
    parser.add_argument('-weight_decay', default=0.,
                        help='regularization constant')
    parser.add_argument('--num_epochs', '-ne', type=int, default=1000,
                        help='Optional. Int to indicate the max'
                        'number of epochs.')
    parser.add_argument('-max_patience', type=int, default=100,
                        help='Max patience (early stopping)')
    parser.add_argument('-batch_size', type=int, default=10, help='Batch size')
    parser.add_argument('--optimizer', '-opt', default='rmsprop',
                        help='Optimizer')
    args = parser.parse_args()

    # BUG FIX: the command line delivers strings, and bool("False") is True,
    # so `bool(args.load_pretrained)` enabled pretrained loading for ANY
    # non-empty value, including "-load_pretrained False". Parse explicitly.
    load_pretrained = str(args.load_pretrained).lower() in ('true', '1', 'yes')

    # Parameters
    nClasses = 3
    w_balance = None  # 'median_freq_cost'
    crop_size = (224, 224)  # (288, 384)

    # Experiment name
    experiment_name = "tmp"

    # Define paths according to user
    usr = getuser()
    if usr == "michal":
        # Michal paths
        savepath = '/home/michal/' + experiment_name + '/'
        dataset_path = '/home/michal/polyps/polyps_split2/CVC-912/'
        train_path = dataset_path + 'train/'
        valid_path = dataset_path + 'valid/'
        test_path = dataset_path + 'test/'
        # BUG FIX: final_savepath was undefined on this branch, crashing
        # with NameError at the final copy step. No shared location exists
        # for this user, so results stay in savepath.
        final_savepath = savepath
    elif usr == 'vazquezd' or usr == 'romerosa':
        shared_dataset_path = '/data/lisa/exp/vazquezd/datasets/polyps_split7/'
        dataset_path = '/Tmp/'+usr+'/datasets/polyps_split7/'
        # Copy the data to the local path if not existing
        if not os.path.exists(dataset_path):
            print('The local path {} does not exist. Copying '
                  'dataset...'.format(dataset_path))
            shutil.copytree(shared_dataset_path, dataset_path)
            print('Done.')

        savepath = '/Tmp/'+usr+'/results/deepPolyp/fcn8/paper/'+experiment_name+'/'
        final_savepath = '/data/lisatmp4/' + usr + '/results/deepPolyp/fcn8/' + experiment_name + '/'
        train_path = dataset_path + 'train/'
        valid_path = dataset_path + 'valid/'
        test_path = dataset_path + 'test/'

    elif usr == 'dvazquez':
        shared_dataset_path = '/home/'+usr+'/Datasets/Polyps/'
        dataset_path = '/home/'+usr+'/Datasets/Polyps/'
        # Copy the data to the local path if not existing
        if not os.path.exists(dataset_path):
            print('The local path {} does not exist. Copying '
                  'dataset...'.format(dataset_path))
            shutil.copytree(shared_dataset_path, dataset_path)
            print('Done.')

        savepath = '/home/'+usr+'/Experiments/deepPolyp/'+experiment_name+'/'
        final_savepath = '/home/'+usr+'/Experiments/deepPolyp/'+experiment_name+'/'
        train_path = dataset_path + 'train/'
        valid_path = dataset_path + 'valid/'
        test_path = dataset_path + 'test/'

    else:
        raise ValueError('User unknown, please add your own paths!')

    # Create output folders
    if not os.path.exists(savepath):
        os.makedirs(savepath)

    # Enable log file (everything printed below also lands in logfile.log)
    sys.stdout = Logger(savepath + "logfile.log")
    print (' ---> Experiment: ' + experiment_name + ' <---')

    # Train the network. Numeric args arrive as strings from argparse
    # (no type= given), hence the explicit casts.
    train(dataset=args.dataset,
          model_name=args.model_name,
          learning_rate=float(args.learning_rate),
          weight_decay=float(args.weight_decay),
          num_epochs=int(args.num_epochs),
          max_patience=int(args.max_patience),
          batch_size=int(args.batch_size),
          optimizer=args.optimizer,
          savepath=savepath,
          show_model=False,
          train_path=train_path, valid_path=valid_path, test_path=test_path,
          crop_size=crop_size,
          in_shape=(3, None, None),
          n_classes=nClasses+1,
          gtSet=nClasses,
          void_class=[nClasses],
          w_balance=w_balance,
          weights_file=savepath+args.model_file if load_pretrained else False,
          train_model=True,
          plot_hist=False
          )
    print (' ---> Experiment: ' + experiment_name + ' <---')

    # Publish results to the shared location and drop a 'lock' marker file
    # signalling the copy is complete.
    print('Copying model and other training files to {}'.format(final_savepath))
    start = time.time()
    copy_tree(savepath, final_savepath)
    open(os.path.join(final_savepath, 'lock'), 'w').close()
    print ('Copy time: ' + str(time.time()-start))
コード例 #19
0
ファイル: encoder.py プロジェクト: DavidLopezSaez/Clipper-v2
class Encoder:
    """Re-encodes downloaded clips so they all share the same video/audio
    parameters (resolution, fps, timebase, audio sample rate) before
    concatenation/packaging."""

    # Target stream parameters in ffprobe's native fraction formats.
    __DEFAULT = {
        'FPS': '60/1',
        'WIDTH': '1920',
        'HEIGHT': '1080',
        'TBN': '1/15360',
        'HZ': '1/44100'
    }

    def __init__(self, error_messages, packager):
        self.__error_messages = error_messages  # indexed user-facing error strings
        self.__packager = packager              # package store, looked up by name

        self.__logger = Logger()

    def commander(self, command, args):
        '''
            encode (package name[0]):
                encodes clips to make all clips has the same parameters options
        '''
        if command == 'encode':
            if self.__filter(args, 1):
                name = args[0]
                package = self.__packager.get(name)

                # Unknown package: the command was still ours, so report handled.
                if not package:
                    return True

                folder = package.get_data()['clips_folder']
                for subdir, dirs, files in os.walk(folder):
                    files.sort()
                    for file in files:
                        mp4_file = Path(folder) / file
                        opt = self.__check_video(mp4_file)
                        if opt == '':
                            # Clip already matches the target parameters.
                            continue
                        self.__logger.log('Re-encoding video {}'.format(mp4_file))
                        # NOTE(review): the path is spliced into a shell string,
                        # so names with spaces/metacharacters would break; paths
                        # here are project-generated numbered files.
                        cmd = 'ffmpeg -i ' + str(mp4_file) + opt + str(mp4_file).replace('.mp4', '') + 'converted.mp4'
                        subprocess.run(cmd, capture_output=True, shell=True)

                        # Drop the original; the 'converted' file replaces it.
                        os.remove(mp4_file)
                self.__logger.separator()
            return True
        return False

    def __filter(self, args, amount):
        """Return True when at least *amount* arguments were supplied;
        otherwise print the configured error and return False."""
        if len(args) >= amount:
            return True
        print(ConsoleColors.RED + 'Error: {0}'.format(self.__error_messages[0]) + ConsoleColors.RESET)
        return False  # BUG FIX: previously fell through, returning None

    def __check_video(self, video):
        """Probe *video* with ffprobe and return the ffmpeg option string
        needed to normalise it, or '' when it already matches __DEFAULT."""
        cmd = 'ffprobe -v error -of json -show_entries stream=time_base,r_frame_rate,width,height ' + str(video)
        r = os.popen(cmd).read()
        j_streams = json.loads(r)['streams']

        # streams[0] -> video stream (the one with 'width'), streams[1] -> audio.
        streams = list([{}, {}])
        for j_stream in j_streams:
            if 'width' in j_stream.keys():
                streams[0] = j_stream
            else:
                streams[1] = j_stream

        opt = ''
        if str(streams[0]['width']) != self.__DEFAULT['WIDTH']:
            # Wrong resolution: rescale and force timescale + fps too.
            opt = opt + ' -vf scale=' + self.__DEFAULT['WIDTH'] + ':' + self.__DEFAULT['HEIGHT']
            opt = opt + ' -video_track_timescale ' + self.__DEFAULT['TBN'][2:]  # '1/15360' -> '15360'
            opt = opt + ' -r ' + self.__DEFAULT['FPS'][:-2]                     # '60/1' -> '60'
        else:
            if streams[0]['time_base'] != self.__DEFAULT['TBN']:
                opt = opt + ' -video_track_timescale ' + self.__DEFAULT['TBN'][2:]
            if streams[0]['r_frame_rate'] != self.__DEFAULT['FPS']:
                opt = opt + ' -r ' + self.__DEFAULT['FPS'][:-2]

        if streams[1]['time_base'] != self.__DEFAULT['HZ']:
            opt = opt + ' -ar ' + self.__DEFAULT['HZ'][2:]                      # '1/44100' -> '44100'

        if opt != '':
            # Trailing space so the options splice cleanly into the ffmpeg command.
            opt = opt + ' '

        return opt
コード例 #20
0
ファイル: gameserver.py プロジェクト: dakinfemiwa/Messaging
 def receive(self):
     """Main select() loop: accept new clients and service readable sockets.

     Messages are '<>'-delimited commands: DISCONNECT / USERNAME / CONNECT4 /
     ONLINE / CLIENT_INFORMATION; anything else is broadcast to all clients.
     """
     Logger.log(f'Started listening for connections at [{self.EXTERNAL_IP}:{self.PORT}]')
     Logger.log(f'Accepting local connections at [127.0.0.1:{self.PORT}]')
     while self.serverStatus:
         try:
             # Block until at least one socket is readable.
             read_sockets, write_sockets, error_sockets = select.select(self.LIST, [], [])
             for sock in read_sockets:
                 if sock == self.serverSocket:
                     # New inbound connection on the listening socket.
                     sockfd, address = self.serverSocket.accept()
                     self.LIST.append(sockfd)
                     Logger.log(f'Client [{address[0]}:{address[1]}] connected to the server.', 'CONNECT')
                 else:
                     # NOTE(review): 'address' below is whatever the most recent
                     # accept() produced, not necessarily this sock's peer — it
                     # can be stale or even unbound on the first iteration.
                     # Consider sock.getpeername() instead; TODO confirm.
                     try:
                         receivedData = sock.recv(self.BUFFER_SIZE, )
                     except:
                         # recv failed: treat as an abrupt disconnect and clean up.
                         try:
                             try:
                                 disconnected_user = self.connectedUsers[address]
                                 del self.connectedUsers[address]
                                 del self.extendedUsers[disconnected_user]
                             except:
                                 # NOTE(review): '******' looks like a scrubbed
                                 # placeholder from the published source.
                                 disconnected_user = '******'
                             Logger.log(f'Client [{address[0]}:{address[1]}] ({disconnected_user}) disconnected from the server.', 'DISCONNECT')
                             self.broadcast(f'({disconnected_user}) left the server')
                             self.broadcast(f'LEFT<>{disconnected_user}')
                         except Exception as error:
                             Logger.error(error)
                         sock.close()
                         self.LIST.remove(sock)
                         continue
                     if receivedData:
                         arguments = receivedData.decode().split('<>')
                         if 'DISCONNECT' in receivedData.decode():
                             # Client requested a graceful disconnect.
                             try:
                                 disconnected_user = self.connectedUsers[address]
                                 del self.connectedUsers[address]
                                 del self.extendedUsers[disconnected_user]
                             except:
                                 disconnected_user = '******'
                             Logger.log(f'Client [{address[0]}:{address[1]}] ({disconnected_user}) disconnected from the server.', 'DISCONNECT')
                             Logger.log(f'Received quit command from client [{address[0]}:{address[1]}]')
                             self.broadcast(f'({disconnected_user}) left the server')
                             self.broadcast(f'LEFT<>{disconnected_user}')
                             sock.close()
                             self.LIST.remove(sock)
                             continue
                         elif arguments[0] == 'USERNAME':
                             # Legacy/simple registration path: bind name to socket.
                             self.connectedUsers[address] = arguments[1]
                             self.extendedUsers[arguments[1]] = sock
                             Logger.log(f'Allowed connection from [{address[0]}:{address[1]}] ({arguments[1]})', 'CONNECT')
                         elif arguments[0] == 'CONNECT4':
                             # In-game command: delegate to the game handler.
                             self.handle_game_commands(arguments, sock, self.connectedUsers[address])
                         elif arguments[0] == 'ONLINE':
                             # Reply with a ';'-separated list of connected users.
                             userList = ''
                             for user in self.connectedUsers:
                                 userList = userList + self.connectedUsers[user] + ';'
                             sock.send(str.encode('USER_LIST<>' + userList))
                             Logger.log(f'[{address[0]}:{address[1]}] ({self.connectedUsers[address]}) requested user list.')
                             Logger.log(f'Current connected users: {userList.replace(";", " ")}')
                         elif arguments[0] == 'CLIENT_INFORMATION':
                             # Full handshake: validate username, version, then
                             # register and send back server information.
                             Logger.log(f'Received client information from [{address[0]}:{address[1]}]')
                             clientInformation = ast.literal_eval(arguments[1])
                             clientData = []
                             for field in ['Username', 'Version', 'Rank']:
                                 clientData.append(str([clientInformation['Client Information'][field]][0]))
                             if clientData[0] == '' or clientData[0] == ' ':
                                 # Reject empty/blank usernames.
                                 sock.send(b'CONN_ERROR<>Invalid username (username not allowed)')
                                 Logger.log(f'Rejected connection from [{address[0]}:{address[1]}] due to invalid username.')
                                 sock.close()
                                 self.LIST.remove(sock)
                             else:
                                 userListByName = []
                                 for user in self.connectedUsers:
                                     userListByName.append(self.connectedUsers[user])
                                 if clientData[0] in userListByName:
                                     # Reject duplicate usernames.
                                     sock.send(b'CONN_ERROR<>A user with that name is already connected (use a different username)')
                                     Logger.log(f'Rejected connection from [{address[0]}:{address[1]}] ({clientData[0]}) due to duplicate username.', 'DISCONNECT')
                                     sock.close()
                                     self.LIST.remove(sock)
                                 else:
                                     if float(clientData[1]) < self.MIN_VERSION:
                                         # Reject clients older than the minimum version.
                                         sock.send(str.encode(f'CONN_ERROR<>Client is out of date (latest version is {str(self.MIN_VERSION)})'))
                                         Logger.log(f'Rejected connection from [{address[0]}:{address[1]}] ({clientData[0]}) due to outdated client [{clientData[1]}]', 'DISCONNECT')
                                         sock.close()
                                         self.LIST.remove(sock)
                                     else:
                                         # Accept: register the user and send server info
                                         # with a live uptime value.
                                         self.connectedUsers[address] = clientData[0]
                                         self.extendedUsers[clientData[0]] = sock
                                         sock.send(b'CONN_SUCCESS<>Successfully connected to the server.')
                                         Logger.log(f'Allowed connection from [{address[0]}:{address[1]}] ({clientData[0]}) [{clientData[1]}]', 'CONNECT')
                                         time.sleep(0.10)
                                         start_dt = datetime.strptime(self.serverInformation['Server Information']['Uptime'], '%H:%M:%S')
                                         end_dt = datetime.strptime(datetime.now().strftime('%H:%M:%S'), '%H:%M:%S')
                                         diff = (end_dt - start_dt)
                                         # NOTE(review): serverInformationTemp aliases
                                         # self.serverInformation (no copy), hence the
                                         # restore of 'Uptime' afterwards.
                                         serverInformationTemp = self.serverInformation
                                         serverInformationTemp['Server Information']['Uptime'] = str(diff)
                                         time.sleep(0.10)
                                         sock.send(str.encode(f'SERVER_INFORMATION<>{str(serverInformationTemp)}'))
                                         Logger.log(f'Sent server information to client [{address[0]}:{address[1]}] ({clientData[0]})')
                                         self.serverInformation['Server Information']['Uptime'] = self.LAUNCH_TIME
                         else:
                             # Unrecognised payload: relay it to every client.
                             self.broadcast(receivedData.decode())
         except Exception as error:
             Logger.error(error)
コード例 #21
0
from src.dax31.ui.testconfig.ui_config import *
from tools.common import *
from tools.logger import Logger
from tools.util import *

# register the logging configuration
test_type_in_module = "ui"
_ui_current_directory = dirname(realpath(__file__))
# Log file path: ./log/ui/<module name>.log next to this file.
logging_file_name = abspath(
    join(_ui_current_directory, 'log', test_type_in_module, __name__))
# log_level='1' — string level understood by the project Logger; TODO confirm mapping.
logger = Logger(log_name=logging_file_name + '.log',
                log_level='1',
                logger_name=test_type_in_module).get_log

# test_method_list = list()
# Discover runnable UI test methods by inspecting the packaged test APK.
test_method_list = generate_valid_test_method_from_apk(
    _ui_current_directory,
    ui_apk_folder_name,
    ui_test_apk_file_name,
    _logger_name=test_type_in_module)


# @pytest.mark.parametrize('test_method', test_method_list)
# def test_ui(test_method):
def test_ui_by_method_filter():
    """Run the discovered UI test methods after UI setup.

    NOTE(review): the body appears truncated in this excerpt — it prepares
    the class and XML result lists, but the execution code is not visible
    here, and __ui_setup is not defined in this chunk.
    """
    global test_method_list

    __ui_setup()

    _test_class_list = test_method_list
    _xml_file_list = list()
コード例 #22
0
ファイル: emqx.py プロジェクト: morestart/auto_deploy
 def start_emqx(self):
     """Start the EMQX broker via its service wrapper; log on failure."""
     start_command = "sudo emqx start"
     try:
         subprocess.run(start_command, shell=True, check=True)
     except subprocess.CalledProcessError:
         Logger.error('启动失败')
コード例 #23
0
import json
import time
import requests
from threading import Thread
from settings import lotteries_predict_data_db as lpdb, STAGE_COUNT, avoid_experts_db, PER_EXPERT_ARTICLES_LIST_MAX_PAGE
from settings import EXPERT_URL
from tools.common import get_the_next_stage
from tools.save_data import SaveLotteriesData as SLD
from tools.ua import ua
from settings import saved_db
from settings import LOTTERY_DICT, miss_urls_db
from tools.set_proxies import SetProxies as SP
from tools.logger import Logger
logger = Logger(__name__).logger


class GetExpertsUrls(Thread):
    """Worker thread that collects article-list URLs for one expert of one
    lottery, paging through the expert's article listing."""

    def __init__(self, lottery_name, expert_id, data_type, has_missed_list_urls=0):
        super(GetExpertsUrls, self).__init__()
        self.lottery_name = lottery_name
        self.expert_id = expert_id
        self.data_type = data_type
        self.max_page = PER_EXPERT_ARTICLES_LIST_MAX_PAGE       # page range cap when fetching the article list
        self.url = EXPERT_URL

        # Filled in later while the thread runs.
        self.page = None
        self.params = None
        self.lottery_id = None

        # Per-lottery collection for article-list URLs (backend from settings).
        self.urls_db = lpdb[LOTTERY_DICT[self.lottery_name] + '_articles_list']     #
コード例 #24
0
class Downloader:
    """Downloads top Twitch clips for every streamer in a package.

    Uses the Twitch Helix API with credentials read from the tokens file,
    saves clips as zero-padded numbered .mp4 files, discards clips whose
    recording window overlaps an already-kept clip, and records the
    most-clipped games in the package metadata.
    """

    def __init__(self, error_messages, packager, p_tokens_file):
        # Shared services: user-facing error strings, the package store,
        # and the path of the JSON file with Twitch API credentials.
        self.__error_messages = error_messages
        self.__packager = packager
        self.__p_tokens_file = p_tokens_file

        self.__logger = Logger()

    def commander(self, command, args):
        '''
            download (package name[0]):
                downloads clips from given package
        '''
        if command == 'download':
            filter = self.__filter(args, 1)
            if filter:
                name = args[0]
                package = self.__packager.get(name)

                # NOTE(review): unlike Encoder.commander, a missing package is
                # not checked here — package=None would raise below.
                for streamer in package.get_data()['streamers']:
                    self.__download(streamer, package)

            return True
        return False

    def __filter(self, args, ammount):
        # True when enough arguments were supplied; otherwise prints the
        # configured error and implicitly returns None (falsy).
        if len(args) >= ammount:
            return True
        else:
            print(ConsoleColors.RED +
                  'Error: {0}'.format(self.__error_messages[0]) +
                  ConsoleColors.RESET)

    def __check_paths(self, path):
        # NOTE(review): self.__p_packages_file is never assigned anywhere in
        # this class — calling this would raise AttributeError. Presumably it
        # should test 'path' itself; confirm before using.
        if not os.path.exists(self.__p_packages_file):
            os.mkdir(path)

    def __download(self, streamer, package):
        """Download up to the package's clip limit for *streamer*."""
        with open(self.__p_tokens_file, 'r') as f:
            client_id = json.load(f)['twitch']['client_id']

        # check_username returns "access_token&broadcaster_id".
        # NOTE(review): it can also return False, which would make this
        # .split call raise AttributeError — unhandled here.
        access_token, broad_id = Downloader.check_username(streamer).split("&")

        # Clip search window, derived from the package's period setting.
        if package.get_data()['period'] == 'week':
            start = generate(datetime.utcnow().replace(tzinfo=pytz.utc) -
                             timedelta(days=7))
            end = generate(datetime.utcnow().replace(tzinfo=pytz.utc))
        elif package.get_data()['period'] == 'month':
            start = generate(datetime.utcnow().replace(tzinfo=pytz.utc) -
                             timedelta(days=30))
            end = generate(datetime.utcnow().replace(tzinfo=pytz.utc))

        count = 0
        pagination = ''
        times = list()          # kept clips' {created_at, ended_at} windows
        game_ids = dict()       # game_id -> number of clips seen for it
        while count < int(package.get_data()['limit']):
            r = requests.get(
                'https://api.twitch.tv/helix/clips?broadcaster_id={}&first={}&started_at={}&ended_at={}&after={}'
                .format(broad_id,
                        package.get_data()['limit'], start, end, pagination),
                headers={
                    'Authorization': 'Bearer ' + access_token,
                    'Client-ID': client_id
                })

            clips = r.json()['data']
            if len(clips) > 0:
                for clip in clips:
                    if type(clip) == dict:
                        # Tally which game this clip belongs to.
                        if clip['game_id'] not in game_ids:
                            game_ids[clip['game_id']] = 1
                        else:
                            game_ids[clip['game_id']] += 1

                        thumbnail_url = clip['thumbnail_url']

                        # The raw .mp4 URL is the thumbnail URL up to '-preview'.
                        mp4_url = thumbnail_url.split('-preview',
                                                      1)[0] + '.mp4'

                        # Zero-pad single-digit clip numbers: 1 -> '01'.
                        number = count + 1
                        if len(str(number)) == 1:
                            number = '0' + str(number)

                        mp4_name = str(
                            Path(package.get_data()['clips_folder']) /
                            str(str(number) + '.mp4'))

                        self.__logger.log("Downloading: " + str(mp4_name))

                        res = requests.get(mp4_url)
                        with open(mp4_name, 'wb') as f:
                            f.write(res.content)

                        # Clip duration via ffprobe -> real-time window of the clip.
                        cmd = 'ffprobe -show_entries format=duration -v quiet -of csv="p=0" ' + mp4_name
                        duration = float(os.popen(cmd).read())
                        created_at = datetime.strptime(clip['created_at'],
                                                       '%Y-%m-%dT%H:%M:%SZ')
                        ended_at = created_at + timedelta(0, duration)

                        # Discard the clip if its window overlaps a kept clip.
                        # NOTE(review): the else-branch appends once per
                        # comparison, so 'times' accumulates duplicates of the
                        # same window — looks unintended; confirm.
                        exist = False
                        if len(times) > 0:
                            for i in range(len(times)):
                                if times[i]['created_at'] < created_at < times[
                                        i]['ended_at'] or times[i][
                                            'created_at'] < ended_at < times[
                                                i]['ended_at']:
                                    exist = True
                                    break
                                else:
                                    time_meta = {
                                        'created_at': created_at,
                                        'ended_at': ended_at
                                    }
                                    times.append(time_meta)
                        else:
                            time_meta = {
                                'created_at': created_at,
                                'ended_at': ended_at
                            }
                            times.append(time_meta)

                        if exist:
                            os.remove(mp4_name)
                        else:
                            count += 1
                            if count >= int(package.get_data()['limit']):
                                break

                # NOTE(review): raises KeyError if the last page carries no
                # 'cursor' while count is still below the limit.
                pagination = r.json()['pagination']['cursor']
            else:
                raise Exception('Clips not found')

        # Record the (up to) two most-clipped games in the package metadata.
        games_sorted = sorted(game_ids.items(),
                              key=lambda x: x[1],
                              reverse=True)

        stop = 1
        if len(games_sorted) > 1:
            stop = 2

        games = list()
        for i in range(stop):
            r = requests.get('https://api.twitch.tv/helix/games?id=' +
                             games_sorted[i][0],
                             headers={
                                 'Authorization': 'Bearer ' + access_token,
                                 'Client-ID': client_id
                             })

            games.append(r.json()['data'][0]['name'])

        package.get_data()['additional_info']['games'] = games
        package.update()
        self.__logger.separator()

    @staticmethod
    def check_username(username):
        """Resolve *username* to "access_token&broadcaster_id" via the Twitch
        OAuth client-credentials flow; returns False on any failure."""
        with open(Path('tokens/tokens.json'), 'r') as f:
            credentials = json.load(f)['twitch']

        try:
            r = requests.post(
                'https://id.twitch.tv/oauth2/token?client_id={0}&client_secret={1}&grant_type=client_credentials'
                .format(credentials['client_id'],
                        credentials['client_secret']))
            access_token = r.json()['access_token']

            # NOTE(review): the next line is corrupted in this source — the
            # '******' marker replaced real code (likely
            # `+ username, headers={`), so this does not parse as-is.
            r = requests.get('https://api.twitch.tv/helix/users?login='******'Authorization': 'Bearer ' + access_token,
                                 'Client-ID': credentials['client_id']
                             })
            broad_id = r.json()['data'][0]['id']

            if not broad_id:
                print('Bad request, check username or user banned')
                return False
        except:
            print('Failed to connect to Twitch API')
            return False

        return access_token + '&' + broad_id
コード例 #25
0
ファイル: emqx.py プロジェクト: morestart/auto_deploy
 def emqx_config_explain(self):
     """Print (in Chinese) where the EMQX config lives, how to disable
     anonymous access, and how to enable username/password authentication.

     The strings are user-facing runtime output and are left untranslated.
     """
     Logger.info("配置路径:")
     Logger.info("/etc/emqx")
     Logger.info("取消匿名访问模式:")
     Logger.info(
         "使用sudo nano /etc/emqx/emqx.conf 编辑配置文件,将allow_anonymous设置为false, ctrl o保存"
     )
     Logger.info("鉴权设置:")
     Logger.info(
         "使用此命令编辑sudo nano /etc/emqx/etc/plugins/emqx_auth_username.conf, "
         "注释掉现有内容, 打开auth.user.1的用户名和密码")
     Logger.warn("在启动鉴权时,请先在dashboard中启动鉴权插件")
コード例 #26
0
    def __init__(self, error_messages, packager, p_tokens_file):
        """Store collaborators and create this command's logger.

        p_tokens_file is the path of the JSON file holding API credentials.
        """
        # Assignments are independent; order is cosmetic.
        self.__p_tokens_file = p_tokens_file
        self.__packager = packager
        self.__error_messages = error_messages

        self.__logger = Logger()
コード例 #27
0
from core.predictor import *
from tools.cache import Cache
from tools.pickler import reload_pickle_cache, init_pickle_cache
from tools.po_cache_ret import POCacheRet
from tools.logger import Logger

# Python 2/3 compatibility shim for default string encoding.
if sys.version_info[:2] in [(2, 6), (2, 7)]:
    reload(sys)
    sys.setdefaultencoding('utf-8')
elif sys.version_info[:2] in [(3, 6), (3, 7)]:
    # pylint: disable=E0401, E0611, E1101
    import importlib
    importlib.reload(sys)

logger = Logger.get_instance(ConfManage.getString("LOG_BASE_NAME"))
# Import Thrift generated classes
sys.path.append('gen-py')
# BUG FIX: the original wrote glob.glob('...lib*'[0]) — indexing the pattern
# STRING (yielding '.') and then inserting the resulting LIST into sys.path.
# Index the glob result instead, and only insert when a build dir exists.
_thrift_lib_dirs = glob.glob('../../lib/py/build/lib*')
if _thrift_lib_dirs:
    sys.path.insert(0, _thrift_lib_dirs[0])
try:
    from eta import EstimateTaskDurations
    from eta.ttypes import Duration, Range, InvalidInput, ModelMissing, Prediction, Params, \
        Prediction_showup, Prediction_quote, Prediction_delivery, PredictionResult, Data
except ImportError:
    logger.error('Cannot find thrift classes.')
    # BUG FIX: stray '(' removed from the error message.
    logger.error('Have you run `thrift --gen py eta.thrift`?')
    raise


def __validate_coordinates__(query):
    if query.showup_distance is None:
コード例 #28
0
class Cutout:
    """Fetch, cache, and plot a survey image cutout around a sky position.

    The survey name selects the retrieval backend (local FITS images for
    RACS / VAST / VLASS, HTTP APIs for PanSTARRS / Skymapper / DECam LS,
    SkyView otherwise).  Downloaded data are cached under ``cutout_cache``.
    """

    def __init__(self, survey, position, radius, **kwargs):
        """Fetch the cutout on construction.

        Args:
            survey: survey key (row label in the module-level SURVEYS table),
                or a path to a local FITS file.
            position: astropy SkyCoord of the cutout centre.
            radius: cutout radius in degrees.
            **kwargs: optional settings (psf, cmap, band, verbose, log,
                basesurvey, ...); kept on ``self.kwargs`` for later use.

        Raises:
            FITSException: if the underlying fetch fails for any reason.
        """
        self.survey = survey
        self.position = position
        self.ra = self.position.ra.to_value(u.deg)
        self.dec = self.position.dec.to_value(u.deg)
        self.radius = radius
        self.basesurvey = kwargs.get('basesurvey', 'racsI')
        self.psf = kwargs.get('psf')
        self.cmap = kwargs.get('cmap', 'gray_r')
        self.color = 'k' if self.cmap == 'hot' else 'black'
        self.band = kwargs.get('band', 'g')

        level = 'DEBUG' if kwargs.get('verbose') else 'INFO'
        self.logger = Logger(__name__, kwargs.get('log'),
                             streamlevel=level).logger
        self.logger.propagate = False

        self.kwargs = kwargs

        try:
            self._get_cutout()
        except Exception as e:
            # Re-raise any fetch failure as a FITSException tagged with the
            # survey name.
            msg = f"{survey} failed: {e}"
            raise FITSException(msg)
        finally:
            # Source overlays are only populated for RACS / VAST surveys.
            if 'racs' not in self.survey and 'vast' not in self.survey:
                self.plot_sources = False
                self.plot_neighbours = False

    def __repr__(self):
        return f"Cutout({self.survey}, ra={self.ra:.2f}, dec={self.dec:.2f})"

    def _get_source(self):
        """Locate selavy catalogue components near the cutout position.

        Sets ``self.source`` (nearest component within the positional
        uncertainty, or None), ``self.neighbours``, and the plotting flags.
        """
        try:
            # Field identifier (e.g. '2004+55A') extracted from the image
            # path selects the matching selavy component file(s).
            pattern = re.compile(r'\S*(\d{4}[+-]\d{2}[AB])\S*')
            selpath = SURVEYS.loc[self.survey]['selavy']
            sel = glob.glob(f'{selpath}/*components.txt')
            sel = [s for s in sel if pattern.sub(r'\1', self.filepath) in s]

            if len(sel) > 1:
                df = pd.concat([pd.read_fwf(s, skiprows=[
                    1,
                ]) for s in sel])
            else:
                df = pd.read_fwf(sel[0], skiprows=[
                    1,
                ])
            coords = SkyCoord(df.ra_deg_cont, df.dec_deg_cont, unit=u.deg)
            d2d = self.position.separation(coords)
            df['d2d'] = d2d
            # Keep components within half the cutout radius, nearest first.
            sources = df.iloc[np.where(d2d.deg < 0.5 * self.radius)[0]]
            sources = sources.sort_values('d2d', ascending=True)

            # pos_err is divided by 3600 before the comparison, so it is
            # presumably in arcsec while d2d is in degrees — TODO confirm.
            if any(sources.d2d < self.pos_err / 3600):
                self.source = sources.iloc[0]
                self.neighbours = sources.iloc[1:]
                self.plot_sources = True
            else:
                self.source = None
                self.neighbours = sources
                self.plot_sources = False

            self.plot_neighbours = self.kwargs.get('neighbours', True)

            self.logger.debug(f'Source: \n {self.source}')
            if len(self.neighbours) > 0:
                nn = self.neighbours.iloc[0]
                self.logger.debug(
                    f'Nearest neighbour coords: \n {nn.ra_deg_cont, nn.dec_deg_cont}'
                )
                self.logger.debug(
                    f'Nearest 5 Neighbours \n {self.neighbours.head()}')

        except IndexError:
            # sel[0] raises IndexError when no selavy file matched.
            self.plot_sources = False
            self.plot_neighbours = False
            self.logger.warning('No nearby sources found.')

    def _get_cutout(self):
        """Dispatch to the retrieval backend appropriate for this survey."""

        # Ensure the per-survey cache directory exists before downloading.
        if not os.path.exists(cutout_cache + self.survey):
            msg = f"{cutout_cache}{self.survey} cutout directory does not exist, creating."
            self.logger.info(msg)
            os.makedirs(cutout_cache + self.survey)

        if os.path.isfile(self.survey):
            # A direct path to a FITS file is treated as a local image.
            self._get_local_cutout()
        elif 'racs' in self.survey or 'vast' in self.survey or 'vlass' in self.survey:
            self._get_local_cutout()
        elif self.survey == 'skymapper':
            self._get_skymapper_cutout()
        elif self.survey == 'panstarrs':
            self._get_panstarrs_cutout()
        elif self.survey == 'decam':
            self._get_decam_cutout()
        else:
            self._get_skyview_cutout()

    def _get_local_cutout(self):
        """Fetch cutout data via local FITS images (e.g. RACS / VLASS)."""

        fields = self._find_image()
        assert len(
            fields
        ) > 0, f"No fields located at {self.position.ra:.2f}, {self.position.dec:.2f}"
        # Use the field whose centre is closest to the requested position.
        closest = fields[fields.dist_field_centre ==
                         fields.dist_field_centre.min()].iloc[0]
        image_path = SURVEYS.loc[self.survey]['images']

        if self.survey == 'vlass':
            filepath = f'{closest.epoch}/{closest.tile}/{closest.image}/{closest.filename}'
            image_path = vlass_path
        elif 'racs' in self.survey:
            # Last character of the survey key encodes polarisation (I / V).
            pol = self.survey[-1]
            if on_system == 'ada':
                filepath = f'RACS_test4_1.05_{closest.field}.fits'
            else:
                filepath = f'RACS_{closest.field}.EPOCH00.{pol}.fits'
        elif 'vast' in self.survey:
            # e.g. 'vastp1I' -> epoch '1', polarisation 'I'.
            pattern = re.compile(r'vastp(\dx*)([IV])')
            epoch = pattern.sub(r'\1', self.survey)
            pol = pattern.sub(r'\2', self.survey)
            filepath = f'VAST_{closest.field}.EPOCH0{epoch}.{pol}.fits'
        else:
            filepath = f'*{closest.field}*0.restored.fits'

        try:
            self.filepath = glob.glob(image_path + filepath)[0]
        except IndexError:
            raise FITSException(
                f'Could not match {self.survey} image filepath: \n{image_path + filepath}'
            )

        with fits.open(self.filepath) as hdul:
            self.header, data = hdul[0].header, hdul[0].data
            wcs = WCS(self.header, naxis=2)
            self.mjd = Time(self.header['DATE']).mjd

            try:
                # Try a cube layout with two leading axes first (indexing
                # [0, 0, :, :]); fall back to a plain 2D image.
                cutout = Cutout2D(data[0, 0, :, :],
                                  self.position,
                                  self.radius * u.deg,
                                  wcs=wcs)
            except IndexError:
                cutout = Cutout2D(data,
                                  self.position,
                                  self.radius * u.deg,
                                  wcs=wcs)
            # Scale by 1000 — the plot labels flux density in mJy/beam.
            self.data = cutout.data * 1000
            self.wcs = cutout.wcs

        if 'racs' in self.survey or 'vast' in self.survey:
            self.pos_err = SURVEYS.loc[self.basesurvey].pos_err
            self._get_source()
        else:
            # Probably using vlass, yet to include aegean catalogs
            self.plot_sources = False
            self.plot_neighbours = False

    def _get_panstarrs_cutout(self):
        """Fetch cutout data via PanSTARRS DR2 API."""
        # Two-stage format: the literal '{:.3f}' placeholders survive this
        # first .format call and are filled by the second one below.
        path = cutout_cache + 'panstarrs/{}_{}arcmin_{}_{}.fits'.format(
            self.band,
            '{:.3f}',
            '{:.3f}',
            '{:.3f}',
        )
        imgpath = path.format(self.radius * 60, self.ra, self.dec)
        if not os.path.exists(imgpath):
            # radius_deg * 14400 — pixel count for the request; TODO confirm
            # the intended pixel scale / radius-vs-diameter convention.
            pixelrad = int(self.radius * 120 * 120)
            service = "https://ps1images.stsci.edu/cgi-bin/ps1filenames.py"
            url = (
                f"{service}?ra={self.ra}&dec={self.dec}&size={pixelrad}&format=fits"
                f"&filters=grizy")
            table = Table.read(url, format='ascii')

            msg = f"No PS1 image at {self.position.ra:.2f}, {self.position.dec:.2f}"
            assert len(table) > 0, msg

            urlbase = (
                f"https://ps1images.stsci.edu/cgi-bin/fitscut.cgi?"
                f"ra={self.ra}&dec={self.dec}&size={pixelrad}&format=fits&red="
            )

            # Order downloads y, z, i, r, g (index within "yzirg").
            flist = ["yzirg".find(x) for x in table['filter']]
            table = table[np.argsort(flist)]

            for row in table:
                # NOTE(review): self.mjd ends up as the MJD of the *last*
                # filter downloaded — confirm this is intended.
                self.mjd = row['mjd']
                filt = row['filter']
                url = urlbase + row['filename']
                path = cutout_cache + 'panstarrs/{}_{}arcmin_{}_{}.fits'.format(
                    filt,
                    '{:.3f}',
                    '{:.3f}',
                    '{:.3f}',
                )
                path = path.format(self.radius * 60, self.ra, self.dec)

                img = requests.get(url, allow_redirects=True)

                if not os.path.exists(path):
                    with open(path, 'wb') as f:
                        f.write(img.content)

        # Read back the cached file for the requested band only.
        with fits.open(imgpath) as hdul:
            self.header, self.data = hdul[0].header, hdul[0].data
            self.wcs = WCS(self.header, naxis=2)

    def _get_skymapper_cutout(self):
        """Fetch cutout data via Skymapper API."""

        path = cutout_cache + self.survey + '/dr2_jd{:.3f}_{:.3f}arcmin_{:.3f}_{:.3f}'
        linka = 'http://api.skymapper.nci.org.au/aus/siap/dr2/'
        linkb = 'query?POS={:.5f},{:.5f}&SIZE={:.3f}&BAND=all&RESPONSEFORMAT=CSV'
        linkc = '&VERB=3&INTERSECT=covers'
        sm_query = linka + linkb + linkc

        link = linka + 'get_image?IMAGE={}&SIZE={}&POS={},{}&FORMAT=fits'

        table = requests.get(sm_query.format(self.ra, self.dec, self.radius))
        df = pd.read_csv(io.StringIO(table.text))
        assert len(
            df
        ) > 0, f'No Skymapper image at {self.position.ra:.2f}, {self.position.dec:.2f}'

        # Use the first z-band exposure returned by the SIAP query.
        df = df[df.band == 'z']
        self.mjd = df.iloc[0]['mjd_obs']
        # NOTE(review): this rebinds 'link' to the row's get_image URL,
        # discarding the template built above — confirm intended.
        link = df.iloc[0].get_image

        img = requests.get(link)

        path = path.format(self.mjd, self.radius * 60, self.ra, self.dec)

        if not os.path.exists(path):
            with open(path, 'wb') as f:
                f.write(img.content)

        with fits.open(path) as hdul:
            self.header, self.data = hdul[0].header, hdul[0].data
            self.wcs = WCS(self.header, naxis=2)

    def _get_decam_cutout(self):
        """Fetch cutout data via DECam LS API."""
        # 0.262 arcsec/pixel (see pixscale in the URL); requests are
        # clamped to 512 px here.
        size = int(self.radius * 3600 / 0.262)
        if size > 512:
            size = 512
            maxradius = size * 0.262 / 3600
            self.logger.warning(
                f"Using maximum DECam LS cutout radius of {maxradius:.3f} deg")

        link = f"http://legacysurvey.org/viewer/fits-cutout?ra={self.ra}&dec={self.dec}"
        link += f"&size={size}&layer=dr8&pixscale=0.262&bands={self.band}"
        img = requests.get(link)

        # NOTE(review): self.mjd is not assigned anywhere in this method
        # before use — verify it is set upstream on the 'decam' path.
        path = cutout_cache + self.survey + '/dr8_jd{:.3f}_{:.3f}arcmin_{:.3f}_{:.3f}_{}band'
        path = path.format(self.mjd, self.radius * 60, self.ra, self.dec,
                           self.band)
        if not os.path.exists(path):
            with open(path, 'wb') as f:
                f.write(img.content)

        with fits.open(path) as hdul:
            self.header, self.data = hdul[0].header, hdul[0].data
            self.wcs = WCS(self.header, naxis=2)

        msg = f"No DECam LS image at {self.position.ra:.2f}, {self.position.dec:.2f}"
        assert self.data is not None, msg

    def _get_skyview_cutout(self):
        """Fetch cutout data via SkyView API."""

        sv = SkyView()
        path = cutout_cache + self.survey + '/{:.3f}arcmin_{:.3f}_{:.3f}.fits'
        path = path.format(self.radius * 60, self.ra, self.dec)
        progress = self.kwargs.get('progress', False)

        if not os.path.exists(path):
            skyview_key = SURVEYS.loc[self.survey].sv
            try:
                hdul = sv.get_images(position=self.position,
                                     survey=[skyview_key],
                                     radius=self.radius * u.deg,
                                     show_progress=progress)[0][0]
            except IndexError:
                raise FITSException('Skyview image list returned empty.')
            except ValueError:
                raise FITSException(
                    f'{self.survey} is not a valid SkyView survey.')
            except HTTPError:
                raise FITSException('No response from Skyview server.')

            with open(path, 'wb') as f:
                hdul.writeto(f)

        with fits.open(path) as hdul:
            self.header, self.data = hdul[0].header, hdul[0].data
            self.wcs = WCS(self.header, naxis=2)

            try:
                self.mjd = Time(self.header['DATE']).mjd
            except KeyError:
                try:
                    # No DATE card: fall back to a user-supplied epoch,
                    # interpreted as MJD if > 3000, else as a decimal year.
                    self.epoch = self.kwargs.get('epoch')
                    msg = "Could not detect epoch, PM correction disabled."
                    assert self.epoch is not None, msg
                    self.mjd = self.epoch if self.epoch > 3000 else Time(
                        self.epoch, format='decimalyear').mjd
                except AssertionError as e:
                    # Only worth warning about when proper-motion correction
                    # was requested.
                    if self.kwargs.get('pm'):
                        self.logger.warning(e)
                    self.mjd = None

            # Scale by 1000 to match the other backends' mJy units.
            self.data *= 1000

    def _find_image(self):
        """Return DataFrame of survey fields containing coord."""

        # Strip polarisation suffix (e.g. 'racsI' -> 'racs') to locate the
        # field metadata csv.
        survey = self.survey.replace('I', '').replace('V', '')
        try:
            image_df = pd.read_csv(aux_path + f'{survey}_fields.csv')
        except FileNotFoundError:
            raise FITSException(f"Missing field metadata csv for {survey}.")

        beam_centre = SkyCoord(ra=image_df['cr_ra_pix'],
                               dec=image_df['cr_dec_pix'],
                               unit=u.deg)
        image_df['dist_field_centre'] = beam_centre.separation(
            self.position).deg

        # Tighter match radius for VLASS than for the other surveys.
        pbeamsize = 1 * u.degree if self.survey == 'vlass' else 5 * u.degree
        return image_df[image_df.dist_field_centre < pbeamsize].reset_index(
            drop=True)

    def _obfuscate(self):
        """Remove all coordinates and identifying information."""
        lon = self.ax.coords[0]
        lat = self.ax.coords[1]
        lon.set_ticks_visible(False)
        lon.set_ticklabel_visible(False)
        lat.set_ticks_visible(False)
        lat.set_ticklabel_visible(False)
        lon.set_axislabel('')
        lat.set_axislabel('')

    def _plot_setup(self, fig, ax):
        """Create figure and determine normalisation parameters."""
        # Reuse the caller's axes when provided; otherwise create a WCS axes.
        if ax:
            self.fig = fig
            self.ax = ax
        else:
            self.fig = plt.figure()
            self.ax = self.fig.add_subplot(111, projection=self.wcs)

        if self.kwargs.get('grid', True):
            self.ax.coords.grid(color='white', alpha=0.5)
        self.ax.set_xlabel('RA (J2000)')
        self.ax.set_ylabel('Dec (J2000)')

        if self.kwargs.get('title', True):
            self.ax.set_title(SURVEYS.loc[self.survey]['name'],
                              fontdict={
                                  'fontsize': 20,
                                  'fontweight': 10
                              })
        if self.kwargs.get('obfuscate', False):
            self._obfuscate()

        # Optional free-text annotation in the upper-left corner.
        if self.kwargs.get('annotation'):
            color = 'white' if self.cmap == 'hot' else 'k'
            self.ax.text(0.05,
                         0.85,
                         self.kwargs.get('annotation'),
                         color=color,
                         weight='bold',
                         transform=self.ax.transAxes)

    def _add_cornermarker(self, ra, dec, span, offset):
        """Draw two offset crosshair segments at (ra, dec), in degrees.

        The RA segment is scaled by 1/cos(dec) so it spans the same angle
        on the sky at all declinations.
        """
        color = 'white' if self.cmap != 'gray_r' else 'r'
        cosdec = np.cos(np.radians(dec))
        raline = Line2D(
            xdata=[ra + offset / cosdec, ra + span / cosdec],
            ydata=[dec, dec],
            color=color,
            linewidth=2,
            path_effects=[pe.Stroke(linewidth=3, foreground='k'),
                          pe.Normal()],
            transform=self.ax.get_transform('world'))
        decline = Line2D(
            xdata=[ra, ra],
            ydata=[dec + offset, dec + span],
            color=color,
            linewidth=2,
            path_effects=[pe.Stroke(linewidth=3, foreground='k'),
                          pe.Normal()],
            transform=self.ax.get_transform('world'))
        self.ax.add_artist(raline)
        self.ax.add_artist(decline)

    def plot(self, fig=None, ax=None):
        """Plot survey data and position overlay."""
        # Optional sign flip applied to the pixel data.
        self.sign = self.kwargs.get('sign', 1)
        self._plot_setup(fig, ax)
        self.data *= self.sign
        absmax = max(self.data.max(), self.data.min(), key=abs)
        self.logger.debug(f"Max flux in cutout: {absmax:.2f} mJy.")
        rms = np.sqrt(np.mean(np.square(self.data)))
        self.logger.debug(f"RMS flux in cutout: {rms:.2f} mJy.")

        # Refuse to plot an all-NaN or all-zero cutout.
        assert (sum((~np.isnan(self.data).flatten())) > 0 and sum(self.data.flatten()) != 0), \
            f"No data in {self.survey}"

        if self.kwargs.get('maxnorm'):
            self.norm = ImageNormalize(self.data,
                                       interval=ZScaleInterval(),
                                       vmax=self.data.max(),
                                       clip=True)
        else:
            self.norm = ImageNormalize(self.data,
                                       interval=ZScaleInterval(contrast=0.2),
                                       clip=True)

        self.im = self.ax.imshow(self.data, cmap=self.cmap, norm=self.norm)

        if self.kwargs.get('bar', True):
            try:
                self.fig.colorbar(self.im,
                                  label=r'Flux Density (mJy beam$^{-1}$)',
                                  ax=self.ax)
            except UnboundLocalError:
                self.logger.error(
                    "Colorbar failed. Upgrade to recent version of astropy ")

        if self.psf:
            # PSF ellipse: take BMAJ/BMIN/BPA from the header, else from the
            # user-supplied self.psf tuple with BPA assumed 0.
            try:
                self.bmaj = self.header['BMAJ'] * 3600
                self.bmin = self.header['BMIN'] * 3600
                self.bpa = self.header['BPA']
            except KeyError:
                self.logger.warning('Header did not contain PSF information.')
                try:
                    self.bmaj = self.psf[0]
                    self.bmin = self.psf[1]
                    self.bpa = 0
                    self.logger.warning(
                        'Using supplied BMAJ/BMin. Assuming BPA=0')
                except ValueError:
                    self.logger.error('No PSF information supplied.')

            rhs = self.wcs.wcs_pix2world(self.data.shape[0], 0, 1)
            lhs = self.wcs.wcs_pix2world(0, 0, 1)

            # Offset PSF marker by the major axis in pixel coordinates
            try:
                cdelt = self.header['CDELT1']
            except KeyError:
                cdelt = self.header['CD1_1']
            beamavg = (self.bmaj + self.bmin) / 2
            beamsize_pix = beamavg / abs(cdelt) / 3600
            ax_len_pix = abs(lhs[0] - rhs[0]) / abs(cdelt) / 3600
            beam = self.wcs.wcs_pix2world(beamsize_pix, beamsize_pix, 1)
            self.beamx = beam[0]
            self.beamy = beam[1]

            self.beam = Ellipse((self.beamx, self.beamy),
                                self.bmin / 3600,
                                self.bmaj / 3600,
                                -self.bpa,
                                facecolor='white',
                                edgecolor='k',
                                transform=self.ax.get_transform('world'),
                                zorder=10)
            self.ax.add_patch(self.beam)

            # Optionally plot square around the PSF
            # Set size to greater of 110% PSF size or 10% ax length
            if self.kwargs.get('beamsquare', False):
                boxsize = max(beamsize_pix * 1.15, ax_len_pix * .1)
                offset = beamsize_pix - boxsize / 2
                self.square = Rectangle(
                    (offset, offset),
                    boxsize,
                    boxsize,
                    facecolor='white',
                    edgecolor='k',
                    # transform=self.ax.get_transform('world'),
                    zorder=5)
                self.ax.add_patch(self.square)

        # Mark the matched source (corner marker or catalogue ellipse) when
        # one was found, otherwise mark the requested position itself.
        if self.plot_sources:
            if self.kwargs.get('corner'):
                self._add_cornermarker(
                    self.source.ra_deg_cont, self.source.dec_deg_cont,
                    self.kwargs.get('corner_span', 20 / 3600),
                    self.kwargs.get('corner_offset', 10 / 3600))
            else:
                self.sourcepos = Ellipse(
                    (self.source.ra_deg_cont, self.source.dec_deg_cont),
                    self.source.min_axis / 3600,
                    self.source.maj_axis / 3600,
                    -self.source.pos_ang,
                    facecolor='none',
                    edgecolor='r',
                    ls=':',
                    lw=2,
                    transform=self.ax.get_transform('world'))
                self.ax.add_patch(self.sourcepos)

        else:
            if self.kwargs.get('corner'):
                self._add_cornermarker(
                    self.ra, self.dec, self.kwargs.get('corner_span',
                                                       20 / 3600),
                    self.kwargs.get('corner_offset', 10 / 3600))
            else:
                # Fixed 15 arcsec circle when no catalogue match exists.
                self.bmin = 15
                self.bmaj = 15
                self.bpa = 0
                overlay = SphericalCircle(
                    (self.ra * u.deg, self.dec * u.deg),
                    self.bmaj * u.arcsec,
                    edgecolor='r',
                    linewidth=2,
                    facecolor='none',
                    transform=self.ax.get_transform('world'))
                self.ax.add_artist(overlay)

        if self.plot_neighbours:
            for idx, neighbour in self.neighbours.iterrows():
                n = Ellipse((neighbour.ra_deg_cont, neighbour.dec_deg_cont),
                            neighbour.min_axis / 3600,
                            neighbour.maj_axis / 3600,
                            -neighbour.pos_ang,
                            facecolor='none',
                            edgecolor='c',
                            ls=':',
                            lw=2,
                            transform=self.ax.get_transform('world'))
                self.ax.add_patch(n)

    def save(self, path, fmt='png'):
        """Save figure with tight bounding box."""
        self.fig.savefig(path, format=fmt, bbox_inches='tight')

    def savefits(self, path):
        """Export FITS cutout to path"""
        header = self.wcs.to_header()
        hdu = fits.PrimaryHDU(data=self.data, header=header)
        hdu.writeto(path)
コード例 #29
0
                                             para * 4 / 1000 / 1000))
    model = model.to(device)

    # Set checkpoint
    checkpoint = CheckPoint(config.save_path)

    # Set optimizer
    optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad,
                                        model.parameters()),
                                 lr=config.lr)
    scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer,
                                                     milestones=config.step,
                                                     gamma=0.1)

    # Set trainer
    logger = Logger(config.save_path)
    trainer = AlexNetTrainer(config.lr, train_loader, valid_loader, model,
                             optimizer, scheduler, logger, device)

    print(model)

    time_start = time.time()

    for epoch in range(1, config.nEpochs + 1):
        cls_loss_, accuracy, accuracy_valid = trainer.train(epoch)
        checkpoint.save_model(model, index=epoch)

    time_end = time.time()
    print(time_end - time_start)

    # #计算样本量
コード例 #30
0
import pandas as pd

from keras.losses import *
from loss.pairwise_loss import *
from loss.triplet_loss import *
import loss.pairwise_loss
import loss.triplet_loss
import models
#from distutils.util import strtobool

# Number of GPUs reported by the project's units helper (0 on CPU-only hosts).
gpu_count = len(units.get_available_gpus())
# NOTE(review): semantics inferred from names — dir_path appears to be the
# run's output directory and global_logger its logger; confirm against
# units.getLogger.
dir_path, global_logger = units.getLogger()

from tools.logger import Logger

# Module-level Logger instance, distinct from global_logger above.
logger = Logger()
import os


def run(params):
    evaluation = []
    #    params=dataset.classification.process_embedding(reader,params)
    qdnn = models.setup(params)
    model = qdnn.getModel()
    model.summary()
    if hasattr(loss.pairwise_loss, params.loss):
        loss_func = getattr(loss.pairwise_loss, params.loss)
    else:
        loss_func = params.loss
    optimizer = units.getOptimizer(name=params.optimizer, lr=params.lr)
    #