Example #1
    def push_message(self, sense_hat_readings: SenseHatReadings,
                     time: str) -> None:
        """
        # Send a message to all your registered devices.
        :param sense_hat_readings data from sense hat
        :param time current time
        """

        title = 'It is warm enough for a t-shirt' \
            if sense_hat_readings.temperature >= UPPER_TEMPERATURE_THRESHOLD \
            else "Please put on a pullover - it's getting colder"

        temperature = sense_hat_readings.get_reading_as_string(
            value=sense_hat_readings.temperature, unit='temperature')
        pressure = sense_hat_readings.get_reading_as_string(
            value=sense_hat_readings.pressure, unit='pressure')
        humidity = sense_hat_readings.get_reading_as_string(
            value=sense_hat_readings.humidity, unit='humidity')

        body = f"Current reading at {time}\n" \
               f"Temperature: {temperature}\n" \
               f"Pressure: {pressure}\n" \
               f"Humidity: {humidity}"

        data = {'type': 'note', 'title': title, 'body': body}

        try:
            # Send the message off to the Pushbullet API
            requests.post('https://api.pushbullet.com/api/pushes',
                          data=data,
                          auth=(self._api_key, ''))
        except requests.exceptions.RequestException as err:
            logger.critical(f" error pushing message {err}")
Example #2
def connect():
    global LAST, redis_getter, redis_setter, redis_expire
    # Reconnect at most once per minute; recording the time makes the throttle effective.
    if time.time() - LAST > 60:
        LAST = time.time()
        if REDIS_NODE != "Single":
            startup_nodes = [{"host": REDIS_SERVER, "port": REDIS_PORT}]
            pool_cluster = StrictRedisCluster(startup_nodes=startup_nodes,
                                              decode_responses=True,
                                              skip_full_coverage_check=True)
            redis_getter = pool_cluster.get
            redis_setter = pool_cluster.set
            redis_expire = pool_cluster.expire
        else:
            try:
                pool = redis.ConnectionPool(
                    host=REDIS_SERVER,
                    port=REDIS_PORT,
                    db=0,
                    decode_responses=True,
                )  # , password=REDIS_PASS
                # One client is enough; bind its methods rather than creating three clients.
                client = redis.Redis(connection_pool=pool)
                redis_getter = client.get
                redis_setter = client.set
                redis_expire = client.expire
            except redis.RedisError:
                logger.critical("Could not connect to Redis")
        logger.debug("REDIS Re-connected")
Example #3
    def get_state_from_volatile_status(self, mutator, volatile_status,
                                       attacker, affected_side, instruction):
        if instruction.frozen or not volatile_status:
            return [instruction]

        if affected_side in self.same_side_strings:
            affected_side = attacker
        elif affected_side in self.opposing_side_strings:
            affected_side = self.possible_affected_strings[attacker]
        else:
            logger.critical("Invalid affected_side: {}".format(affected_side))
            return [instruction]

        side = self.get_side_from_state(mutator.state, affected_side)
        mutator.apply(instruction.instructions)
        if volatile_status in side.active.volatile_status:
            mutator.reverse(instruction.instructions)
            return [instruction]

        # The early return above guarantees volatile_status is not already present.
        if self._can_be_statused(side.active, volatile_status):
            apply_status_instruction = (
                constants.MUTATOR_APPLY_VOLATILE_STATUS, affected_side,
                volatile_status)
            mutator.reverse(instruction.instructions)
            instruction.add_instruction(apply_status_instruction)
            if volatile_status == constants.SUBSTITUTE:
                instruction.add_instruction(
                    (constants.MUTATOR_DAMAGE, affected_side,
                     side.active.maxhp * 0.25))
        else:
            mutator.reverse(instruction.instructions)

        return [instruction]
Example #4
def svn(url, username, password):
    try:
        setting = {
            # local install path of the svn client
            'svn': Config.SVN_PATH,
            # svn URL of the file to download
            "url": url,
            # svn account
            "user": username,
            # svn password
            "pwd": password,
            # destination path for the download
            "dist": config.basedir
        }
        dist = setting['dist']
        os.chdir(setting['svn'])
        # The URL may contain stray newlines; strip them out
        svn_url = setting['url']
        svn_url = str(svn_url).replace("\n", "")
        pos = str(svn_url).rfind("/")
        path = svn_url[pos + 1:]
        setting['url'] = svn_url
        setting['dist'] = str(dist + "\\" + path)
        cmd = 'svn export %(url)s %(dist)s --force --non-interactive --trust-server-cert --username %(user)s --password %(pwd)s' % setting
        os.system(cmd)
        print(dist)
        return "ok"
    except Exception:
        print("Download failed!")
        logger.critical("svn download raised an exception")
        return "fail"
Example #5
def suiteCancel():
    logger.debug('Received cancel command, entering the cancel function')
    print('Entering the cancel branch')
    try:
        global owner
        # 1. Delete the yaml file, if it exists
        if os.path.exists(Config.YAML_FILE_PATH):
            os.remove(Config.YAML_FILE_PATH)

            # 2. Report this machine's status
            owner = ''
            payload = {
                'type': 'security',
                'subType': 'nmap',
                'status': 'idle',
                'owner': owner
            }
            sendStatusToRedis(payload)
            return {"status": '200', "msg": '执行成功'}

        else:
            print('no such file: %s' % Config.YAML_FILE_PATH)  # the file does not exist
            return {"status": '500', "msg": 'Delete failed: no test yaml file'}
    except Exception:
        print("取消接口执行异常!")
        logger.critical("取消接口执行异常,程序即将退出")
        exit(0)
Example #6
def suiteReady():
    logger.debug('Received suiteReady command, entering the suiteReady function')
    ret = 'false'
    print('Inside the suiteReady branch')
    global owner
    jsonstr = request.json
    fileurl = jsonstr.get("fileurl")
    print('yaml file url: %s' % fileurl)
    repository = jsonstr.get("repository")
    repousername = jsonstr.get("repousername")
    repopassword = jsonstr.get("repopassword")
    if owner == '':
        owner = jsonstr.get("owner")

    # Download-mode field; may need adjusting based on integration testing with the server side
    try:
        ret = download(repository, fileurl, repousername, repopassword)
    except Exception:
        print("下载异常!")
        logger.critical("下载出现异常,程序即将退出")
        exit(0)
    else:
        if ret == 'ok':
            payload = {
                'type': 'security',
                'subType': 'nmap',
                'status': 'ready',
                'owner': owner
            }
            sendStatusToRedis(payload)
            return {"status": '200', "msg": '执行成功'}
        else:
            return {"status": '500', "msg": '执行失败'}
Example #7
    def run(self):
        global task_queue
        global response_queue
        while True:
            url_paras = task_queue.get()
            task_url = self.construct_url(url_paras)
            try:
                # get_page() wraps the requests call
                response = get_page(task_url)
                if response:
                    data = self.parse(response)
                    if data is not None:  # None means parsing response.text failed.
                        response_queue.put((data, url_paras))
                    else:
                        self.collection.update_one(
                            {'_id': url_paras},
                            update={'$set': {
                                'status': UN_PROCESSED
                            }})
                else:
                    # Reset the status in the database back to UN_PROCESSED
                    self.collection.update_one(
                        {'_id': url_paras},
                        update={'$set': {
                            'status': UN_PROCESSED
                        }})
            except Exception as e:
                logger.critical('In Crawler:{}'.format(str(e)) + str(task_url))
                self.collection.update_one(
                    {'_id': url_paras},
                    update={'$set': {
                        'status': UN_PROCESSED
                    }})
Example #8
def search_and_display_message(temperature: float) -> None:
    if os.path.exists(BLUETOOTH_DEVICES_JSON):
        try:
            known_devices = parse_known_devices()
            for device in known_devices["devices"]:
                is_home = bluetooth.lookup_name(device['mac'], timeout=15)
                if os.path.exists(BLUETOOTH_STATUS_JSON):
                    with open(BLUETOOTH_STATUS_JSON) as bluetooth_status_file:
                        bt_stat = json.load(bluetooth_status_file)
                else:
                    bt_stat = {'sent': False}
                if is_home is not None and not bool(
                        bt_stat['sent']
                ):  # if device is home and greeting wasn't sent
                    # make a process on separate thread
                    multiprocessing.Process(target=sensehat_greeting,
                                            args=(
                                                temperature,
                                                device,
                                            )).start()
                    bt_stat['sent'] = True
                elif is_home is None:
                    bt_stat['sent'] = False
                with open(BLUETOOTH_STATUS_JSON, "w") as status_write:
                    json.dump(bt_stat, status_write)
        except (FileNotFoundError, IOError, JSONDecodeError):
            logger.critical(f"{BLUETOOTH_DEVICES_JSON} failed to read")
Example #9
    def deal_pkg(self, header, body):
        res = None
        if header.cmd == pkg.SET_CHANNEL_CTRL_REQ:
            res = pkg.Header(cmd=pkg.SET_CHANNEL_CTRL_RSP)
            for k in ('mode', 'enable', 'mu_factor'):
                Config.set(header.channel, k, getattr(body, k))

            wsets = [(w.begin, w.noise, w.end) for w in body.data]
            Config.set(header.channel, 'win_settings', wsets)
        elif header.cmd == pkg.GET_BASE_INFO_REQ:
            res = pkg.Header(cmd=pkg.GET_BASE_INFO_RSP)
            body = pkg.BaseInfo(
                mac=get_mac_address(self.ifname),
                hwid='Emultr',
                hwcode='EM',
                hw_version=chr(2) + chr(0),
                sw_version=chr(2) + chr(0),
                sw_revision=0,
                proto_version=2,
                channel_num=2,
                machine_id=0,
                ip_num=1,
                slot_id=0
            )
            res = pkg.combine(res, body)
        elif header.cmd == pkg.SET_SWITCH_CTRL_REQ:
            res = pkg.Header(cmd=pkg.SET_SWITCH_CTRL_RSP)
            if body.led == 1:       # light on
                if self.test:
                    algorithm_test(self.ip + ":" + str(header.channel))
                led = 'LED RED'
            elif body.led == 0xff:  # blink
                led = 'LED BLINK'
            elif body.led == 0:     # light off
                led = 'LED BLACK'
            else:
                led = 'LED UNKNOWN'  # guard so led is always bound
            logger.critical((self.ip, led))
        elif header.cmd == pkg.GET_CHANNEL_CTRL_REQ:
            res = pkg.Header(cmd=pkg.GET_CHANNEL_CTRL_RSP)
            ch_ctl = pkg.ChannelCtrl()
            for k in ('enable', 'mu_factor', 'mode'):
                setattr(ch_ctl, k, Config.get(header.channel, k))
            win_sts = Config.get(header.channel, 'win_settings')
            ch_ctl.win_size = len(win_sts)
            for ws in win_sts:
                wp = pkg.ChannelCtrl.WinSetting(
                    begin=ws[0],
                    end=ws[1],
                    noise=ws[2]
                )
                ch_ctl.data += wp.pack()
            res.data = ch_ctl.pack()
        else:
            logger.info(repr((header, body)))
        if res:
            res.seq = header.seq
            return res.pack()
Example #10
    def read_api_key() -> str:
        """
        Read the local API key file.
        :return: Pushbullet API key
        """
        try:
            with open(API_KEY_FILE, "r") as api_key:
                return api_key.read().strip()  # strip a trailing newline from the stored token
        except (FileNotFoundError, IOError):
            logger.critical(f"{API_KEY_FILE} not found")
Example #11
def fetch_ads(session: Session) -> Set[AdModel]:
    url = build_url()
    ads = []

    logger.info('=== Starting to fetch ads ===')

    response = session.get(url)
    if response.status_code != 200:
        logger.critical(
            '=== Unsuccessful attempt. '
            'Please check url - %s '
            'The script will be stopped ===', url)
        raise RequestsConnectionError(
            f'Unable to get urls {response.status_code}')

    soup = BeautifulSoup(response.content.decode('utf-8'), 'lxml')
    ads_items = soup.find_all('table', attrs={'summary': 'Объявление'})

    logger.info('=== Start processing %s ads ===', len(ads_items))
    for item in ads_items:

        item_url_obj = item.find('a', class_='marginright5')
        item_url, url_info, *_ = item_url_obj.attrs.get('href').split('#')

        if not settings.WITH_PROMOTED and 'promoted' in url_info:
            continue

        try:
            price = int(
                item.find(
                    'p',
                    class_='price').text.split(' грн.')[0].strip().replace(
                        ' ', ''))
        except ValueError:
            logger.exception('=== Error during parsing a price ===')
            continue

        day = item.select('small > span')[1].text.strip().split(' ')[0].lower()

        ad = AdModel(
            external_id=item.attrs.get('data-id'),
            title=item_url_obj.text.strip(),
            price=price,
            url=item_url,
        )

        if day in settings.PUBLICATION_DATE and \
                settings.MIN_PRICE <= ad.price <= settings.MAX_PRICE:
            ads.append(ad)

    result = set(ads)
    logger.info('=== Found %s ads after filtering ===', len(result))
    return result
Example #12
def do_update(key, sequence, taskList):
    print('Inside the do_update branch')
    global flag
    while True:
        if flag == 0:
            break

        # Run the test; its return value is what gets returned to the POST handler
        try:
            return DoTest(key, sequence, taskList)
        except Exception:
            print("执行测试异常!")
            logger.critical("执行测试异常,程序即将退出")
            exit(0)
Example #13
    def store(self, info):
        if 'url' not in info or info['url'] == '' or \
                'ref' not in info or info['ref'] == '':
            return None  # `None` means a param is wrong.
        key = self._encodeUrl(
            info['url']) + sysconfig.MOVE_KEY_HYPHEN + self._encodeUrl(
                info['ref'])
        count = self.getInt(key)
        exists = count is not None
        if exists:
            try:
                self.put(key, count + 1)
            except Exception as e:
                logger.critical("failed to store web-moves: " + str(e.args))
Example #14
def pushStatusIdle():
    logger.debug('Program starting; reporting its status to redis')
    print('Program startup: reporting own status to redis')
    payload = {
        'type': 'security',
        'subType': 'nmap',
        'status': 'idle',
        'owner': ''
    }
    try:
        sendStatusToRedis(payload)
    except Exception:
        print("向redis发送函数异常!")
        logger.critical("向redis发送函数异常,程序即将退出")
        exit(0)
Example #15
    def get(self, key_name, pop_=0) -> str or bool:
        """
        分发订阅链接
        每次get请求都会强制关闭连接
        @param pop_:
        @param key_name: 任务类型,用于定位 redis hash name
        @return:
        """
        try:
            while True:
                # Pull all subscriptions under key_name (the hash name) in one go
                target_raw: dict = self.db.hgetall(key_name)
                try:
                    # Pop and capture the subscription whose insertion is furthest from now; the links are not sorted by end_life
                    self.subscribe, end_life = list(
                        target_raw.items()).pop(pop_)

                    # end_life: requests_time (collection time) + vip_crontab (VIP/trial duration of the source)

                    # This module was originally designed only for sources with vip_crontab = 1 day.
                    # Sources with trials of several days or even months were later added to the task queue,
                    # but this dispatch logic was never updated and later versions will keep using it.

                    # If a future version supports dynamic end_life updates (i.e. some mechanism that
                    # reflects the source owner's rule changes in real time),
                    # sorting will be added to this dispatch logic.

                    # If the link is stale -> loop next -> finally: db-del stale subscribe
                    if self.is_stale(end_life):
                        continue
                    # If the link is usable -> break off -> hand it out -> finally: db-del subscribe
                    else:
                        return self.subscribe
                # This error means the redis queue has been drained: nothing left to hand out; stop the loop
                except IndexError:
                    logger.critical("{}.get() IndexError".format(
                        self.__class__.__name__))
                    return False
                # Detach the link
                finally:
                    self.db.hdel(key_name, self.subscribe)

                    from BusinessCentralLayer.middleware.subscribe_io import detach
                    detach(self.subscribe, at_once=True)
        finally:
            # Close the connection
            self.kill()
Example #16
    def _store(self, info):
        if 'url' not in info:
            return False
        key = self._genKey(info['url'])
        obj = self.bucket.get(key=key, r=self.R_VALUE)
        if not obj.exists():
            obj = self.bucket.get(key=key, r=self.R_VALUE_UP)
        exists = obj.exists()
        if 'ref' not in info:  # do not store in this case.
            return exists
        if exists:
            try:
                obj.set_data(str(int(obj.get_data()) + 1)).store()
            except Exception:
                logger.critical("failed to store web-moves")
        else:
            self.bucket.new_binary(key=key, data=str(1)).store()
Example #17
    def get_last_average(self) -> float:
        """
        :return: average temperature for the last 15 minutes
        """
        try:
            # The influxDB Python adapter doesn't support parameterised queries;
            # there is an open issue about that, with no ETA.
            last_temp = self._client.query(
                'SELECT MEAN(temperature) FROM SenseHatReadings where time >= now() - 15m'
            )
            last_temp = list(last_temp.get_points())[0]
            return last_temp["mean"]
        except (IndexError, exceptions.InfluxDBClientError) as err:
            logger.critical(f"Error reading from the database{err}")
            return 0
Example #18
def check_dictionaries_are_unmodified(original_pokedex, original_move_json):
    # The bot should not modify the data dictionaries.
    # This is a just-in-case check that stops the bot if it has mutated either of them.
    if original_move_json != all_move_json:
        logger.critical("Move JSON changed!\nDumping modified version to `modified_moves.json`")
        with open("modified_moves.json", 'w') as f:
            json.dump(all_move_json, f, indent=4)
        exit(1)
    else:
        logger.debug("Move JSON unmodified!")

    if original_pokedex != pokedex:
        logger.critical("Pokedex JSON changed!\nDumping modified version to `modified_pokedex.json`")
        with open("modified_pokedex.json", 'w') as f:
            json.dump(pokedex, f, indent=4)
        exit(1)
    else:
        logger.debug("Pokedex JSON unmodified!")
Example #19
def _take_images(tonight):
	'''
	Take an image and produce different outputs: fits, png, json.
		tonight: Night class with properties to complete images.
		Returns True if it completes OK.
	'''
	# Preparing exposure
	now = datetime.utcnow()
	texp = _set_exposure(now, tonight)
	datestamp = now.strftime("%Y%m%dT%H%M%S")
	# Check path
	fitsfile = '%s/raw/%s/%s-%s.fits' % (DATA_DIR, tonight.night8, INSTR_ID, datestamp)
	check = check_dir(fitsfile)
	if check is not True: logger.critical(check)
	# Taking an image
	resp = _take_fits(texp, fitsfile)
	# Creating other products from new fits
	if resp:
		parms = {
		'observatory': tonight.observatory,
		'night': tonight.night8,
		'exp': texp,
		'utc': now
		}
		# Make PNG files
		pngfile = '%s/png/%s/%s.png' % (DATA_DIR, tonight.night8, datestamp)
		check = check_dir(pngfile)
		if check is not True: logger.critical(check)
		latestpng = '%s/tonight/latest.png' % (DATA_DIR)
		check = check_dir(latestpng)
		if check is not True: logger.critical(check)
		outputs.make_image(parms=parms, fitsfile=fitsfile, pngfile=pngfile)
		logger.info("Saved %s" % (pngfile))
		copyfile(pngfile, latestpng)
		# Make JSON file
		jsonfile = '%s/tonight/latest.json' % (DATA_DIR)
		check = check_dir(jsonfile)
		if check is not True: logger.critical(check)
		outputs.make_json(parms, jsonfile)
		return True
	else:
		logger.critical("Error when take fits image!")
		sleep(10)	# To ease ctrl+c
		return False
Example #20
def take_exposure(exptime, filename):
    logger.debug("Taking exposure of %s seg in %s" % (exptime, filename))
    # instantiate the client
    indiclient = IndiClient(float(exptime), str(filename))
    # set indi server localhost and port 7624
    indiclient.setServer("localhost", 7624)
    # connect to the indi server; bail out if none is running
    if not indiclient.connectServer():
        txt = "No indiserver running on " + indiclient.getHost() + ":" + str(
            indiclient.getPort()) + " - try running one"
        logger.critical(txt)
        return False
    sleep(1)
    # endless loop; the client works asynchronously in the background, the loop stops after disconnect
    while indiclient.connected:
        sleep(0.1)
    indiclient.disconnectServer()
    del indiclient
    return True
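
A hedged usage sketch, assuming an INDI server is already listening on localhost:7624 and the output path is a placeholder:

if take_exposure(2.0, "/tmp/test.fits"):
    logger.debug("Exposure complete")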
Example #21
def parse(html_response):
    '''
    Parse the response.text obtained from get_page and extract the useful data.
    :param html_response:
    :return: list of all trains from the origin to the terminal station, e.g.
    [['5l0000D35273', 'D352', 'AOH', 'ICW', 'AOH', 'ICW', '06:11', '20:27', '14:16', 'Y'], ['5l0000D63640', 'D636', 'AOH', 'ICW', 'AOH', 'ICW', '06:33', '21:01', '14:28', 'Y']]
    '''
    time_detail = []
    data = json.loads(html_response)["data"]
    result = data["result"]
    try:
        for train in result:
            temp_list = train[train.index("|") + 1:].split("|")
            time_detail.append(temp_list[1:11])
    except Exception as e:
        logger.critical(str(e) + str(result))
        return None
    return time_detail
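
A hedged usage sketch with a synthetic response in the pipe-delimited shape the parser expects; the leading fields and field values are illustrative only:

import json

fake_response = json.dumps({
    "data": {
        "result": [
            # everything before the first '|' is skipped; temp_list[1:11] keeps fields 1-10
            "x|f0|5l0000D35273|D352|AOH|ICW|AOH|ICW|06:11|20:27|14:16|Y|rest"
        ]
    }
})
print(parse(fake_response))
# -> [['5l0000D35273', 'D352', 'AOH', 'ICW', 'AOH', 'ICW', '06:11', '20:27', '14:16', 'Y']]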
Example #22
    def run(self):
        global task_queue
        while True:
            try:
                if task_queue.qsize() < 300:
                    temp = self.collection.find({'status': UN_PROCESSED},
                                                limit=60)
                    for single_item in temp:
                        # Mark the item as PROCESSING
                        self.collection.update_one(
                            {'_id': single_item['_id']},
                            update={'$set': {
                                'status': PROCESSING
                            }})
                        task_queue.put(single_item['_id'])
                else:
                    time.sleep(3)
            except Exception as e:
                logger.critical('In Task1Producer:{}'.format(str(e)))
Example #23
def suiteRun():
    logger.debug('Received suiteRun command, entering the suiteRun function')
    print('Entering the run branch')
    global flag, owner
    flag = 1
    jsonstr = request.json
    g.sequence = jsonstr.get("sequence")
    g.taskList = jsonstr.get("taskList")
    g.key = jsonstr.get("key")
    print('key about to be passed:', g.key)

    # Start the test and send the running status to redis
    try:
        payload = {
            'type': 'security',
            'subType': 'nmap',
            'status': 'running',
            'owner': owner
        }
        sendStatusToRedis(payload)
        print("开始执行测试")
        executor = ThreadPoolExecutor()
        task = executor.submit(do_update, g.key, g.sequence, g.taskList)

        # Send the live status to redis; after the test finishes, the live status becomes idle
        owner = ''
        payload = {
            'type': 'security',
            'subType': 'nmap',
            'status': 'idle',
            'owner': owner
        }
        sendStatusToRedis(payload)
        print(task.result())
        return task.result()
    except Exception:
        print("执行测试过程出现异常!")
        logger.critical("执行测试过程出现异常,程序即将退出")
        exit(0)
Example #24
## exposure method

## It's necessary to run take_exposure separately because there is a memory leak:
## every exposure grows the memory used by 3 MB.
## If we take the exposure in a different process, the memory is freed.

# my logging
from config import logger

try:
    from indiclient import IndiClient
except ImportError:
    txt = "IndiClient not installed"
    print(txt)
    logger.critical(txt)

from time import sleep
from sys import argv


# Take image through INDI
def take_exposure(exptime, filename):
    logger.debug("Taking exposure of %s sec in %s" % (exptime, filename))
    # instantiate the client
    indiclient = IndiClient(float(exptime), str(filename))
    # set indi server localhost and port 7624
    indiclient.setServer("localhost", 7624)
    # connect to the indi server; bail out if none is running
    if not indiclient.connectServer():
        txt = "No indiserver running on " + indiclient.getHost() + ":" + str(
            indiclient.getPort()) + " - try running one"
        logger.critical(txt)
        return False
    sleep(1)
    # endless loop; the client works asynchronously in the background, the loop stops after disconnect
    while indiclient.connected:
        sleep(0.1)
    indiclient.disconnectServer()
    del indiclient
    return True
Example #25
def dealDetailAndContent(taskContent, key):
    print("in dealDetailAndContent")
    try:
        arguments = getTaskKey(taskContent, 'arguments')
        content = getTaskKey(taskContent, 'content')[0].lower()

        # If isDetail is 'n', splicing is needed: join the range and subRange values into
        # the x/y placeholders in content to form an ip:port pattern
        if (getTaskKey(taskContent, 'isDetail') == 'n'):
            ip, port = getIpAndPortFromContent(
                content, getTaskKey(taskContent, 'range'),
                getTaskKey(taskContent, 'subRange'))
            # The current return value of this function is the execution result
            status, msg, resultFile, time = results(ip, port, arguments)
            return {
                "status": status,
                "msg": msg,
                "file": resultFile,
                "key": key
            }

        # If isDetail is 'y', no splicing is needed: parse content directly as ip:port and run the test
        elif (getTaskKey(taskContent, 'isDetail') == 'y'):
            status = '200'  # status is the final test result; statusOnce is the result of a single run
            msg = "Test case executed successfully"
            resultFile = ""
            taskIpPortList = {}
            addrList = getTaskKey(taskContent, 'content')
            arguments = getTaskKey(taskContent, 'arguments')
            for addr in addrList:
                print('addr: ', addr)
                ip = addr[0:addr.index(':')]
                port = addr[addr.index(':') + 1:]
                print(ip, port)
                if ip in taskIpPortList.keys():
                    taskIpPortList[ip] = '{0},{1}'.format(
                        taskIpPortList[ip], port)
                else:
                    taskIpPortList[ip] = port

            status, msg, resultFile = testFromList(taskIpPortList, arguments)

            return {
                "status": status,
                "msg": msg,
                "file": resultFile,
                "key": key
            }

        else:
            logger.error("yaml中的任务中的字段【isDetail】值填写有误!")
            return {
                "status": "500",
                "msg": "yaml中的字段【isDetail】值填写有误",
                "file": "",
                "key": key
            }

    except Exception:
        print("获取yaml字段异常!")
        logger.critical("获取yaml字段异常!")
        return {
            "status": '500',
            "msg": "获取yaml字段异常,无法执行后续测试!",
            "file": '',
            "key": key
        }
Example #26
async def showdown():
    env = Env()
    env.read_env()
    config.log_to_file = env.bool("LOG_TO_FILE", config.log_to_file)
    config.save_replay = env.bool("SAVE_REPLAY", config.save_replay)
    logger.setLevel(env("LOG_LEVEL", "DEBUG"))
    websocket_uri = env("WEBSOCKET_URI", "sim.smogon.com:8000")
    username = env("PS_USERNAME")
    password = env("PS_PASSWORD", "")
    bot_mode = env("BOT_MODE")
    team_name = env("TEAM_NAME", None)
    pokemon_mode = env("POKEMON_MODE", constants.DEFAULT_MODE)
    run_count = int(env("RUN_COUNT", 1))

    apply_mods(pokemon_mode)
    original_pokedex = deepcopy(pokedex)
    original_move_json = deepcopy(all_move_json)

    if bot_mode not in constants.BOT_MODES:
        raise ValueError("{} is not a valid bot mode".format(bot_mode))

    ps_websocket_client = await PSWebsocketClient.create(
        username, password, websocket_uri)
    await ps_websocket_client.login()

    team = load_team(team_name)

    battles_run = 0
    wins = 0
    losses = 0
    while True:
        if bot_mode == constants.CHALLENGE_USER:
            user_to_challenge = env("USER_TO_CHALLENGE")
            await ps_websocket_client.challenge_user(user_to_challenge,
                                                     pokemon_mode, team)
        elif bot_mode == constants.ACCEPT_CHALLENGE:
            await ps_websocket_client.accept_challenge(pokemon_mode, team)
        elif bot_mode == constants.SEARCH_LADDER:
            await ps_websocket_client.search_for_match(pokemon_mode, team)
        else:
            raise ValueError("Invalid Bot Mode")

        is_random_battle = "random" in pokemon_mode
        winner = await pokemon_battle(ps_websocket_client, is_random_battle)

        if winner == username:
            wins += 1
        else:
            losses += 1

        logger.info("\nW: {}\nL: {}\n".format(wins, losses))

        if original_move_json != all_move_json:
            logger.critical(
                "Move JSON changed!\nDumping modified version to `modified_moves.json`"
            )
            with open("modified_moves.json", 'w') as f:
                json.dump(all_move_json, f, indent=4)
            exit(1)
        else:
            logger.debug("Move JSON unmodified!")

        if original_pokedex != pokedex:
            logger.critical(
                "Pokedex JSON changed!\nDumping modified version to `modified_pokedex.json`"
            )
            with open("modified_pokedex.json", 'w') as f:
                json.dump(pokedex, f, indent=4)
            exit(1)
        else:
            logger.debug("Pokedex JSON unmodified!")

        battles_run += 1
        if battles_run >= run_count:
            break
Example #27
def install_module(module_name,
                   dot_install_dir,
                   available_modules,
                   is_dependency=False,
                   install_dependencies=True):
    # Cannot install a module if it doesn't have an installer.
    if module_name not in available_modules.keys() and module_name.split(
            ":")[0] not in ["package", "packages"]:
        logger.error("{} is not installable.".format(module_name))
        return False

    if module_name.split(":")[0] not in ["package", "packages"]:
        module = available_modules[module_name]

    # Check if the module needs an alternate installer function.
    name_split = module_name.split(":")
    if len(name_split) > 1:
        if name_split[0] not in installer_map.keys():
            logger.critical("Installer for {} not found.".format(module_name))
            return False

        installer = installer_map[name_split[0]]
        return installer(module_name, dot_install_dir, available_modules,
                         is_dependency)

    dependency_str = " dependency" if is_dependency else ""
    logger.info("Installing{}: {}".format(dependency_str, module_name))

    # Install the module's dependencies first (if any).
    if install_dependencies:
        if "depends" in module.keys():
            logger.debug("Found dependencies for {}.".format(module_name))
            if len(module["depends"]) > 0:
                for dependency in module["depends"]:
                    if not install_module(dependency,
                                          dot_install_dir,
                                          available_modules,
                                          is_dependency=True):
                        logger.critical(
                            "{} could not install dependency {}.".format(
                                module_name, dependency))
                        return False

    # Check if the entire directory can be installed.
    if "install_dir" in module.keys():
        install_dir = module["install_dir"]
        logger.debug("[{}] Installing entire directory to {}.".format(
            module_name, install_dir))

        source_dir = helpers.get_config(module["config_dir"])
        helpers.symlink(source_dir, install_dir, is_directory=True)
    elif "config_files" in module.keys():
        for config_file in module["config_files"]:
            install_location = module["config_files"][config_file]
            logger.debug("[{}] Installing {} to {}.".format(
                module_name, config_file, install_location))

            source_file = helpers.get_config(module["config_dir"], config_file)
            helpers.symlink(source_file, install_location)
    else:
        logger.debug("[{}]: No config files to install.".format(module_name))

    # Module has been successfully installed.
    return True
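
A hedged sketch of the available_modules structure this installer expects, inferred from the keys the function reads; all module names and paths are illustrative only:

available_modules = {
    "vim": {
        "config_dir": "vim",                      # source directory under the config root
        "config_files": {".vimrc": "~/.vimrc"},   # config file -> install location
        "depends": ["package:curl"],              # optional dependencies, installed first
    },
    "fonts": {
        "config_dir": "fonts",
        "install_dir": "~/.local/share/fonts",    # install the entire directory
    },
}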
Example #28
def write_to_influx(solar_power_values, home_load_values, net_power_values,
                    ct0_dict, ct1_dict, ct2_dict, ct3_dict, ct4_dict, ct5_dict,
                    poll_time, length, voltages):

    # Create a timestamp to be added to the CT number: [Weekday][Hour(1:0)][HourPortion][CT]
    # Assumption: poll_time is an epoch timestamp, so fromtimestamp() is used here;
    # datetime.datetime(poll_time) is not a valid constructor call.
    time_object = datetime.datetime.fromtimestamp(poll_time)
    time_weekday = time_object.weekday(
    )  # 0=Monday, 1=Tuesday, 2=Wednesday, 3=Thursday, 4=Friday, 5=Saturday, 6=Sunday
    time_hour = time_object.hour
    time_minute = time_object.minute // 15  # subsection of one hour: 0~3 with //15, or 0~1 with //30; can be forced to 0 if unused
    add_offset = time_weekday * 10000 + time_hour * 100 + time_minute * 10

    # Calculate Averages
    avg_solar_power = sum(solar_power_values['power']) / length
    avg_solar_current = sum(solar_power_values['current']) / length
    avg_solar_pf = sum(solar_power_values['pf']) / length
    avg_home_power = sum(home_load_values['power']) / length
    avg_home_current = sum(home_load_values['current']) / length
    avg_net_power = sum(net_power_values['power']) / length
    avg_net_current = sum(net_power_values['current']) / length
    ct0_avg_power = sum(ct0_dict['power']) / length
    ct0_avg_current = sum(ct0_dict['current']) / length
    ct0_avg_pf = sum(ct0_dict['pf']) / length
    ct1_avg_power = sum(ct1_dict['power']) / length
    ct1_avg_current = sum(ct1_dict['current']) / length
    ct1_avg_pf = sum(ct1_dict['pf']) / length
    ct2_avg_power = sum(ct2_dict['power']) / length
    ct2_avg_current = sum(ct2_dict['current']) / length
    ct2_avg_pf = sum(ct2_dict['pf']) / length
    ct3_avg_power = sum(ct3_dict['power']) / length
    ct3_avg_current = sum(ct3_dict['current']) / length
    ct3_avg_pf = sum(ct3_dict['pf']) / length
    ct4_avg_power = sum(ct4_dict['power']) / length
    ct4_avg_current = sum(ct4_dict['current']) / length
    ct4_avg_pf = sum(ct4_dict['pf']) / length
    ct5_avg_power = sum(ct5_dict['power']) / length
    ct5_avg_current = sum(ct5_dict['current']) / length
    ct5_avg_pf = sum(ct5_dict['pf']) / length
    avg_voltage = sum(voltages) / length

    # Create Points
    home_load = Point('home_load',
                      power=avg_home_power,
                      current=avg_home_current,
                      time=poll_time)
    solar = Point('solar',
                  power=avg_solar_power,
                  current=avg_solar_current,
                  pf=avg_solar_pf,
                  time=poll_time)
    net = Point('net',
                power=avg_net_power,
                current=avg_net_current,
                time=poll_time)
    ct0 = Point('ct',
                power=ct0_avg_power,
                current=ct0_avg_current,
                pf=ct0_avg_pf,
                time=poll_time,
                num=(0))  #+add_offset))
    ct1 = Point('ct',
                power=ct1_avg_power,
                current=ct1_avg_current,
                pf=ct1_avg_pf,
                time=poll_time,
                num=(1))  #+add_offset))
    ct2 = Point('ct',
                power=ct2_avg_power,
                current=ct2_avg_current,
                pf=ct2_avg_pf,
                time=poll_time,
                num=(2))  #+add_offset))
    ct3 = Point('ct',
                power=ct3_avg_power,
                current=ct3_avg_current,
                pf=ct3_avg_pf,
                time=poll_time,
                num=(3))  #+add_offset))
    ct4 = Point('ct',
                power=ct4_avg_power,
                current=ct4_avg_current,
                pf=ct4_avg_pf,
                time=poll_time,
                num=(4))  #+add_offset))
    ct5 = Point('ct',
                power=ct5_avg_power,
                current=ct5_avg_current,
                pf=ct5_avg_pf,
                time=poll_time,
                num=(5))  #+add_offset))
    v = Point('voltage', voltage=avg_voltage, v_input=0, time=poll_time)

    points = [
        home_load.to_dict(),
        solar.to_dict(),
        net.to_dict(),
        ct0.to_dict(),
        ct1.to_dict(),
        ct2.to_dict(),
        ct3.to_dict(),
        ct4.to_dict(),
        ct5.to_dict(),
        v.to_dict(),
    ]

    try:
        client.write_points(points, time_precision='ms')
    except InfluxDBServerError as e:
        logger.critical(f"Failed to write data to Influx. Reason: {e}")
    except ConnectionError:
        logger.info("Connection to InfluxDB lost. Please investigate!")
        sys.exit()
Example #29
    def read_config_json() -> dict:
        try:
            with open(InfluxDBProxy.CONF_FILE, "r") as connect_file:
                return json.load(connect_file)
        except (FileNotFoundError, IOError):
            logger.critical(f"{InfluxDBProxy.CONF_FILE} not found")


def write_to_influx(solar_power_values, home_load_values, net_power_values,
                    ct0_dict, ct1_dict, ct2_dict, ct3_dict, ct4_dict, ct5_dict,
                    poll_time, length, voltages):

    # Calculate Averages
    avg_solar_power = sum(solar_power_values['power']) / length
    avg_solar_current = sum(solar_power_values['current']) / length
    avg_solar_pf = sum(solar_power_values['pf']) / length
    avg_home_power = sum(home_load_values['power']) / length
    avg_home_current = sum(home_load_values['current']) / length
    avg_net_power = sum(net_power_values['power']) / length
    avg_net_current = sum(net_power_values['current']) / length
    ct0_avg_power = sum(ct0_dict['power']) / length
    ct0_avg_current = sum(ct0_dict['current']) / length
    ct0_avg_pf = sum(ct0_dict['pf']) / length
    ct1_avg_power = sum(ct1_dict['power']) / length
    ct1_avg_current = sum(ct1_dict['current']) / length
    ct1_avg_pf = sum(ct1_dict['pf']) / length
    ct2_avg_power = sum(ct2_dict['power']) / length
    ct2_avg_current = sum(ct2_dict['current']) / length
    ct2_avg_pf = sum(ct2_dict['pf']) / length
    ct3_avg_power = sum(ct3_dict['power']) / length
    ct3_avg_current = sum(ct3_dict['current']) / length
    ct3_avg_pf = sum(ct3_dict['pf']) / length
    ct4_avg_power = sum(ct4_dict['power']) / length
    ct4_avg_current = sum(ct4_dict['current']) / length
    ct4_avg_pf = sum(ct4_dict['pf']) / length
    ct5_avg_power = sum(ct5_dict['power']) / length
    ct5_avg_current = sum(ct5_dict['current']) / length
    ct5_avg_pf = sum(ct5_dict['pf']) / length
    avg_voltage = sum(voltages) / length

    # Create Points
    home_load = Point('home_load',
                      power=avg_home_power,
                      current=avg_home_current,
                      time=poll_time)
    solar = Point('solar',
                  power=avg_solar_power,
                  current=avg_solar_current,
                  pf=avg_solar_pf,
                  time=poll_time)
    net = Point('net',
                power=avg_net_power,
                current=avg_net_current,
                time=poll_time)
    ct0 = Point('ct',
                power=ct0_avg_power,
                current=ct0_avg_current,
                pf=ct0_avg_pf,
                time=poll_time,
                num=0)
    ct1 = Point('ct',
                power=ct1_avg_power,
                current=ct1_avg_current,
                pf=ct1_avg_pf,
                time=poll_time,
                num=1)
    ct2 = Point('ct',
                power=ct2_avg_power,
                current=ct2_avg_current,
                pf=ct2_avg_pf,
                time=poll_time,
                num=2)
    ct3 = Point('ct',
                power=ct3_avg_power,
                current=ct3_avg_current,
                pf=ct3_avg_pf,
                time=poll_time,
                num=3)
    ct4 = Point('ct',
                power=ct4_avg_power,
                current=ct4_avg_current,
                pf=ct4_avg_pf,
                time=poll_time,
                num=4)
    ct5 = Point('ct',
                power=ct5_avg_power,
                current=ct5_avg_current,
                pf=ct5_avg_pf,
                time=poll_time,
                num=5)
    v = Point('voltage', voltage=avg_voltage, v_input=0, time=poll_time)

    points = [
        home_load.to_dict(),
        solar.to_dict(),
        net.to_dict(),
        ct0.to_dict(),
        ct1.to_dict(),
        ct2.to_dict(),
        ct3.to_dict(),
        ct4.to_dict(),
        ct5.to_dict(),
        v.to_dict(),
    ]

    try:
        client.write_points(points, time_precision='ms')
    except InfluxDBServerError as e:
        logger.critical(f"Failed to write data to Influx. Reason: {e}")
    except ConnectionError:
        logger.info("Connection to InfluxDB lost. Please investigate!")
        sys.exit()