Example #1
    def __send_msg(self, action, target_type, target_id, msg):

        super(QQBot, self).random_wait()  # random wait of up to 3 seconds

        url = self.conf.coolq_url + action
        qq = self.conf.coolq_qq
        data = {target_type: target_id, "message": msg, "auto_escape": False}

        logger.debug("Message sent to CoolQ: url=%s, user_id=%s, message=%r", url, qq, data)
        result = http_helper.do_request_json(url, data)
        logger.debug("HTTP response: %r", result)
        if result is None:
            return False
        '''
        {
            "status": "ok",
            "retcode": 0,
            "data": {
                "id": 123456,
                "nickname": "滑稽"
            }
        }
        '''
        status = result['status']
        retcode = result['retcode']
        error_msg = ERROR_CODE.get(str(retcode), "unknown error code")
        if retcode != 0:
            logger.warning("CoolQ HTTP API call failed: status=[%s], retcode=[%d], error=[%s]",
                           status, retcode, error_msg)
            return False

        return True
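ERROR_CODE is a module-level lookup table that is not shown here; a hypothetical sketch of its shape, keyed by CoolQ HTTP API retcodes as strings (the entries below are illustrative, not the plugin's real codes):

# Hypothetical sketch only; real codes/messages come from the CoolQ HTTP API plugin.
ERROR_CODE = {
    "100": "missing or invalid parameters",
    "102": "invalid response data",
}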
Example #2
def generate_data():
    x = np.linspace(-2, 2, 100)[np.newaxis, :]
    noise = np.random.normal(0.0, 0.5, size=(1, 100))
    y = x**2 + noise
    logger.debug('x.shape : {} \n x[0] : {}'.format(x.shape, x[0]))
    logger.debug('y.shape : {} \n y[0] : {}'.format(y.shape, y[0]))
    return x, y
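A minimal usage sketch for generate_data, setting up the module-level np and logger it relies on:

import logging

import numpy as np

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

x, y = generate_data()
print(x.shape, y.shape)  # (1, 100) (1, 100)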
Example #3
def menu():
    #     label='Latelete.tv'
    #     thumbnail=''
    #     url=''
    #     tools.addItemMenu(label = label,thumbnail= thumbnail, url= url,IsPlayable = 'false', isFolder= True)
    #     label='Configurar'
    #     thumbnail=''
    #     url=''
    #     tools.addItemMenu(label = label,thumbnail= thumbnail, url= url,IsPlayable = 'false', isFolder= True)
    domain = "http://latelete.tv/"
    html = tools.getUrl(domain)
    # <center>(.*?)</center>
    pattern = '<center>(.*?)</center>'
    canalesTabla = tools.findall(pattern, html, re.DOTALL)[0]
    logger.debug(canalesTabla)
    pattern = '<td>(.*?)</td>'
    canallista = tools.findall(pattern, canalesTabla, re.DOTALL)
    for canal in canallista:
        try:
            pattern = '<a href.*title="(.*?)"><.*></a>'
            label = tools.findall(pattern, canal, re.DOTALL)[0]
            pattern = '<img src="(.*?)" height'
            thumbnail = domain + tools.findall(pattern, canal, re.DOTALL)[0]
            pattern = '<a href="(.*?)" title=".*</a>'
            url_ = tools.findall(pattern, canal, re.DOTALL)[0]
            url = tools.build_url({'action': 'lateletetvAction', 'url_': url_})
            tools.addItemMenu(label=label,
                              thumbnail=thumbnail,
                              url=url,
                              IsPlayable='false',
                              isFolder=True)
        except Exception as e:
            logger.debug(str(e))
Example #4
    def openbrowser(self, b='chrome', d='chromedriver.exe'):
        # if b == 'chrome' or b == "":
        #     if d == "":
        #         d = "./lib2/chromedriver"

        # op = Options()
        #     # remove the infobar
        # op.add_argument('--disable-infobars')
        #     # add a user profile so the cache can be reused
        # try:
        #         # userdir = os.environ['USERPROFILE'] + '\\AppData\\Local\\Google\\User Data'
        #     userdir = os.environ['USERPROFILE']
        #     userdir = 'user-data-dir=' + userdir + '\\AppData\\Local\\Google\\User Data'
        # except Exception as e:
        #         # run 123.py in the Selenium folder to see the result
        #     userdir = 'C:\\Users\\fotileshanghai3\\AppData\\Local\\Google\\Chrome\\User Data'
        # print(userdir)
        # op.add_argument('--user-data-dir=' + userdir)

        option = webdriver.ChromeOptions()
        # configuration to remove the infobar
        option.add_argument('disable-infobars')
        # speed up browsing; get the local user profile path
        userdir = os.environ['USERPROFILE']
        userdir = 'user-data-dir=' + userdir + '\\AppData\\Local\\Google\\User Data'
        # add the user data directory
        option.add_argument(userdir)
        logger.debug(userdir)

        self.driver = webdriver.Chrome(executable_path='../lib/driver/chromedriver', options=option)
        self.driver.implicitly_wait(10)
Example #5
def get(queue_list, ignore_exist=False):
    logger.info("Getting queue[%s] info...", queue_list)

    try:
        queues_info = get_queues_details()
        if queue_list:
            # get specific queue
            for queue in queue_list:
                # check that the queue exists
                if not queues_info.get(queue["name"]):
                    logger.error("The queue[%s] doesn't exist!", queue["name"])
                    if not ignore_exist:
                        return 55
                else:
                    queue.update(queues_info.get(queue["name"]))
        else:
            # get all queues
            queue_list = queues_info

        logger.info("get queue detail: [%s]!", queue_list)
        res = {
            "labels": ["queues"],
            "data": [jsmod.dumps(queue_list, encoding='utf-8')]
        }
        logger.debug("Cluster queues: %s", res)
        print jsmod.dumps(res, encoding='utf-8')
        return 0
    except Exception:
        logger.error("Failed to get queue [%s]: \n%s", queue_list,
                     traceback.format_exc())
        return 1
Example #6
    def move(self, coord):
        # saghez_lock.acquire()
        if not self.is_above_horizon(coord):
            raise Exception("F**k off, it's below the horizon")

        logger.debug('move :: move to %s' % coord)
        call(self.base_cmd +
             ['decimal', '%s,%s' % (coord.ra.hour, coord.dec.deg)])
        logger.debug('move :: goto completed')

        ccoord = self.get_position()

        ddec = coord.dec - ccoord.dec
        logger.debug('move :: start jogging in DEC (delta = %s)' % ddec)
        call(self.base_cmd +
             ['dither', 'N' if ddec > 0 else 'S',
              str(abs(ddec).arcmin)])

        dra = (coord.ra - ccoord.ra)
        logger.debug('move :: start jogging in RA (delta = %s)' % dra)
        call(self.base_cmd +
             ['dither', 'E' if dra > 0 else 'W',
              str(abs(dra).arcmin)])

        logger.debug('move :: jog completed')
Example #7
 def savetxt(self):
     # the file must be saved (closed) after writing
     if self.w is None:
         logger.error('error: no writable txt file is open')
         return
     self.w.close()
     logger.debug('saved successfully')
Example #8
def iterate_nightlies(start_date, total_days):
    # Filter with user that triggers master and nightlies:
    parameters_base = "username=lluiscampos"

    for single_date in iterate_dates(start_date, total_days):
        single_date_str = single_date.strftime("%Y-%m-%d")
        logger.info("Looking for nightly " + single_date_str)
        for end_hour in range(2, 12):
            parameters = (
                parameters_base +
                "&updated_after={}-{:02}-{:02}T01:00:00Z".format(
                    single_date.year, single_date.month, single_date.day))
            parameters += "&updated_before={}-{:02}-{:02}T{:02}:00:00Z".format(
                single_date.year, single_date.month, single_date.day, end_hour)
            url = "{url}?{params}".format(url=pipelines_api, params=parameters)
            logger.debug("Fetching URL: " + url)
            r = requests.get(url, headers={"PRIVATE-TOKEN": token})
            j = r.json()
            logger.debug("Got JSON: " + str(j))
            if len(j) > 0:
                nightly_id = j[0]["id"]
                logger.info("Found nightly " + str(nightly_id) + " for " +
                            single_date_str)
                yield single_date_str, j[0]["id"]
                break
        else:
            logger.error("Could not find nightly for " + single_date_str)
Example #9
    def check_stream(self):
        m = re.match(VALID_URL_BASE, self.url)
        logger.debug(self.fname)
        if m:
            room_init_api_response = requests.get(_API_URL +
                                                  '{}'.format(m.group('id')))
            room_init_api_response.close()
            room_init_api_response = room_init_api_response.json()
            # room_init_api_response = json.loads(get_content(_API_URL + '{}'.format(m.group('id'))))
            live_status = room_init_api_response["data"]["live_status"]
            if live_status == 1:
                room_id = room_init_api_response['data']['room_id']

                # room_info_api_response = requests.get(
                #     'https://api.live.bilibili.com/room/v1/Room/get_info?room_id={}'.format(room_id))
                # room_info_api_response.close()
                # room_info_api_response = json.loads(
                #     get_content(
                #         'https://api.live.bilibili.com/room/v1/Room/get_info?room_id={}'.format(room_id)))
                # # title = room_info_api_response['data']['title']
                api_url = 'https://api.live.bilibili.com/room/v1/Room/playUrl?cid={}&quality=0&platform=web' \
                    .format(room_id)
                json_data = requests.get(api_url)
                json_data.close()

                json_data = json_data.json()
                # json_data = json.loads(get_content(api_url))
                self.ydl_opts['absurl'] = json_data['data']['durl'][0]['url']
                # print(self.ydl_opts['absurl'])
                return True
            else:
                return False
        return False
Example #10
    def _spread_on_circle(self, tour, origin, radius=1):
        """
        :param tour: list of nodes that represents a circular (start=end) tour
        :param origin: start node
        :param radius: int Optional, the radius of the circle on which to spread the nodes.
        :return: a list of 2-tuples representing the x,y coordinates, one tuple for each node.
        """

        if len(tour) == 0:
            return [(0, 0)]

        assert tour[0] == tour[-1], "The module tour must be circular"

        tour_length = self.environment.get_distance_along_path(
            origin, origin, tour)
        coordinates = []
        last_node = origin  # initialize last_node as the origin_node

        theta = -math.pi / 2  # first point is at six o'clock w.r.t. the others
        for node in tour:
            logger.debug("get distance between " + str(last_node) + " and " +
                         str(node))
            d = self._get_distance(last_node, node)
            delta_theta = d / tour_length * 2 * math.pi
            theta += delta_theta

            x = radius * math.cos(theta)
            y = radius * math.sin(theta) + radius

            coordinates.append((x, y))
            last_node = node

        return coordinates
Example #11
def parse_url(url, postid=0, commentid=0):
    """ Gets image hash(es) from URL, populates database """

    if is_direct_link(url):
        parse_image(url, postid, commentid)
        return True

    if not should_parse_link(url):
        return

    image_urls = get_image_urls(url)
    url = clean_url(url)

    # We assume that any url that yields more than 1 image is an album
    albumid = 0
    if len(image_urls) > 1:
        albumid = get_or_create_album(url)

    if len(image_urls) > 10:
        logger.debug("Using multithreading to download large album")
        pool = ThreadPool(processes=10)
        pool.starmap(func=parse_image,
                     iterable=zip(image_urls, repeat(postid),
                                  repeat(commentid), repeat(albumid)))
        pool.close()
    else:
        for image_url in image_urls:
            parse_image(image_url, postid, commentid, albumid)
    return True
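The multithreaded branch relies on ThreadPool from multiprocessing.pool and repeat from itertools; a minimal self-contained sketch of that starmap pattern, with a stand-in parse_image:

from itertools import repeat
from multiprocessing.pool import ThreadPool

def parse_image(url, postid, commentid, albumid=0):
    # stand-in for the real parser above
    print(url, postid, commentid, albumid)

pool = ThreadPool(processes=10)
pool.starmap(parse_image, zip(['u1', 'u2'], repeat(7), repeat(0), repeat(1)))
pool.close()
pool.join()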
Example #12
def parse_log(
        in_dir: str, out_dir: str,
        measure_type: common.MeasureType) -> Tuple[pd.DataFrame, pd.DataFrame]:
    df_runs = None
    df_stats = None
    dfs = __parse_log(in_dir, measure_type)
    if dfs is not None:
        df_runs, df_stats = dfs
    else:
        logger.warning("No logging data")

    if df_runs is None:
        df_runs = pd.DataFrame(columns=['name'],
                               index=pd.TimedeltaIndex([], name='time'))
    if df_stats is None:
        df_stats = pd.DataFrame(columns=['cpu_load', 'ram_usage'],
                                index=pd.TimedeltaIndex([], name='time'))

    logger.debug("Fixing log data types")
    df_runs = fix_dtypes(df_runs)
    df_stats = fix_dtypes(df_stats)

    logger.info("Saving ping data")
    df_runs.to_pickle(os.path.join(out_dir, 'runs.pkl'))
    df_stats.to_pickle(os.path.join(out_dir, 'stats.pkl'))
    with open(os.path.join(out_dir, 'runs.csv'), 'w+') as out_file:
        df_runs.to_csv(out_file)
    with open(os.path.join(out_dir, 'stats.csv'), 'w+') as out_file:
        df_stats.to_csv(out_file)

    return df_runs, df_stats
Example #13
    def read_data(filename):
        """Extract the first file enclosed in a zip file as a list of words."""
        logger.debug('------------- enter read_data---------------')

        with zipfile.ZipFile(filename) as f:
            data = tf.compat.as_str(f.read(f.namelist()[0])).split()
        return data
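A quick self-contained round trip of the zip-reading pattern above; plain .decode() stands in for tf.compat.as_str, which just converts bytes to str:

import zipfile

with zipfile.ZipFile('words.zip', 'w') as z:
    z.writestr('words.txt', 'the quick brown fox')

with zipfile.ZipFile('words.zip') as f:
    data = f.read(f.namelist()[0]).decode().split()

print(data)  # ['the', 'quick', 'brown', 'fox']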
Example #14
 def free_upload(self, _urls):
     logger.debug(_urls)
     for title, v in engine.streamer_url.items():
         url = v[0]
         if self.free(v) and UploadBase.filter_file(title):
             event_manager.send_event(Event(UPLOAD, args=(title, url)))
             self.url_status[url] = 2
Example #15
    def query_groups_info(self):
        qq_group = db_helper.find_group_by_qqtype()
        wechat_group = db_helper.find_group_by_wechattype()
        logger.debug("QQ groups found: [%r]", qq_group)
        logger.debug("WeChat groups found: [%r]", wechat_group)

        return qq_group, wechat_group
Example #16
    def _write_strings(self):
        string = self._write_queue.get()

        # Convert string to character list with length 60 as required for
        # bricklet write
        string_complete = f'{string}\r\n'
        string_length = len(string_complete)
        chars = list(string_complete)
        chars.extend(['\0'] * (60 - len(string_complete)))

        # Writing to rs232 interface
        logger.debug(f'{self}: writing RS232 string: {string}')
        try:
            self._retry_on_tinkeforge_error(self._bricklet_rs232.write, chars,
                                            string_length)
        except ip_connection.Error:
            self._process_exception(value_type=data.ValueTypes.CO2FLOW)
            self.error = errors.HardwareError(self.__str__)
        except Exception as e:
            self._process_exception(data.ValueTypes.CO2FLOW)
            self.error = e
        else:
            self.error = None
        finally:
            self._write_queue.task_done()
Example #17
def search_channels_lyzem(query, limit=100):
    initial_request_url = LYZEM_BASE_URL + urllib.parse.quote(query)
    logger.debug("Lyzem request url {}".format(initial_request_url))

    # extract channels from initial page
    source_html = extract_html(initial_request_url, javascript_enabled=False)
    page_channels = parse_lyzem_page(source_html)
    all_channels = page_channels

    # if reached limit return the channels
    if len(all_channels) >= limit:
        return all_channels[:limit]

    # otherwise we need to go to next pages
    # find the number of pages from the html
    soup = BeautifulSoup(source_html, "lxml")
    cursor_div = soup.find_all('nav', {'class': 'pages'})
    try:
        num_pages = len(cursor_div[0].find_all('li'))
    except IndexError:
        num_pages = 0

    # then iterate over all pages to extract all channels
    for i in range(num_pages):
        request_url = initial_request_url + '&p=' + str(i + 1)
        logger.debug("Lyzem request url {}".format(request_url))
        source_html = extract_html(request_url, javascript_enabled=False)
        page_channels = parse_lyzem_page(source_html)
        for channel in page_channels:
            if channel not in all_channels:
                all_channels.append(channel)
        if len(all_channels) >= limit:
            return all_channels[:limit]
    return all_channels
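The page-count extraction assumes a <nav class="pages"> element whose <li> children are the page links; a quick check of that parse on a static snippet (html.parser stands in for lxml here):

from bs4 import BeautifulSoup

html = '<nav class="pages"><li>1</li><li>2</li><li>3</li></nav>'
soup = BeautifulSoup(html, 'html.parser')
cursor_div = soup.find_all('nav', {'class': 'pages'})
num_pages = len(cursor_div[0].find_all('li'))
print(num_pages)  # 3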
Example #18
    def _create_service(self, container):
        docker = DockerCmd()

        infos = self.cfg.infos(container)
        docker.pull(infos.image)

        logger.debug("container infos: {}".format(infos.__dict__))
        res_limit_cpus = ""
        if infos.res_limit_cpus:
            res_limit_cpus = "--cpus {}".format(infos.res_limit_cpus)

        res_limit_memory = ""
        if infos.res_limit_memory:
            res_limit_memory = "--memory {}".format(infos.res_limit_memory)

        cmd = "--name {} --hostname {} --net='{}' -p {} --init --restart=always -e VNC_RESOLUTION={} {} {} -v {} --privileged -d {}".format(
            container, infos.hostname, infos.network_mode, ' -p '.join(
                infos.ports),
            infos.vnc_resolution,
            res_limit_cpus, res_limit_memory,
            ' -v '.join(infos.volumes), infos.image)
        docker.run(cmd)
        time.sleep(3)

        if not docker.isHealth(container):
            logger.error('Create {} failed.'.format(container))
            raise DockerCmdExecError()
Example #19
    def init_selenium(self, load_cookies=False) -> RemoteWebDriver:
        options = ChromeOptions()
        # automatically open the F12 devtools console, which makes it easier to inspect network requests
        options.add_argument('--auto-open-devtools-for-tabs')
        # also avoids webdriver=True; effective on Chrome versions below 79
        options.add_experimental_option('excludeSwitches',
                                        ['enable-automation'])
        # suggested on Stack Overflow as a way to avoid TimeoutException, but it does not seem to help
        options.add_argument("enable-features=NetworkServiceInProcess")
        browser = webdriver.Chrome(options=options)

        # prevent window.navigator.webdriver = True from exposing this as a webdriver-driven crawler
        script = '''
        Object.defineProperty(navigator, 'webdriver', {
            get: () => undefined
        })
        '''
        browser.execute_cdp_cmd("Page.addScriptToEvaluateOnNewDocument",
                                {"source": script})

        # must navigate to the matching domain before setting cookies
        browser.get(self.homepage)
        browser.set_page_load_timeout(30)
        if load_cookies:
            logger.debug('load cookies')
            with open(config[self.config_key]['cookie_file']) as f:
                cookies = json.load(f)
            for cookie in cookies:
                try:
                    browser.add_cookie(cookie)
                except Exception as e:
                    print(cookie)
                    raise e
        return browser
Example #20
    def step(self, state_idx: int, action_idx: int) -> Tuple[int, float]:
        """ Perform one step in the MDP """
        actions = self.get_valid_actions(state_idx)
        if action_idx < 0 or action_idx >= len(actions):
            raise ValueError(f"Invalid action index '{action_idx}'")

        # next state is the result of moving the cars between stations
        # and the requests / returns
        s1_returns = np.random.poisson(self.s1_ret_lambda)
        s1_requests = np.random.poisson(self.s1_req_lambda)
        s2_requests = np.random.poisson(self.s2_req_lambda)
        s2_returns = np.random.poisson(self.s2_ret_lambda)

        # TODO: review restrictions as this might yield invalid configurations!
        logger.debug(action_idx)
        logger.debug(actions)
        to_s2, to_s1 = actions[action_idx]
        s1 = min(max(s1_returns - s1_requests - to_s2 + to_s1, 0), self.max_s1)
        s2 = min(max(s2_returns - s2_requests - to_s1 + to_s2, 0), self.max_s2)
        next_state = self.states.index((s1, s2))

        # the reward is the earnings per car (10) - the cost of moving cars (2)
        reward = -2 * (to_s2 + to_s1) + 10 * (s1_requests + s2_requests)

        return next_state, reward
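A quick standalone check of the clamp-into-range pattern used for s1 and s2 above (the bounds are illustrative):

def clamp(x, lo, hi):
    # keep x within [lo, hi]
    return min(max(x, lo), hi)

assert clamp(-3, 0, 20) == 0
assert clamp(7, 0, 20) == 7
assert clamp(25, 0, 20) == 20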
Example #21
    def all_check(self):
        live = []
        try:
            for batch in self.batches:
                res = batch.check()
                if res:
                    live.extend(res)

            for one in self.onebyone:
                for url in one.url_list:

                    if one('checking ' + url, url).check_stream():
                        live.append(url)

                    if url != one.url_list[-1]:
                        logger.debug('taking a short break')
                        time.sleep(15)
        except IOError:
            logger.exception("IOError")
        finally:
            event_t = Event(TO_MODIFY)
            event_t.args = (live,)
            event_u = Event(UPLOAD)
            event_u.args = (live,)
            return event_u, event_t
Example #22
 def request(self, method, url, data=None, json=None):
     if isinstance(data, str):
         # parse the dict literal safely (requires `import ast`) instead of eval()
         data = ast.literal_eval(data)
     # build the full request URL
     url = config.get('URL', 'web_url') + url
     logger.debug("request url: {}".format(url))
     logger.debug("request data: {}".format(data))
     try:
         if method.lower() == 'get':
             res = self.session.request(method=method, url=url, params=data)
         elif method.lower() == 'post':
             if json:
                 res = self.session.request(method=method,
                                            url=url,
                                            json=json)
             else:
                 res = self.session.request(method=method,
                                            url=url,
                                            data=data)
         else:
             res = None
             logger.error("unsupported request method")
     except Exception as e:
         logger.error("request failed: {}".format(e))
         raise e
     return res
Example #23
    def send_image(self, groups, user, img_path):
        if not isinstance(groups, dict):
            logger.warning("invalid groups argument for send_image, a dict is required: %r", groups)
            return
        logger.debug("send_image: sending image, groups=%r", groups)
        qq_group = groups.get('qq_group', None)
        wechat_group = groups.get('wechat_group', None)
        email = groups.get('email', None)  # this design is a bit odd; keep it this way for now for uniformity

        params = {}
        params["user"] = user
        params["msg_type"] = "image"
        params["img_path"] = img_path
        if qq_group:
            params["group"] = qq_group
            self.send_qq_queue(params)
        if wechat_group:
            params["group"] = wechat_group
            self.send_wechat_queue(params)

        for bot in self.bots:  # bots holds the qqbot and wxbot instances
            # if bot.type == "qq" and qq_group:
            #     bot.send_image(qq_group,user,img_path)
            # if bot.type == "wechat" and wechat_group:
            #     bot.send_image(wechat_group,user,img_path)
            if bot.type == "email" and email:
                bot.send_image(email, user, img_path)
Example #24
def main(args):
    args = _parse_arguments(args)
    repo_root = common.get_repo_root(args.repo_root)
    cfg = config.load(repo_root, args.verbose)
    pom_content = pomcontentm.PomContent()
    if args.pom_description is not None:
        pom_content.description = args.pom_description
    if args.verbose:
        logger.debug("Global pom content: %s" % pom_content)

    mvn_install_info = maveninstallinfo.MavenInstallInfo(
        cfg.maven_install_paths)
    ws = workspace.Workspace(repo_root, cfg.excluded_dependency_paths,
                             cfg.all_src_exclusions, mvn_install_info,
                             pom_content)
    packages = argsupport.get_all_packages(repo_root, args.package)
    packages = ws.filter_artifact_producing_packages(packages)
    if len(packages) == 0:
        raise Exception(
            "Did not find any artifact producing BUILD.pom packages at [%s]" %
            args.package)
    spider = crawler.Crawler(ws, cfg.pom_template, args.verbose)
    result = spider.crawl(packages,
                          follow_monorepo_references=args.recursive,
                          force_release=args.force)

    if len(result.pomgens) == 0:
        logger.info(
            "No releases are required. pomgen will not generate any pom files. To force pom generation, use pomgen's --force option."
        )
    else:
        output_dir = _get_output_dir(args)

        for pomgen in result.pomgens:
            pom_dest_dir = os.path.join(output_dir, pomgen.bazel_package)
            if not os.path.exists(pom_dest_dir):
                os.makedirs(pom_dest_dir)

            # the goldfile pom is actually a pomgen metadata file, so we
            # write it using the mdfiles module, which ensures it goes
            # into the proper location within the specified bazel package
            if args.pom_goldfile:
                pom_content = pomgen.gen(pom.PomContentType.GOLDFILE)
                pom_goldfile_path = mdfiles.write_file(
                    pom_content, output_dir, pomgen.bazel_package,
                    mdfiles.POM_XML_RELEASED_FILE_NAME)
                logger.info("Wrote pom goldfile to [%s]" % pom_goldfile_path)
            else:
                pom_content = pomgen.gen(pom.PomContentType.RELEASE)
                pom_path = os.path.join(pom_dest_dir, "pom.xml")
                _write_file(pom_path, pom_content)
                logger.info("Wrote pom file to [%s]" % pom_path)
                for i, companion_pomgen in enumerate(
                        pomgen.get_companion_generators()):
                    pom_content = companion_pomgen.gen(
                        pom.PomContentType.RELEASE)
                    pom_path = os.path.join(pom_dest_dir,
                                            "pom_companion%s.xml" % i)
                    _write_file(pom_path, pom_content)
                    logger.info("Wrote companion pom file to [%s]" % pom_path)
Example #25
    def run(self):
        if self.should_stop:  # early exit
            return 1

        context = zmq.Context()
        socket = context.socket(zmq.SUB)

        logger.info(f"subscribing to events from {self.socket_uri}…")
        socket.connect(self.socket_uri)
        for event in self.events:
            logger.debug(f".. {event}")
            socket.setsockopt_string(zmq.SUBSCRIBE, event)

        while not self.should_stop:
            try:
                received_string = socket.recv_string(zmq.DONTWAIT)
                self.handle_broadcast_event(received_string)
            except zmq.Again:
                pass

            if self.should_poll:
                self.sync_tasks_and_containers()
                self.poll()
            else:
                self.sleep()
Example #26
def list_result_folders(root_folder: str) -> Generator[str, None, None]:
    for folder_name in os.listdir(root_folder):
        path = os.path.join(root_folder, folder_name)
        if not os.path.isdir(path):
            logger.debug("'%s' is not a directory, skipping", folder_name)
            continue
        yield folder_name
Example #27
def cfr(path, dirname):
    """
	calls the cfr decompiler from command line
	"""
    process = subprocess.Popen([
        "java", "-jar", common.rootDir + "/lib/cfr_0_96.jar", path,
        "--outputdir", dirname + "1"
    ],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
    try:
        while True:
            line = process.stdout.readline()
            if not line:
                break
            if "Processing" in line:
                common.counter1 = common.counter1 + 1
                pub.sendMessage('decompile',
                                cfr=round(common.counter1 * 100 /
                                          common.count))
                pub.sendMessage('decompile',
                                jdcore=round(common.counter1 * 100 /
                                             common.count))
    except Exception as e:
        logger.debug(str(e))
Example #28
def parse_tcp_timing(in_dir: str,
                     out_dir: str,
                     scenarios: Dict[str, Dict],
                     config_cols: List[str],
                     multi_process: bool = False) -> pd.DataFrame:
    """
    Parse all tcp timing results.
    :param in_dir: The directory containing the measurement results.
    :param out_dir: The directory to save the parsed results to.
    :param scenarios: The scenarios to parse within the in_dir.
    :param config_cols: The column names for columns taken from the scenario configuration.
    :param multi_process: Whether to allow multiprocessing.
    :return: A dataframe containing the combined results from all scenarios.
    """

    logger.info("Parsing tcp timing results")

    df_cols = [*config_cols, 'run', 'con_est', 'ttfb']
    df_tcp_timing = __parse_slice(__parse_tcp_timing_from_scenario, in_dir,
                                  [*scenarios.items()], df_cols, 'tcp',
                                  'timing')

    logger.debug("Fixing tcp timing data types")
    df_tcp_timing = fix_dtypes(df_tcp_timing)

    logger.info("Saving tcp timing data")
    df_tcp_timing.to_pickle(os.path.join(out_dir, 'tcp_timing.pkl'))
    with open(os.path.join(out_dir, 'tcp_timing.csv'), 'w+') as out_file:
        df_tcp_timing.to_csv(out_file)

    return df_tcp_timing
Example #29
 def setUp(self):  # operations that should run before every test method go into setUp
     logger.debug('this is an instance method')
     self.login_data = {'mobilephone': '15810447656', 'pwd': '123456'}
     self.login_resp = requests.get(
         'http://test.lemonban.com/futureloan/mvc/api/member/login',
         params=self.login_data)
     logger.info(self.login_resp.text)
Example #30
    def work (unit):
        data = []
        if isinstance(unit, Post):
            if not unit.image:
                logger.warning('unit %s is not an image.', unit)
                return 

            filename = get_filename (
                directory, unit, keep_names
            )

            if not os.path.exists(filename):
                logger.info('downloading %s', unit.image)
                image_data = unit.image.download(bypass_cache=True)

                return filename, image_data

            logger.debug('%s already downloaded', filename)
            return

        logger.info('working %r', unit)
        for e in unit.process():
            value = work(e)
            if value is not None:
                data.append(value)
            #pool.push(work, e)

        return data
Example #31
def scrape_images (directory, keep_names, *links):
    """
    Downloads images from links.
    """
    pool = Pool(num_threads=parameters.num_threads)

    def work (unit):
        if isinstance(unit, Post):
            if not unit.image:
                return

            filename = get_filename (
                directory, unit, keep_names
            )

            if not os.path.exists(filename):
                logger.info('downloading %s', unit.image)
                image_data = unit.image.download(bypass_cache=True)

                return filename, image_data

            logger.debug('%s already downloaded', filename)

            return

        logger.info('working %r', unit)
        for e in unit.process():
            pool.push(work, e)

    for link in map(classify, links):
        pool.push(work, link)
    pool.join()

    logger.info('Join complete.')

    downloaded = pool.get_results()
    pool.close()

    logger.info('Setting up directories')

    directories = {os.path.split(filename)[0] for filename, _ in downloaded}

    for dir_path in directories:
        if not os.path.exists(dir_path):
            logger.debug('making directory %s', dir_path)
            os.makedirs(dir_path)

    logger.info('Writing images to disk.')

    for filename, image_data in downloaded:
        with open(filename, 'wb') as outfile:
            outfile.write(image_data)
Example #32
async def update_loop(screen: METARScreen):
    """
    Handles updating the METAR data in the background
    """
    while True:
        # logger.debug(f'{int(time.time())} {screen.update_time}')
        if time.time() >= screen.update_time:
            logger.debug('Auto update')
            screen.refresh_data()
        await asyncio.sleep(10)
Пример #33
0
def delete_duplicate(url_expurl_dic):
    urls = url_expurl_dic.values()
    for url in urls:
        if url_db.url_in_db(url):
            logger.debug('delete by duplicate URL: {0}'.format(url))
            return False

    now = datetime.datetime.now()
    for url in urls:
        url_db.add_url(url, now)

    return True
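The function depends on a url_db helper that is not shown; a minimal in-memory stand-in exposing the same two calls (hypothetical, for illustration only):

import datetime

class _UrlDB:
    # in-memory stand-in for the real url_db module used above
    def __init__(self):
        self._seen = {}

    def url_in_db(self, url):
        return url in self._seen

    def add_url(self, url, when):
        self._seen[url] = when

url_db = _UrlDB()
url_db.add_url('http://example.com', datetime.datetime.now())
print(url_db.url_in_db('http://example.com'))  # True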
Example #34
def ng_word_check(tweet, url_expurl_dic):
    values = url_expurl_dic.values()
    user = tweet['user']
    at_username = '@' + user['screen_name']
    nickname = user['name']
    for ngw in conf.ng_word_list():
        if ngw in tweet['text'] or ngw in at_username or ngw in nickname:
            logger.debug('delete by NG word in text: {0}'.format(ngw))
            return False
        for v in values:
            if ngw in v:
                logger.debug('delete by NG word in URL: {0}'.format(ngw))
                return False

    return True
Example #35
def main():
    """
    Program main handles METAR data handling and user interaction flow
    """
    logger.debug('Booting')
    screen = METARScreen.from_session(common.load_session(), cfg.size)
    screen.draw_loading_screen()
    screen.refresh_data(force_main=True)
    loop = asyncio.get_event_loop()
    coros = [
        update_loop(screen),
        input_loop(screen)
    ]
    logger.debug('Setup complete')
    loop.run_until_complete(asyncio.wait(coros, return_when=asyncio.FIRST_COMPLETED))
Example #36
def procyon(path,dirname):
	"""
	calls the procyon decompiler from command line
	"""
	process = subprocess.Popen(["java","-jar", common.rootDir + "/lib/procyon/procyon-decompiler-0.5.29.jar", path, "-o", dirname+"2"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
	try:
		while True:
			line = process.stdout.readline()
			if not line:
				break
			if "Decompiling" in line:
				common.counter2 = common.counter2 + 1
				pub.sendMessage('decompile', procyon=round(common.counter2*100/common.count))
	except Exception as e:
		logger.debug(str(e))
Example #37
def cfr(path,dirname):
	"""
	calls the cfr decompiler from command line
	"""
	process = subprocess.Popen(["java","-jar", common.rootDir + "/lib/cfr_0_96.jar", path, "--outputdir", dirname+"1"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
	try:
		while True:
			line = process.stdout.readline()
			if not line:
				break
			if "Processing" in line:
				common.counter1 = common.counter1 + 1
				pub.sendMessage('decompile', cfr=round(common.counter1*100/common.count))
				pub.sendMessage('decompile', jdcore=round(common.counter1*100/common.count))
	except Exception as e:
		logger.debug(str(e))
Example #38
def verifyCallback(connection, x509, errnum, errdepth, ok):
    user = x509.get_subject().commonName
    user = user.lower().strip()


    if not ok:
        # if we haven't seen CN before (username),
        # add them to our CLIENT_CERTS_FILE and return True
        with open(CLIENT_CERTS_FILE, 'r') as f:
            trusted_certs_str = f.read()

        # Ugly hack: I couldn't get M2Crypto or pyOpenSSL to either
        # 1) trust a directory of certs, or 2) load a single file with
        # multiple certs into an array of X509 objects.
        END_CERT_SENTINEL = '-----END CERTIFICATE-----\n'
        for cert_str in trusted_certs_str.split(END_CERT_SENTINEL):
            if cert_str.strip() == '':
                break

            cert_str += END_CERT_SENTINEL

            logger.trace('Loading certificate: \n%s' % cert_str)

            cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert_str)
            cert_user = cert.get_subject().commonName

            logger.debug("Cert Store: User '%s' %s" % (cert_user, cert_fingerprint(cert)))
            
            if cert_user.lower().strip() == user.lower().strip():
                # User already in cert store, error
                logger.error("User '%s' already in cert store; rejecting" % user)
                logger.debug("User '%s' == '%s'" % (user, cert_user))
                logger.debug("Digest %s , %s already in store" % \
                             (cert_fingerprint(x509), cert_fingerprint(cert)))

                return False

        # User not already in cert store; add them
        x509_str = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, x509)
        with open(CLIENT_CERTS_FILE, 'a') as f:
            f.write(x509_str)

        # Reload the cert store
        connection.get_context().load_verify_locations(CLIENT_CERTS_FILE, None)
        
        logger.info("Added user '%s' to cert store, fingerprint %s" % \
                    (user, cert_fingerprint(x509)))

        # This verify function will get called again with ok == True
        return True

    game_listener = connection.get_app_data()
    if game_listener:
        game_listener.verificationComplete(user, cert_fingerprint(x509))
    logger.debug("User '%s' authenticated (%s)" % (user, cert_fingerprint(x509)))
    return True
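A minimal sketch of wiring this callback into a pyOpenSSL server context; the cert and key paths are placeholders, and CLIENT_CERTS_FILE is the trust store used above:

from OpenSSL import SSL

ctx = SSL.Context(SSL.TLSv1_2_METHOD)
ctx.use_certificate_file('server.crt')   # placeholder path
ctx.use_privatekey_file('server.key')    # placeholder path
ctx.set_verify(SSL.VERIFY_PEER, verifyCallback)
ctx.load_verify_locations(CLIENT_CERTS_FILE)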
Example #39
 def _get_playlist(self, params={}):
     params_tmp = self.douban_fm_default_params
     params_tmp.update(params)
     params_data = urllib.urlencode(params_tmp)
     url = '?'.join(('http://%s%s' % (self.douban_fm_host, self.douban_fm_playlist_path), params_data))
     l.debug('load playlist: %s' % url)
     res = self.http_session.get(url)
     if 'start="deleted"' in (res.headers.get('set-cookie') or ''):
         self.http_session.cookies.pop('start')
     cookies_text = '; '.join(['='.join(kv) for kv in self.http_session.cookies.items()])
     with open(self.COOKIE_PATH,'w') as fh:
         fh.write(cookies_text)
     res_json = json.loads(res.text)
     if int(res_json.get('r')) == 0:
         return res_json['song']
     elif int(res_json.get('r')) == 1:
         l.warning('cannot parse response json:\n {err}'.format(**res_json))
         return []
Example #40
 def catchStateChanged(self, new_state, old_state):
     ''' 
     possible state sequences:
     [init]: loading -> [next]
     [next]: stop -> paused -> playing -> stop(*)
     [skip],[trash]: playing -> paused -> stop -> pause -> playing(*)
     '''
     l.debug(u'old_state: {0}, new_state: {1}'.format(phonon_state_label.get(old_state), phonon_state_label.get(new_state)))
     #http://harmattan-dev.nokia.com/docs/library/html/qt4/phonon.html
     if new_state == Phonon.PlayingState:
         self.set_ui_state(GUIState.Playing)
     elif new_state == Phonon.PausedState:
         self.set_ui_state(GUIState.Paused)
     elif new_state == Phonon.StoppedState:
         if old_state == Phonon.PlayingState:#auto next song
             self.next_song()
     elif new_state == Phonon.ErrorState:
         l.error('error while playing back')
         self.next_song()
Example #41
 def __init__(self, station: str, size: (int, int), inverted: bool):
     logger.debug('Running init')
     try:
         self.metar = avwx.Metar(station)
     except avwx.exceptions.BadStation:
         self.metar = avwx.Metar('KJFK')
     self.ident = common.station_to_ident(station)
     self.old_ident = copy(self.ident)
     self.width, self.height = size
     self.win = pygame.display.set_mode(size)
     self.c = Color()
     self.inverted = inverted
     if inverted:
         self.c.BLACK, self.c.WHITE = self.c.WHITE, self.c.BLACK
     # Hide the mouse for touchscreen input; disable this when testing without a touchscreen
     if cfg.on_pi:
         pygame.mouse.set_visible(False)
     self.reset_update_time()
     self.buttons = []
     logger.debug('Finished running init')
Example #43
 def init(self, config):
     # If this is an integer, make it a length 1 list
     if type(config) is int:
         self.weights = [config]
     # If this is a list, use it as is
     elif type(config) is list:
         # Make sure all the items are integers first
         if not all([type(x) is int for x in config]):
             logger.critical(
                 'Malformed weight list provided: {0}'.format(config)
             )
         else:
             self.weights = config
     # If it's a dict, make a range from the key to the value
     elif type(config) is dict and len(config) == 1:
         (lower, upper) = config.popitem()
         # Add 1 to account for Python's range bounding
         self.weights = range(lower, upper+1)
     else:
         logger.critical(
             'Malformed weight options provided: {0}'.format(config)
         )
     logger.debug('Using weight list: {0}'.format(self.weights))
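A hypothetical usage sketch of the three accepted config shapes, assuming opt is an instance of the class this method belongs to:

opt.init(3)          # single int     -> weights [3]
opt.init([1, 2, 3])  # list of ints   -> weights [1, 2, 3]
opt.init({2: 5})     # one-entry dict -> weights range(2, 6), i.e. 2..5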
Example #44
parser.add_argument('-p', '--password', help='encryption password (user is prompted if not present)', nargs=1, required=False)
parser.add_argument('-o', '--output', help='output filename (default "./passport-page-[symbol].png")', nargs=1, required=False)
parser.add_argument('-c', '--comment', help='add an optional comment', nargs=1, required=False)
parser.add_argument('--compact', help='enable compact mode', action='store_true', required=False)
parser.add_argument('-v', '--verbose', help='show verbose output', action='store_true', required=False)
args = vars(parser.parse_args())

if args['verbose']:
    logger.setLevel(logging.DEBUG)
else:
    logger.setLevel(logging.INFO)


# currency symbol
symbol = args['symbol'][0].upper()
logger.debug('SYMBOL {}'.format(symbol))


# currency name
cname = None
explorer = None
n = 0
with open(info_file) as fp:
    lines = fp.readlines()
    for line in lines:
        n += 1
        sym = None
        name = None
        link = None

        try:
Example #45
    def __init__(self,start_url=None,debug=False):
        self._debug = debug
        self.ROOT_PATH = os.path.abspath(os.path.dirname(__file__) + '/../')
        self.COOKIE_PATH = os.path.join(self.ROOT_PATH, '.cookie')
        http_cookies = {}
        if os.path.exists(self.COOKIE_PATH):
            with open(self.COOKIE_PATH) as fh:
                cookies_text = fh.read().strip()
                try:
                    http_cookies = dict(cookie.split('=',1) for cookie in cookies_text.split('; '))
                except Exception:
                    pass
        
        self.douban_fm_host = 'douban.fm'
        self.douban_fm_hot_channel_path = '/j/explore/hot_channels'
        self.douban_fm_playlist_path = '/j/mine/playlist'
        self.douban_fm_channel_detail_path = '/j/explore/channel_detail'
        self.douban_fm_channel_name= {0: u'私人兆赫', -3: u'红心兆赫'}
        self.douban_fm_default_params = {
                'type': 'n'
                , 'sid': ''
                , 'channel':0
                , 'pb': '192'
                , 'pt': 0.0
                , 'context':''
                , 'from':'mainsite'
                , 'kbps':'192'
                , 'r':'da01a52428'
                }
        
        self.http_session = requests.session()
        pre_request_url = start_url or ('http://%s/' % self.douban_fm_host)
        # XXX fix cross-domain cookie issue in a better way
        while urlparse.urlparse(pre_request_url).netloc != 'douban.fm':
            res = self.http_session.head(pre_request_url)
            pre_request_url = res.headers.get('location')

        self.http_session.cookies.update(http_cookies)
        res = self.http_session.get(pre_request_url)
        try:
            soup = BeautifulSoup.BeautifulSoup(res.text)
            self.username = soup.find(id='user_name').text
        except Exception:
            self.username = None
        url_params = urlparse.parse_qs(urlparse.urlparse(pre_request_url).query)
        start = url_params.get('start')
        context = url_params.get('context')
        cid = url_params.get('cid')
        if start:
            start = start[0]
            self.douban_fm_default_params['channel'] = int(start.split('g')[-1])
        if context is not None:
            context = context[0]
            self.douban_fm_default_params['context'] = context
            ctx_dict = dict(map(lambda kv: tuple(kv.split(':')), 
                    context.split('|')))
            cid_ = ctx_dict.get('channel')
            if cid_ is not None:
                self.douban_fm_default_params['channel'] = int(cid_)
        if cid:
            cid = cid[0]
            self.douban_fm_default_params['channel'] = int(cid)
        self.channel_id = int(self.douban_fm_default_params['channel'])

        self.current_playlist = []
        self.current_cur = -1

        self.timeout = 3
        l.debug(u'username:{username}, channel:{channel}'.format(username=self.username, channel=self.channel_name))
Example #46
def edit_hosts_depends(waptconfigfile,hosts_list,
        append_depends=[],
        remove_depends=[],
        append_conflicts=[],
        remove_conflicts=[],
        sign_certs=None,
        sign_key=None,
        key_password=None,
        wapt_server_user=None,wapt_server_passwd=None,
        cabundle = None,
        ):
    """Add or remove packages from host packages

    Args:

    Returns:
        dict: { updated: of uuid of machines actually updated
                unchanged : list of uuid skipped because of no change needed
                discarded : list of uuid discarded due to errors}

    >>> edit_hosts_depends('c:/wapt/wapt-get.ini','htlaptop.tranquilit.local','toto','tis-7zip','admin','password')
    """
    if sign_certs is None:
        sign_bundle_fn = inifile_readstring(waptconfigfile,u'global',u'personal_certificate_path')
        sign_bundle = SSLCABundle(sign_bundle_fn)
        sign_certs = sign_bundle.certificates()
        # we assume a unique signer.
        if cabundle is None:
            cabundle = sign_bundle

    if not sign_certs:
        raise Exception(u'No personal signer certificate found in %s' % sign_bundle_fn)

    if sign_key is None:
        sign_key = sign_certs[0].matching_key_in_dirs(private_key_password=key_password)

    try:
        import waptconsole
        progress_hook = waptconsole.UpdateProgress
    except ImportError as e:
        def print_progress(show=False,n=0,max=100,msg=''):
            if show:
                print('%s %s/%s\r' % (msg,n,max),end='')
            else:
                if not msg:
                    msg='Done'
                print("%s%s"%(msg,' '*(80-len(msg))))
        progress_hook = print_progress

    hosts_list = ensure_list(hosts_list)

    progress_hook(True,0,len(hosts_list),'Loading %s hosts packages' % len(hosts_list))

    host_repo = WaptHostRepo(name='wapt-host',host_id=hosts_list,cabundle = cabundle)
    host_repo.load_config_from_file(waptconfigfile)
    total_hosts = len(host_repo.packages())
    discarded_uuids = [p.package for p in host_repo.discarded]

    append_depends = ensure_list(append_depends)
    remove_depends = ensure_list(remove_depends)
    append_conflicts = ensure_list(append_conflicts)
    remove_conflicts = ensure_list(remove_conflicts)

    packages = []
    discarded = []
    unchanged = []

    progress_hook(True,0,len(hosts_list),'Editing %s hosts' % len(hosts_list))
    i = 0
    try:
        for host_id in hosts_list:
            i+=1
            # don't change discarded packages.
            if host_id in discarded_uuids:
                discarded.append(host_id)
            else:
                host = host_repo.get(host_id)
                if host is None:
                    host = PackageEntry(package=host_id,section='host')

                if progress_hook(True,i,len(hosts_list),'Editing %s' % host.package):
                    break

                logger.debug(u'Edit host %s : +%s -%s'%(
                    host.package,
                    append_depends,
                    remove_depends))


                depends = host.depends
                conflicts = host.conflicts

                conflicts = add_to_csv_list(conflicts,append_conflicts)
                conflicts = remove_from_csv_list(conflicts,remove_conflicts)
                depends = remove_from_csv_list(depends,ensure_list(conflicts))

                depends = add_to_csv_list(depends,append_depends)
                depends = remove_from_csv_list(depends,remove_depends)
                conflicts = remove_from_csv_list(conflicts,ensure_list(depends))

                if depends != host.depends or conflicts != host.conflicts:
                    host.depends = depends
                    host.conflicts = conflicts
                    host.inc_build()
                    host_file = host.build_management_package()
                    host.sign_package(sign_certs,sign_key)
                    packages.append(host)
                else:
                    unchanged.append(host.package)

        # upload all in one step...
        progress_hook(True,3,3,'Upload %s host packages' % len(packages))
        server = WaptServer().load_config_from_file(waptconfigfile)
        server.upload_packages(packages,auth=(wapt_server_user,wapt_server_passwd),progress_hook=progress_hook)
        return dict(updated = [p.package for p in packages],
                    discarded = discarded,
                    unchanged = unchanged)

    finally:
        logger.debug('Cleanup')
        try:
            i = 0
            for s in packages:
                i+=1
                progress_hook(True,i,len(packages),'Cleanup')
                if os.path.isfile(s.localpath):
                    os.remove(s.localpath)
            progress_hook(False)
        except WindowsError as e:
            logger.critical('Unable to remove temporary directory %s: %s'% (s,repr(e)))
            progress_hook(False)
Example #47
def decompile(path):
	"""
	Converts DEX to JAR (containing class files), then converts the class files to near-original Java code using 3 different decompilers, selecting the best available decompiled output
	"""
	common.pathToDEX = path
	pathToDex2jar = common.rootDir + "/lib/dex2jar/dex2jar.sh"
	sp = subprocess.Popen([pathToDex2jar, common.pathToDEX], shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
	output, error = sp.communicate()
	common.pathToJar = common.pathToDEX.rsplit(".",1)[0] + "_dex2jar.jar"
	dirname, extension = common.pathToJar.rsplit(".",1)
	zf = zipfile.ZipFile(common.pathToJar)

	#Total number of class files that need to be decompiled
	total_files = len(zf.namelist())
	report.write("totalfiles", total_files)
	common.count = len([s for s in zf.namelist() if ((".class" in s) and ("$" not in s))])

	pub.subscribe(decompiler_update, 'decompile')

	thread0 = Process(name='clear', target=clear, args = ())
	thread1 = Process(name='jdcore', target=jdcore, args = (zf.filename,dirname))
	thread2 = Process(name='cfr', target=cfr, args = (zf.filename,dirname))
	thread3 = Process(name='procyon', target=procyon, args = (zf.filename,dirname))

	thread0.start()
	thread0.join()

	progressbar1.start()
	progressbar2.start()
	progressbar3.start()


	thread1.start()
	thread2.start()
	thread3.start()
	thread1.join(0)
	thread2.join(0)
	thread3.join(0)

	with common.term.cbreak():
		val = None
		while val not in (u'c', u'C'):
			with common.term.location(0,common.term.height-3):
				print "Decompilation may hang/take too long (this usually happens when the source is obfuscated)."
				print "At any time, " + common.term.bold_underline_red_on_white('Press C to continue') + " and QARK will attempt to run SCA on whatever was decompiled."
				val = common.term.inkey(timeout=1)
				if not (thread1.is_alive() or thread2.is_alive() or thread3.is_alive()):
					break

	if thread1.is_alive():
		thread1.terminate()
	if thread2.is_alive():
		thread2.terminate()
	if thread3.is_alive():
		thread3.terminate()

	#Go back to the bottom of the screen
	with common.term.location(0,common.term.height):
		print ""

	g1 = grep_1(dirname, "// Byte code:")
	g2 = grep_1(dirname+"1", "// This method has failed to decompile.")
	g3 = grep_1(dirname+"2", "// This method could not be decompiled.")

	#print list(set(g1) - set(g2))
	logger.info("Trying to improve accuracy of the decompiled files")
	restored = 0
	try:
		for filename in g1:
			relative_filename = str(filename).split(dirname)[1]
			if any(relative_filename in s for s in g2):
				if any(relative_filename in s for s in g3):
					logger.debug("Failed to reconstruct: " + relative_filename)
				else:
					shutil.copy(dirname+"2"+relative_filename, filename)
					restored = restored +1
			else:
				shutil.copy(dirname+"1"+relative_filename, filename)
				restored = restored +1
	except Exception as e:
		print str(e)
	report.write("restorestats","Restored " + str(restored) + " file(s) out of " + str(len(g1)) + " corrupt file(s)")
	logger.info("Restored " + str(restored) + " file(s) out of " + str(len(g1)) + " corrupt file(s)")
	logger.debug("Deleting redundant decompiled files")
	try:
		shutil.rmtree(dirname+"1")
		logger.debug("Deleted " + dirname+"1")
		shutil.rmtree(dirname+"2")
		logger.debug("Deleted " + dirname+"2")
	except Exception as e:
		logger.debug("Unable to delete redundant decompiled files (no impact on scan results): " + str(e))
Example #48
 def __call__(self):
     info("this is info slf4j")
     error("error=%s" % (grinder.processNumber))
     warn("warn=%s" % (grinder.processNumber))
     debug("debug=%s" % (grinder.processNumber))
Example #49
def add_ads_groups(waptconfigfile,
        hostdicts_list,
        sign_certs=None,
        sign_key=None,
        key_password=None,
        wapt_server_user=None,wapt_server_passwd=None,
        cabundle = None):

    if sign_certs is None:
        sign_bundle_fn = inifile_readstring(waptconfigfile,u'global',u'personal_certificate_path')
        sign_bundle = SSLCABundle(sign_bundle_fn)
        sign_certs = sign_bundle.certificates()
        # we assume a unique signer.
        if cabundle is None:
            cabundle = sign_bundle

    if not sign_certs:
        raise Exception(u'No personal signer certificate found in %s' % sign_bundle_fn)

    if sign_key is None:
        sign_key = sign_certs[0].matching_key_in_dirs(private_key_password=key_password)

    main_repo = WaptRemoteRepo(name='wapt',cabundle = cabundle)
    main_repo.load_config_from_file(waptconfigfile)

    host_repo = WaptHostRepo(name='wapt-host',host_id=[h['uuid'] for h in hostdicts_list],cabundle = cabundle)
    host_repo.load_config_from_file(waptconfigfile)

    total_hosts = len(host_repo.packages())
    discarded_uuids = [p.package for p in host_repo.discarded]


    try:
        import waptconsole
        progress_hook = waptconsole.UpdateProgress
    except ImportError as e:
        def print_progress(show=False,n=0,max=100,msg=''):
            if show:
                print('%s %s/%s\r' % (msg,n,max),end='')
            else:
                if not msg:
                    msg='Done'
                print("%s%s"%(msg,' '*(80-len(msg))))
        progress_hook = print_progress

    packages = []
    discarded = []
    unchanged = []

    try:
        progress_hook(True,0,len(hostdicts_list),'Editing %s hosts' % len(hostdicts_list))
        i = 0
        for h in hostdicts_list:
            i += 1
            try:
                host_package = None
                host_id = h['uuid']
                hostname = h['computer_name']
                groups = get_computer_groups(hostname)
                host_package = host_repo.get(host_id,PackageEntry(package=host_id,section='host'))
                if progress_hook(True,i,len(hostdicts_list),'Checking %s' % host_package.package):
                    break

                wapt_groups = ensure_list(host_package['depends'])
                additional = [group for group in groups if not group in wapt_groups and (main_repo.get(group,None) is not None)]
                if additional:
                    logger.info(u'Adding %s to %s' % (','.join(additional),host_package.package))
                    if progress_hook(True,i,len(hostdicts_list),'Editing %s' % host_package.package):
                        break
                    wapt_groups.extend(additional)
                    host_package.depends = ','.join(wapt_groups)
                    host_package.inc_build()
                    host_file = host_package.build_management_package()
                    host_package.sign_package(sign_certs,sign_key)
                    packages.append(host_package)
                else:
                    unchanged.append(host_package.package)
            except Exception as e:
                if host_package:
                    discarded.append(host_package.package)
                logger.critical(u'Discarding because %s' % ensure_unicode(e))

        # upload all in one step...
        progress_hook(True,3,3,'Upload %s host packages' % len(packages))
        server = WaptServer().load_config_from_file(waptconfigfile)
        server.upload_packages(packages,auth=(wapt_server_user,wapt_server_passwd),progress_hook=progress_hook)
        return dict(updated = packages,
                    discarded = discarded,
                    unchanged = unchanged)

    finally:
        logger.debug('Cleanup')
        try:
            i = 0
            for s in packages:
                i+=1
                progress_hook(True,i,len(packages),'Cleanup')
                if os.path.isfile(s.localpath):
                    os.remove(s.localpath)
            progress_hook(False)
        except WindowsError as e:
            logger.critical('Unable to remove temporary directory %s: %s'% (s,repr(e)))
            progress_hook(False)
Example #50
def quit():
    logger.debug('Quit')
    if cfg.shutdown_on_exit:
        system('shutdown -h now')
    sys.exit()