Ejemplo n.º 1
0
 def __init_json_data(self):
     """Load the node-position JSON file, creating an empty one on first run.

     Bug fix: previously ``self.node_pos_data`` was only assigned when the
     file already existed, so a first run left the attribute out of sync
     with the (empty) file just written.
     """
     if not os.path.exists(self.__node_pos_path):
         util.write_json_file(file_path=self.__node_pos_path, data={})
         logging.info("创建文件 - %s" % self.__node_pos_path)
         # Mirror the freshly written empty file so the attribute is
         # always valid after this method returns.
         self.node_pos_data = {}
     else:
         self.node_pos_data = util.read_json_file(
             file_path=self.__node_pos_path)
Ejemplo n.º 2
0
 def update(self, updater: Updater):
     """Poll the data source and broadcast new vaccination numbers.

     :param updater: telegram ``Updater`` whose bot sends the messages.
     """
     logging.info("update-service update")
     self.data_handler.update()
     # Compare only the first whitespace-separated token (the date part)
     # of the last-updated stamp to decide whether new data arrived.
     if self.last_update["vaccinationsLastUpdated"].split(
     )[0] != self.data_handler.update_info["vaccinationsLastUpdated"].split(
     )[0]:
         logging.info("new Data available")
         # Persist the new marker so a restart does not re-send the update.
         self.last_update = self.data_handler.update_info.copy()
         util.write_json_file(self.last_update, "last-update.json")
         _, msg = self.message_generator.gen_text("numbers")
         msg = strings["auto-update-text"].format(msg)
         update_notice = self.__get_update_notice()
         # Notify every chat subscribed to the "zahlen" (numbers) topic.
         for chat_id in self.subscriptions.keys():
             if "zahlen" in self.subscriptions[chat_id]:
                 try:
                     updater.bot.send_message(chat_id,
                                              msg,
                                              parse_mode=ParseMode.HTML)
                     if update_notice is not None:
                         updater.bot.send_message(chat_id,
                                                  update_notice,
                                                  parse_mode=ParseMode.HTML)
                     logging.info("sent update to {}".format(chat_id))
                 except Exception as e:
                     # A failed send (e.g. user blocked the bot) must not
                     # abort the broadcast to the remaining subscribers.
                     logging.error(e)
Ejemplo n.º 3
0
def main_csv_reader(args):
    """Run a coffee-roulette matching session from a CSV participant list.

    :param args: namespace with ``path_to_coffee`` (CSV of participants)
        and ``matched_json`` (optional path to a previous matches file).
    :raises ValueError: when the matched_people JSON cannot be processed.
    """
    path_to_coffee = args.path_to_coffee
    path_to_matched = args.matched_json
    all_people_list = flat_list(list(read_csv_file(path_to_coffee)))
    matched_in_this_session = []
    error = False

    if path_to_matched:
        try:
            matched_people_json = read_json_file(path_to_matched)
            tuple_list = create_tuple_list(all_people_list,
                                           matched_people_json)
            sorted_people_list = sort_tuple_list(tuple_list)
        except Exception as exc:
            # Bug fix: the original did ``raise ('...')`` which raises a
            # TypeError ("exceptions must derive from BaseException") and
            # hides both the message and the real cause.
            raise ValueError(
                'Only use the program generated matched_people.json file'
            ) from exc
    else:
        write_json_file()
        matched_people_json = read_json_file('matched_people.json')
        sorted_people_list = all_people_list

    unmatched_people = []

    for person in sorted_people_list:
        if person in matched_in_this_session:
            continue
        individual_match_list = invidual_preproc(person, all_people_list,
                                                 matched_people_json,
                                                 matched_in_this_session)
        if not individual_match_list:
            # Nobody left for this person to be paired with.
            unmatched_people.append(person)
            continue
        matched_pair = coffee_roulette(person, individual_match_list)
        if matched_pair is None:
            error = True
            break
        # extend() replaces the inner loop that shadowed ``person``.
        matched_in_this_session.extend(matched_pair)

    if error is False:
        create_today_matched(matched_in_this_session)
        if unmatched_people:
            create_today_unmatched(unmatched_people)

        updated_json = update_current_json(matched_people_json,
                                           matched_in_this_session)
        summary = "\n{} Matches".format(date.today())
        summary = create_matched_people_string(matched_in_this_session,
                                               summary)
        summary_message, alone = make_summary(matched_in_this_session,
                                              unmatched_people, summary, "")
        summary += alone
        write_json_file(updated_json)
        write_txt_file(summary)
        print(summary_message)
Ejemplo n.º 4
0
    def __init_robots(self):
        """Log in (or create) the robot accounts and persist their uids.

        Populates ``self.robot_object_list`` with successfully logged-in
        robots, merges the uid list into the per-environment robots JSON
        file and records the session under the "last_load" key.
        """
        value_count = 0
        if self.__start_uid > 0:
            # Explicit start uid: log in a consecutive uid range.
            uid_list = list()
            # Idiom fix: iterate the range directly instead of the
            # original redundant ``[uid for uid in range(...)]`` wrapper.
            for uid in range(self.__start_uid,
                             self.__start_uid + self.__robots_num):
                player = Robot(sid=self.__sid, uid=uid, ksid=self.__sid)
                if player.has_login:
                    self.robot_object_list.append(player)
                    uid_list.append(uid)
                    value_count += 1
                else:
                    logging.error("%s login failed" % uid)
        else:
            # No start uid: create fresh accounts (uid=0 means "new").
            uid_list = []
            robot_count = 0
            while robot_count < self.__robots_num:
                player = Robot(sid=self.__sid, uid=0, ksid=self.__sid)
                if player.has_login:
                    self.robot_object_list.append(player)
                    uid_list.append(player.uid)
                    value_count += 1
                else:
                    logging.error("create account failed")
                robot_count += 1
        logging.info("login success num: %s ; total num : %s" %
                     (value_count, self.__robots_num))
        try:
            self.__robots_data = read_json_file(
                file_path=config.robots_json_data_path)
        except FileNotFoundError:
            # First run: seed the structure for this environment/sid.
            self.__robots_data[str(config.okc_environment)] = {
                str(self.__sid): uid_list
            }
        logging.info("robots_data : %s" % self.__robots_data)
        try:
            # Merge old and new uids for this environment/sid (set union).
            old_uid = set(self.__robots_data[str(config.okc_environment)][str(
                self.__sid)])
            new_uid = set(uid_list)
            self.__robots_data[str(config.okc_environment)][str(
                self.__sid)] = list(old_uid | new_uid)
        except KeyError:
            import traceback
            # Bug fix: traceback.print_exc() returns None, so the original
            # logged the literal string "None"; format_exc() returns the
            # traceback text for logging.
            logging.error(traceback.format_exc())

        last_load = {
            "env": config.okc_environment,
            "kingdom": self.__sid,
            "sid": self.__sid,
            "uid": uid_list
        }
        self.__robots_data["last_load"] = last_load
        write_json_file(file_path=config.robots_json_data_path,
                        data=self.__robots_data)
Ejemplo n.º 5
0
 def __save_robots(self):
     """Persist the uids of the currently loaded robots to the env file."""
     env_path = util.get_ini_data(ini_path=config.conf_path,
                                  section="path",
                                  section_item="okc_environment")
     last_load = util.read_json_file(env_path)
     # Nothing to save when no robots are loaded.
     if not self.robots:
         return
     last_load["uid"] = [robot.uid for robot in self.robots]
     util.write_json_file(file_path=env_path, data=last_load)
Ejemplo n.º 6
0
	def __get_douyu_rooms(self):
		"""Collect the URLs of every live room on the Douyu platform.

		Scrapes the /directory page for game categories, builds one URL per
		game, fetches each game page and appends its room links to
		``self.douyu_rooms``. Intermediate results are dumped to JSON files
		in the current working directory.
		"""
		# Fetch the directory page listing all games being streamed.
		self.browser.get(config.douyu.base_url + "/directory")

		# util.write_data(write_path=config.douyu.directory_path, data=self.browser.page_source)

		soup_menu = bs4.BeautifulSoup(self.browser.page_source, "html.parser")

		# The category data is embedded in an inline <script> tag as
		# ``var $DATA = {...};`` — extract and parse that JSON blob.
		for data in soup_menu.select("script"):
			data = str(data)
			if "DATA" in data:
				data = data.split("var $DATA =")[-1].split(";")[0]
				self.directory_games = json.loads(data)
				print(self.directory_games)

		# All game categories, each containing a list of games.
		category_list = self.directory_games["firstCategory"]

		# Game names, e.g. LOL
		game_relative_path_list = []
		game_url_list = []

		for category in category_list:
			for relative in category["secondCategory"]:
				game_relative_path_list.append(relative["cate2Url"])

		for game_relative_path in game_relative_path_list:
			url = config.douyu.base_url + game_relative_path
			game_url_list.append(url)

		util.write_json_file(os.getcwd() + "\\game_url.json", data=game_url_list)

		# Collect the room links.
		# TODO fetch room attributes (followers, title, ...) to support
		# filtering, e.g. excluding official rooms; could be modelled with a
		# dedicated class describing the attributes.
		for game_url in game_url_list:
			# Throttle requests to avoid hammering the site.
			time.sleep(config.request_wait_time)
			game_url_result = requests.get(game_url)
			if game_url_result.status_code == 200:
				logging.info("%s 访问成功" % game_url)
			else:
				logging.error("%s 访问失败" % game_url)
				continue
			soup_game = bs4.BeautifulSoup(game_url_result.text, "html.parser")

			game_room_list = soup_game.select(".layout-Cover-item")

			for game_room in game_room_list:
				self.douyu_rooms.append("%s/%s" % (config.douyu.base_url, game_room.select("a")[0].get("href")))

		logging.info("斗鱼旗下所有直播间的数量为 : %s" % len(self.douyu_rooms))
		util.write_json_file(os.getcwd() + "\\room_url.json", data=self.douyu_rooms)
Ejemplo n.º 7
0
def write_geojson(args, source, fc, fn, archive, updated_stations):
    """Write one ascent FeatureCollection as a brotli-compressed GeoJSON file.

    The destination path is derived from the station id and the synoptic
    timestamp:  <destdir>/<source>/<id[:2]>/<id[2:5]>/<year>/<month>/
    <station>_<day>_<time>.geojson.br.  Appends ``(station_id, properties)``
    to ``updated_stations`` and returns True on success, or None when the
    ascent is filtered out by ``args.station``.

    :raises ValueError: when the FeatureCollection fails GeoJSON validation.
    """
    fc.properties["processed"] = int(datetime.utcnow().timestamp())
    fc.properties["origin_member"] = pathlib.PurePath(fn).name
    if archive:
        fc.properties["origin_archive"] = pathlib.PurePath(archive).name
    station_id = fc.properties["station_id"]

    # Honor a --station filter: skip everything but the requested station.
    if args.station and args.station != station_id:
        return

    fc.properties["fmt"] = config.FORMAT_VERSION

    logging.debug(
        f"output samples retained: {len(fc.features)}, station id={station_id}"
    )

    updated_stations.append((station_id, fc.properties))

    # Shard the output tree by the first two and next three characters of
    # the station id to keep directories small.
    cc = station_id[:2]
    subdir = station_id[2:5]

    syn_time = datetime.utcfromtimestamp(fc.properties["syn_timestamp"]).replace(
        tzinfo=pytz.utc
    )
    day = syn_time.strftime("%Y%m%d")
    year = syn_time.strftime("%Y")
    month = syn_time.strftime("%m")
    time = syn_time.strftime("%H%M%S")

    dest = (
        f"{args.destdir}/{source}/{cc}/{subdir}/"
        f"{year}/{month}/{station_id}_{day}_{time}.geojson.br"
    )
    # ``ref`` is the same path relative to destdir and without the .br
    # suffix; it is stored back into the properties as "path".
    ref = f"{source}/{cc}/{subdir}/" f"{year}/{month}/{station_id}_{day}_{time}.geojson"

    path = pathlib.Path(dest).parent.absolute()
    pathlib.Path(path).mkdir(parents=True, exist_ok=True)

    if not fc.is_valid:
        logging.error(f"--- invalid GeoJSON! {fc.errors()}")
        raise ValueError("invalid GeoJSON")

    util.write_json_file(fc, dest, useBrotli=True, asGeojson=True)

    fc.properties["path"] = ref

    if args.dump_geojson:
        pprint(fc)
    return True
Ejemplo n.º 8
0
def join_all():
    """Join every request with all of its messages.

    Reads the compressed request and message dumps, groups the messages by
    their request URL and writes a list of
    ``{"request": ..., "messages": [...]}`` records to
    ../data/full_messages.json.
    """
    from collections import defaultdict

    requests = read_compressed_json_file(
        '../data/requests.json.gzip')["objects"]
    messages = read_compressed_json_file(
        '../data/messages.json.gzip')["objects"]

    # Perf fix: group messages once (O(R + M)) instead of scanning the full
    # message list for every request (O(R * M)).  Per-request message order
    # is preserved, so the output is identical.
    messages_by_request = defaultdict(list)
    for m in messages:
        messages_by_request[m["request"]].append(m)

    data = []

    for r in requests:
        request_url = f"https://fragdenstaat.de/api/v1/request/{r['id']}/"
        print(request_url)
        data.append({
            "request": r,
            "messages": messages_by_request[request_url]
        })

    write_json_file('../data/full_messages.json', data)
Ejemplo n.º 9
0
    def init_cookies(self, platform):
        """Open the login page and persist the first cookie after login.

        :param platform: key under which the cookie dict is stored in the
            cookies JSON file.
        """
        # NOTE(review): the login xpath appears scrubbed/redacted
        # ('******'); it must be restored before this method can work.
        xpath_login = '******'
        xpath_player = '//*[@id="js-header"]/div/div/div[3]/div[7]/div/a/span/div/div/img'
        self.browser.get(url=self.init_url)
        self.browser.maximize_window()

        # Give the page time to load before reading the current URL.
        time.sleep(2)
        self.log_text_send.emit(self.browser.current_url)

        if util.find_xpath_in_xml(self.browser, xpath_login):
            self.browser.find_element_by_xpath(xpath=xpath_login).click()

            # The player avatar element appearing indicates a logged-in
            # session; only then is the cookie saved.
            if util.find_xpath_in_xml(self.browser, xpath_player):
                # Only the first cookie is saved — presumably the session
                # cookie; verify against the consumer of cookies_path.
                util.write_json_file(
                    file_path=config.cookies_path,
                    data={platform: self.browser.get_cookies()[0]})
Ejemplo n.º 10
0
def join_successful_messages():
    """Collect the response messages of all (partially) successful requests
    and write them to ../data/suc_msg.json."""
    requests = read_compressed_json_file(
        '../data/requests.json.gzip')["objects"]
    messages = read_compressed_json_file(
        '../data/messages.json.gzip')["objects"]

    data = []

    wanted_resolutions = ("successful", "partially_successful")
    for req in requests:
        # Skip requests that were not (at least partially) successful.
        if req["resolution"] not in wanted_resolutions:
            continue
        request_url = f"https://fragdenstaat.de/api/v1/request/{req['id']}/"
        print(request_url)
        for msg in messages:
            if msg["is_response"] and msg["request"] == request_url:
                data.append(msg)

    write_json_file('../data/suc_msg.json', data)
Ejemplo n.º 11
0
    def click_by_node_name(self,
                           view,
                           node_name,
                           can_change=False,
                           result_view="") -> bool:
        """Click a UI node by name, caching its position per view.

        :param view: name of the current view, used as the cache key.
        :param node_name: name of the UI node to click.
        :param can_change: if True the node position is volatile and is
            always re-resolved instead of read from the cache.
        :param result_view: optional view expected after the click; when
            non-empty, success additionally requires finding that view.
        :return: True if the click (and optional view check) succeeded.
        """
        if can_change:
            node_pos = self.get_pos(node_name)
        elif view in self.ui_pos.keys():
            if node_name in self.ui_pos[view].keys():
                # Cache hit: reuse the stored position.
                node_pos = tuple(self.ui_pos[view][node_name])
            else:
                # First time this node is seen in a known view: resolve
                # it and persist the updated cache to disk.
                node_pos = self.get_pos(node_name)
                self.ui_pos[view][node_name] = list(node_pos)
                util.write_json_file(file_path=self.ui_pos_path,
                                     data=self.ui_pos)
        else:
            # Unknown view: start a fresh cache entry for it.
            node_pos = self.get_pos(node_name)
            self.ui_pos[view] = {node_name: list(node_pos)}
            util.write_json_file(file_path=self.ui_pos_path, data=self.ui_pos)

        logging.info("%s 的坐标为 : %s" % (node_name, node_pos))

        if node_pos != ():
            self.poco.click(pos=node_pos)
            api.sleep(self.click_wait_time)
            self.update_ui_node()
            if result_view == "":
                # No follow-up view requested; the click alone is success.
                return True
            if self.find_ui_node(ui_node_name=result_view):
                return True
            else:
                logging.error("点击%s后,没有打开%s" % (node_name, result_view))
                return False
        else:
            # get_pos returned an empty tuple: node not found on screen;
            # capture a screenshot for debugging.
            self.phone.get_screen_shot(node_name, self.default_save_path)
            logging.error("点击失败,没找到UI坐标")
            return False
Ejemplo n.º 12
0
def update_geojson_summary(args, stations, updated_stations, summary):
    """Merge freshly generated ascents into the GeoJSON summary.

    :param args: CLI namespace; uses ``args.max_age`` (days) and
        ``args.summary`` (output path, ".br" suffix enables brotli).
    :param stations: station_list dict keyed by station id.
    :param updated_stations: list of (station_id, feature-properties)
        tuples produced by write_geojson().
    :param summary: previously loaded summary FeatureCollection (may be
        empty / lack a "features" key on first run).
    """

    stations_with_ascents = {}
    # unroll into dicts for quick access
    if "features" in summary:
        for feature in summary.features:
            a = feature.properties["ascents"]
            if len(a):
                st_id = feature.properties["station_id"]
                stations_with_ascents[st_id] = feature

    # remove entries from ascents which have a syn_timestamp less than cutoff_ts
    cutoff_ts = util.now() - args.max_age * 24 * 3600

    # now walk the updates
    for station, asc in updated_stations:
        if station in stations_with_ascents:

            # we already have ascents from this station.
            # append, sort by synoptic time and de-duplicate
            oldlist = stations_with_ascents[station]["properties"]["ascents"]
            oldlist.append(asc)

            pruned = [x for x in oldlist if x["syn_timestamp"] > cutoff_ts]

            logging.debug(f"pruning {station}: {len(oldlist)} -> {len(pruned)}")

            newlist = sorted(pruned, key=itemgetter("syn_timestamp"), reverse=True)
            # https://stackoverflow.com/questions/9427163/remove-duplicate-dict-in-list-in-python
            seen = set()
            dedup = []
            for d in newlist:
                # keep an ascent of each source, even if same synop time
                t = str(d["syn_timestamp"]) + d["source"]
                if t not in seen:
                    seen.add(t)
                    dedup.append(d)
            stations_with_ascents[station]["properties"]["ascents"] = dedup

            # fixup the name if it was added to station_list.json:
            ident = stations_with_ascents[station]["properties"]["name"]
            if ident in stations:
                # using WMO id as name. Probably mobile. Replace by string name.
                stations_with_ascents[station]["properties"]["name"] = stations[ident][
                    "name"
                ]

            # overwrite the station coords by the coords of the last ascent
            # to properly handle mobile stations
            if asc["id_type"] == "mobile":
                logging.debug(
                    f"fix coords {station} -> {asc['lon']} {asc['lat']} {asc['elevation']}"
                )
                properties = stations_with_ascents[station]["properties"]
                stations_with_ascents[station] = geojson.Feature(
                    geometry=geojson.Point(
                        (
                            round(asc["lon"], 6),
                            round(asc["lat"], 6),
                            round(asc["elevation"], 1),
                        )
                    ),
                    properties=properties,
                )

        else:
            # station appears with first-time ascent
            properties = {}
            properties["ascents"] = [asc]

            if station in stations:
                # station is known in station_list: take its canonical
                # name and coordinates, mark as a registered WMO station.
                st = stations[station]
                coords = (st["lon"], st["lat"], st["elevation"])
                properties["name"] = st["name"]
                properties["station_id"] = station
                properties["id_type"] = "wmo"
            else:

                # unlisted station: anonymous + mobile
                # take coords and station_id as name from ascent
                coords = (asc["lon"], asc["lat"], asc["elevation"])
                properties["name"] = asc["station_id"]

                if re.match(r"^\d{5}$", station):
                    # WMO id syntax, but not in station_list
                    # hence an unregistered but fixed station
                    properties["id_type"] = "unregistered"
                else:
                    # looks like weather ship
                    properties["id_type"] = "mobile"

            stations_with_ascents[station] = geojson.Feature(
                geometry=geojson.Point(coords), properties=properties
            )

    # create GeoJSON summary
    # ns = number of stations, na = number of ascents, for the debug log.
    ns = na = 0
    fc = geojson.FeatureCollection([])
    fc.properties = {
        "fmt": config.FORMAT_VERSION,
        "generated": int(util.now()),
        "max_age": args.max_age * 24 * 3600,
    }
    for _st, f in stations_with_ascents.items():
        sid, stype = slimdown(f)
        f.properties["station_id"] = sid
        f.properties["id_type"] = stype
        ns += 1
        na += len(f.properties["ascents"])
        fc.features.append(f)

    logging.debug(f"summary {args.summary}: {ns} active stations, {na} ascents")

    useBrotli = args.summary.endswith(".br")
    util.write_json_file(fc, args.summary, useBrotli=useBrotli, asGeojson=True)
Ejemplo n.º 13
0
def initialize_stations(txt_fn, json_fn):
    """Parse the tab-separated station list ``txt_fn`` and write it as JSON.

    Each input row yields ``{wmoid: {"name", "lat", "lon", "elevation"}}``
    in ``json_fn``.  Stations whose altitude field carries the missing-data
    sentinel (<= -998.8) are skipped.
    """
    # Two-letter US state codes; a set for O(1) membership tests.
    # NOTE(review): "KY" is absent, so Kentucky station names keep plain
    # title-casing — confirm whether that omission is intentional.
    US_STATES = {
        "AK", "AL", "AR", "AZ", "CA", "CO", "CT", "DE", "FL", "GA",
        "HI", "IA", "ID", "IL", "IN", "KS", "LA", "MA", "MD", "ME",
        "MI", "MN", "MO", "MS", "MT", "NC", "ND", "NE", "NH", "NJ",
        "NM", "NV", "NY", "OH", "OK", "OR", "PA", "RI", "SC", "SD",
        "TN", "TX", "UT", "VA", "VT", "WA", "WI", "WV", "WY",
    }

    stationdict = {}
    with open(txt_fn, "r") as csvfile:
        stndata = csv.reader(csvfile, delimiter="\t")
        for row in stndata:
            m = re.match(
                r"(?P<stn_wmoid>^\w+)\s+(?P<stn_lat>\S+)\s+(?P<stn_lon>\S+)\s+(?P<stn_altitude>\S+)(?P<stn_name>\D+)",
                row[0],
            )
            fields = m.groupdict()
            stn_wmoid = fields["stn_wmoid"][6:]
            stn_name = fields["stn_name"].strip()

            # "NY SOME PLACE" -> "Some Place, NY"; any other name is
            # simply title-cased.
            # Bug fix: the original re-assigned stn_name from the raw
            # field right after this if/else, discarding the state-aware
            # formatting; that dead overwrite is removed.
            if re.match(r"^[a-zA-Z]{2}\s",
                        stn_name) and stn_name[:2] in US_STATES:
                stn_name = stn_name[2:].strip().title() + ", " + stn_name[:2]
            else:
                stn_name = stn_name.title()
            stn_lat = float(fields["stn_lat"])
            stn_lon = float(fields["stn_lon"])
            stn_altitude = float(fields["stn_altitude"])

            # Values <= -998.8 mark a missing altitude; skip those rows.
            if stn_altitude > -998.8:
                stationdict[stn_wmoid] = {
                    "name": stn_name,
                    "lat": stn_lat,
                    "lon": stn_lon,
                    "elevation": stn_altitude,
                }
        util.write_json_file(stationdict, json_fn)
Ejemplo n.º 14
0
def main():
    """Rebuild the radiosonde summary from on-disk detail files.

    Parses the CLI, regenerates station_list.json from the station text
    file, walks the data directories for *.geojson.br detail files and
    writes a combined summary FeatureCollection.  Sets the module-level
    ``station_list`` global and relies on module-level ``flights`` /
    ``txtfrag``.  Guarded by a pid file so only one instance runs.
    """
    parser = argparse.ArgumentParser(
        description="rebuild radiosonde summary.json",
        add_help=True,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument("-v", "--verbose", action="store_true", default=False)
    parser.add_argument(
        "--station-json",
        action="store",
        default=config.STATION_LIST,
        help="path to write the station_list.json file",
    )
    parser.add_argument(
        "--station-text",
        action="store",
        default=config.STATION_TXT,
        help="path to the source text file to generate the station_list.json",
    )

    parser.add_argument(
        "--summary",
        action="store",
        default=config.WWW_DIR + config.DATA_DIR + config.SUMMARY,
        help="path of brotli-compressed summary.geojson.br",
    )

    parser.add_argument(
        "--dirs",
        nargs="+",
        type=str,
        default=[config.MADIS_DATA, config.GISC_DATA],
        help="directories to scan for detail files (*.geojson.br)",
    )
    parser.add_argument(
        "--max-age",
        action="store",
        type=int,
        default=config.MAX_DAYS_IN_SUMMARY,
        help="number of days of history to keep in summary",
    )
    parser.add_argument("--tmpdir", action="store", default=None)

    args = parser.parse_args()
    if args.tmpdir:
        config.tmpdir = args.tmpdir
    level = logging.WARNING
    if args.verbose:
        level = logging.DEBUG

    logging.basicConfig(level=level)
    # Ensure generated files are world-readable.
    os.umask(0o22)

    # Fail fast when required inputs are missing.
    if not os.path.exists(args.station_text):
        logging.error(f"the {args.station_text} does not exist")
        sys.exit(1)

    for d in args.dirs:
        if not os.path.exists(d):
            logging.error(f"the directory {d} does not exist")
            sys.exit(1)

    try:
        # Pid-file lock: only one rebuild may run at a time.
        with pidfile.Pidfile(config.LOCKFILE,
                             log=logging.debug,
                             warn=logging.debug):

            # Ascents older than this are dropped from the summary.
            cutoff_ts = util.now() - args.max_age * 24 * 3600
            update_station_list(args.station_text, args.station_json)

            global station_list
            station_list = json.loads(
                util.read_file(args.station_json).decode())
            ntotal = 0
            for d in args.dirs:
                nf, nu, nc = walkt_tree(d, pathlib.Path(d), "*.geojson.br",
                                        cutoff_ts)
                ntotal = ntotal + nf

            fixup_flights(flights)
            fc = geojson.FeatureCollection([])
            fc.properties = {
                "fmt": config.FORMAT_VERSION,
                "generated": int(util.now()),
                "max_age": config.MAX_DAYS_IN_SUMMARY * 24 * 3600,
            }
            for _st, f in flights.items():
                fc.features.append(f)

            util.write_json_file(fc,
                                 args.summary,
                                 useBrotli=True,
                                 asGeojson=True)

            # Emit any accumulated diagnostic text on stderr.
            for l in txtfrag:
                print(l, file=sys.stderr)

    except pidfile.ProcessRunningException:
        logging.warning(f"the pid file {config.LOCKFILE}is in use, exiting.")
        return -1
Ejemplo n.º 15
0
def okc_robot_start():
    """Interactive entry point for driving OKC robots.

    Prompts for environment / sid / start uid / robot count.  Numeric
    input starts a fresh session (persisted to the env JSON file);
    non-numeric input falls back to the last saved session.  Then loops
    reading commands and dispatching them to the AnalysisCommand instance.
    """
    print(
        "welcome to use okc robot.\n\nthe environment : 0-test ,1-dev ,2-online test ,3-online ."
        .title())
    print(
        "no input or input error Use the environment where you last logged in."
        .title())
    print("If this is your first login, use test environment.\n".title())

    input_env = input("choose environment :\n".title())
    sid = input("enter sid:\n".title())
    start_uid = input("start uid:\n".title())
    robot_num = input("robot num:\n".title())

    env_path = util.get_ini_data(ini_path=config.conf_path,
                                 section="path",
                                 section_item="okc_environment")

    def get_help():
        # Build a help listing from AnalysisCommand's public methods and
        # their docstrings.  Relies on `robots` being assigned below
        # before the first call (closure over the outer local).
        help_info = "-" * 150 + "\n"
        for command_name in robots.__class__.__dict__.keys():
            if not re.match(r"__", command_name):
                help_info += "Command : %s  Params : %s" % (
                    command_name + " " * (30 - len(command_name)) + "·" * 10,
                    robots.__getattribute__(command_name).__doc__)
                help_info += "\n" + "-" * 150 + "\n"
        return help_info

    try:
        # Numeric input path: start a fresh session and persist it.
        input_env = int(input_env)
        if input_env == 0:
            config.okc_environment = config.OkcEnvironment.Test.value
        elif input_env == 1:
            config.okc_environment = config.OkcEnvironment.Dev.value
        elif input_env == 2:
            config.okc_environment = config.OkcEnvironment.OnlineTest.value
        elif input_env == 3:
            config.okc_environment = config.OkcEnvironment.Online.value
        else:
            config.okc_environment = config.OkcEnvironment.Test.value
        logging.info("environment : %s " % config.ENV[config.okc_environment])
        sid = int(sid)
        start_uid = int(start_uid)
        player_num = int(robot_num)
        data = {
            "env": config.okc_environment,
            "sid": sid,
            "uid": list(range(start_uid, start_uid + player_num))
        }
        util.write_json_file(env_path, data=data)
        robots = AnalysisCommand(sid=sid,
                                 start_uid=start_uid,
                                 robot_num=player_num)
    except ValueError:
        # Non-numeric input: restore the last saved session instead.
        last_load = util.read_json_file(env_path)

        env = int(last_load["env"])
        config.okc_environment = env

        logging.info("environment : %s " % config.ENV[config.okc_environment])
        print("-" * 150)

        sid = int(last_load["sid"])
        start_uid = last_load["uid"][0]
        player_num = len(last_load["uid"])

        if start_uid == -1:
            robots = AnalysisCommand()
        else:
            robots = AnalysisCommand(sid=sid,
                                     start_uid=start_uid,
                                     robot_num=player_num)

    if not robots.robots:
        print("Robot Init Failed")
        return
    # Command REPL: "<name> <params>"; unknown input prints the help text.
    while True:
        command = input("enter command : \n".title())
        # NOTE(review): `(:?\s+...)` matches a literal optional ':' — the
        # non-capturing group syntax `(?:...)` was probably intended;
        # confirm the expected command grammar before changing it.
        cmd = re.match(r"(\w+)(:?\s+(.*))", command)
        if not cmd:
            print("Input Error.\n")
            print(get_help())
            continue
        try:
            # NOTE(review): group(1, 2) always yields a 2-tuple, so this
            # ValueError branch looks unreachable — verify.
            cmd_name, params = cmd.group(1, 2)
        except ValueError:
            print("Input Params Error.\n")
            print(get_help())
            continue
        cmd_list = robots.__dir__()
        if cmd_name in cmd_list:
            cmd_value = getattr(robots, cmd_name)
            try:
                cmd_value(params)
            except:
                # Broad catch keeps the REPL alive on any command failure.
                logging.error(traceback.print_exc())
        else:
            print("command -> '%s' isn't in cmd list.\n" % cmd_name)
            print(get_help())
            continue
Ejemplo n.º 16
0
 def __akzeptieren(self, update: Update, context: CallbackContext):
     """Callback for /akzeptieren - adds the user to the GDPR whitelist."""
     chat_id = update.effective_chat.id
     # Already accepted: nothing to do.
     if chat_id in self.whitelist:
         return
     self.whitelist.append(chat_id)
     util.write_json_file({"list": self.whitelist}, "whitelist.json")
     self.__start(update, context)
Ejemplo n.º 17
0
 def __write_json(self):
     """Best-effort dump of the subscription map to subscribers.json."""
     try:
         util.write_json_file(self.subscriptions, "subscribers.json")
     except Exception as err:
         # Persisting subscriptions must never crash the bot; just log.
         logging.error(err)