Example #1
def extract_link_mode(linkset, save_file_path):
    # Group links by source, then split them into one-to-one links
    # (a source that references exactly one target) and one-to-many links.
    source_list = []
    target_list = []
    for link in linkset:
        source = link['source']
        if source not in source_list:
            source_list.append(source)
            target = [sub_iter['target'] for sub_iter in linkset
                      if sub_iter['source'] == source]
            target_list.append(target)

    link_1_1 = []
    link_1_N = []
    for source, target in zip(source_list, target_list):
        if len(target) == 1:
            link_1_1.append({'source': source, 'target': target})
        else:
            link_1_N.append({'source': source, 'target': target})

    utils.save_json_to_file(save_file_path + "link_1_1.json", link_1_1)
    utils.save_json_to_file(save_file_path + "link_1_N.json", link_1_N)

    return link_1_1, link_1_N
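A minimal usage sketch for Example #1 (the linkset and the output directory below are hypothetical; utils.save_json_to_file is assumed to take a path followed by the data, matching the calls above):

links = [{'source': 1, 'target': 2},
         {'source': 1, 'target': 3},
         {'source': 4, 'target': 5}]
# Hypothetical output directory; the function appends the file names to it.
link_1_1, link_1_N = extract_link_mode(links, "data/owner/repo/")
# link_1_1 -> [{'source': 4, 'target': [5]}]
# link_1_N -> [{'source': 1, 'target': [2, 3]}]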
Example #2
def run(work_path):
    # System initialization; the parameter must match the verification value entered when the skill was created
    hilens.init("driving")

    # Initialize the built-in camera and the HDMI display.
    # In HiLens Studio, VideoCapture reads test/camera0.mp4 by default when no argument is given;
    # on a HiLens Kit, no argument means the local camera is used.
    camera = hilens.VideoCapture()
    display = hilens.Display(hilens.HDMI)

    # Initialize the model
    model_path = os.path.join(work_path, 'model/yolo3.om')
    driving_model = hilens.Model(model_path)

    frame_index = 0
    json_bbox_list = []
    json_data = {'info': 'det_result'}

    while True:
        frame_index += 1
        try:
            time_start = time.time()

            # 1. Device input #####
            input_yuv = camera.read()  # read one frame (YUV NV21 format)

            # 2. Data preprocessing #####
            img_bgr = cv2.cvtColor(input_yuv,
                                   cv2.COLOR_YUV2BGR_NV21)  # convert to BGR format
            img_preprocess, img_w, img_h = preprocess(img_bgr)  # resize to the model input size

            # 3. Model inference #####
            output = driving_model.infer([img_preprocess.flatten()])

            # 4. Get detection results #####
            bboxes = get_result(output, img_w, img_h)

            # 5-1. [For competition submission] Write results to a JSON file #####
            if len(bboxes) > 0:
                json_bbox = convert_to_json(bboxes, frame_index)
                json_bbox_list.append(json_bbox)

            # 5-2. [For debugging] Show results in the simulator #####
            img_bgr = draw_boxes(img_bgr, bboxes)  # draw boxes on the image
            output_yuv = hilens.cvt_color(img_bgr, hilens.BGR2YUV_NV21)
            display.show(output_yuv)  # show on the screen
            time_frame = 1000 * (time.time() - time_start)
            hilens.info('----- time_frame = %.2fms -----' % time_frame)

        except RuntimeError:
            print('last frame')
            break

    # Save detection results
    hilens.info('write json result to file')
    result_filename = './result.json'
    json_data['result'] = json_bbox_list
    save_json_to_file(json_data, result_filename)

    hilens.terminate()
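The save_json_to_file helper itself is not shown in these examples, and the argument order differs between projects. A minimal sketch that matches the (data, filename) order used by Example #2 (the name and the formatting options here are assumptions, not the original implementation):

import json

def save_json_to_file(json_data, filename):
    # Serialize the detection-results dict to a JSON file.
    with open(filename, 'w') as f:
        json.dump(json_data, f, ensure_ascii=False, indent=2)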
Example #3
    def get_exchange_rates_list_message(self):

        rates_path = os.path.join(os.environ['work_dir'], 'rates.json')
        data_loaded_from_file = utils.read_data_from_file_as_json(rates_path)

        expiration_time = 10 * 60 * 1000  # ten minutes, in milliseconds
        last_timestamp = int(data_loaded_from_file['timestamp'])
        current_timestamp = utils.get_current_timestamp()

        if current_timestamp - last_timestamp < expiration_time:
            # Cached rates are still fresh: refresh the timestamp and reuse them.
            data_loaded_from_file['timestamp'] = current_timestamp
            with open(rates_path, 'w') as f:
                utils.save_json_to_file(data_loaded_from_file, f)

            return utils.format_decimal_values(data_loaded_from_file['rates'])

        try:
            data_loaded_from_api = self._get_exchange_rates_response_for_usd()
        except BadRequestException:
            return 'Something went wrong.'

        rates = data_loaded_from_api['rates']
        with open(rates_path, 'w') as f:
            utils.save_json_to_file(
                {
                    'rates': rates,
                    'timestamp': current_timestamp
                }, f)

        return utils.format_decimal_values(rates)
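Example #3 treats cached rates as fresh for ten minutes by comparing millisecond timestamps. A minimal sketch of the helpers it relies on (the names mirror the utils calls above; returning milliseconds is an assumption implied by the 10 * 60 * 1000 window, and is_cache_fresh is a hypothetical helper added for illustration):

import time

def get_current_timestamp():
    # Current Unix time in milliseconds.
    return int(time.time() * 1000)

def is_cache_fresh(last_timestamp, expiration_ms=10 * 60 * 1000):
    # True while the cached entry is younger than the expiration window.
    return get_current_timestamp() - last_timestamp < expiration_ms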
Example #4
    def fetch_nike_activities(self, refresh_cache=False):
        """Fetches all Nike activities. If they are saved in a JSON file, it will use that as the source,
        otherwise will fetch all activities from the API.

        Args:
            refresh_cache (bool, optional): Will force a refresh of the data in saved JSON files. Defaults to False.
        """
        activities = get_json_from_file(self.dir, self.activities_filename)
        if activities and not refresh_cache:
            logging.info(f"Using cached activities for Nike data")
            return activities

        logging.info(f"Fetching new activities for Nike data")
        try:
            if isBlank(self.nike_access_token):
                raise Exception(
                    "Please provide a Nike token in order to fetch Nike data.")

            url = ("https://api.nike.com/sport/v3/me/activities/after_time/0")
            first_page = get(url, bearer_token=self.nike_access_token)
            activities = self.get_all_subsequent_nike_pages(first_page)
            save_json_to_file(self.activities_filename, activities)

            # For fun fetch all additional metrics
            self.get_nike_additional_metrics()

            return activities
        except Exception:
            logging.exception(
                "Something went wrong, could not fetch Nike data")
Example #5
    def get_nike_additional_metrics(self):
        """
            Not using this detailed Nike data for anything.
            But it seems interesting to fetch from the API.
            While you're hacking the Nike walled garden.
        """
        detailed_activities = get_json_from_file(
            self.dir, self.additional_activities_filename)
        if detailed_activities:
            logging.info('Fetching nike detailed activities from file')
            return detailed_activities

        logging.info("Fetching nike detailed activities from API")
        try:
            activities = self.fetch_nike_activities()
            nike_detailed_activities = []
            for activity in activities:
                activity_id = activity['id']
                url = f"https://api.nike.com/sport/v3/me/activity/{activity_id}?metrics=ALL"
                detailed_activity = get(url,
                                        bearer_token=self.nike_access_token)
                nike_detailed_activities.append(detailed_activity)
            save_json_to_file(self.additional_activities_filename,
                              nike_detailed_activities)
            return nike_detailed_activities
        except Exception:
            logging.exception(
                "Something went wrong, could not fetch additional Nike data")
Example #6
    def get_user_information(self):
        user_info = self.accesslink.users.get_information(
            user_id=self.config['user_id'],
            access_token=self.config['access_token'])
        print('==========\tUSER INFORMATION\t==========')
        utils.pretty_print_json(user_info)
        utils.save_json_to_file(
            user_info,
            f'user_data/user_data_{datetime.today().strftime("%Y-%m-%d")}.json'
        )
Example #7
    def __init__(self):
        self.api_resource = HTTPSConnection(host='api.exchangeratesapi.io')
        rates_path = os.path.join(os.environ['work_dir'], 'rates.json')
        if not os.path.exists(rates_path):
            data = self._get_exchange_rates_response_for_usd()
            with open(rates_path, 'w') as f:
                utils.save_json_to_file(
                    {
                        'rates': data['rates'],
                        'timestamp': utils.get_current_timestamp()
                    }, f)
Example #8
    async def get_subscriber_file(self):
        try:
            with open("./subscriber.json") as fp:
                data = json.load(fp)
                return data
        except FileNotFoundError:
            data = await self.bot.http_session.get(
                url="https://twitchemotes.com/api_cache/v3/subscriber.json"
            )
            content = await data.json()
            save_json_to_file(content, "./subscriber.json")
            return content
Example #9
    def get_daily_activity(self):
        transaction = self.accesslink.daily_activity.create_transaction(
            user_id=self.config['user_id'],
            access_token=self.config['access_token'])
        if not transaction:
            print('No new daily activity available.')
            return

        resource_urls = transaction.list_activities()['activity-log']

        for url in resource_urls:
            activity_summary = transaction.get_activity_summary(url)

            print('Activity summary:')
            utils.pretty_print_json(activity_summary)
            utils.save_json_to_file(
                activity_summary,
                f'daily_activity_data/daily_activity_data_{str(activity_summary["date"])}.json'
            )

        transaction.commit()
Example #10
    def fetch_strava_activities(self, refresh_cache=False):
        """Fetches all Strava activities. If they are saved in a JSON file, it will use that as the source,
        otherwise will fetch all activities from the API.

        Args:
            refresh_cache (bool, optional): Will force a refresh of the data in saved JSON files. Defaults to False.
        """
        activities = get_json_from_file(self.dir, self.activities_filename)

        if activities and not refresh_cache:
            logging.info(f"Using cached activities for Strava data")
            return activities

        logging.info(f"Fetching new activities for Strava data")
        try:
            self.authorize_strava()
            activities = self.get_all_strava_pages()
            save_json_to_file(self.activities_filename, activities)
            return activities
        except Exception:
            logging.exception("Something went wrong, could not fetch Strava data")
Example #11
    def get_physical_info(self):
        transaction = self.accesslink.physical_info.create_transaction(
            user_id=self.config['user_id'],
            access_token=self.config['access_token'])
        if not transaction:
            print('No new physical information available.')
            return

        resource_urls = transaction.list_physical_infos(
        )['physical-informations']

        for url in resource_urls:
            physical_info = transaction.get_physical_info(url)

            print('Physical info:')
            utils.pretty_print_json(physical_info)
            time = utils.polar_datetime_to_python_datetime_str(
                str(physical_info['created']))
            utils.save_json_to_file(physical_info,
                                    f'physical_data/physical_data{time}.json')

        transaction.commit()
Example #12
def request_graphQL(owner, repo, type):
    """
    :param owner: repository owner
    :param repo:  repository name
    :param type:  pullRequests or issues
    :return:  response of pr or issues
    """
    count = 0
    output_response_file = init.local_data_filepath + owner + "/" + repo + "/response_" + type + ".json"
    if os.path.isfile(output_response_file):
        r = utils.read_json_from_file(output_response_file)
    else:
        r = query_request(queries.first_page, owner, repo, type)
    if not r['data']['repository'][type]['pageInfo']['hasNextPage']:
        return r
    while True:
        count += 1
        print(count, datetime.now(),
              r['data']['repository'][type]['totalCount'],
              len(r['data']['repository'][type]['nodes']))
        if count % 1 == 0:
            # checkpoint the merged response to disk after every page
            utils.save_json_to_file(output_response_file, r)
        earliest_pr_cursor = r['data']['repository'][type]['edges'][-1][
            'cursor']
        r2 = query_request(queries.other_page, owner, repo, type,
                           earliest_pr_cursor)
        r['data']['repository'][type]['pageInfo'] = r2['data']['repository'][
            type]['pageInfo']
        r['data']['repository'][type]['edges'] += r2['data']['repository'][
            type]['edges']
        r['data']['repository'][type]['nodes'] += r2['data']['repository'][
            type]['nodes']
        if not r['data']['repository'][type]['pageInfo']['hasNextPage']:
            utils.save_json_to_file(output_response_file, r)
            break
    return r
Example #13
    def save_results(results, file_name, file_type):
        """
        This function saves elastic search results to file.
        The supported types are: csv and json

        :param results: elasticsearch results
        :param file_name: file name to store
        :param file_type: file type to store

        Example:
            >>> builder = QueryBuilder()
            >>> builder.save_results(results, "results", "csv")
        """
        prepared_data = prepare_for_save(results)  # modify actors field
        file = "{}.{}".format(file_name, file_type)

        if file_type == "json":
            jsonified_data = toJSON(prepared_data)  # jsonify data from ELK
            save_json_to_file(jsonified_data, file)

        elif file_type == "csv":
            save_attr_dict_to_csv(prepared_data, file)  # save data as CSV
        else:
            print("this type is not supported")
Example #14
    def save_to_file(self):
        for sport in list(self.sports_lists.keys()):
            for json_object in self.sports_lists[sport]:
                utils.save_json_to_file(json_object,
                                        const.files[sport.lower()])
Example #15
def extract_link_type(response, type, filepath=None):
    """
    :param response: GraphQL response for a repository (as returned by request_graphQL)
    :param type: pullRequests or issues
    :param filepath: pass "init.local_data_filepath + owner + '/' + repo + '/'" when the results should be saved to file
    :return: (pr_pr, pr_iss) for pullRequests, (iss_pr, iss_iss) for issues
    """
    nodes = response['data']['repository'][type]['nodes']
    pr_pr = []
    pr_iss = []
    iss_pr = []
    iss_iss = []
    for item in nodes:
        node1_type = type
        node1_number = item['number']
        node1_time = item['createdAt']
        for node in item['timelineItems']['nodes']:
            if node:
                # Determine whether the other node is a pull request or an issue
                if "id" in node['source'].keys():
                    node2_type = "pullRequests"
                else:
                    node2_type = "issues"

                node2_number = node["source"]['number']
                node2_ref_date = node["referencedAt"]

                link = {}
                link['target'] = node1_number
                link['source'] = node2_number
                link['timeInterval'] = abs(
                    (datetime.strptime(node1_time, "%Y-%m-%dT%H:%M:%SZ") -
                     datetime.strptime(node2_ref_date,
                                       "%Y-%m-%dT%H:%M:%SZ")).days)
                if node1_type == "pullRequests" and node2_type == 'pullRequests':
                    pr_pr.append(link)
                if node1_type == "issues" and node2_type == 'pullRequests':
                    iss_pr.append(link)
                if node1_type == "pullRequests" and node2_type == 'issues':
                    pr_iss.append(link)
                if node1_type == "issues" and node2_type == 'issues':
                    iss_iss.append(link)

    if type == "pullRequests":
        if filepath:
            utils.save_json_to_file(filepath + "pr_pr.json", pr_pr)
            utils.save_json_to_file(filepath + "pr_iss.json", pr_iss)
        else:
            pass
        return pr_pr, pr_iss
    elif type == "issues":
        if filepath:
            utils.save_json_to_file(filepath + "iss_pr.json", iss_pr)
            utils.save_json_to_file(filepath + "iss_iss.json", iss_iss)
        else:
            pass
        return iss_pr, iss_iss
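Examples #12 and #15 come from the same project; a minimal sketch of how they might be wired together (the owner/repo values and the output path are hypothetical):

response = request_graphQL("octocat", "hello-world", "issues")
iss_pr, iss_iss = extract_link_type(
    response, "issues",
    filepath=init.local_data_filepath + "octocat/hello-world/")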
Example #16
    def save_file(self, data=None):
        """ Store the result/recent stats as JSON to a file """
        task = "." + self.task if self.task != "" else ""
        name = (self.name + ".stats" + task)
        data = {"result": self.result, "recent": self.recent}
        utils.save_json_to_file(data, name)
Example #17
def run(work_path):

    global data

    # System initialization; the parameter must match the verification value entered when the skill was created
    hilens.init("driving")

    # Initialize the built-in camera and the HDMI display.
    # In HiLens Studio, VideoCapture reads test/camera0.mp4 by default when no argument is given;
    # on a HiLens Kit, no argument means the local camera is used.
    camera = hilens.VideoCapture()

    display = hilens.Display(hilens.HDMI)

    if rec:
        rec_video(camera, display, show)

    # Initialize the model
    # model_path = os.path.join(work_path, 'model/yolo3_darknet53_raw3_4_sup_slope_terminal_t.om')
    model_path = os.path.join(work_path, 'model/yolo3_darknet53_raw3_4_sup_slope_now_terminal_t.om')

    driving_model = hilens.Model(model_path)

    frame_index = 0
    json_bbox_list = []
    json_data = {'info': 'det_result'}

    while True:
        frame_index += 1
        try:
            time_start = time.time()

            # 1. Device input #####
            input_yuv = camera.read()  # read one frame (YUV NV21 format)

            # 2. Data preprocessing #####
            if rgb:
                img_rgb = cv2.cvtColor(input_yuv, cv2.COLOR_YUV2RGB_NV21)  # convert to RGB format
            else:
                img_rgb = cv2.cvtColor(input_yuv, cv2.COLOR_YUV2BGR_NV21)  # convert to BGR format

            if pad:
                img_preprocess, img_w, img_h, new_w, new_h, shift_x_ratio, shift_y_ratio = preprocess_with_pad(img_rgb)  # resize to the model input size
                # 3. Model inference #####
                output = driving_model.infer([img_preprocess.flatten()])
                # 4. Get detection results #####
                bboxes = get_result_with_pad(output, img_w, img_h, new_w, new_h, shift_x_ratio, shift_y_ratio)
            else:
                img_preprocess, img_w, img_h = preprocess(img_rgb)  # resize to the model input size
                # 3. Model inference #####
                output = driving_model.infer([img_preprocess.flatten()])
                # 4. Get detection results #####
                bboxes = get_result(output, img_w, img_h)

            # # 5-1. [For competition submission] Write results to a JSON file #####
            # if len(bboxes) > 0:
            #     json_bbox = convert_to_json(bboxes, frame_index)
            #     json_bbox_list.append(json_bbox)
            # # if bboxes != []:
            # #     print()

            if socket_use:
                data = data_generate_4(bboxes)

            # 5-2. [For debugging] Show results on the display #####
            if show:
                if rgb:
                    img_bgr = cv2.cvtColor(img_rgb, cv2.COLOR_RGB2BGR)
                else:
                    img_bgr = img_rgb
                img_bgr, labelName = draw_boxes(img_bgr, bboxes)  # draw boxes on the image
                output_yuv = hilens.cvt_color(img_bgr, hilens.BGR2YUV_NV21)
                display.show(output_yuv)  # show on the screen
            if log:
                time_frame = 1000 * (time.time() - time_start)
                hilens.info('----- time_frame = %.2fms -----' % time_frame)

        except RuntimeError:
            print('last frame')
            break

    # Save detection results
    hilens.info('write json result to file')
    result_filename = './result.json'
    json_data['result'] = json_bbox_list
    save_json_to_file(json_data, result_filename)
    hilens.terminate()
Example #18
    def get_exercises(self):
        transaction = self.accesslink.training_data.create_transaction(
            user_id=self.config['user_id'],
            access_token=self.config['access_token'])
        if not transaction:
            print('No new exercises available.')
            return

        resource_urls = transaction.list_exercises()['exercises']

        for url in resource_urls:
            exercise_summary = transaction.get_exercise_summary(url)
            gpx_data = transaction.get_gpx(url)
            tcx_data = transaction.get_tcx(url)
            hr_data = transaction.get_heart_rate_zones(url)
            samples_data = transaction.get_available_samples(url)
            sample_data = transaction.get_samples(url)

            print('Exercise summary:')
            utils.pretty_print_json(exercise_summary)
            time = utils.polar_datetime_to_python_datetime_str(
                str(exercise_summary['start-time']))
            utils.save_json_to_file(
                exercise_summary, f'exercises_data/summary_data_{time}.json')
            if gpx_data:  # not an empty dict; if there is no data, this variable will be '{}'
                utils.save_json_to_file(
                    utils.xml_to_dict(gpx_data),
                    f'exercises_data/gpx_data_{time}.json')
            if tcx_data:
                utils.save_json_to_file(
                    utils.xml_to_dict(tcx_data),
                    f'exercises_data/tcx_data_{time}.json')
            if hr_data:
                utils.save_json_to_file(hr_data,
                                        f'exercises_data/hr_data_{time}.json')
            if samples_data:
                utils.save_json_to_file(
                    samples_data, f'exercises_data/samples_data_{time}.json')
            if sample_data:
                utils.save_json_to_file(
                    sample_data, f'exercises_data/sample_data_{time}.json')

        transaction.commit()