Example no. 1
    def get_nike_additional_metrics(self):
        """Fetch per-activity detailed Nike metrics, preferring the JSON cache.

        The detailed data is not used downstream; it is fetched because it
        seems interesting to collect while inside the Nike walled garden.

        Returns:
            list | None: Detailed activity payloads, or None when the
                API fetch fails (the failure is logged).
        """
        cached = get_json_from_file(
            self.dir, self.additional_activities_filename)
        if cached:
            logging.info('Fetching nike detailed activities from file')
            return cached

        logging.info("Fetching nike detailed activities from API")
        try:
            details = []
            for summary in self.fetch_nike_activities():
                endpoint = f"https://api.nike.com/sport/v3/me/activity/{summary['id']}?metrics=ALL"
                details.append(
                    get(endpoint, bearer_token=self.nike_access_token))
            save_json_to_file(self.additional_activities_filename, details)
            return details
        except Exception:
            logging.exception(
                "Something went wrong, could not fetch additional Nike data")
Example no. 2
    def fetch_nike_activities(self, refresh_cache=False):
        """Fetches all Nike activities. If they are saved in a JSON file, it will use that as the source,
        otherwise will fetch all activities from the API.

        Args:
            refresh_cache (bool, optional): Will force a refresh of the data in saved JSON files. Defaults to False.

        Returns:
            list | None: The activity summaries, or None when the API
                fetch fails (the failure is logged, not re-raised).
        """
        activities = get_json_from_file(self.dir, self.activities_filename)
        if activities and not refresh_cache:
            # Plain strings here: the originals were f-strings with no
            # placeholders (ruff F541).
            logging.info("Using cached activities for Nike data")
            return activities

        logging.info("Fetching new activities for Nike data")
        try:
            if isBlank(self.nike_access_token):
                # ValueError instead of bare Exception; it is still caught
                # (and logged) by the broad handler below, so callers see
                # no behavioral change.
                raise ValueError(
                    "Please provide a Nike token in order to fetch Nike data.")

            url = "https://api.nike.com/sport/v3/me/activities/after_time/0"
            first_page = get(url, bearer_token=self.nike_access_token)
            activities = self.get_all_subsequent_nike_pages(first_page)
            save_json_to_file(self.activities_filename, activities)

            # For fun fetch all additional metrics
            self.get_nike_additional_metrics()

            return activities
        except Exception:
            logging.exception(
                "Something went wrong, could not fetch Nike data")
Example no. 3
def main():
    """Convert every JSON file in ``in/`` into an interface file in ``out/``.

    Ensures the ``in``/``out``/``log`` directories exist, configures
    file-based error logging, then runs ``proc_json`` on each readable
    input file and dumps the result to ``out/`` with the
    ``zudello_intrfc_`` prefix.
    """
    in_dir = "in"
    utils.is_exists_dir(in_dir, True)

    out_dir = "out"
    utils.is_exists_dir(out_dir, True)

    log_dir = 'log'
    utils.is_exists_dir(log_dir, True)

    prefix_out_file = "zudello_intrfc_"

    # Append-mode error log shared with the per-file JSON loader below.
    log_filename = os.path.join(log_dir, 'error.log')
    logging.basicConfig(filename=log_filename,
                        filemode='a',
                        format='%(asctime)s - %(levelname)s: %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p')
    logger = logging.getLogger()

    for in_file in os.listdir(in_dir):
        print("File: {}".format(in_file))
        json_content = utils.get_json_from_file(os.path.join(in_dir, in_file),
                                                logger)
        # Files that fail to parse return a falsy value and are skipped
        # (the loader logs the error itself).
        if json_content:
            out_interface = proc_json(json_content)
            out_file_name = os.path.join(out_dir, prefix_out_file + in_file)
            utils.dump_json(out_file_name, out_interface)
Example no. 4
    def first_time_setup(self, config):
        """Collect burn-in gas readings and persist the ambient baseline.

        Samples the gas sensor for ``_FIRST_TIME_RUN_TIME_MINS`` minutes,
        stores a smoothed background reading in ``config``, writes the
        config to ``_CONFIG_FILE_NAME`` and returns the re-read config.

        Args:
            config (dict): Sensor configuration; its
                ``['gas']['ambient_background']`` entry is updated in place.

        Returns:
            dict: The configuration as re-read from the config file.

        Raises:
            RuntimeError: If no stable sensor readings were collected, so
                no meaningful background baseline can be computed.
        """
        # Define burn-in variables
        total_burn_in_time = 60 * _FIRST_TIME_RUN_TIME_MINS  # == 1MinuteInSeconds * DefaultBurnInTime
        percent_complete_increment = total_burn_in_time / 10
        next_milestone_marker = 0
        percent_complete = 0
        burn_in_data = []

        # Let the user know what is about to happen
        print(
            f'Collecting sensor first-time-run \'burn-in\' data for {_FIRST_TIME_RUN_TIME_MINS} minutes.'
        )
        print(
            f'Approximate completion time: '
            f'{datetime.fromtimestamp(time.time() + total_burn_in_time).strftime("%d/%m/%Y %H:%M")}:00'
        )

        # Capture Data for pre-determined minutes
        start_time = time.time()
        current_time = time.time()
        while current_time - start_time < total_burn_in_time:
            # Decide if it's time for an update on progress.
            if current_time - start_time >= next_milestone_marker:
                print(f'Collecting Data... {percent_complete}% complete')
                next_milestone_marker += percent_complete_increment
                percent_complete = int(
                    (next_milestone_marker / total_burn_in_time) * 100)

            # Capture more sensor data; only trust readings once the hot
            # plate is heat-stable.
            if self.sensor.get_sensor_data() and self.sensor.data.heat_stable:
                ambient_gas_reading = self.sensor.data.gas_resistance
                burn_in_data.append(ambient_gas_reading)
                utils.v_print('Gas: {0} Ohms'.format(ambient_gas_reading))
                # Rest for a second to save pounding the sensor / CPU unnecessarily.
                time.sleep(1)

            # Update the current time so while loop can progress.
            current_time = time.time()

        # Average the last (up to) 50 data points for smoothing.  The
        # previous code divided by a hard-coded 50.0, which skewed (or
        # zeroed) the baseline whenever fewer than 50 samples were captured.
        tail = burn_in_data[-50:]
        if not tail:
            raise RuntimeError(
                'No stable sensor readings collected during burn-in; '
                'cannot compute the ambient background baseline.')
        config['gas']['ambient_background'] = sum(tail) / len(tail)

        # Write data to config file.
        with open(_CONFIG_FILE_NAME, 'w') as json_file:
            json.dump(config, indent=4, fp=json_file)

        # return new data.
        return utils.get_json_from_file(_CONFIG_FILE_NAME)
Example no. 5
    def __init__(self):
        """Configure the sensor from its saved config, creating one on first run."""
        # Reuse an existing config file; otherwise verify we can write the
        # file, then burn the sensor in to produce a config from defaults.
        if utils.validate_file_exists(_CONFIG_FILE_NAME):
            config = utils.get_json_from_file(_CONFIG_FILE_NAME)
        else:
            utils.validate_can_write_file(_CONFIG_FILE_NAME,
                                          should_del_after=True)
            self._configure_sensor(_DEFAULT_SENSOR_CONFIG)
            config = self.first_time_setup(_DEFAULT_SENSOR_CONFIG)
        self._configure_sensor(config)

        # Populate properties based on config
        humidity_cfg = config['humidity']
        self._data = utils.DataCapture()
        self.humidity_baseline = humidity_cfg['baseline']
        self.humidity_gas_quality_ratio = humidity_cfg['quality_weighting']
        self.gas_baseline = config['gas']['ambient_background']
        self.__cpu = config['cpu']
        self.__cpu['smoothing'] = []
    def fetch_strava_activities(self, refresh_cache=False):
        """Fetches all Strava activities. If they are saved in a JSON file, it will use that as the source,
        otherwise will fetch all activities from the API.

        Args:
            refresh_cache (bool, optional): Will force a refresh of the data in saved JSON files. Defaults to False.

        Returns:
            list | None: The activities, or None when the API fetch fails
                (the failure is logged, not re-raised).
        """
        activities = get_json_from_file(self.dir, self.activities_filename)

        if activities and not refresh_cache:
            # Plain strings here: the originals were f-strings with no
            # placeholders (ruff F541).
            logging.info("Using cached activities for Strava data")
            return activities

        logging.info("Fetching new activities for Strava data")
        try:
            self.authorize_strava()
            activities = self.get_all_strava_pages()
            save_json_to_file(self.activities_filename, activities)
            return activities
        except Exception:
            logging.exception("Something went wrong, could not fetch Strava data")