Пример #1
0
 def get_activity(self, activity_id):
     """Fetch one activity by id from the service and wrap it in Activity.

     Raises Exception carrying the raw response body when it is not valid
     JSON (e.g. an HTML error page), with the decode error chained as the
     cause for easier debugging.
     """
     url = '{b}/activity/{act}'.format(b=self.base_url, act=activity_id)
     # NOTE(review): verify=False disables TLS certificate verification —
     # confirm this is intentional (e.g. internal self-signed certificate).
     resp = self.s.get(url, verify=False)
     try:
         return Activity(resp.json())
     except ValueError as exc:
         # Chain the JSON decode error so it is not silently discarded.
         raise Exception(resp.text) from exc
Пример #2
0
def store_activites():
    """Fetch today's Fitbit stats, weight goal, sleep goal, latest sleep log
    and latest activity, then persist them into the local stores.

    Reads the OAuth bearer token from config, upserts the combined stats
    document keyed by date, upserts the activity document keyed by date,
    and prints the parsed Activity.
    """
    with application.app_context():
        fitbit_secret_token = config.get('config', 'fitbit_secret_token')
        # Build the auth header once (was duplicated before the activity
        # request in the original).
        secret_header = {
            'Authorization': 'Bearer {}'.format(fitbit_secret_token)
        }
        # Capture the date once so every request and stored record refers
        # to the same day even if this job runs across midnight.
        local_date = time_helper.get_local_date()

        response = requests.get(
            f'https://api.fitbit.com/1/user/-/activities/date/{local_date}.json',
            headers=secret_header)
        weight_response = requests.get(
            'https://api.fitbit.com/1/user/-/body/log/weight/goal.json',
            headers=secret_header)
        sleep_goal_response = requests.get(
            'https://api.fitbit.com/1/user/-/sleep/goal.json',
            headers=secret_header)
        sleep_response = requests.get(
            f'https://api.fitbit.com/1.2/user/-/sleep/list.json?beforeDate={local_date}&sort=desc&offset=0&limit=1',
            headers=secret_header)

        # Use requests' built-in JSON decoding instead of json.loads(text).
        json_data = response.json()
        weight_json_data = weight_response.json()
        sleep_goal_json_data = sleep_goal_response.json()
        sleep_json_data = sleep_response.json()

        json_data['date'] = local_date
        json_data['startWeight'] = weight_json_data['goal']['startWeight']
        json_data['weight'] = weight_json_data['goal']['weight']
        json_data['sleepGoalMinutes'] = sleep_goal_json_data['goal']['minDuration']
        json_data['sleepMinutes'] = sleep_json_data['sleep'][0]['timeInBed']
        json_data['sleepMinutesAwake'] = sleep_json_data['sleep'][0]['minutesAwake']

        fitbit_stats_db.update({'date': f'{json_data["date"]}'},
                               json_data,
                               upsert=True)

        # Parse into the domain type; the result was unused in the original
        # but the call is kept in case stats_from_dict validates/side-effects.
        Stats.stats_from_dict(json_data)

        response = requests.get(
            f'https://api.fitbit.com/1/user/-/activities/list.json?afterDate={local_date}&sort=desc&offset=0&limit=1',
            headers=secret_header)

        json_data = response.json()
        json_data['date'] = local_date

        # Manual upsert by date (mirrors the stats upsert above).
        if fitbit_activity_db.find({'date': f'{json_data["date"]}'}).count():
            fitbit_activity_db.update({'date': f'{json_data["date"]}'},
                                      json_data)
        else:
            fitbit_activity_db.insert(json_data)

        result = Activity.activity_from_dict(json_data)
        print(result)
Пример #3
0
 def create(self, actId, persIds, year, month, day, time, description):
     """Validate and store a new Activity.

     Returns the stored Activity on success; otherwise returns the list
     of validation error messages. On success an undo operation
     (delete/store pair) is registered with the undo controller.
     """
     if self.__validator.validateTimeDate(year, month, day, time) == True:
         # Invalid time/date: build the activity with a placeholder date so
         # the remaining field validation still runs, then add the error.
         activity = Activity(actId, persIds, date(2018, 3, 12), time,
                             description)
         errors = self.__validator.validate(activity)
         errors.append(" Invalid time or date!")
     else:
         activity = Activity(actId, persIds, date(year, month, day), time,
                             description)
         errors = self.__validator.validate(activity)
     if errors:
         return errors
     # Success path: register undo (delete reverses store), then persist.
     undo_fn = Function(self.__repo.delete, activity)
     redo_fn = Function(self.__repo.store, activity)
     self.__undoController.addOperation(Operation(undo_fn, redo_fn))
     self.__repo.store(activity)
     return activity
Пример #4
0
 def update(self, activity, newPersIds, newYear, newMonth, newDay, newTime,
            newDescr):
     """Validate and apply an update to an existing Activity.

     Returns the repository's update result on success; otherwise returns
     the list of validation error messages. On success an undo operation
     (swap of old/new snapshots) is registered with the undo controller.
     """
     if self.__validator.validateTimeDate(newYear, newMonth, newDay,
                                          newTime) == True:
         # Invalid time/date: validate the remaining fields against a
         # placeholder-dated candidate and report all errors together.
         candidate = Activity(activity.activityId, newPersIds,
                              date(2018, 3, 12), newTime, newDescr)
         errors = self.__validator.validate(candidate)
         errors.append(" Invalid time or date!")
         return errors
     candidate = Activity(activity.activityId, newPersIds,
                          date(newYear, newMonth, newDay), newTime, newDescr)
     # Snapshot the current state so the undo operation can restore it.
     snapshot = Activity(activity.activityId, activity.personIds,
                         activity.date, activity.time, activity.description)
     errors = self.__validator.validate(candidate)
     if errors:
         return errors
     self.__undoController.addOperation(
         Operation(Function(self.__repo.update, candidate, snapshot),
                   Function(self.__repo.update, snapshot, candidate)))
     return self.__repo.update(activity, candidate)
 def _loadFile(self):
     """Populate the repository from the records in self._fileName.

     Each record line holds: id, person-id list, ISO date (Y-M-D), time
     and description, separated by runs of ten spaces. Reading stops at
     the first line of two characters or fewer (e.g. a trailing blank
     line). Prints a message instead of raising when the file cannot be
     read.
     """
     try:
         # 'with' guarantees the handle is closed on every path. The
         # original closed f in a finally block, which raised NameError
         # whenever open() itself failed (f was never bound).
         with open(self._fileName, "r") as f:
             for line in f:
                 if len(line) <= 2:
                     break
                 tok = line.split("          ")
                 tokDate = tok[2].split("-")
                 tok[1] = readList(tok[1])
                 act = Activity(
                     int(tok[0].strip()), tok[1],
                     date(int(tokDate[0]), int(tokDate[1]), int(tokDate[2])),
                     tok[3], tok[4].strip())
                 ActivityRepository.store(self, act)
     except IOError as e:
         print("Cannot load file" + str(e))
Пример #6
0
    def get_webform(self, fig_url, download_images=False):
        """Fetch a webform figure from the service and assemble a Figure.

        Populates contributor info, provenance (a Parent when the figure
        came from a published source), and per-image Dataset metadata
        (temporal/spatial extents plus a synthetic Activity) from the
        returned JSON.

        NOTE(review): Python 2 only — relies on `except Type, e` syntax,
        `print` statements, and list-returning dict.keys().
        NOTE(review): `download_images` is never used in the visible part
        of this method — confirm against the full source.
        NOTE(review): the method appears to continue past this chunk (no
        return statement is visible here).
        """
        full_url = '{b}{url}?token={t}'.format(b=self.base_url,
                                               url=fig_url,
                                               t=self.token)
        webform_json = requests.get(full_url).json()

        #TODO: refactor the service so this isn't necessary
        # Grab the first (presumably only) node id in the response.
        webform_nid = webform_json.keys()[0]
        figure_json = webform_json[webform_nid]['figure'][0]

        f = Figure(figure_json, trans=trans.FIG_TRANSLATIONS)

        #Add contributor info
        if 'list_the_creator_of_the_figure' in figure_json:
            f.add_contributor(
                parse_creators(figure_json['list_the_creator_of_the_figure']))

        #Add provenance information (wasDerivedFrom parent)
        if 'what_type_of_source_provided_this_figure' in figure_json and figure_json[
                'what_type_of_source_provided_this_figure'] == 'published_source':
            f.add_parent(
                Parent(deepcopy(f.original),
                       trans=trans.PARENT_TRANSLATIONS,
                       pubtype_map=trans.PARENT_PUBTYPE_MAP,
                       search_hints=trans.PARENT_SEARCH_HINTS))

        if 'images' in webform_json[webform_nid]:
            for img_idx, image in enumerate(
                    webform_json[webform_nid]['images']):
                image_obj = Image(
                    image,
                    local_path=self.get_local_image_path(image),
                    remote_path=self.get_remote_image_path(image),
                    trans=trans.IMG_TRANSLATIONS)

                #Add contributor info
                if 'list_the_creator_of_the_image' in image:
                    image_obj.add_contributor(
                        parse_creators(image['list_the_creator_of_the_image']))

                #TODO: this just keeps getting worse
                if 'datasources' in webform_json[webform_nid]['images'][
                        img_idx]:
                    for dataset_json in webform_json[webform_nid]['images'][
                            img_idx]['datasources']:
                        dataset = Dataset(dataset_json,
                                          trans=trans.DATASET_TRANSLATIONS,
                                          known_ids=trans.DATASET_IDS)

                        #Commence the hacks
                        # Temporal extent: "start end" in ISO format; fall
                        # back to None when either field is missing/unparsable.
                        try:
                            dataset.temporal_extent = ' '.join([
                                parse(dataset_json[field]).isoformat()
                                for field in ['start_time', 'end_time']
                            ])
                        except TypeError, e:
                            print 'Problem with start/end time: ', fig_url, f.title, e
                            print dataset_json['start_time'], dataset_json[
                                'end_time']
                            dataset.temporal_extent = None
                        except ValueError, e:
                            print 'Problem with start/end time: ', fig_url, f.title, e
                            print dataset_json['start_time'], dataset_json[
                                'end_time']
                            dataset.temporal_extent = None

                        # Spatial extent: "key: value;" pairs for the four
                        # bounding-box fields, space-joined.
                        dataset.spatial_extent = ' '.join([
                            '{k}: {v};'.format(k=key, v=dataset_json[key])
                            for key in [
                                'maximum_latitude', 'minimum_latitude',
                                'maximum_longitude', 'minimum_longitude'
                            ]
                        ])

                        #Filter overlapping Dataset keys out
                        activity_json = {
                            k: dataset_json[k]
                            for k in dataset_json if k not in [
                                'href', 'uri', 'identifier', 'start_time',
                                'end_time'
                            ]
                        }

                        #Add synthetic identifier
                        # "<image-id-prefix>-<dataset-id>-process"
                        activity_json['identifier'] = '-'.join(
                            (image_obj.identifier.split('-')[0],
                             dataset.identifier, 'process'))
                        dataset.activity = Activity(
                            activity_json, trans=trans.ACT_TRANSLATIONS)

                        #TODO: Extract DOIs from citation
                        image_obj.datasets.append(dataset)

                f.images.append(image_obj)