def validate_marriage_after_fourteen(fams, indis):
    invalid_marriages = []

    for fid in fams:
        # if married
        if fams[fid]['MARR'] is not None:
            marriage_date = utils.parse_date(fams[fid]['MARR'])

            husbandID, wifeID = fams[fid]['HUSB'], fams[fid]['WIFE']
            husband_birth, wife_birth = indis[husbandID]['BIRT'], indis[
                wifeID]['BIRT']

            ages = []
            if husband_birth is not None:
                husband_birth = utils.parse_date(husband_birth)
                husband_marriage_age = utils.get_age(husband_birth,
                                                     marriage_date)
                ages.append(husband_marriage_age)

            if wife_birth is not None:
                wife_birth = utils.parse_date(wife_birth)
                wife_marriage_age = utils.get_age(wife_birth, marriage_date)
                ages.append(wife_marriage_age)

            if len(ages) > 0 and min(ages) < 14:
                invalid_marriages.append(
                    (fid, f'Family id={fid} has marriage before age 14'))

    return invalid_marriages
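These GEDCOM-style validators share one input shape: fams maps a family id to a dict with 'HUSB', 'WIFE', 'CHIL', 'MARR' and 'DIV' entries, and indis maps an individual id to a dict with keys such as 'NAME', 'SEX', 'BIRT', 'DEAT' and 'FAMS'. As a sanity check, the sketch below calls the validator above with stand-in helpers pasted into the same file; the stub utils namespace, the ISO date format, and the sample ids are assumptions for illustration, not part of the original project.
# Stand-in helpers so the validator above can run on its own;
# the real utils module may parse dates and compute ages differently.
from datetime import datetime
from types import SimpleNamespace

utils = SimpleNamespace(
    parse_date=lambda s: datetime.strptime(s, '%Y-%m-%d').date(),
    get_age=lambda birth, today: (today.year - birth.year
                                  - ((today.month, today.day) < (birth.month, birth.day))),
)

fams = {'F1': {'HUSB': 'I1', 'WIFE': 'I2', 'CHIL': [],
               'MARR': '2010-06-01', 'DIV': None}}
indis = {'I1': {'BIRT': '2000-01-01'},   # husband would be about 10 at the wedding
         'I2': {'BIRT': '1980-01-01'}}

print(validate_marriage_after_fourteen(fams, indis))
# [('F1', 'Family id=F1 has marriage before age 14')]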
Example #2
def gen_indi_table(fams, indis):
    indi_table = PrettyTable()
    indi_table.field_names = [
        'ID', 'Name', 'Gender', 'Birthday', 'Age', 'Alive', 'Death', 'Child',
        'Spouse'
    ]
    for indi_id in sorted(indis.keys()):
        indi_data = indis[indi_id]

        children = []
        spouse = None
        for fam_id in indi_data['FAMS']:
            children += fams[fam_id]['CHIL']
            if fams[fam_id]['DIV'] is None:
                if indi_data['SEX'] == 'M':
                    spouse = fams[fam_id]['WIFE']
                else:
                    spouse = fams[fam_id]['HUSB']

        age = None
        if indi_data['BIRT'] is not None:
            birthday = utils.parse_date(indi_data['BIRT'])
            age = utils.get_age(birthday)
            if age < 0:
                age = None
        alive = indi_data['DEAT'] is None
        indi_table.add_row([
            indi_id, indi_data['NAME'] or 'NA', indi_data['SEX'] or 'NA',
            indi_data['BIRT'] or 'NA',
            age if age is not None else 'NA',  # 'or' would hide an age of 0
            alive, indi_data['DEAT'] or 'NA',
            utils.format_list(children) if len(children) > 0 else 'NA',
            spouse or 'NA'
        ])
    return indi_table
Example #3
	def customer_registered(self, d):
		customer_birthdate = d["data"]["birthdate"]
		year = int(customer_birthdate[0:4])
		month = int(customer_birthdate[5:7])
		day = int(customer_birthdate[8:10])

		self.age = get_age(date(year, month, day))
Example #4
def print_individuals_pretty_table(individuals_dict):
    pt = PrettyTable(field_names=[
        "ID", "Name", "Gender", "Birthday", "Age", "Alive", "Death", "Child",
        "Spouse"
    ])
    for individual_id, individual_info in individuals_dict.items():
        individual_info["ALIVE"] = individual_info.get("DEAT") == None
        birth_date = None
        if individual_info.get("BIRT") != None:
            birth_date = datetime.datetime.strptime(
                individual_info.get("BIRT"), "%d %b %Y")
        death_date = None
        if individual_info.get("DEAT") != None:
            datetime.datetime.strptime(individual_info.get("DEAT"), "%d %b %Y")
        individual_info["AGE"] = get_age(birth_date, death_date)
        individual_info["FAMC"] = individual_info.get(
            "FAMC") if individual_info.get("FAMC") != None else "NA"
        individual_info["FAMS"] = individual_info.get(
            "FAMS") if individual_info.get("FAMS") != None else "NA"
        individual_info["DEAT"] = individual_info.get(
            "DEAT") if individual_info.get("DEAT") != None else "NA"
        individual_info_list = [individual_id]
        for key in get_individual_pretty_Table_order():
            individual_info_list.append(individual_info.get(key))
        pt.add_row(individual_info_list)
    print(pt)
Example #5
def root():  # pragma: no cover
    today = datetime.date.today()
    content = "<div>"
    for post in posts:
        content += '<br />{age}<a href="{url}">{title}</a>'.format(
            age=get_age(today, parse_datetime(post["date_gmt"])),
            url=post["link"],
            title=post["title"]["rendered"])
    content += "</div>"
    return """<html><a href="{}">Atom Feed</a><br />{}</html>""".format(
        url_for("recent_feed"), content)
Example #6
def merge_cash(voter_filename, data_filename, new_filename, year):
    ref_date = 0
    if year == 2019:
        # last day to designate vouchers for 2019 was November 29, 2019
        ref_date = datetime.date(2019, 11, 29)
    elif year == 2017:
        # last day to designate vouchers for 2017 was December 1st, 2017
        ref_date = datetime.date(2017, 12, 1)

    voter_file = pd.read_csv(voter_filename, sep='|', encoding="cp1252")
    data_file = pd.read_csv(data_filename)

    print("voter file", voter_file.head())
    print("data file", data_file.head())

    # keep only the transactions noted C3.2 (contributions)
    data_file = data_file[data_file.strPDCFormLineNumber == "C3.2"]

    # drop duplicates and irrelevant donations based on list above
    if year == 2019:
        data_file = data_file[
            ~data_file.strCampaignName.isin(NON_MUN_CAND_2019)]
    elif year == 2017:
        data_file = data_file[
            ~data_file.strCampaignName.isin(NON_MUN_CAND_2017)]

    data_file = data_file.drop_duplicates(subset=['intLinkID_SEEC'])

    print("voter file shape", voter_file.shape)
    print("donor file shape", data_file.shape)

    # in donor file, add first and last name columns to prepare for matching with voter file
    name_split = data_file["strTransactorName"].str.split()
    data_file['first_name'] = name_split.str[0]
    data_file['last_name'] = name_split.str[-1]

    # merge on first name, last name, city, and zip code (see McCabe/Herwig appendix)
    merged = pd.merge(data_file,
                      voter_file,
                      left_on=['first_name', 'last_name', 'strCity', 'strZip'],
                      right_on=['FName', 'LName', 'RegCity', 'RegZipCode'],
                      how="inner")
    print("merged", merged.head())
    print("merged size", merged.shape)

    merged['age'] = merged.apply(
        lambda x: utils.get_age(ref_date, x['Birthdate']), axis=1)
    merged['age_bucket'] = merged.apply(
        lambda x: utils.get_age_bucket(x['age']), axis=1)

    merged.to_csv(new_filename)
    return merged
def validate_parent_age(fams, indis):
    return_data = []

    for fid in fams:
        # reset for each family; a spouse or birth date may be missing
        husband_birthday, wife_birthday = None, None

        if fams[fid]['HUSB'] is not None:
            husband_id = fams[fid]['HUSB']
            husband_birth = indis[husband_id]['BIRT']
            if husband_birth is not None:
                husband_birthday = utils.parse_date(husband_birth)

        if fams[fid]['WIFE'] is not None:
            wife_id = fams[fid]['WIFE']
            wife_birth = indis[wife_id]['BIRT']
            if wife_birth is not None:
                wife_birthday = utils.parse_date(wife_birth)

        for cid in fams[fid]['CHIL']:
            if indis[cid]['BIRT'] is not None:
                child_birthday = utils.parse_date(indis[cid]['BIRT'])

                if husband_birthday is not None:
                    husband_age = utils.get_age(husband_birthday,
                                                child_birthday)
                    if husband_age > 80:
                        return_data.append((
                            husband_id,
                            f'Husband id={husband_id} in family id={fid} was born over 80 years before child id={cid}.'
                        ))

                if wife_birthday is not None:
                    wife_age = utils.get_age(wife_birthday, child_birthday)
                    if wife_age > 60:
                        return_data.append((
                            wife_id,
                            f'Wife id={wife_id} in family id={fid} was born over 60 years before child id={cid}.'
                        ))

    return return_data
Example #8
    def employees(self, source_employees, cid):
        """
        Transforms source employee objects.
        """
        for employee in source_employees:
            yield TargetEmployee(
                employee.id,  # Reference use only, NOT INSERTED INTO DB
                employee.gender,
                employee.did,
                employee.jid,
                self.tid,
                cid,
                self._apply_exchange_rate(employee.salary),
                employee.manager,
                get_age(employee.dob, self.today))
Example #9
def user_data(user):
    avatar_img = user.avatar_img if user.avatar_img else "avatar.png"
    avatar_large, avatar_medium, avatar_small = get_tile_image_name(avatar_img, 'l'), get_tile_image_name(avatar_img, 'm'), get_tile_image_name(avatar_img, 's')
    lib_id, lib_name = get_library(user.library_id)[:2]
    user_json = {"id":user.id, "username":user.username, "nickname":user.nickname, "realname":F(user.realname),
                 "sex":user.sex, "age":get_age(user.birthday), "birthday":user.birthday.strftime("%Y-%m-%d") if user.birthday else "",
                 "library_id":lib_id, "library_name":lib_name, "auth_type":user.auth_type, 
                 "telephone":F(user.telephone), "email":F(user.email), "qq":F(user.qq), "parent_qq":F(user.parent_qq),
                 "home_address":F(user.home_address), "school":F(user.school), "sign":F(user.sign), "hobby":F(user.hobby),
                 #"avatar_id":user.avatar_id if user.avatar_id else "",
                 #"large":request.build_absolute_uri(MEDIA_URL + avatar_large),"medium":request.build_absolute_uri(MEDIA_URL + avatar_medium),"small":request.build_absolute_uri(MEDIA_URL + avatar_small)}
                 "large":MEDIA_URL + avatar_large,"medium":MEDIA_URL + avatar_medium,"small":MEDIA_URL + avatar_small}
    #print user_data
    user_json["is_staff"] = user.is_staff
    return user_json
Example #10
def merge_voucher_data(voter_filename, data_filename, new_filename, year):
    ref_date = 0
    col_name = ""
    if year == 2019:
        # last day to designate vouchers for 2019 was November 29, 2019
        ref_date = datetime.date(2019, 11, 29)
        col_name = 'Participant ID (Participant) (Contact)'
    elif year == 2017:
        # last day to designate vouchers for 2017 was December 1st, 2017
        ref_date = datetime.date(2017, 12, 1)
        col_name = 'Participant ID (Participant)'
    voter_file = pd.read_csv(voter_filename, sep='|', encoding="cp1252")
    data_file = pd.read_csv(data_filename)

    # rename donor file column
    data_file.rename(columns={col_name: 'participant_id'}, inplace=True)

    print("voter file", voter_file.head())
    print("data file", data_file.head())

    # print # of rows and columns in voter file and donor file
    print("voter file shape", voter_file.shape)
    print("donor file shape", data_file.shape)

    # drop duplicates in donor file, to get people who used >= 1 democracy voucher
    vouchers_no_dups = data_file.drop_duplicates(subset="participant_id")
    print("size of no dups file", vouchers_no_dups.shape)
    print("vouchers no dups", vouchers_no_dups.head())

    # merge on participant_id and StateVoterID
    merged = pd.merge(vouchers_no_dups,
                      voter_file,
                      left_on='participant_id',
                      right_on='StateVoterID',
                      how="inner")
    print("merged", merged.head())
    print("merged size", merged.shape)

    # add raw_age column and put in age buckets
    merged['age'] = merged.apply(
        lambda x: utils.get_age(ref_date, x['Birthdate']), axis=1)
    merged['age_bucket'] = merged.apply(
        lambda x: utils.get_age_bucket(x['age']), axis=1)

    merged.to_csv(new_filename)
    return merged
def list_single_living(fams, indis):
    def is_married(iid):
        for fid in indis[iid]['FAMS']:
            if fams[fid]['DIV'] is None:
                return True
        return False

    ret_data = []

    for iid in indis:
        if not is_married(iid) and (
                indis[iid]['BIRT'] is None
                or utils.get_age(utils.parse_date(indis[iid]['BIRT'])) > 30):
            if indis[iid]['DEAT'] is None:
                ret_data.append(iid)

    return ret_data
def validate_reasonable_age(fams, indis):
    ret_data = []
    lifetime = 150

    for iid in indis:
        if indis[iid]['BIRT'] is not None:
            birth_date = utils.parse_date(indis[iid]['BIRT'])
            death_date = utils.current_date()

            if indis[iid]['DEAT'] is not None:
                death_date = utils.parse_date(indis[iid]['DEAT'])

            if utils.get_age(birth_date, death_date) > lifetime:
                ret_data.append(
                    (iid,
                     f'Individual id={iid} is older than {lifetime} years'))

    return ret_data
Example #13
def event_dict_for_user(user, access_token):
        age = utils.get_age(user.birthday.year, user.birthday.month, user.birthday.day)
        event = Event.objects.filter(age_years=age['years'], age_months=age['months'], age_days=age['days'])
        user_dict = serialize_eventuser_json(user, access_token)
        if user_dict is not None:

            event_dict = {}

            if event.count() > 0:
                event_dict = serialize_event_json(event[0])
                
            else:
                event_dict['error'] = {'NoEventError' : "There are no entries for %s at their age." % user.first_name}

            event_dict['person'] = user_dict

            return event_dict
        else:
            return None
Example #14
def pods(context):
    ret = get_client(context).list_pod_for_all_namespaces()

    pods = []
    for p in ret.items:
        pods.append({
            'metadata': {
                'namespace':
                p.metadata.namespace,
                'name':
                p.metadata.name,
                'creation_timestamp':
                safe_to_iso_timestamp(p.metadata.creation_timestamp),
                'age':
                get_age(p.metadata.creation_timestamp),
                'deletion_timestamp':
                safe_to_iso_timestamp(p.metadata.deletion_timestamp),
                'owner_references':
                list(
                    map(lambda r: {
                        'kind': r.kind,
                        'name': r.name
                    }, p.metadata.owner_references))
                if p.metadata.owner_references else None
            },
            'status': {
                'phase':
                p.status.phase,
                'container_statuses':
                list(
                    map(
                        lambda c: {
                            'name': c.name,
                            'ready': c.ready,
                            'restart_count': c.restart_count,
                            'state': c.state.to_dict(),
                            'last_state': c.last_state.to_dict()
                        }, p.status.container_statuses or []))
            }
        })
    return jsonify(pods)
Example #15
def get_players_info(cur, n_players=None):
    """
        多表查询获取球员基本数据
    """
    # 从Player表中获取球员基本信息
    if n_players:
        # 获取指定个数的球员信息
        sql = "SELECT * FROM Player LIMIT {};".format(n_players)
    else:
        # 获取所有球员信息
        sql = "SELECT * FROM Player;"

    rows = cur.execute(sql).fetchall()

    # 构造球员列表
    player_list = []
    for row in rows:
        player = dict()
        # 1. 姓名
        player['name'] = row[2]
        # 2. 年龄
        birthday_str = row[4]
        player['age'] = utils.get_age(birthday_str)
        # 3. 体重
        player['weight'] = row[5]
        # 4. 身高
        player['height'] = row[6]
        # 5. 平均评分
        player_api_id = row[1]
        player['average rating'] = utils.get_overall_rating(cur, player_api_id)

        player_list.append(player)

    # 将处理后的结果保存到JSON文件中
    with open(json_filepath, 'w') as f:
        json.dump(player_list, f)
Example #16
from http.server import HTTPServer, SimpleHTTPRequestHandler
from jinja2 import Environment, FileSystemLoader, select_autoescape
from utils import get_age, parse_drinks_list

env = Environment(loader=FileSystemLoader('.'),
                  autoescape=select_autoescape(['html']))
template = env.get_template('template.html')

with open('wine', 'r') as drink_file:
    drinks_list = drink_file.read()

rendered_page = template.render(time=get_age(),
                                drinks=parse_drinks_list(drinks_list))

with open('index.html', 'w', encoding="utf8") as file:
    file.write(rendered_page)

server = HTTPServer(('0.0.0.0', 8000), SimpleHTTPRequestHandler)
server.serve_forever()
        df_patient_copy = df_patient.copy(deep=True)
        df_patient_copy['yob'] = df_patient_copy['yob'].apply(lambda x: x - 1)

        df_patient_double = df_patient.append(df_patient_copy,
                                              ignore_index=True)

        print('Num. of patients with two YOBs:', len(df_patient_double))

        df_joined = pd.merge(df_patient_double,
                             df_voter_filtered,
                             on=['county', 'yob', 'gender'])  # race

        # print('Initial join size: {}'.format(len(df_joined)))

        df_joined['voter_age'] = df_joined.apply(
            lambda x: get_age(x['dob'], x['event_date']), axis=1)

        df_joined = df_joined.loc[df_joined['voter_age'] == df_joined['age']]

        # print('Intermediate join size: {}'.format(len(df_joined)))

        patient_serial_counts = df_joined['patient_serial'].value_counts(
        ).to_dict()

        filtered_patient_serials = [
            k for k, v in patient_serial_counts.items() if v <= eq_class_limit
        ]

        df_joined = df_joined.loc[df_joined['patient_serial'].isin(
            filtered_patient_serials)]
Example #18
def describe(context, namespace, pod):
    api_client = get_client(context)
    pod_ret = api_client.read_namespaced_pod(pod, namespace)

    events_ret = api_client.list_namespaced_event(namespace)
    pod_events = filter(
        lambda e:
        (e.involved_object.kind == 'Pod' and e.involved_object.name == pod),
        events_ret.items)

    return jsonify({
        'metadata': {
            'namespace':
            pod_ret.metadata.namespace,
            'name':
            pod_ret.metadata.name,
            'creation_timestamp':
            safe_to_iso_timestamp(pod_ret.metadata.creation_timestamp),
            'age':
            get_age(pod_ret.metadata.creation_timestamp),
            'deletion_timestamp':
            safe_to_iso_timestamp(pod_ret.metadata.deletion_timestamp),
            'labels':
            pod_ret.metadata.labels,
            'owner_references':
            list(
                map(lambda r: {
                    'kind': r.kind,
                    'name': r.name
                }, pod_ret.metadata.owner_references))
            if pod_ret.metadata.owner_references else None
        },
        'spec': {
            'node_name':
            pod_ret.spec.node_name,
            'containers':
            list(
                map(
                    lambda c: {
                        'name':
                        c.name,
                        'resources':
                        c.resources.to_dict(),
                        'liveness_probe':
                        safe_to_dict(c.liveness_probe),
                        'readiness_probe':
                        safe_to_dict(c.readiness_probe),
                        'env':
                        list(map(lambda e: e.to_dict(), c.env or [])),
                        'ports':
                        list(map(lambda p: p.to_dict(), c.ports or [])),
                        'volume_mounts':
                        list(map(lambda v: v.to_dict(), c.volume_mounts or []))
                    }, pod_ret.spec.containers)),
            'volumes':
            list(map(lambda v: v.to_dict(), pod_ret.spec.volumes)),
            'node_selector':
            pod_ret.spec.node_selector,
            'tolerations':
            list(map(lambda v: v.to_dict(), pod_ret.spec.tolerations))
            if pod_ret.spec.tolerations else None
        },
        'status': {
            'conditions':
            list(
                map(
                    lambda c: {
                        'status':
                        c.status,
                        'type':
                        c.type,
                        'last_transition_time':
                        safe_to_iso_timestamp(c.last_transition_time)
                    }, pod_ret.status.conditions)),
            'phase':
            pod_ret.status.phase,
            'container_statuses':
            list(
                map(
                    lambda c: {
                        'name': c.name,
                        'container_id': c.container_id,
                        'image': c.image,
                        'image_id': c.image_id,
                        'state': c.state.to_dict(),
                        'last_state': c.last_state.to_dict(),
                        'ready': c.ready,
                        'restart_count': c.restart_count
                    }, pod_ret.status.container_statuses or [])),
            'host_ip':
            pod_ret.status.host_ip,
            'pod_ip':
            pod_ret.status.pod_ip,
            'qos_class':
            pod_ret.status.qos_class
        },
        'events':
        list(
            map(
                lambda e: {
                    'type': e.type,
                    'reason': e.reason,
                    'metadata': {
                        'name':
                        pod_ret.metadata.name,
                        'creation_timestamp':
                        safe_to_iso_timestamp(e.metadata.creation_timestamp),
                        'age':
                        get_age(e.metadata.creation_timestamp)
                    },
                    'message': e.message,
                    'source': e.source.to_dict()
                }, pod_events))
    })
    def test_standard_age(self):
        birt = utils.parse_date('2001-03-14')
        today = utils.parse_date('2005-12-25')
        age = utils.get_age(birt, today)
        self.assertEqual(age, 4)

    def test_negative_age(self):
        birt = utils.parse_date('2001-03-14')
        today = utils.parse_date('1990-01-01')
        age = utils.get_age(birt, today)
        self.assertEqual(age, -12)

    def test_zero_age(self):
        birt = utils.parse_date('2001-03-14')
        today = utils.parse_date('2001-03-14')
        age = utils.get_age(birt, today)
        self.assertEqual(age, 0)
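Taken together, the three tests above pin down the expected get_age contract: it returns completed years between two dates and goes negative when the reference date precedes the birth date. A minimal implementation consistent with those cases might look like the sketch below; the real utils.parse_date and utils.get_age are not shown in this listing, and the ISO date format is an assumption taken from the test strings.
from datetime import date, datetime


def parse_date(text):
    # Assumed format: the tests above feed ISO-style 'YYYY-MM-DD' strings.
    return datetime.strptime(text, '%Y-%m-%d').date()


def get_age(birth, today=None):
    # Completed years between birth and today; negative when today is before birth.
    if today is None:
        today = date.today()
    before_birthday = (today.month, today.day) < (birth.month, birth.day)
    return today.year - birth.year - before_birthday


assert get_age(parse_date('2001-03-14'), parse_date('2005-12-25')) == 4
assert get_age(parse_date('2001-03-14'), parse_date('1990-01-01')) == -12
assert get_age(parse_date('2001-03-14'), parse_date('2001-03-14')) == 0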
Example #22
    def process_running(self, sport, data, exercise_index=0):
        if sport not in list(self.sports_lists.keys()):
            self.sports_lists[sport] = []

        filtered = {}

        tcx_file = f'{const.accesslink_tcx_file_prefix}{self.current_file_id}.json'

        filtered['start_time'] = self.current_file_id
        
        ### TODO:
        # Get the location of the first point on the route and work out its time zone
        # The times in the tcx files are in the UTC time zone

        with open(tcx_file, 'r') as f:
            tcx_data = json.load(f)

        has_route = False
        # if has_route:
        try:
            # Try to find the starting point in the first 10 positions
            got_starting_point = False
            for i in range(10):
                try:
                    first_route_point = {
                                            'latitude': float(tcx_data['TrainingCenterDatabase']['Activities']['Activity']['Lap'][0]['Track']['Trackpoint'][i]['Position']['LatitudeDegrees']),
                                            'longitude': float(tcx_data['TrainingCenterDatabase']['Activities']['Activity']['Lap'][0]['Track']['Trackpoint'][i]['Position']['LongitudeDegrees'])
                                        }
                    got_starting_point = True
                    break
                except:
                    pass

            if got_starting_point == False:
                raise Exception('Could not find starting point. Trying a different data format.')

            utils.get_weather_data_file(first_route_point, self.current_file_id)
            has_route = True
            filtered['landmark'], filtered['state'], filtered['country'] = utils.get_initial_location(first_route_point, filtered['start_time'])
        except:
            try:
                first_route_point = {
                                        'latitude': float(tcx_data['TrainingCenterDatabase']['Activities']['Activity']['Lap'][0]['Track'][0]['Trackpoint'][0]['Position']['LatitudeDegrees']),
                                        'longitude': float(tcx_data['TrainingCenterDatabase']['Activities']['Activity']['Lap'][0]['Track'][0]['Trackpoint'][0]['Position']['LongitudeDegrees'])
                                    }

                utils.get_weather_data_file(first_route_point, self.current_file_id)
                has_route = True
                filtered['landmark'], filtered['state'], filtered['country'] = utils.get_initial_location(first_route_point, filtered['start_time'])
            except:
                filtered['landmark'], filtered['state'], filtered['country'] = (const.empty_value, const.empty_value, const.empty_value)

        # Checking for no distance recorded
        if 'distance' not in data:
            filtered['distance'] = 1 # if there is no distance recorded, I'll assume it is 1km
        else:
            filtered['distance'] = utils.get_km(data['distance'])

        tcx_laps = tcx_data['TrainingCenterDatabase']['Activities']['Activity']['Lap']

        filtered['duration'] = utils.accesslink_time_to_python_time(data['duration'])

        filtered['avg_speed'] = utils.calculate_speed(filtered['distance'], filtered['duration'])

        try:
            filtered['max_speed'] = utils.find_tcx_max_speed(tcx_laps)
        except:
            filtered['max_speed'] = filtered['avg_speed']
        
        filtered['avg_pace'] = utils.get_pace(filtered['avg_speed'])
        filtered['max_pace'] = utils.get_pace(filtered['max_speed'])

        # Checking for no heart rate recorded
        try:
            filtered['avg_heart_rate'] = data['heart-rate']['average']
            filtered['max_heart_rate'] = data['heart-rate']['maximum']
        except:
            filtered['avg_heart_rate'] = const.empty_value
            filtered['max_heart_rate'] = filtered['avg_heart_rate']
            
        filtered['age'] = utils.get_age(filtered['start_time'])

        filtered['body_max_heart_rate'] = 220 - filtered['age']

        try:
            filtered['avg_heart_rate_as_percentage'] = round(filtered['avg_heart_rate']/filtered['body_max_heart_rate']*10000)/100.0
            filtered['max_heart_rate_as_percentage'] = round(filtered['max_heart_rate']/filtered['body_max_heart_rate']*10000)/100.0
        except:
            filtered['avg_heart_rate_as_percentage'] = const.empty_value
            filtered['max_heart_rate_as_percentage'] = filtered['avg_heart_rate_as_percentage']
        
        if has_route:
            samples = utils.convert_tcx_laps_to_downloaded_format(tcx_laps)

            _, _, _, filtered['has_negative_split'] = utils.get_data_at_dist(filtered['distance'], samples)
            if filtered['has_negative_split'] == const.empty_value:
                _, _, _, filtered['has_negative_split'] = utils.get_data_at_dist(filtered['distance'] - 0.01, samples)

            filtered['5km_time'], filtered['5km_avg_speed'], filtered['5km_avg_pace'], filtered['5km_has_negative_split'] = utils.get_data_at_dist(5, samples)
            filtered['10km_time'], filtered['10km_avg_speed'], filtered['10km_avg_pace'], filtered['10km_has_negative_split'] = utils.get_data_at_dist(10, samples)
            filtered['15km_time'], filtered['15km_avg_speed'], filtered['15km_avg_pace'], filtered['15km_has_negative_split'] = utils.get_data_at_dist(15, samples)
            filtered['21km_time'], filtered['21km_avg_speed'], filtered['21km_avg_pace'], filtered['21km_has_negative_split'] = utils.get_data_at_dist(21, samples)
            filtered['42km_time'], filtered['42km_avg_speed'], filtered['42km_avg_pace'], filtered['42km_has_negative_split'] = utils.get_data_at_dist(42, samples)
        else:
            filtered['has_negative_split'] = const.empty_value
            filtered['5km_time'], filtered['5km_avg_speed'], filtered['5km_avg_pace'], filtered['5km_has_negative_split'] = (const.empty_value, const.empty_value, const.empty_value, const.empty_value)
            filtered['10km_time'], filtered['10km_avg_speed'], filtered['10km_avg_pace'], filtered['10km_has_negative_split'] = (const.empty_value, const.empty_value, const.empty_value, const.empty_value)
            filtered['15km_time'], filtered['15km_avg_speed'], filtered['15km_avg_pace'], filtered['15km_has_negative_split'] = (const.empty_value, const.empty_value, const.empty_value, const.empty_value)
            filtered['21km_time'], filtered['21km_avg_speed'], filtered['21km_avg_pace'], filtered['21km_has_negative_split'] = (const.empty_value, const.empty_value, const.empty_value, const.empty_value)
            filtered['42km_time'], filtered['42km_avg_speed'], filtered['42km_avg_pace'], filtered['42km_has_negative_split'] = (const.empty_value, const.empty_value, const.empty_value, const.empty_value)

        filtered['day_link'] = utils.get_day_link(filtered['start_time'])
        # utils.pretty_print_json(filtered)
        # input()

        self.sports_lists[sport].append(filtered)
Example #23
    def process_distance_based_sport(self, sport, data, exercise_index=0):
        if sport not in list(self.sports_lists.keys()):
            self.sports_lists[sport] = []

        filtered = {}

        filtered['start_time'] = utils.polar_datetime_to_python_datetime_str(
            data['exercises'][exercise_index]['startTime'])

        has_route = 'recordedRoute' in data['exercises'][0]['samples']
        if has_route:
            first_route_point = data['exercises'][0]['samples'][
                'recordedRoute'][0]
            utils.get_weather_data_file(first_route_point,
                                        filtered['start_time'])
            filtered['landmark'], filtered['state'], filtered[
                'country'] = utils.get_initial_location(
                    first_route_point, filtered['start_time'])
        else:
            filtered['landmark'], filtered['state'], filtered['country'] = (
                const.empty_value, const.empty_value, const.empty_value)

        # Checking for no distance recorded
        if 'distance' not in data['exercises'][exercise_index]:
            filtered['distance'] = const.empty_value
        else:
            filtered['distance'] = utils.get_km(
                data['exercises'][exercise_index]['distance'])

        filtered['duration'] = utils.polar_time_to_python_time(
            data['exercises'][exercise_index]['duration'])

        try:
            filtered['avg_speed'] = utils.round_speed(
                data['exercises'][exercise_index]['speed']['avg'])
            filtered['max_speed'] = utils.round_speed(
                data['exercises'][exercise_index]['speed']['max'])
        except:
            try:
                filtered['avg_speed'] = utils.calculate_speed(
                    filtered['distance'], filtered['duration'])
            except:
                filtered['avg_speed'] = const.empty_value
            filtered['max_speed'] = filtered['avg_speed']

        try:
            filtered['avg_pace'] = utils.get_pace(filtered['avg_speed'])
        except:
            filtered['avg_pace'] = const.empty_value

        # Checking for no heart rate recorded
        if 'heartRate' not in data['exercises'][exercise_index]:
            filtered['avg_heart_rate'] = const.empty_value
            filtered['max_heart_rate'] = filtered['avg_heart_rate']
        else:
            filtered['avg_heart_rate'] = data['exercises'][exercise_index][
                'heartRate']['avg']
            filtered['max_heart_rate'] = data['exercises'][exercise_index][
                'heartRate']['max']

        filtered['age'] = utils.get_age(filtered['start_time'])

        filtered['body_max_heart_rate'] = 220 - filtered['age']

        try:
            filtered['avg_heart_rate_as_percentage'] = round(
                filtered['avg_heart_rate'] / filtered['body_max_heart_rate'] *
                10000) / 100.0
            filtered['max_heart_rate_as_percentage'] = round(
                filtered['max_heart_rate'] / filtered['body_max_heart_rate'] *
                10000) / 100.0
        except:
            filtered['avg_heart_rate_as_percentage'] = const.empty_value
            filtered['max_heart_rate_as_percentage'] = filtered[
                'avg_heart_rate_as_percentage']

        filtered['day_link'] = utils.get_day_link(filtered['start_time'])
        # utils.pretty_print_json(filtered)
        # input()

        self.sports_lists[sport].append(filtered)
Example #24
def create_contract():

    try:
        treatment_start = parser.parse(request.json['treatment_start']).date()

        # Find or create insurer
        insurer_name = request.json['insurer']
        match = db.session.query(Insurer) \
            .filter(Insurer.name == insurer_name) \
            .first()

        if match:
            insurer = match
        else:
            insurer = Insurer(name=insurer_name)

        # Find or create manufacturer
        manufacturer_name = request.json['manufacturer']
        match = db.session.query(Producer) \
            .filter(Producer.name == manufacturer_name) \
            .first()

        if match:
            manufacturer = match
        else:
            manufacturer = Producer(name=manufacturer_name)

        # Find or create patient
        patient_surname = request.json['patient_surname']
        patient_name = request.json['patient_name']
        patient_birthday = parser.parse(
            request.json['patient_birthday']).date()
        patient_cancer_stage = CancerStage(
            request.json['patient_cancer_stage'])

        # Check if patient is young enough for enrollment
        patient_age = get_age(patient_birthday)
        if patient_age >= 55:
            return Response(
                f'{{"Error": "Patient does not fulfill enrollment criteria: Age {patient_age} >= 55"}}',
                status=400,
                mimetype='application/json')

        match = db.session.query(Patient) \
            .filter(Patient.name == patient_name) \
            .filter(Patient.surname == patient_surname) \
            .filter(Patient.birthday == patient_birthday) \
            .first()

        if match:
            patient = match
        else:
            patient = Patient(name=patient_name,
                              surname=patient_surname,
                              birthday=patient_birthday,
                              cancer_stage=patient_cancer_stage)

        # Find or create product configuration
        product_brand = request.json['product_brand']
        product_name = request.json['product_name']
        product_units = request.json['product_units']
        product_baseprice = request.json['product_baseprice']
        match = db.session.query(Product) \
            .filter(Product.brand == product_brand) \
            .filter(Product.product == product_name) \
            .filter(Product.units == product_units) \
            .filter(Product.baseprice == product_baseprice) \
            .first()

        if match:
            product = match
        else:
            product = Product(brand=product_brand,
                              product=product_name,
                              units=product_units,
                              baseprice=product_baseprice)

        # Find or create payable amounts configuration
        os = request.json['os']
        no_os = request.json['no_os']
        pfs = request.json['pfs']
        no_pfs = request.json['no_pfs']
        match = db.session.query(PayableAmount) \
            .filter(PayableAmount.os_after_12_months == os) \
            .filter(PayableAmount.no_os_after_12_months == no_os) \
            .filter(PayableAmount.pfs_after_9_months == pfs) \
            .filter(PayableAmount.no_pfs_after_9_months == no_pfs) \
            .first()

        if match:
            payable_amounts = match
        else:
            payable_amounts = PayableAmount(os_after_12_months=os,
                                            no_os_after_12_months=no_os,
                                            pfs_after_9_months=pfs,
                                            no_pfs_after_9_months=no_pfs)

        new_contract = Contract(insurer=insurer,
                                producer=manufacturer,
                                product=product,
                                patient=patient,
                                status='ongoing',
                                treatment_start=treatment_start,
                                payable_amounts=payable_amounts)

        # Check if contract is already finished -> simulation purposes
        payable = check_contract_status(new_contract.to_dict(), [])

        if not payable == -1:
            # No events could have been generated at contract creation -> bill for 9 months PFS
            new_contract.amount = 9 * (product_baseprice * payable)
            new_contract.status = 'finished'

        db.session.add(new_contract)
        db.session.commit()

        return Response('{"status": "ok"}', status=200)

    except:

        return Response('{"status": "error"}', status=500)
Example #25
    def process_distance_based_sport(self, sport, data, exercise_index=0):
        if sport not in list(self.sports_lists.keys()):
            self.sports_lists[sport] = []

        filtered = {}

        tcx_file = f'{const.accesslink_tcx_file_prefix}{self.current_file_id}.json'

        filtered['start_time'] = self.current_file_id
        
        ### TODO:
        # Get the location of the first point on the route and work out its time zone
        # The times in the tcx files are in the UTC time zone

        # extract the laps here as well; they are needed for the max-speed lookup below
        tcx_data, tcx_laps = None, None
        try:
            with open(tcx_file, 'r') as f:
                tcx_data = json.load(f)
            tcx_laps = tcx_data['TrainingCenterDatabase']['Activities']['Activity']['Lap']
        except:
            pass

        has_route = False
        # if has_route:
        try:
            first_route_point = {
                                    'latitude': float(tcx_data['TrainingCenterDatabase']['Activities']['Activity']['Lap'][0]['Track']['Trackpoint'][0]['Position']['LatitudeDegrees']),
                                    'longitude': float(tcx_data['TrainingCenterDatabase']['Activities']['Activity']['Lap'][0]['Track']['Trackpoint'][0]['Position']['LongitudeDegrees'])
                                }
            utils.get_weather_data_file(first_route_point, self.current_file_id)
            has_route = True
            filtered['landmark'], filtered['state'], filtered['country'] = utils.get_initial_location(first_route_point, filtered['start_time'])
        except:
            filtered['landmark'], filtered['state'], filtered['country'] = (const.empty_value, const.empty_value, const.empty_value)

        # Checking for no distance recorded
        if 'distance' not in data:
            filtered['distance'] = const.empty_value
        else:
            filtered['distance'] = utils.get_km(data['distance'])

        filtered['duration'] = utils.accesslink_time_to_python_time(data['duration'])

        try:
            filtered['avg_speed'] = utils.calculate_speed(filtered['distance'], filtered['duration'])
        except:
            filtered['avg_speed'] = const.empty_value

        try:
            filtered['max_speed'] = utils.find_tcx_max_speed(tcx_laps)
        except:
            filtered['max_speed'] = filtered['avg_speed']
        
        filtered['avg_pace'] = utils.get_pace(filtered['avg_speed'])

        # Checking for no heart rate recorded
        try:
            filtered['avg_heart_rate'] = data['heart-rate']['average']
            filtered['max_heart_rate'] = data['heart-rate']['maximum']
        except:
            filtered['avg_heart_rate'] = const.empty_value
            filtered['max_heart_rate'] = filtered['avg_heart_rate']
            
        filtered['age'] = utils.get_age(filtered['start_time'])

        filtered['body_max_heart_rate'] = 220 - filtered['age']

        try:
            filtered['avg_heart_rate_as_percentage'] = round(filtered['avg_heart_rate']/filtered['body_max_heart_rate']*10000)/100.0
            filtered['max_heart_rate_as_percentage'] = round(filtered['max_heart_rate']/filtered['body_max_heart_rate']*10000)/100.0
        except:
            filtered['avg_heart_rate_as_percentage'] = const.empty_value
            filtered['max_heart_rate_as_percentage'] = filtered['avg_heart_rate_as_percentage']
        
        filtered['day_link'] = utils.get_day_link(filtered['start_time'])
        # utils.pretty_print_json(filtered)
        # input()

        self.sports_lists[sport].append(filtered)
Example #26
    def process_running(self, sport, data, exercise_index=0):
        if sport not in list(self.sports_lists.keys()):
            self.sports_lists[sport] = []

        filtered = {}

        filtered['start_time'] = utils.polar_datetime_to_python_datetime_str(
            data['exercises'][exercise_index]['startTime'])

        has_route = 'recordedRoute' in data['exercises'][0]['samples']
        if has_route:
            first_route_point = data['exercises'][0]['samples'][
                'recordedRoute'][0]
            utils.get_weather_data_file(first_route_point,
                                        filtered['start_time'])
            filtered['landmark'], filtered['state'], filtered[
                'country'] = utils.get_initial_location(
                    first_route_point, filtered['start_time'])
        else:
            filtered['landmark'], filtered['state'], filtered['country'] = (
                const.empty_value, const.empty_value, const.empty_value)

        # Checking for no distance recorded
        if 'distance' not in data['exercises'][exercise_index]:
            filtered[
                'distance'] = 1  # if there is no distance recorded, I'll assume it is 1km
        else:
            filtered['distance'] = utils.get_km(
                data['exercises'][exercise_index]['distance'])

        filtered['duration'] = utils.polar_time_to_python_time(
            data['exercises'][exercise_index]['duration'])

        filtered['avg_speed'] = utils.round_speed(
            data['exercises'][exercise_index]['speed']['avg'])
        filtered['max_speed'] = utils.round_speed(
            data['exercises'][exercise_index]['speed']['max'])

        # Checking for zero speed
        if filtered['avg_speed'] == 0:
            filtered['avg_speed'] = utils.calculate_speed(
                filtered['distance'], filtered['duration'])
            filtered['max_speed'] = filtered['avg_speed']

        filtered['avg_pace'] = utils.get_pace(filtered['avg_speed'])
        filtered['max_pace'] = utils.get_pace(filtered['max_speed'])

        # Checking for no heart rate recorded
        if 'heartRate' not in data['exercises'][exercise_index]:
            filtered['avg_heart_rate'] = const.empty_value
            filtered['max_heart_rate'] = filtered['avg_heart_rate']
        else:
            filtered['avg_heart_rate'] = data['exercises'][exercise_index][
                'heartRate']['avg']
            filtered['max_heart_rate'] = data['exercises'][exercise_index][
                'heartRate']['max']

        filtered['age'] = utils.get_age(filtered['start_time'])

        filtered['body_max_heart_rate'] = 220 - filtered['age']

        try:
            filtered['avg_heart_rate_as_percentage'] = round(
                filtered['avg_heart_rate'] / filtered['body_max_heart_rate'] *
                10000) / 100.0
            filtered['max_heart_rate_as_percentage'] = round(
                filtered['max_heart_rate'] / filtered['body_max_heart_rate'] *
                10000) / 100.0
        except:
            filtered['avg_heart_rate_as_percentage'] = const.empty_value
            filtered['max_heart_rate_as_percentage'] = filtered[
                'avg_heart_rate_as_percentage']

        if has_route:
            _, _, _, filtered['has_negative_split'] = utils.get_data_at_dist(
                filtered['distance'],
                data['exercises'][exercise_index]['samples']['distance'])
            if filtered['has_negative_split'] == const.empty_value:
                _, _, _, filtered[
                    'has_negative_split'] = utils.get_data_at_dist(
                        filtered['distance'] - 0.01, data['exercises']
                        [exercise_index]['samples']['distance'])

            filtered['5km_time'], filtered['5km_avg_speed'], filtered[
                '5km_avg_pace'], filtered[
                    '5km_has_negative_split'] = utils.get_data_at_dist(
                        5, data['exercises'][0]['samples']['distance'])
            filtered['10km_time'], filtered['10km_avg_speed'], filtered[
                '10km_avg_pace'], filtered[
                    '10km_has_negative_split'] = utils.get_data_at_dist(
                        10, data['exercises'][0]['samples']['distance'])
            filtered['15km_time'], filtered['15km_avg_speed'], filtered[
                '15km_avg_pace'], filtered[
                    '15km_has_negative_split'] = utils.get_data_at_dist(
                        15, data['exercises'][0]['samples']['distance'])
            filtered['21km_time'], filtered['21km_avg_speed'], filtered[
                '21km_avg_pace'], filtered[
                    '21km_has_negative_split'] = utils.get_data_at_dist(
                        21, data['exercises'][0]['samples']['distance'])
            filtered['42km_time'], filtered['42km_avg_speed'], filtered[
                '42km_avg_pace'], filtered[
                    '42km_has_negative_split'] = utils.get_data_at_dist(
                        42, data['exercises'][0]['samples']['distance'])
        else:
            filtered['has_negative_split'] = const.empty_value
            filtered['5km_time'], filtered['5km_avg_speed'], filtered[
                '5km_avg_pace'], filtered['5km_has_negative_split'] = (
                    const.empty_value, const.empty_value, const.empty_value,
                    const.empty_value)
            filtered['10km_time'], filtered['10km_avg_speed'], filtered[
                '10km_avg_pace'], filtered['10km_has_negative_split'] = (
                    const.empty_value, const.empty_value, const.empty_value,
                    const.empty_value)
            filtered['15km_time'], filtered['15km_avg_speed'], filtered[
                '15km_avg_pace'], filtered['15km_has_negative_split'] = (
                    const.empty_value, const.empty_value, const.empty_value,
                    const.empty_value)
            filtered['21km_time'], filtered['21km_avg_speed'], filtered[
                '21km_avg_pace'], filtered['21km_has_negative_split'] = (
                    const.empty_value, const.empty_value, const.empty_value,
                    const.empty_value)
            filtered['42km_time'], filtered['42km_avg_speed'], filtered[
                '42km_avg_pace'], filtered['42km_has_negative_split'] = (
                    const.empty_value, const.empty_value, const.empty_value,
                    const.empty_value)

        filtered['day_link'] = utils.get_day_link(filtered['start_time'])
        # utils.pretty_print_json(filtered)
        # input()

        self.sports_lists[sport].append(filtered)