Example #1
def upload_summaries(oh_user, summaries, file_name, existing_file_id):
    temp_dir, file = write_json_data_to_tmp_file(f'garmin-health-api-{file_name}.json', summaries)
    api.upload_aws(file, create_metadata(file_name),
                   oh_user.get_access_token(),
                   project_member_id=oh_user.oh_id,
                   max_bytes=MAX_FILE_BYTES)
    if existing_file_id:
        api.delete_file(oh_user.get_access_token(), file_id=existing_file_id)
    os.remove(file)
    os.rmdir(temp_dir)
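Most of the examples below follow the same replace pattern shown here: write the new export to a temporary file, upload it with upload_aws, and only then delete the previous copy, so a failed upload never leaves the member without data. A minimal sketch of that pattern, assuming the api module is ohapi.api (as in these project templates) and using placeholder names for the token, member id, local path, and old file id:

from ohapi import api

def replace_member_file(access_token, project_member_id, local_path, old_file_id=None):
    # Upload the replacement first so the member keeps the old file if this call fails.
    api.upload_aws(local_path,
                   {'description': 'Replacement data file.', 'tags': ['json']},
                   access_token,
                   project_member_id=project_member_id)
    # Remove the previous copy only after the new one is in place.
    if old_file_id:
        api.delete_file(access_token, file_id=old_file_id)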
Example #2
def add_jawbone_data(oh_member, data, endpoint):
    # delete old file and upload new to open humans
    tmp_directory = tempfile.mkdtemp()
    metadata = {
        'tags': ['Jawbone'],
        'updated_at': str(datetime.utcnow()),
        }
    if endpoint == 'moves':
        metadata['description'] = ('Jawbone "moves" data, including steps, '
                                   'calories, and activity')
        metadata['tags'].append('steps')
    elif endpoint == 'sleeps':
        metadata['description'] = ('Jawbone "sleeps" data, including time, '
                                   'duration, and depth estimates.')
        metadata['tags'].append('sleep')
    elif endpoint == 'heartrates':
        metadata['description'] = ('Jawbone "heartrates" data, including '
                                   'resting heartrates')
        metadata['tags'].append('heartrate')
    out_file = os.path.join(
        tmp_directory,
        'jawbone-{}-data.json'.format(endpoint))
    logger.debug('deleted old file for {}'.format(oh_member.oh_id))
    api.delete_file(oh_member.access_token,
                    oh_member.oh_id,
                    file_basename='jawbone-{}-data.json'.format(endpoint))
    with open(out_file, 'w') as json_file:
        json.dump(data, json_file)
        json_file.flush()
    api.upload_aws(out_file, metadata,
                   oh_member.access_token,
                   project_member_id=oh_member.oh_id)
    logger.debug('added new jawbone {} file for {}'.format(
        endpoint, oh_member.oh_id))
Example #3
def fetch_googlefit_data(oh_id):
    '''
    Fetches all of the googlefit data for a given user
    '''
    print("Started googlefit update task")
    try:
        current_dt = datetime.utcnow()
        oh_member = OpenHumansMember.objects.get(oh_id=oh_id)
        gf_member = oh_member.googlefit_member
        oh_access_token = oh_member.get_access_token()
        gf_access_token = gf_member.get_access_token()

        basenames_to_ids = get_existing_basenames_to_ids(oh_member)

        filesmonth = get_googlefit_data(oh_access_token, gf_access_token, current_dt)
        for fn, month in filesmonth:
            api.upload_aws(fn, create_metadata(month),
                           oh_access_token,
                           project_member_id=oh_id)
            basename = os.path.basename(fn)
            if basename in basenames_to_ids:
                file_id_to_delete = basenames_to_ids[basename]
                api.delete_file(oh_access_token, file_id=file_id_to_delete)

        gf_member.last_updated = arrow.now().format()
        gf_member.save()

    except Exception as e:
        import traceback
        print("Fetching googlefit data failed: {}".format(e))
        print(traceback.format_exc())
        # queue to retry later
        fetch_googlefit_data.apply_async(args=[oh_id], countdown=3600)
        raise
Example #4
def process_file(dfile, access_token, member, metadata, taxonomy):
    try:
        verify_ubiome(dfile)
        tmp_directory = tempfile.mkdtemp()
        base_filename = dfile['basename'].replace('.zip', '')
        taxonomy_file = base_filename + '.taxonomy.json'
        raw_filename = temp_join(tmp_directory, taxonomy_file)
        metadata = {
            'description': 'uBiome 16S taxonomy data, JSON format.',
            'tags': ['json', 'uBiome', '16S']
        }
        with open(raw_filename, 'w') as raw_file:
            json.dump(taxonomy, raw_file)
            raw_file.flush()

        api.upload_aws(raw_filename,
                       metadata,
                       access_token,
                       base_url=OH_BASE_URL,
                       project_member_id=str(member['project_member_id']))
    except:
        api.message("uBiome integration: A broken file was deleted",
                    "While processing your uBiome file "
                    "we noticed that your file does not conform "
                    "to the expected specifications and it was "
                    "thus deleted. Email us as [email protected] if "
                    "you think this file should be valid.",
                    access_token,
                    base_url=OH_BASE_URL)
        api.delete_file(access_token,
                        str(member['project_member_id']),
                        file_id=str(dfile['id']),
                        base_url=OH_BASE_URL)
        raise
Example #5
def bundle(handler, model):
    """Create a compressed tarball containing the notebook document.

    Parameters
    ----------
    handler : tornado.web.RequestHandler
        Handler that serviced the bundle request
    model : dict
        Notebook model from the configured ContentManager
    """
    redirect_url = os.getenv("JH_BUNDLE_REDIRECT",
                             "http://127.0.0.1:5000/shared")
    try:
        access_token = os.getenv('OH_ACCESS_TOKEN')
        ohmember = api.exchange_oauth2_member(access_token)
        project_member_id = ohmember['project_member_id']
        notebook_filename = model['name']
        api.delete_file(access_token,
                        project_member_id,
                        file_basename=notebook_filename)
        print('deleted old_file')
        notebook_content = nbformat.writes(model['content']).encode('utf-8')

        upload_notebook(notebook_content, notebook_filename, access_token,
                        project_member_id)
        handler.redirect(redirect_url)
    except:
        print('whoops, something went wrong')
        handler.finish(("Your upload failed. "
                        "Please restart your notebook server "
                        "and try again."))
Example #6
def upload_summaries(oh_user, summaries, file_name, month, existing_file_id):
    fn = write_json_data_to_tmp_file(f'garmin-health-api-{file_name}-{month}.json', summaries)
    api.upload_aws(fn, create_metadata(file_name, month),
                   oh_user.get_access_token(),
                   project_member_id=oh_user.oh_id,
                   max_bytes=MAX_FILE_BYTES)
    if existing_file_id:
        api.delete_file(oh_user.get_access_token(), file_id=existing_file_id)
Example #7
def iterate_member_files(self, ohmember):
    client_info = ProjectConfiguration.objects.get(id=1).client_info
    ohmember_data = api.exchange_oauth2_member(
        ohmember.get_access_token(**client_info))
    files = ohmember_data['data']
    for f in files:
        fname = f['basename']
        if not fname.endswith('.zip') and not fname.endswith('.json'):
            api.delete_file(ohmember.access_token,
                            ohmember.oh_id,
                            file_id=f['id'])
Example #8
def process_file(dfile, access_token, member, metadata):
    try:
        vcf_metadata = verify_vcf(dfile)
    except:
        api.message("VCF integration: A broken file was deleted",
                    "While processing your VCF file "
                    "we noticed that your file does not conform "
                    "to the expected specifications and it was "
                    "thus deleted. Email us as [email protected] if "
                    "you think this file should be valid.",
                    access_token,
                    base_url=OH_BASE_URL)
        api.delete_file(access_token,
                        str(member['project_member_id']),
                        file_id=str(dfile['id']),
                        base_url=OH_BASE_URL)
        raise
    try:
        tmp_directory = tempfile.mkdtemp()
        base_filename = dfile['basename']

        # Save raw 23andMe genotyping to temp file.
        if base_filename.endswith('.gz'):
            base_filename = base_filename[0:-3]
        elif base_filename.endswith('.bz2'):
            base_filename = base_filename[0:-4]
        meta_filename = base_filename + '.metadata.json'
        raw_filename = temp_join(tmp_directory, meta_filename)
        metadata = {'description': 'VCF file metadata', 'tags': ['vcf']}
        with open(raw_filename, 'w') as raw_file:
            json.dump(vcf_metadata, raw_file)
            raw_file.flush()

        api.upload_aws(raw_filename,
                       metadata,
                       access_token,
                       base_url=OH_BASE_URL,
                       project_member_id=str(member['project_member_id']))
    except:
        api.message("VCF integration: File could not be uploaded",
                    "Something went wrong when processing your "
                    "file. Please try to upload it again. "
                    "Please email us as [email protected] if "
                    "this keeps happening.",
                    access_token,
                    base_url=OH_BASE_URL)
        api.delete_file(access_token,
                        str(member['project_member_id']),
                        file_id=str(dfile['id']),
                        base_url=OH_BASE_URL)
        raise
Example #9
def process_file(dfile, access_token, member, metadata):
    infile_suffix = dfile['basename'].split(".")[-1]
    tf_in = tempfile.NamedTemporaryFile(suffix="." + infile_suffix)
    tf_in.write(requests.get(dfile['download_url']).content)
    tf_in.flush()
    tmp_directory = tempfile.mkdtemp()
    filename_base = '23andMe-genotyping'

    raw_23andme = clean_raw_23andme(tf_in)
    raw_23andme.seek(0)
    vcf_23andme = vcf_from_raw_23andme(raw_23andme)

    # Save raw 23andMe genotyping to temp file.
    raw_filename = filename_base + '.txt'

    metadata = {
        'description': '23andMe full genotyping data, original format',
        'tags': ['23andMe', 'genotyping'],
        'creation_date': arrow.get().format(),
    }
    with open(temp_join(tmp_directory, raw_filename), 'w') as raw_file:
        raw_23andme.seek(0)
        shutil.copyfileobj(raw_23andme, raw_file)
        raw_file.flush()

    with open(temp_join(tmp_directory, raw_filename), 'r+b') as raw_file:

        upload_new_file(raw_file, access_token,
                        str(member['project_member_id']), metadata)

    # Save VCF 23andMe genotyping to temp file.
    vcf_filename = filename_base + '.vcf.bz2'
    metadata = {
        'description': '23andMe full genotyping data, VCF format',
        'tags': ['23andMe', 'genotyping', 'vcf'],
        'creation_date': arrow.get().format()
    }
    with bz2.BZ2File(temp_join(tmp_directory, vcf_filename), 'w') as vcf_file:
        vcf_23andme.seek(0)
        for i in vcf_23andme:
            vcf_file.write(i.encode())

    with open(temp_join(tmp_directory, vcf_filename), 'r+b') as vcf_file:
        upload_new_file(vcf_file, access_token,
                        str(member['project_member_id']), metadata)
    api.delete_file(access_token,
                    str(member['project_member_id']),
                    file_id=str(dfile['id']))
Example #10
def test_delete_file__invalid_access_token(self):
    with self.assertRaises(Exception):
        response = delete_file(
            access_token=ACCESS_TOKEN_INVALID,
            project_member_id='59319749',
            all_files=True)
        assert response.json() == {"detail": "Invalid token."}
Example #11
def replace_fitbit(oh_member, fitbit_data):
    print("replace function started")
    # delete old file and upload new to open humans
    tmp_directory = tempfile.mkdtemp()
    metadata = {
        'description': 'Fitbit data.',
        'tags': ['Fitbit', 'activity', 'steps'],
        'updated_at': str(datetime.utcnow()),
    }
    out_file = os.path.join(tmp_directory, 'fitbit-data.json')
    logger.debug('deleted old file for {}'.format(oh_member.oh_id))
    deleter = api.delete_file(oh_member.access_token,
                              oh_member.oh_id,
                              file_basename="fitbit-data.json")
    print("delete response")
    print(deleter)
    print("trying to write to file")
    with open(out_file, 'w') as json_file:
        print("inside open file")
        # json.dump(fitbit_data, json_file)
        json_file.write(json.dumps(fitbit_data))
        # print(json.dump(fitbit_data, json_file))
        print("dumped, trying to flush")
        json_file.flush()
    print("attempting add response")
    addr = api.upload_aws(out_file,
                          metadata,
                          oh_member.access_token,
                          project_member_id=oh_member.oh_id)
    print("add response")
    print(addr)
    logger.debug('uploaded new file for {}'.format(oh_member.oh_id))
Example #12
def fetch_googlefit_data(oh_id, send_email=False):
    '''
    Fetches all of the googlefit data for a given user
    '''
    print("Started googlefit update task")
    try:
        current_dt = datetime.utcnow()
        oh_member = OpenHumansMember.objects.get(oh_id=oh_id)
        if not hasattr(oh_member, 'googlefit_member'):
            print("No googlefit connection exists for member")
            return
        gf_member = oh_member.googlefit_member
        oh_access_token = oh_member.get_access_token()
        gf_access_token = gf_member.get_access_token()

        basenames_to_ids = get_existing_basenames_to_ids(oh_member)

        filesmonth = get_googlefit_data(oh_access_token, gf_access_token,
                                        current_dt)
        for fn, month in filesmonth:
            api.upload_aws(fn,
                           create_metadata(month),
                           oh_access_token,
                           project_member_id=oh_id,
                           max_bytes=MAX_FILE_BYTES)
            basename = os.path.basename(fn)
            if basename in basenames_to_ids:
                file_id_to_delete = basenames_to_ids[basename]
                api.delete_file(oh_access_token, file_id=file_id_to_delete)

        gf_member.last_updated = arrow.now().format()
        gf_member.save()

        if send_email and len(filesmonth) > 0:
            send_first_success_email(oh_id, oh_access_token)
        elif send_email and len(filesmonth) == 0:
            send_first_no_data_email(oh_id, oh_access_token)

    except Exception as e:
        import traceback
        print("Fetching googlefit data failed: {}".format(e))
        print(traceback.format_exc())
        # queue to retry later
        fetch_googlefit_data.apply_async(args=[oh_id], countdown=3600)
        raise
Example #13
def remove_jawbone(request):
    if request.method == 'POST' and request.user.is_authenticated:
        try:
            oh_member = request.user.oh_member
            api.delete_file(oh_member.access_token,
                            oh_member.oh_id,
                            file_basename='jawbone-moves-data.json')
            messages.info(request, 'Your Jawbone account has been removed')
            jawbone_account = request.user.oh_member.datasourcemember
            jawbone_account.delete()
        except:
            jawbone_account = request.user.oh_member.datasourcemember
            jawbone_account.delete()
            messages.info(request, ('Something went wrong, please '
                                    're-authorize us on Open Humans'))
            logout(request)
            return redirect('/')
    return redirect('/dashboard')
Example #14
def remove_nokia(request):
    if request.method == "POST" and request.user.is_authenticated:
        try:
            oh_member = request.user.oh_member
            api.delete_file(oh_member.access_token,
                            oh_member.oh_id,
                            file_basename="nokia_data")
            messages.info(request, "Your Withings/Nokia account has been removed")
            nokia_account = request.user.oh_member.nokia_member
            nokia_account.delete()
        except:
            nokia_account = request.user.oh_member.nokia_member
            nokia_account.delete()
            messages.info(request, ("Something went wrong, please"
                          "re-authorize us on Open Humans"))
            logout(request)
            return redirect('/')
    return redirect('/dashboard')
Example #15
def remove_googlefit(request):
    if request.method == "POST" and request.user.is_authenticated:
        try:
            openhumansmember = request.user.openhumansmember
            api.delete_file(openhumansmember.access_token,
                            openhumansmember.oh_id,
                            file_basename="googlefit-data.json")
            messages.info(request, "Your GoogleFit account has been removed")
            googlefit_account = request.user.openhumansmember.googlefit_member
            googlefit_account.delete()
        except:
            googlefit_account = request.user.openhumansmember.googlefit_member
            googlefit_account.delete()
            messages.info(request, ("Something went wrong, please"
                                    "re-authorize us on Open Humans"))
            #logout(request)
            return redirect('/')
    return redirect('/')
Example #16
def remove_moves(request):
    if request.method == "POST" and request.user.is_authenticated:
        try:
            oh_member = request.user.oh_member
            api.delete_file(oh_member.access_token,
                            oh_member.oh_id,
                            file_basename="moves-storyline-data.json")
            messages.info(request, "Your Moves account has been removed")
            moves_account = request.user.oh_member.datasourcemember
            moves_account.delete()
        except:
            moves_account = request.user.oh_member.datasourcemember
            moves_account.delete()
            messages.info(request, ("Something went wrong, please"
                                    "re-authorize us on Open Humans"))
            logout(request)
            return redirect('/')
    return redirect('/dashboard')
Example #17
def upload_user_dailies(garmin_user_id, user_map, existing_file_id):

    min_date = earliest_date(user_map)
    fn = write_jsonfile_to_tmp_dir('garmin-dailies.json', user_map)
    oh_user = get_oh_user_from_garmin_id(garmin_user_id)
    api.upload_aws(fn,
                   create_metadata(),
                   oh_user.get_access_token(),
                   project_member_id=oh_user.oh_id,
                   max_bytes=MAX_FILE_BYTES)

    oh_user.garmin_member.last_updated = datetime.now()
    earliest = oh_user.garmin_member.earliest_available_data
    if not earliest or min_date < earliest.replace(tzinfo=None):
        oh_user.garmin_member.earliest_available_data = min_date
    oh_user.garmin_member.save()
    if existing_file_id:
        api.delete_file(oh_user.get_access_token(), file_id=existing_file_id)
Example #18
def write_new_tweets(oh_member, twitter_api, month, new_data):
    existing_files = api.exchange_oauth2_member(
        oh_member.get_access_token(),
        all_files=True)
    old_data = None
    file_id = None
    for dfile in existing_files['data']:
        if dfile['basename'] == 'twitter-data-{}.json'.format(month):
            old_data = requests.get(dfile['download_url']).json()
            file_id = dfile['id']
            break
    if old_data:
        old_data['tweets'] = new_data['tweets'] + old_data['tweets']
        old_data['likes'] = new_data['likes'] + old_data['likes']
    else:
        old_data = {'tweets': new_data['tweets'], 'likes': new_data['likes'],
                    'followers': [], 'following': []}
    if month == str(datetime.datetime.today())[:7]:
        me = twitter_api.me()
        old_data['followers'].append(
            {'timestamp': str(datetime.datetime.today()),
                'value': me.followers_count})
        old_data['following'].append(
            {'timestamp': str(datetime.datetime.today()),
                'value': me.friends_count})
    with tempfile.TemporaryFile() as f:
        js = json.dumps(old_data)
        js = str.encode(js)
        f.write(js)
        f.flush()
        f.seek(0)
        api.upload_stream(
            f, "twitter-data-{}.json".format(month),
            metadata={
                "description": "Twitter Data",
                "tags": ["Twitter"]
            }, access_token=oh_member.get_access_token())
    if file_id:
        api.delete_file(
            oh_member.get_access_token(),
            project_member_id=oh_member.oh_id,
            file_id=file_id)
Example #19
def process_file(dfile, access_token, member, metadata):
    infile_suffix = dfile['basename'].split(".")[-1]
    tf_in = tempfile.NamedTemporaryFile(suffix="." + infile_suffix)
    tf_in.write(requests.get(dfile['download_url']).content)
    tf_in.flush()
    tmp_directory = tempfile.mkdtemp()
    filename_base = 'Location History.json'
    location_data = get_json(tf_in)
    if location_data:
        location_json = json.loads(location_data)
        output_file = tmp_directory + '/' + filename_base
        with open(output_file, 'w') as raw_file:
            json.dump(location_json, raw_file)
        metadata = {
            'description': 'Google Location History JSON',
            'tags': ['google location history', 'gps'],
            'creation_date': arrow.get().format(),
        }
        api.upload_aws(output_file,
                       metadata,
                       access_token,
                       base_url=OH_BASE_URL,
                       project_member_id=str(member['project_member_id']))
        get_semantic_data(tf_in, tmp_directory, member, access_token)
    else:
        api.message("Google Location History: A broken file was deleted",
                    "While processing your Google Location History file "
                    "we noticed that your file does not conform "
                    "to the expected specifications and it was "
                    "thus deleted. Please make sure you upload "
                    "the right file:\nWe expect the file to be a "
                    "single json file "
                    "or a .zip file as downloaded from Google Takeout."
                    " Please "
                    "do not alter the original file, as unexpected "
                    "additions can invalidate the file.",
                    access_token,
                    base_url=OH_BASE_URL)
    api.delete_file(access_token,
                    str(member['project_member_id']),
                    file_id=str(dfile['id']),
                    base_url=OH_BASE_URL)
Example #20
def replace_datasource(oh_member, source_data):
    # delete old file and upload new to open humans
    tmp_directory = tempfile.mkdtemp()
    metadata = {
        'description': 'Dummy data for demo.',
        'tags': ['demo', 'dummy', 'test'],
        'updated_at': str(datetime.utcnow()),
    }
    out_file = os.path.join(tmp_directory, 'dummy-data.json')
    logger.debug('deleted old file for {}'.format(oh_member.oh_id))
    api.delete_file(oh_member.access_token,
                    oh_member.oh_id,
                    file_basename="dummy-data.json")
    with open(out_file, 'w') as json_file:
        json.dump(source_data, json_file)
        json_file.flush()
    api.upload_aws(out_file,
                   metadata,
                   oh_member.access_token,
                   project_member_id=oh_member.oh_id)
    logger.debug('uploaded new file for {}'.format(oh_member.oh_id))
Example #21
def replace_moves(oh_member, moves_data):
    # delete old file and upload new to open humans
    tmp_directory = tempfile.mkdtemp()
    metadata = {
        'description': 'Moves GPS maps, locations, and steps data.',
        'tags': ['GPS', 'Moves', 'steps'],
        'updated_at': str(datetime.utcnow()),
    }
    out_file = os.path.join(tmp_directory, 'moves-storyline-data.json')
    logger.debug('deleted old file for {}'.format(oh_member.oh_id))
    api.delete_file(oh_member.access_token,
                    oh_member.oh_id,
                    file_basename="moves-storyline-data.json")
    with open(out_file, 'w') as json_file:
        json.dump(moves_data, json_file)
        json_file.flush()
    api.upload_aws(out_file,
                   metadata,
                   oh_member.access_token,
                   project_member_id=oh_member.oh_id)
    logger.debug('uploaded new file for {}'.format(oh_member.oh_id))
Example #22
def replace_rescuetime(oh_member, rescuetime_data):
    # delete old file and upload new to open humans
    tmp_directory = tempfile.mkdtemp()
    metadata = {
        'description': 'RescueTime productivity data.',
        'tags': ['Rescuetime', 'productivity'],
        'updated_at': str(datetime.utcnow()),
    }
    out_file = os.path.join(tmp_directory, 'rescuetime.json')
    logger.debug('deleted old file for {}'.format(oh_member.oh_id))
    api.delete_file(oh_member.access_token,
                    oh_member.oh_id,
                    file_basename="rescuetime.json")
    with open(out_file, 'w') as json_file:
        json.dump(rescuetime_data, json_file)
        json_file.flush()
    api.upload_aws(out_file,
                   metadata,
                   oh_member.access_token,
                   project_member_id=oh_member.oh_id)
    logger.debug('uploaded new file for {}'.format(oh_member.oh_id))
Example #23
def process_github(oh_id):
    """
    Update the github file for a given OH user
    """
    try:
        logger.debug('Starting github processing for {}'.format(oh_id))
        oh_member = OpenHumansMember.objects.get(oh_id=oh_id)
        oh_access_token = oh_member.get_access_token(
            client_id=settings.OPENHUMANS_CLIENT_ID,
            client_secret=settings.OPENHUMANS_CLIENT_SECRET)
        #github_data = get_existing_github_data(oh_access_token)#
        github_member = oh_member.datasourcemember
        github_access_token = github_member.get_access_token(
            client_id=settings.GITHUB_CLIENT_ID,
            client_secret=settings.GITHUB_CLIENT_SECRET)

        #print("OH access token: {}".format(oh_access_token))

        gh_file = gh_api.get_github_data(oh_access_token, github_access_token)

        existing_file_ids = get_existing_file_ids(oh_member)
        print(existing_file_ids)
        api.upload_aws(gh_file,
                       create_file_metadata(),
                       oh_access_token,
                       project_member_id=oh_id,
                       max_bytes=MAX_FILE_BYTES)

        for id in existing_file_ids:
            api.delete_file(oh_access_token, file_id=id)

        github_member.last_updated = arrow.now().format()
        github_member.save()
    except Exception as e:
        import traceback
        print("Fetching github data failed: {}".format(e))
        print(traceback.format_exc())
        # queue to retry later
        process_github.apply_async(args=[oh_id], countdown=4 * 3600)
        raise
Example #24
def process_target(data_file, access_token, member, metadata):
    try:
        tf = tempfile.NamedTemporaryFile(suffix=".gz")
        tf_out = tempfile.NamedTemporaryFile(prefix="ftdna-",
                                             suffix=".csv",
                                             mode="w+b")
        print("downloading ftdna file from oh")
        tf.write(requests.get(data_file['download_url']).content)
        tf.flush()
        print('read ftdna file')
        with gzip.open(tf.name, "rt", newline="\n") as ftdna_file:
            for line in ftdna_file:
                if valid_line(line):
                    tf_out.write(line.encode('ascii'))
        tf_out.flush()
        tf_out.seek(0)
        print('cleaned file')
        api.delete_file(access_token,
                        str(member['project_member_id']),
                        file_id=str(data_file['id']))
        print('deleted old')
        upload_new_file(tf_out, access_token, str(member['project_member_id']),
                        data_file['metadata'])
    except:
        print('delete broken file')
        api.delete_file(access_token,
                        str(member['project_member_id']),
                        file_id=str(data_file['id']))
        api.message(
            "A broken file was deleted",
            "While processing your FamilyTreeDNA file "
            "we noticed that your file does not conform "
            "to the expected specifications and it was "
            "thus deleted. Please make sure you upload "
            "the right file:\nWe expect the file to be a "
            "single, - gzipped (ends in .gz) - file as "
            "you can download from FamilyTreeDNA. Please "
            "do not alter or unzip this file, as unexpected additions "
            "also invalidate the file.", access_token)
Example #25
def replace_twitter(oh_member, twitter_data):
    # delete old file and upload new to open humans
    tmp_directory = tempfile.mkdtemp()
    metadata = {
        'description':
        'Twitter activity feed, repository contents and stars data.',
        'tags': ['demo', 'Twitter', 'test'],
        'updated_at': str(datetime.utcnow()),
    }
    out_file = os.path.join(tmp_directory, 'twitter-data.json')
    logger.debug('deleted old file for {}'.format(oh_member.oh_id))
    api.delete_file(oh_member.access_token,
                    oh_member.oh_id,
                    file_basename="twitter-data.json")
    with open(out_file, 'w') as json_file:
        json.dump(twitter_data, json_file)
        json_file.flush()
    api.upload_aws(out_file,
                   metadata,
                   oh_member.access_token,
                   project_member_id=oh_member.oh_id)
    logger.debug('uploaded new file for {}'.format(oh_member.oh_id))
Example #26
def replace_nokia(oh_member, nokia_data):
    """
    Delete any old file and upload new
    """
    tmp_directory = tempfile.mkdtemp()
    metadata = {
        'tags': ['nokiahealthdata', 'health', 'measure'],
        'description': 'File with Nokia Health data',
        'updated_at': str(datetime.utcnow()),
    }
    filename = 'nokiahealthdata.json'
    out_file = os.path.join(tmp_directory, filename)
    logger.debug('deleted old file for {}'.format(oh_member.oh_id))
    api.delete_file(oh_member.access_token,
                    oh_member.oh_id,
                    file_basename=filename)
    with open(out_file, 'w') as json_file:
        json.dump(nokia_data, json_file)
        json_file.flush()
    api.upload_aws(out_file, metadata,
                   oh_member.access_token,
                   project_member_id=oh_member.oh_id)
    logger.debug('uploaded new file for {}'.format(oh_member.oh_id))
Example #27
def process_source(oh_id):
    oh_member = OpenHumansMember.objects.get(oh_id=oh_id)
    OUT_DIR = os.environ.get('OUT_DIR')
    metadata = {
        'description': 'Imputed genotypes from Imputer',
        'tags': ['genomics'],
        'updated_at': str(datetime.utcnow()),
    }
    oh_access_token = oh_member.get_access_token(
        client_id=settings.OPENHUMANS_CLIENT_ID,
        client_secret=settings.OPENHUMANS_CLIENT_SECRET)

    # this works below
    try:
        api.delete_file(oh_member.access_token,
                        oh_member.oh_id,
                        file_basename="member.imputed.vcf.bz2")
    except FileNotFoundError:
        logger.info('New Source File')
    api.upload_aws('{}/{}/member.imputed.vcf.bz2'.format(OUT_DIR, oh_id),
                   metadata,
                   oh_access_token,
                   project_member_id=oh_member.oh_id,
                   max_bytes=256000000)
Example #28
def delete_oh_file(access_token, file_name):
    """
    Deletes any files belonging to an OH user and project whose base file name matches the given file name.

    :param access_token: The project access token for the given member.
    :param file_name: The name of the file to be deleted from OpenHumans (all matching filenames will be deleted).
    :return: boolean. True if successful, else False
    """
    try:
        deletion_response = delete_file(access_token, file_basename=file_name)

        if deletion_response.status_code == 200:
            return True
        else:
            print(
                f'An error was encountered trying to delete file {file_name}')
            return False
    except:
        print(f'An error was encountered trying to delete file {file_name}')
        return False
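A hypothetical caller of the helper above might look like this; the token lookup and the file name are placeholders, not values taken from any project in this listing:

# Assumes an object exposing get_access_token(), as in the other examples.
token = oh_user.get_access_token()
if delete_oh_file(token, 'garmin-dailies.json'):
    print('Old summary removed')
else:
    print('Nothing was deleted, or the request failed')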
Example #29
def test_delete_file__valid_access_token(self):
    response = delete_file(access_token=ACCESS_TOKEN,
                           project_member_id='59319749',
                           all_files=True)
    self.assertEqual(response.status_code, 200)
Example #30
def test_delete_file__expired_access_token(self):
    response = delete_file(access_token=ACCESS_TOKEN_EXPIRED,
                           all_files=True,
                           project_member_id='59319749')
    assert response.json() == {"detail": "Expired token."}
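Taken together, the examples show the three ways these projects target files with delete_file: a single file id, a shared basename, or all of a member's files at once. A condensed sketch with placeholder values, reusing the call shapes from the examples above:

from ohapi import api

ACCESS_TOKEN = 'project-member-access-token'   # placeholder
PROJECT_MEMBER_ID = '00000000'                 # placeholder

# Delete one specific file by its id.
api.delete_file(ACCESS_TOKEN, PROJECT_MEMBER_ID, file_id='1234')

# Delete whichever files match a basename.
api.delete_file(ACCESS_TOKEN, PROJECT_MEMBER_ID, file_basename='fitbit-data.json')

# Delete every file the project holds for this member.
api.delete_file(ACCESS_TOKEN, PROJECT_MEMBER_ID, all_files=True)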