Example #1
def get_download_url(oh_member):
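    """Return URL and creation date of the member's oura-data.json file, or None."""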
    files = api.exchange_oauth2_member(
        access_token=oh_member.get_access_token())['data']
    for f in files:
        if f['basename'] == 'oura-data.json':
            return {'url': f['download_url'], 'created': f['created']}
    return None
Example #2
def dashboard(request):
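    """Render the dashboard with matching VCF data sources and the member's imputation queue position."""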
    oh_member = request.user.oh_member
    context = {
        'oh_member': oh_member,
    }
    try:
        oh_member_data = api.exchange_oauth2_member(
            oh_member.get_access_token())
    except:
        messages.error(request, "You need to re-authenticate with Open Humans")
        logout(request)
        return redirect("/")

    requested_sources = {
        'direct-sharing-128': '23andMe Upload',
        'direct-sharing-129': 'AncestryDNA Upload',
        'direct-sharing-120': 'FamilyTreeDNA integration',
        'direct-sharing-40': 'Gencove',
        'direct-sharing-131': 'Genome/Exome Upload',
        'direct-sharing-139': 'Harvard Personal Genome Project',
        'direct-sharing-55': 'openSNP'
    }

    matching_sources = {}
    found_source_ids = []
    for data_source in oh_member_data['data']:
        if (data_source['source'] in requested_sources
                and 'vcf' in data_source['basename']
                and 'metadata' not in data_source['basename']):
            matching_sources[data_source['basename']] = {
                'project': requested_sources[data_source['source']],
                'id': data_source['id'],
                'source_id': data_source['source']
            }
            found_source_ids.append(data_source['source'])

    for source_id, source_name in requested_sources.items():
        if source_id not in found_source_ids:
            matching_sources[source_id] = {
                'project': source_name,
                'id': None,
                'source_id': source_id
            }

    # check position in queue
    active_sorted = ImputerMember.objects.filter(active=True).order_by('id')
    queue_position = None
    for index, active in enumerate(active_sorted):
        if int(oh_member.oh_id) == int(active.oh_id):
            queue_position = index

    context = {
        'base_url': request.build_absolute_uri("/").rstrip('/'),
        'section': 'dashboard',
        'all_datasources': requested_sources,
        'matching_sources': matching_sources,
        'queue_position': queue_position
    }

    return render(request, 'main/dashboard.html', context=context)
Example #3
def get_vcf(data_source_id, oh_id, calculate_variant_length=True):
    """Download member .vcf."""
    oh_member = OpenHumansMember.objects.get(oh_id=oh_id)
    imputer_record = ImputerMember.objects.get(oh_id=oh_id, active=True)
    imputer_record.step = 'get_vcf'
    imputer_record.save()
    logger.info('Downloading vcf for member {}'.format(oh_id))
    user_details = api.exchange_oauth2_member(oh_member.get_access_token())
    for data_source in user_details['data']:
        if str(data_source['id']) == str(data_source_id):
            data_file_url = data_source['download_url']
            imputer_record.data_source_id = data_source['id']
            imputer_record.save()
    datafile = requests.get(data_file_url)
    os.makedirs('{}/{}'.format(DATA_DIR, oh_id), exist_ok=True)
    with open('{}/{}/member.{}.vcf'.format(DATA_DIR, oh_id, oh_id), 'wb') as handle:
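        # Try bz2 first, fall back to gzip, then to writing the raw download.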
        try:
            try:
                try:
                    textobj = bz2.decompress(datafile.content)
                    handle.write(textobj)
                except OSError:
                    textobj = gzip.decompress(datafile.content)
                    handle.write(textobj)
            except OSError:
                for block in datafile.iter_content(1024):
                    handle.write(block)
        except:
            logger.critical('your data source file is malformed')
    time.sleep(5)  # download takes a few seconds

    # set CHROMOSOMES variable appropriately by checking which were submitted for imputation.
    if calculate_variant_length is True:
        with open('{}/{}/member.{}.vcf'.format(DATA_DIR, oh_id, oh_id)) as vcf:
            member_chroms = set()
            longest_variant = 0
            for line in vcf:
                if not line.startswith('#'):
                    member_chroms.add(str(line.split('\t')[0]).replace("chr", ""))
                    reflen = len(str(line.split('\t')[3]))
                    altlen = len(str(line.split('\t')[4]))
                    if reflen + altlen > longest_variant:
                        longest_variant = reflen + altlen
        # store the variant length, but only if less than 1000
        if longest_variant <= 990:
            imputer_record.variant_length = longest_variant + 10
            imputer_record.save()
    elif calculate_variant_length is False:
        with open('{}/{}/member.{}.vcf'.format(DATA_DIR, oh_id, oh_id)) as vcf:
            member_chroms = set()
            for line in vcf:
                if not line.startswith('#'):
                    member_chroms.add(str(line.split('\t')[0]).replace("chr", ""))

    global CHROMOSOMES
    default_chroms = set(CHROMOSOMES)
    CHROMOSOMES = default_chroms.intersection(member_chroms)
    if "X" in member_chroms or "chrX" in member_chroms:
        CHROMOSOMES.add('23')
    CHROMOSOMES = sorted(CHROMOSOMES, key=int)
Example #4
def clean_uploaded_file(self, access_token, file_id):
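    """Look up the member's uploaded file by ID and hand it to process_target."""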
    member = api.exchange_oauth2_member(access_token)
    for dfile in member['data']:
        if dfile['id'] == file_id:
            print(dfile)
            process_target(dfile, access_token, member, dfile['metadata'])
    pass
Example #5
def get_vcf(data_source_id, oh_id):
    """Download member .vcf."""
    oh_member = OpenHumansMember.objects.get(oh_id=oh_id)
    imputer_record = ImputerMember.objects.get(oh_id=oh_id, active=True)
    imputer_record.step = 'get_vcf'
    imputer_record.save()
    logger.info('Downloading vcf for member {}'.format(oh_id))
    user_details = api.exchange_oauth2_member(oh_member.get_access_token())
    for data_source in user_details['data']:
        if str(data_source['id']) == str(data_source_id):
            data_file_url = data_source['download_url']
    datafile = requests.get(data_file_url)
    os.makedirs('{}/{}'.format(DATA_DIR, oh_id), exist_ok=True)
    with open('{}/{}/member.{}.vcf'.format(DATA_DIR, oh_id, oh_id),
              'wb') as handle:
        # Try bz2 first, fall back to gzip, then to writing the raw download.
        try:
            try:
                try:
                    textobj = bz2.decompress(datafile.content)
                    handle.write(textobj)
                except OSError:
                    textobj = gzip.decompress(datafile.content)
                    handle.write(textobj)
            except OSError:
                for block in datafile.iter_content(1024):
                    handle.write(block)
        except:
            logger.critical('your data source file is malformed')
    time.sleep(5)  # download takes a few seconds
Example #6
def clean_uploaded_file(self, access_token, file_id, taxonomy):
    member = api.exchange_oauth2_member(access_token, base_url=OH_BASE_URL)
    for dfile in member['data']:
        if dfile['id'] == file_id:
            process_file(dfile, access_token, member, dfile['metadata'],
                         taxonomy)
    pass
Example #7
def handle(self, *args, **options):
    users = OpenHumansMember.objects.all()
    for user in users:
        member = api.exchange_oauth2_member(user.get_access_token())
        for dfile in member['data']:
            if 'GoogleFit' in dfile['metadata']['tags']:
                print(dfile)
Example #8
def get_last_id(oh_access_token):
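    """Return the latest tweet and like IDs from the member's most recent Twitter file."""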
    member = api.exchange_oauth2_member(
        oh_access_token,
        all_files=True)
    twitter_files = {}
    tweet_id = None
    like_id = None
    for dfile in member['data']:
        if 'Twitter' in dfile['metadata']['tags']:
            twitter_files[dfile['basename']] = dfile
    if twitter_files:
        filenames = list(twitter_files.keys())
        filenames.sort()
        last_file = twitter_files[filenames[-1]]
        tf_in = tempfile.NamedTemporaryFile(suffix='.json')
        tf_in.write(requests.get(last_file['download_url']).content)
        tf_in.flush()
        twitter_data = json.load(open(tf_in.name))
        print("fetched last ID from OH")
        print(twitter_data['tweets'][0]['id_str'])
        print('---')
        if len(twitter_data['tweets']):
            tweet_id = twitter_data['tweets'][0]['id_str']
        if len(twitter_data['likes']):
            like_id = twitter_data['likes'][0]['id_str']
    return tweet_id, like_id
Example #9
def bundle(handler, model):
    """Create a compressed tarball containing the notebook document.

    Parameters
    ----------
    handler : tornado.web.RequestHandler
        Handler that serviced the bundle request
    model : dict
        Notebook model from the configured ContentManager
    """
    redirect_url = os.getenv("JH_BUNDLE_REDIRECT",
                             "http://127.0.0.1:5000/shared")
    try:
        access_token = os.getenv('OH_ACCESS_TOKEN')
        ohmember = api.exchange_oauth2_member(access_token)
        project_member_id = ohmember['project_member_id']
        notebook_filename = model['name']
        api.delete_file(access_token,
                        project_member_id,
                        file_basename=notebook_filename)
        print('deleted old_file')
        notebook_content = nbformat.writes(model['content']).encode('utf-8')

        upload_notebook(notebook_content, notebook_filename, access_token,
                        project_member_id)
        handler.redirect(redirect_url)
    except:
        print('whoops, something went wrong')
        handler.finish(("Your upload failed. "
                        "Please restart your notebook server "
                        "and try again."))
Example #10
def get_existing_data(garmin_user_id):
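    """Return the member's existing Garmin data and its file ID, or an empty default."""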
    oh_user = get_oh_user_from_garmin_id(garmin_user_id)
    member = api.exchange_oauth2_member(oh_user.get_access_token())
    for dfile in member['data']:
        if 'Garmin' in dfile['metadata']['tags']:
            download_url = dfile['download_url']
            return download_to_json(download_url), dfile['id']
    return {'dailies': []}, None
Example #11
def dashboard(request):
    oh_member = request.user.oh_member
    context = {
        'oh_member': oh_member,
    }
    try:
        oh_member_data = api.exchange_oauth2_member(
            oh_member.get_access_token())
    except:
        messages.error(request, "You need to re-authenticate with Open Humans")
        logout(request)
        return redirect("/")

    requested_sources = {
        'direct-sharing-128': '23andMe Upload',
        'direct-sharing-129': 'AncestryDNA Upload',
        'direct-sharing-120': 'FamilyTreeDNA integration',
        'direct-sharing-40': 'Gencove',
        'direct-sharing-131': 'Genome/Exome Upload',
        'direct-sharing-139': 'Harvard Personal Genome Project',
        'direct-sharing-55': 'openSNP'
    }

    # check position in queue
    active_sorted = ImputerMember.objects.filter(active=True).order_by('id')
    queue_position = None
    for index, active in enumerate(active_sorted):
        if oh_member.oh_id == active.oh_id:
            queue_position = index

    matching_sources = {}
    for data_source in oh_member_data['data']:
        matching_source = requested_sources.get(data_source['source'])
        vcf_in_name = 'vcf' in data_source['basename']
        metadata_in_name = 'metadata' in data_source['basename']
        if matching_source is not None and vcf_in_name and not metadata_in_name:
            # check if the data source id has been imputed
            imputed = ImputerMember.objects.filter(
                active=False,
                oh_id=oh_member.oh_id,
                data_source_id=data_source['id'])
            matching_sources[data_source['id']] = {
                'project': requested_sources[data_source['source']],
                'source_id': data_source['source'],
                'basename': data_source['basename'],
                'already_imputed': True if len(imputed) > 0 else False
            }

    context = {
        'base_url': request.build_absolute_uri("/").rstrip('/'),
        'section': 'dashboard',
        'all_datasources': requested_sources,
        'matching_sources': matching_sources,
        'queue_position': queue_position
    }

    return render(request, 'main/dashboard.html', context=context)
Example #12
def get_latest_googlefit_file_url(oh_access_token):
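    """Return the download URL of the member's most recent monthly GoogleFit file, or None."""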
    member = api.exchange_oauth2_member(oh_access_token)
    latest_month = GOOGLEFIT_DEFAULT_START_DATE.strftime("%Y-%m")
    download_url = None
    for dfile in member['data']:
        if 'GoogleFit' in dfile['metadata']['tags']:
            if dfile['metadata'].get('month', '') >= latest_month:
                latest_month = dfile['metadata']['month']
                download_url = dfile['download_url']
    return download_url
Example #13
def get_download_url(oh_member):
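    """Return URL and creation date of spotify-listening-archive.json, None if absent, or 'token-broken' on failure."""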
    try:
        files = api.exchange_oauth2_member(
            access_token=oh_member.get_access_token())['data']
        for f in files:
            if f['basename'] == 'spotify-listening-archive.json':
                return {'url': f['download_url'], 'created': f['created']}
        return None
    except:
        return 'token-broken'
Example #14
def iterate_member_files(self, ohmember):
    client_info = ProjectConfiguration.objects.get(id=1).client_info
    ohmember_data = api.exchange_oauth2_member(
        ohmember.get_access_token(**client_info))
    files = ohmember_data['data']
    for f in files:
        fname = f['basename']
        if not fname.endswith('.zip') and not fname.endswith('.json'):
            api.delete_file(ohmember.access_token,
                            ohmember.oh_id,
                            file_id=f['id'])
Example #15
def get_existing_data(oh_access_token):
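    """Return the member's existing demo-tagged JSON data, or an empty list."""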
    member = api.exchange_oauth2_member(oh_access_token)
    for dfile in member['data']:
        if 'demo' in dfile['metadata']['tags']:
            # get file here and read the json into memory
            tf_in = tempfile.NamedTemporaryFile(suffix='.json')
            tf_in.write(requests.get(dfile['download_url']).content)
            tf_in.flush()
            demo_data = json.load(open(tf_in.name))
            return demo_data
    return []
Example #16
def get_latest_github_file_url(oh_access_token):
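    """Return the download URL of the member's most recently updated Github file, or None."""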
    member = api.exchange_oauth2_member(oh_access_token)
    download_url = None
    last_updated_at = None
    for dfile in member['data']:
        if 'Github' in dfile['metadata']['tags']:
            if last_updated_at is None or dfile['metadata'].get(
                    'updated_at', '') >= last_updated_at:
                last_updated_at = dfile['metadata']['updated_at']
                download_url = dfile['download_url']
    return download_url
Example #17
def get_nokia_file(oh_member):
    try:
        oh_access_token = oh_member.get_access_token(
            client_id=settings.OPENHUMANS_CLIENT_ID,
            client_secret=settings.OPENHUMANS_CLIENT_SECRET)
        user_object = api.exchange_oauth2_member(oh_access_token)
        for dfile in user_object['data']:
            if 'nokia' in dfile['metadata']['tags']:
                return dfile['download_url']
        return ''
    except:
        return 'error'
Example #18
def get_existing_rescuetime(oh_access_token):
    member = api.exchange_oauth2_member(oh_access_token)
    for dfile in member['data']:
        if 'Rescuetime' in dfile['metadata']['tags']:
            # get file here and read the json into memory
            tf_in = tempfile.NamedTemporaryFile(suffix='.json')
            tf_in.write(requests.get(dfile['download_url']).content)
            tf_in.flush()
            rescuetime_data = json.load(open(tf_in.name))
            if 'rows' in rescuetime_data.keys():
                if len(rescuetime_data['rows']) > 0:
                    return rescuetime_data
    return {}
Example #19
def get_runkeeper_file(oh_member):
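    """Return a dict mapping Runkeeper file basenames to download URLs, or 'error' on failure."""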
    try:
        oh_access_token = oh_member.get_access_token(
            client_id=settings.OPENHUMANS_CLIENT_ID,
            client_secret=settings.OPENHUMANS_CLIENT_SECRET)
        user_object = api.exchange_oauth2_member(oh_access_token)
        files = {}
        for dfile in user_object['data']:
            if 'Runkeeper' in dfile['metadata']['tags']:
                files[dfile['basename']] = dfile['download_url']
        return files
    except:
        return 'error'
Example #20
def get_latest_googlefit_file_updated_dt(oh_access_token):
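    """Return the updated_at timestamp of the latest GoogleFit file as a datetime, or None."""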
    member = api.exchange_oauth2_member(oh_access_token)
    latest_month = GOOGLEFIT_DEFAULT_START_DATE.strftime("%Y-%m")
    last_updated = None
    for dfile in member['data']:
        if 'GoogleFit' in dfile['metadata']['tags']:
            if dfile['metadata'].get('month', '') >= latest_month:
                latest_month = dfile['metadata']['month']
                last_updated = dfile['metadata']['updated_at']
    if last_updated:
        return datetime.strptime(last_updated, "%Y-%m-%d %H:%M:%S.%f")
    else:
        return None
Example #21
def oh_code_to_member(code):
    """
    Exchange code for token, use this to create and return OpenHumansMember.
    If a matching OpenHumansMember exists, update and return it.
    """
    if settings.OPENHUMANS_CLIENT_SECRET and \
       settings.OPENHUMANS_CLIENT_ID and code:
        data = {
            'grant_type': 'authorization_code',
            'redirect_uri':
            '{}/complete'.format(settings.OPENHUMANS_APP_BASE_URL),
            'code': code,
        }
        req = requests.post('{}/oauth2/token/'.format(
            settings.OPENHUMANS_OH_BASE_URL),
                            data=data,
                            auth=requests.auth.HTTPBasicAuth(
                                settings.OPENHUMANS_CLIENT_ID,
                                settings.OPENHUMANS_CLIENT_SECRET))
        data = req.json()

        if 'access_token' in data:
            oh_memberdata = api.exchange_oauth2_member(data['access_token'])
            oh_id = oh_memberdata['project_member_id']
            oh_username = oh_memberdata['username']
            try:
                oh_member = OpenHumansMember.objects.get(oh_id=oh_id)
                logger.debug('Member {} re-authorized.'.format(oh_id))
                oh_member.access_token = data['access_token']
                oh_member.refresh_token = data['refresh_token']
                oh_member.token_expires = OpenHumansMember.get_expiration(
                    data['expires_in'])
            except OpenHumansMember.DoesNotExist:
                oh_member = OpenHumansMember.create(
                    oh_id=oh_id,
                    oh_username=oh_username,
                    access_token=data['access_token'],
                    refresh_token=data['refresh_token'],
                    expires_in=data['expires_in'])
                logger.debug('Member {} created.'.format(oh_id))
            oh_member.save()

            return oh_member

        elif 'error' in req.json():
            logger.debug('Error in token exchange: {}'.format(req.json()))
        else:
            logger.warning('Neither token nor error info in OH response!')
    else:
        logger.error('OH_CLIENT_SECRET or code are unavailable')
    return None
Example #22
def iterate_member_files(self, ohmember):
    client_info = ProjectConfiguration.objects.get(id=1).client_info
    ohmember_data = api.exchange_oauth2_member(
        ohmember.get_access_token(**client_info))
    files = ohmember_data['data']
    metadata_files = self.get_metadata_list(files)
    for f in files:
        fname = f['basename']
        if fname.endswith('.gz'):
            fname = fname.replace('.gz', '')
        elif fname.endswith('bz2'):
            fname = fname.replace('.bz2', '')
        if fname + '.vcf.metadata.json' not in metadata_files:
            clean_uploaded_file.delay(ohmember.access_token, f['id'])
Example #23
def get_existing_fitbit(oh_access_token):
    print("entered get_existing_fitbit")
    member = api.exchange_oauth2_member(oh_access_token)
    for dfile in member['data']:
        if 'Fitbit' in dfile['metadata']['tags']:
            print("got inside fitbit if")
            # get file here and read the json into memory
            tf_in = tempfile.NamedTemporaryFile(suffix='.json')
            tf_in.write(requests.get(dfile['download_url']).content)
            tf_in.flush()
            fitbit_data = json.load(open(tf_in.name))
            print("fetched data from OH")
            print(fitbit_data)
            return fitbit_data
    return []
Example #24
def merge_with_existing_and_upload(oh_user, summaries, file_name):
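    """Merge new summaries with any existing Open Humans file and upload the combined result."""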
    access_token = oh_user.get_access_token()
    oh_user_data = api.exchange_oauth2_member(access_token)
    existing_file = find_existing_data_file(oh_user_data, file_name)
    if existing_file:
        download_url = existing_file['download_url']
        old_summaries = json.loads(requests.get(download_url).content)
        summaries = merge_summaries(summaries, old_summaries)
    else:
        summaries = merge_summaries(summaries, [])  # Remove duplicates
    existing_file_id = existing_file['id'] if existing_file else None

    _LOGGER.info(f"Uploading {len(summaries)} summaries to file {file_name} for user {oh_user.oh_id}")
    upload_summaries(oh_user, summaries, file_name, existing_file_id)

    return summaries
Example #25
def get_twitter_files(oh_member):
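    """Return the member's Twitter-tagged files, sorted by basename in reverse, or 'error' on failure."""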
    try:
        oh_access_token = oh_member.get_access_token(
            client_id=settings.OPENHUMANS_CLIENT_ID,
            client_secret=settings.OPENHUMANS_CLIENT_SECRET)
        user_object = api.exchange_oauth2_member(oh_access_token,
                                                 all_files=True)
        files = []
        for dfile in user_object['data']:
            if 'Twitter' in dfile['metadata']['tags']:
                files.append(dfile)
        if files:
            files.sort(key=lambda x: x['basename'], reverse=True)
        return files

    except:
        return 'error'
Example #26
def get_jawbone_files(oh_member):
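    """Return download URLs and basenames of the member's Jawbone files, or 'error' on failure."""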
    try:
        files = []
        oh_access_token = oh_member.get_access_token(
            client_id=settings.OPENHUMANS_CLIENT_ID,
            client_secret=settings.OPENHUMANS_CLIENT_SECRET)
        user_object = api.exchange_oauth2_member(oh_access_token)
        for dfile in user_object['data']:
            if 'Jawbone' in dfile['metadata']['tags']:
                files.append({
                    'url': dfile['download_url'],
                    'name': dfile['basename']
                })
        return files

    except:
        return 'error'
Example #27
def get_existing_nokia(oh_access_token):
    print("Entering get_existing_nokia function...")
    member = api.exchange_oauth2_member(oh_access_token)
    for dfile in member['data']:
        if 'nokiahealthdata' in dfile['metadata']['tags']:
            print("Found file with tag...")
            # get file here and read the json into memory
            tf_in = tempfile.NamedTemporaryFile(suffix='.json')
            tf_in.write(requests.get(dfile['download_url']).content)
            tf_in.flush()
            nokia_data = json.load(open(tf_in.name))
            # print("getting existing data:")
            # print(nokia_data)
            # print(type(nokia_data))
            # for key in nokia_data:
            #     print(key)
            return nokia_data
    print('no existing data with nokiahealthdata tag')
    return {}
Example #28
def write_new_tweets(oh_member, twitter_api, month, new_data):
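    """Merge new tweets and likes into the month's archive, upload it, and delete the old file."""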
    existing_files = api.exchange_oauth2_member(
        oh_member.get_access_token(),
        all_files=True)
    old_data = None
    file_id = None
    for dfile in existing_files['data']:
        if dfile['basename'] == 'twitter-data-{}.json'.format(month):
            old_data = requests.get(dfile['download_url']).json()
            file_id = dfile['id']
            break
    if old_data:
        old_data['tweets'] = new_data['tweets'] + old_data['tweets']
        old_data['likes'] = new_data['likes'] + old_data['likes']
    else:
        old_data = {'tweets': new_data['tweets'], 'likes': new_data['likes'],
                    'followers': [], 'following': []}
    if month == str(datetime.datetime.today())[:7]:
        me = twitter_api.me()
        old_data['followers'].append(
            {'timestamp': str(datetime.datetime.today()),
                'value': me.followers_count})
        old_data['following'].append(
            {'timestamp': str(datetime.datetime.today()),
                'value': me.friends_count})
    with tempfile.TemporaryFile() as f:
        js = json.dumps(old_data)
        js = str.encode(js)
        f.write(js)
        f.flush()
        f.seek(0)
        api.upload_stream(
            f, "twitter-data-{}.json".format(month),
            metadata={
                "description": "Twitter Data",
                "tags": ["Twitter"]
            }, access_token=oh_member.get_access_token())
    if file_id:
        api.delete_file(
            oh_member.get_access_token(),
            project_member_id=oh_member.oh_id,
            file_id=file_id)
Example #29
def dashboard(request):
    oh_member = request.user.oh_member
    context = {
        'oh_member': oh_member,
    }
    try:
        oh_member_data = api.exchange_oauth2_member(
            oh_member.get_access_token())
    except:
        messages.error(request, "You need to re-authenticate with Open Humans")
        logout(request)
        return redirect("/")
    all_available_notebooks = get_notebook_files(oh_member_data)
    existing_notebooks = SharedNotebook.objects.filter(oh_member=oh_member)
    context['notebook_files'] = all_available_notebooks
    context['existing_notebooks'] = existing_notebooks
    context['JH_URL'] = settings.JUPYTERHUB_BASE_URL
    context['base_url'] = request.build_absolute_uri("/").rstrip('/')
    context['section'] = 'dashboard'
    return render(request, 'main/dashboard.html', context=context)
Example #30
def add_notebook(request, notebook_id):
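    """Show the form for sharing or editing a notebook fetched from Open Humans, and handle its submission."""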
    oh_member = request.user.oh_member
    try:
        oh_member_data = api.exchange_oauth2_member(
            oh_member.get_access_token())
    except:
        messages.error(request, "You need to re-authenticate with Open Humans")
        logout(request)
        return redirect("/")

    notebook_name, notebook_url = get_notebook_oh(oh_member_data, notebook_id)

    if request.method == 'POST':
        add_notebook_helper(request, notebook_url, notebook_name, oh_member)
        return redirect('/dashboard')
    else:
        if SharedNotebook.objects.filter(
                oh_member=oh_member,
                notebook_name=notebook_name).exists():
            existing_notebook = SharedNotebook.objects.get(
                oh_member=oh_member, notebook_name=notebook_name)
            context = {
                'description': existing_notebook.description,
                'tags': existing_notebook.get_tags(),
                'data_sources': existing_notebook.get_data_sources(),
                'name': notebook_name,
                'notebook_id': str(notebook_id),
                'edit': True
            }
        else:
            notebook_content = download_notebook_oh(notebook_url)
            suggested_sources = suggest_data_sources(notebook_content)
            context = {
                'description': '',
                'name': notebook_name,
                'notebook_id': str(notebook_id),
                'tags': '',
                'data_sources': suggested_sources
            }
        return render(request, 'main/add_notebook.html', context=context)