Example #1
    def get(self, request):
        """Refresh the session's access token, then render the page.

        :param request: the incoming HTTP request
        :return: the rendered template response
        """
        get_access_token(request.session)

        return render_to_response(
            self.template_name, {}, RequestContext(request)
        )
Example #2
def get_block_volume_list(client_group, host="localhost"):
    retval = -1
    volume_list = []
    if type(client_group) in (str, int):
        if isinstance(client_group, str):
            client_group = get_client_group_id(client_group, host)
        if client_group != -1:
            token = utils.get_access_token(host)
            if token != -1:
                # \& keeps the shell from treating & as a command separator
                # when the assembled curl command runs on the host.
                url = utils.XMS_REST_BASE_URL.format(ip=host) + "block-volumes/?token={token}\\&client_group_id={cgid}"
                curl_header = utils.XMS_CURL_GET_HEADER
                cmd = curl_header + url.format(token=token, cgid=client_group)
                print(cmd)
                ret = utils.execute_cmd_in_host(cmd, host)
                if ret[2] != 0:
                    print("[Error] Failed to get client group volumes info. Error message: [{err}]".format(err=ret[1]))
                else:
                    try:
                        volume_info = json.loads(ret[0])
                        volumes = volume_info['block_volumes']
                        for v in volumes:
                            if v['access_path']:
                                volume_list.append(BlockVolume.BlockVolume(v['id'], v['name'], v['client_group_num'], v['pool']['id'], v['pool']['name'], v['access_path']['id'], v['access_path']['name']))
                            else:
                                volume_list.append(BlockVolume.BlockVolume(v['id'], v['name'], v['client_group_num'], v['pool']['id'], v['pool']['name'], None, None))
                        retval = 0
                    except Exception as e:
                        print("[Error] The volumes info is invalid. Error message: " + str(e))

    return retval, volume_list
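Shelling out to curl also puts the token in the host's process list; for comparison, a requests-based equivalent of the call above might look like this sketch (the endpoint shape mirrors the URL built above, while the scheme, timeout, and error handling are assumptions):

import requests

def get_block_volumes_via_requests(host, token, client_group_id):
    # Assumed endpoint shape, mirroring the utils.XMS_REST_BASE_URL usage above.
    url = "http://{ip}/block-volumes/".format(ip=host)
    params = {"token": token, "client_group_id": client_group_id}
    resp = requests.get(url, params=params, timeout=10)
    resp.raise_for_status()
    return resp.json().get("block_volumes", [])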
Example #3
def main():
    args = get_args()
    config = utils.get_config(args.config)
    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)

    session = http_session.StorageSession(**config['session'],
                                          access_key=utils.get_access_token())

    root_dir = config['data']['root_dir']

    raw_path = utils.build_path(root_dir=root_dir,
                                sub_dir=args.raw,
                                date=args.date,
                                ext='json')
    data = download_data(session, path=raw_path)

    rows = parse_data(data)

    LOGGER.info("Retrieved %s rows", len(rows))

    headers = utils.get_headers(config['fields'])
    rows = transform.clean(rows, data_types=headers, date=args.date)

    output_path = utils.build_path(root_dir=root_dir,
                                   sub_dir=args.output,
                                   date=args.date,
                                   ext='csv')
    utils.write_csv(path=output_path, rows=rows, header=args.header)
Example #4
def generate_excel(configuration):
    ente = configuration['ente']
    filename = None

    t = get_access_token(ente)
    json_files = glob.glob("json/*.json")
    out = 0
    num_threads = 10

    with ProcessPoolExecutor(max_workers=num_threads) as pool:
        with tqdm(total=len(json_files)) as progress:
            futures = []
            for filename in json_files:
                future = pool.submit(generate_one_excel, t, filename)
                futures.append(future)

            for future in futures:
                progress.update()
                result = future.result()
                if result is None:
                    continue
                out += len([r for r in result if r['generated']])
                for r in result:
                    if r['generated']:
                        logging.debug("(%d/%d) Generated excel file: %s", out,
                                      len(json_files), r['file'])
                    else:
                        logging.debug(
                            "(%d/%d) Excel file not generated: %s, with message %s",
                            out, len(json_files), r['file'], r['message'])

    return out
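This example (and the next) awaits futures in submission order, so the progress bar only advances once the oldest outstanding future finishes. A sketch of a variant using concurrent.futures.as_completed, reusing the names above, updates as soon as any worker completes:

from concurrent.futures import as_completed

# Sketch: iterate futures in completion order rather than submission order,
# so tqdm advances as soon as any worker finishes.
for future in as_completed(futures):
    progress.update()
    result = future.result()
    # ... handle result as above ...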
Example #5
def download_json(configuration):
    ente = configuration['ente']
    filename = configuration['filename']
    t = get_access_token(ente)

    num_threads = 10
    call_ids = []
    with open(filename) as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        for row in csv_reader:
            call_id = row[0]
            call_ids.append(call_id)

    out = 0
    with ProcessPoolExecutor(max_workers=num_threads) as pool:
        with tqdm(total=len(call_ids)) as progress:
            futures = []
            for call_id in call_ids:
                if "Conference" in call_id: continue
                future = pool.submit(download_call_data, t, call_id)
                futures.append(future)

            for future in futures:
                progress.update()
                result = future.result()
                if result is None:
                    continue
                out += len(result)
                for r in result:
                    if r['downloaded']:
                        logging.debug("(%d/%d) Downloaded json file: %s", out, len(call_ids), r['file'])
                    else:
                        logging.debug("(%d/%d) Json file already present: %s", out, len(call_ids), r['file'])
        
    return out
Example #6
def add_members_to_workspace(workspace_name, acls, namespace=NAMESPACE, ignore=[]):
    """Add members to workspace permissions."""
    json_request = make_add_members_to_workspace_request(acls, ignore=ignore)

    # request URL for updateWorkspaceACL
    uri = f"https://api.firecloud.org/api/workspaces/{namespace}/{workspace_name}/acl?inviteUsersNotFound=false"

    # Get access token and add to headers for requests.
    headers = {"Authorization": "Bearer " + get_access_token(), "accept": "*/*", "Content-Type": "application/json"}
    # equivalent curl: -H "accept: */*" -H "Authorization: Bearer [token]" -H "Content-Type: application/json"

    # capture response from API and parse out status code
    response = requests.patch(uri, headers=headers, data=json_request)
    status_code = response.status_code

    emails = [acl['email'] for acl in json.loads(json_request)]
    # print success or fail message based on status code
    if status_code != 200:
        print(f"WARNING: Failed to update {namespace}/{workspace_name} with the following user(s)/group(s): {emails}.")
        print("Check output file for error details.")
        return False, response.text

    print(f"Successfully updated {namespace}/{workspace_name} with the following user(s)/group(s): {emails}.")
    emails_str = ("\n".join(emails))  # write list of emails as strings on new lines
    return True, emails_str
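make_add_members_to_workspace_request is not shown here; based on how its output is consumed above (json.loads yields a list of ACL dicts with an 'email' key), a plausible sketch is:

import json

# Hypothetical sketch: drop ignored emails and serialize the remaining
# ACL entries as the PATCH request body.
def make_add_members_to_workspace_request(acls, ignore=()):
    return json.dumps([acl for acl in acls if acl.get("email") not in ignore])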
Example #7
def donate():
    app = request.args.get('app', 'strava')
    token = get_access_token(app)
    status = kafkaproducer_connector.donate_activity_data({
        'token': token,
        'app': app
    })
    return jsonify(success=status)
Example #8
def add_members_to_workspace(workspace_name, project=NAMESPACE):
    """Add members to workspace permissions."""

    acls = []
    # add van allen group as READER, B.Reardon and J.Park OWNER(s)
    acls.append({
        'email': '*****@*****.**',
        'accessLevel': 'READER',
        'canShare': False,
        'canCompute': False
    })
    acls.append({
        'email': '*****@*****.**',
        'accessLevel': 'OWNER',
        'canShare': True,
        'canCompute': True
    })
    acls.append({
        'email': '*****@*****.**',
        'accessLevel': 'OWNER',
        'canShare': True,
        'canCompute': True
    })

    json_request = json.dumps(acls)

    # request URL for updateWorkspaceACL
    uri = f"https://api.firecloud.org/api/workspaces/{project}/{workspace_name}/acl?inviteUsersNotFound=false"

    # Get access token and add to headers for requests.
    headers = {
        "Authorization": "Bearer " + get_access_token(),
        "accept": "*/*",
        "Content-Type": "application/json"
    }
    # equivalent curl: -H "accept: */*" -H "Authorization: Bearer [token]" -H "Content-Type: application/json"

    # capture response from API and parse out status code
    response = requests.patch(uri, headers=headers, data=json_request)
    status_code = response.status_code

    emails = [acl['email'] for acl in acls]
    # print success or fail message based on status code
    if status_code != 200:
        print(
            f"WARNING: Failed to update {project}/{workspace_name} with the following user(s)/group(s): {emails}."
        )
        print("Check output file for error details.")
        return False, response.text

    print(
        f"Successfully updated {project}/{workspace_name} with the following user(s)/group(s): {emails}."
    )
    emails_str = ("\n".join(emails)
                  )  # write list of emails as strings on new lines
    return True, emails_str
Example #9
def prepare_and_launch(file_path):
    # get access token and add it to the request headers
    headers = {"Authorization": "Bearer " + get_access_token()}

    # get the workflow config
    workflow = get_workspace_config(WORKSPACE_NAMESPACE, WORKSPACE_NAME,
                                    WORKFLOW_NAMESPACE, WORKFLOW_NAME, headers)
    check_fapi_response(workflow, 200)
    workflow_config_json = workflow.json()

    # This workflow uses inputs from the data table as well as the file_path
    # value input to this function. We first pull the root entity type from
    # the workflow config, and then look for sets of that entity type,
    # selecting the first set found in the data table.
    root_entity_type = workflow_config_json['rootEntityType']

    expression = f'this.{root_entity_type}s'
    set_entity_type = f'{root_entity_type}_set'
    entities = get_entities(WORKSPACE_NAMESPACE, WORKSPACE_NAME,
                            set_entity_type, headers)
    check_fapi_response(entities, 200)
    all_set_names = [ent['name'] for ent in entities.json()]
    set_to_use = all_set_names[0]  # use the first set

    # Next we need to add the specific input from file_path. We update this value
    # in the inputs section of the workflow_config_json.
    for input_value in workflow_config_json['inputs']:
        if input_value.endswith(INPUT_NAME):
            workflow_config_json['inputs'][input_value] = f"\"{file_path}\""

    # remove outputs assignment from config
    workflow_config_json['outputs'] = {}

    # update the workflow configuration
    updated_workflow = update_workspace_config(WORKSPACE_NAMESPACE,
                                               WORKSPACE_NAME,
                                               WORKFLOW_NAMESPACE,
                                               WORKFLOW_NAME,
                                               workflow_config_json, headers)
    check_fapi_response(updated_workflow, 200)

    # launch the workflow
    create_submission_response = create_submission(WORKSPACE_NAMESPACE,
                                                   WORKSPACE_NAME,
                                                   WORKFLOW_NAMESPACE,
                                                   WORKFLOW_NAME,
                                                   headers,
                                                   use_callcache=True,
                                                   entity=set_to_use,
                                                   etype=set_entity_type,
                                                   expression=expression)
    check_fapi_response(create_submission_response, 201)

    submission_id = create_submission_response.json()['submissionId']
    print(f"Successfully created submission: submissionId = {submission_id}.")
Example #10
def method(args):
    access_token = utils.get_access_token(args.consumer_key, auth_file=args.auth_file)
    pocket_instance = pocket.Pocket(args.consumer_key, access_token)

    # performs all these actions in one request
    # NOTE: Each individual method returns the instance itself. The response
    # dictionary is not returned until commit is called on the instance.
    response, headers = pocket_instance.get(
        state=args.state, favorite=args.favorite, tag=args.tag,
        contentType=args.content_type, sort=args.sort,
        detailType=args.detail_type, search=args.search,
        domain=args.domain, since=args.since,
        count=args.count, offset=args.offset
    )

    if args.header_output is not None:
        header_file = args.header_output
        if os.path.splitext(header_file)[1] == '':
            header_file = header_file + "." + args.header_format
        with open(header_file, 'w+') as fp:
            if args.header_format == "yaml":
                fp.write(yaml.dump(headers))
            elif args.header_format == "json":
                fp.write(json.dumps([headers]))
            else:
                raise ValueError("unsupported header format: " + args.header_format)

    headers_dict = {}
    for key in headers:
        headers_dict[key] = headers[key]
    response["headers"] = headers_dict

    args_dict = {}
    for arg in vars(args):
        if type(getattr(args, arg)) in [str, int]:
            args_dict[arg] = getattr(args, arg)
    response["args"] = args_dict

    if args.response_format == "yaml":
        response_output = yaml.dump(response)
    elif args.response_format == "json":
        response_output = json.dumps([response])
    else:
        raise ValueError("unsupported response format: " + args.response_format)

    if args.response_output is None:
        print(response_output)
    else:
        response_file = args.response_output
        if os.path.splitext(response_file)[1] == '':
            response_file = response_file + "." + args.response_format

        with open(response_file, 'w+') as fp:
            fp.write(response_output)
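The NOTE above refers to the pocket library's bulk-modify interface, where each modify call queues an action and returns the instance. A short usage sketch (the item IDs are placeholders; check the exact method names against the library's documentation):

# Sketch of the chained bulk-modify pattern the NOTE describes:
# nothing is sent to the API until commit() is called.
pocket_instance.archive(100001).favorite(100002).commit()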
Example #11
def twitter_auth():
    oauth_verifier = request.args.get('oauth_verifier')
    access_token = get_access_token(session['request_token'], oauth_verifier)

    user = User.load_from_db_by_screen_name(access_token['screen_name'])
    if not user:
        user = User(access_token['screen_name'], access_token['oauth_token'],
                    access_token['oauth_token_secret'], None)
        user.save_to_db()

    session['screen_name'] = user.screen_name

    return redirect(url_for('profile'))
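The get_access_token helper used here is not shown; assuming it wraps Twitter's OAuth 1.0a token exchange, a sketch with requests-oauthlib could look like this (CONSUMER_KEY, CONSUMER_SECRET, and the request_token layout are assumptions):

from requests_oauthlib import OAuth1Session

# Hypothetical sketch of the OAuth 1.0a exchange behind get_access_token.
def get_access_token(request_token, oauth_verifier):
    oauth = OAuth1Session(
        CONSUMER_KEY,  # assumed application credential
        client_secret=CONSUMER_SECRET,  # assumed application credential
        resource_owner_key=request_token['oauth_token'],
        resource_owner_secret=request_token['oauth_token_secret'],
        verifier=oauth_verifier,
    )
    # Twitter's response includes oauth_token, oauth_token_secret and screen_name.
    return oauth.fetch_access_token('https://api.twitter.com/oauth/access_token')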
Example #12
def authenticate_callback():
    """
    Once Salesforce has confirmed that the client application is authorized, 
    the end-user's web browser is redirected to the callback URL specified by 
    the redirect_uri parameter, appended with the following values in its 
    query string:
    
        code:   The authorization code that is passed to get the access and refresh tokens.
        state:  The state value that was passed in as part of the initial request, if applicable.
    
    The client application server must extract the authorization code and pass
    it in a request to Salesforce for an access token. This request should be 
    made as a POST against this URL: 
    https://login.instance_name/services/oauth2/token with the following query 
    parameters:
    
        grant_type:     Value must be authorization_code for this flow.
        client_id:      Consumer key from the remote access application definition.
        client_secret:  Consumer secret from the remote access application definition.
        redirect_uri:   URI to redirect the user to after approval. This must match the value in the Callback URL field in the remote access application definition exactly, and is the same value sent by the initial redirect.
        code:           Authorization code obtained from the callback after approval.
        format:         Expected return format. This parameter is optional. The default is json. Values are:
        
            * urlencoded
            * json
            * xml
    """
    # Ensure we have an authorization code
    code = request.args.get("code", None)
    if not code:
        abort(400)
    
    # Lookup the access code
    status, access = utils.get_access_token(code)
    
    if not (status == 200 and "error" not in access):
        # Error
        abort(500)
    
    # Save the details
    user = User.get_current_user()
    user.access_token = access.get("access_token", "")
    user.refresh_token = access.get("refresh_token", "")
    user.instance_url = access.get("instance_url", "")
    user.save()
    
    # Create email handle
    handle = EmailHandle.create_handle_for_user(user)
    
    # Redirect
    return redirect("/?handle_created")
Example #13
def main():
    args = get_args()
    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
    config = utils.get_config(args.config)

    session = http_session.StorageSession(**config['session'], access_key=utils.get_access_token())

    for device in objects.Device.list(session=session):
        LOGGER.info("Device: '%s'", device)

        site = device_to_site(device=device)
        site.save()

        sensor = device_to_sensor(device=device, family=session.application_id)
        sensor.save()
Example #14
def create_workspace(workspace_name, auth_domains, attributes, namespace=NAMESPACE):
    """Create the Terra workspace."""
    # check if workspace already exists
    ws_exists, ws_exists_response = check_workspace_exists(workspace_name, namespace)

    if ws_exists is None:
        return False, ws_exists_response

    if not ws_exists:  # workspace doesn't exist (404), create workspace
        # format auth_domain_response
        auth_domain_names = json.loads(auth_domains)["workspace"]["authorizationDomain"]
        # create request JSON
        create_ws_json = make_create_workspace_request(workspace_name, auth_domain_names, attributes, namespace)  # json for API request

        # request URL for createWorkspace (rawls) - bucketLocation not supported in orchestration
        uri = "https://rawls.dsde-prod.broadinstitute.org/api/workspaces"

        # Get access token and add to headers for requests.
        # equivalent curl: -H "accept: application/json" -H "Authorization: Bearer [token]" -H "Content-Type: application/json"
        headers = {"Authorization": "Bearer " + get_access_token(), "accept": "application/json", "Content-Type": "application/json"}

        # capture response from API and parse out status code
        response = requests.post(uri, headers=headers, data=json.dumps(create_ws_json))
        status_code = response.status_code

        if status_code != 201:  # ws creation fail
            print(f"WARNING: Failed to create workspace with name: {workspace_name}. Check output file for error details.")
            return False, response.text
        # workspace creation success
        print(f"Successfully created workspace with name: {workspace_name}.")
        return True, None

    # workspace already exists
    print(f"Workspace already exists with name: {namespace}/{workspace_name}.")
    print(f"Existing workspace details: {json.dumps(json.loads(ws_exists_response), indent=2)}")
    # make user decide if they want to update/overwrite existing workspace
    while True:  # try until user inputs valid response
        update_existing_ws = input("Would you like to continue modifying the existing workspace? (Y/N)" + "\n")
        if update_existing_ws.upper() in ["Y", "N"]:
            break
        else:
            print("Not a valid option. Choose: Y/N")
    if update_existing_ws.upper() == "N":       # don't overwrite existing workspace
        deny_overwrite_message = f"{namespace}/{workspace_name} already exists. User selected not to overwrite. Try again with unique workspace name."
        return None, deny_overwrite_message

    accept_overwrite_message = f"{namespace}/{workspace_name} already exists. User selected to overwrite."
    return True, accept_overwrite_message    # overwrite existing workspace - 200 status code for "Y"
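check_workspace_exists is not shown; matching how its two return values are used above ((None, error) on failure, (False, ...) on a 404, (True, response text) when the workspace exists), a plausible sketch is:

import requests

# Hypothetical sketch of check_workspace_exists against the FireCloud
# getWorkspace endpoint.
def check_workspace_exists(workspace_name, namespace):
    uri = f"https://api.firecloud.org/api/workspaces/{namespace}/{workspace_name}"
    headers = {"Authorization": "Bearer " + get_access_token(), "accept": "application/json"}
    response = requests.get(uri, headers=headers)
    if response.status_code == 404:
        return False, None          # workspace does not exist
    if response.status_code != 200:
        return None, response.text  # unexpected error
    return True, response.text      # workspace already exists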
Example #15
def generate_image_info(username=None, limit=None):
    d = {}
    if username:
        d = {'instagram_username': username}
    # last_used_in_api helps us limit the # of API calls
    infos = InstagramInfo.objects.filter(**d).exclude(end_date=None).\
        order_by('last_used_in_api')[:MAX_API_PER_GENERATION]

    for info in infos:
        last_saved = info.start_date
        if InstagramPhoto.objects.filter(license_info=info).exists():
            latest_saved_photo = InstagramPhoto.objects.filter(
                license_info=info).order_by('-created_time')[0]
            last_saved = latest_saved_photo.created_time

        recent = cache.get('api_rc_%s' % info.instagram_id)
        if recent is None:
            # Get the most recent since we last cached from the API
            try:
                recent_resp = instagram.api.users(info.instagram_id).media.\
                    recent.get(
                        access_token=get_access_token(info.user),
                        max_timestamp=to_unix_time(info.end_date),
                        min_timestamp=to_unix_time(last_saved))
            except Exception:
                return
            recent = recent_resp['data']
            # One hour cache per-user
            cache.set('api_rc_%s' % info.instagram_id, recent, 60 * 60)

        for item in recent:
            # The API returns items even if they're before min_timestamp
            # sometimes, so we have to check by hand here.
            created_time = from_unix_time(int(item['created_time']))
            if (created_time < info.end_date and
                created_time > info.start_date and created_time > last_saved):
                save_image_info(item, info)

        info.last_used_in_api = datetime.now().replace(tzinfo=utc)
        info.save()
Example #16
def _get_people(configuration):
    ente = configuration['ente']
    t = get_access_token(ente)

    groups = []
    uri = 'https://graph.microsoft.com/beta/groups?$orderby=displayName'
    for g in get_graph_data(t, uri):
        groups.append(g)
    
    people = {}
    for g in groups:
        if g['displayName'].startswith('Organizzatori FAD '):
            centro = g['displayName'].replace('Organizzatori FAD ', '')
            groupid = g['id']

            participants = []
            uri = f'https://graph.microsoft.com/beta/groups/{groupid}/members'
            for p in get_graph_data(t, uri):
                participants.append(p['displayName'])

            people[centro] = participants

    return people
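get_graph_data is not shown; Microsoft Graph paginates list responses via @odata.nextLink, so a plausible sketch of the helper is:

import requests

# Hypothetical sketch: follow Graph's @odata.nextLink pagination and
# yield each item in the 'value' array.
def get_graph_data(token, uri):
    headers = {"Authorization": "Bearer " + token}
    while uri:
        payload = requests.get(uri, headers=headers).json()
        for item in payload.get("value", []):
            yield item
        uri = payload.get("@odata.nextLink")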
Example #17
    def refresh_access_token(self):
        access_token = get_access_token(self.api_key, self.api_secret)
        self.expires = access_token.expires_in + time.time()
        self.access_token = access_token.access_token
Example #18
# -*- coding: utf-8 -*-
# @Time    : 2019/4/25 9:29
# @Author  : shine
# @File    : code_main.py
from ai_api.ocr_api import get_content_by_path
from ai_sdk.ocr_sdk import general_image
from utils import get_access_token, API_KEY, SECRET_KEY, get_image_content

if __name__ == "__main__":
    # SDK approach
    image_obj = get_image_content('timg.jpg')
    res = general_image(image_obj)
    print(res)

    # API approach
    token = get_access_token(API_KEY, SECRET_KEY)
    content = get_content_by_path('taxi.jpg', token)
    print(content)
Example #19
def main(event, context):
    """Triggered from a message on a Cloud Pub/Sub topic.
    Args:
         event (dict): Event payload.
         context (google.cloud.functions.Context): Metadata for the event.
    """
    pubsub_message = base64.b64decode(event['data']).decode('utf-8')

    ####################################
    ########## Check tickers ###########
    ####################################
    if pubsub_message == 'Ticker':
        print('Getting list of predefined tickers')
        defaultTickers = pd.read_csv('tickers.txt')
        defaultTickerSet = set(defaultTickers['tickers'].values.tolist())

        print("Going to Wikipedia to get list of current tickers in S&P500")
        wikiTickers = utils.get_sp500_tickers()

        # Need to filter out anything with a period.
        filteredWikiTickers = [x for x in wikiTickers if '.' not in x]

        # Now double check to make sure there's a max of 4 letters, remove otherwise
        finalWikiTickers = [x for x in filteredWikiTickers if len(x)<=4]
        wikiTickerSet = set(finalWikiTickers)

        # Check if website tickers are different:
        defaultNotInWiki = defaultTickerSet.difference(wikiTickerSet)
        wikiNotInDefault = wikiTickerSet.difference(defaultTickerSet)

        removeTicker = pd.DataFrame({'RemoveTickers': list(defaultNotInWiki)})

        addTicker = pd.DataFrame({'AddTickers': list(wikiNotInDefault)})

        if ((len(removeTicker)>0) | (len(addTicker)>0)):
            print('Need to change some tickers..')
            html, subject = emails.ticker_check_email(addTicker.to_html(), removeTicker.to_html())
            response = emails.send_email(request=pubsub_message, html_content=html, subject=subject)
            print(response)
        else:
            print("No tickers to change")
        
    ########################################
    ########## Run trading algo ###########
    ########################################
    if pubsub_message == 'Trading':

        # Get Tickers: 
        print('getting tickers')
        tickers = pd.read_csv('tickers.txt')
        tickers = tickers['tickers'].values.tolist()


        print('getting access token')
        newAccess = utils.get_access_token()
        access_token = newAccess['access_token']
        expires_in = newAccess['expires_in']

        print('Running the algo..')
        hist_stock_data = utils.get_stocks(token=access_token, tickers=tickers, expires_in=expires_in)
        print('Calculating trade metrics..')
        trade_metric_df = utils.calc_trade_metrics(stock_data=hist_stock_data)
        print('Shape of trade metrics: ', trade_metric_df.shape)

        print("Getting buy/sell symbols...")
        (algoBuys, algoSells) = utils.find_trades(data_frame=trade_metric_df, token=access_token, tickers=tickers)
        
        print('Submit the orders!')
        # This is the old way - the slow way!
        #(buys, sells) = utils.make_trades(positionsToBuy=algoBuys, positionsToSell=algoSells, token=access_token)

        # Async order submissions
        orderStart = pd.to_datetime('today')
        asyncio.run(utils.make_trades_async(buySymbolsList=algoBuys, sellSymbolsList=algoSells, token=access_token))
        orderEnd = pd.to_datetime('today')
        print('Time taken to send orders: ', (orderEnd - orderStart))
        
        buyToday = hist_stock_data[(hist_stock_data['symbol'].isin(algoBuys)  & 
                (hist_stock_data['datetime']==pd.to_datetime('today').strftime('%Y-%m-%d')))]['close'].sum()

        sellToday = hist_stock_data[(hist_stock_data['symbol'].isin(algoSells)  & 
                (hist_stock_data['datetime']==pd.to_datetime('today').strftime('%Y-%m-%d')))]['close'].sum()

        maxNeeded = hist_stock_data[(hist_stock_data['datetime']==pd.to_datetime('today').strftime('%Y-%m-%d'))]['close'].sum()
                         
        print('Approx amount bought today: ', round(buyToday,2))
        print('Approx amount sold today: ', round(sellToday,2))
        print('Maximum possible needed: ', round(maxNeeded,2))
        print('Trading bot deployed')


    #######################################
    #      Save today's trades to DB      #
    #######################################
    if pubsub_message == 'MorningTrades':
        
        print('getting access token')
        newAccess = utils.get_access_token()
        access_token = newAccess['access_token']
        expires_in = newAccess['expires_in']

        print("Pulling and saving today's trades...")
        today = pd.to_datetime('today').strftime('%Y-%m-%d')
        todaysTrades = utils.get_historical_trades_DF(start_date=today, end_date=today, token=access_token)
        print('There were {} trades today..'.format(todaysTrades.shape[0]))

        print('Saving trades to the DB..')
        db.save_trades_gbq(ordersDF=todaysTrades)
        print("Done saving today's trades..")

        html, subject = emails.daily_trades(tradesDF=todaysTrades)
        response = emails.send_email(pubsub_message, html_content=html, subject=subject)
        print('Email response: ', response)
        print("Done saving and sending today's trades...")
   


    #######################################
    #           Shut it down!             #
    #######################################
    if pubsub_message == 'Kill':    

        # Get Tickers: 
        print('getting tickers')
        tickers = pd.read_csv('tickers.txt')
        tickers = tickers['tickers'].values.tolist()

        print('getting access token')
        newAccess = utils.get_access_token()
        access_token = newAccess['access_token']

        orderStart = pd.to_datetime('today')
        failures = utils.shut_it_down(token=access_token, tickers=tickers)
        orderEnd = pd.to_datetime('today')
        print('Trades that failed: ', failures)
        print('Time taken to send orders: ', (orderEnd - orderStart))


    #######################################
    #        Update Refresh Token         #
    #######################################

    if pubsub_message == 'Refresh Token':

        newAccess = utils.get_access_token()
        access_token = newAccess['access_token']
        
        # Get a new token
        newRefreshToken = utils.get_new_refresh_token(token=access_token)
        print('New creds: ', newRefreshToken)

        configFile = open("config.py").read().splitlines()
        newString = "TD_REFRESH_TOKEN=\'{}\'".format(newRefreshToken['refresh_token'])
        print(newString)

        # The refresh token lives on the second line of config.py
        configFile[1] = newString
        with open('config.py', 'w') as f:
            for item in configFile:
                f.write("%s\n" % item)

        print('Saved new refresh token')