# Ejemplo n.º 1
# 0
 def getSearchResponse(self, symbolOrNames):
     """Look up symbol(s)/name(s) via Yahoo Finance's search endpoint.

     Accepts a single value or a list; returns the decoded JSON response.
     """
     names = Provider._ensureList(symbolOrNames)
     encoded = self.__encode(names)
     response = fetch(
         f"https://query2.finance.yahoo.com/v1/finance/search?q={encoded}&quotesCount=1&newsCount=0&enableFuzzyQuery=false&quotesQueryId=tss_match_phrase_query&multiQuoteQueryId=multi_quote_single_token_query&newsQueryId=news_ss_symbols&enableCb=false&enableNavLinks=false"
     )
     return response.json()
    def StartCrawling(self):
        """Crawl Google Scholar author-listing pages starting from self._link.

        Visits up to self._depth listing pages (following each page's 'Next'
        button), collecting every author's publications into
        self.authorsToPublications, then prints the mapping and dumps it to
        'data.json'.

        Note: pages are taken from the end of the pending list (LIFO), and a
        page that fails to download does not consume depth.
        """
        pending = [self._link]

        while pending and self._depth:
            current = pending.pop()
            try:
                page = BeautifulSoup(fetch(current).text, "lxml")
            except Exception:
                # Best-effort crawl: skip pages that fail to download/parse
                # instead of aborting. (Was a bare `except:`, which also
                # swallowed KeyboardInterrupt/SystemExit.)
                continue

            for name_tag in page.findAll('h3', class_='gs_ai_name'):
                try:
                    profile_link = name_tag.a.get('href')
                    publications = self.CrawlBooks(ROOT_URL + profile_link)
                    if publications:
                        self.authorsToPublications[name_tag.a.text] = publications
                except Exception:
                    # Skip malformed author entries; keep crawling the rest.
                    pass
            self._depth -= 1

            # Queue the next results page, if this page has a 'Next' button.
            for button in page.findAll('button'):
                if button.get('aria-label') == 'Next':
                    pending.append(self.CleanUpTheLink(button.get('onclick')))
                    break

        for author, publications in self.authorsToPublications.items():
            print(author)
            print(publications)
            print()

        with open('data.json', 'w') as fp:
            json.dump(self.authorsToPublications, fp, sort_keys=True, indent=4)
# Ejemplo n.º 3
# 0
def fetch_request():
    """Proxy an HTTP request described by the posted form data.

    Form fields:
        endpoint: target URL.
        type: HTTP method name passed through to fetch().
        params (optional): request payload; defaults to an empty list.
        header (optional): headers mapping for the upstream request.

    Returns a JSON string containing the upstream response's headers,
    status code and body text.
    """
    endpoint = request.form['endpoint']
    request_type = request.form['type']
    params = request.form.get('params', [])

    # Look the field up *before* calling fetch() so a KeyError raised inside
    # fetch() is not mistaken for a missing 'header' field (the original
    # try/except wrapped the fetch call as well).
    header = request.form.get('header')
    if header is not None:
        r = fetch(request_type, endpoint, data=params, headers=header)
    else:
        r = fetch(request_type, endpoint, data=params)

    response_text = {
        # `unicode` does not exist in Python 3 (this file uses f-strings,
        # so it runs on Python 3); use str instead.
        'headers': str(r.headers),
        'status_code': str(r.status_code),
        'content': str(r.text),
    }
    return dumps(response_text)
 def CrawlBooks(self, link):
     """Scrape a Google Scholar profile page for its publication table.

     Returns a list of (title, citation_count, year, link) tuples, one per
     row, where *link* is the profile URL that was scraped.
     """
     soup = BeautifulSoup(fetch(link).text, "lxml")
     titles = soup.findAll('td', class_='gsc_a_t')
     citations = soup.findAll('td', class_='gsc_a_c')
     years = soup.findAll('td', class_='gsc_a_y')
     return [
         (title.a.text, cited.a.text, year.span.text, link)
         for title, cited, year in zip(titles, citations, years)
     ]
# Ejemplo n.º 5
# 0
    def getQuoteLatestResponse(self, symbolOrNames):
        """Fetch the latest quote data for the given symbol(s).

        Verifies/normalizes the input via self.verifySymbol, then queries
        Yahoo Finance's v7 quote endpoint and returns the parsed JSON.
        """
        verified = self.verifySymbol(symbolOrNames)
        encoded = self.__encode(verified)
        response = fetch(
            f'https://query2.finance.yahoo.com/v7/finance/quote?lang=en-IN&region=IN&symbols={encoded}&fields=longName%2CshortName%2CregularMarketPrice%2CregularMarketChange%2CregularMarketChangePercent%2CmessageBoardId%2CmarketCap%2CunderlyingSymbol%2CunderlyingExchangeSymbol%2CheadSymbolAsString%2CregularMarketVolume%2Cuuid%2CregularMarketOpen%2CfiftyTwoWeekLow%2CfiftyTwoWeekHigh%2CtoCurrency%2CfromCurrency%2CtoExchange%2CfromExchange'
        )
        return response.json()


# p=YahooFinance()
# print([list(d) for d in p.getQuoteHistory('ONGC.NS',1612351202)])
# Ejemplo n.º 6
# 0
 def loadvalues(self):
     """Populate self.data with USD exchange rates.

     Tries to load today's cached rates file first; on any failure falls
     back to fetching the latest rates from exchangerate-api.com and
     caches them for later runs.
     """
     try:
         log("loading file for today")
         data = self.__load(date.today().strftime("%Y-%m-%d"))
         log("loaded data from filesystem")
     except Exception:
         # No (valid) cache for today: fetch latest, save for later use.
         # (Was a bare `except:`, which also swallowed
         # KeyboardInterrupt/SystemExit.)
         log("will fetch data from API")
         data = fetch(
             'https://api.exchangerate-api.com/v4/latest/USD').json()
         self.__save(data)
         log("fetched and saved data from API")
     # Mapping of currency code -> rate relative to USD.
     self.data = data['rates']
# Ejemplo n.º 7
# 0
def login():
    """Start the Twitter OAuth flow.

    Obtains a request token from Twitter and redirects the user to the
    authorization page; returns a 401 tuple if no token could be obtained.
    """
    callback_url = CALLBACK_BASE_URI + "/twitter_auth/verify"
    twitter_req, headers = obtain_request_token(APP_CRED, callback_url)
    resp = fetch(twitter_req.method,
                 twitter_req.url,
                 params=twitter_req.params,
                 headers=headers)

    oauth_token, oauth_secret = extract_request_token(resp.status_code,
                                                      resp.content)
    if oauth_token:
        return redirect(redirect_url(oauth_token))
    return 'Could not get an oauth token from twitter', 401
# Ejemplo n.º 8
# 0
def verify():
    """Complete the Twitter OAuth flow.

    Exchanges the oauth_token/oauth_verifier pair from the callback query
    string for an access token; returns 400 on a malformed callback and
    401 if Twitter does not grant the token.
    """
    oauth_token = request.args.get('oauth_token')
    oauth_verifier = request.args.get('oauth_verifier')
    if not (oauth_token and oauth_verifier):
        return 'The client request was invalid', 400

    twitter_req, headers = obtain_access_token(APP_CRED, oauth_token,
                                               oauth_verifier)
    resp = fetch(twitter_req.method,
                 twitter_req.url,
                 params=twitter_req.params,
                 headers=headers)

    token = extract_access_token(resp.status_code, resp.content)
    if token:
        return jsonify(token)
    return 'Client did not authorize the app', 401
# Ejemplo n.º 9
# 0
 def download(symbol):
     """Download daily historical price data (CSV text) for *symbol* from
     Yahoo Finance; the time window comes from the surrounding p1/p2."""
     url = (f'https://query1.finance.yahoo.com/v7/finance/download/{symbol}'
            f'?period1={p1}&period2={p2}'
            f'&interval=1d&events=history&includeAdjustedClose=true')
     return fetch(url).text
# Ejemplo n.º 10
# 0
def requestContent(url):
    """Fetch *url* and return the raw response body."""
    response = fetch(url)
    return response.content
# Ejemplo n.º 11
# 0
def network():
    """Perform a fetch of https://blah.com, ignoring the response."""
    fetch("https://blah.com")