Example No. 1
def dayWeather():
    import ssl
    from urllib import parse, request

    # Disable certificate verification globally (insecure).
    ssl._create_default_https_context = ssl._create_unverified_context
    params = parse.urlencode({'city': '上海'})  # Shanghai
    url = 'https://www.apiopen.top/weatherApi'
    response = request.urlopen(url, params.encode('utf-8'))  # a data argument makes this a POST
    return response.read().decode('utf-8')
Example No. 2
def displayglobalmultivariate():
    if not (os.path.isfile(config.PATH_TO_ORIGINAL_PCA) and os.path.isfile(config.PATH_TO_EIGS)):
        print("Missing Global PCA Calculation, Calculating")
        if not os.path.isfile(config.PATH_TO_GLOBAL_OCCURRENCES):
            # Fetch the full ReDU identifications file
            import urllib.request as request
            from contextlib import closing
            import shutil

            with closing(request.urlopen('ftp://massive.ucsd.edu/MSV000084206/other/ReDU_all_identifications.tsv')) as r:
                with open(config.PATH_TO_GLOBAL_OCCURRENCES, 'wb') as f:
                    shutil.copyfileobj(r, f)

        redu_pca.calculate_master_projection(config.PATH_TO_GLOBAL_OCCURRENCES)
    
    print("Begin Getting Global PCA")
    df_temp = pd.read_csv(config.PATH_TO_ORIGINAL_PCA)
    full_file_list = df_temp["Unnamed: 0"].tolist()
    df_temp.drop("Unnamed: 0", axis=1, inplace=True)
    sklearn_output = df_temp.values
        
    component_matrix = pd.read_csv(config.PATH_TO_COMPONENT_MATRIX)
    eig_var_df = pd.read_csv(config.PATH_TO_EIGS)
    eigenvalues = eig_var_df["eigenvalues"].tolist()
    percent_variance = eig_var_df["percent_variance"].tolist()

    output_file = ("./tempuploads/global")

    redu_pca.emperor_output(sklearn_output, full_file_list, eigenvalues, percent_variance, output_file)

    return send_file("./tempuploads/global/index.html")
Example No. 3
def index():
    url = "https://www.gutenberg.org/files/61236/61236-0.txt"
    response = request.urlopen(url)
    raw = response.read().decode('utf8')
    sentences = sent_tokenize(raw)  # split the book into sentences with NLTK

    result = random.choice(sentences)
    return render_template("index.html", result=result)
Example No. 4
def predict(image_path):
    # Read and base64-encode the image so it can travel inside a JSON payload.
    with open(image_path, "rb") as f:
        image = base64.b64encode(f.read()).decode("utf-8")
    data = {"image": image, 'postId': 11111, 'userId': 22222, 'imageUrl': image_path}
    data = json.dumps(data)
    req = request.Request(HOST, headers={"Content-Type": "application/json"})
    res = request.urlopen(req, data=data.encode("utf-8"))
    result = json.loads(res.read())
    return result
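predict() relies on a module-level HOST constant that the snippet does not define. A minimal sketch of the assumed setup, with a hypothetical endpoint URL and file name:

HOST = 'http://localhost:5000/predict'  # assumed endpoint; the real service URL may differ

# result = predict('sample.jpg')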
Example No. 5
def home():
    nasa_in = request.urlopen(
        "https://api.nasa.gov/planetary/apod?api_key=9kjFb1X6GKVUdknUZKkyNvCaIiioetBM8ug3I6gj"
    )
    nasa_in = nasa_in.read()
    nDict = json.loads(nasa_in)

    return render_template("home.html",
                           img=nDict['url'],
                           expl=nDict['explanation'])
Example No. 6
def getip_test():
    import json
    from urllib import request
    url = 'http://ipinfo.io/json?ca'
    response = request.urlopen(url)
    str_response = response.read().decode('utf-8')
    data1 = json.loads(str_response)
    IP = data1['ip']
    region = data1['region']  # parsed but not returned
    return IP
Example No. 7
def order():
    nonce_str = getNonceStr()
    out_trade_no = getPayOrdrID()
    # spbill_create_ip = getHostIp()
    spbill_create_ip = SPBILL_CREATE_IP
    total_fee = 1
    openid = ''
    body = 'test'
    # appid,mch_id,nonce_str,sign,body,out_trade_no,total_fee,spbill_create_ip,notify_url,trade_type
    order_dict = {
        "appid": APP_ID,
        "body": body,
        "mch_id": MCH_ID,
        "nonce_str": nonce_str,
        "openid": openid,
        "out_trade_no": out_trade_no,
        "spbill_create_ip": spbill_create_ip,
        "total_fee": total_fee,
        "trade_type": 'JSAPI',
        "notify_url": notify_url,
        'sign_type': 'MD5',
        "sign": ''
    }
    print('first sign')
    sign = xcx_pay_sign(order_dict)
    order_dict['sign'] = sign
    # print('signed_dict = ', order_dict )
    order_xmlstr = dict2xmlstr(order_dict)
    print('xmlstr:', order_xmlstr)

    # test
    # f1 = open('test.txt','w')
    # f1.write(order_xmlstr)

    # print(type(order_xmlstr),'order_xmlstr = ', order_xmlstr )

    order_xmlbytes = order_xmlstr.encode('utf-8')
    print('>>>>>>>>order_xmlbytes:', order_xmlbytes)

    url = 'https://api.mch.weixin.qq.com/pay/unifiedorder'
    try:
        # urlopen's data argument must be bytes, so send the UTF-8 encoded XML
        rsp = request.urlopen(url, order_xmlbytes)
        print('rsp:', rsp)
        msg = rsp.read().decode('utf-8')
        print('>>>msg:', msg)
        # xmlresp = xmltodict.parse(msg)
        # print('xmlresp = ', xmlresp)
        # response = requests.request('post', url, data=order_xmlstr)
        # xmlresp = xmltodict.parse(response.content)
        # print('xmlresp:',xmlresp)
    finally:
        pass
Example No. 8
def get_twitter_data_solr(query):
    filterQuery = parse.quote_plus(query)
    in_url = 'http://localhost:8983/solr/twitter_data/select?indent=on&q=' + filterQuery + '&rows=1000&wt=json'
    data = request.urlopen(in_url)
    docs = json.load(data)
    response = docs['response']['docs']
    output = list()
    source = list()
    person = list()
    org = list()
    topic_list = list()
    date_list = list()
    location = list()
    summary = list()
    for row in response:
        try:
            curr_summary = ""
            curr_per = ""
            curr_org = ""
            curr_date = ""
            curr_loc = ""
            curr_event_type = ""
            if 'Summary' in row.keys():
                curr_summary = row['Summary']
            if 'Person' in row.keys():
                curr_per = row['Person']
            if 'Organization' in row.keys():
                curr_org = row['Organization']
            if 'Date' in row.keys():
                dt = datetime.strptime(row['Date'][0],'%Y-%m-%dT%H:%M:%SZ').strftime('%d-%m-%Y')
                curr_date =[dt]
            if 'Location' in row.keys():
                curr_loc = row['Location']
            if 'Event_Type' in row.keys():
                curr_event_type = row['Event_Type']
            if 'Source' in row.keys():
                source.append(row['Source'])
            output.append([curr_event_type, curr_date, curr_loc, curr_per, curr_org, curr_summary])
        except Exception as ex:
            print(os.path.basename(__file__) + "::" + inspect.currentframe().f_code.co_name + " - " + str(ex))
            continue
    for row in output:
        date_list.append(row[1][0])    # assumes 'Date' was present for every row
        if len(row[2]) > 0:
            location.append(row[2][0])
        topic_list.append(row[0][0])   # assumes 'Event_Type' was present
        if len(row[3]) > 0:
            person.append(row[3][0])
        if len(row[4]) > 0:
            org.append(row[4][0])
        summary.append([row[5]])
    return output, source, date_list, person, org, topic_list, location, summary
Example No. 9
def home():
    apiData = request.urlopen(
        "http://api.openweathermap.org/data/2.5/weather?q=London&APPID=1d18700111907e62e27adc5fa89fad1a"
    )
    apiData = apiData.read()
    weather = json.loads(apiData)  # avoid shadowing the built-in dict
    icon = weather["weather"][0]["icon"]
    iconUrl = "http://openweathermap.org/img/w/" + icon + ".png"

    return render_template("home.html",
                           img=iconUrl,
                           expl=weather["weather"][0]["description"])
Example No. 10
def home():

    # currencylayer
    URL_ONE = 'http://apilayer.net/api/live?access_key=7f1092d7f907a899bb7d6196216bf2fa&currencies=USD,BRL,BTC,KRW,CNY,NZD&format=1'
    response = request.urlopen(URL_ONE).read()
    info = json.loads(response)
    finalSource = info['source']
    finalInfo = info['quotes']


    #xkcd (Theodore Peters)
    URL_TWO = 'http://xkcd.com/info.0.json'
    response = request.urlopen(URL_TWO).read()
    info = json.loads(response)
    finalImage = info['img']

    #Advice Slip (Daniel Gelfand)
    URL_THREE = 'https://api.adviceslip.com/advice'
    response = request.urlopen(URL_THREE).read()
    info = json.loads(response)
    finalAdvice = info['slip']['advice']

    return render_template("index.html", context=context, source=finalSource, curr=finalInfo, image=finalImage, advice=finalAdvice)
Example No. 11
def _get(urlString):
    """
    Internal method to convert a URL into its response (a *str*).

    :param str urlString: the url to request a response from
    :returns: the *str* response
    """
    if PYTHON_3:
        req = request.Request(urlString, headers=HEADER)
        response = request.urlopen(req)
        return response.read().decode('utf-8')
    else:
        req = urllib2.Request(urlString, headers=HEADER)
        response = urllib2.urlopen(req)
        return response.read()
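The helper above depends on module-level names (PYTHON_3, HEADER and, on Python 2, urllib2) that the snippet does not show. A minimal sketch of plausible definitions, with an assumed placeholder User-Agent:

import sys
PYTHON_3 = sys.version_info[0] >= 3
HEADER = {'User-Agent': 'python-client'}  # assumed placeholder; the real header may differ
if not PYTHON_3:
    import urllib2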
Example No. 12
def send_message_to_slack(text):
    from urllib import request
    import json

    post = {"text": "{0}".format(text)}

    try:
        json_data = json.dumps(post)
        req = request.Request(
            "https://hooks.slack.com/services/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",
            data=json_data.encode('ascii'),
            headers={'Content-Type': 'application/json'})
        resp = request.urlopen(req)
    except Exception as em:
        print("EXCEPTION: " + str(em))
Example No. 14
def send_message_to_slack(text):
    from urllib import request
    import json

    post = {"text": "{0}".format(text)}

    try:
        json_data = json.dumps(post)
        req = request.Request(
            "https://hooks.slack.com/services/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",
            data=json_data.encode('ascii'),
            headers={'Content-Type': 'application/json'})
        resp = request.urlopen(req)
    except Exception as em:
        print("EXCEPTION: " + str(em))
Example No. 15
def gerdistinctroutes():
    response = request.urlopen(
        'https://transitdata.phoenix.gov/api/vehiclepositions?format=json')
    string = response.read().decode('utf-8')
    parsedData = json.loads(string)

    timeStamp = parsedData['header']['timestamp']
    parsedTimeStamp = datetime.datetime.fromtimestamp(timeStamp).strftime(
        '%Y-%m-%d %H:%M:%S')

    match = parsedData['entity']

    s = {entity['vehicle']['trip']['route_id'] for entity in match}

    return {'time': parsedTimeStamp, 'distinctRoutes': sorted(list(s))}
Example No. 16
def send_message_to_slack(text: str):
    from urllib import request
    import json

    post = {"text": "{0}".format(text)}

    try:
        json_data = json.dumps(post)
        req = request.Request(
            "https://hooks.slack.com/services/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",
            data=json_data.encode('ascii'),
            headers={'Content-Type': 'application/json'})
        resp = request.urlopen(req)
    except Exception as em:
        print("EXCEPTION: " + str(em))
    output = {"input": text, "output": "sent message"}
    return json.dumps(output)
    send_message_to_slack("<text>")
Example No. 17
def read_config(path):
    parse_result = urlparse(path)

    if parse_result.scheme in ('http', 'https'):
        logging.debug('-- HTTP(s) based configuration file')
        response = request.urlopen(path)
        config_data = response.read().decode('utf-8')
    else:
        logging.debug('-- File based configuration file')
        with open(path) as configFile:
            config_data = configFile.read()

    return json.loads(config_data)
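A quick usage sketch for read_config (both paths shown are hypothetical):

# cfg = read_config('/etc/myapp/settings.json')           # local file
# cfg = read_config('https://example.com/settings.json')  # fetched over HTTP(S)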
Example No. 18
def tuling(text):
    userInfo = {
        'apiKey': '6d7cfb30e6d5463cbd5336cba25bbb6f',
        'userId': '382554'
    }
    inputText = {'text': text}
    selfInfo = {'location': {'city': '上海'}}  # Shanghai
    perception = {'inputText': inputText, 'selfInfo': selfInfo}
    dic = {'reqType': 0, 'perception': perception, 'userInfo': userInfo}
    req = request.Request('http://openapi.tuling123.com/openapi/api/v2')
    f = request.urlopen(req, json.dumps(dic).encode('utf-8'))
    dic = json.loads(f.read().decode('utf-8'))
    results = dic['results']
    if len(results) > 0:
        res = results[0]
        text = res['values']['text']
        return text
    return ''
Example No. 19
def alert(text):
    from urllib import request
    import json

    post = {"text": "{0}".format(text)}
    try:
        json_data = json.dumps(post)
        req = request.Request(
            "https://hooks.slack.com/services/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",
            data=json_data.encode('ascii'),
            headers={'Content-Type': 'application/json'})
        resp = request.urlopen(req)
        # Doubled braces escape literal { and } inside an f-string.
        return f'{{\n"input": {text},\n"output": True\n}}'
    except Exception as em:
        print("EXCEPTION: " + str(em))
        return f'{{\n"input": {text},\n"output": False\n}}'
Example No. 20
def stategraphdata():
    ur5 = "https://api.covid19india.org/csv/latest/case_time_series.csv"
    with request.urlopen(ur5) as response:
        x = pd.read_csv(response)

    # Sample every 15th row, plus the most recent one.
    rows = [x.iloc[i] for i in range(len(x)) if i % 15 == 0]
    rows.append(x.iloc[-1])
    sampled = pd.DataFrame(rows)

    sampled['Total Active'] = (sampled['Total Confirmed'] -
                               sampled['Total Deceased'] -
                               sampled['Total Recovered'])
    fin = sampled.loc[:, ('Date', 'Total Confirmed', 'Total Recovered',
                          'Total Deceased', 'Total Active')]
    fin = fin.reset_index(drop=True)
    # DataFrame.to_json already returns a JSON string, so don't json.dumps it again.
    return fin.to_json()
Example No. 21
def index():
    '''
    View root page function that returns the index page and its data
    '''
    response = request.urlopen('http://quotes.stormconsultancy.co.uk/random.json')

    if response.code == 200:
        read_data = response.read()

        json_object = json.loads(read_data.decode('UTF-8'))
        print(json_object)
        author = json_object['author']
        id = json_object['id']
        quote = json_object['quote']
        permalink = json_object['permalink']

        head = "Welcome to Blog"
        return render_template("index.html", head=head, author=author, id=id, quote=quote, permalink=permalink)

    return render_template('index.html')
Example No. 22
def get_news_data_solr(query):
    filterQuery = parse.quote_plus(query)
    in_url = 'http://localhost:8983/solr/news_data/select?indent=on&q=' + filterQuery + '&rows=100&wt=json'
    data = request.urlopen(in_url)
    docs = json.load(data)
    response = docs['response']['docs']
    output = list()
    coord = list()
    source = list()
    for row in response:
        try:
            curr_summary = ""
            curr_per = ""
            curr_org = ""
            curr_date = ""
            curr_loc = ""
            curr_event = ""
            if 'Summary' in row.keys():
                curr_summary = row['Summary']
            if 'Person' in row.keys():
                curr_per = row['Person']
            if 'Organization' in row.keys():
                curr_org = row['Organization']
            if 'Event_Date' in row.keys():
                curr_date = row['Event_Date']
            if 'Location' in row.keys():
                curr_loc = row['Location']
            if 'Event_Type' in row.keys():
                curr_event = row['Event_Type']
            if 'Coordinates' in row.keys():
                coord.append(row['Coordinates'])
            if 'Source' in row.keys():
                source.append(row['Source'])
            output.append([curr_event, curr_date, curr_loc, curr_per, curr_org, curr_summary])
        except Exception as ex:
            print(os.path.basename(__file__) + "::" + inspect.currentframe().f_code.co_name + " - " + str(ex))
            continue
    return output, coord, source
Example No. 23
def getweather1(latitude, longitude):
    import json
    from urllib import request
    temparray = []

    #darksky
    keyid = 'eb3cb3d158c5a78a3288451c725f618a'
    url = "https://api.darksky.net/forecast/" + keyid + "/" + latitude + "," + longitude
    response = request.urlopen(url)
    str_response = response.read().decode('utf-8')
    data = json.loads(str_response)
    celsius = int(((int(data["currently"]["temperature"]) - 32) * 5) / 9)
    #celsius = data["currently"]["temperature"]
    present_status = data["currently"]["icon"]
    future_status = data["minutely"]["icon"]
    #text = "accurate temperature in your area "+cityname+","+countryname+" is "+str(celsius)+"C and may be "+str(data["currently"]["summary"])
    temparray.append(celsius)
    #return text
    avgtemp = sum(temparray) / len(temparray)

    #degree_sign = '\u00b0'
    #text = "accurate temperature in your area "+cityname+","+countryname+" is "+str(avgtemp)+degree_sign+"C"
    return avgtemp, present_status, future_status
Example No. 24
def getTweets(query,city,topic):
    finalQuery= 'city:"'+city+'" AND topic:"'+topic+'" AND '+query
    filterQuery = parse.quote_plus(finalQuery)
    query = parse.quote_plus(query)
    in_url = 'http://127.0.0.1:8983/solr/Project4/select?facet.field=extended_tweet.entities.hashtags.text&facet=on&q=' + filterQuery + '&fl=text,extended_tweet.entities.hashtags.text&rows=20&wt=json'
    print(in_url)
    trend_in_url = 'http://127.0.0.1:8983/solr/Project4/select?facet.field=extended_tweet.entities.hashtags.text&facet.field=lang&facet.field=city&facet.field=topic&facet=on&q=' + query + '&fl=text&rows=1000&wt=json'
    data = request.urlopen(in_url)
    data_analysis=request.urlopen(trend_in_url)
    docs = json.load(data)
    docs_analysis=json.load(data_analysis)
    response=docs['response']['docs']
    hashtags=docs_analysis['facet_counts']['facet_fields']['extended_tweet.entities.hashtags.text']
    cities = docs_analysis['facet_counts']['facet_fields']['city']
    topics = docs_analysis['facet_counts']['facet_fields']['topic']
    languages = docs_analysis['facet_counts']['facet_fields']['lang']
    print(cities)
    print(topics)
    print(languages)
    fetched_data = docs_analysis['response']['docs']
    tweets = list()
    analysis=list()
    trending_hashtags = list()
    city_analysis = list()
    lang_analysis = list()
    topic_analysis = list()
    for doc in response:
        tweet = str(doc['text'])
        tweets.append(tweet[2:-2])  # strip the leading "['" and trailing "']"
    #getLang(fetched_data)
    #getCity(fetched_data)
    for tweet in fetched_data:
        parsed_tweet = dict()
        currTweet = tweet['text']
        # save the text of the tweet
        parsed_tweet['text'] = currTweet
        currTweetTxt = str(currTweet)
        currTweetTxt = currTweetTxt[2:-2]
        currTweetTxt = re.sub(r'(@[A-Za-z0-9]+)|(^https?:\/\/.*[\r\n]*)', ' ', currTweetTxt)
        # print(currTweetTxt)
        # try:
        #     translator = Translator()
        #     en=translator.translate(currTweetTxt,dest='en')
        #     for enTweet in en:
        #         currTweet=enTweet.text
        # except Exception as ex:
        #     print(ex)
        #     pass
        parsed_tweet['sentiment'] = get_tweet_sentiment(currTweet)
        analysis.append(parsed_tweet)
    index = 0
    city_analysis = ['', '', '', '', '']
    topic_analysis = ['', '', '', '', '']
    # Facet arrays alternate value, count, value, count, ...
    for city in cities:
        print(city)
        if index == len(cities) - 1:
            break
        if 'New York City' in str(city):
            city_analysis[0] = cities[index + 1]
        elif 'Mexico City' in str(city) or 'Mexico' in str(city):
            city_analysis[1] = cities[index + 1]
        elif 'Paris' in str(city):
            city_analysis[2] = cities[index + 1]
        elif 'Bangkok' in str(city):
            city_analysis[3] = cities[index + 1]
        elif 'New Delhi' in str(city):
            city_analysis[4] = cities[index + 1]
        index = index + 1
    index = 0
    for lang in languages:
        if index <= len(languages) - 1:
            if (int(languages[index + 1]) >= 1):
                lang_analysis.append(str(languages[index]))
            index += 2
        else:
            break
    index = 0
    for topic in topics:
        if index == len(topics) - 1:
            break
        if 'Social Unrest' in str(topic):
            topic_analysis[0] = topics[index + 1]
        elif 'infra' in str(topic) or 'Infra' in str(topic):
            topic_analysis[1] = topics[index + 1]
        elif 'environment' in str(topic) or 'Environment' in str(topic):
            topic_analysis[2] = topics[index + 1]
        elif 'crime' in str(topic) or 'Crime' in str(topic):
            topic_analysis[3] = topics[index + 1]
        elif 'politics' in str(topic) or 'Politics' in str(topic):
            topic_analysis[4] = topics[index + 1]
        index = index + 1
    index=0
    for hashtag in hashtags:
        if index <= len(hashtags) - 1:
            if (int(hashtags[index + 1]) >= 1):
                trending_hashtags.append('#' + str(hashtags[index]))
            index += 2
        else:
            break
    data_set = pd.DataFrame(trending_hashtags, columns=["Hashtag"])
    # Replace empty slots with 0 so the counts are numeric.
    topic_analysis = [0 if t == '' else t for t in topic_analysis]
    city_analysis = [0 if c == '' else c for c in city_analysis]
    print(topic_analysis)
    print(city_analysis)
    return trending_hashtags,tweets,analysis,data_set,city_analysis,lang_analysis,topic_analysis
Example No. 25
def diag_and_return(id):
    resp = request.urlopen('http://localhost:9004/diag_and_return/' + str(id))
    print(resp.read().decode())
Example No. 26
def read_url_to_json(url):
    import urllib.request as request
    webpage = request.urlopen(url)
    get_data = webpage.read()
    data = json.loads(get_data)
    return data
Example No. 27
def index():
    client = Client(app.config['API_KEY'], app.config['API_SECRET'])
    prices = client.get_all_tickers()

    url = 'http://data.fixer.io/api/latest?access_key=87fdd450a053ec762d421382f62b7ad7&symbols=USD,CNY'
    req = request.Request(url)
    res = request.urlopen(req)
    res = res.read()
    resjson = json.loads(res)
    usdrate = resjson["rates"]["USD"]
    cnyrate = resjson["rates"]["CNY"]
    fex = float(cnyrate / usdrate)

    mybtc = mydb.session.query(
        Btc.symbol,
        func.sum(Btc.amount).label('amount'),
        func.sum(Btc.cnycost).label('cnycost')).group_by(Btc.symbol).all()
    mybase = mydb.session.query(func.sum(Base.invest).label('invest')).first()
    mybaseinvest = mybase.invest

    portfolio = {}

    mybtclist = {}
    for symbol in prices:
        mybtclist[symbol['symbol']] = symbol['price']

    cnysum = float(0)
    for coin in mybtc:
        # Work out the coin's CNY price from whichever trading pair exists,
        # converting via ETH or BTC when there is no direct USDT pair.
        if coin.symbol + "USDT" in mybtclist:
            cnyprice = float(mybtclist[coin.symbol + "USDT"]) * fex
        elif coin.symbol + "ETH" in mybtclist:
            cnyprice = float(mybtclist[coin.symbol + "ETH"]) * float(
                mybtclist['ETHUSDT']) * fex
        else:
            cnyprice = float(mybtclist[coin.symbol + "BTC"]) * float(
                mybtclist['BTCUSDT']) * fex
        portfolio[coin.symbol] = [
            coin.amount, coin.cnycost,
            round(float(coin.cnycost) / float(coin.amount), 2), cnyprice,
            round(cnyprice, 2),
            round((cnyprice * float(coin.amount) - float(coin.cnycost)) /
                  (float(coin.cnycost)) * 100, 2)
        ]
        cnysum = cnysum + float(portfolio[coin.symbol][3] * float(coin.amount))

    gain = round((cnysum / float(mybaseinvest) - 1) * 100, 2)

    cnysumround = float('%.2f' % cnysum)
    mybaseinvestround = float('%.2f' % mybaseinvest)

    return render_template('index.html',
                           prices=prices,
                           fex=fex,
                           mybtc=mybtc,
                           portfolio=portfolio,
                           cnysumround=cnysumround,
                           mybaseinvestround=mybaseinvestround,
                           gain=gain)
Example No. 28
def is_valid(url):
    try:
        request.urlopen(url)
        return True
    except Exception:  # a bare except would also swallow KeyboardInterrupt
        return False
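A usage sketch (the URLs are hypothetical):

# is_valid('https://example.com')      -> True when the request succeeds
# is_valid('https://example.invalid')  -> False (the lookup fails)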