Example #1
File: model.py Project: will-iam/Variant
    '''
    # Plot the curve against the prediction (plotting block disabled)
    cx = fig.add_subplot(111)
    cx.plot(X, Y, "+")
    cx.plot(X, X, "--")
    '''

    plt.show()

    return B


# Define the study case
filterDict = {'nSizeX': 512, 'nSizeY': 512}
resource = 64
data = parser.getData(filterDict)
if not len(data):
    print("Aucune données.")
    sys.exit(1)
#pprint(data)

# Estimate Tc from a single operating point.
expectTc = chargeEstimate(data, resource)
print("E[Tc] = %s milliseconds per (iteration x cell number)" % (expectTc))

# Estimate the Ts model parameters from several cases
greeks = greeksEstimate(data, resource)
print("Greeks: ", greeks)

# Estimate Ts for each operating point.
expectTsDict = synchronizeEstimate(data, resource)
Example #2
    arizonaPercipitation = parser.monsoon_masterCSV + "/arizona_precipitation_master.csv"
    main_pathPercipitation = parser.monsoon_percipitation
else:
    arizonaPercipitation = parser.local_masterCSV + "/arizona_precipitation_master.csv"
    main_pathPercipitation = parser.local_percipitation

# Change the HTML data into lists stored in dictionaries
# whose keys are years
percipitationDataFrame = pd.read_csv(arizonaPercipitation)["0"]
mtUnion, magma, lakePleasant = dict(), dict(), dict()
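# Read each file listed in the master CSV and parse its precipitation readings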
for filePath in percipitationDataFrame:
    fileName, filePath = parser.split_path(filePath)
    with open(filePath, "r") as p:
        tempHTML = p.readlines()

    tempData = parser.getData(tempHTML)
    for dataLine in tempData:
        dataString = ""
        date, time, inches = parser.parseLine(dataLine)
        year = int(date.split("/")[2])
        dataString += "{}, {}, {}".format(date, time, inches)

        if "lake" in fileName:
            try:
                lakePleasant[year].append(dataString)
            except KeyError:
                lakePleasant[year] = [dataString]
        elif "magma" in fileName:
            try:
                magma[year].append(dataString)
            except KeyError:
                magma[year] = [dataString]
Example #3
import MySQLdb
import sys
import parser

if len(sys.argv) < 3:
    print('Usage: provide the MySQL user and password as command-line arguments')
    sys.exit(1)

fname = input('')

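# Connect to the local MySQL database with the supplied credentials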
db = MySQLdb.connect("localhost", sys.argv[1], sys.argv[2], "gslproject_basic")

cs = db.cursor()

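# Parse the transaction rows from the input file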
data = parser.getData(fname)

insert = (
    "INSERT INTO `Transactions` (`sid`, `date`, `time`, `size`, `price`) "
    "VALUES (%s, %s, %s, %s, %s)")

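# Insert each parsed record using a parameterised statement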
for d in data:
    cs.execute(insert, d)
    #print(d)
#print(cs.execute("SELECT * FROM Transactions"))
db.commit()

print('{} records inserted from: {}'.format(len(data), fname))
Example #4
async def getCovidData():
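    # Return the nationwide dataset under the "Turkiye Geneli" ("Turkey overall") key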
    data = getData()
    return {"Turkiye Geneli" : data}
Example #5
def main():
    from sys import argv
    myargs = getopts(argv)
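    # Options parsed from the command line; each value comes back as a list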

    url = myargs.get('url', None)
    pxurl = myargs.get('pxurl', None)

    _pxselctor = myargs.get('pxselector', [])
    pxlimit = myargs.get('pxlimit', [3])
    pxscrolllimit = myargs.get('pxscrolllimit', [0])

    data = myargs.get('data')
    dataselector = myargs.get('dataselector', [None])
    datacommon = myargs.get('datacommon', [])
    #pdb.set_trace()
    datascrolllimit = myargs.get('datascrolllimit', [0])
    datalimit = myargs.get('datalimit', [10])
    threads = myargs.get('threads', [10])

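    # Numeric options arrive as strings, so cast them to ints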
    if pxscrolllimit: pxscrolllimit = [int(x) for x in pxscrolllimit]
    if datascrolllimit: datascrolllimit = [int(x) for x in datascrolllimit]
    if datalimit: datalimit = [int(x) for x in datalimit]
    if threads: threads = [int(x) for x in threads]

    debug = myargs.get('debug', False)

    # We make pxselector more powerful:
    # --pxselctor div1.a,div2.a,next:div3.a
    pxselctor = []
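    # Split each spec on commas into content selectors plus an optional "next:" page selector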
    for p in _pxselctor:
        now = {'selector': [], 'next': None}
        d = [x.strip() for x in p.split(',') if x.strip()]
        for d1 in d:
            if 'next:' in d1:
                now['next'] = d1.replace('next:', '')
            else:
                now['selector'].append(d1)
        pxselctor.append(now)
    print pxselctor

    #pdb.set_trace()
    action = myargs.get('action', ['print'])[0]

    ans = 'Not able to get Ans'
    if not url and not pxurl:
        print '\n\n    Error: You must have --url or --pxurl'
        desc()
        sys.exit(0)
    elif not data:
        print 'Error: You must have --data or we just print the urls'
        desc()
        #sys.exit(0)
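    # Fetch the data: directly from --url, or by crawling the pages reached from --pxurl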
    if url:
        ans = parser.getData({}, debug, url[0], data, dataselector[0],
                             datacommon, datalimit[0], datascrolllimit[0],
                             threads[0])
    elif pxurl:
        assert (pxselctor is not None)
        ans = parser.getPXData({}, debug, pxurl[0], pxselctor, pxlimit[0],
                               pxscrolllimit[0], data, dataselector[0],
                               datacommon, datalimit[0], datascrolllimit[0],
                               threads[0])
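    # Handle the chosen action: print a table, save a pickle, or do nothing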
    if action == 'print':
        sys.stdout.write(RESET)
        #print ans
        from tabulate import tabulate
        print tabulate([x.values() for x in ans],
                       headers=ans[0].keys(),
                       tablefmt='fancy_grid')
        print 'Total entry found:', len(ans)
    elif action == 'save':
        print 'Saving file...'
        import pickle
        pickle_out = open("data.pickle", "wb")
        pickle.dump(ans, pickle_out)
        pickle_out.close()
    else:
        print 'No action required'
        pass