Example #1
def on_release(event):
    global HotkeyStatus, fatigueLevel
    if HotkeyStatus == 0:
        global List_of_Lists
        threading.Thread(target=Writer.rawfileWriter().write_rawfile,
                         args=(
                             'Data/RawKeystokes.tsv',
                             event,
                             'Released',
                             datetime.now(),
                             fatigueLevel,
                         )).start()
        for entry in List_of_Lists[:]:  # iterate over a copy so the removal below is safe
            if entry[0] == event:
                entry[2] = str(datetime.now())
                entry[3] = fatigueLevel
                thred = threading.Thread(target=Writer.Write().write_file,
                                         args=(
                                             entry,
                                             fileLocation,
                                         ))
                thred.start()
                List_of_Lists.remove(entry)
    elif HotkeyStatus == 1:
        List_of_Lists = []
Example #2
def CollectIOC(outFormat):
    url = "https://otx.alienvault.com/api/v1/pulses/activity"
    while True:  # page loop
        header = {
            'X-OTX-API-KEY':
            '<----------------API-KEY---------------------->'  # Write API Key
        }

        r = requests.get(url=url, headers=header)
        result = r.json()
        if r.status_code == 200:

            for i in range(0, len(result["results"])):  # results loop

                indicatorsLength = len(
                    result["results"][i]["indicators"])  # indicators loop
                for j in range(0, indicatorsLength):

                    if outFormat == 1:
                        Writer.jsonWriter(i, j, result)  # json output
                    else:
                        Writer.csvWriter(i, j, result)  # csv output

            if r.json()["next"] is None:  #next page control
                print("IOC Hunter Finished")  #finish
                break
            else:
                url = r.json()["next"]  # go to next page

        else:
            print("IOC Hunter Finished")  #finish
            break
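The collector above pages through the OTX feed by following the "next" URL until it is null, and outFormat selects between the two Writer backends. A minimal, hypothetical driver (it assumes the requests import, the project's Writer module, and a real API key pasted into the header placeholder):
if __name__ == "__main__":
    CollectIOC(1)  # 1 -> Writer.jsonWriter output; any other value -> Writer.csvWriter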
Example #3
 def __init__(self):
     self.root = Tk()
     self.north = Entry(self.root)
     self.direction = 0
     self.distance = 0
     self.pen_button = Button()
     self.color_button = Button()
     self.eraser_button = Button()
     self.north_button = Button()
     self.south_button = Button()
     self.west_button = Button()
     self.east_button = Button()
     self.square_button = Button()
     self.circle_button = Button()
     self.triangle_button = Button()
     self.up = Button()
     self.down = Button()
     self.clear_canvas = Button()
     self.choose_size_button = Scale()
     self.c = Canvas()
     self.x = 0
     self.y = 0
     self.entry = 0.0
     self.line_width = 1
     self.button = Button()
     self.north = Entry()
     self.entry = self.north.get()
     self.line_width = self.choose_size_button.get()
     self.pen_state = True
     # so it can be used in every method.
     self.file = Writer("TKInterDrawer_Result.txt")
Example #4
def open():
    display("     ≈≈ WELCOME TO ≈≈    ")
    print("")
    display("    ≈≈  D-CIPHERED!  ≈≈    ")
    print("")
    time.sleep(2)

    command = ''
    while command != "exit":
        print("•")
        print("-To create an Encrypted message type 'create' ")
        print("•")
        print("-To Decrypt a message type 'read' ")
        print("•")
        print("-Finally, To exit type 'exit'\n")
        command = input(">")
        if "create" in command.casefold():
            Writer.new(userName, key)
        elif "read" in command.casefold():
            print("One moment please while we fetch your top secret messages!")
            Reciver.mailQueue(userName, key)
            decrypt = Reciver.decipher()
            display(decrypt)
            #            time.sleep(len(decrypt)/4)        # uncomment this line to add a pause while your messages are displayed
            print("\n")
            con = input("when finished reading type anything to continue \n >")
        elif "exit" in command.casefold():
            sys.exit(0)
        else:
            print("I'm sorry, that doesn't seem to be a command.")
Example #5
File: RPC.py Project: redhog/Grimoire
 def write(self, msg):
     self.writelock.acquire()
     try:
         if debugWrite: print "Write:", msg
         Writer.write(self.wfile, Writer.contract(msg, self.extension.serialize))
         self.wfile.write('\n')
         self.wfile.flush()
     finally:
         self.writelock.release()
Example #6
def main2():
    person_tweets = []
    count = 0
    for f in os.listdir("/home/kevin/Documents/Epic/NER/Input/Gold/CrfGold/Person/"):
        count += 1
        print f
        print count
        person_tweets = Writer.load_tweets_crf_gold(f.split("/")[-1], "/home/kevin/Documents/Epic/NER/Input/Gold/CrfGold/", "Person")

        Writer.write_tweets_crf_gold(person_tweets, f, "/home/kevin/Documents/Epic/NER/Input/Gold/CrfGoldTest/", ["Person"])
Example #7
def run():

    global graph
    filename = "AISearchfile017.txt"
    global tour
    global length
    global pQueue
    global n

    distances = Parser.loadSearchFile(filename)  # Distance matrix
    n = len(distances)  # No. of nodes

    graph = Parser.createGraph(distances)

    pTour = PartialTourModG()  # Create the empty partial tour

    heapq.heappush(pQueue, pTour)  # Add initial partial tour to the frontier

    while (True):

        if not pQueue:  # Check if Frontier is Empty
            return

        else:

            pTour = heapq.heappop(pQueue)  # Get the most optimal pTour
            # outPath = []
            # outPath.extend(pTour.path)
            # print outPath
            # print "g: " + str(pTour.g) + " - h: " + str(pTour.h)
            # print

            if len(pTour.path) < (n - 1):  # If not a full tour

                if pTour.endNode == "":  # If pTour = []
                    children = [Node(str(i), 0) for i in range(n)]
                else:
                    children = graph.get(pTour.endNode)  # Get all children

                expand(pTour, children)  # Expand the frontier

            else:  # If a full tour, i.e ensuring goal node not expanded
                tour = pTour.path
                tour.append(pTour.endNode)
                tour = [str(int(i) + 1) for i in tour]  # start at 1 not 0

                for node in graph.get(str(int(tour[0]) - 1)):  # graph keys are still 0-based, hence the -1
                    if node.name == pTour.endNode:
                        length = pTour.lth + node.dist
                break

    Writer.writeTourFile(filename, n, length, tour)
    tourString = ",".join(str(t) for t in tour)
    print "Start Location: " + tour[0] + " - Distance: " + str(
        length) + " - Tour: " + tourString
Example #8
def Calucalte(type1):
    fileOperator = Writer("Scores.txt")
    scores = fileOperator.readFile()
    scores = removeX(scores)
    ScoresTuples = tuplize(scores)

    scoresSummation = 0
    unitsSummation = 0
    
    for scoresTuple in ScoresTuples:
        score = scoresTuple[0] * 5
        unit = scoresTuple[1]

        GPA = 0
        if (type1 == True):
            if (score >= 0  and  score < 60):
                GPA = 0
            elif (score >= 60  and  score < 67):
                GPA = 1
            elif (score >= 67  and  score <= 69):
                GPA = 1.3
            elif (score >= 70  and  score < 73):
                GPA = 1.7
            elif (score >= 73  and  score < 77):
                GPA = 2
            elif (score >= 77  and  score < 80):
                GPA = 2.3
            elif (score >= 80  and  score < 84):
                GPA = 2.7
            elif (score >= 84  and  score < 87):
                GPA = 3
            elif (score >= 87  and  score < 90):
                GPA = 3.3
            elif (score >= 90  and  score < 94):
                GPA = 3.7
            elif (score >= 94  and  score <= 100):
                GPA = 4.0

            
        print("Normal: " + str(scoresTuple[0]) + "  |Score : " + str(score) + "  | Unit : " + str(unit) + " | GPA: " + str(GPA))
        scoresSummation += GPA * unit
        unitsSummation += unit
        
    return  scoresSummation/unitsSummation 
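A worked pass through the mapping above, assuming tuplize() yields (raw score, units) pairs as the loop implies: a raw score of 18 scales to 18 * 5 = 90 and earns a 3.7, a raw 14 scales to 70 and earns a 1.7, and the function returns the unit-weighted average of those GPA points:
gpa_points = [(3.7, 3), (1.7, 2)]  # (GPA, units) for raw scores 18 and 14
weighted = sum(g * u for g, u in gpa_points)
units = sum(u for _, u in gpa_points)
print(weighted / units)  # 2.9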
Example #9
    def run(self):
        print("[log]Generator start...")
        while (not globcfg.event.is_set()):
            print(
                "[log]currentRunThread: Reader= {readCount}, Writer= {writeCount}"
                .format(readCount=globcfg.currentRunThreadCount['Reader'],
                        writeCount=globcfg.currentRunThreadCount['Writer']))
            genterate_time = getRandomInterval(globcfg.lamGen)
            globcfg.generateTime_lock.acquire()
            globcfg.generate_time_globalCopy = genterate_time
            globcfg.generateTime_lock.release()
            globcfg.event.wait(genterate_time)
            choice = random.randint(0, 1)
            # generate a new thread

            if (choice):
                print("[log]Generate thread {number} : {name}".format(
                    number=globcfg.threadNumber, name="Reader"))
                self.gui.change_state("R", globcfg.threadNumber,
                                      self.gui.nowhere, self.gui.scheduling)
                globcfg.waitingList.append(
                    Reader.Reader(self.book, self.lock, globcfg.threadNumber,
                                  self.gui))  #new Reader
            else:
                print("[log]Generate thread {number} : {name}".format(
                    number=globcfg.threadNumber, name="Writer"))
                self.gui.change_state("W", globcfg.threadNumber,
                                      self.gui.nowhere, self.gui.scheduling)
                globcfg.waitingList.append(
                    Writer.Writer(self.book, self.lock, globcfg.threadNumber,
                                  self.gui))  #new Writer
            globcfg.threadNumber += 1
Example #10
 def __str__(self):
     try:
         stringWriter = Writer.StringWriter()
         self.writeDeclaration(stringWriter)
         return str(stringWriter)
     except:
         return self.name
Example #11
 def WriteGmadFiles(self):
     """ Write the gmad files for all tests in the Tests directory.
         """
     _os.chdir('Tests')
     for test in self._tests:
         writer = Writer.Writer()
         writer.WriteTests(test)
         if test.Component not in self._testNames:
             self._testNames[test.Component] = []
         self._testNames[test.Component].extend(writer._fileNamesWritten[test.Component])
     _os.chdir('../')
Example #12
def main():

    filename = "AISearchfile012.txt"
    distances = Parser.loadSearchFile(filename)  # Distance matrix
    numNodes = len(distances)
    graph = Parser.createGraph(distances)

    shortestLength = sys.maxsize
    shortestTour = []

    for i in range(numNodes):
        tour, length = nn(graph, numNodes, str(i))
        if length < shortestLength:
            shortestLength = length
            shortestTour = tour

    print shortestTour
    print shortestLength

    Writer.writeTourFile(filename, numNodes, shortestLength, shortestTour)
Example #13
def Check_Availability():
    if not path.exists(fileLocation):
        f = open(fileLocation, "w")
        f.close()
        firstList = ['key', 'keydown_time', 'keyup_time', 'fatigue level']
        thread1 = threading.Thread(target=Writer.Write().write_file,
                                   args=(
                                       firstList,
                                       fileLocation,
                                   ))
        thread1.start()
Example #14
    def on_save_entries( self, selection ):
        dialog = Gtk.FileChooserDialog(
                "Save generated names",
                None,
                Gtk.FileChooserAction.SAVE,
                (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_SAVE, Gtk.ResponseType.ACCEPT),
                )

        response = dialog.run()
        if response == Gtk.ResponseType.ACCEPT:
            filename = dialog.get_filename()
            def store_iterator():
                for row in self.store_:
                    yield( row[0], row[1] )
            Writer.writeLexiconFromIterator( filename, store_iterator() )
        elif response == Gtk.ResponseType.CANCEL:
            dialog.destroy()
            return None
        dialog.destroy()

        return filename
Example #15
def GenerateSubgraph(name, outputPath):
    s2file = path.join(outputPath, str(name)+'_s2.csv')
    s3file = path.join(outputPath, str(name)+'_s3.csv')
    s4file = path.join(outputPath, str(name)+'_s4.csv')

    Writer.outputGraph(s2file, follower, 2, name)
    Writer.outputGraph(s3file, follower, 3, name)
    Writer.outputGraph(s4file, follower, 4, name)
Example #16
def GenerateSubgraph(name, outputPath):
    s2file = path.join(outputPath, str(name) + '_s2.csv')
    s3file = path.join(outputPath, str(name) + '_s3.csv')
    s4file = path.join(outputPath, str(name) + '_s4.csv')

    Writer.outputGraph(s2file, follower, 2, name)
    Writer.outputGraph(s3file, follower, 3, name)
    Writer.outputGraph(s4file, follower, 4, name)
Example #17
 def __init__(self, fileName):
     self.fileName = fileName
     self.cfgfname = 'cfg.json'
     self.cfg = {}
     self.branches = {}
     self.out = Writer.Writer(
         os.path.join('output',
                      fileName[:-2] + '_tranformed.ll'))  # output file
     self.branchLabel = 0  #
     self.branch_blocks = {}
     self.isVisited = {}
     self.TB_st = {}
     self.FB_st = {}
     self.debug = False
Example #18
def write_entity_test(verbose):
    relative_file_path = "../../resources/sql/accountSchema.sql"

    file_contents = Reader.open_file(relative_file_path)
    parser = Reader.SQL_Parser(file_contents)
    table_name = parser.parse_table()
    table_attributes = parser.parse_attributes()

    t = Table.Table(table_name, file_contents, table_attributes)

    w = Writer.Entity_Writer(t)

    w.write_entity("some_file")

    return 0
Example #19
def on_press(event):
    global HotkeyStatus, fatigueLevel
    if HotkeyStatus == 0:
        setFatigueLevel()
        threading.Thread(target=Writer.rawfileWriter().write_rawfile,
                         args=(
                             'Data/RawKeystokes.tsv',
                             event,
                             'Pressed',
                             datetime.now(),
                             fatigueLevel,
                         )).start()
        Check_Availability()
        global List_of_Lists
        if not isExist(List_of_Lists, event):
            List_of_Lists.append(list([event, str(datetime.now()), '', '']))
Example #20
def main():
    theInputPath = os.getcwd()
    theOutputPath = os.path.join(os.getcwd(), 'Output')

    os.makedirs(theOutputPath)

    #	theDocumentPath = os.path.join(theInputPath, 'WallE.dae')
    #	theDocumentPath = os.path.join(theInputPath, 'Samples/Cylinder.dae')
    #	theDocumentPath = os.path.join(theInputPath, 'Samples/F1.dae')
    theDocumentPath = os.path.join(theInputPath, 'Samples/Cube.dae')

    theTree = etree.parse(theDocumentPath)
    theRootElement = OneOrThrow(
        theTree.xpath("/NS:COLLADA", namespaces=Collada.Parser.NS))

    theParser = Collada.Parser()

    doc = theParser.DocumentFactory(None, None, theRootElement)
    doc.dump()

    for node in doc.walk():
        if isinstance(node, Collada.Source):
            print 'Source:', node.id, node.vbo.signature.hexdigest()
            node.vbo.write(theOutputPath)
        elif isinstance(node, Collada.Mesh):
            print 'Mesh:', node.id, node.indices.signature.hexdigest()
            if node.indices:
                node.indices.write(theOutputPath)

    theLibrary = []
    for l in doc.libraries:
        theLibrary.extend(l.children)

    theLibrary = [
        o for o in theLibrary if not isinstance(o, Collada.VisualScene)
    ]
    theLibrary = [o for o in theLibrary if o.id is not None]

    d = {
        'root': doc.scene.visualScene.resolve(),
        'library': dict([(o.id, o) for o in theLibrary]),
    }

    s = Writer.MyJSONEncoder(indent=2).encode(d)
    file(os.path.join(theOutputPath, 'Output.json'), 'w').write(s)
Example #21
def write():
    writer = Writer()
    argc = len(sys.argv)

    if argc < 4:
        print("Expecting data file(s) and output paths")
        return False

    for i in range(2, argc - 1):
        writer.load(sys.argv[i])
    writer.write(sys.argv[argc - 1])

    return True
Example #22
class ArgumentSourceReader(AbstractSourceReader):
    results = Writer("SourceReader_Result.txt")

    def go(self):
        result = ArgumentParser.parse(self, '')
        if result == 'g':
            print('graphics')
            self.results.writeToFile("Graphics")
        elif result == 't':
            self.results.writeToFile("Running Turtle Command")
            TurtlePrompt().cmdloop()
        elif result == 'k':
            self.results.writeToFile("Running TKInter Drawer")
            TkinterDrawer().start()
        elif result == 'e':
            self.results.writeToFile("Exiting program")
            exit()
        else:
            self.results.writeToFile(
                "Graphics from else as arguments were wrong")
            print('graphics')
Example #23
    def ParserMatriz(self, configuracion, matriz):
        error =''
        self.posicionLista = self.inicializarLista()
        for i in range (0,5):
            for j in range (0,4):
                self.posicionLista[i][j] = self.variablesGUI[ matriz[i][j] ]
        print (self.posicionLista)
        error = self.validarMatriz()
        if error == '':

            if configuracion == 'inicial':
                self.posicionInicial = copy.deepcopy(self.posicionLista)
                writer = Writer()  # write the configuration to the txt file
                writer.writeConfiguracion('inicial', self.posicionInicial)

            elif configuracion == 'final':
                self.posicionFinal = copy.deepcopy(self.posicionLista)
                writer = Writer()  # write the configuration to the txt file
                writer.writeConfiguracion('final', self.posicionFinal)
        return error
Example #24
def main():
    totalFile = 0
    students = Reader.__readStudentList__("CES3063_Fall2020_rptSinifListesi.XLS")
    if os.name == "posix":
        polls = Reader.__readAnswerFileNames__(os.getcwd() + "/Answers")
        Reader.__readPollFileNames__(os.getcwd() + "/Polls", students,totalFile)  # we will read student answers with this func
    elif os.name == "nt":
        polls = Reader.__readAnswerFileNames__(os.getcwd() + "\\Answers")
        Reader.__readPollFileNames__(os.getcwd() + "\\Polls", students, totalFile)  # we will read student answers with this func

    Analyzer.__findAttendancePolls__(students, "Are you attending this lecture")

    Analyzer.__findPollsAndChangeKey__(students, polls)

    Analyzer.__findStudentAnswers__(students, polls)

    Statistic.__answerCounts__(students, polls)
    Writer.__export__(students, polls)
    Writer.__globalFile__(students,polls)
    Writer.__Attendance__(students,polls)
Example #25
from ParSite import *
from Writer import *

if __name__ == '__main__':
    print("http://", input('http://'))
    http = ParSite()
    http.html_doc('http://')

    data = Writer()
    data.csv_writer(data)
    #data = link.get(get.html('https://33pingvina.ru'))
Example #26
    runValueIteration = ValueIteration.ValueIteration(stateList, actionList,
                                                      decayRate,
                                                      convergeThreshold,
                                                      maxIterationStep)
    createPolicyFromValue = ValueIteration.PolicyFromValue(
        stateList, actionList, decayRate)
    runQLearning = QLearning.QLearning(alpha, gamma, epsilon,
                                       segmentTotalNumber, stateList,
                                       actionList,
                                       transitionFromStateAndAction)

    print('finish setting function', time.time() - time0)
    trainWolfPolicy = TrainWolfPolicyValueIteration(stateList,
                                                    transitionProbabilityDict,
                                                    createRewardDict,
                                                    runValueIteration,
                                                    createPolicyFromValue)
    # trainWolfPolicy = TrainWolfPolicyQLearning(stateList, createRewardDict, runQLearning)
    wolfPolicy = trainWolfPolicy()
    # print(wolfPolicy)
    print('finish training policy', time.time() - time0)

    print('begin saving policy, please wait')
    Writer.savePolicyToPkl(wolfPolicy, savePolicyFilename)
    # Writer.savePolicyToNpy(wolfPolicy, savePolicyFilename)
    # Writer.savePolicyToJson(wolfPolicy, savePolicyFilename)
    print('finish saving policy, mission complete', time.time() - time0)

    # loadWolfPolicy=klepto.archives.file_archive(savePolicyFilename+'.json')
    # print(loadWolfPolicy.archive[((1,0),(0,1))])
Example #27
fout.close()
print "static finish"

# mutual retweets with the tweet's author are instance-related
start = time.time()
edge_feat_file = path.join(trainPath, 'edge_cotreweet.csv')
f = open(edge_feat_file, 'w')
for node1, node2 in relations:
    feat_list = []
    for ID in tweet_list:
        authorID = Processor.GetAuthor(ID, tweet_info_path)
        edge_feat = Processor.GetEdgeFeat(node1, node2, authorID, mutual_path, tweet_info_path)
        if edge_feat > 0:
            feat_list += ["%s:%s" % (ID, edge_feat)]
    f.write("%s_%s,%s\n" % (node1, node2, ",".join(feat_list)))
f.close()

print time.time()-start, " complete edge feature extraction, start node feature extraction"

nodes = [int(line.strip()) for line in open(nodefile)]
#user only feature are static, other features are associated with instance
tag=''
for node in nodes:
    pool = []
    node_feat_file = path.join(nodeFeatPath, str(node)+'.txt')
    for ID in tweet_list:
        tag, node_feat = Collect.GetTrainData(node, ID, tag, follower_dict, tweet_info_path, user_path, mutual_path)
        pool += [node_feat]
    Writer.outputFeature(node_feat_file, pool)

Example #28
def GenerateSubgraph(name, outputPath):
    s2file = path.join(outputPath, str(name)+'_s2.csv')
    s3file = path.join(outputPath, str(name)+'_s3.csv')
    s4file = path.join(outputPath, str(name)+'_s4.csv')

    Writer.outputGraph(s2file, follower, 2, name)
    Writer.outputGraph(s3file, follower, 3, name)
    Writer.outputGraph(s4file, follower, 4, name)

follower = defaultdict(list)
rank_list = []
for line in open(networkfile):
    linelist = line.strip().split()
    user = int(linelist[0])
    if len(linelist) < 2:
        continue

    follower[user] = [int(item) for item in linelist[1].split(',')]
    rank_list.append( (user, len(follower[user])) )

rank_list = sorted(rank_list, key=lambda item:item[1], reverse=True)

allfile = path.join(outputPath, 'all_1000.csv')
Writer.outputGraph(allfile, follower, -1)

select = [0, 100, 200, 300, 400, 500, 600, 700, 800, 900]
for rank in select:
    print rank_list[rank]
    GenerateSubgraph(rank_list[rank][0], outputPath)

Example #29
class Domain(object):
    '''
    variable:
        self.width 
        self.height
        self.nCellsX 
        self.nCellsY 
        self.X 
        self.Y 
        self.hx        # cell size in x-direction
        self.hy        # cell size in y-direction
        self.nodes 
        self.cells 
        self.particles

        self.analysisControl
        self.plotControl
        self.outputControl

        self.Re
        self.v0
        self.time = 0.0

        self.plot
        self.writer

        self.motion                 ... manufactured solution for testing
        self.particleUpdateScheme   ... the timeIntegrator

        self.lastWrite    ... time of the last output
        self.lastPlot     ... time of the last plot

        self.recordParticleTrace = False
    
    methods:
        def __init__(self, width=1., height=1., nCellsX=2, nCellsY=2)
        def __str__(self)
        def setTimeIntegrator(self, integrator)
        def setMotion(self, motion)
        def setBoundaryConditions(self)
        def setPlotInterval(self, dt)
        def setWriteInterval(self, dt)
        def setAnalysis(self, doInit, solveVstar, solveP, solveVtilde, solveVenhanced, updatePosition, updateStress, addTransient)
        def getAnalysisControl(self)
        def setInitialState(self)
        def setParameters(self, Re, density, velocity)
        def runAnalysis(self, maxtime=1.0)
        def runSingleStep(self, dt=1.0)
        def initStep(self)
        def solveVstar(self, dt)
        def solveP(self, dt)
        def solveVtilde(self, dt)
        def solveVenhanced(self, dt)
        def updateParticleStress(self)
        def updateParticleMotion(self)
        def findCell(self, x)
        def createParticles(self, n, m)     # Default particle creator that generates particles in all cells
        def createParticlesMID(self, n, m)  # Particle creator that generates particle only in the middle cell
        def createParticleAtX(self, mp, xp) # Particle creator that generates a single particle of mass mp at position xp 
        def getTimeStep(self, CFL)
        def plotData(self)
        def writeData(self)
        def setNodalMotion(self, time=0.0)
        def particleTrace(self, OnOff)      # turn particle trace on and off
        def computeCellFlux(self)
    '''

    def __init__(self, width=1., height=1., nCellsX=2, nCellsY=2):
        '''
        Constructor
        '''
        self.width   = width
        self.height  = height
        self.nCellsX = nCellsX
        self.nCellsY = nCellsY
        
        self.hx = width/nCellsX        # cell size in x-direction
        self.hy = height/nCellsY       # cell size in y-direction
        
        self.time = 0.0

        self.recordParticleTrace = False
        
        #self.X = outer(ones(nCellsY+1), linspace(0.0, width, nCellsX+1))
        #self.Y = outer(linspace(0.0, height, nCellsY+1), ones(nCellsX+1))
        
        x = linspace(0,width ,(nCellsX+1))
        y = linspace(0,height,(nCellsY+1))
        
        self.X, self.Y = meshgrid(x, y, indexing='xy')
        
        self.Re  = 1.0
        self.rho = 1.0
        self.v0  = 0.0
        self.motion = None
        self.particleUpdateScheme = ExplicitEuler()
        
        self.nodes = [ [ None for j in range(self.nCellsY+1) ] for i in range(self.nCellsX+1) ]
        id = -1
        
        for i in range(nCellsX+1):
            for j in range(nCellsY+1):
                id += 1
                theNode = Node(id,x[i],y[j])
                theNode.setGridCoordinates(i,j)
                self.nodes[i][j] = theNode
                
        self.cells = []
        id = -1
        hx = width / nCellsX
        hy = height / nCellsY
        
        for i in range(nCellsX):
            for j in range(nCellsY):
                id += 1
                newCell = Cell(id, hx, hy)
                newCell.setCellGridCoordinates(i, j)
                theNodes = []
                theNodes.append(self.nodes[i][j])
                theNodes.append(self.nodes[i+1][j])
                theNodes.append(self.nodes[i+1][j+1])
                theNodes.append(self.nodes[i][j+1])
                newCell.SetNodes(theNodes)
                self.cells.append(newCell)
        
        self.setParameters(self.Re, self.rho, self.v0)
        
        self.particles = []

        # set default analysis parameters
        self.setAnalysis(False, True, True, True, False, True, True, True)

        # set default plot parameters
        self.plotControl   = {'Active':False, 'DelTime':-1 }
    
        self.plot = Plotter()
        self.plot.setGrid(width, height, nCellsX, nCellsY)
        self.lastPlot = self.time

        # set default output parameters
        self.outputControl = {'Active':False, 'DelTime':-1 }

        self.writer = Writer()
        self.writer.setGrid(width, height, nCellsX, nCellsY)
        self.lastWrite = self.time
        
    def __str__(self):
        s = "==== D O M A I N ====\n"
        s += "Nodes:\n"
        for i in range(self.nCellsX+1):
            for j in range(self.nCellsY+1):
                s += str(self.nodes[i][j]) + "\n"
        s += "nCells:\n"
        for cell in self.cells:
            s += str(cell) + "\n"
        return s

    def particleTrace(self, OnOff):
        self.recordParticleTrace = OnOff
        for particle in self.particles:
            particle.trace(OnOff)

    def setTimeIntegrator(self, integrator):
        self.particleUpdateScheme = integrator

    def setMotion(self, motion):
        self.motion = motion

    def setPlotInterval(self, dt):
        self.plotControl['DelTime'] = dt
        self.plotControl['Active'] = (dt >= 0)

    def setWriteInterval(self, dt):
        self.outputControl['DelTime'] = dt
        self.outputControl['Active'] = (dt >= 0)
        
    def setBoundaryConditions(self):
        
        nCellsX = self.nCellsX
        nCellsY = self.nCellsY
        
        # define fixities
        for i in range(nCellsX+1):
            self.nodes[i][0].fixDOF(1, 0.0)
            self.nodes[i][nCellsY].fixDOF(1, 0.0)
            
            #self.nodes[i][0].fixDOF(0, 0.0)             # fully fixed
            #self.nodes[i][nCellsY].fixDOF(0, 0.0)       # fully fixed
            
            if (i>0 and i< nCellsX+1):
                self.nodes[i][nCellsY].fixDOF(0, self.v0)
        for j in range(nCellsY+1):
            self.nodes[0][j].fixDOF(0, 0.0)
            self.nodes[nCellsX][j].fixDOF(0, 0.0)
            
            #self.nodes[0][j].fixDOF(1, 0.0)             # fully fixed
            #self.nodes[nCellsX][j].fixDOF(1, 0.0)       # fully fixed       


    def setAnalysis(self, doInit, solveVstar, solveP, solveVtilde, solveVenhanced, updatePosition, updateStress, addTransient):

        self.analysisControl = {
            'doInit':doInit,
            'solveVstar':solveVstar,
            'solveP':solveP,
            'solveVtilde':solveVtilde,
            'solveVenhanced':solveVenhanced,
            'updatePosition':updatePosition,
            'updateStress':updateStress,
            'addTransient':addTransient
            }

        for cell in self.cells:
            # cell.setEnhanced(True)
            cell.setEnhanced(solveVenhanced)

        if (doInit and updatePosition and addTransient):
            print("INCONSISTENCY WARNING: transient active with updatePosition && doInit ")
        
    def getAnalysisControl(self):
        return self.analysisControl
    
    def setParameters(self, Re, density, velocity):
        
        if (self.width < self.height ):
            L = self.width
        else:
            L = self.height
            
        viscosity = density * velocity * L / Re
        
        self.Re  = Re
        self.rho = density
        self.v0  = velocity
        self.mu  = viscosity
            
        self.setBoundaryConditions()
        
        for cell in self.cells:
            cell.setParameters(density, viscosity)
       
    def setInitialState(self):
        for nodeList in self.nodes:
            for node in nodeList:
                node.wipe()
        
        for cell in self.cells:
            cell.mapMassToNodes()
        
        # initial condition at nodes define v*, not v
        for i in range(self.nCellsX+1):
            for j in range(self.nCellsY):
                self.nodes[i][j].setVelocity(zeros(2))
            self.nodes[i][self.nCellsY].setVelocity(array([self.v0 ,0.0]))
        # fix the top corner nodes
        self.nodes[0][self.nCellsY].setVelocity(zeros(2))
        self.nodes[self.nCellsX][self.nCellsY].setVelocity(zeros(2))
        
        # now find pressure for a fictitious time step dt = 1.0
        self.solveP(1.0)
        
        # compute \tilde v
        self.solveVtilde(1.0)
        
        # initial conditions are now set
        self.plotData()
        self.writeData()

    def setState(self, time):

        self.time = time

        for nodeList in self.nodes:
            for node in nodeList:
                node.setVelocity(zeros(2))
        
        for cell in self.cells:
            cell.mapMassToNodes()

        self.setNodalMotion(time)

        ###self.time += dt   # WHAT IS THAT FOR ?????
            
    def runAnalysis(self, maxtime=1.0):
        
        # find ideal timestep using CFL
        dt = self.getTimeStep(0.5)
        if (dt > (maxtime - self.time)):
            dt = (maxtime - self.time)
        if (dt < (maxtime - self.time)):
            nsteps = ceil((maxtime - self.time)/dt)
            if (nsteps>50):
                nsteps= 50
            dt = (maxtime - self.time) / nsteps

        while (self.time < maxtime-0.1*dt):
            self.runSingleStep(self.time, dt)
            self.time += dt

            if self.plotControl['Active']:
                # check if this is a plot interval
                if self.time > (self.lastPlot + self.plotControl['DelTime'] - 0.5*dt) :
                    self.plotData()
                    self.lastPlot = self.time

            if self.outputControl['Active']:
                # check if this is an output interval
                if self.time > (self.lastWrite + self.outputControl['DelTime'] - 0.5*dt) :
                    self.writeData()
                    self.lastWrite = self.time

    def runSingleStep(self, time=0.0, dt=1.0):

        t = process_time()
        
        if (self.analysisControl['doInit']):
            self.initStep()
        if (self.analysisControl['solveVstar']):
            self.solveVstar(dt, self.analysisControl['addTransient'])
        if (self.analysisControl['solveP']):
            self.solveP(dt)
        if (self.analysisControl['solveVtilde']):
            self.solveVtilde(dt)
        if (self.analysisControl['solveVenhanced']):
            self.solveVenhanced(dt)
        if (self.analysisControl['updatePosition']):
            self.updateParticleMotion(dt)
        if (self.analysisControl['updateStress']):
            self.updateParticleStress()
            
        elapsed_time = process_time() - t
        print("starting at t_n = {:.3f}, time step \u0394t = {}, ending at t_(n+1) = {:.3f} (cpu: {:.3f}s)".format(time, dt, time+dt, elapsed_time))

    def initStep(self):
        # reset nodal mass, momentum, and force
        for nodeList in self.nodes:
            for node in nodeList:
                node.wipe()
            
        # map mass and momentum to nodes
        for cell in self.cells:
            # cell.mapMassToNodes()  # for particle formulation only
            cell.mapMomentumToNodes()

    def solveVstar(self, dt, addTransient=False):
        # compute nodal forces from shear
        for i in range(self.nCellsX+1):
            for j in range(self.nCellsY+1):
                self.nodes[i][j].setForce(zeros(2))
                
        for cell in self.cells:
            cell.computeForces(addTransient)
        
        # solve for nodal acceleration a*
        # and update nodal velocity to v*
        for i in range(self.nCellsX+1):
            for j in range(self.nCellsY+1):
                self.nodes[i][j].updateVstar(dt)

    def solveP(self, dt):
        ndof = (self.nCellsX+1)*(self.nCellsY+1)
        
        # sparse outperforms dense very quickly for this problem.
        # I lowered this number to 100 and might want to switch to sparse entirely.
        if ndof <= 100:
            useDense = True
        else:
            useDense = False
        
        # assemble matrix and force 
        if (useDense):
            # use dense matrix
            self.FP = zeros(ndof)
            self.KP = zeros((ndof,ndof))
        else:
            # use sparse matrix
            self.FP = zeros(ndof)
            KP = matrixDataType(ndof)
        
        for cell in self.cells:
            ke = cell.GetStiffness()
            fe = cell.GetPforce(dt)
            nodeIndices = cell.getGridCoordinates()
            dof = [ x[0] + x[1]*(self.nCellsX+1)   for x in nodeIndices ]
            
            if (useDense):
                # use dense matrix
                for i in range(4):
                    self.FP[dof[i]] += fe[i]
                    for j in range(4):
                        self.KP[dof[i]][dof[j]] += ke[i][j]
            else:
                # use sparse matrix
                for i in range(4):
                    self.FP[dof[i]] += fe[i]
                    for j in range(4):
                        KP.add(ke[i][j],dof[i],dof[j]) 
                        
        # apply boundary conditions
        i = self.nCellsX // 2
        dof = i + self.nCellsY*(self.nCellsX+1)
        
            
        if (useDense):
            # use dense matrix
            self.KP[dof][dof] = 1.0e20
            self.FP[dof] = 0.0
        else:
            # use sparse matrix
            KP.add(1.0e20, dof, dof)
            self.FP[dof] = 0.0
            
        # solve for nodal p
        if (useDense):
            # use dense matrix
            pressure = solve(self.KP, self.FP)
        else:
            # use sparse matrix
            self.KP = KP.toCSCmatrix()
            pressure = spsolve(self.KP, self.FP)
        
        # assign pressure to nodes
        for i in range(self.nCellsX+1):
            for j in range(self.nCellsY+1):
                dof = i + j*(self.nCellsX+1)
                self.nodes[i][j].setPressure(pressure[dof])
                
        #print(pressure)

    def solveVtilde(self, dt):
        for i in range(self.nCellsX+1):
            for j in range(self.nCellsY+1):
                # compute nodal pressure gradient
                if ( i==0 or i==self.nCellsX ):
                    dpx = 0.0
                else:
                    dpx = 0.5*(self.nodes[i+1][j].getPressure()-self.nodes[i-1][j].getPressure())/self.hx
                
                if ( j==0 or j==self.nCellsY ):
                    dpy = 0.0
                else:
                    dpy = 0.5*(self.nodes[i][j+1].getPressure()-self.nodes[i][j-1].getPressure())/self.hy
        
                # update nodal velocity
                dv = -dt/self.rho * array([dpx,dpy])
                self.nodes[i][j].addVelocity(dv)

    def solveVenhanced(self, dt):
        for cell in self.cells:
            # initialize the divergence terms in the cell
            cell.SetVelocity()

    def updateParticleStress(self):
        pass

    def updateParticleMotion(self, dt):
        # this is the Butcher tableau
        a = dt*self.particleUpdateScheme.get_a()  # time factors
        b = dt*self.particleUpdateScheme.get_b()  # position factors
        c = dt*self.particleUpdateScheme.get_c()  # update factors        
        tn = self.time
        
        for p in self.particles:

            kI = []
            fI = []
            Dv = []
            
            dF  = identity(2)
            xn1 = p.position()
            Nsteps = len(a)
            
            try:
                for i in range(Nsteps):
                    xi = p.position()
                    f  = identity(2)      
                    
                    for j in range(i):
                        if (b[i][j] != 0.):
                            xi += b[i][j] * kI[j]
                            f  += b[i][j] * dot(Dv[j], fI[j])
                            
                    cell = self.findCell(xi)

                    # this line represents the MPM-style interpolation of the velocity field
                    kI.append(cell.GetVelocity(xi) + a[i] * cell.GetApparentAccel(xi))

                    # the following line uses the analytic expression for the motion. It yields the proper accuracy
                    #kI.append(self.motion.getVel(xi, self.time + a[i]))

                    Dv.append(cell.GetGradientV(xi) + a[i]*cell.GetGradientA(xi))
                    
                    fI.append(f)
                    
                    # particle position
                    xn1 += c[i] * kI[-1]
                    #incremental deformation gradient
                    dF  += c[i] * dot(Dv[-1], fI[-1])
                    

                # update particle position ...
                p.addToPosition(xn1 - p.position())
                
                # update particle velocity ...
                cell = self.findCell(xn1)
                vel  = cell.GetVelocity(xn1) + dt*cell.GetApparentAccel(xn1)

                p.setVelocity(vel)
                
                # update the deformation gradient ...
                p.setDeformationGradient(dot(dF, p.getDeformationGradient()))

            except CellIndexError as e:
                print(e)
                raise e

    def findCell(self, x, testCell=None):
        if (testCell != None  and  testCell.contains(x)):
            return testCell
        
        # find a cell that contains x
        i = np.int_((x[0] - 0.0) / self.hx)
        j = np.int_((x[1] - 0.0) / self.hy)

        if (i<0):
            i = 0
        if (i>self.nCellsX-1):
            i = self.nCellsX -1
        if (j<0):
            j = 0
        if (j>self.nCellsY-1):
            j = self.nCellsY -1
            
        k = self.nCellsY * i + j  # cells are appended j-fastest within each i (see the constructor)

        try:
            cell = self.cells[k]
        except IndexError:
            raise CellIndexError((i,j,k,x))

        # this is used for safety check but will slow down multi-particle simulations
        if not cell.contains(x):
            print("warning: particle position ({},{}) outside cell {}".format(*x, cell.id))
        
        return cell
    
    def createParticles(self, n, m):
        for cell in self.cells:
            h = cell.getSize()
            mp = self.rho,h[0]*h[1]/n/m
            
            for i in range(n):
                s = -1. + (2*i+1)/n
                for j in range(m):
                    t = -1. + (2*j+1)/m
                    xl = array([s,t])
                    xp = cell.getGlobal(xl)
                    newParticle = Particle(mp,xp)
                    self.particles.append(newParticle)
                    cell.addParticle(newParticle)

    def createParticlesMID(self, n, m):
        for cell in self.cells:
            if ( cell.getID() != int( (self.nCellsX) * (self.nCellsY) / 2) - int(self.nCellsY/2.0) ):
                continue
            # print(cell.getID())
            h = cell.getSize()
            mp = self.rho,h[0]*h[1]/n/m
            
            for i in range(n):
                s = -1. + (2*i+1)/n
                # s = 0.5
                for j in range(m):
                    t = -1. + (2*j+1)/m
                    # t = 0.5
                    xl = array([s,t])
                    xp = cell.getGlobal(xl)
                    # print(xp)
                    newParticle = Particle(mp,xp)
                    self.particles.append(newParticle)
                    cell.addParticle(newParticle)        
                    
    
    def createParticleAtX(self, mp, xp):     # Particle creator that generates a single particle at position X
        newParticle = Particle(mp,xp)
        self.particles.append(newParticle)
        cell = self.findCell(xp)
        if (cell):
            cell.addParticle(newParticle)
    
    def getTimeStep(self, CFL):
        dt = 1.0e10
        
        for nodeList in self.nodes:
            for node in nodeList:
                vel = node.getVelocity()
                if (abs(vel[0]) > 1.0e-5):
                    dtx = self.hx / abs(vel[0])
                    if (dtx<dt):
                        dt = dtx
                if (abs(vel[1]) > 1.0e-5):
                    dty = self.hy / abs(vel[1])
                    if (dty<dt):
                        dt = dty

        return dt*CFL

    def plotData(self):
        self.computeCellFlux()
        self.plot.setCellFluxData(self.cells)
        self.plot.setData(self.nodes)
        self.plot.setParticleData(self.particles)
        self.plot.refresh(self.time)

    def writeData(self):
        self.writer.setData(self.nodes)
        self.writer.setParticleData(self.particles)
        self.writer.writeData(self.time)

    def setNodalMotion(self, time=0.0):

        # set nodal velocity field
        for rowOfNodes in self.nodes:
            for node in rowOfNodes:
                x = node.getPosition()  # x is the Eulerian nodal position
                node.setVelocity( self.motion.getVel(x, time) )
                node.setApparentAccel( self.motion.getDvDt(x, time) )

        for cell in self.cells:
            cell.SetVelocity()

    def getParticles(self):
        return self.particles

    def setTime(self, time):
        self.time = time

    def plotParticleTrace(self, filename):
        plotter = ParticleTracePlot()
        plotter.setDomain(0.0, 0.0, self.width, self.height)

        particleTraceList = []
        if self.recordParticleTrace:
            for particle in self.particles:
                pDict = {}
                pDict['node'] = particle.id
                pDict['path'] = array(particle.getTrace())
                particleTraceList.append(pDict)

        plotter.addTraces(particleTraceList)
        plotter.setGridNodes(self.nodes)
        plotter.exportImage(filename)


    def computeCellFlux(self):
        for theCell in self.cells:
            cellSize = theCell.getSize()
            nodeIndices = theCell.GetNodeIndexes()
            node00 = self.nodes[nodeIndices[0][0]][nodeIndices[0][1]]
            node10 = self.nodes[nodeIndices[1][0]][nodeIndices[1][1]]
            node11 = self.nodes[nodeIndices[2][0]][nodeIndices[2][1]]
            node01 = self.nodes[nodeIndices[3][0]][nodeIndices[3][1]]
            leftFlux   =  0.5 * (node00.getVelocity() + node01.getVelocity()) @ array([-1.,  0.]) * cellSize[0]
            rightFlux  =  0.5 * (node10.getVelocity() + node11.getVelocity()) @ array([ 1.,  0.]) * cellSize[0]
            topFlux    =  0.5 * (node11.getVelocity() + node01.getVelocity()) @ array([ 0.,  1.]) * cellSize[1]
            bottomFlux =  0.5 * (node00.getVelocity() + node10.getVelocity()) @ array([ 0., -1.]) * cellSize[1]

            theCell.setFlux(leftFlux + rightFlux + topFlux + bottomFlux)
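For context, a minimal driver sketch based only on the methods shown in this example (Node, Cell, Plotter, Writer and ExplicitEuler come from the same project and are assumed to be importable); it routes results to the Writer every 0.1 time units:
dom = Domain(width=1.0, height=1.0, nCellsX=4, nCellsY=4)
dom.setParameters(Re=100.0, density=1.0, velocity=1.0)  # also (re)applies the boundary conditions
dom.setPlotInterval(-1)      # a negative interval disables plotting
dom.setWriteInterval(0.1)    # Writer output every 0.1 time units
dom.setInitialState()        # solves an initial pressure/velocity field and writes it
dom.runAnalysis(maxtime=1.0)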
Example #30
def GenerateSubgraph(name, outputPath):
    s2file = path.join(outputPath, str(name) + '_s2.csv')
    s3file = path.join(outputPath, str(name) + '_s3.csv')
    s4file = path.join(outputPath, str(name) + '_s4.csv')

    Writer.outputGraph(s2file, follower, 2, name)
    Writer.outputGraph(s3file, follower, 3, name)
    Writer.outputGraph(s4file, follower, 4, name)


follower = defaultdict(list)
rank_list = []
for line in open(networkfile):
    linelist = line.strip().split()
    user = int(linelist[0])
    if len(linelist) < 2:
        continue

    follower[user] = [int(item) for item in linelist[1].split(',')]
    rank_list.append((user, len(follower[user])))

rank_list = sorted(rank_list, key=lambda item: item[1], reverse=True)

allfile = path.join(outputPath, 'all_1000.csv')
Writer.outputGraph(allfile, follower, -1)

select = [0, 100, 200, 300, 400, 500, 600, 700, 800, 900]
for rank in select:
    print rank_list[rank]
    GenerateSubgraph(rank_list[rank][0], outputPath)
Example #31
                  param=param,
                  datapath=args.directory,
                  multiopt="many")
# Now read in as desired
# def readDataMany(self,skip=0,step=1,howmany='all',Nvariable=False,readtypes = 'all'):
Cornea.readDataMany("SAMoS",
                    args.skip,
                    args.step,
                    args.howmany,
                    True,
                    readtypes=[1, 2])
#def __init__(self,directory,conffile,skip,howmany,ignore=True,maxtype=3):

data = {'configuration': args.conffile}

write = Writer()
output = True

nav = 0
nbin = 100

data['nbin'] = nbin
data['directory'] = args.directory
data['skip'] = args.skip
data['howmany'] = args.howmany
data['step'] = args.step
data['average'] = args.average
data['coneangle'] = args.coneangle

# Histogram of swirling
v_swirlhist = np.zeros((args.howmany - args.average, nbin))
Example #32
    def write(self):
        #First I write the license, comments etc..
        #then the includes and finally the members,
        #in the same order in which they are contained
        #inside self.members

        if FileDumper.developer_name or FileDumper.developer_email:
            copyright = '(c) ' + FileDumper.developer_name + ', ' + FileDumper.developer_email
        else:
            copyright = ''

        fileHnd = open(self.name, 'wt')
        printOnFile('/***************************************************************************\\', fileHnd)
        printOnFile(' *', fileHnd)
        for line in FileDumper.banner.split('\n'):
            printOnFile(' *   ' + line, fileHnd)
        printOnFile(' *', fileHnd)
        printOnFile(' *', fileHnd)
        for line in FileDumper.license_text.split('\n'):
            printOnFile(' *   ' + line, fileHnd)
        printOnFile(' *', fileHnd)
        printOnFile(' *', fileHnd)
        for line in copyright.split('\n'):
            printOnFile(' *   ' + line, fileHnd)
        printOnFile(' *', fileHnd)
        printOnFile('\\***************************************************************************/\n\n', fileHnd)

        # Now I can start printing the actual code: let's create the writer
        writer = Writer.CodeWriter(fileHnd)
        if self.isHeader:
            writer.write('#ifndef ' + self.name.replace('.','_').upper() + '\n')
            writer.write('#define ' + self.name.replace('.','_').upper() + '\n')
        # as a first thing I compute the includes and print them
        for member in self.members:
            try:
                for include in member.getIncludes():
                    if include and not include in self.includes:
                        self.includes.append(include)
            except AttributeError:
                pass
        if self.includes:
            writer.write('\n')
        # Note that I have to add a dirty hack to always put systemc.h in the
        # last position among the includes, otherwise it might create problems
        # when compiling
        foundSysC = False
        for include in self.includes:
            include = include.lstrip()
            if 'systemc.h' in include:
                foundSysC = True
            else:
                if include.startswith('#'):
                    writer.write(include + '\n')
                elif include != self.name:
                    writer.write('#include <' + include + '>\n')
        if foundSysC:
            writer.write('#include <systemc.h>\n')
        writer.write('\n')
        # Now I simply have to print in order all the members
        for member in self.members:
            if self.isHeader:
                try:
                    member.writeDeclaration(writer)
                except AttributeError:
                    pass
            else:
                try:
                    member.writeImplementation(writer)
                except AttributeError:
                    pass
        if self.isHeader:
            writer.write('\n\n#endif')
        writer.write('\n')
        fileHnd.close()
Example #33
    # (or any n) from the .csv file.
    dec = int(nStocks / 10)
    topDecile = []

    # Store temporary momentums from top decile for sorting reasons
    moms = [o.mom for o in stocks[:dec]]

    # Sort top decile by momentum
    for i in range(dec):
        # Get index of top momentum performer in top decile
        topMomInd = moms.index(max(moms))
        # Sort
        topDecile.append(stocks[topMomInd])
        # Remove top momentum performer from further consideration
        moms[topMomInd] = -100

    print('Saving stocks...')

    # Save momentum-weighted top decile
    topCsvPath = 'top.csv'
    Writer.writeCSV(topCsvPath, topDecile)

    # Save results to .csv
    allCsvPath = 'stocks.csv'
    Writer.writeCSV(allCsvPath, stocks)

    print('\n')
    print('Complete.')
    print('Top decile (sorted by momentum) saved to: ' + topCsvPath)
    print('All stocks (sorted by trending value) saved to: ' + allCsvPath)
Example #34
c.executemany('INSERT INTO temp_track (trackID) VALUES (?)',new_tup)

c.execute('SELECT count(albumID) FROM ID_album')
n_albumIDs = [i for i in c][0][0]
album_pop = range(1,n_albumIDs)
album_sample = random.sample(album_pop,200)
new_tup = [ ( i,) for i in album_sample]

c.execute('DROP TABLE IF EXISTS temp_album')
c.execute('CREATE TABLE temp_album (albumID)')
c.executemany('INSERT INTO temp_album (albumID) VALUES (?)',new_tup)

con.commit()
# instantiate writer track

wr=Writer(category='track',user='******',db_path=db_path,db_lock=db_lock)
print 'writer instantiated'
# playlist selection is limited to 5000 tracks and takes around 5 min to assemble

wr.limit(rank_by='plays_track_total',topX=2, con=con, asc=False)

sort_by = ['date_artist','date_track','i_score_artist','i_score_track']
asc     = [ True        , True       , False          , False]

wr.sort( sort_by=sort_by, asc=asc, con=con )

wr.write_playlist('test_track',path, con=con)

# instantiate writer album

wr=Writer(category='album',user='******',db_path=db_path,db_lock=db_lock)
Example #35
        data = newmodels.Models(data)
        joblib.dump(data, "trained.data")
        print 'Done with stage:', stage
        stage = 'test'

    if stage == 'test':
        models = joblib.load("trained.data", "r")
        testData = newparser.InsultParser(testingDataFile, isTestDataLabelled)
        evaluation = Evaluate.Evaluate(testData, models)
        ypred = evaluation.predictions
        print ypred
        if isTestDataLabelled == True:
            ytrue = testData.y
            print len(ytrue), len(ypred)
            fpr, tpr, thresholds = sklearn.metrics.roc_curve(ytrue, ypred)
            print fpr, tpr, thresholds
            print sklearn.metrics.auc(fpr, tpr)

            incorrect = Evaluate.crossValidate(ypred, ytrue)
            for key in incorrect:
                print key+2, ypred[key], ytrue[key], testData.X[key]
            Evaluate.crossValidate(evaluation.mainModelPredictions, ytrue)
            #incorrect=Evaluate.crossValidate(evaluation.invIdxModelPredictions, ytrue)
            #Evaluate.crossValidate(evaluation.distanceBasedClassifier, ytrue)

        else:
            Writer.writeCSVFile('insult_output.csv', ypred, testingDataFile)
        print 'Done with stage:', stage
        stage = 'done'

Example #36
        tweet_feat, authorID = Processor.GetTweetFeature(tweetfile)

        authorfile = path.join(user_path, str(authorID)+'.txt')
        author_feat = Processor.GetAuthorFeature(authorfile)

        userfile = path.join(user_path, str(userID)+'.txt')
        user_feat = Processor.GetUserFeature(userfile)

        relation_feat = Processor.GetRelationFeature(int(userID), int(authorID), follower_dict, follow_dict, mutual_path)
        #instance: label, feat_list
        train_pool += [(tag, tweet_feat+author_feat+user_feat+relation_feat[0:2])]

    return train_pool

if path.exists(retweet_list_file):
    positive_list = Reader.LoadID(retweet_list_file)
    all_list = Reader.LoadID(all_retweet_file)
    negative_list = list(set(all_list)-set(positive_list))

    follow_dict, follower_dict = Reader.LoadNetwork(networkfile)

    print "%s positive: %s, negative: %s" % (userID, len(positive_list), len(negative_list))

    pos_pool = GetTrainData(positive_list, '1')
    neg_pool = GetTrainData(negative_list, '0')
    print len(pos_pool), len(neg_pool)

    Writer.outputData(trainfile, pos_pool+neg_pool)
else:
    print "no retweets ", userID
Example #37
stocks = [x for (y, x) in sorted(zip(rankOverall, stocks))]

# Sort top decile by momentum factor
dec = int(nStocks / 10)
topDecile = []

# Store temporary momentums from top decile
moms = [o.mom for o in stocks[:dec]]

for i in range(dec):
    # Get index of top momentum performer in top decile
    topMomInd = moms.index(max(moms))
    # Sort
    topDecile.append(stocks[topMomInd])
    # Remove top momentum performer from further consideration
    moms[topMomInd] = -100

print('Saving stocks...')

# Save momentum-weighted top decile
csvpath = 'top.csv'
Writer.writeCSV(csvpath, topDecile)

# Save results to .csv
csvpath = 'stocks.csv'
Writer.writeCSV(csvpath, stocks)

print('\n')
print('Complete.')
print('Results saved to ' + csvpath)
Example #38
    # (or any n) from the .csv file.
    dec = int(nStocks / 10)
    topDecile = []

    # Store temporary momentums from top decile for sorting reasons
    moms = [o.mom for o in stocks[:dec]]

    # Sort top decile by momentum
    for i in range(dec):
        # Get index of top momentum performer in top decile
        topMomInd = moms.index(max(moms))
        # Sort
        topDecile.append(stocks[topMomInd])
        # Remove top momentum performer from further consideration
        moms[topMomInd] = -100

    print('Saving stocks...')

    # Save momentum-weighted top decile
    topCsvPath = 'top.csv'
    Writer.writeCSV(topCsvPath, topDecile)

    # Save results to .csv
    allCsvPath = 'stocks.csv'
    Writer.writeCSV(allCsvPath, stocks)

    print('\n')
    print('Complete.')
    print('Top decile (sorted by momentum) saved to: ' + topCsvPath)
    print('All stocks (sorted by trending value) saved to: ' + allCsvPath)
Example #39
    def __init__(self, width=1., height=1., nCellsX=2, nCellsY=2):
        '''
        Constructor
        '''
        self.width   = width
        self.height  = height
        self.nCellsX = nCellsX
        self.nCellsY = nCellsY
        
        self.hx = width/nCellsX        # cell size in x-direction
        self.hy = height/nCellsY       # cell size in y-direction
        
        self.time = 0.0

        self.recordParticleTrace = False
        
        #self.X = outer(ones(nCellsY+1), linspace(0.0, width, nCellsX+1))
        #self.Y = outer(linspace(0.0, height, nCellsY+1), ones(nCellsX+1))
        
        x = linspace(0,width ,(nCellsX+1))
        y = linspace(0,height,(nCellsY+1))
        
        self.X, self.Y = meshgrid(x, y, indexing='xy')
        
        self.Re  = 1.0
        self.rho = 1.0
        self.v0  = 0.0
        self.motion = None
        self.particleUpdateScheme = ExplicitEuler()
        
        self.nodes = [ [ None for j in range(self.nCellsY+1) ] for i in range(self.nCellsX+1) ]
        id = -1
        
        for i in range(nCellsX+1):
            for j in range(nCellsY+1):
                id += 1
                theNode = Node(id,x[i],y[j])
                theNode.setGridCoordinates(i,j)
                self.nodes[i][j] = theNode
                
        self.cells = []
        id = -1
        hx = width / nCellsX
        hy = height / nCellsY
        
        for i in range(nCellsX):
            for j in range(nCellsY):
                id += 1
                newCell = Cell(id, hx, hy)
                newCell.setCellGridCoordinates(i, j)
                theNodes = []
                theNodes.append(self.nodes[i][j])
                theNodes.append(self.nodes[i+1][j])
                theNodes.append(self.nodes[i+1][j+1])
                theNodes.append(self.nodes[i][j+1])
                newCell.SetNodes(theNodes)
                self.cells.append(newCell)
        
        self.setParameters(self.Re, self.rho, self.v0)
        
        self.particles = []

        # set default analysis parameters
        self.setAnalysis(False, True, True, True, False, True, True, True)

        # set default plot parameters
        self.plotControl   = {'Active':False, 'DelTime':-1 }
    
        self.plot = Plotter()
        self.plot.setGrid(width, height, nCellsX, nCellsY)
        self.lastPlot = self.time

        # set default output parameters
        self.outputControl = {'Active':False, 'DelTime':-1 }

        self.writer = Writer()
        self.writer.setGrid(width, height, nCellsX, nCellsY)
        self.lastWrite = self.time
Example #40
File: MTest.py Project: jpparajeles/IA-1
final=Tower(
[['n','n','n','e'],
['a','v','A','r'],
['a','v','A','r'],
['a','v','A','r'],
['a','v','A','r']])

# same as here
easy=Tower( #0.5s
[['n','n','n','e'],
['a','a','v','r'],
['r','v','A','A'],
['a','v','A','r'],
['a','v','A','r']])

# this function must be called
result = busqueda(easy,final)

for elem in result:
    try:
        print(elem.description)
        printBeauty(elem.tower.matrix) # should be the function that writes to files
        #printBeauty(elem.Modelo.matrix)
    except:
        printBeauty(easy.matrix) # initial state # should be the function that writes to files
    print() #\n


writer = Writer()

writer.writeSolution(easy,result)
Example #41
                        print bestLength
                        print bestTour
                        print bestBeta
                        print bestOpt
                        print bestRand
                        print bestSuccess

        bestTours[f] = bestTour
        bestLengths[f] = bestLength
        bestBetas[f] = bestBeta
        bestOpts[f] = bestOpt
        bestRands[f] = bestRand
        bestSuccesses[f] = bestSuccess

        print bestTours
        print
        print bestLengths
        print
        print bestBetas
        print
        print bestOpts
        print
        print bestRands
        print
        print bestSuccesses

        Writer.writeTourFile(FILENAME, numNodes, bestLength,
                             bestTour)  #CHANGE TO BESTLENGTH, BESTTOUR

    #main()
Example #42
recordPath='/home/yipei/Twitter/FeatureExtraction/data/author'
tweetPath='/home/yipei/Twitter/FeatureExtraction/data/tweets'

for line in open(filelist):
    filepath = line.strip()
    filename = path.basename(filepath)
    author = path.splitext(filename)[0]

    retweetfile = path.join(recordPath, filename)
    retweet_list = []
    if path.exists(retweetfile):
        retweet_list = [int(line.strip()) for line in open(retweetfile)]
    
    #print "retweetfile: ", retweetfile, len(retweet_list)

    user_info, history = Reader.CollectInfo(filepath)
    if len(user_info.items())==0:
        print "skip ", line.strip()
        continue

    tweet_dict = {}
    if len(retweet_list)>0:
        tweet_dict = Reader.CollectTweet(filepath, retweet_list)

    outputfile = path.join(outputPath, filename)
    Writer.outputInfo(user_info, history, outputfile)

    if len(tweet_dict.items())>0:
        Writer.outputTweet(tweet_dict, author, tweetPath)