Example #1
def select_search_tables():
    search_results = glob('{}/search_*'.format(JSON_DIR))
    folder_len = len(JSON_DIR)
    data = {
        'Last Updated': [],
        'LSAT': [],
        'GPA': [],
        'User Name': [],
        'Year': []
    }
    sw = StopWatch()
    for filename in search_results:
        year = filename[folder_len:].split('_')[1]
        with open(filename, 'rb') as fp:
            table = json.load(fp)
        for key in table:
            data[key] += table[key]
        data['Year'] += [year] * len(table['GPA'])
    df = pd.DataFrame(data)
    sw.tic('generate search table')
    return df
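
Several examples in this listing (#1, #3, #4, #6, #13, #18) drive the timer through sw.tic(label). The class itself is not shown here; a minimal sketch consistent with that usage, assuming tic() reports the time elapsed since construction or the previous tic(), might look like:

import time

class StopWatch(object):
    # Hypothetical reconstruction of the tic()-style StopWatch used above.
    def __init__(self):
        self._last = time.time()

    def tic(self, label):
        # Report how long the phase since the last checkpoint took,
        # then start timing the next phase.
        now = time.time()
        print('{}: {:.3f}s'.format(label, now - self._last))
        self._last = now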
Example #2
    def waitforcond(self, conditionfn, conditionargs, timeout):
        sw = StopWatch()
        origtimeout = timeout

        result = None
        timeout = _CondTimeout(timeout)
        with self._lock:
            while (not self._eof) and (not timeout.expired()):
                # Evaluate the condition while holding the lock
                result = conditionfn(*conditionargs)
                if result is not None:
                    break
                self._cond.wait(timeout.next())

        if result is None:
            self._log.debug("Waiting timed out after %s" % (str(sw)))
        else:
            prc = 100 * (sw.stop() / origtimeout)
            self._log.debug("Waiting successful after %s (%.0f%%)" %
                            (str(sw), prc))
        return result
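
This snippet depends on a _CondTimeout helper that is not shown. A minimal sketch consistent with how it is called, with expired() checking an overall deadline and next() bounding each Condition.wait(), offered as an assumption rather than the original class:

import time

class _CondTimeout(object):
    def __init__(self, timeout, max_interval=1.0):
        # max_interval is a guess: wake periodically so expired() is rechecked.
        self._deadline = time.time() + timeout
        self._max_interval = max_interval

    def expired(self):
        return time.time() >= self._deadline

    def next(self):
        # Never block past the deadline, and never return a negative wait.
        return max(0.0, min(self._max_interval, self._deadline - time.time()))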
Example #3
def select_extra_curricular_information_tables():
    user_applications = glob(
        os.path.join(JSON_DIR, 'user_*_extra_curricular_information.json'))
    folder_len = len(JSON_DIR)
    user_names = [
        '_'.join(s[folder_len:].split('_')[1:-3]) for s in user_applications
    ]
    sw = StopWatch()
    print len(user_applications)

    lists = []
    for i, filename in enumerate(user_applications):
        if (i + 1) % 10000 == 0:
            print i
        with open(filename, 'rb') as fp:
            table = json.load(fp)
            line = [user_names[i], table['text']]
            lists.append(line)
    df = pd.DataFrame(lists, columns=['User Name', 'text'])
    df = df.rename(columns={'text': 'extra curricular'})
    sw.tic('generating extra curricular information')
    return df
Example #4
def select_search_tables():
    search_results = glob(os.path.join(JSON_DIR, 'search_*.json'))
    folder_len = len(JSON_DIR)
    data = {
        'Last Updated': [],
        'LSAT': [],
        'GPA': [],
        'User Name': [],
        'Year': []
    }
    sw = StopWatch()
    for filename in search_results:
        year = filename[folder_len:].split('_')[1]
        with open(filename, 'rb') as fp:
            table = json.load(fp)
        for key in table:
            data[key] += table[key]
        data['Year'] += [year] * len(table['GPA'])
    data['User Name'] = [remove_special_char(s) for s in data['User Name']]
    df = pd.DataFrame(data)
    sw.tic('generate search table')
    return df
Example #5
def main():
	count = 1000000

	#begin stopwatch module 
	runTime = StopWatch()
	runTime.start()
	endTimeCount = 0
	for number in range(1, count + 1):
		endTimeCount += number 
	#end the stopwatch 
	runTime.stop()

	#display results; getStartTime()/getEndTime() are assumed accessors --
	#calling start()/stop() again here would overwrite the recorded times
	print("The elapsed time it took to count by 1 to 1000000, which began at", runTime.getStartTime())
	print("and ended at", runTime.getEndTime())
	print("is", runTime.getElapsedTime(), "milliseconds")
Example #6
def select_user_tables():
    user_applications = glob(os.path.join(JSON_DIR, 'user_*_application.json'))
    folder_len = len(JSON_DIR)
    user_names = [
        '_'.join(s[folder_len:].split('_')[1:-1]) for s in user_applications
    ]
    user_stats = np.zeros((len(user_applications), 4))
    date_cols = ['Complete', 'Decision', 'Received', 'Sent']
    sw = StopWatch()
    print len(user_applications)
    for i, filename in enumerate(user_applications):
        if (i + 1) % 10000 == 0:
            print i
        with open(filename, 'rb') as fp:
            table = json.load(fp)
            n_applications = float(len(table['Sent']))
            if n_applications == 0:
                continue
            for j, col_name in enumerate(date_cols):
                has_date = len([s for s in table[col_name] if s != '--'])
                user_stats[i, j] = has_date / n_applications
    sw.tic('generating stats')
    return user_names, user_stats
Example #7
def iothub_client_sample_run():
    global gMQTTClient
    try:
        iotHubStopWatch = StopWatch(AZURE_REPORT_TIME)
        iotHubStopWatch.reset()
        client = iothub_client_init()

        if client.protocol == IoTHubTransportProvider.MQTT:
            print ( "IoTHubClient is reporting state" )
            reported_state = "{\"newState\":\"standBy\"}"
            client.send_reported_state(reported_state, len(reported_state), 
              send_reported_state_callback, SEND_REPORTED_STATE_CONTEXT)

        count = 1
        while True:
            if iotHubStopWatch.isExpired():
                global gSendMessage
                if gSendMessage:
                    message = IoTHubMessage("JimsFridgeStatus")

                    message.message_id = "message_%d" % count
                    message.correlation_id = "correlation_%d" % count

                    prop_map = message.properties()
                    prop_map.add("FreezerDoor", str(gFreezerDoor))
                    prop_map.add("FreezerTemp", str(gFreezerTemp))
                    prop_map.add("FridgeDoor",  str(gFridgeDoor))
                    prop_map.add("FridgeTemp",  str(gFridgeTemp))
                    prop_map.add("BoredTime",   str(gBoredTime))
                    prop_map.add("LockoutTime", str(gLockoutTime))
                    prop_map.add("DoorTime",    str(gDoorTime))
                    prop_map.add("LastTweet",   urllib.quote(gLastTweet))
                    prop_map.add("WebTweet",    gWebTweet)

                    client.send_event_async(message, send_confirmation_callback, count)
                    count += 1

                    status = client.get_send_status()
                    iotHubStopWatch.reset()
            gMQTTClient.loop()
            time.sleep(0.01)

    except IoTHubError as iothub_error:
        print ( "Unexpected error %s from IoTHub" % iothub_error )
        return
    except KeyboardInterrupt:
        print ( "IoTHubClient sample stopped" )

    print_last_message_time(client)
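
This example's StopWatch is a countdown flavor: constructed with a period (AZURE_REPORT_TIME) and polled through isExpired()/reset(). A plausible minimal version, assuming the period is in seconds:

import time

class StopWatch(object):
    # Hypothetical countdown stopwatch: isExpired() becomes True once
    # `period` seconds have passed since construction or the last reset().
    def __init__(self, period):
        self._period = period
        self._start = time.time()

    def reset(self):
        self._start = time.time()

    def isExpired(self):
        return (time.time() - self._start) >= self._period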
Example #8
# Tail of the StopWatch class; the class header and the start() setter
# implied by the usage below are reconstructed for completeness.
class StopWatch:
    def start(self, ht):
        self.startTime = ht

    def stop(self, ht):
        self.endTime = ht

    def get_elapsed_time(self):
        return self.endTime - self.startTime
    
    def getStartTime(self):
        return self.startTime

    def getEndTime(self):
        return self.endTime
    

from StopWatch import StopWatch
size = 1000000
stopWatch = StopWatch()
stopWatch.start(size)
sum = 0
for i in range(1, size + 1):
    sum += i
    
stopWatch.stop(sum)
print("The loop time is", stopWatch.get_elapsed_time(), "milliseconds")

#exercise 7
#a) Yes, statement 3 is still executed in that case.
#b) No, the statement is not executed in that case.
#c) Yes, statement 4 is still executed in that case.

#exercise 8
#a) 
Example #9
File: CEV.py Project: tani3010/Finance
class CEV(BaseWienerItoChaosExpansion):
    def __init__(self):
        super().__init__()
        self.model_name = 'CEV'
        self.r0 = Symbol('r0', positive=True)
        self.gam = Symbol('gamma', positive=True)

    def r(self, t=Symbol('t', positive=True)):
        # return self.r0
        return 0

    def sigma(self, s=Symbol('s', positive=True), v=Symbol('v',
                                                           positive=True)):
        return v * s**(self.gam - 1)

    def theta(self, t=Symbol('t', positive=True)):
        return 0

    def kappa(self, t=Symbol('t', positive=True)):
        return 0

    def gamma(self, v=Symbol('v', positive=True)):
        return 0


if __name__ == '__main__':
    sw = StopWatch()
    proc = CEV()
    proc.show_model_spec()
    print(proc.print_in_r(proc.f_St(simplify=False)))
    sw.show_elapsed_time()
Example #10
import pandas as pd
from sklearn.datasets import load_digits
from sklearn.linear_model import Perceptron
from StopWatch import StopWatch
X, y = load_digits(return_X_y=True)
ST = StopWatch()
clf = Perceptron(tol=1e-3, random_state=0)
ST.Reset()
clf.fit(X, y)
ST.RP()
clf.score(X, y)
ST.Print()
print(' Perceptron Accuracy for Digits : ' + str(clf.score(X, y)*100))

from sklearn import svm

clf_SVM = svm.SVC(gamma=0.001)
ST.Reset()
clf_SVM.fit(X, y)
ST.RP()
clf_SVM.score(X, y)
ST.Print()
print(" SVC Accuarcy for Digits : "+ str(clf_SVM.score(X, y)*100))

from sklearn.svm import LinearSVC

clf_SVML = LinearSVC(random_state=0, tol=1e-5)
ST.Reset()
clf_SVML.fit(X, y)
ST.RP()
Example #11
class ComputingNode:
    def __init__(self, cn_id, start, length):
        self.id = cn_id
        self.batch_size = 200
        self.num_epochs = 10
        self.train_dataset, self.train_labels, self.valid_dataset, self.valid_labels, self.test_dataset, self.test_labels = open_cifar10_dataset(
            start, length)
        gpu_config = gpu_split(len(cluster_spec['cn']))
        self.tensorgraph = CNN(gpu_config)
        self.tensorgraph_shape = self.tensorgraph.get_configure()

        # establish connection with parameter server to acquire store service
        self.ps = init_conn(cluster_spec['ps'][0]['IP'],
                            cluster_spec['ps'][0]['Port'])
        self.sw = StopWatch()
        self.logging = True

    def run(self):
        if not len(self.train_dataset) % self.batch_size == 0:
            raise ValueError('Batch size error')
        all_batch_data = [
            self.train_dataset[x:x + self.batch_size]
            for x in xrange(0, len(self.train_dataset), self.batch_size)
        ]
        all_batch_label = [
            self.train_labels[x:x + self.batch_size]
            for x in xrange(0, len(self.train_labels), self.batch_size)
        ]
        self.update_parameters()
        st = time.time()
        for step in range(self.num_epochs):
            self.training(all_batch_data, all_batch_label)
            if step % 1 == 0:
                self.validating()
        et = time.time()
        if self.logging:
            self.write_log(et - st)
        self.terminate()

    def terminate(self):
        self.sw.present()

    def training(self, all_batch_data, all_batch_label):
        for i in range(len(all_batch_data)):
            # compute the gradients
            self.sw.reset()
            gradients = self.tensorgraph.get_gradients(all_batch_data[i],
                                                       all_batch_label[i])
            self.sw.accumulate('compute_gradients')
            # update the gradients to the ps
            self.upload_gradients(gradients)
            self.update_parameters()

    def validating(self):
        print(
            "Valid accuracy: %.1f%%" %
            accuracy(self.tensorgraph.predict(self.valid_dataset),
                     self.valid_labels)),
        print "\tLoss : ", self.tensorgraph.get_loss(self.valid_dataset,
                                                     self.valid_labels)

    def testing(self):
        print(
            "Test accuracy: %.1f%%" % accuracy(
                self.tensorgraph.predict(self.test_dataset), self.test_labels))

    def upload_gradients(self, grads):
        self.sw.reset()
        text = comp.preprocess(grads)
        self.sw.accumulate('preprocess')
        self.ps.upload(self.id, text)
        self.sw.accumulate('upload_gradients')

    def update_parameters(self):
        self.sw.reset()
        text = self.ps.download()
        self.sw.accumulate('download')
        model = comp.deprocess(text, self.tensorgraph_shape)
        self.sw.accumulate('deprocess')
        self.tensorgraph.put_parameters(model)
        self.sw.accumulate('put para')

    def write_log(self, overall_elapsed, log_path='worker_log.txt'):
        partial_elapsed = self.sw.get_log()
        log_message = partial_elapsed + ',' + '%2.5f' % overall_elapsed + '\n'
        with open(log_path, 'a') as f:
            f.write(log_message)
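
Examples #11 and #17 use a profiling flavor of StopWatch: reset() marks a checkpoint, accumulate(name) charges the time since that checkpoint to a named bucket, present() prints the totals, and get_log() serializes them for write_log(). A minimal sketch under those assumptions (not the project's actual class):

import time
from collections import OrderedDict

class StopWatch(object):
    def __init__(self):
        self._totals = OrderedDict()
        self._mark = time.time()

    def reset(self):
        self._mark = time.time()

    def accumulate(self, name):
        # Charge the elapsed time to `name` and move the checkpoint forward.
        now = time.time()
        self._totals[name] = self._totals.get(name, 0.0) + (now - self._mark)
        self._mark = now

    def present(self):
        for name, total in self._totals.items():
            print('%s: %2.5fs' % (name, total))

    def get_log(self):
        # Comma-separated totals, matching the log format used in write_log().
        return ','.join('%2.5f' % t for t in self._totals.values())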
Example #12
displayPixelHeight = 160 / 30


#some utility functions
def constrain(val, min_val, max_val):
    return min(max_val, max(min_val, val))


def map(x, in_min, in_max, out_min, out_max):
    return (x - in_min) * (out_max - out_min) / (in_max - in_min) + out_min


# End Of Thermal Camera Stuff --------------------------------------------------------

# Timer Stuff ------------------------------------------------------------------------
timer = StopWatch()
# End Of Timer Stuff -----------------------------------------------------------------

# Main loop
exitFlag = True
while (exitFlag):
    for event in pygame.event.get():
        if event.type == pygame.MOUSEBUTTONDOWN or event.type == pygame.QUIT:
            exitFlag = False

    # Live streaming from the pi camera
    stream = io.BytesIO()
    camera.capture(stream, use_video_port=True, format='rgb')
    stream.seek(0)
    stream.readinto(rgb)
    stream.close()
Example #13
import os
import sys
import json
from glob import glob

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from config import JSON_DIR, HTML_DIR
from StopWatch import StopWatch


def html_to_json(filename):
    with open(filename, 'rb') as fp:
        html_doc = fp.read()
    category, name = os.path.basename(filename).split('.')[0].split('_', 1)
    data = get_info(category, html_doc)
    uid = '{}_{}'.format(category, name)
    for key, val in data.iteritems():
        with open(os.path.join(JSON_DIR, '{}_{}.json'.format(uid, key)), 'wb') as fp:
            json.dump(val, fp)


if __name__ == '__main__':
    html_files = glob(os.path.join(HTML_DIR, 'school_*.html'))
    # html_files = glob(os.path.join(HTML_DIR, 'search_*.html'))\
    #              + glob(os.path.join(HTML_DIR, 'user_*.html'))\
    #              + glob(os.path.join(HTML_DIR, 'school_*.html'))
    print len(html_files)
    stop_watch = StopWatch()
    for i, filename in enumerate(html_files):
        html_to_json(filename)
        if i % 100 == 0: 
            print i, filename
    stop_watch.tic('converted {} html documents to json'.format(len(html_files)))
Example #14
    names = ['ContainerID', 'State', 'year', 'note']
    df_state = pd.read_csv(os.path.join(ENTRY_DIR,
                                        'recent_elections_state.csv'),
                           header=None,
                           names=names)
    df_state['StateID'] = range(len(df_state))
    df_state.drop('note', axis=1, inplace=True)
    df_state.loc[:, 'State'] = df_state['State'].str.replace('_',
                                                             ' ').str.title()
    return df.merge(df_state, left_on='State', right_on='State')


if __name__ == '__main__':
    lookup_office = {'CityID': 'Mayor', 'StateID': 'Governor', 'AllID': 'All'}
    for distID in ['AllID', 'CityID', 'StateID']:
        sw = StopWatch()
        df_race_details = generate_race_details_table(
            position=lookup_office[distID])
        sw.tic('generate race details table')
        df_candidate = generate_candidate_table(position=lookup_office[distID])
        sw.tic('generate candidate table')
        check_share_sum(df_candidate)
        df_m = fix_bad_share(df_candidate)
        check_share_sum(df_m)

        if distID == 'CityID':
            df_tmp = merge_city(df_race_details)
            for s in ['Vice Mayor', 'Mayor Pro Tem']:
                df_tmp = df_tmp[df_tmp['Position'] != s]
        elif distID == 'StateID':
            df_tmp = merge_state(df_race_details)
Example #15
File: Vision.py Project: lpyaia/DinoAI
class Vision:
    def __init__(self, display):
        self.__d = d3dshot.create(capture_output="numpy")
        self.__d.display = self.__d.displays[display]  # select capture display (the bare subscript had no effect)
        self.__dx = 999999
        self.__dy = 999999
        self.__speed = 0
        self.__dt = 999999
        self.__ableToMeasure = False
        self.__stopwatch = None
        self.__dino = None
        self.__initCaptureScreen()

    def __initCaptureScreen(self):
        if self.__d is not None:
            self.__d.capture()
        else:
            print('capturing screen error!')

    def __getLatestFrame(self):
        return self.__d.get_latest_frame()

    def __findImageContours(self, image):
        imgCrop = image[200:200 + 315, 1:1 + 1365]
        imgCrop = cv2.cvtColor(imgCrop, cv2.COLOR_RGB2GRAY)

        blur = cv2.GaussianBlur(imgCrop, (5, 5), 0)
        ret, threshImg = cv2.threshold(blur, 91, 255, cv2.THRESH_BINARY)
        contours = cv2.findContours(threshImg, cv2.RETR_CCOMP,
                                    cv2.CHAIN_APPROX_SIMPLE)[-2]

        return contours, imgCrop

    def __getEnemiesFromContours(self, contours):
        enemies = list()
        cactos = None
        passaro = None

        for c in contours:
            (x, y, w, h) = cv2.boundingRect(c)

            # Dinosaur
            if x == 59 or w == 89:
                self.__dino = [x, y, w, h]

            # Crouching dinosaur
            elif w >= 127 - 5 and w <= 127 + 5 and h >= 58 - 5 and h <= 58 + 5:
                self.__dino = [x, y, w, h]

            if x >= 148:
                # Bird 1
                if w >= 97 - 5 and w <= 97 + 5 and h >= 70 - 5 and h <= 70 + 5:
                    passaro = [x, y, w, h]

                # Bird 2
                elif w >= 98 - 5 and w <= 98 + 5 and h >= 62 - 5 and h <= 62 + 5:
                    passaro = [x, y, w, h]

                elif w >= 15 and w <= 70 and h >= 50 and h <= 120:
                    cactos = [x, y, w, h]
                    enemies.append(cactos)
                    cactos = None

        if passaro is not None:
            enemies.append(passaro)

        enemies.sort(key=lambda x: x[0])

        return enemies

    def __groupClosestEnemies(self, enemies):
        i = 0

        while i < len(enemies):
            j = i + 1

            if j < len(enemies):
                dX = enemies[j][0] - enemies[i][0] - enemies[i][2]

                if dX <= 10:
                    if enemies[j][1] < enemies[i][1]:
                        enemies[i][1] = enemies[j][1]
                        enemies[i][3] = enemies[j][3]

                    enemies[i][2] = enemies[j][2] + enemies[i][2] + dX

                    enemies.remove(enemies[j])
                    i -= 1
            i += 1

        return enemies

    def __setCurrentSpeed(self, enemies):
        if len(enemies) > 0:
            if enemies[0][0] > 1000 and not self.__ableToMeasure:
                self.__ableToMeasure = True

            elif self.__ableToMeasure and enemies[0][0] < 1000 and self.__stopwatch is None:
                self.__stopwatch = StopWatch()
                self.__stopwatch.start()

            elif enemies[0][0] < 400 and self.__ableToMeasure:
                self.__ableToMeasure = False
                self.__stopwatch.stop()
                self.__dt = self.__stopwatch.get_elapsed_time()

                self.__stopwatch = None

    def __drawEnemies(self, enemies, imgCrop):
        for enemy in enemies[:]:
            cv2.rectangle(imgCrop, (enemy[0], enemy[1]),
                          (enemy[0] + enemy[2], enemy[1] + enemy[3]),
                          (0, 0, 0), 2)

        cv2.line(imgCrop, (1000, 350), (1000, 0), (0, 255, 0), thickness=2)
        cv2.line(imgCrop, (400, 350), (400, 0), (0, 255, 0), thickness=2)
        cv2.imshow('Contours', imgCrop)
        cv2.waitKey(1)

    def __getDistanceSensor(self, enemies):
        dx = 0
        dy = 0

        if (len(enemies) > 0 and self.__dino is not None):
            dx = enemies[0][0] - 148
            dy = self.__dino[1] - enemies[0][1]

        return dx, dy

    def resetCurrentSpeed(self):
        self.__stopwatch = None
        self.__ableToMeasure = False
        self.__dt = 999999

    def getClosestEnemy(self):
        imgArray = self.__getLatestFrame()

        if (imgArray is not None):
            contours, imgCrop = self.__findImageContours(imgArray)
            enemies = self.__getEnemiesFromContours(contours)
            enemies = self.__groupClosestEnemies(enemies)
            #self.__setCurrentSpeed(enemies)
            #self.__drawEnemies(enemies, imgCrop)
            self.__dx, self.__dy = self.__getDistanceSensor(enemies)

            return self.__dx, self.__dy

        return self.__dx, self.__dy
Example #16
    def __init__(self):

        # Main Tkinter window that we add our elements too
        root = Tk()
        root.title("Auto Tilting Ball Maze")
        root.configure(bg="#007777")

        # Make fullscreen
        root.geometry("{0}x{1}+0+0".format(root.winfo_screenwidth(),
                                           root.winfo_screenheight()))

        # Labels such as our thresholds, camera index, and serial port
        Label(root,
              text="Please Enter Variables",
              font=("Helvetica", 18),
              bg="#007777").grid(row=0, columnspan=3)
        Label(root,
              text="Upper (as B,G,R)",
              font=("Helvetica", 18),
              bg="#007777").grid(row=1, column=1)
        Label(root,
              text="Lower (as B,G,R)",
              font=("Helvetica", 18),
              bg="#007777").grid(row=1, column=2)
        Label(root,
              text="PlaySpace Thresholds",
              font=("Helvetica", 16),
              bg="#007777").grid(row=2, sticky=E)
        Label(root,
              text="Start Thresholds",
              font=("Helvetica", 16),
              bg="#007777").grid(row=3, sticky=E)
        Label(root,
              text="End Thresholds",
              font=("Helvetica", 16),
              bg="#007777").grid(row=4, sticky=E)
        Label(root, text="Serial Ports", font=("Helvetica", 16),
              bg="#007777").grid(row=5, sticky=E)
        Label(root, text="Camera", font=("Helvetica", 16),
              bg="#007777").grid(row=6, sticky=E)
        Label(root, text="  ", bg="#007777").grid(row=7, sticky=E)

        # Text Fields with defaulted thresholds to save ourselves time
        self.playSpaceEntry1 = Entry(root)
        self.playSpaceEntry1.insert(END, "255,255,255")
        self.playSpaceEntry1.grid(row=2, column=1)
        self.playSpaceEntry2 = Entry(root)
        self.playSpaceEntry2.insert(END, "80,80,80")
        self.playSpaceEntry2.grid(row=2, column=2)
        self.startEntry1 = Entry(root)
        self.startEntry1.insert(END, "255,135,35")
        self.startEntry1.grid(row=3, column=1)
        self.startEntry2 = Entry(root)
        self.startEntry2.insert(END, "150,0,0")
        self.startEntry2.grid(row=3, column=2)
        self.endEntry1 = Entry(root)
        self.endEntry1.insert(END, "235,150,255")
        self.endEntry1.grid(row=4, column=1)
        self.endEntry2 = Entry(root)
        self.endEntry2.insert(END, "100,70,160")
        self.endEntry2.grid(row=4, column=2)

        # Serial Port text field
        self.portNum = Entry(root)
        self.portNum.insert(END, "COM3")
        self.portNum.grid(row=5, column=1)

        # Get all available cameras
        camIndices = self.detectNumCameras()
        camIndex = []
        for n in xrange(camIndices):
            camIndex.append(n)
        camIndex.append("I don't see my camera . . .")

        # Camera Menu
        self.cameraIndex = StringVar(root)
        self.cameraIndex.set("Please select a camera")
        dropCamera = OptionMenu(root, self.cameraIndex, *camIndex)
        dropCamera.grid(row=6, column=1)
        Button(root,
               text="Check",
               font=("Helvetica", 16),
               command=self.checkCamera).grid(row=6, column=2)

        # Solve Button
        Button(root,
               text="                    Solve!                    ",
               command=self.grabVariables,
               font=("Helvetica", 16)).grid(row=8,
                                            column=0,
                                            columnspan=3,
                                            rowspan=2)

        #Camera Panel
        imageFrame = Frame(root, width=600, height=500)
        imageFrame.grid(row=0, column=4, rowspan=8)
        self.lmain = Label(imageFrame)
        self.lmain.grid(row=0, column=0)

        # Grid Sizing
        for col in range(5):
            root.columnconfigure(col, weight=1)
        for row in range(10):
            root.rowconfigure(row, weight=1)

        #Stopwatch Timer
        sw = StopWatch(root)
        sw.configure(bg="#007777")
        sw.grid(row=6, column=4)
        Button(root, text='Start', command=sw.Start).grid(row=7,
                                                          column=4,
                                                          sticky=S)
        Button(root, text='Stop', command=sw.Stop).grid(row=8, column=4)
        Button(root, text='Reset', command=sw.Reset).grid(row=9,
                                                          column=4,
                                                          sticky=N)

        # Info box: reminder to level the playing surface
        tkMessageBox.showinfo("Step 1",
                              "Please manually level the play surface . . .")
        root.mainloop()
Example #17
class ComputingNode:
    def __init__(self,
                 cn_id,
                 start,
                 length,
                 receive_service=True,
                 uploading_in_background=True):
        self.id = cn_id
        self.batch_size = 200
        self.num_epochs = 3
        self.staleness_threshold = 3
        self.train_dataset, self.train_labels, self.valid_dataset, self.valid_labels, self.test_dataset, self.test_labels = open_cifar10_dataset(
            start, length)
        gpu_config = gpu_split(len(cluster_spec['cn']))
        self.tensorgraph = CNN(gpu_config)
        self.tensorgraph_shape = self.tensorgraph.get_configure()

        # establish connection with parameter server to acquire store service
        self.ps = init_conn(cluster_spec['ps'][0]['IP'],
                            cluster_spec['ps'][0]['Port'])

        if receive_service:
            # start a model receiver service
            self.service_handler = Handler()
            service = threading.Thread(target=receive,
                                       args=(cluster_spec['cn'][cn_id]['IP'],
                                             cluster_spec['cn'][cn_id]['Port'],
                                             self.service_handler))
            service.daemon = True
            service.start()
            self.update_parameters = self.update_parameters_opt
        else:
            self.update_parameters = self.update_parameters_ori

        # switch between origin or optimized mode for uploading parameters
        self.lock = threading.Lock()
        if uploading_in_background:
            self.upload_parameters = self.upload_parameters_opt
        else:
            self.upload_parameters = self.upload_parameters_ori

        self.sw = StopWatch()
        self.logging = True
        self.status = {
            'GlobalStep': -1,
            'LocalStep': 0,
            'LocalHit': 0,
            'RemoteHit': 0
        }

    def run(self):
        if not len(self.train_dataset) % self.batch_size == 0:
            raise ValueError('Batch size error')
        all_batch_data = [
            self.train_dataset[x:x + self.batch_size]
            for x in xrange(0, len(self.train_dataset), self.batch_size)
        ]
        all_batch_label = [
            self.train_labels[x:x + self.batch_size]
            for x in xrange(0, len(self.train_labels), self.batch_size)
        ]
        del self.train_dataset
        del self.train_labels
        self.ps.notifyToStart(self.id)
        self.update_parameters()
        st = time.time()
        for step in range(self.num_epochs):
            self.training(all_batch_data, all_batch_label)
            if step % 1 == 0:
                self.validating()
        et = time.time()
        if self.logging:
            self.write_log(et - st)
        self.terminate()

    def terminate(self):
        self.sw.present()
        print "Hit count : %d(%d+%d)" % (
            self.status['LocalHit'] + self.status['RemoteHit'],
            self.status['LocalHit'], self.status['RemoteHit'])
        print "Hit rate : %f" % (
            1000. * (self.status['LocalHit'] + self.status['RemoteHit']) /
            self.status['LocalStep'] * 0.001)

    def training(self, all_batch_data, all_batch_label):
        for i in range(len(all_batch_data)):
            # compute the gradients
            self.sw.reset()
            gradients = self.tensorgraph.get_gradients(all_batch_data[i],
                                                       all_batch_label[i])
            self.sw.accumulate('compute_gradients')
            staleness = self.update_parameters()
            self.sw.reset()
            self.apply_gradients(gradients, staleness)
            self.sw.accumulate('apply_gradients')
            # update the gradients to the ps
            self.upload_parameters()
            self.sw.accumulate('upload_parameters')

    def validating(self):
        print "Valid accuracy: %.1f%%" % accuracy(
            self.tensorgraph.predict(self.valid_dataset), self.valid_labels),
        print "\tLoss : ", self.tensorgraph.get_loss(self.valid_dataset,
                                                     self.valid_labels)

    def testing(self):
        print(
            "Test accuracy: %.1f%%" % accuracy(
                self.tensorgraph.predict(self.test_dataset), self.test_labels))

    def upload_parameters_ori(self):
        self.lock.acquire()
        model = self.tensorgraph.get_parameters()
        text = comp.preprocess(model)
        self.sw.accumulate('preprocess')
        self.status['GlobalStep'] = self.ps.upload(self.id, text)
        self.lock.release()

    def upload_parameters_opt(self):
        model = self.tensorgraph.get_parameters()
        text = comp.preprocess(model)
        self.sw.accumulate('preprocess')
        self.status['GlobalStep'] = self.ps.getGlobalStatus() + 1
        self.ps.non_blocking_upload(self.id, text)

    def upload_parameters_bg(self):
        upload_bg = threading.Thread(target=self.upload_parameters_ori)
        upload_bg.start()

    def apply_gradients(self, gradients, staleness):
        self.status['LocalStep'] += 1
        self.tensorgraph.put_gradients(gradients, staleness)

    def update_parameters_opt(self):
        # sync with ps
        try:
            gStatus = self.ps.getGlobalStatus()
        except:
            gStatus = -1
        staleness = gStatus - self.status['GlobalStep']
        if gStatus == self.status['GlobalStep']:
            self.status['LocalHit'] += 1
            return
        version_stamp_diff = gStatus - self.service_handler.getStatus()
        if (version_stamp_diff < self.staleness_threshold
                and self.service_handler.getStatus() >= 0
                and self.service_handler.getStatus() > self.status['GlobalStep']):
            self.status['RemoteHit'] += 1
            model = comp.deprocess(self.service_handler.getModel(),
                                   self.tensorgraph_shape)
            self.tensorgraph.put_parameters(model)
            return
        self.sw.reset()
        try:
            text = self.ps.download()
            self.sw.accumulate('download')
            model = comp.deprocess(text, self.tensorgraph_shape)
            self.sw.accumulate('deprocess')
        except:
            del self.ps
            self.ps = init_conn(cluster_spec['ps'][0]['IP'],
                                cluster_spec['ps'][0]['Port'])
            return
        self.sw.reset()
        self.tensorgraph.put_parameters(model)
        self.sw.accumulate('put para')
        return staleness

    def update_parameters_ori(self):
        self.sw.reset()
        try:
            gStatus = self.ps.getGlobalStatus()
            staleness = gStatus - self.status['GlobalStep']
            text = self.ps.download()
            self.sw.accumulate('download')
            model = comp.deprocess(text, self.tensorgraph_shape)
            self.sw.accumulate('deprocess')
        except:
            del self.ps
            self.ps = init_conn(cluster_spec['ps'][0]['IP'],
                                cluster_spec['ps'][0]['Port'])
            return
        self.tensorgraph.put_parameters(model)
        self.sw.accumulate('put para')
        return staleness

    def write_log(self, overall_elapsed, log_path='worker_log.txt'):
        partial_elapsed = self.sw.get_log()
        hit_rate = (1000. *
                    (self.status['LocalHit'] + self.status['RemoteHit']) /
                    self.status['LocalStep'] * 0.001)
        log_message = partial_elapsed + ',' + '%2.5f' % overall_elapsed + ',' + '%1.3f' % hit_rate + '\n'
        with open(log_path, 'a') as f:
            f.write(log_message)
Example #18
def crawl():
    url_template = 'http://search.lawschoolnumbers.com/users/profiles?' \
                   'utf8=true&' \
                   'cycle_id={}&' \
                   'searchLSAT=true&' \
                   'blanksLSAT=true&' \
                   'LSAT_Slider_val=120+-+180&' \
                   'searchLGPA=true&' \
                   'blanksLGPA=true&' \
                   'LGPA_Slider_val=1.00+-+4.33&' \
                   'searchDGPA=true&' \
                   'blanksDGPA=true&' \
                   'DGPA_Slider_val=1.00+-+4.33&' \
                   'school_type_s=Any&' \
                   'school_type=&' \
                   'major_s=Any&' \
                   'major=&' \
                   'state%5Bstate_s%5D=Any&' \
                   'location_s=Any&' \
                   'location=&' \
                   'race_s=Any&' \
                   'race=&' \
                   'sex=Any&' \
                   'international=Included&' \
                   'urm=Included&' \
                   'nontr=Included&' \
                   'mlast=Included&' \
                   'commit=Search+Applicants&' \
                   'page={}'
    num_pages = [
        65, 120, 141, 149, 201, 126, 100, 70, 58, 52, 34, 40, 22, 28, 31, 1
    ]
    url_list = []
    cycle_id = 0
    for max_page in num_pages:
        cycle_id += 1
        for page_num in range(1, max_page + 1):
            url_list.append(url_template.format(cycle_id, page_num))
    task_queue = deque(url_list)
    task_set = set(url_list)
    stop_watch = StopWatch()
    count = 0
    while task_queue:
        url = task_queue.popleft()
        count += 1
        if count % 100 == 0:
            tokens = tokenize(url)
            if tokens is not None:
                category, name = tokenize(url)
                print '{}, crawling {}, name = {}, len(q) = {}'.format(
                    count, category, name, len(task_queue))
        to_crawl = _download_and_get_children(url)
        if to_crawl is not None:
            for child_url in to_crawl:
                if child_url not in task_set:
                    task_set.add(child_url)
                    task_queue.append(child_url)
        if count > 70000:
            with open('timeout.txt', 'wb') as fp:
                fp.write('\n'.join(list(task_queue)))
            break
    stop_watch.tic('crawl {} urls'.format(count))
Example #19
from StopWatch import StopWatch 

stopwatch = StopWatch()
print("Stopwatch created")
sum = 0
stopwatch.start()
for i in range(1, 1000001):  # upper bound is exclusive, so 1000001 includes 1,000,000
  sum += i
stopwatch.stop()

print ("sum of 1 to 1,000,000 :", sum)
print ("time taken to add 1 to 1,000,000 :", stopwatch.getElapsedTime())