Example #1
def run(args):

    if args.input is not None:
        if valid_path(args.input[0]):
            DataParser.inputPath = args.input[0]
        else:
            print("Input path not found")
            quit()
    if args.output is not None:
        if valid_path(args.output[0]):
            ReportOutput.outputPath = args.output[0]
        else:
            print("Output path not found")
            quit()

    DataParser.loadAll()

    try:
        DataStore.validateTestWeights()
        DataStore.computeAll()
    except exceptions.InvalidCourseWeightError as e:
        print(e.message)
        quit()
    except Exception:  # Catch any other error that arises and print the traceback
        print("Unknown error occurred while parsing data")
        traceback.print_exc()
        quit()

    ReportOutput.saveReport()
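
This fragment indexes args.input[0] and args.output[0], which suggests list-valued options from argparse (nargs=1) that default to None when omitted. A minimal sketch of compatible wiring; the flag names and help strings are assumptions, not the project's actual CLI:

import argparse

# Hypothetical entry point; valid_path, DataParser, ReportOutput and DataStore
# come from the surrounding project.
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument("-i", "--input", nargs=1, help="directory to read data from")
arg_parser.add_argument("-o", "--output", nargs=1, help="directory to write the report to")

if __name__ == "__main__":
    run(arg_parser.parse_args())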
Example #2
def getSuperSmartTagDetectingParser():
    """Returns a TagDetectingParser which should know all minecraft tags.
    """

    from level import LevelParser
    from terrain_populated import TerrainPopulatedParser
    from height_map import HeightMapParser
    from blocks import BlocksParser
    from z_pos import ZPosParser
    from data import DataParser
    from sky_light import SkyLightParser
    from tile_entities import TileEntitiesParser
    from entities import EntitiesParser
    from block_light import BlockLightParser
    from last_update import LastUpdateParser
    from x_pos import XPosParser

    p = TagDetectingParser()
    p.tagParser['Level'] = LevelParser(p)
    p.tagParser['TerrainPopulated'] = TerrainPopulatedParser()
    p.tagParser['HeightMap'] = HeightMapParser()
    p.tagParser['Blocks'] = BlocksParser()
    p.tagParser['zPos'] = ZPosParser()
    p.tagParser['Data'] = DataParser()
    p.tagParser['SkyLight'] = SkyLightParser()
    p.tagParser['TileEntities'] = TileEntitiesParser()
    p.tagParser['Entities'] = EntitiesParser()
    p.tagParser['BlockLight'] = BlockLightParser()
    p.tagParser['LastUpdate'] = LastUpdateParser()
    p.tagParser['xPos'] = XPosParser()

    return p
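
The returned parser dispatches on tag name through its tagParser mapping, so a caller can pull out the handler registered for a specific NBT tag directly. A minimal usage sketch (only the mapping itself appears above; any parse entry points are not shown):

# Build the fully populated parser and look up individual tag handlers.
p = getSuperSmartTagDetectingParser()
level_parser = p.tagParser['Level']    # composite parser; holds a back-reference to p
blocks_parser = p.tagParser['Blocks']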
Example #3
    def __init__(self, controller, ui_handler, logger):
        QObject.__init__(self)
        self.controller = controller
        self.data_parser = DataParser(ui_handler, logger)
        self.ui_handler = ui_handler
        self.tree_controller = ui_handler.tree_controller
        self.logger = logger
        self.client = None
        self.transfer_manager = None

        # Init poll timer
        self.poll_timer = QTimer(self)
        self.poll_timer.timeout.connect(self.thread_poller)

        # Init thread params
        self.running_threads = []
        self.queued_threads = []
        self.active_data_worker = None
        self.data_workers = deque()
        
        # DBus manager
        self.dbus_monitor = DBusMonitor(self, logger)
        
        # Start the thread poller
        self.poll_timer.start(100)
Example #4
class ConnectionManager(QObject):

    def __init__(self, controller, ui_handler, logger):
        QObject.__init__(self)
        self.controller = controller
        self.data_parser = DataParser(ui_handler, logger)
        self.ui_handler = ui_handler
        self.tree_controller = ui_handler.tree_controller
        self.logger = logger
        self.client = None
        self.transfer_manager = None

        # Init poll timer
        self.poll_timer = QTimer(self)
        self.poll_timer.timeout.connect(self.thread_poller)

        # Init thread params
        self.running_threads = []
        self.queued_threads = []
        self.active_data_worker = None
        self.data_workers = deque()
        
        # DBus manager
        self.dbus_monitor = DBusMonitor(self, logger)
        
        # Start the thread poller
        self.poll_timer.start(100)
    
    def set_transfer_manager(self, transfer_manager):
        self.transfer_manager = transfer_manager
    
    def check_data_workers(self):
        # Start next data worker if available
        if self.active_data_worker is None:
            try:
                worker = self.data_workers.popleft()
                worker.start()
                self.active_data_worker = worker
            except IndexError:
                self.active_data_worker = None  # queue is empty
    
    def thread_poller(self):
        # Check for completed network threads
        if self.running_threads:
            removable_network_threads = []
            for thread in self.running_threads:
                thread.join(0.01)
                if not thread.is_alive():
                    if thread.error is not None:
                        # If the tree exists, store the request and run it again
                        # when a connection is available
                        if self.tree_controller.root_folder is not None:
                            self.queued_threads.append(thread)
                        # Request a connection only once
                        if len(self.queued_threads) == 1:
                            self.request_connection()
                    if thread.response is not None:
                        if thread.callback_parameters is not None:
                            thread.callback(thread.response, thread.callback_parameters)
                        else:
                            thread.callback(thread.response)
                    removable_network_threads.append(thread)
            for removable in removable_network_threads:
                self.running_threads.remove(removable)
                
        # Check for active data worker
        if self.active_data_worker is not None:
            self.active_data_worker.join(0.01)
            if not self.active_data_worker.is_alive():
                if self.active_data_worker.error is not None:
                    self.logger.error("DataWorker error: " + self.active_data_worker.error)
                self.active_data_worker = None
                self.check_data_workers()
                    
    def request_connection(self):
        self.dbus_monitor.request_connection()
        
    def connection_available(self):
        return self.dbus_monitor.device_has_networking
    
    def connection_is_wlan(self):
        if not self.connection_available():
            return False
        bearer = self.dbus_monitor.bearer
        if bearer is None:
            return False
        return bearer.startswith("WLAN")
            
    def set_connected(self, connected):
        if connected:
            self.ui_handler.hide_loading_ui()
            if not self.controller.login_done:
                self.controller.start_trusted_auth()
                trusted_login_ui = self.ui_handler.trusted_login_ui
                if (not trusted_login_ui.line_edit_email.text().isEmpty() and
                        not trusted_login_ui.line_edit_password.text().isEmpty()):
                    self.logger.network("Network connection established, starting authentication")
                    self.ui_handler.try_trusted_login()
            elif self.controller.connected and self.client is not None:
                if len(self.queued_threads) > 0:
                    self.ui_handler.show_information_ui("Connection established, fetching queued tasks", True)
                    self.logger.network("Network connection established, starting queued networking")
                    for queued_thread in self.queued_threads:
                        worker = NetworkWorker()
                        worker.clone(queued_thread)
                        worker.start()
                        self.running_threads.append(worker)
                    self.queued_threads = []
                else:
                    self.ui_handler.show_information_ui("Connection established, fetching content", True)
                    self.logger.network("Network connection established, fetching root metadata")
                    self.get_account_data()
                    self.get_metadata("/", "dropbox")
        else:
            self.ui_handler.show_loading_ui("Waiting for a connection...", True)
    
    def set_client(self, client):
        # Set client for usage, we are not connected
        self.client = client
        self.transfer_manager.set_client(client)
        # Get account information
        if self.connection_available():
            if self.get_account_data():
                # Start by fetching sandbox root contents
                self.get_metadata("/", "dropbox")
            else:
                self.ui_handler.show_loading_ui("Waiting for a connection...", True)
                self.request_connection()
        else:
            self.ui_handler.show_loading_ui("Waiting for a connection...", True)
            self.request_connection()

    def get_account_data(self):
        try:
            self.data_parser.parse_account_info(self.client.account_info())
            return True
        except (socket.error, socket.gaierror):
            return False
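
At startup the manager is wired together roughly as below. This is a hedged sketch using only the methods defined above; controller, ui_handler, logger, transfer_manager and client are assumed to come from the surrounding application:

# Hypothetical wiring of the pieces shown in this class.
manager = ConnectionManager(controller, ui_handler, logger)
manager.set_transfer_manager(transfer_manager)
manager.set_client(client)  # fetches account data and root metadata when online

if manager.connection_is_wlan():
    logger.network("Connected over WLAN")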
Example #5
	def run(self, session, run='training'):

		if not self.init:
			return

		self.images = tf.placeholder(tf.float32, [None, self.cfgs[run]['image_height'], self.cfgs[run]['image_width'],
		                                          self.cfgs[run]['n_channels']])
		self.edgemaps = tf.placeholder(tf.float32,
		                               [None, self.cfgs[run]['image_height'], self.cfgs[run]['image_width'], 1])

		train_data = DataParser(self.cfgs)

		total_batches = train_data.num_training_ids // self.cfgs['batch_size_train']

		sideoutput, dsn_fuse = self.model.hed_net(self.images)

		self.model.setup_training(session, sideoutput, dsn_fuse, self.edgemaps)

		d_step = 100 * total_batches
		global_step = tf.Variable(0, trainable=False)  # step counter, kept out of the trainable set
		init_learn_rate = self.cfgs['optimizer_params']['learning_rate']
		learn_rate = tf.train.exponential_decay(init_learn_rate, global_step=global_step,
												decay_steps=d_step, decay_rate=0.75, staircase=True)

		train = tf.train.AdamOptimizer(learn_rate).minimize(self.model.loss, global_step=global_step)


		loss_log = []
		total_loss_log = []
		error_log = []
		ep = []
		batchs = []
		ep_v = []

		session.run(tf.global_variables_initializer())

		timestart = time.time()

		print('begin training')

		for idx in range(self.cfgs['max_iterations']):
			avg_loss = 0
			ep.append(idx)

			for b in range(total_batches):
				im, em, _ = train_data.next_training_batch(b)
				run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
				run_metadata = tf.RunMetadata()

				_, summary, loss = session.run([train, self.model.merged_summary, self.model.loss],
											 feed_dict={self.images: im, self.edgemaps: em},
											 options=run_options,
											 run_metadata=run_metadata)
				if (idx * total_batches + b + 1) % d_step == 0:  # every d_step global steps
					lr = session.run(learn_rate)
					self.io.print_info('Learning_rate:{}'.format(lr))

				loss_log.append(loss)
				batchs.append(idx * total_batches + b)

				avg_loss += loss
				# if b+1 % 5 == 0:
					# self.io.print_info('[{}/{}] TRAINING loss : {}'.format(idx + 1, self.cfgs['max_iterations'], loss))
				self.io.print_info('[{}/{}] TRAINING loss : {}'.format(b + 1, total_batches, loss))


			total_loss = avg_loss / total_batches
			self.io.print_info(
				'[{}/{}] TRAINING average_loss : {}'.format(idx + 1, self.cfgs['max_iterations'], total_loss))

			total_loss_log.append(total_loss)

			self.model.train_writer.add_run_metadata(run_metadata, 'step{:06}'.format(idx))
			self.model.train_writer.add_summary(summary, idx)

			if (idx + 1) % self.cfgs['save_interval'] == 0:
				saver = tf.train.Saver()
				saver.save(session, os.path.join(self.cfgs['save_dir'], 'models/hed-model'), global_step=idx)

			if (idx + 1) % self.cfgs['val_interval'] == 0:
				im, em, _ = train_data.get_validation_batch()

				summary, error = session.run([self.model.merged_summary, self.model.error],
											feed_dict={self.images: im, self.edgemaps: em})
				self.model.val_writer.add_summary(summary, idx)
				self.io.print_info('[{}/{}] VALIDATION error : {}'.format(idx + 1, self.cfgs['max_iterations'], error))
				ep_v.append(idx)
				error_log.append(error)
		endtime = time.time()
		self.io.print_info('Train cost time : %f hours' % ((endtime - timestart) / 3600))
		np.savetxt(os.path.join(self.cfgs['cost_log_file'], self.cfgs['cost_log']['loss']), [batchs, loss_log])
		np.savetxt(os.path.join(self.cfgs['cost_log_file'], self.cfgs['cost_log']['avgloss']), [ep, total_loss_log])
		np.savetxt(os.path.join(self.cfgs['cost_log_file'], self.cfgs['cost_log']['error']), [ep_v, error_log])

		self.model.train_writer.close()
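
Everything this method reads comes out of self.cfgs. A minimal dictionary covering just the keys used above might look like the following sketch (values are illustrative placeholders, not the project's defaults):

# Illustrative cfgs layout inferred from the keys the method accesses.
cfgs = {
    'training': {'image_height': 480, 'image_width': 480, 'n_channels': 3},
    'batch_size_train': 8,
    'max_iterations': 100,
    'save_interval': 10,
    'val_interval': 5,
    'optimizer_params': {'learning_rate': 1e-4},
    'save_dir': 'output',
    'cost_log_file': 'logs',
    'cost_log': {'loss': 'loss.txt', 'avgloss': 'avgloss.txt', 'error': 'error.txt'},
}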
Example #6
# -*- coding: utf-8 -*-

import json
from data import DataParser, CSVParser, DataGenerator, DEPDataParser
import mturk
import nltk
from pprint import pprint


parser = DataParser()
csv_parser = CSVParser()

data_generator = DataGenerator()
dep_data_generator = DEPDataParser()

####
# INPUT FOR THE SCRIPT
####

main_data_file = "data/data_all.json"

####


# This is the main data (from Tim)
with open(main_data_file) as data_file:
    data = json.load(data_file)

# Segment it into JSON
segmented_data = parser.parse_data(data)
Example #7
# -*- coding: utf-8 -*-

import json
from data import DataParser, CSVParser, AnnotationAnalyzer


parser = DataParser()
csv_parser = CSVParser()
aa = AnnotationAnalyzer()

####
# INPUT FOR THE SCRIPT
####

main_data_file = "data/data2.json"
batch_filename = "wiki-101.csv"
already_annotated_files = ['batch100.csv']

####

# This is the main data (from Tim)
with open(main_data_file) as data_file:
    data = json.load(data_file)

# Segment it into JSON
segmented_data = parser.parse_data(data)