Example #1
 def __init__(self, dict_map, embedding_dim, name, config, padding_idx=None,
              pretrained_embedding_file=None, mode=EmbeddingProcessType.FLAT,
              dropout=0, init_type=InitType.XAVIER_UNIFORM, low=0, high=1,
              mean=0, std=1, activation_type=ActivationType.NONE,
              fan_mode=FAN_MODE.FAN_IN, negative_slope=0):
     super(Embedding, self).__init__()
     self.logger = Logger(config)
     self.dropout = torch.nn.Dropout(p=dropout)
     self.mode = mode
     if self.mode == EmbeddingProcessType.FLAT:
         self.embedding = torch.nn.Embedding(
             len(dict_map), embedding_dim, padding_idx=padding_idx)
     else:
         self.embedding = torch.nn.EmbeddingBag(
             len(dict_map), embedding_dim, mode=mode)
     embedding_lookup_table = init_tensor(
         tensor=torch.empty(len(dict_map), embedding_dim),
         init_type=init_type, low=low, high=high, mean=mean, std=std,
         activation_type=activation_type, fan_mode=fan_mode,
         negative_slope=negative_slope)
     if pretrained_embedding_file is not None and \
             pretrained_embedding_file != "":
         self.load_pretrained_embedding(
             embedding_lookup_table, dict_map, embedding_dim, name,
             pretrained_embedding_file)
     if padding_idx is not None:
         embedding_lookup_table[padding_idx] = 0.0
     self.embedding.weight.data.copy_(embedding_lookup_table)
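A minimal usage sketch for the module above, assuming the surrounding project provides Embedding, EmbeddingProcessType, and a config object for its Logger (the vocabulary and dimensions below are illustrative, not from the source):

import torch

# hypothetical instantiation; `config` is whatever this project's Logger expects
vocab = {"<pad>": 0, "the": 1, "cat": 2}
embedding = Embedding(vocab, embedding_dim=128, name="token", config=config,
                      padding_idx=0, mode=EmbeddingProcessType.FLAT, dropout=0.1)
ids = torch.tensor([[1, 2, 0]])      # a batch of token ids, padded with index 0
vectors = embedding.embedding(ids)   # shape: (1, 3, 128)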
Example #2
    def create(self,
               table,
               columns,
               types,
               primary_key_index=(),
               is_ifnotexists=True):
        """
        Create table in the database
        :param table: Table name
        :param columns: Column array
        :param types: Type array
        :param is_ifnotexists: Create table if not exists keyword
        """
        file_path = os.path.join(self.file_directory, table + ".csv")
        columns = [e.split(' ')[0] for e in columns]
        if len(columns) != len(types):
            return False

        self.lock.acquire()
        if os.path.isfile(file_path):
            Logger.info(self.__class__.__name__,
                        "File (%s) has been created already." % file_path)
        else:
            with open(file_path, 'w+') as csvfile:
                csvfile.write(','.join(["\"" + e + "\""
                                        for e in columns]) + '\n')

        self.lock.release()
        return True
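A hedged usage sketch for create(): the enclosing class (here called CsvClient) and its construction are assumptions; only the method above is from the source:

# hypothetical caller; file_directory and lock come from the enclosing class
client = CsvClient()
ok = client.create(table="trades",
                   columns=["id int", "price float"],   # "name type" pairs; only the name is kept
                   types=["int", "float"])
# creates <file_directory>/trades.csv with header "id","price" and returns True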
Example #3
 def __init__(self, datahandler, symbol_list=None):
     self._datahandler = datahandler
     self._selected_symbol_list = symbol_list
     self._symbol_list = []
     self._baseseries_list = datahandler.baseseries_list
     self._date_list = datahandler.date_list
     self._logger = Logger()
Example #4
    def connect(self, url,
                on_message_handler=None,
                on_open_handler=None,
                on_close_handler=None,
                on_error_handler=None):
        """
        :param url: Url link
        :param on_message_handler: Message handler which take the message as
                           the first argument
        :param on_open_handler: Socket open handler which take the socket as
                           the first argument
        :param on_close_handler: Socket close handler which take the socket as
                           the first argument
        :param on_error_handler: Socket error handler which take the socket as
                           the first argument and the error as the second
                           argument
        """
        Logger.info(self.__class__.__name__, "Connecting to socket <%s>..." % self.id)
        if on_message_handler is not None:
            self.on_message_handlers.append(on_message_handler)
        if on_open_handler is not None:
            self.on_open_handlers.append(on_open_handler)
        if on_close_handler is not None:
            self.on_close_handlers.append(on_close_handler)
        if on_error_handler is not None:
            self.on_error_handlers.append(on_error_handler)

        if not self._connected:
            self.wst = threading.Thread(target=lambda: self.run(url, 80))
            self.wst.start()

        return self.wst
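A hedged usage sketch for connect(): the enclosing class and its constructor are assumptions; only the method above is from the source. Handlers are registered first, then a worker thread runs the socket loop:

# hypothetical caller
def on_message(msg):
    print("received:", msg)

socket = WebSocketApiClient()              # assumed constructor
thread = socket.connect("wss://example.com/ws",
                        on_message_handler=on_message)
thread.join()                              # connect() returns the worker thread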
Example #5
    def __init__(self, pipes, float_sizes, shapes, flags, n, learners):
        """
            :param pipes: One pipe for each agent
            :param float_sizes: Array which stores for each weight its size in bytes
            :param shapes: Array which stores for each weight its shape
            :param warm_start: if the first epochs synchronous should be used
            :param epochs: number of epochs for which synchronous is used
            :param flags: flags set by the user
            :param n: n of n-softsync.
            :param learners: number of agents
        """
        super().__init__(pipes, float_sizes, shapes)

        assert (learners % n  == 0) #otherwise this algorithm won't finish!!!
        self.learners = learners
        self.num = learners // n
        self.flags = flags
        self.timing = self.flags.time_program
        self.qlist = []
        self.counter = 0
        logger.state("softsync with number of learners", learners)
        self.Random = Random(time.time())
        self.shuffle = flags.shuffle
        self.step_in_phase = [0] * learners
        self.steps_for_learner = [0] * learners
        self.min = 0
        self.mode = 2
        self.holdback = []
        self.max_staleness = 0
        self.gen = self._pipes_generator()
Example #6
    def __init__(self, element, parent):
        ''' The init method, used to create a new preparation object which can
            be ensured on the host system.'''
        # Store information about the preparation object
        self.description = element.get('description')
        self.test = element.get('test')
        self.commands = element.get('commands')
        self.result = element.get('result')

        # Store the system for running commands
        self.parent = parent

        # Check if we're in chroot
        if self.parent.user != 'chroot':

            # Initialise the logger
            self.logger = Logger(self.parent.environment['WANDER'],
                                 self.parent.stage[1], '.')

        else:

            # Initialise the logger
            self.logger = Logger('/', self.parent.stage[1])

        # Note that we've started the check
        Output.log(Output.PENDING, self.description)
Example #7
 def learn(self, result, delta, learning_rate):
     super(BiasedNeuralLayer, self).learn(result, delta, learning_rate)
     Logger.debug("delta: " + str(delta))
     Logger.debug("biases: " + str(self.biases))
     tmp = -(learning_rate * delta)
     self.biases = tmp + np.atleast_2d(self.biases)
     self.biases[self.biases < 0] = 0
Example #8
 def get_order_book_worker(self, instmt):
     """
     Get order book worker
     :param instmt: Instrument
     """
     while True:
         ExchGwQuoine.last_query_time_lock.acquire()
         if datetime.now() - ExchGwQuoine.last_query_time < timedelta(
                 seconds=ExchGwQuoine.waiting_seconds):
             ExchGwQuoine.last_query_time_lock.release()
             time.sleep(random.uniform(0, 1))
         else:
             ExchGwQuoine.last_query_time = datetime.now()
             try:
                 l2_depth = self.api_socket.get_order_book(instmt)
                 if l2_depth is not None and l2_depth.is_diff(
                         instmt.get_l2_depth()):
                     instmt.set_prev_l2_depth(instmt.get_l2_depth())
                     instmt.set_l2_depth(l2_depth)
                     instmt.incr_order_book_id()
                     self.insert_order_book(instmt)
             except Exception as e:
                 Logger.error(self.__class__.__name__,
                              "Error in order book: %s" % e)
             ExchGwQuoine.last_query_time_lock.release()
Example #9
    def __init__(self, sys, clk, user_id, level, num_load_mshrs,
                 num_parallel_stores, cache_size, line_size, latency,
                 logger_on, parent_component_id, child_component_id):
        super().__init__("L" + str(level) + " Cache " + str(user_id), clk, sys,
                         parent_component_id, child_component_id)
        self.level = level
        self.num_load_mshrs = num_load_mshrs
        self.num_parallel_stores = num_parallel_stores
        self.load_stall_queue = []
        self.store_stall_queue = []
        self.load_mshr_bank = MSHRBank(self.num_load_mshrs)
        self.logger = Logger(self.name, logger_on, self.sys)

        # Cache Configuration
        self.tlb_size = 32
        self.cache_size = cache_size
        self.line_size = line_size
        self.max_size = self.cache_size / self.line_size
        self.latency = latency

        self.accesses = []
        self.cache = [0, 0]
        self.load_queue = []
        self.store_queue = []

        self.offset_bits = int(math.log2(self.line_size))
        self.word_size = 64
        self.byte_addressable = True
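For intuition on the derived cache fields, a small worked example with illustrative numbers (not from the source): with cache_size = 32768 bytes and line_size = 64 bytes,

# max_size    = 32768 / 64 = 512.0  -> the cache holds 512 lines
# offset_bits = log2(64)   = 6      -> the low 6 address bits select the byte within a line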
Example #10
 def __init__(self, setting):
     try:
         self.logger = Logger(setting.saveDirPath)
         self.fm = FileManager(setting.saveDirPath, setting.downloadDirPath,
                               self.logger)
         self.driver = IDriver(setting.chromeDriverPath)
         self.setting = setting
     except Exception:
         raise ValueError('[error] setting is not valid')
Example #11
 def __init__(self, file_path):
     """
     CSVLoader loads a fund's NAV or an index's value from a CSV file.
     :param file_path: Path to the CSV file
     """
     self._file_path = file_path
     self._symbol2data = dict()
     self._symbol2status = dict()
     self._logger = Logger()
     self._read()
Example #12
 def train_test(self, train_X, train_Y, test_X):
     self.results = []
     rez = np.array([0.0] * len(test_X))
     l = Logger(len(self.models), 1, tag='BigModel')
     
     for i, m in enumerate(self.models):
         m_rez, m_batches = m.train_test(train_X, train_Y, test_X)
         rez += self.weights[i] * m_rez
         self.results.append((m_rez, m_batches))
         l.step()
     return rez
Example #13
 def distr_owners(self, map_otn):
     """Randomly distribute cells to owners"""
     if sum(map_otn.values()) > self.x * self.y:
         Logger.warning("Board.distr_owners called, "
                        "but the board is too small to allocate all the cells!")
     cells = set(self.cells)
     otc = {}
     for owner in map_otn:
         s = self.rng.sample(list(cells), map_otn[owner])  # sample() needs a sequence
         cells.difference_update(s)
         otc[owner] = s
         for c in s:
             c.owner = owner
     return otc  # owner -> cells allocated to that owner
Example #14
def get_db_data(tags=['train1', 'train2']):
    records = auctions.find({'tag': {'$in': tags}})
    l = Logger(records.count(), 20000, tag=str(tags))
    X = []
    Y = []
    IDs = []
    for record in records:
        l.step()
        xr = [record.get(k, -1) for k in columns]  # -1 replaces missing values (removed Nones)
        X.append(xr)
        Y.append(record.get(Ycol))
        IDs.append(record['SalesID'])
    return (X, Y, IDs)
Example #15
 def get_delta(self, out_data, last_delta, last_weights):
     """calculate delta for layer before"""
     #Logger.DEBUG = True
     Logger.debug("Get delta: ")
     Logger.debug("out: " + str(np.shape(out_data)))
     Logger.debug("last_delta: " + str(np.shape(last_delta)))
     Logger.debug("last_weights: " + str(np.shape(last_weights)))
     dot = np.dot(last_weights.T, last_delta)
     Logger.debug("dot shape: " + str(np.shape(dot)))
     delta = dot * self.activation_deriv(out_data)
     #Logger.debug("delta shape: " + str(np.shape(delta)))
     #Logger.DEBUG = False
     return delta
Example #16
class ExpenseReport(object):

	MANDATORY_FIELDS = ['amount','user','date','category','reimburseable']

	def __init__(self):
		self.logger = Logger('REPORT')

	@staticmethod
	def create(inputmsg, state):
		# create an expense
		report = ExpenseReport()
		report.log('Creating report')

		# check if all fields are present
		if not ExpenseReport._is_valid_input(inputmsg):
			state.error('Illegal input')
			return None

		for key in inputmsg.keys():
			setattr(report, key, inputmsg[key])

		return report

	@staticmethod
	def _is_valid_input(msg):
		# True only if every mandatory field is present in the message
		return all(field in msg for field in ExpenseReport.MANDATORY_FIELDS)


	################################
	####     INSTANCE METHODS   ####
	################################
	def store(self, db,state):
		if (state.is_valid()):
			self.log('Persisting report')
			db.write_record(self)

	def log(self, msg):
		self.logger.write(msg)


	def __str__(self):
		msg = 'Expense Report printer:\n'
		for key in self.__dict__.keys():
			msg += '  %s: %s\n' % (key, self.__dict__[key])
		msg += '-------------------------'
		return msg
Example #17
class Expenses:
	def __init__(self):
		self.logger = Logger("WEB")

	def GET(self):
		web.header('Access-Control-Allow-Origin', '*')
		return 'Listing expenses not yet implemented'

	def POST(self, resource_id=None):
		web.header('Access-Control-Allow-Origin', '*')
		jsondata = web.data()
		self.logger.write(jsondata)
		resp = api.API().add_expense(jsondata)
		return resp
Example #18
    def __init__(self, dataset, weights=None, norms=None,
        keep_invalids=False, minvalid=None, clean_weights=True,
        logger=None, loglevel=None, zerofill=False, **kwargs):

        # Logger
        Logger.__init__(self, logger=logger, loglevel=loglevel,
            **dict_filter(kwargs, 'log_'))

        # Input shape
        if isinstance(dataset, (list, tuple)):
            dataset = list(dataset)
            self.map = len(dataset)
        else:
            dataset = [dataset]
            self.map = 0
        self.ndataset = self.nd = len(dataset)
        self.dataset = dataset

        # Other inits
        self.data = []
        self.nt = None
        weights = self.remap(weights, reshape=True)
        norms = self.remap(norms, reshape=True)
        if self.ndataset == 1 and norms[0] is None:
            norms = [False]
        self._invalids = []
        self.masked = False

        # Loop on datasets
        for idata,data in enumerate(dataset):

            # Create the Data instance and pack array
            dd = Data(data, norm=norms[idata], weights=weights[idata],
                keep_invalids=keep_invalids, minvalid=minvalid, clean_weights=clean_weights,
                zerofill=zerofill)
            self.data.append(dd)
            self._invalids.append(dd.invalids)
            self.masked |= dd.masked

            # Check nt
            if self.nt is None:
                self.nt = dd.nt
            elif self.nt != dd.nt:
                self.error('Time dimension of variable %i must have length %i (not %i)'%(idata, self.nt, dd.nt))

        # Merge
        self.stacked_data = npy.asfortranarray(npy.vstack([d.packed_data for d in self.data]))
        self.splits = npy.cumsum([d.packed_data.shape[0] for d in self.data[:-1]])
        self.stacked_weights = npy.hstack([d.packed_weights for d in self.data])
        self.ns = self.stacked_data.shape[0]
        self.ntv = (self.stacked_data!=default_missing_value).any(axis=0).sum()
Example #19
 def __init__(self, in_size, out_size, activation_fn=None, activation_fn_deriv=None):
     Logger.debug("create Neural Layer: \n" + \
                       "  in_size: " + str(in_size) + "\n" + \
                       "  out_size: " + str(out_size))
     # initialize weight matrix
     self.weights = np.random.uniform(-0.001, 0.001, (out_size, in_size)).astype(np.float64)
     # set biases if not already set (by child class for example)
     if not hasattr(self, 'biases'):
         self.biases = np.zeros(out_size).astype(np.float64)
     # set activation function and its derivative
     self.activation = activation_fn or NeuralLayer.activation_linear
     self.activation_deriv = activation_fn_deriv or NeuralLayer.activation_linear_deriv
     # set size and input size
     self.size = out_size
     self.in_size = in_size
Example #20
 def train_test(self, Xtrain, Ytrain, Xtest):
     Xtrain = np.array(Xtrain)
     Ytrain = np.array(Ytrain)
     Xtest = np.array(Xtest)
     tasks = []
     task_id = 0
     results = []
     l = Logger(len(self.split_keys) * len(self.model_params),
                len(self.split_keys), tag='SplitModel')
     for key in self.split_keys:
         mask_train = Xtrain[:,self.index] == key
         mask_test = Xtest[:,self.index] == key
         for model_key, model_type, model_params in self.model_params:
             task = (
                 task_id,
                 model_type, 
                 model_params, 
                 Xtrain[mask_train],
                 Ytrain[mask_train],
                 Xtest[mask_test],
                 model_key,
                 key,
             )
             results.append(tt(task))
             print (task_id, model_key, key)
             tasks.append((task_id, model_key, key))
             l.step()
             
             task_id += 1
     
     tasks = {t[0]: t[1:] for t in tasks}
     result_batches = [np.array([0.0] * len(Xtest))\
                     for i in range(len(self.model_params))]
     for result_set in results:
         task_id, Ytask = result_set
         task = tasks[task_id]
         model_key = task[-2]
         mask_test = Xtest[:,self.index] == task[-1]
         result_batches[model_key][mask_test] += Ytask
     
     Ytest = np.array([0.0] * len(Xtest))
     for (i, batch) in enumerate(result_batches):
         Ytest += batch * self.weights[i]
     if self.bias:
         Ytest += self.bias
     
     return (Ytest, result_batches)
Example #21
    def parse(self, utterance):
        '''
        Parses and returns result.

        Returns:
            RobotCommand
        '''
        rc = self.parser.parse(utterance)
        self.parse_buffer = Logger.get_buffer()
        return rc
Example #22
    def _set_world_internal(self, world_objects, robot):
        '''
        Sets the parser's world and robot; the parser may regenerate.

        This is so we can capture the log for reporting (if desired).

        Args:
            world_objects ([WorldObject])
            robot ([Robot])
        '''
        self.parser.set_world(world_objects, robot)
        self.start_buffer = Logger.get_buffer()
Example #23
    def ground(self, grounding_query):
        '''
        Grounds the provided expression with probabilities over objs.

        Args:
            grounding_query (str)

        Returns:
            {str: float}: Map of obj : P(obj).
        '''
        gprobs = self.parser.ground(grounding_query)
        self.parse_buffer = Logger.get_buffer()
        return gprobs
Example #24
    def describe(self, grab_buffer=True):
        '''
        Describes all objects in the world.

        Args:
            grab_buffer (bool, optional): Whether to grab the log buffer
                and save it as self.parse_buffer. Defaults to True.

        Returns:
            {str: str}: Map of object names to their description.
        '''
        desc = self.parser.describe()
        if grab_buffer:
            self.parse_buffer = Logger.get_buffer()
        return desc
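The four wrappers above (Examples 21 through 24) share one pattern: delegate to self.parser, then snapshot Logger.get_buffer() so the log produced by that one call can be reported later. A tiny usage sketch; the wrapper object name is hypothetical:

rc = frontend.parse('move the red block left')   # frontend is an assumed instance
print(frontend.parse_buffer)                     # log captured during that parse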
Example #25
 def learn(self, result, delta, learning_rate):
     #raw_input("press Enter")
     # apply learning rule
     #Logger.DEBUG = True
     Logger.debug("NeuralLayer:learn")
     Logger.debug("result: " + str(np.shape(result)))
     Logger.debug("delta: " + str(np.shape(delta)))# + "\nresult shape:" + str(np.shape(result)))
     delta_weights = learning_rate * np.outer(delta, result)
     #Logger.debug("delta weights shape:" + str(np.shape(delta_weights)))
     #Logger.log(str(delta_weights))
     self.weights += delta_weights
Example #26
 def feed(self, input_data):
     # calculate activation of layer for given inputs
     #Logger.DEBUG = True
     Logger.debug("NeuralLayer:feed")
     Logger.debug("input: " + str(np.shape(input_data)))
     dot = np.dot(self.weights, input_data)
     result = self.activation(dot + np.atleast_2d(self.biases).T)
     Logger.debug("output: " + str(np.shape(result)))
     #Logger.debug("weights: " + str(np.shape(self.weights)))
     #Logger.debug("dot shape: " + str(np.shape(dot)))
     #Logger.DEBUG = False
     return result
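Examples 7, 15, 25 and 26 together implement one step of backpropagation: feed() runs the forward pass, get_delta() propagates the error backwards, and learn() applies the weight update. A minimal sketch of a single training iteration for a two-layer network, assuming the NeuralLayer constructor from Example 19 and that the class defines the linear activation helpers it references (column-vector convention):

import numpy as np

layer1 = NeuralLayer(4, 8)    # hidden layer
layer2 = NeuralLayer(8, 2)    # output layer
x = np.random.rand(4, 1)      # one input column vector
target = np.zeros((2, 1))

# forward pass
h = layer1.feed(x)
y = layer2.feed(h)

# backward pass; learn() *adds* lr * outer(delta, result),
# so the output delta is target - y (negative gradient for squared error)
delta2 = target - y
delta1 = layer1.get_delta(h, delta2, layer2.weights)

# weight updates
layer2.learn(h, delta2, learning_rate=0.01)
layer1.learn(x, delta1, learning_rate=0.01)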
Example #27
 def __init__(self, env, id=None, db=None, messageid=None, row=None):
     self.env = env
     self.db = db
     self.log = Logger(env)
     
     if id is not None:
         self.resource = Resource('mailarchive', str(id), None)
         self._fetch_mail(id)
     elif messageid is not None:
         self._fetch_mail_by_messageid(messageid)
         self.resource = Resource('mailarchive', self.id, None)
     elif row is not None:
         self._fetch_mail_by_row(row)
         self.resource = Resource('mailarchive', self.id, None)
     else:
         self.messageid = ''
         self.subject = ''
         self.utcdate = 0
         self.localdate = ''
         self.zoneoffset = 0
         self.body = ''
Example #28
 def setUpClass(cls):
     Logger.init_log()
     cls.db_client = FileClient(dir=path)
     cls.db_client.connect()
Example #29
    def __init__(self, data, weights=None, norm=None, keep_invalids=False,
        minvalid=None, clean_weights=True,
        logger=None, loglevel=None, zerofill=False, **kwargs):

        # Logger
        Logger.__init__(self, logger=logger, loglevel=loglevel, **dict_filter(kwargs, 'log_'))

        # Guess data type and copy
        if cdms2_isVariable(data):
            self.array_type = 'MV2'
            self.array_mod = MV2
            data = data.clone()
        elif npy.ma.isMA(data):
            self.array_type = 'numpy.ma'
            self.array_mod = numpy.ma
            data = data.copy()
        else:
            self.array_type = 'numpy'
            data = data.copy()
            self.array_mod = numpy
        self.data = data
        self.dtype = data.dtype
        data = data.astype('d')


        # Shape
        self.shape = data.shape
        self.ndim = data.ndim
        self.nt = self.shape[0]
        self.nstot = data.size/self.nt
        self.nsdim = data.ndim-1

        # Check time axis
        if cdms2_isVariable(data) and data.getTime() is not None:
            order = data.getOrder()
            if not order.startswith('t'):
                warn('Time axis is not the first axis of input variable (order="%s")'%order)

        # Weights ?
        if weights is None or weights is False:
            if False and weights is not False and data.ndim == 3 and \
                cdms2_isVariable(data) and \
                'x' in data.getOrder() and 'y' in data.getOrder():
                import cdutil# FIXME: WARNING FALSE
                weights = cdutil.area_weights(data[0]).data.astype('d') # Geographic weights
            elif self.nstot==1:
                weights = npy.ones(1)
            else:
                weights = npy.ones(self.shape[1:])
        elif npy.ma.isMA(weights):
            weights = weights.astype('d').filled(0.)
        else:
            weights = npy.asarray(weights, dtype='d')
        if data.ndim>1 and self.shape[1:] != weights.shape:
            self.error('Weights must be of shape %s (instead of %s)'
                %(self.shape[1:],  weights.shape))

        # Store some info
        # - time
        if not cdms2_isVariable(data):
            self.taxis = data.shape[0]
        else:
            self.taxis = data.getAxis(0)
        # - others axes and attributes
        if cdms2_isVariable(data): # cdms -> ids
            self.saxes = data.getAxisList()[1:]
            self.id = data.id
            self.atts =  {}
            for att in data.listattributes():
                self.atts[att] = data.attributes[att]
            self.grid = data.getGrid()
            data = data.asma()
        else: # numpy -> length
            self.saxes = data.shape[1:]
            self.id = None
            self.atts = None
            self.grid = None
        # - missing value
        if npy.ma.isMA(data):
            self.missing_value = data.get_fill_value()
        else:
            self.missing_value = 1.e20
        # - special cases
        for att in 'long_name', 'units':
            if hasattr(data, att):
                setattr(self, att, data.attributes[att])


        # Masking nans
        nans = npy.isnan(data)
        if nans.any():
            self.warning("Masking %i NaNs"%nans.sum())
            if self.array_type == 'numpy':
                self.array_type = 'numpy.ma'
                self.array_mod = numpy.ma
                data = npy.ma.array(data, mask=nans, copy=False)
            else:
                data[nans] = npy.ma.masked
            self.data = data

        # Mask (1 means good)
        # - real good values
        bmask = npy.ma.getmaskarray(data)
        good = 1-bmask.astype('l')
        # - first from data (integrate) => 1D
        count = npy.atleast_1d(good.sum(axis=0))
        del good
        # - now remove channels where weight is zero
        if clean_weights:
            count[npy.atleast_1d(weights==0.)] = 0
        # - check number of valid data along time
        minvalid = kwargs.pop('nvalid', minvalid)
        if minvalid is not None and minvalid < 0:
            minvalid = -int(round(npy.clip(minvalid, -100., 0)*self.nt/100))
        minvalid = npy.clip(int(minvalid), 1, self.nt) if minvalid is not None else 1
        count[count<minvalid] = 0 # <minvalid -> 0
        count = npy.clip(count, 0, 1)
        # - save as 0/1
        self.ns = long(count.sum())
        self.compress = count.size != self.ns
        self.good = count>0 # points in space where there are enough data in time
        self.minvalid = self.nvalid = minvalid

        # Scale unpacked data
        if not self.good.any():
            self.warning('No valid data')
            self.norm = 1.
            self.mean = 0
        else:
            # - mean
            self.mean = data.mean(axis=0)
            # - normalisation factor
            if norm is True or norm is None:
                norm = self.data.std() # Standard norm
            elif norm is not False:
                if norm <0: # Relative norm, else strict norm
                    norm = abs(norm)*self.data.std()
            else:
                norm = 1.
            self.norm = norm
            # - apply
            self.scale(data)

        # Fill data
        # - fill with missing value or mean (0.) where possible
        if minvalid != self.nt:
#            invalids = bmask & self.good # invalids = masked data that will be analyzed
#            data[invalids] = 0. if zerofill else default_missing_value
#            data[invalids] = default_missing_value
            data[:, ~self.good] = default_missing_value
            if keep_invalids:
                self.invalids = bmask & self.good # invalids = masked data that will be analyzed
            else:
                self.invalids = None
                #del invalids
        else:
            self.invalids = None
        # - finally fill with missing values at zero
        if npy.ma.isMA(data):
            data_num = data.filled(default_missing_value)
        else:
            data_num = data

        # Pack
        # - data
        self.packed_data = self.core_pack(data_num, force2d=True)
        self.masked = npy.isclose(self.packed_data, default_missing_value).any()
        # - weights
        self.packed_weights = self.core_pack(weights)
Example #30
	def __init__(self):
		self.logger = Logger("WEB")
Example #31
    files.sort()
    pprint(files)
    
    cfiles = glob.glob(os.path.join(path, multiplefilepath + '*_initialcentroids*'))
    cfiles.sort()    
    
    plotexperiments = []    
#    points = np.zeros(()) #Use for data seen so far experiment
    numexperiments = 10
    for x in xrange(numexperiments): # experiments
    
        if x != 3:
            continue
            
        fileoutpostfix = '_mrjobmultiple_%s_%si' % (x, maxiterations)
        logger = Logger.function_logger(str(x), fileoutpostfix, logging.DEBUG, logging.INFO, logging.DEBUG)
              
        initialMeans = np.loadtxt(cfiles[x]) #separate initial file, #Deprecated
        initialcentroidsinputfile = cfiles[x]
        filenameinitcentroids = os.path.basename(initialcentroidsinputfile)

        
        oldCentroids = initialMeans #separate initial centroids
        
        numberfiles = 10
        for count in xrange(numberfiles): #files
    
            mrinputfile = files[count]
            # Use for current data set experiment
            points = np.loadtxt(mrinputfile)
                
Example #32
	def __init__(self):
		self.logger = Logger('REPORT')
Example #33
def main():
    global args
    args = parser.parse_args()

    # fix random seeds
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed_all(args.seed)
    np.random.seed(args.seed)

    # CNN
    if args.verbose:
        print('Architecture: {}'.format(args.arch))
    model = models.__dict__[args.arch](sobel=args.sobel)
    fd = int(model.top_layer.weight.size()[1])
    model.top_layer = None
    model.features = torch.nn.DataParallel(model.features)
    model.cuda()
    cudnn.benchmark = True

    # create optimizer
    optimizer = torch.optim.SGD(
        filter(lambda x: x.requires_grad, model.parameters()),
        lr=args.lr,
        momentum=args.momentum,
        weight_decay=10**args.wd,
    )

    # define loss function
    criterion = nn.CrossEntropyLoss().cuda()

    # optionally resume from a checkpoint
    if args.resume:
        if os.path.isfile(args.resume):
            print("=> loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume)
            args.start_epoch = checkpoint['epoch']
            # remove top_layer parameters from checkpoint
            # (copy the keys: deleting while iterating a dict raises in Python 3)
            for key in list(checkpoint['state_dict']):
                if 'top_layer' in key:
                    del checkpoint['state_dict'][key]
            model.load_state_dict(checkpoint['state_dict'])
            optimizer.load_state_dict(checkpoint['optimizer'])
            print("=> loaded checkpoint '{}' (epoch {})"
                  .format(args.resume, checkpoint['epoch']))
        else:
            print("=> no checkpoint found at '{}'".format(args.resume))

    # creating checkpoint repo
    exp_check = os.path.join(args.exp, 'checkpoints')
    if not os.path.isdir(exp_check):
        os.makedirs(exp_check)

    # creating cluster assignments log
    cluster_log = Logger(os.path.join(args.exp, 'clusters'))

    # preprocessing of data
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    tra = [transforms.Resize(256),
           transforms.CenterCrop(224),
           transforms.ToTensor(),
           normalize]

    # load the data
    end = time.time()
    dataset = datasets.ImageFolder(args.data, transform=transforms.Compose(tra))
    if args.verbose: print('Load dataset: {0:.2f} s'.format(time.time() - end))
    dataloader = torch.utils.data.DataLoader(dataset,
                                             batch_size=args.batch,
                                             num_workers=args.workers,
                                             pin_memory=True)

    # clustering algorithm to use
    deepcluster = clustering.__dict__[args.clustering](args.nmb_cluster)

    # training convnet with DeepCluster
    for epoch in range(args.start_epoch, args.epochs):
        end = time.time()

        # remove head
        model.top_layer = None
        model.classifier = nn.Sequential(*list(model.classifier.children())[:-1])

        # get the features for the whole dataset
        features = compute_features(dataloader, model, len(dataset))

        # cluster the features
        clustering_loss = deepcluster.cluster(features, verbose=args.verbose)

        # assign pseudo-labels
        train_dataset = clustering.cluster_assign(deepcluster.images_lists,
                                                  dataset.imgs)

        # uniformly sample per target
        sampler = UnifLabelSampler(int(args.reassign * len(train_dataset)),
                                   deepcluster.images_lists)

        train_dataloader = torch.utils.data.DataLoader(
            train_dataset,
            batch_size=args.batch,
            num_workers=args.workers,
            sampler=sampler,
            pin_memory=True,
        )

        # set last fully connected layer
        mlp = list(model.classifier.children())
        mlp.append(nn.ReLU(inplace=True).cuda())
        model.classifier = nn.Sequential(*mlp)
        model.top_layer = nn.Linear(fd, len(deepcluster.images_lists))
        model.top_layer.weight.data.normal_(0, 0.01)
        model.top_layer.bias.data.zero_()
        model.top_layer.cuda()

        # train network with clusters as pseudo-labels
        end = time.time()
        loss = train(train_dataloader, model, criterion, optimizer, epoch)

        # print log
        if args.verbose:
            print('###### Epoch [{0}] ###### \n'
                  'Time: {1:.3f} s\n'
                  'Clustering loss: {2:.3f} \n'
                  'ConvNet loss: {3:.3f}'
                  .format(epoch, time.time() - end, clustering_loss, loss))
            try:
                nmi = normalized_mutual_info_score(
                    clustering.arrange_clustering(deepcluster.images_lists),
                    clustering.arrange_clustering(cluster_log.data[-1])
                )
                print('NMI against previous assignment: {0:.3f}'.format(nmi))
            except IndexError:
                pass
            print('####################### \n')
        # save running checkpoint
        torch.save({'epoch': epoch + 1,
                    'arch': args.arch,
                    'state_dict': model.state_dict(),
                    'optimizer' : optimizer.state_dict()},
                   os.path.join(args.exp, 'checkpoint.pth.tar'))

        # save cluster assignments
        cluster_log.log(deepcluster.images_lists)
Example #34
class Mail(object):
    """model for the Mail."""
    
    id_is_valid = staticmethod(lambda num: 0 < int(num) <= 1L << 31)
    
    def __init__(self, env, id=None, db=None, messageid=None, row=None):
        self.env = env
        self.db = db
        self.log = Logger(env)
        
        if id is not None:
            self.resource = Resource('mailarchive', str(id), None)
            self._fetch_mail(id)
        elif messageid is not None:
            self._fetch_mail_by_messageid(messageid)
            self.resource = Resource('mailarchive', self.id, None)
        elif row is not None:
            self._fetch_mail_by_row(row)
            self.resource = Resource('mailarchive', self.id, None)
        else:
            self.messageid = ''
            self.subject = ''
            self.utcdate = 0
            self.localdate = ''
            self.zoneoffset = 0
            self.body = ''
        
    def __eq__(self, other):
        if isinstance(other, Mail):
            return self.messageid == other.messageid
        return NotImplemented
        
    def _get_db(self):
        if self.db:
            return self.db
        else:
            return self.env.get_db_cnx()

    def _get_db_for_write(self):
        if self.db:
            return (self.db, False)
        else:
            return (self.env.get_db_cnx(), True)
        
    def get_sanitized_fromaddr(self):
        return self.fromaddr.replace('@',
                                     self.env.config.get('mailarchive',
                                                         'replaceat', '@'))
        
    def get_fromtext(self):
        return get_author(self.fromname, self.fromaddr) 
        
    def get_category(self):
        yearmonth = time.strftime("%Y%m", time.gmtime(self.utcdate))
        category = self.mlid + yearmonth
        return category.encode('utf-8')
        
    def get_plain_body(self):
        return self._sanitize(self.env, self.body)
    
    def get_html_body(self, req):
        
        # for HTML Mail
        if self.body.lstrip().startswith('<'):
            return Markup(self.body)
        
        contentlines = self.body.splitlines()
        htmllines = ['',]
        
        #customize!
        #http://d.hatena.ne.jp/ohgui/20090604/1244114483
        wikimode = req.args.get('wikimode', 'on')
        for line in contentlines:
            if self.env.config.get('mailarchive', 'wikiview', 'enabled') == 'enabled' and wikimode == 'on':
                htmllines.append(wiki_to_oneliner(line, self.env, self.db, False, False, req))
            else:
                htmllines.append(Markup(Markup().escape(line).replace(' ','&nbsp;')))
            
        content = Markup('<br/>').join(htmllines)
        return content
        
    def _sanitize(self, env, text):
        return text.replace('@', env.config.get('mailarchive', 'replaceat','_at_') )
    
    def _fetch_mail(self, id):
        row = None
        if self.id_is_valid(id):
            db = self._get_db()
            cursor = db.cursor()
            cursor.execute(SELECT_FROM_MAILARC + " WHERE id=%s", (id,))

            row = cursor.fetchone()
        if not row:
            raise ResourceNotFound('Mail %s does not exist.' % id,
                                   'Invalid Mail Number')

        self._fetch_mail_by_row(row)
    
    def _fetch_mail_by_messageid(self, messageid):
        row = None

        db = self._get_db()
        cursor = db.cursor()
        cursor.execute(SELECT_FROM_MAILARC + " WHERE messageid=%s",
                        (messageid,))

        row = cursor.fetchone()
        if not row:
            raise ResourceNotFound('Mail messageid %s does not exist.' % messageid,
                                   'Invalid Mail messageid Number')

        self._fetch_mail_by_row(row)
        
    def _fetch_mail_by_row(self, row):
        self.id = row[0]
        self.messageid = row[1]
        self.utcdate = row[2]
        self.zoneoffset = row[3]
        self.subject = row[4]
        self.fromname = row[5]
        self.fromaddr = row[6]
        self.header = row[7]
        self.body = row[8]
        self.thread_root = row[9]
        self.thread_parent = row[10]
        
        self.zone = self._to_zone(self.zoneoffset)
        self.localdate = self._to_localdate(self.utcdate, self.zoneoffset)
        
    def _to_localdate(self, utcdate, zoneoffset):
        return time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(utcdate + zoneoffset))

    def _to_zone(self, zoneoffset):
        #zone and date
        zone = ''
        if zoneoffset == '':
            zoneoffset = 0
        if zoneoffset > 0:
            zone = ' +' + time.strftime('%H%M', time.gmtime(zoneoffset))
        elif zoneoffset < 0:
            zone = ' -' + time.strftime('%H%M', time.gmtime(-1 * zoneoffset))
        return zone
                
    def get_href(self, req):
        return req.href.mailarchive(self.id)
    
    def get_subject(self):
        if is_empty(self.subject):
            return '(no subject)'
        else:
            return self.subject
    
    def get_senddate(self):
        return self.localdate + self.zone
    
    def get_thread_root(self):
        if self.thread_root == '':
            return self
        try:
            root_mail = Mail(self.env, messageid=self.thread_root)
        except ResourceNotFound:
            return self
        
        # In the original version, self.thread_root points at the parent mail, not the root.
        # For compatibility, when it is not the actual root, run our own lookup logic.
        if root_mail.thread_root == '':
            return root_mail
        else:
            if self.thread_parent != '':
                root_id = MailFinder.find_root_id(self.env, self.messageid)
                return Mail(self.env, messageid=root_id)
    
    def get_thread_parent_id(self):
        if self.thread_parent != '':
            return self.thread_parent.split(' ')[0]
        return None
    
    def get_thread_parent(self):
        if self.thread_parent != '':
            return Mail(self.env, db=self.db, messageid=self.get_thread_parent_id())
        return self
    
    def get_children(self, desc=False, cached_mails=None):
        if cached_mails:
            self.log.debug("[%s] mail's threads is cached." % self.id)
            return [x for x in cached_mails if x.get_thread_parent_id() == self.messageid]
            
        db = self._get_db()
        cursor = db.cursor()
        sql = SELECT_FROM_MAILARC + " WHERE threadparent LIKE %s ORDER BY utcdate"
        
        if desc:
            sql += " DESC"
        
        cursor.execute(sql, ('%s %%' % self.messageid,))
        
        children = []
        
        for row in cursor:
            child_mail = Mail(self.env, row=row, db=self.db)
            children.append(child_mail)
        return children
    
    def get_thread_mails(self, desc=False):
        root = self.get_thread_root()
        
        db = self._get_db()
        cursor = db.cursor()
        sql = SELECT_FROM_MAILARC + " WHERE threadroot = %s ORDER BY utcdate"
        
        if desc:
            sql += " DESC"
        
        cursor.execute(sql, (root.messageid,))
        mails = []
        for row in cursor:
            mails.append(Mail(self.env, row=row, db=self.db))
        return mails
    
    def has_children(self, cached_mails=None):
        rtn = len(self.get_children(cached_mails=cached_mails)) > 0
        return rtn 

    def get_related_tickets(self, req):
        db = self._get_db()
        return get_related_tickets(self.env, req, db, self.id)
    
    def has_attachments(self, req):
        attachment = MailArchiveAttachment(self.env, self.id)
        return attachment.has_attachments(req)

    def populate(self, author, msg, mlid):
        """Populate the mail with 'suitable' values from a message"""
        
        if 'message-id' not in msg:
            raise ValueError('Illegal Format Mail!')
        
        self.is_new_mail = False
        self.mlid = mlid

        self._parse_messageid(msg)
        self._parse_date(msg)
        self._parse_subject(msg)
        
        if msg.is_multipart():
            self._parse_multipart(author, msg)
        else:
            self._parse_body(msg)

        ref_messageid = self._parse_reference(msg)
        self._make_thread(ref_messageid)
        
    def update_or_save(self):
        if self.messageid is None or self.messageid == '':
            raise "Can't save mail to database."
        
        db, has_tran = self._get_db_for_write()
        cursor = db.cursor()

        yearmonth = time.strftime("%Y%m", time.gmtime(self.utcdate))
        category = self.mlid + yearmonth
        cursor.execute("SELECT category, mlid, yearmonth, count FROM mailarc_category WHERE category=%s",
                        (category.encode('utf-8'),))
        row = cursor.fetchone()
        count = 0
        if row:
            count = row[3]
        else:
            cursor.execute("INSERT INTO mailarc_category (category, mlid, yearmonth, count) VALUES(%s, %s, %s, %s)",
                            (category.encode('utf-8'),
                             self.mlid.encode('utf-8'),
                             yearmonth,
                             0))
        if self.is_new_mail:
            count = count + 1
        cursor.execute("UPDATE mailarc_category SET count=%s WHERE category=%s",
            (count, category.encode('utf-8')))

        # insert or update mailarc

        #self.log.debug(
        #    "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)" %(str(id),
        #    category.encode('utf-8'),
        #    messageid,
        #     utcdate,
        #      zoneoffset,
        #     subject.encode('utf-8'), fromname.encode('utf-8'),
        #     fromaddr.encode('utf-8'),'','',
        #     thread_root,thread_parent))
        cursor.execute("DELETE FROM mailarc where messageid=%s",
                       (self.messageid,))

        cursor.execute("INSERT INTO mailarc ("
            "id, category, messageid, utcdate, zoneoffset, subject,"
            "fromname, fromaddr, header, text, threadroot, threadparent) "
            "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
            (str(self.id),
            category.encode('utf-8'),
            self.messageid,
            self.utcdate,
            self.zoneoffset,
            self.subject.encode('utf-8'), self.fromname.encode('utf-8'),
            self.fromaddr.encode('utf-8'), '', self.body.encode('utf-8'),
            self.thread_root, self.thread_parent))

        if has_tran:
            db.commit()

    def _parse_messageid(self, msg):
        self.messageid = msg['message-id'].strip('<>')

        #check messageid is unique
        self.log.debug("Creating new mailarc '%s'" % 'mailarc')
        
        db = self._get_db()
        cursor = db.cursor()
        cursor.execute("SELECT id from mailarc WHERE messageid=%s", (self.messageid,))
        row = cursor.fetchone()
        id = None
        if row:
            id = row[0]
            
        if id is None or id == "":
            # why? get_last_id returns 0 at first.
            #id = db.get_last_id(cursor, 'mailarc')
            self.is_new_mail = True
            cursor.execute("SELECT Max(id)+1 as id from mailarc")
            row = cursor.fetchone()
            if row and row[0] is not None:
                id = row[0]
            else:
                id = 1
        self.id = int(id) # Because id might be 'n.0', int() is called.

    def _parse_date(self, msg):
        # Defaults in case the mail carries no Date header
        zone = ''
        zoneoffset = 0
        utcdate = 0
        if 'date' in msg:
            datetuple_tz = email.Utils.parsedate_tz(msg['date'])
            localdate = calendar.timegm(datetuple_tz[:9]) #toDB
            zoneoffset = datetuple_tz[9] # toDB
            utcdate = localdate - zoneoffset # toDB
            #make zone ( +HHMM or -HHMM
            zone = ''
            if zoneoffset > 0:
                zone = '+' + time.strftime('%H%M', time.gmtime(zoneoffset))
            elif zoneoffset < 0:
                zone = '-' + time.strftime('%H%M', time.gmtime(-1 * zoneoffset))
            #self.log.debug( time.strftime("%y/%m/%d %H:%M:%S %z",datetuple_tz[:9]))
            
            self.log.debug(time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(utcdate)))
            self.log.debug(time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(localdate)))
            self.log.debug(zone)
        
        fromname, fromaddr = email.Utils.parseaddr(msg['from'])
        
        self.fromname = self._decode_to_unicode(fromname)
        self.fromaddr = self._decode_to_unicode(fromaddr)
        self.zone = zone
        self.utcdate = utcdate
        self.zoneoffset = zoneoffset
        self.localdate = self._to_localdate(utcdate, zoneoffset)
        
        self.log.info('  ' + self.localdate + ' ' + zone +' '+ fromaddr)
        
    def _parse_subject(self, msg):
        if 'subject' in msg:
            self.subject = self._decode_to_unicode(msg['subject'])
            
    def _parse_reference(self, msg):
        # make thread information
        ref_messageid = ''
        if 'in-reply-to' in msg:
            ref_messageid = ref_messageid + msg['In-Reply-To'] + ' '
            self.log.debug('In-Reply-To:%s' % ref_messageid )

        if 'references' in msg:
            ref_messageid = ref_messageid + msg['References'] + ' '

        m = re.findall(r'<(.+?)>', ref_messageid)
        ref_messageid = ''
        for text in m:
            ref_messageid = ref_messageid + "'%s'," % text
            
        ref_messageid = ref_messageid.strip(',')
        
        self.log.debug('RefMessage-ID:%s' % ref_messageid)
        
        return ref_messageid

    def _parse_multipart(self, author, msg):
        body = ''
        file_counter = 1  # counter lives outside the loop so generated filenames stay unique
        # delete all attachments for this message-id
        Attachment.delete_all(self.env, 'mailarchive', self.id, self.db)

        for part in msg.walk():
            content_type = part.get_content_type()
            self.log.debug('Content-Type:' + content_type)

            if content_type == 'multipart/mixed':
                pass
            
            elif content_type == 'text/html' and not self._is_file(part):
                if body != '':
                    body += "\n------------------------------\n\n"
                    
                body = part.get_payload(decode=True)
                charset = part.get_content_charset()
                
                self.log.debug('charset:' + str(charset))
                # Todo:need try
                if charset != None:
                    body = self._to_unicode(body, charset)
                
            elif content_type == 'text/plain' and not self._is_file(part):
                #body = part.get_payload(decode=True)
                if body != '':
                    body += "\n------------------------------\n\n"
                    
                current_body = part.get_payload(decode=True)
                charset = part.get_content_charset()
                
                self.log.debug('charset:' + str(charset))
                # Todo:need try
                if charset != None:
                    #body = self._to_unicode(body, charset)
                    body += self._to_unicode(current_body, charset)
                else:
                    body += current_body
                
            elif part.get_payload(decode=True) is None:
                pass
            
            # file attachment
            else:
                self.log.debug(part.get_content_type())
                # get filename
                # Applications should really sanitize the given filename so that an
                # email message can't be used to overwrite important files
                
                filename = self._get_filename(part)
                if not filename:
                    import mimetypes
                    
                    ext = mimetypes.guess_extension(part.get_content_type())
                    if not ext:
                        # Use a generic bag-of-bits extension
                        ext = '.bin'
                    filename = 'part-%03d%s' % (file_counter, ext)
                    file_counter += 1

                self.log.debug("filename:" + filename.encode(OUTPUT_ENCODING))

                # make attachment
                tmp = os.tmpfile()
                tempsize = len(part.get_payload(decode=True))
                tmp.write(part.get_payload(decode=True))

                tmp.flush()
                tmp.seek(0,0)

                attachment = Attachment(self.env, 'mailarchive', self.id)

                attachment.description = '' # req.args.get('description', '')
                attachment.author = author #req.args.get('author', '')
                attachment.ipnr = '127.0.0.1'

                try:
                    attachment.insert(filename,
                            tmp, tempsize, None, self.db)
                except Exception, e:
                    try:
                        ext = filename.split('.')[-1]
                        if ext == filename:
                            ext = '.bin'
                        else:
                            ext = '.' + ext
                        filename = 'part-%03d%s' % (file_counter, ext)
                        file_counter += 1
                        attachment.description += ', Original FileName: %s' % filename
                        attachment.insert(filename,
                                tmp, tempsize, None, self.db)
                        self.log.warn('As name is too long, the attached file is renamed : ' + filename)

                    except Exception, e:
                        self.log.error('Exception at attach file of Message-ID:' + self.messageid)
                        traceback.print_exc(e)

                tmp.close()
Example #35
File: watcher.py Project: rozap/arb
class Watcher(object):

    exchanges = {}

    def __init__(self, settings):
        self.set_settings(settings)
        self.L = Logger(settings)
        self.L.log('Setup watcher for %s' % (self.exchange_names), 'info')
        self.load_exchanges(settings)


    def set_settings(self, settings):
        self.trade_threshold = settings['trade_threshold']
        if self.trade_threshold <= 0:
            raise ValueError('settings variable trade_threshold must be above 0!')
        self.exchange_names = settings['exchanges']
        self.poll_interval = settings['poll_interval']



    def load_exchanges(self, settings):
        c_name = '%sExchange'

        modules = zip(self.exchange_names, [__import__('src.exchanges', fromlist=[str(c_name%e)]) for e in self.exchange_names])
        exchange_classes = [(e, getattr(module, c_name % e)) for e, module in modules]
        for name, klass in exchange_classes:
            self.exchanges[name] = klass(settings)
        
        self.L.log('Loaded exchanges %s' % self.exchanges, 'info')



    def find_trade(self):
        buys = [(name, exch.buy_price()) for name, exch in self.exchanges.iteritems()]
        sells = [(name, exch.sell_price()) for name, exch in self.exchanges.iteritems()]

        # find the minimum buy and the maximum sell price
        min_buy = min(buys, key=lambda x: x[1])
        max_sell = max(sells, key=lambda x: x[1])

        if max_sell[1] - min_buy[1] > self.trade_threshold:
            self.L.log('Possible Trade opportunity:', 'info')
            self.L.log('Buy from %s @ %s and sell to %s @ %s' % (min_buy + max_sell), 'info')
        else:
            self.L.log('No trading opportunity', 'info')
            self.L.log('Min buy from %s @ %s | Max sell to %s @ %s' % (min_buy + max_sell), 'info')

    def watch(self):
        while True:
            self.find_trade()
            time.sleep(self.poll_interval)
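A hedged instantiation sketch: the settings keys below are exactly the ones set_settings() reads; the exchange names are illustrative and would each need a matching <Name>Exchange class in src.exchanges:

settings = {
    'trade_threshold': 0.5,          # must be > 0
    'exchanges': ['Foo', 'Bar'],     # hypothetical exchange names
    'poll_interval': 10,             # seconds between polls
}
watcher = Watcher(settings)
watcher.watch()                      # polls forever, calling find_trade() each cycle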
Example #36
File: watcher.py Project: rozap/arb
 def __init__(self, settings):
     self.set_settings(settings)
     self.L = Logger(settings)
     self.L.log('Setup watcher for %s' % (self.exchange_names), 'info')
     self.load_exchanges(settings)
        # Delete a row from the table
        self.db_client.delete(
            table_name,
            "k=2")

        # Fetch the whole table
        row = self.db_client.select(table=table_name)
        self.assertEqual(len(row), 2)
        self.assertEqual(row[0][0], 1)
        self.assertEqual(row[0][1], "20161026")
        self.assertEqual(row[0][2], "10:00:00.000000")
        self.assertEqual(row[0][3], 'AbC')
        self.assertEqual(row[0][4], 10.3)
        self.assertEqual(row[1][0], 3)
        self.assertEqual(row[1][1], "20161026")
        self.assertEqual(row[1][2], "10:00:02.000000")
        self.assertEqual(row[1][3], 'Efgh')
        self.assertEqual(row[1][4], 10.5)

        # Delete remaining rows from the table
        self.db_client.delete(table_name)
        # Fetch the whole table
        row = self.db_client.select(table=table_name)
        self.assertEqual(len(row), 0)

if __name__ == '__main__':
    Logger.init_log()
    unittest.main()