Example 1
class DataTests(unittest.TestCase):
    def setUp(self) -> None:
        self.parser = Parser()

    def test_row_length(self):
        for row in self.parser.answers:
            self.assertTrue(self.parser.correct_row_length(row),
                            'Incorrect row length: ' + row)

    def test_question_number(self):
        i = 1
        for row in self.parser.answers:
            self.assertEqual(self.parser.question_order(row), i,
                             'Missing answer number: ' + str(i))
            i += 1

    def test_answer_is_digit(self):
        for row in self.parser.answers:
            for number in row.split(';'):
                self.assertTrue(self.parser.is_number(number),
                                'Answer is not number: ' + repr(number))

    def test_number_is_correct_choice(self):
        for row in self.parser.answers:
            for number in row.split(';'):
                if self.parser.is_number(number):
                    self.assertIn(
                        int(number), [0, 1, 2, 3, 4, 5, 6, 7, 99],
                        'Number ' + number + ' is not correct answer')
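The tests above touch only four Parser methods. As a reading aid, here is a minimal stub that would satisfy that interface; the attribute and method names are taken from the test, while the row format (semicolon-separated fields, question number first) is purely an assumption:

class Parser:
    """Hypothetical stub of the Parser exercised by DataTests; every detail is assumed."""

    def __init__(self):
        # Assumption: answers is a list of semicolon-separated rows such as "1;0;2;99"
        self.answers = []

    def correct_row_length(self, row):
        # Assumption: a valid row has exactly ten semicolon-separated fields
        return len(row.split(';')) == 10

    def question_order(self, row):
        # Assumption: the question number precedes the first ';'
        return int(row.split(';', 1)[0])

    def is_number(self, value):
        return value.strip().isdigit()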
Example 2
class ParserTest(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(ParserTest, self).__init__(*args, **kwargs)
        self.parser = Parser()

    @data(('Sat May 04 15:00:33 +0000 2019', '3:00 PM -  4 May 2019'),
          ('', ''))
    @unpack
    def test_parse_date(self, input_date, formatted_date):
        self.assertEqual(self.parser.parse_date(input_date), formatted_date)

    def test_tweet_result(self):
        with open('testdata/search.json', encoding='utf-8') as f:
            tweet_data = json.load(f)
        with open('testdata/user_tweet.json', encoding='utf-8') as f:
            user_data = json.load(f)

        tweet_outcome = self.parser.parse_tweets_result(tweet_data, 'search')
        user_outcome = self.parser.parse_tweets_result(user_data, 'user')

        self.assertEqual(tweet_outcome[0]['account']['fullname'],
                         'Twitter Dev')
        self.assertEqual(tweet_outcome[1]['likes'], 27)

        self.assertEqual(user_outcome[0]['retweets'], 284)
        self.assertEqual(user_outcome[1]['hashtags'], [])
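The @data/@unpack decorators used above are not part of unittest; they match the API of the ddt package, which also requires decorating the test class itself. A sketch of the imports and class decorator this test presumably relies on (an assumption, the snippet does not show them):

import json
import unittest

from ddt import ddt, data, unpack
# plus the project's own Parser import (assumption)

@ddt                 # required so @data/@unpack expand into individual test cases
class ParserTest(unittest.TestCase):
    ...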
Example 3
 def __init__(self, data, handler):
     self.handler = handler
     self.data = data
     self.parser = Parser()
     self.scheduler = sched.scheduler(time.time, time.sleep)
     self.thread = self.create_thread()
     self.kill = False
     self.port = None
     self.table = None
     self.interval = 1
Example 4
 def run(self):
     result_x = [[] for _ in range(self.max_track_size)]
     result_y = [[] for _ in range(self.max_track_size)]
     result_z = [[] for _ in range(self.max_track_size)]
     for data in self.datas:
         data = data[1].values.tolist()
         parser = Parser(data)
         x_ = parser.parse_x(self.horses_stats)
         y_ = parser.parse_y()
         z_ = parser.parse_z()
         if len(x_.shape) == 2 and x_.shape[1] == 9:
             result_x[len(data)].append(x_)
             result_y[len(data)].append(y_)
             result_z[len(data)].append(z_)
     return result_x, result_y, result_z
Example 5
    def testParseBytesToBytes(self):
        # b'DB\x04\x00\x00\x00\x00\x00\x00\x00ABCD1997'
        parser = Parser(BYTES_ENCODED)
        parser_test_function(self,
                             parser=parser,
                             code="D",
                             dtype="B",
                             length=4,
                             payload=b'ABCD',
                             remaining=0)

        head = Head(code=parser.code,
                    dtype=parser.dtype,
                    length=parser.length,
                    remaining=parser.remaining)
        head_test_function(self,
                           head=head,
                           code="D",
                           dtype="B",
                           length=4,
                           remaining=0)

        payload = Payload(data=parser.payload,
                          dtype=parser.dtype,
                          length=parser.length)
        payload_test_function(self,
                              payload=payload,
                              data=b'ABCD',
                              dtype="B",
                              length=4)
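Judging from the byte string in the comment, the frames decoded in this group of tests appear to consist of a code byte, a dtype byte, an 8-byte block carrying length and remaining, the payload, and a trailing end-of-package marker (1997). A rough decode sketch under that assumption; the field widths and endianness are guesses, not taken from the source:

import struct

def decode_frame(stream: bytes):
    # Hypothetical layout: code (1 byte) | dtype (1 byte) |
    # length (uint32 LE) | remaining (uint32 LE) | payload | b'1997' end-of-package
    code, dtype, length, remaining = struct.unpack_from('<ccII', stream, 0)
    payload = stream[10:10 + length]
    eop = stream[10 + length:10 + length + 4]
    return code.decode(), dtype.decode(), length, remaining, payload, eop

# Applied to the commented bytes from testParseBytesToBytes:
print(decode_frame(b'DB\x04\x00\x00\x00\x00\x00\x00\x00ABCD1997'))
# -> ('D', 'B', 4, 0, b'ABCD', b'1997')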
Example 6
    def testParseBytesToInt(self):
        parser = Parser(INT_ENCODED)
        parser_test_function(self,
                             parser=parser,
                             code="D",
                             dtype="I",
                             length=2,
                             payload=257,
                             remaining=256)

        head = Head(code=parser.code,
                    dtype=parser.dtype,
                    length=parser.length,
                    remaining=parser.remaining)
        head_test_function(self,
                           head=head,
                           code="D",
                           dtype="I",
                           length=2,
                           remaining=256)

        payload = Payload(data=parser.payload,
                          dtype=parser.dtype,
                          length=parser.length)
        payload_test_function(self,
                              payload=payload,
                              data=257,
                              dtype="I",
                              length=2)
Example 7
    def testParseBytesToStr(self):
        # b'DS\x04\x00\x00\x00\x00\x00\x00\x00ABCD1997'
        parser = Parser(data=STR_ENCODED)
        parser_test_function(self,
                             parser=parser,
                             code="D",
                             dtype="S",
                             length=4,
                             payload="ABCD",
                             remaining=0)

        head = Head(code=parser.code,
                    dtype=parser.dtype,
                    length=parser.length,
                    remaining=parser.remaining)
        head_test_function(self,
                           head=head,
                           code="D",
                           dtype="S",
                           length=4,
                           remaining=0)

        payload = Payload(data=parser.payload,
                          dtype=parser.dtype,
                          length=parser.length)
        payload_test_function(self,
                              payload=payload,
                              data="ABCD",
                              dtype="S",
                              length=4)
Example 8
 def testParseHandshakeToBytes(self):
     parser = Parser(HANDSHAKE_ENCODED)
     parser_test_function(self,
                          parser=parser,
                          code="H",
                          dtype="B",
                          length=0,
                          payload=b"",
                          remaining=0)
     head = Head(code=parser.code,
                 dtype=parser.dtype,
                 length=parser.length,
                 remaining=parser.remaining)
     head_test_function(self,
                        head=head,
                        code="H",
                        dtype="B",
                        length=0,
                        remaining=0)
     payload = Payload(data=parser.payload,
                       dtype=parser.dtype,
                       length=parser.length)
     payload_test_function(self,
                           payload=payload,
                           data=b"",
                           dtype="B",
                           length=0)
Example 9
def user_tweets(username, limit=30):
    """
    Get user tweets. Get the list of tweets that the user has on his feed in JSON format.
    Optional parameters:
        limit: integer, specifies the number of tweets to retrieve, the default should be 30

    """

    # Return if Limit is Not in the range of 0 to 200
    if limit > 200 or limit < 0:
        return {"Message": "Please Provide a valid limit between 0-200"}, 422

    # Instantiate the TwitterAPI client with the configured credentials
    try:
        api = TwitterAPI(consumer_key=CONSUMER_KEY,
                         consumer_secret=CONSUMER_SECRET)
    except TwitterError as e:
        # Return 401 if the given credentials cannot be authenticated
        return {"Message": e.message}, 401

    # Query parameters for the Twitter API
    query = {'screen_name': username, 'count': limit}
    try:
        x = api.GetTweets(query)
    except TwitterError as e:
        return {"Message": e.message}, 400

    return Parser.parse_tweets_result(x, 'user'), 200
Example 10
def search_tweets(hashtag, limit=30):
    """
    Get tweets by a hashtag. Get the list of tweets with the given hashtag.
    Optional parameters:
        limit: integer, specifies the number of tweets to retrieve, the default should be 30
    """

    # Return if Limit is Not in the range of 0 to 200
    if limit > 200 or limit < 0:
        return {"Message": "Please Provide a valid limit between 0-200"}, 422

    # Instantiate the TwitterAPI client with the configured credentials
    try:
        api = TwitterAPI(consumer_key=CONSUMER_KEY,
                         consumer_secret=CONSUMER_SECRET)
    except TwitterError as e:
        # Return 401 if the given credentials cannot be authenticated
        return {"Message": e.message}, 401

    # Query parameters for the Twitter API
    query = {'q': '#{}'.format(hashtag), 'count': limit}
    try:
        x = api.GetSearch(query)
    except TwitterError as e:
        return {"Message": e.message}, 400

    return Parser.parse_tweets_result(x, 'search'), 200
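Both handlers return a (body, status_code) pair, the shape Flask and Connexion view functions accept. A minimal, hypothetical wiring example (the route paths and the jsonify wrapping are assumptions, not from the source):

from flask import Flask, jsonify, request

app = Flask(__name__)

@app.route('/users/<username>/tweets')
def user_tweets_route(username):
    # user_tweets already returns (body, status_code); jsonify only the body
    body, status = user_tweets(username, limit=int(request.args.get('limit', 30)))
    return jsonify(body), status

@app.route('/hashtags/<hashtag>/tweets')
def search_tweets_route(hashtag):
    body, status = search_tweets(hashtag, limit=int(request.args.get('limit', 30)))
    return jsonify(body), status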
Example 11
class Connection:

    def __init__(self, data, handler):
        self.handler = handler
        self.data = data
        self.parser = Parser()
        self.scheduler = sched.scheduler(time.time, time.sleep)
        self.thread = self.create_thread()
        self.kill = False
        self.port = None
        self.table = None
        self.interval = 1

    def establish_connection(self):
        if self.port is not None:
            self.port.close()
        ports = serial.tools.list_ports.comports()
        for device in ports:
            if device.vid is None or device.pid is None:
                continue
            if hex(device.vid) == '0x10c4' and hex(device.pid) == '0xea60':
                try:
                    self.port = serial.Serial(device.device, 2400, timeout=None, parity=serial.PARITY_NONE, rtscts=1)
                    return True
                except serial.serialutil.SerialException:
                    self.handler.info()
                    return False
        return False

    def start_measurement(self):
        self.kill = False
        self.scheduler.enter(self.interval, 1, self.get_data)
        self.scheduler.run()

    def get_data(self):
        data_string = None
        try:
            data_string = ""
            while len(data_string) != 14:
                self.port.reset_input_buffer()
                data_string = self.port.read_until(b'\n')
        except serial.serialutil.SerialException:
            self.handler.window.cont_measurement = False
        if self.kill:
            self.handler.window.cont_measurement = False
            return
        self.scheduler.enter(self.interval, 1, self.get_data)
        try:
            # print("device", data_string)
            correct = self.data.insert_value(self.parser.parse(data_string))
            if not correct:
                self.handler.window.cont_measurement = False
                return
            last_value = self.data.values[-1]
            self.table.add(last_value[0], last_value[1][0])
        except (ValueError, TypeError):
            self.handler.window.cont_measurement = False

    def create_thread(self):
        return threading.Thread(target=self.start_measurement)
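The vendor/product IDs checked in establish_connection (0x10c4/0xea60) identify a Silicon Labs CP210x USB-UART bridge. A plausible way to drive the class is sketched below; the argument names and the results_table widget are assumptions:

def run_measurement(data, handler, results_table):
    # Hypothetical driver for Connection; all argument names are placeholders.
    conn = Connection(data, handler)
    conn.table = results_table       # widget that receives (timestamp, value) rows
    conn.interval = 1                # poll the device once per second
    if not conn.establish_connection():
        return None                  # no matching CP210x adapter found
    conn.thread.start()              # start_measurement() runs on the background thread
    return conn                      # caller sets conn.kill = True to stop polling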
Example 12
 def testParseBytesToError(self):
     parser = Parser(ERROR_ENCODED)
     parser_test_function(self,
                          parser=parser,
                          code="E",
                          dtype="B",
                          length=0,
                          payload=b"",
                          remaining=0)
     head = Head(code=parser.code,
                 dtype=parser.dtype,
                 length=parser.length,
                 remaining=parser.remaining)
     head_test_function(self, head, "E", "B", 0, 0,
                        b'EB\x00\x00\x00\x00\x00\x00\x00\x00')
Example 13
 def testParseCorruptedStreamToBytes(self):
     parser = Parser(CORRUPTED_STREAM)
     parser_test_function(self,
                          parser=parser,
                          code="E",
                          dtype="B",
                          length=0,
                          payload=b"",
                          remaining=0)
     head = Head(code=parser.code,
                 dtype=parser.dtype,
                 length=parser.length,
                 remaining=parser.remaining)
     head_test_function(self, head, "E", "B", 0, 0,
                        b'EB\x00\x00\x00\x00\x00\x00\x00\x00')
Example 14
 def __init__(self,
              content: Union[bytes, str, int] = b'',
              code: str = "default",
              remaining: int = 0,
              encoded: bytes = None):
     if encoded:
         self.parser = Parser(encoded)
         self.head = Head(code=self.parser.code,
                          dtype=self.parser.dtype,
                          length=self.parser.length,
                          remaining=self.parser.remaining)
         self.payload = Payload(data=self.parser.payload,
                                dtype=self.parser.dtype,
                                length=self.parser.length)
         self.end_of_package = self.parser.eop
     else:
         self.payload = Payload(content)
         self.head = Head(code=code,
                          dtype=self.payload.dtype,
                          length=self.payload.length,
                          remaining=remaining)
         self.end_of_package = self.get_end_of_package()
     self.encoded = self.get_encoded()
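Both branches end by computing self.encoded, so the class should round-trip through its encoded form. A hypothetical round trip (the class name Package and the attribute accesses are assumptions; the fragment only shows __init__):

outgoing = Package(content=b'ABCD', code='D')     # encode path
incoming = Package(encoded=outgoing.encoded)      # decode path via Parser

assert incoming.head.code == 'D'
assert incoming.payload.data == b'ABCD'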
Example 15
def rescan_dat():
    Parser().rescan_data()
Example 16
def get_factor_by_year(name, year):
    factor: DataFrame = Parser().data[name]
    return factor[year]
Example 17
learning_rate_info['staircase'] = True

## loss operations
loss_op = tf.losses.sparse_softmax_cross_entropy
one_hot = False
loss_op_kwargs = None

## optimizers
optimizer = tf.train.AdamOptimizer
optimizer_kwargs = None

image_height = 32
image_width = 32
image_channel = 3

class_numbers = 10

checkpoint_path = './checkpoints'
model_checkpoint_path = join(checkpoint_path, 'model.ckpt')
prior_weights = None
train_summary_path = join(checkpoint_path, 'train')
valid_summary_path = join(checkpoint_path, 'valid')

train_tfrecords = '/opt/ml_data/cifar/cifar-10/train.tfrecords'
valid_tfrecords = '/opt/ml_data/cifar/cifar-10/valid.tfrecords'

## information for parsing the tfrecord
features = {'image': tf.FixedLenFeature([], tf.string),
            'label': tf.FixedLenFeature([], tf.int64)}
train_parser = Parser(features, True, image_height, image_width)
valid_parser = Parser(features, False, image_height, image_width)
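Assuming the Parser instances are callable (or expose an equivalent parse method) that maps a serialized tf.train.Example to (image, label), the input pipeline built on top of this configuration presumably looks roughly like the sketch below; the batch size and shuffle buffer are arbitrary:

def make_dataset(tfrecords_path, parser, batch_size=64):
    # Assumption: parser(serialized_example) -> (image, label)
    dataset = tf.data.TFRecordDataset(tfrecords_path)
    dataset = dataset.map(parser)
    dataset = dataset.shuffle(1000).batch(batch_size)
    return dataset

train_dataset = make_dataset(train_tfrecords, train_parser)
valid_dataset = make_dataset(valid_tfrecords, valid_parser)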
Example 18
        final_inst = list(sorted(self.pipeline.completed, key=lambda x: x.result["IF"]))

        for inst in final_inst:
            result = inst.print().ljust(25," ") + \
                str(inst.result["IF"]).ljust(10," ") + \
                str(inst.result["ID"]).ljust(10," ") + \
                str(inst.result["EX"]).ljust(10," ") + \
                str(inst.result["MEM"]).ljust(10," ") + \
                str(inst.result["WB"]).ljust(10," ") + \
                str(inst.hazards["raw"]).ljust(10," ") + \
                str(inst.hazards["war"]).ljust(10," ") + \
                str(inst.hazards["waw"]).ljust(10," ") + \
                str(inst.hazards["struct"]).ljust(10," ")
            print(result, "\n")

        print("Total number of access requests for instruction cache:", self.pipeline.access_count)
        print("Number of instruction cache hits:", self.pipeline.hit_count, "\n")
        print("Total number of access requests for data cache:", self.pipeline.dcache.access_count)
        print("Number of data cache hits:", self.pipeline.dcache.hit_count)

parser = Parser(sys.argv[1],sys.argv[2],sys.argv[3],sys.argv[4])

# parser = Parser("inst.txt","data.txt","reg.txt","config.txt")
# parser = Parser("test_cases/test_case_1/inst.txt","test_cases/test_case_1/data.txt","test_cases/test_case_1/reg.txt","test_cases/test_case_1/config.txt")
# parser = Parser("test_cases/test_case_2/inst.txt","test_cases/test_case_2/data.txt","test_cases/test_case_2/reg.txt","test_cases/test_case_2/config.txt")
# parser = Parser("test_cases/test_case_3/inst.txt","test_cases/test_case_3/data.txt","test_cases/test_case_3/reg.txt","test_cases/test_case_3/config.txt")

mips_sim = Simulation()
mips_sim.set_parser_data(parser)
mips_sim.run(300)
Example 19
image_width  = 32
image_channel  = 3

class_numbers = 10

#model storage

model_checkpoint_path = join( checkpoint_path, 'model.ckpt')
train_summary_path = join( checkpoint_path, 'train' )
valid_summary_path = join( checkpoint_path, 'valid' )


## data loading
root_path = '/home/rachakra/few_shot_learning/prepare_data/output'
train_tfrecords = join( root_path, 'train_siamese_pair_{0}.tfrecords'.format( sample_per_class ) )
valid_tfrecords = join( root_path, 'siamese_pair_valid.tfrecords' )

## information for parsing the tfrecord

features = {'image_one': tf.FixedLenFeature([], tf.string),
            'image_two': tf.FixedLenFeature([], tf.string),
            'label': tf.FixedLenFeature([], tf.int64)}

train_parser = Parser( features, image_height, image_width, True )
valid_parser = Parser( features, image_height, image_width, False )


##test files
training_list_file = 'train_raw_list_{0}.txt'.format( sample_per_class )
class_valid_list = join( root_path, 'class_valid.pickle' )
Example 20
 def __init__(self, *args, **kwargs):
     super(ParserTest, self).__init__(*args, **kwargs)
     self.parser = Parser()
Example 21
from data_parser import Parser

parser = Parser()
s1, s2, s3 = parser.load_features("text.txt")
Example 22
            return {}, 0


print("==============   Jobs Start    ==============")
api_key = os.environ.get("SARAMIN_KEY")
count = 0
page = 0
res, length = load_data(api_key, page)
total = int(res["jobs"]["total"])
print("total : {} jobs exist".format(total))
while length:
    for i in range(length):
        # progress bar
        progress_bar(i + count, total, 20)
        # initialize the parser for this job entry
        parser = Parser(res["jobs"]["job"][i])
        # scrape the company and job records
        parser.scrap_company()
        parser.scrap_job()
    page += 1
    count += length
    if length == 100:
        res, length = load_data(api_key, page)
        total = int(res["jobs"]["total"])
    else:
        break

print("\n==============   Jobs Finish   ==============")
print("==============  Routes Start   ==============")
Route.create_routes()
print("\n==============  Routes Finish  ==============")
Example 23
 def setUp(self) -> None:
     self.parser = Parser()
Example 24
import argparse
from preprocess import Preprocess
from data_parser import Parser
import pickle


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Prepare data with features and labels')

    parser.add_argument('-i', type=str, help='data file to prepare')
    parser.add_argument('-o', type=str, help='output file')
    parser.add_argument('-v', type=str, default='train.vocab', help='.vocab file for data parser')
    args = parser.parse_args()

    pp = Preprocess()
    dataset = pp.process(args.i, True)  # preprocess the data file.
    parser = Parser(dataset)
    feature_label_list = parser.process_and_get_features(dataset)  # extract tokenized features
    with open(args.o, 'w') as file:
        for feature, label in feature_label_list:
            str_to_write = ' '.join([parser.idx2token[f] for f in feature])
            str_to_write += '\t' + parser.idx2action[label]
            file.write(str_to_write + '\n')
    with open(args.v, 'wb') as file2:
        pickle.dump(parser, file2)
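The parser pickled to args.v can later be reloaded to reuse the same vocabulary and action mappings; a minimal sketch, assuming the script's default train.vocab file name:

import pickle

with open('train.vocab', 'rb') as f:
    parser = pickle.load(f)

# idx2token / idx2action are now available for decoding model predictions
print(len(parser.idx2token), len(parser.idx2action))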