def test_OpenCloseSingle(self):
        """Write 1 item, close, reopen checking if same item is there"""

        q = Queue(self.path)
        q.put('var1')
        del q
        q = Queue(self.path)
        self.assertEqual(1, q.qsize())
        self.assertEqual('var1', q.get())
        q.task_done()
Exemplo n.º 2
0
    def __init__(self, config):
        """Read rotation settings from *config* and set up the work queue."""
        self.paths = config['paths']

        # Permission mode arrives as an octal string (e.g. '0644').
        self.mode = int(config['mode'], 8)
        self.user = config['user']
        self.group = config['group']

        # FIXME: Handle rotated files keeping correctly
        # self.keep_files = int(config['rotate'])
        self.compress = config['compress']

        self.copy = config['copy']
        self.copytohdfs = config['copytohdfs']
        self.hdfs_config = config['hdfs']
        # Only build an HDFS client when an 'hdfs' section is configured.
        self.hdfs_client = (hdfs.InsecureClient(**self.hdfs_config)
                            if self.hdfs_config else None)

        self.dateformat = config['dateformat']
        self.now = datetime.datetime.now()
        # Timestamp used to name rotated files.
        self.timestamp = self.now.strftime(self.dateformat)
        self.destext = config['destext']

        self.fnformat = config['fnformat']
        if not self.fnformat:
            raise ValueError("'fnformat' cannot be empty")

        self.sharedscripts = config['sharedscripts']
        self.prerotates = config['prerotate']
        self.postrotates = config['postrotate']

        # Disk-backed queue of pending rotation work.
        self.queuepath = config['queuepath']
        self.queue_chunksize = 1000
        self.queue_block_timeout = 30
        self.queue = Queue(self.queuepath, self.queue_chunksize)
    def test_PartialWrite(self):
        """Recover cleanly after a simulated crash mid-write."""

        queue = Queue(self.path)
        for idx in range(100):
            queue.put('var%d' % idx)
        del queue
        # Simulate a crash by appending a stray pickle to the head chunk file.
        with open(os.path.join(self.path, 'q00000'), 'ab') as fh:
            pickle.dump('文字化け', fh)
        queue = Queue(self.path)
        self.assertEqual(queue.qsize(), 100)
        for idx in range(100):
            self.assertEqual(queue.get(), 'var%d' % idx)
            queue.task_done()
        with self.assertRaises(Empty):
            queue.get_nowait()
    def test_ClearOldFile(self):
        """put until reaching chunksize, then get without calling task_done"""
        queue = Queue(self.path, chunksize=10)
        for _ in range(15):
            queue.put('var1')

        # Read past the first chunk boundary without acknowledging anything.
        for _ in range(11):
            queue.get()

        # Unacknowledged reads roll back on reopen: all 15 items remain.
        queue = Queue(self.path, chunksize=10)
        self.assertEqual(queue.qsize(), 15)

        for _ in range(11):
            queue.get()
            queue.task_done()
        self.assertEqual(queue.qsize(), 4)
    def test_GarbageOnHead(self):
        """Adds garbage to the queue head and let the internal integrity
        checks fix it"""

        queue = Queue(self.path)
        queue.put('var1')
        del queue

        # Corrupt the head chunk file with trailing junk bytes.
        with open(os.path.join(self.path, 'q00001'), 'a') as fh:
            fh.write('garbage')

        queue = Queue(self.path)
        queue.put('var2')

        self.assertEqual(queue.qsize(), 2)
        self.assertEqual(queue.get(), 'var1')
        queue.task_done()
    def test_OpenCloseOneHundred(self):
        """Write 1000 items, close, reopen checking if all items are there"""

        queue = Queue(self.path)
        for idx in range(1000):
            queue.put('var%d' % idx)
        del queue
        queue = Queue(self.path)
        self.assertEqual(queue.qsize(), 1000)
        for idx in range(1000):
            self.assertEqual(queue.get(), 'var%d' % idx)
            queue.task_done()
        with self.assertRaises(Empty):
            queue.get_nowait()
        # The emptied queue must still accept and serve fresh items.
        queue.put('foobar')
        queue.get()
    def queue_persistence(self, queue, tdir, qdir):
        """Round-trip helper: fill *queue*, reopen it from disk, drain it."""
        self.assertEqual(queue.qsize(), 0)
        for fake_request in self.fake_requests:
            queue.put(fake_request)
        self.assertEqual(queue.qsize(), len(self.fake_requests))

        # Drop the handle, then reopen from the same directories.
        del queue
        reopened = Queue(qdir, tempdir=tdir)

        self.assertEqual(reopened.qsize(), len(self.fake_requests))
        for _ in self.fake_requests:
            reopened.get()
        self.assertEqual(reopened.qsize(), 0)
Exemplo n.º 8
0
 def __open(self):
     """Create the on-disk cache directory (if missing) and open the task queue."""
     cache_dir_name = "tinklad-cache" + "/" + self.name
     # On-device builds have /data; otherwise fall back to the working dir.
     if os.path.exists("/data"):
         cache_path = "/data/" + cache_dir_name
         temp_dir = "/data/local/tmp"
     else: # Development environment:
         cache_path = "./" + cache_dir_name
         temp_dir = "./"
     if not os.path.exists(cache_path):
         os.makedirs(cache_path)
     self.task_queue = Queue(cache_path, tempdir=temp_dir)
    def test_RandomReadWrite(self):
        """Test random read/write"""

        queue = Queue(self.path)
        pending = 0
        for _ in range(1000):
            if random.random() < 0.5:
                # Consumer turn: pop when something is pending, else expect Empty.
                if pending:
                    queue.get_nowait()
                    queue.task_done()
                    pending -= 1
                else:
                    with self.assertRaises(Empty):
                        queue.get_nowait()
            else:
                queue.put('var%d' % random.getrandbits(16))
                pending += 1
    def test_MultiThreaded(self):
        """Create consumer and producer threads, check parallelism"""

        queue = Queue(self.path)

        def producer():
            for idx in range(1000):
                queue.put('var%d' % idx)

        def consumer():
            for _ in range(1000):
                queue.get()
                queue.task_done()

        # Consumer first, as in the original scenario; get() blocks until
        # the producer supplies items.
        threads = [Thread(target=consumer), Thread(target=producer)]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        with self.assertRaises(Empty):
            queue.get_nowait()
Exemplo n.º 11
0
    def __init__(self):
        """Set up the main-window UI, acquisition parameters, sockets and the
        disk-backed receive queue, then draw the initial (empty) plot."""
        super().__init__()
        self.ui=Ui_MainWindow()
        self.ui.setupUi(self)

        #self.Ctl_timer = QTimer()
        ##self.Ctl_timer.setSingleShot(True)
        #self.Ctl_timer.setInterval(70)
        #self.Ctl_timer.timeout.connect(self.Ctl_loop)
        #self.Ctl_timer.start()
        
        # N samples per transfer; fmt_r is presumably a struct format of
        # N 'Q' (unsigned 64-bit) fields — TODO confirm against the reader.
        self.N=1024*8
        self.fmt_r='Q'*self.N

        # Instrument endpoint: separate send and receive ports.
        self.ip="192.168.1.2"
        self.port_s=8889
        self.port_r=8888
        self.Int=500 #unit us
        self.Scan=5000000 #unit ms, setting trigger
        self.Sim=50 #unit ns, setting simulation
        self.counter=0
        #UNIT and DATA initiation
        self._socket_s = None
        self._socket_t = None
        self._unit_connected_to = None
        self._unit_connected= False
        # Queue rooted at 'tmp' buffering incoming data; capacity is two
        # full transfers (2*N).
        self.q=Queue('tmp',maxsize=self.N*2)
        #self.q=Queue('tmp')
        self.para_changed() 
        self.threadpool = QThreadPool()

        # Single dotted trace; axis limits are placeholders until data arrives.
        self.line,=self.ui.widget.canvas.ax.plot([1],[1],'b.',markersize=0.3)
        self.ui.widget.canvas.ax.set_ylim(-1,1)
        self.ui.widget.canvas.ax.set_xlim(0,100)

        self.ui.widget.canvas.draw()
        self.ui.statusbar.showMessage(f"Software started")
Exemplo n.º 12
0
 def __init__(self, config):
     """Read log-rotation settings from *config* and prepare the work queue."""
     self.config = config
     self.dateformat = config['dateformat']
     # Number of rotated files to keep.
     self.keep_files = int(config['rotate'])
     self.now = datetime.datetime.now()
     # Date extension appended to rotated file names.
     self.dateext = self.now.strftime(self.dateformat)
     self.mode = config['mode']
     self.compress = config['compress']
     self.user = config['user']
     self.group = config['group']
     self.sharedscripts = config['sharedscripts']
     self.destext = config['destext']
     self.copy = config['copy']
     self.copytohdfs = config['copytohdfs']
     self.prerotates = config['prerotate']
     self.postrotates = config['postrotate']
     self.hdfs_config = config['hdfs']
     self.queuepath = config['queuepath']
     self.queue_chunksize = 1000
     self.queue_block_timeout = 30
     # Disk-backed queue of pending rotation work.
     self.queue = Queue(self.queuepath, self.queue_chunksize)
     # Connect to HDFS only when an 'hdfs' section is configured.
     self.client = hdfs.InsecureClient(**self.hdfs_config) if self.hdfs_config else None
Exemplo n.º 13
0
import common

# Service port and API key; the key is resolved from the environment first,
# then from the project's secret store.
PORT = os.environ.get("PORT", "5151")
API_KEY = os.environ.get("API_KEY")
if not API_KEY:
    API_KEY = common.get_secret("API_KEY")
if not API_KEY:
    # NOTE(review): hardcoded fallback API key — a security risk if this
    # service is publicly reachable; confirm it is test-only and rotate it.
    API_KEY = "h/52y/E7cm8Ih4F3cVdlBM4ZQxER+Apk6P0L7yR0lFU="
TEMP_DIR, OQ_DIR, CQ_DIR = "queue_temp", "oqueue", "cqueue"
LB_TEMP_DIR, LBQ_DIR = "lb_temp", "lbqueue"

# Ensure every queue/temp directory exists before the queues are opened.
for d in [TEMP_DIR, OQ_DIR, CQ_DIR, LB_TEMP_DIR, LBQ_DIR]:
    if not os.path.exists(d):
        os.makedirs(d)

# Disk-backed work queues: originality and concordance share one tempdir;
# the leaderboard queue uses its own.
originality_queue = Queue(OQ_DIR, tempdir=TEMP_DIR)
concordance_queue = Queue(CQ_DIR, tempdir=TEMP_DIR)
leaderboard_queue = Queue(LBQ_DIR, tempdir=LB_TEMP_DIR)


@route('/', method='POST')
def queue_for_scoring():
    """Receives a submission and authenticates that the request has a valid API key.

    Once authenticated the submission request is then queued to the leaderboard_queue and later checked for concordance and originality.

    """
    # NOTE(review): the body below appears truncated in this excerpt — it only
    # extracts fields from the request JSON; the authentication and enqueue
    # steps described in the docstring are not visible here.
    # NOTE(review): the local name 'json' shadows the json module.
    json = request.json
    submission_id = json["submission_id"]
    api_key = json["api_key"]
 def _test_concordance_queue(self):
     """Verify the concordance queue survives a close/reopen cycle."""
     with tempfile.TemporaryDirectory() as temp_dir, \
             tempfile.TemporaryDirectory() as concordance_dir:
         queue = Queue(concordance_dir, tempdir=temp_dir)
         self.queue_persistence(queue, temp_dir, concordance_dir)
 def _test_originality_queue(self):
     """Verify the originality queue survives a close/reopen cycle."""
     with tempfile.TemporaryDirectory() as temp_dir, \
             tempfile.TemporaryDirectory() as originality_dir:
         queue = Queue(originality_dir, tempdir=temp_dir)
         self.queue_persistence(queue, temp_dir, originality_dir)
Exemplo n.º 16
0
    def __init__(self, site):
        '''
        (Crawler, str) -> Crawler
        creates a Crawler with a given origin_url

        Builds a bloom filter of already-seen URLs (seeded from a per-site
        ignore file) and a disk-backed frontier queue primed with the site's
        start URL. NOTE(review): this is Python 2 code (unicode(),
        buffering=False, bytes line decode).
        '''
        self.site = site
        self.filters = site.referringsitefilter_set.all()
        self.domain = urlparse(site.url).netloc
        # http://alexeyvishnevsky.com/2013/11/tips-on-optimizing-scrapy-for-a-high-performance/
        # fork of pybloom: https://github.com/joseph-fox/python-bloomfilter
        self.ignore_filter = ScalableBloomFilter(initial_capacity=10000000,
                                                 error_rate=0.00001)
        ignore_filter_dir = '../ignore_filter/'
        if not os.path.exists(ignore_filter_dir):
            os.makedirs(ignore_filter_dir)
            # NOTE(review): re-creates the filter already built above — redundant.
            self.ignore_filter = ScalableBloomFilter(initial_capacity=10000000,
                                                     error_rate=0.00001)
            try:
                f = open(
                    '../ignore_filter/' + self.site.name + '_ignore_file.txt',
                    'r+')
                # NOTE(review): writes the filter OBJECT, not serialized bytes;
                # presumably ignore_filter.tofile(f) was intended — confirm.
                f.write(self.ignore_filter)
            except IOError:
                # File did not exist yet; create it empty.
                f = open(
                    '../ignore_filter/' + self.site.name + '_ignore_file.txt',
                    'w+')
            f.close()
        else:
            # Make sure the per-site ignore file exists before reading it.
            if (not (os.path.exists('../ignore_filter/' + self.site.name +
                                    '_ignore_file.txt'))):
                f = open(
                    '../ignore_filter/' + self.site.name + '_ignore_file.txt',
                    'w+')
                f.close()

            # Seed the bloom filter with previously-seen entries, one per line.
            with open('../ignore_filter/' + self.site.name +
                      '_ignore_file.txt',
                      'r+',
                      buffering=False) as ignore_filter_file:
                try:
                    for line in ignore_filter_file:
                        self.ignore_filter.add(line.decode('utf8').rstrip())
                except Exception as e:
                    # Best-effort seeding; log and continue with what we have.
                    logging.info(str(e))
            # NOTE(review): redundant — the with-block already closed the file.
            ignore_filter_file.close()
        self.visited_count = 0

        tmpqueuetmp_dir = '../tmpqueue/tmp/'
        if not os.path.exists(tmpqueuetmp_dir):
            os.makedirs(tmpqueuetmp_dir)

        # Per-site frontier-queue directory named after the slugified site name.
        slugified_name = slugify(unicode(site.name))
        tmpqueue_dir = '../tmpqueue/{}'.format(slugified_name)
        if not os.path.exists(tmpqueue_dir):
            os.makedirs(tmpqueue_dir)

        # Disk-backed queue of URLs still to visit.
        self.to_visit = Queue(tmpqueue_dir, tempdir=tmpqueuetmp_dir)

        # Initial url
        # Shallow crawls track a depth counter alongside the URL.
        if (self.site.is_shallow == False):
            self.to_visit.put(site.url)
        else:
            self.to_visit.put((site.url, str(0)))

        # Limit
        self.limit = common.get_config()["crawler"]["limit"]
        # Specifies how deep the shallow crawler should go; "1" is the lowest option for this
        self.level = common.get_config()["crawler"]["level"]
        """
Exemplo n.º 17
0
# Shared state used by the sender/receiver loops below.
g_data = " "
g_cnt = 0
g_dtime = 0
g_sr = 0
g_dt = 0

# Available disk space at startup (block size * blocks available), in bytes.
g_disk = os.statvfs("/")
g_first_avail = g_disk.f_bsize * g_disk.f_bavail

count = 0

# Send buffer (string) and receive buffers (lists).
s_buff = ''
r_buff = list()
room_buff = list()

# Disk-backed queue rooted at ./buffer used to hand data between threads.
q = Queue("buffer")

# REST api (based on http)
firebase = firebase.FirebaseApplication('https://pnu-dubleve.firebaseio.com')


class NumpyEncoder(json.JSONEncoder):
    """JSON encoder that serializes numpy arrays as plain lists."""

    def default(self, obj):
        # Only ndarrays get special treatment; everything else defers to the
        # base class (which raises TypeError for unserializable objects).
        if not isinstance(obj, np.ndarray):
            return json.JSONEncoder.default(self, obj)
        return obj.tolist()


def Sender():
    global sr
    while True:
Exemplo n.º 18
0
# Shared state used by the Sender thread below.
g_cnt = 0
g_dtime = 0
g_sr = 0
g_dt = 0

# Available disk space at startup (block size * blocks available), in bytes.
g_disk = os.statvfs("/")
g_first_avail = g_disk.f_bsize * g_disk.f_bavail

#fname = "data"+str(dt.datetime.now())+".txt"
#f=open(fname, "w")

# Send buffer (string) and receive buffers (lists).
s_buff = ''
r_buff = list()
room_buff = list()

# Disk-backed queue rooted at ./Buffer used to hand data between threads.
q = Queue("Buffer")

firebase = firebase.FirebaseApplication('https://pnu-dubleve.firebaseio.com')


class NumpyEncoder(json.JSONEncoder):
    """Extend the stock JSON encoder to understand numpy ndarrays."""

    def default(self, obj):
        # Arrays become nested Python lists; anything else falls through to
        # the base implementation (which raises TypeError if unsupported).
        return (obj.tolist() if isinstance(obj, np.ndarray)
                else json.JSONEncoder.default(self, obj))


class Sender(threading.Thread):
    def run(self):

        global sr