def __init__(self, mode, lambd, mu, theta, servers_count, core_servers_count,
             L, H, simulation_time, max_queue_size, is_debug):
    """Set up the queueing-system simulation state.

    Stores the arrival/service parameters, builds the request flow, the
    bounded queue and the server pool, and zeroes every statistic the
    simulation accumulates while it runs.
    """
    # Model parameters.
    self.mode = mode
    self.lambd = lambd
    self.mu = mu
    self.theta = theta
    self.servers_count = int(servers_count)
    self.core_servers_count = int(core_servers_count)
    self.L = int(L)
    self.H = int(H)
    self.simulation_time = simulation_time

    # Debug behaviour: step-by-step confirmation only while debugging.
    self.is_debug = is_debug
    self.auto_continue = not self.is_debug

    # Collaborating components.
    self.flow = Flow(lambd, mu, is_debug)
    self.queue = Queue(int(max_queue_size), is_debug)
    self.generated_request = Request(-1, 0, 0, 0)

    # Current / previous system state.
    self.system_state = States.IDLE
    self.prev_system_state = States.IDLE

    # Accumulated statistics, all starting from zero.
    self.served_count = 0
    self.served_sum_w = 0
    self.served_sum_wq = 0
    self.generated_requests = []
    self.time = 0
    self.prev_time = 0
    self.up_down_time = 0
    self.prev_up_down_time = 0
    self.up_down_count = 0
    self.up_down_mean = 0
    self.state_time = dict.fromkeys(States.get_States_list(States), 0)
    self.state_count = dict.fromkeys(States.get_States_list(States), 0)

    # The first `core_servers_count` servers are marked as core servers.
    self.servers = [Server(idx, idx < self.core_servers_count, is_debug)
                    for idx in range(self.servers_count)]
def __init__(self, urllist, visits, min, max):
    """Initialise the bot pool and load the target URL list.

    Args:
        urllist: path to a newline-separated file of target URLs.
        visits: total number of visits to distribute.
        min, max: bounds (coerced to int) used by the visit scheduler.

    Exits the interpreter with an error message if the URL list file
    is missing or unreadable.
    """
    self.bots = 3  # max amount of bots to use; can be changed at later time; atm used for sorted amount
    self.count = 0  # returning bots
    self.ip = None
    self.alive = True
    self.targets = {}  # {url: visits}
    self.recentIPs = Queue(10)
    self.min = int(min)
    self.max = int(max)
    self.visits = int(visits)
    if not path.exists(urllist):
        exit('Error: Unable to locate `{}`'.format(urllist))
    # read the url list
    with open(urllist, 'r') as f:
        try:
            for url in [_ for _ in f.read().split('\n') if _]:
                self.targets[url] = 0  # initial view
        except Exception as err:
            # BUG FIX: exit('Error:', err) passed two arguments to exit(),
            # which accepts only one and raised TypeError instead of
            # printing the message. Format the message into one string.
            exit('Error: {}'.format(err))
def __init__(self, urllist, visits, min, max):
    """Initialise the bot pool and load the target URL list.

    Args:
        urllist: path to a newline-separated file of target URLs.
        visits: total number of visits to distribute.
        min, max: bounds (coerced to int) used by the visit scheduler.

    Exits the interpreter with an error message if the URL list file
    is missing or unreadable.
    """
    super().__init__()
    self.bots = 2  # max amount of bots to use
    self.count = 0  # returning bots
    self.ip = None
    self.alive = True
    self.targets = {}  # {url: visits}
    self.recentIPs = Queue(5)
    self.min = int(min)
    self.max = int(max)
    self.visits = int(visits)
    if not path.exists(urllist):
        # FIX: f'...{format(urllist)}' wrapped the value in a redundant
        # format() call; plain interpolation yields the identical string.
        exit(f'Error: Unable to locate {urllist}')
    # read the url list
    with open(urllist, 'r') as f:
        try:
            for url in [_ for _ in f.read().split('\n') if _]:
                self.targets[url] = 0  # initial view
        except Exception as err:
            exit(f'Error: ? {err}')
def test_init(cls): queue = Queue() assert len(queue) == 0
import pytest

from core.queue import Queue, DequeQueue, EmptyQueueError

# All queue implementations under test. Tests construct an instance per
# case: the previous version built shared Queue()/DequeQueue() objects at
# collection time, so state mutated by one test leaked into the next.
QUEUE_CLASSES = [Queue, DequeQueue]


@pytest.mark.parametrize("cls", QUEUE_CLASSES)
def test_init(cls):
    """A freshly constructed queue is empty.

    BUG FIX: `Queue()` was hard-coded here, ignoring the parametrized
    class, so DequeQueue was never exercised by this test.
    """
    queue = cls()
    assert len(queue) == 0


@pytest.mark.parametrize("cls", QUEUE_CLASSES)
def test_dequeue_empty(cls):
    """Dequeuing an empty queue raises EmptyQueueError."""
    with pytest.raises(EmptyQueueError):
        cls().dequeue()


@pytest.mark.parametrize("cls", QUEUE_CLASSES)
def test_peek_empty(cls):
    """Peeking at an empty queue raises EmptyQueueError."""
    with pytest.raises(EmptyQueueError):
        cls().peek()


@pytest.mark.parametrize("cls", QUEUE_CLASSES)
def test_queue(cls):
    """FIFO order: peek shows the oldest element, dequeue removes it."""
    queue = cls()
    queue.enqueue(1)
    queue.enqueue(2)
    assert queue.peek() == 1
    assert queue.dequeue() == 1
def setUp(self):
    """Build an UploadManager on a fresh queue and queue one test package."""
    self.manager = UploadManager(Queue())
    #disable scheduler
    # NOTE(review): the comment above says "disable scheduler" but the next
    # line sets `scheduling = True` — confirm against UploadManager whether
    # `False` was intended here; the comment and the code contradict each
    # other.
    self.manager.scheduling = True
    self.manager.add_package(PACKAGE)
args = parser.parse_args()

print("CoffinBot v{} Ctrl+C to stop".format(consts.version))

# Comments/messages replied to during this pass; marked read in one batch.
mark_read = []
failure_counter = 1  # 1 by default since it is the wait timer multiplier

# Queue mode
# NOTE(review): when --queue is given q stays None ("Normal") and the Queue
# is built otherwise — confirm this flag isn't inverted; can't tell from
# this excerpt what args.queue is documented to mean.
if args.queue:  # Normal
    q = None
else:  # Use the queue
    from core.queue import Queue
    q = Queue()

# Main polling loop: drain unread inbox items and dispatch comments.
while True:
    try:
        failure = False
        if mark_read:
            # Needed to clear after a Reddit disconnection error
            reddit.inbox.mark_read(mark_read)
            mark_read.clear()
        # for all unread messages
        for message in reddit.inbox.unread():
            # for all unread comments
            if message.was_comment:
                result = None
                # username mentions are simple
                if message.subject == "username mention":
                    # NOTE(review): source is truncated mid-call in this
                    # view; the remaining arguments are not visible here.
                    result = process_comment(reddit,
def setUp(self):
    """Create a fresh queue pre-loaded with the default file list."""
    queue = Queue()
    queue.add_package(FILE_LIST)
    self.queue = queue