Example #1
    def __init__(self,
                 durable=False,
                 logdir='logs',
                 message=None,
                 display_only=False,
                 consumer_cfg=None,
                 publisher_cfg=None,
                 label=None):
        self.durable = durable
        self.label = 'pulse-build-translator-%s' % (label
                                                    or socket.gethostname())
        self.logdir = logdir
        self.message = message
        self.display_only = display_only
        self.consumer_cfg = consumer_cfg
        self.publisher_cfg = publisher_cfg

        if not os.access(self.logdir, os.F_OK):
            os.mkdir(self.logdir)

        self.bad_pulse_msg_logger = self.get_logger('BadPulseMessage',
                                                    'bad_pulse_message.log')

        self.error_logger = self.get_logger('ErrorLog',
                                            'error.log',
                                            stderr=True)

        loghandler_error_logger = self.get_logger('LogHandlerErrorLog',
                                                  'log_handler_error.log',
                                                  stderr=True)
        self.loghandler = LogHandler(loghandler_error_logger,
                                     self.publisher_cfg)
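A minimal usage sketch for the constructor above, pieced together from the `start()` and `display_only` handling shown in the fuller examples below (the file name is hypothetical):

    # Hypothetical: replay a saved pulse message without publishing anything
    translator = PulseBuildbotTranslator(message='test_message.json',
                                         display_only=True)
    translator.start()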
Example #2
 def __init__(self):
     self.client_logger = LogHandler(
         log_type=LogHandler.LOG_CLIENTCONNECTION)
     self.progress_logger = LogHandler(log_type=LogHandler.LOG_FILEPROGRESS)
     self.list_logger = LogHandler(log_type=LogHandler.LOG_FILELIST)
Example #3
    def open_server(self, name=""):

        self.logger = LogHandler(log_type=LogHandler.LOG_SERVER)

        try:
            server_hash = name + ':' + ''.join(
                random.SystemRandom().choice(string.ascii_uppercase +
                                             string.digits +
                                             string.ascii_lowercase)
                for _ in range(11))
            self.logger.write_to_log(hash=server_hash, override=False)
            self.server_name[0] = server_hash
        except ServerNameSet:
            self.server_name[0] = self.logger.log_instance.get_hash()

        self.run = True
        self.close_run = True
        self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR,
                                      1)
        self.server_socket.bind(('', 10031))

        self.listener_thread = threading.Thread(
            target=self.listen_for_connections)
        self.listener_thread.start()

        self.closer_thread = threading.Thread(
            target=self.close_finished_connections)
        self.closer_thread.start()
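A quick sanity check on the identifier generated above: eleven draws by `random.SystemRandom` (a CSPRNG) from the 62-symbol alphanumeric alphabet carry roughly 65 bits of entropy:

    import math
    # 62 possible symbols per position, 11 positions
    print(11 * math.log2(62))   # ~65.5 bits in the server hash suffix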
Example #4
    def __init__(self, parent, client):
        super().__init__(parent)
        self.client = client

        root_layout = QVBoxLayout()

        list_logger = LogHandler(LogHandler.LOG_FILELIST)
        prog_logger = LogHandler(LogHandler.LOG_FILEPROGRESS)

        file_progress = prog_logger.log_instance.get_outstanding_files(
            self.client.current_client[1])
        file_list = list_logger.log_instance.get_file_list(
            self.client.current_client[1])

        if file_progress or file_list:
            dlg = YorNDialog(
                self,
                "There are files that have not yet been copied\nWould you like to copy them?"
            )
            if dlg.get_response():

                if file_progress:
                    print(file_progress)

                if file_list:
                    print(file_list)

            else:
                print("Deleting old logs")

        self.roots = self.client.get_roots()
        root_list = self.roots.split('/::/')

        for root in root_list:
            """name_layout = QHBoxLayout()
            icon_widget = QLabel()
            if root.split('-')[0] == '0':
                logo_path = "res/file_logo.png"
            else:
                logo_path = "res/folder_logo.png"
            icon_widget.setPixmap(QPixmap(logo_path).scaled(20, 20))
            icon_widget.show()
            name_layout.addWidget(icon_widget)
            root_label = QLabel(root.split('/')[1])
            root_label.setAccessibleName(root)
            name_layout.addWidget(root_label)
            name_layout.addStretch(1)"""
            name_layout = FileEntryLabel(root, self)
            root_layout.addLayout(name_layout)

        root_layout.addStretch(1)

        self.setLayout(root_layout)
Example #5
    def __init__(self, durable=False, logdir='logs', message=None,
                 display_only=False, consumer_cfg=None, publisher_cfg=None,
                 label=None):
        self.durable = durable
        self.label = 'pulse-build-translator-%s' % (label or
                                                    socket.gethostname())
        self.logdir = logdir
        self.message = message
        self.display_only = display_only
        self.consumer_cfg = consumer_cfg
        self.publisher_cfg = publisher_cfg

        if not os.access(self.logdir, os.F_OK):
            os.mkdir(self.logdir)

        self.bad_pulse_msg_logger = self.get_logger('BadPulseMessage',
                                                    'bad_pulse_message.log')

        self.error_logger = self.get_logger('ErrorLog',
                                            'error.log',
                                            stderr=True)

        loghandler_error_logger = self.get_logger('LogHandlerErrorLog',
                                                  'log_handler_error.log',
                                                  stderr=True)
        self.loghandler = LogHandler(loghandler_error_logger,
                                     self.publisher_cfg)
Example #6
    def add(self, filepath, pos, parsers, inode, device, output, name,
            retention):
        directory = os.path.dirname(filepath)
        if directory not in self._event_handlers:
            self._event_handlers[directory] = LogHandler()

        self._event_handlers[directory].add_file(filepath, pos, parsers, inode,
                                                 device, output, name,
                                                 retention)
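A hedged usage sketch for `add`, assuming a hypothetical `watcher` instance of the owning class (all argument values are illustrative):

    # First file in a directory creates that directory's LogHandler ...
    watcher.add('/var/log/app/app.log', pos=0, parsers=[], inode=1234,
                device=2049, output=None, name='app', retention=7)
    # ... a second file in the same directory reuses it.
    watcher.add('/var/log/app/audit.log', pos=0, parsers=[], inode=1235,
                device=2049, output=None, name='audit', retention=7)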
Example #7
    def __init__(self,
                 node_id,
                 config_vector,
                 tx_power,
                 pkt_length,
                 rate,
                 gw_id='gateway_under_test'):
        """
        Worker normal: its main duty is to command nodes to send packets under a specific configuration
        :param node_id: The string by which the node will be identified
        :param config_vector: The vector indicating the probability of choosing one specific config
        :param tx_power: The transmission power to use.
        :param pkt_length: The length of the packets to be sent.
        :param rate: The rate at which packets should be sent.
        :param gw_id: The gateway id, if only packets from that gateway should be listened for
        """

        print("[Normal] Initializing node handler for node:", node_id)
        self.node_id = node_id
        self.last_counter = -1
        self.hits = list()
        self.snrs = list()
        self.gw_id = gw_id
        self.force_update = False
        self.hash = None

        self.g_tx_power = tx_power
        self.config_vector = config_vector
        self.g_pkt_length = pkt_length
        self.g_rate = float(rate)

        assert abs(self.config_vector.sum() - 1) < 1e-3
        assert np.all(self.config_vector <= 1)
        assert np.all(self.config_vector >= 0)
        assert self.g_tx_power in range(2, 15)
        assert 1 <= self.g_pkt_length <= 220
        assert 0 <= self.g_rate <= 1

        self.lh = LogHandler('log_normal_{}.csv'.format(self.node_id))
        self.lh.append_msg(self.node_id + " Base config: " + str(rate) + " " +
                           str(config_vector))
        self.am_done = False
Example #8
class ClientManager:
    clients = []
    busy_clients = []
    transfer_threads = {}
    inactive_threads = []
    current_client = None
    client_logger = None
    progress_logger = None
    list_logger = None

    def busy_client_check(func):
        def func_wrapper(context, *args, **kwargs):
            try:
                if context.current_client not in context.busy_clients:
                    return func(context, *args, **kwargs)
                else:
                    print("Client is busy")

            except Exception:
                traceback.print_exc()
                exit(-1)

        return func_wrapper
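    # Note: busy_client_check is applied at class-definition time, so it is
    # deliberately a plain function rather than a method; `context` receives
    # the ClientManager instance when a decorated method is called.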

    def __init__(self):
        self.client_logger = LogHandler(
            log_type=LogHandler.LOG_CLIENTCONNECTION)
        self.progress_logger = LogHandler(log_type=LogHandler.LOG_FILEPROGRESS)
        self.list_logger = LogHandler(log_type=LogHandler.LOG_FILELIST)

    def reestablish_connection(self, hash):
        connections = self.client_logger.log_instance.get_clients()
        failed_connections = []
        for conn in connections:
            try:
                self.connect_to_server(conn[0], conn[1], conn[2], False)

            except FailedToConnect as e:
                failed_connections.append(conn)

        if len(failed_connections):
            return constants.CONNECT_FAILED, failed_connections
        else:
            return constants.CONNECT_SUCCESS, []

    def resume_transfers(self):
        for conns in self.clients:
            file_info = self.progress_logger.get_outstanding_files(conns[1])

            if len(file_info):
                self.fetch_file_data(conns, file_info[4], file_info[5],
                                     file_info[4], file_info[3])

            remaining_list, dest_path = self.list_logger.get_file_list()
            if len(remaining_list):
                for l in remaining_list:
                    self.fetch_file_data(conns, get_real_path(l), dest_path,
                                         l[2:])

        return

    def connect_to_server(self, address, port=10031, hash=None, new_conn=True):
        try:
            client_hash = None
            if hash:
                client_hash = hash

            if (address, port) in [i[0] for i in self.clients]:
                raise FailedToConnect("connection already established")

            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(10)
            sock.connect((address, port))
            sock.settimeout(None)
            if not hash:
                sock.send(F"{NetworkActivity.SERVER_NAME}".encode())
                sock.recv(1024)
                sock.send(b"{\"blank\": \"0\"}")
                client_hash = sock.recv(1024000).decode()
            sock.close()

            temp_addr = ((address, port), client_hash)

            self.current_client = temp_addr
            self.clients.append(self.current_client)

            self.client_logger.write_to_log(client_addr=temp_addr[0][0],
                                            client_port=temp_addr[0][1],
                                            client_hash=temp_addr[1])

        except socket.timeout:
            raise FailedToConnect("timed out")

        except ConnectionRefusedError:
            raise FailedToConnect("connection refused")

    def fetch_file_data(self, client, orig_path, dest_path, root, pos=0):
        #get information of file
        info = get_file_info_helper(orig_path, client[0])
        name = info['name']
        size = info['size']
        hash = info['hash']

        #create the destination path for the new file
        root = root.replace(constants.GENERIC_PATH, os.path.sep)
        basename = str(Path(root).parent)
        new_path = ''.join([dest_path, str_diff(orig_path, basename)])

        if not os.path.exists(str(Path(new_path).parent)):
            os.makedirs(str(Path(new_path).parent))
        new_file = open(new_path, 'wb')

        #request file data
        client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        client_socket.connect(client[0])
        client_socket.send(F"{NetworkActivity.FILE_DATA}".encode())
        client_socket.recv(1024)
        client_socket.send(
            json.dumps({
                "file": orig_path,
                "pos": pos
            }).encode())

        #receive data and write to file
        # TODO: Create a timeout in case the server goes down
        data = client_socket.recv(1024000)

        #This is for the logging function to keep track of how far the transfer is
        log_pos = len(data)
        self.list_logger.set_target_attr(client[1])
        self.list_logger.clean_log()
        while data:
            new_file.write(data)
            self.progress_logger.write_to_log(
                file_name=name,
                file_size=size,
                file_hash=hash,
                transferred=log_pos,
                orig_path=str(Path(orig_path).parent),
                dest_path=str(Path(new_path).parent),
                client_conn=client[1])
            data = client_socket.recv(1024000)
            input("::")
            log_pos += len(data)

        new_file.close()
        client_socket.close()

        # verify that file has been transferred correctly
        # TODO: Do some corrective action if transfer failed
        if hash_file(new_path) != hash:
            print("File transfer failed!")
            # do more error correcting here

        else:
            self.progress_logger.clean_log()

    def client_transfer_thread(self, client, orig_path, dest_path, pos=0):
        # TODO: Check if there are any uncopied files to finish (once again put holds if there are any problems)
        if orig_path[0] == '1':
            # fetch list of files from server
            client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            client_socket.connect(client[0])
            client_socket.send(
                F"{NetworkActivity.ROOT_SUBDIR_RECURSIVE}".encode())
            client_socket.recv(1024)
            client_socket.send(json.dumps({"file": orig_path[2:]}).encode())
            data = client_socket.recv(1024000)
            files = ''

            while data:
                files += data.decode()
                data = client_socket.recv(1024000)

            client_socket.close()

            files = files.split("/::/")

            self.list_logger.write_to_log(file_list=files,
                                          dest_dir=dest_path,
                                          client_conn=client[1])

            for f in files:
                self.fetch_file_data(client, get_real_path(f), dest_path,
                                     orig_path[2:])

        else:
            self.fetch_file_data(client, get_real_path(orig_path), dest_path,
                                 orig_path[2:], pos)

        # remove client from busy_clients
        for c in range(0, len(self.busy_clients)):
            if self.busy_clients[c] == client:
                del self.busy_clients[c]
                break

    @busy_client_check
    def transfer_data(self, orig_path, dest_path, pos=0):
        self.busy_clients.append(self.current_client)
        self.transfer_threads[self.current_client] = threading.Thread(
            target=self.client_transfer_thread,
            args=(self.current_client, orig_path, dest_path, pos))
        self.transfer_threads[self.current_client].start()

    def get_clients(self):
        return self.clients

    # TODO: Use the client's hash to switch client
    def switch_client(self, client):
        if client in self.clients:
            self.current_client = client
        else:
            raise Exception("Client not in list")

    @busy_client_check
    def get_file_info(self, file_name):
        return get_file_info_helper(file_name, self.current_client)

    @busy_client_check
    def get_directory_contents(self, directory):
        client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        client_socket.connect(self.current_client[0])
        client_socket.send(F"{NetworkActivity.ROOT_SUBDIR}".encode())
        client_socket.recv(1024)
        client_socket.send(json.dumps({"file": directory}).encode())
        data = client_socket.recv(1024000)
        client_socket.close()
        return data.decode()

    @busy_client_check
    def get_roots(self):
        client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        client_socket.connect(self.current_client[0])
        client_socket.send(F"{NetworkActivity.ROOTS}".encode())
        client_socket.recv(1024)
        client_socket.send(b"{\"blank\": \"0\"}")
        data = client_socket.recv(1024000)
        client_socket.close()
        return data.decode()
Example #9
class FileServer:
    roots = activity.roots
    root_parents = activity.root_parents
    connections = {}
    closed_connections = []
    opened_files = {}
    server_socket = None
    listener_thread = None
    closer_thread = None
    run = True
    close_run = True
    logger = None
    server_name = activity.server_name

    def open_server(self, name=""):

        self.logger = LogHandler(log_type=LogHandler.LOG_SERVER)

        try:
            server_hash = name + ':' + ''.join(
                random.SystemRandom().choice(string.ascii_uppercase +
                                             string.digits +
                                             string.ascii_lowercase)
                for _ in range(11))
            self.logger.write_to_log(hash=server_hash, override=False)
            self.server_name[0] = server_hash
        except ServerNameSet:
            self.server_name[0] = self.logger.log_instance.get_hash()

        self.run = True
        self.close_run = True
        self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR,
                                      1)
        self.server_socket.bind(('', 10031))

        self.listener_thread = threading.Thread(
            target=self.listen_for_connections)
        self.listener_thread.start()

        self.closer_thread = threading.Thread(
            target=self.close_finished_connections)
        self.closer_thread.start()

    # Listen for incoming connections and forward the connection to a thread
    def listen_for_connections(self):
        self.server_socket.listen(5)
        data = ''
        read_list = [self.server_socket]

        # listen during the runtime of the program
        while self.run:

            try:

                # this makes the server listen in non-blocking mode
                readable, writable, errored = select.select(
                    read_list, [], [], 1)
                for s in readable:
                    if s is self.server_socket:

                        # accept connection
                        client_socket, address = self.server_socket.accept()

                        # create a thread and pass connection to the thread
                        self.connections[address] = threading.Thread(
                            target=self.deal_with_client,
                            args=(client_socket, address))
                        self.connections[address].start()
            except Exception:
                pass

        for key, value in self.connections.items():
            value.join()

    def close(self):
        self.run = False
        self.server_socket.shutdown(socket.SHUT_RDWR)
        self.server_socket.close()
        self.listener_thread.join()
        self.close_run = False
        self.closer_thread.join()
        self.roots.clear()
        self.root_parents.clear()

    # # TODO: Log root
    # TODO: Determine if root is file or folder
    def add_root(self, root):

        #first check if path is valid
        if os.path.exists(root):

            #hash the parent directory
            par_dir = str(pathlib.Path(root).parents[0])
            par_hash = hashlib.sha1(par_dir.encode()).hexdigest()

            #determine if root is a file or folder
            if os.path.isfile(root):
                par_hash = F"0-{par_hash}"
            else:
                par_hash = F"1-{par_hash}"

            self.root_parents[par_hash.split('-')[1]] = par_dir
            new_root = \
                F"{par_hash}{constants.GENERIC_PATH}{os.path.basename(root)}"
            if new_root not in self.roots:
                self.roots.append(new_root)
        else:
            raise FileNotFoundError
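    # A shared root is therefore encoded as
    # '<0|1>-<sha1 of parent dir><GENERIC_PATH><basename>', where the leading
    # '0' marks a file and '1' a folder; Example #8 maps GENERIC_PATH back to
    # os.path.sep on the receiving side.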

    def remove_root(self, root_name):

        parent_hash = []
        pos = 0
        for i in self.roots:
            if os.path.basename(i) == root_name:
                del self.roots[pos]
                parent_hash = i.split('/')[0]
                break
            pos += 1

        cnt = 0
        for i in self.roots:
            if i.split('/')[0] == parent_hash:
                cnt += 1

        if not cnt:
            del self.root_parents[parent_hash.split('-')[1]]

    # Periodically closes any connections that have been marked for closing
    def close_finished_connections(self):
        while self.close_run:
            try:
                # run through marked connections
                for c in self.closed_connections:

                    # stop the thread
                    self.connections[c].join()

                    del self.connections[c]

                # reset the closed_connections list
                self.closed_connections.clear()
            except Exception:
                pass
            time.sleep(0.5)

    # listen for client requests and respond with an appropriate activity
    # to deal with the client's needs
    def deal_with_client(self, client, address):
        net_activity = NetworkActivity()
        while True:
            try:

                # listen in non-blocking mode
                readable, writable, errored = select.select([client], [], [])
                for s in readable:
                    data = s.recv(102400)
                    if data:
                        try:
                            data = data.decode()[0]

                            # get appropriate activity
                            function = net_activity.activity_function_factory(
                                int(data))

                            # inform client request has been acknowledged
                            s.send("{0}".format(            \
                                net_activity.DATA_RECEIVED  \
                            ).encode())

                            # the activity will take over from here
                            function(s, get_list_from_json(s.recv(102400)))

                        # TODO: If the client's arguments are invalid, inform the client
                        except json.decoder.JSONDecodeError:
                            # re-raise so the outer handler marks this
                            # connection for closing
                            raise Exception()

                        # TODO: Make this better
                        except Exception as e:
                            traceback.print_exc()
                            s.send(b"Error in data transfer")

                    else:
                        raise Exception()

            # this is called when a client drops a connection. The connection is marked for closing
            except Exception as e:
                self.closed_connections.append(address)
                break
Example #10
class PulseBuildbotTranslator(object):

    def __init__(self, durable=False, logdir='logs', message=None,
                 display_only=False, consumer_cfg=None, publisher_cfg=None,
                 label=None):
        self.durable = durable
        self.label = 'pulse-build-translator-%s' % (label or
                                                    socket.gethostname())
        self.logdir = logdir
        self.message = message
        self.display_only = display_only
        self.consumer_cfg = consumer_cfg
        self.publisher_cfg = publisher_cfg

        if not os.access(self.logdir, os.F_OK):
            os.mkdir(self.logdir)

        self.bad_pulse_msg_logger = self.get_logger('BadPulseMessage',
                                                    'bad_pulse_message.log')

        self.error_logger = self.get_logger('ErrorLog',
                                            'error.log',
                                            stderr=True)

        loghandler_error_logger = self.get_logger('LogHandlerErrorLog',
                                                  'log_handler_error.log',
                                                  stderr=True)
        self.loghandler = LogHandler(loghandler_error_logger,
                                     self.publisher_cfg)

    def get_logger(self, name, filename, stderr=False):
        filepath = os.path.join(self.logdir, filename)
        logger = logging.getLogger(name)
        logger.setLevel(logging.DEBUG)
        handler = logging.handlers.RotatingFileHandler(
            filepath, mode='a+', maxBytes=300000, backupCount=2)
        formatter = logging.Formatter(
            "%(asctime)s - %(levelname)s: %(message)s", "%Y-%m-%d %H:%M:%S")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        if stderr:
            handler = logging.StreamHandler()
            logger.addHandler(handler)

        return logger

    def start(self):
        if self.message:
            # handle a test message
            json_data = open(self.message)
            data = json.load(json_data)
            self.on_pulse_message(data)
            return

        # Start listening for pulse messages. If 5 failures in a
        # minute, wait 5 minutes before retrying.
        failures = []
        while True:
            pulse = consumers.BuildConsumer(applabel=self.label, connect=False)
            pulse.configure(topic=['#.finished', '#.log_uploaded'],
                            callback=self.on_pulse_message,
                            durable=self.durable)
            if self.consumer_cfg:
                pulse.config = self.consumer_cfg

            try:
                pulse.listen()
            except Exception:
                self.error_logger.exception(
                    "Error occurred during pulse.listen()")

            now = datetime.datetime.now()
            failures = [x for x in failures
                        if now - x < datetime.timedelta(seconds=60)]
            failures.append(now)
            if len(failures) >= 5:
                failures = []
                time.sleep(5 * 60)

    def buildid2date(self, string):
        """Takes a buildid string and returns a python datetime and
           seconds since epoch.
        """

        date = parse(string)
        return (date, int(time.mktime(date.timetuple())))
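    # e.g. buildid2date('20130927030202') returns (datetime(2013, 9, 27, 3, 2,
    # 2), <epoch seconds>); note that time.mktime() interprets the timetuple
    # as local time.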

    def process_unittest(self, data):
        data['insertion_time'] = calendar.timegm(time.gmtime())
        if not data.get('logurl'):
            raise NoLogUrlError(data['key'])
        if data['platform'] not in messageparams.platforms:
            raise BadPlatformError(data['key'], data['platform'])
        elif data['os'] not in messageparams.platforms[data['platform']]:
            raise BadOSError(data['key'], data['platform'], data['os'],
                             data['buildername'])

        if self.display_only:
            print "Test properties:\n%s\n" % json.dumps(data)
            return

        self.loghandler.handle_message(data)

    def process_build(self, data):
        if data['platform'] not in messageparams.platforms:
            raise BadPlatformError(data['key'], data['platform'])
        for tag in data['tags']:
            if tag not in messageparams.tags:
                raise BadTagError(data['key'], tag, data['platform'],
                                  data['product'])
        # Repacks do not have a buildurl included. We can remove this
        # workaround once bug 857971 has been fixed
        if not data['buildurl'] and not data['repack']:
            raise NoBuildUrlError(data['key'])

        if self.display_only:
            print "Build properties:\n%s\n" % json.dumps(data)
            return

        self.loghandler.handle_message(data)

    def on_pulse_message(self, data, message=None):
        key = 'unknown'
        stage_platform = None

        try:
            key = data['_meta']['routing_key']

            # Acknowledge the message so it doesn't hang around on the
            # pulse server.
            if message:
                message.ack()

            # Create a dict that holds build properties that apply to both
            # unittests and builds.
            builddata = { 'key': key,
                          'job_number': None,
                          'buildid': None,
                          'build_number': None,
                          'previous_buildid': None,
                          'status': None,
                          'platform': None,
                          'builddate': None,
                          'buildurl': None,
                          'locale': None,
                          'locales': None,
                          'logurl': None,
                          'testsurl': None,
                          'release': None,
                          'buildername': None,
                          'slave': None,
                          'repack': None,
                          'revision': None,
                          'product': None,
                          'version': None,
                          'tree': None,
                          'timestamp': datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
                        }

            # scan the payload for properties applicable to both tests and
            # builds
            for prop in data['payload']['build']['properties']:

                # look for the job number
                if prop[0] == 'buildnumber':
                    builddata['job_number'] = prop[1]

                # look for revision
                if prop[0] == 'revision':
                    builddata['revision'] = prop[1]

                # look for product
                elif prop[0] == 'product':
                    # Bug 1010120:
                    # Ensure to lowercase to prevent issues with capitalization
                    builddata['product'] = prop[1].lower()

                # look for version
                elif prop[0] == 'version':
                    builddata['version'] = prop[1]

                # look for tree
                elif prop[0] == 'branch':
                    builddata['tree'] = prop[1]
                    # For builds, this property is sometimes a relative path,
                    # ('releases/mozilla-beta') and not just a name.  For
                    # consistency, we'll strip the path components.
                    if isinstance(builddata['tree'], basestring):
                        builddata['tree'] = os.path.basename(builddata['tree'])

                # look for buildid
                elif prop[0] == 'buildid':
                    builddata['buildid'] = prop[1]
                    date, builddata['builddate'] = self.buildid2date(prop[1])

                # look for the build number which comes with candidate builds
                elif prop[0] == 'build_number':
                    builddata['build_number'] = prop[1]

                # look for the previous buildid
                elif prop[0] == 'previous_buildid':
                    builddata['previous_buildid'] = prop[1]

                # look for platform
                elif prop[0] == 'platform':
                    builddata['platform'] = prop[1]
                    if (builddata['platform'] and
                        '-debug' in builddata['platform']):
                        # strip '-debug' from the platform string if it's
                        # present
                        builddata['platform'] = builddata['platform'][
                            0:builddata['platform'].find('-debug')]

                # look for the locale
                elif prop[0] == 'locale':
                    builddata['locale'] = prop[1]

                # look for the locale
                elif prop[0] == 'locales':
                    builddata['locales'] = prop[1]

                # look for build url
                elif prop[0] in ['packageUrl', 'build_url', 'fileURL']:
                    builddata['buildurl'] = prop[1]

                # look for log url
                elif prop[0] == 'log_url':
                    builddata['logurl'] = prop[1]

                # look for release name
                elif prop[0] in ['en_revision', 'script_repo_revision']:
                    builddata['release'] = prop[1]

                # look for tests url
                elif prop[0] == 'testsUrl':
                    builddata['testsurl'] = prop[1]

                # look for buildername
                elif prop[0] == 'buildername':
                    builddata['buildername'] = prop[1]

                # look for slave builder
                elif prop[0] == 'slavename':
                    builddata['slave'] = prop[1]

                # look for blobber files
                elif prop[0] == 'blobber_files':
                    try:
                        builddata['blobber_files'] = json.loads(prop[1])
                    except ValueError:
                        self.error_logger.exception(
                            "Malformed `blobber_files` buildbot property: {}".format(prop[1]))

                # look for stage_platform
                elif prop[0] == 'stage_platform':
                    # For some messages, the platform we really care about
                    # is in the 'stage_platform' property, not the 'platform'
                    # property.
                    stage_platform = prop[1]
                    for buildtype in messageparams.buildtypes:
                        if buildtype in stage_platform:
                            stage_platform = stage_platform[0:stage_platform.find(buildtype) - 1]

                elif prop[0] == 'completeMarUrl':
                    builddata['completemarurl'] = prop[1]

                elif prop[0] == 'completeMarHash':
                    builddata['completemarhash'] = prop[1]

            if not builddata['tree']:
                raise BadPulseMessageError(key, "no 'branch' property")

            # If no locale is given fallback to en-US
            if not builddata['locale']:
                builddata['locale'] = 'en-US'

            # status of the build or test notification
            # see http://hg.mozilla.org/build/buildbot/file/08b7c51d2962/master/buildbot/status/builder.py#l25
            builddata['status'] = data['payload']['build']['results']

            if 'debug' in key:
                builddata['buildtype'] = 'debug'
            elif 'pgo' in key:
                builddata['buildtype'] = 'pgo'
            else:
                builddata['buildtype'] = 'opt'

            # see if this message is for a unittest
            unittestRe = re.compile(r'build\.((%s)[-|_](.*?)(-debug|-o-debug|-pgo|_pgo|_test)?[-|_](test|unittest|pgo)-(.*?))\.(\d+)\.(log_uploaded|finished)' %
                                    builddata['tree'])
            match = unittestRe.match(key)
            if match:
                # for unittests, generate some metadata by parsing the key

                if match.groups()[7] == 'finished':
                    # Ignore this message, we only care about 'log_uploaded'
                    # messages for unittests.
                    return

                # The 'short_builder' string is quite arbitrary, and so this
                # code is expected to be fragile, and will likely need
                # frequent maintenance to deal with future changes to this
                # string.  Unfortunately, these items are not available
                # in a more straightforward fashion at present.
                short_builder = match.groups()[0]

                builddata['os'] = match.groups()[2]
                if builddata['os'] in messageparams.os_conversions:
                    builddata['os'] = messageparams.os_conversions[
                        builddata['os']](builddata)

                builddata['test'] = match.groups()[5]

                # yuck!!
                if builddata['test'].endswith('_2'):
                    short_builder = "%s.2" % short_builder[0:-2]
                elif builddata['test'].endswith('_2-pgo'):
                    short_builder = "%s.2-pgo" % short_builder[0:-6]

                builddata['talos'] = 'talos' in builddata['buildername']

                if stage_platform:
                    builddata['platform'] = stage_platform

                self.process_unittest(builddata)
            elif 'source' in key:
                # what is this?
                # ex: build.release-mozilla-esr10-firefox_source.0.finished
                pass
            elif [x for x in ['schedulers', 'tag', 'submitter', 'final_verification', 'fuzzer'] if x in key]:
                # internal buildbot stuff we don't care about
                # ex: build.release-mozilla-beta-firefox_reset_schedulers.12.finished
                # ex: build.release-mozilla-beta-fennec_tag.40.finished
                # ex: build.release-mozilla-beta-bouncer_submitter.46.finished
                pass
            elif 'jetpack' in key:
                # These are very awkwardly formed; i.e.
                # build.jetpack-mozilla-central-win7-debug.18.finished,
                # and the tree appears nowhere except this string.  In order
                # to support these we'd have to keep a tree map of all
                # possible trees.
                pass
            else:
                if not builddata['platform']:
                    if stage_platform:
                        builddata['platform'] = stage_platform
                    else:
                        # Some messages don't contain the platform
                        # in any place other than the routing key, so we'll
                        # have to guess it based on that.
                        builddata['platform'] = messageparams.guess_platform(key)
                        if not builddata['platform']:
                            raise BadPulseMessageError(key, 'no "platform" property')

                otherRe = re.compile(r'build\.((release-|jetpack-|b2g_)?(%s)[-|_](xulrunner[-|_])?(%s)([-|_]?)(.*?))\.(\d+)\.(log_uploaded|finished)' %
                                     (builddata['tree'], builddata['platform']))
                match = otherRe.match(key)
                if match:
                    if 'finished' in match.group(9):
                        # Ignore this message, we only care about 'log_uploaded'
                        # messages for builds
                        return

                    builddata['tags'] = match.group(7).replace('_', '-').split('-')

                    # There are some tags we don't care about as tags,
                    # usually because they are redundant with other properties,
                    # so remove them.
                    notags = ['debug', 'pgo', 'opt', 'repack']
                    builddata['tags'] = [x for x in builddata['tags'] if x not in notags]

                    # Sometimes a tag will just be a digit, i.e.,
                    # build.mozilla-central-android-l10n_5.12.finished;
                    # strip these.
                    builddata['tags'] = [x for x in builddata['tags'] if not x.isdigit()]

                    if isinstance(match.group(2), basestring):
                        if 'release' in match.group(2):
                            builddata['tags'].append('release')
                        if 'jetpack' in match.group(2):
                            builddata['tags'].append('jetpack')

                    if match.group(4) or 'xulrunner' in builddata['tags']:
                        builddata['product'] = 'xulrunner'

                    # Sadly, the build url for emulator builds isn't published
                    # to the pulse stream, so we have to guess it.  See bug
                    # 1071642.
                    if ('emulator' in builddata.get('platform', '') and
                            'try' not in key and builddata.get('buildid')):
                        builddata['buildurl'] = (
                            'https://pvtbuilds.mozilla.org/pub/mozilla.org/b2g/tinderbox-builds' +
                            '/%s-%s/%s/emulator.tar.gz' %
                            (builddata['tree'], builddata['platform'],
                             builddata['buildid']))

                    # In case of repacks we have to send multiple notifications,
                    # each for every locale included. We can remove this
                    # workaround once bug 857971 has been fixed.
                    if 'repack' in key:
                        builddata['repack'] = True

                        if not builddata["locales"]:
                            raise BadPulseMessageError(key, 'no "locales" property')

                        for locale in builddata["locales"].split(','):
                            if not locale:
                                raise BadLocalesError(key, builddata["locales"])

                            data = copy.deepcopy(builddata)
                            data['locale'] = locale
                            self.process_build(data)

                    else:
                        self.process_build(builddata)
                else:
                    raise BadPulseMessageError(key, "unknown message type")

        except BadPulseMessageError as inst:
            self.bad_pulse_msg_logger.exception(json.dumps(data.get('payload'),
                                                           indent=2))
            print(inst.__class__, str(inst))
        except Exception:
            self.error_logger.exception(json.dumps(data, indent=2))
Example #11
class Worker():
    """
    Worker normal: its main duty is to command nodes to send packets under a specific configuration
    """

    # This table is used by the Pearson hashing algorithm
    hash_table = [
        247, 146, 42, 23, 21, 143, 201, 47, 100, 80, 12, 153, 58, 34, 238, 123,
        140, 61, 213, 43, 190, 110, 90, 35, 105, 250, 185, 73, 148, 230, 65,
        186, 28, 138, 149, 31, 166, 189, 164, 122, 227, 204, 6, 91, 96, 69, 60,
        3, 217, 32, 158, 40, 178, 89, 173, 53, 253, 55, 126, 248, 10, 205, 11,
        79, 160, 52, 85, 133, 182, 54, 27, 214, 107, 243, 22, 120, 194, 193,
        63, 95, 246, 226, 224, 239, 169, 241, 74, 180, 118, 234, 4, 30, 168,
        221, 183, 231, 177, 41, 25, 176, 191, 171, 119, 56, 59, 152, 78, 19,
        46, 172, 156, 18, 159, 103, 141, 161, 136, 170, 157, 9, 8, 97, 86, 255,
        134, 39, 174, 16, 151, 49, 220, 66, 117, 233, 132, 162, 14, 196, 135,
        36, 81, 45, 165, 38, 20, 116, 232, 223, 29, 76, 219, 137, 121, 203,
        145, 115, 129, 245, 202, 142, 57, 198, 62, 84, 50, 75, 167, 98, 197,
        154, 51, 225, 242, 207, 184, 2, 244, 155, 228, 150, 163, 210, 94, 83,
        249, 195, 240, 104, 187, 237, 109, 5, 68, 15, 229, 209, 181, 236, 215,
        211, 33, 92, 93, 127, 139, 208, 130, 252, 17, 188, 216, 131, 101, 67,
        144, 71, 77, 112, 87, 179, 7, 114, 99, 235, 212, 44, 26, 175, 200, 48,
        251, 113, 1, 102, 82, 192, 147, 111, 199, 124, 70, 218, 128, 64, 125,
        24, 108, 88, 222, 37, 206, 0, 72, 13, 254, 106
    ]

    def compute_hash(self, message):
        """
        Implementation of the Pearson Hashing. See https://en.wikipedia.org/wiki/Pearson_hashing
        :param message: The byte array encoding the payload of the message
        :return: The 8-bit hash
        """
        hash = len(message) % 256
        for i in message:
            hash = self.hash_table[(hash + i) % 256]

        return hash
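    # Note: this variant seeds the hash with len(message) % 256 rather than
    # the classic 0, so the 8-bit digest also depends on the message length.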

    def is_done(self):
        """
        Returns whether we have ended
        :return: self.am_done
        """
        return self.am_done

    def dr_to_sf(self, data_rate):
        """
        Converts from data_rate (0-5) to Spreading Factor (7-12)
        :param data_rate: The data rate at which the communication is taking place
        :return: The corresponding Spreading Factor as an integer from 7 to 12
        """
        return 7 + (5 - data_rate)

    def sf_to_dr(self, sf):
        """
        Converts from Spreading Factor (7-12) to data_rate (0-5)
        :param sf: The Spreading Factor at which the communication is taking place
        :return: The corresponding Data Rate
        """
        return 5 - (sf - 7)
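    # The two conversions are mutual inverses, matching the LoRaWAN EU868
    # mapping: dr_to_sf(0) == 12, dr_to_sf(5) == 7, and
    # sf_to_dr(dr_to_sf(d)) == d for every data rate d in 0..5.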

    @staticmethod
    def compress_config_vector(vector):
        """
        Compress a dense vector into a sparse one by keeping the configs with
        probability of at least 5% and zeroing the rest. Afterwards the
        vector is normalized so that it again represents a discrete PDF
        :param vector: A dense vector with the probability for each configuration
        :return: The kept config indices and their renormalized probabilities
        """
        vector_16 = vector.astype(np.float16)

        vector_16[vector_16 < 0.05] = 0
        vector_16 /= vector_16.sum()
        idxs = np.where(vector_16 > 0)[0]

        return idxs, vector_16[idxs]
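    # Worked example: [0.50, 0.30, 0.02, 0.03, 0.06, 0.09] zeroes the sub-5%
    # entries, renormalizes over the remaining ~0.95, and returns
    # idxs == [0, 1, 4, 5] with values ~= [0.53, 0.32, 0.06, 0.09].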

    def get_updated_config(self):
        """
        Construct the byte array to be sent to the node. It contains the configuration that such a node must have.
        The configuration includes the new pkt_length, the rate at which packets should be sent and the probability
        of using each configuration.
        :return: The byte-array with the new config
        """
        rate_bytes = struct.pack("!f", self.g_rate)
        assert (len(rate_bytes)) == 4

        # the 3 is the indicator of a "normal operation" (as opposed to the 2 of a "sweep operation")
        # technically, g_tx_power should not be sent, but it is here as a part of legacy code :[
        ret = bytes([3, self.g_tx_power, self.g_pkt_length]) + rate_bytes

        config_ids, config_vals = self.compress_config_vector(
            self.config_vector)
        for i in range(len(config_ids)):
            ret += bytes([config_ids[i]]) + struct.pack(
                '<e', config_vals[i])  # np.float16(config_vals[i]).tostring()

        assert (len(ret) - 7) % 3 == 0
        return ret
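    # Resulting layout: byte 0 is the mode (3 = normal), byte 1 tx_power,
    # byte 2 pkt_length, bytes 3-6 the big-endian float rate, then 3-byte
    # (config_id, little-endian float16 probability) records from offset 7,
    # which is exactly what the final assert checks.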

    def update_config(self, new_rate=None, new_config_vector=None):
        """
        A method to receive the new config computed by the main thread. When possible, this new config will be sent
        to the corresponding node (note that each worker_normal thread is assigned to each IoT node).
        :param new_rate: the new lambda (rate) at which packets should be sent
        :param new_config_vector: the new vector indicating the probability of choosing each config
        :return: None
        """
        self.lh.append_msg(self.node_id + " Updating config: " +
                           str(new_rate) + " " + str(new_config_vector))
        if new_rate is not None:
            self.g_rate = new_rate
            self.force_update = True
            print(self.node_id, "Forcing next update", self.force_update,
                  str(self))

        if new_config_vector is not None:
            self.config_vector = new_config_vector
            self.force_update = True
            print(self.node_id, "Forcing next update", self.force_update,
                  str(self))
        # setattr(self, 'force_update', False) is not None
        return

    def uplink_callback(self, msg, client):
        """
        Callback for each packet received
        :param msg: The message received
        :param client: The MQTT Client
        :return: None
        """
        assert self.node_id == msg.dev_id
        print(self.node_id, msg)
        self.lh.append_msg(str(msg))

        # print("-------->", self.node_id, "next update?", self.force_update, str(self))

        if self.am_done:
            print(msg.dev_id, "Commanding node to stop as we are done")
            client.send(msg.dev_id,
                        base64.b64encode(bytes([255, 0, 255])).decode('utf-8'),
                        port=1,
                        conf=False,
                        sched="replace")
            print(self.node_id, 'Sent')
            return

        paq_bytes = base64.b64decode(msg.payload_raw)
        if (len(paq_bytes) == 3 and paq_bytes[0] == 0x00 and paq_bytes[1] == 0x01 and paq_bytes[2] == 0x02) or \
                self.force_update:
            # this kind of message is received when the node has not received
            # any order yet or when its config must be updated, so let's
            # command it to start sending packets

            if not self.force_update:
                print(msg.dev_id, "Switching mote to NORMAL mode with:")
            else:
                print("Updating was forced")

            print(
                msg.dev_id,
                "\n\tTX POWER: {}\n\tRate: {}".format(self.g_tx_power,
                                                      self.g_rate))
            print("and config vector: ")
            config_ids, config_vals = self.compress_config_vector(
                self.config_vector)
            for i in range(len(config_ids)):
                print("\t\tConfig {} percentage {}".format(
                    config_ids[i], config_vals[i]))

            new_config = self.get_updated_config()
            client.send(msg.dev_id,
                        base64.b64encode(new_config).decode('utf-8'),
                        port=1,
                        conf=False,
                        sched="replace")
            print(self.node_id, 'Sent')
            self.hash = self.compute_hash(new_config)

            if self.force_update:
                self.force_update = False

            return

        counter = paq_bytes[0]
        rcv_hash = paq_bytes[1]

        if counter < self.last_counter:
            if abs(counter - self.last_counter) > 5 and counter < 5:
                print(self.node_id, "New config, reseting counters")
                self.last_counter = -1
            else:
                print(self.node_id, "-----MASIVE DISORDER!-------")
                if len(self.hits) > counter and self.hits[counter] is False:
                    print(
                        self.node_id,
                        "Intentando arreglar, he fijado {} a true... cuando era... {}"
                        .format(counter, self.hits[counter]))
                    self.hits[counter] = True

        if counter == self.last_counter:
            print(self.node_id, "SAME COUNTER!!!")
            if hasattr(msg, 'is_retry') and msg.is_retry:
                print("MSG is retry")
            return

        # normal reception continues from here
        missed_packets = counter - self.last_counter - 1
        print(self.node_id, missed_packets, 'lost packets')
        for _ in range(missed_packets):
            self.hits.append(False)
            self.snrs.append(np.nan)

        # print(self.node_id, 'Normal hit :D')
        self.hits.append(True)

        if self.gw_id is not None:
            idx_my_gw = [g.gtw_id
                         for g in msg.metadata.gateways].index(self.gw_id)
        else:
            idx_my_gw = 0

        if self.hash is None:
            print(
                "-----[I do not have any stored hash. Accepting received hash]----"
            )
            self.hash = rcv_hash

        if rcv_hash != self.hash or len(paq_bytes) != self.g_pkt_length:
            print("Hash/Length mismatch, updating node config")
            print("and config vector: ")
            config_ids, config_vals = self.compress_config_vector(
                self.config_vector)
            for i in range(len(config_ids)):
                print("\t\tConfig {} percentage {}".format(
                    config_ids[i], config_vals[i]))

            new_config = self.get_updated_config()
            client.send(msg.dev_id,
                        base64.b64encode(new_config).decode('utf-8'),
                        port=1,
                        conf=False,
                        sched="replace")
            print(self.node_id, 'Sent')
            self.hash = self.compute_hash(new_config)
            # self.last_counter = -1

        self.snrs.append(msg.metadata.gateways[idx_my_gw].snr)
        self.last_counter = counter

        print(
            self.node_id, "PRR SO FAR: {} ({}/{})".format(
                self.hits.count(True) / len(self.hits), self.hits.count(True),
                len(self.hits)))
        self.lh.append_msg("PRR SO FAR: {} ({}/{})".format(
            self.hits.count(True) / len(self.hits), self.hits.count(True),
            len(self.hits)))
        self.lh.file.flush()

    def __init__(self,
                 node_id,
                 config_vector,
                 tx_power,
                 pkt_length,
                 rate,
                 gw_id='gateway_under_test'):
        """
        Worker normal: its main duty is to command nodes to send packets under a specific configuration
        :param node_id: The string by which the node will be identified
        :param config_vector: The vector indicating the probability of choosing one specific config
        :param tx_power: The transmission power to use.
        :param pkt_length: The length of the packets to be sent.
        :param rate: The rate at which packets should be sent.
        :param gw_id: The gateway id, if only packets from that gateway should be listened for
        """

        print("[Normal] Initializing node handler for node:", node_id)
        self.node_id = node_id
        self.last_counter = -1
        self.hits = list()
        self.snrs = list()
        self.gw_id = gw_id
        self.force_update = False
        self.hash = None

        self.g_tx_power = tx_power
        self.config_vector = config_vector
        self.g_pkt_length = pkt_length
        self.g_rate = float(rate)

        assert abs(self.config_vector.sum() - 1) < 1e-3
        assert np.all(self.config_vector <= 1)
        assert np.all(self.config_vector >= 0)
        assert self.g_tx_power in range(2, 15)
        assert 1 <= self.g_pkt_length <= 220
        assert 0 <= self.g_rate <= 1

        self.lh = LogHandler('log_normal_{}.csv'.format(self.node_id))
        self.lh.append_msg(self.node_id + " Base config: " + str(rate) + " " +
                           str(config_vector))
        self.am_done = False

        # new_config = self.get_updated_config()
        # self.hash = self.compute_hash(new_config)

    def close(self):
        self.lh.close()
Example #12
class PulseBuildbotTranslator(object):
    def __init__(self,
                 durable=False,
                 logdir='logs',
                 message=None,
                 display_only=False,
                 consumer_cfg=None,
                 publisher_cfg=None,
                 label=None):
        self.durable = durable
        self.label = 'pulse-build-translator-%s' % (label
                                                    or socket.gethostname())
        self.logdir = logdir
        self.message = message
        self.display_only = display_only
        self.consumer_cfg = consumer_cfg
        self.publisher_cfg = publisher_cfg

        if not os.access(self.logdir, os.F_OK):
            os.mkdir(self.logdir)

        self.bad_pulse_msg_logger = self.get_logger('BadPulseMessage',
                                                    'bad_pulse_message.log')

        self.error_logger = self.get_logger('ErrorLog',
                                            'error.log',
                                            stderr=True)

        loghandler_error_logger = self.get_logger('LogHandlerErrorLog',
                                                  'log_handler_error.log',
                                                  stderr=True)
        self.loghandler = LogHandler(loghandler_error_logger,
                                     self.publisher_cfg)

    def _quote_url(self, url):
        # Bug 1229761: URLs in build messages are not quoted and will cause bustage in mozharness
        return urllib2.quote(
            url, safe='%/:=&?~#+!$,;\'@()*[]|') if url is not None else url

    def get_logger(self, name, filename, stderr=False):
        filepath = os.path.join(self.logdir, filename)
        logger = logging.getLogger(name)
        logger.setLevel(logging.DEBUG)
        handler = logging.handlers.RotatingFileHandler(filepath,
                                                       mode='a+',
                                                       maxBytes=300000,
                                                       backupCount=2)
        formatter = logging.Formatter(
            "%(asctime)s - %(levelname)s: %(message)s", "%Y-%m-%d %H:%M:%S")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        if stderr:
            handler = logging.StreamHandler()
            logger.addHandler(handler)

        return logger

    def start(self):
        if self.message:
            # handle a test message; close the file once it is parsed
            with open(self.message) as json_data:
                data = json.load(json_data)
            self.on_pulse_message(data)
            return

        # Start listening for pulse messages. If 5 failures in a
        # minute, wait 5 minutes before retrying.
        failures = []
        while True:
            pulse = consumers.BuildConsumer(applabel=self.label, connect=False)
            pulse.configure(topic=['#.finished', '#.log_uploaded'],
                            callback=self.on_pulse_message,
                            durable=self.durable)
            if self.consumer_cfg:
                pulse.config = self.consumer_cfg

            try:
                pulse.listen()
            except Exception:
                self.error_logger.exception(
                    "Error occurred during pulse.listen()")

            now = datetime.datetime.now()
            failures = [
                x for x in failures if now - x < datetime.timedelta(seconds=60)
            ]
            failures.append(now)
            if len(failures) >= 5:
                failures = []
                time.sleep(5 * 60)

    def buildid2date(self, string):
        """Takes a buildid string and returns seconds since epoch.
        """

        try:
            date = parse(string)
            return int(time.mktime(date.timetuple()))
        except ValueError:
            return int(string)
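
    # For illustration: buildids look like '20160102030405' (YYYYMMDDHHMMSS),
    # which dateutil's parse() understands, so buildid2date('20160102030405')
    # returns the epoch seconds for 2016-01-02 03:04:05 local time; a digit
    # string that is not a valid date falls back to int(string).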

    def process_unittest(self, data):
        data['insertion_time'] = calendar.timegm(time.gmtime())
        if data['platform'] in messageparams.ignored_platforms:
            return
        if not data.get('logurl'):
            raise NoLogUrlError(data['key'])
        if data['platform'] not in messageparams.platforms:
            raise BadPlatformError(data['key'], data['platform'])
        elif data['os'] not in messageparams.platforms[data['platform']]:
            raise BadOSError(data['key'], data['platform'], data['os'],
                             data['buildername'])

        if self.display_only:
            print "Test properties:\n%s\n" % json.dumps(data)
            return

        self.loghandler.handle_message(data)

    def process_build(self, data):
        if data['platform'] in messageparams.ignored_platforms:
            return
        if data['platform'] not in messageparams.platforms:
            raise BadPlatformError(data['key'], data['platform'])
        for tag in data['tags']:
            if tag not in messageparams.tags:
                raise BadTagError(data['key'], tag, data['platform'],
                                  data['product'])
        # Repacks do not have a buildurl included. We can remove this
        # workaround once bug 857971 has been fixed
        if not data['buildurl'] and not data['repack']:
            raise NoBuildUrlError(data['key'])

        if self.display_only:
            print "Build properties:\n%s\n" % json.dumps(data)
            return

        self.loghandler.handle_message(data)

    def on_pulse_message(self, data, message=None):
        key = 'unknown'
        stage_platform = None

        try:
            key = data['_meta']['routing_key']

            # Acknowledge the message so it doesn't hang around on the
            # pulse server.
            if message:
                message.ack()

            # Create a dict that holds build properties that apply to both
            # unittests and builds.
            builddata = {
                'key': key,
                'job_number': None,
                'buildid': None,
                'build_number': None,
                'previous_buildid': None,
                'status': None,
                'platform': None,
                'builddate': None,
                'buildurl': None,
                'locale': None,
                'locales': None,
                'logurl': None,
                'testsurl': None,
                'test_packages_url': None,
                'release': None,
                'buildername': None,
                'slave': None,
                'repack': None,
                'revision': None,
                'symbols_url': None,
                'product': None,
                'version': None,
                'tree': None,
                'timestamp': datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
            }

            # scan the payload for properties applicable to both tests and
            # builds
            for prop in data['payload']['build']['properties']:

                # look for the job number
                if prop[0] == 'buildnumber':
                    builddata['job_number'] = prop[1]

                # look for revision
                elif prop[0] == 'revision':
                    builddata['revision'] = prop[1]

                # look for product
                elif prop[0] == 'product':
                    # Bug 1010120:
                    # Ensure to lowercase to prevent issues with capitalization
                    builddata['product'] = prop[1].lower()

                # look for version
                elif prop[0] == 'version':
                    builddata['version'] = prop[1]

                # look for tree
                elif prop[0] == 'branch':
                    builddata['tree'] = prop[1]
                    # For builds, this property is sometimes a relative path,
                    # ('releases/mozilla-beta') and not just a name.  For
                    # consistency, we'll strip the path components.
                    if isinstance(builddata['tree'], basestring):
                        builddata['tree'] = os.path.basename(builddata['tree'])

                # look for buildid
                elif prop[0] == 'buildid':
                    builddata['buildid'] = prop[1]
                    builddata['builddate'] = self.buildid2date(prop[1])

                # look for the build number which comes with candidate builds
                elif prop[0] == 'build_number':
                    builddata['build_number'] = prop[1]

                # look for the previous buildid
                elif prop[0] == 'previous_buildid':
                    builddata['previous_buildid'] = prop[1]

                # look for platform
                elif prop[0] == 'platform':
                    builddata['platform'] = prop[1]
                    if (builddata['platform']
                            and '-debug' in builddata['platform']):
                        # strip '-debug' from the platform string if it's
                        # present
                        builddata['platform'] = builddata['platform'][
                            0:builddata['platform'].find('-debug')]

                # look for the locale
                elif prop[0] == 'locale':
                    builddata['locale'] = prop[1]

                # look for the list of locales
                elif prop[0] == 'locales':
                    builddata['locales'] = prop[1]

                # look for build url
                elif prop[0] in ['packageUrl', 'build_url', 'fileURL']:
                    builddata['buildurl'] = self._quote_url(prop[1])

                # look for log url
                elif prop[0] == 'log_url':
                    builddata['logurl'] = self._quote_url(prop[1])

                # look for release name
                elif prop[0] in ['en_revision', 'script_repo_revision']:
                    builddata['release'] = prop[1]

                # look for symbols url
                elif prop[0] == 'symbolsUrl':
                    builddata['symbols_url'] = self._quote_url(prop[1])

                # look for tests url
                elif prop[0] == 'testsUrl':
                    builddata['testsurl'] = self._quote_url(prop[1])

                # look for url to json manifest of test packages
                elif prop[0] == 'testPackagesUrl':
                    builddata['test_packages_url'] = self._quote_url(prop[1])

                # look for buildername
                elif prop[0] == 'buildername':
                    builddata['buildername'] = prop[1]

                # look for slave builder
                elif prop[0] == 'slavename':
                    builddata['slave'] = prop[1]

                # look for blobber files
                elif prop[0] == 'blobber_files':
                    try:
                        builddata['blobber_files'] = json.loads(prop[1])
                    except ValueError:
                        self.error_logger.exception(
                            "Malformed `blobber_files` buildbot property: {}".
                            format(prop[1]))

                # look for stage_platform
                elif prop[0] == 'stage_platform':
                    # For some messages, the platform we really care about
                    # is in the 'stage_platform' property, not the 'platform'
                    # property.
                    stage_platform = prop[1]
                    for buildtype in messageparams.buildtypes:
                        if buildtype in stage_platform:
                            stage_platform = stage_platform[0:stage_platform.
                                                            find(buildtype) -
                                                            1]

                elif prop[0] == 'completeMarUrl':
                    builddata['completemarurl'] = prop[1]

                elif prop[0] == 'completeMarHash':
                    builddata['completemarhash'] = prop[1]

            if not builddata['tree']:
                raise BadPulseMessageError(key, "no 'branch' property")

            # If no locale is given fallback to en-US
            if not builddata['locale']:
                builddata['locale'] = 'en-US'

            # Release build notifications do not contain a revision.
            # Lets fetch it via the release tag and the hg.m.o REST API
            if builddata['tree'].startswith(
                    'release-') and builddata['revision'] in [None, 'None']:
                try:
                    # Map for platforms which change their id
                    platform_map = {
                        'linux': 'linux-i686',
                        'linux64': 'linux-x86_64',
                        'macosx64': 'mac',
                        'win32': 'win32',
                        'win64': 'win64',
                    }

                    url = 'http://archive.mozilla.org/pub/{product}/candidates/{version}-' \
                          'candidates/build{build_number}/{platform}/en-US/firefox-' \
                          '{version}.json'.format(
                              product=builddata['product'],
                              version=builddata['version'],
                              build_number=builddata['build_number'],
                              platform=platform_map.get(builddata['platform'],
                                                        builddata['platform']),
                          )
                    response = requests.get(url)
                    builddata['revision'] = response.json()['moz_source_stamp']
                except Exception:
                    # We cannot raise an exception due to a broken release rev for repacks
                    # https://bugzilla.mozilla.org/show_bug.cgi?id=1219432#c1
                    pass

            # status of the build or test notification
            # see http://hg.mozilla.org/build/buildbot/file/08b7c51d2962/master/buildbot/status/builder.py#l25
            builddata['status'] = data['payload']['build']['results']

            if 'debug' in key:
                builddata['buildtype'] = 'debug'
            elif 'pgo' in key:
                builddata['buildtype'] = 'pgo'
            else:
                builddata['buildtype'] = 'opt'

            # see if this message is for a unittest
            unittestRe = re.compile(
                r'build\.((%s)[-|_](.*?)(-debug|-o-debug|-pgo|_pgo|_test)?[-|_](test|unittest|pgo)-(.*?))\.(\d+)\.(log_uploaded|finished)'
                % builddata['tree'])
            match = unittestRe.match(key)
            if match:
                # for unittests, generate some metadata by parsing the key

                if match.groups()[7] == 'finished':
                    # Ignore this message, we only care about 'log_uploaded'
                    # messages for unittests.
                    return

                # The 'short_builder' string is quite arbitrary, and so this
                # code is expected to be fragile, and will likely need
                # frequent maintenance to deal with future changes to this
                # string.  Unfortunately, these items are not available
                # in a more straightforward fashion at present.
                short_builder = match.groups()[0]

                builddata['os'] = match.groups()[2]
                if builddata['os'] in messageparams.os_conversions:
                    builddata['os'] = messageparams.os_conversions[
                        builddata['os']](builddata)

                builddata['test'] = match.groups()[5]

                # yuck!!
                if builddata['test'].endswith('_2'):
                    short_builder = "%s.2" % short_builder[0:-2]
                elif builddata['test'].endswith('_2-pgo'):
                    short_builder = "%s.2-pgo" % short_builder[0:-6]

                builddata['talos'] = 'talos' in builddata['buildername']

                if stage_platform:
                    builddata['platform'] = stage_platform

                self.process_unittest(builddata)

            elif 'source' in key:
                # what is this?
                # ex: build.release-mozilla-esr10-firefox_source.0.finished
                pass

            elif [
                    x for x in [
                        'schedulers', 'tag', 'submitter', 'final_verification',
                        'fuzzer'
                    ] if x in key
            ]:
                # internal buildbot stuff we don't care about
                # ex: build.release-mozilla-beta-firefox_reset_schedulers.12.finished
                # ex: build.release-mozilla-beta-fennec_tag.40.finished
                # ex: build.release-mozilla-beta-bouncer_submitter.46.finished
                pass

            elif 'jetpack' in key:
                # These are very awkwardly formed; i.e.
                # build.jetpack-mozilla-central-win7-debug.18.finished,
                # and the tree appears nowhere except this string.  In order
                # to support these we'd have to keep a tree map of all
                # possible trees.
                pass

            else:
                if not builddata['platform']:
                    if stage_platform:
                        builddata['platform'] = stage_platform
                    else:
                        # Some messages don't contain the platform
                        # in any place other than the routing key, so we'll
                        # have to guess it based on that.
                        builddata['platform'] = messageparams.guess_platform(
                            key)
                        if not builddata['platform']:
                            raise BadPulseMessageError(
                                key, 'no "platform" property')

                otherRe = re.compile(
                    r'build\.((release-|jetpack-|b2g_)?(%s)[-|_](xulrunner[-|_])?(%s)([-|_]?)(.*?))\.(\d+)\.(log_uploaded|finished)'
                    % (builddata['tree'], builddata['platform']))
                match = otherRe.match(key)
                if match:
                    if 'finished' in match.group(9):
                        # Ignore this message, we only care about 'log_uploaded'
                        # messages for builds
                        return

                    builddata['tags'] = match.group(7).replace('_',
                                                               '-').split('-')

                    # There are some tags we don't care about as tags,
                    # usually because they are redundant with other properties,
                    # so remove them.
                    notags = ['debug', 'pgo', 'opt', 'repack']
                    builddata['tags'] = [
                        x for x in builddata['tags'] if x not in notags
                    ]

                    # Sometimes a tag will just be a digit, i.e.,
                    # build.mozilla-central-android-l10n_5.12.finished;
                    # strip these.
                    builddata['tags'] = [
                        x for x in builddata['tags'] if not x.isdigit()
                    ]

                    if isinstance(match.group(2), basestring):
                        if 'release' in match.group(2):
                            builddata['tags'].append('release')
                        if 'jetpack' in match.group(2):
                            builddata['tags'].append('jetpack')

                    if match.group(4) or 'xulrunner' in builddata['tags']:
                        builddata['product'] = 'xulrunner'

                    # Sadly, the build url for emulator builds isn't published
                    # to the pulse stream, so we have to guess it.  See bug
                    # 1071642.
                    if ('emulator' in builddata.get('platform', '')
                            and 'try' not in key and builddata.get('buildid')):
                        builddata['buildurl'] = (
                            'https://pvtbuilds.mozilla.org/pub/mozilla.org/b2g/tinderbox-builds'
                            + '/%s-%s/%s/emulator.tar.gz' %
                            (builddata['tree'], builddata['platform'],
                             builddata['buildid']))

                    # In case of repack messages we have to send multiple
                    # notifications, each for every locale included.

                    # Current release-builds have a different data structure
                    # than nightly builds which are generated via mozharness.
                    # This will change once bug 1142872 is fixed and active.
                    if 'repack' in key:  # release build
                        builddata['repack'] = True

                        if not builddata["locales"]:
                            raise BadPulseMessageError(
                                key, 'no "locales" property')

                        for locale in builddata["locales"].split(','):
                            if not locale:
                                raise BadLocalesError(key,
                                                      builddata["locales"])

                            data = copy.deepcopy(builddata)
                            data['locale'] = locale
                            self.process_build(data)

                    elif builddata['locales']:  # nightly repack build
                        builddata['repack'] = True

                        locales = json.loads(builddata['locales'])
                        for locale, result in locales.iteritems():
                            # Use all properties except the locales array
                            data = copy.deepcopy(builddata)
                            del data['locales']

                            # Update overall status of the new message based on the locale status.
                            # Given that there are no clear result values, lets take the values
                            # from buildbot status: 0 = Success, 2 = Failed
                            status = str(result).lower() == "success" or str(
                                result) == '0'
                            data['status'] = 0 if status else 2

                            # Process locale
                            data['locale'] = locale
                            self.process_build(data)

                    else:  # single locale build
                        self.process_build(builddata)
                else:
                    raise BadPulseMessageError(
                        key, "unknown message type, platform: %s" %
                        builddata.get('platform', 'unknown'))

        except BadPulseMessageError as inst:
            self.bad_pulse_msg_logger.exception(
                json.dumps(data.get('payload'), indent=2))
            print(inst.__class__, str(inst))
        except Exception:
            self.error_logger.exception(json.dumps(data, indent=2))
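
For reference, a routing key that the `unittestRe` pattern above would accept looks
like this (the key itself is a hypothetical example, with tree 'mozilla-central'):

    key = 'build.mozilla-central_ubuntu64_vm_test-mochitest-1.123.log_uploaded'
    # group 2 -> tree 'mozilla-central', group 3 -> os token 'ubuntu64_vm',
    # group 6 -> test 'mochitest-1', group 8 -> 'log_uploaded' (as opposed to
    # 'finished', which is ignored for unittests)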
Example #13
0
def main():
    """
     Main driver for the bot. First check for the configuration file
     and load values from it. Then check if the same value is passed
     as arguments then override the value from the configuration file

     Once values are obtained then start the bot
    """

    custompathstem = os.path.join(os.environ['DICTBOT_CONFIGDIR'],
                                  'dictbot.conf') \
                                  if 'DICTBOT_CONFIGDIR' in os.environ \
                                  else None

    log_file = os.path.join(os.environ['DICTBOT_LOGDIR'],
                                 'dictbot.log')\
                                 if 'DICTBOT_LOGDIR' in os.environ \
                                 else '/var/log/dictbot.log'

    config_file = custompathstem if custompathstem and os.path.exists(
        custompathstem)\
        else '/etc/dictbot/dictbot.conf'

    configdict = yaml.safe_load(open(config_file).read())

    parser = ArgumentParser(description='A Jabber Dictionary Bot')
    parser.add_argument('-j',
                        '--jid',
                        help='Jabber ID for the bot to connect.',
                        required=False)
    parser.add_argument('-p',
                        '--password',
                        help='Password for Jabber account',
                        required=False)
    parser.add_argument('-d',
                        '--debug',
                        help='set logging to DEBUG',
                        action='store_const',
                        dest='loglevel',
                        const=logging.DEBUG,
                        default=logging.ERROR)  # a DEBUG default made the flag a no-op

    args = parser.parse_args()
    debug = logging.DEBUG if 'debug' in configdict and\
        configdict.get('debug') == 1 else logging.ERROR

    logger = LogHandler(debug, log_file)
    logging.basicConfig(level=debug, format='%(levelname)-8s %(message)s')
    if len(configdict.get('jabber')) == 1:
        acdetails = configdict.get('jabber')[0]
        jid = acdetails.get('jid')\
            if 'jid' in acdetails else args.jid
        password = acdetails.get('password')\
            if 'password' in acdetails else args.password

        if not jid or not password:
            print """Please provide JID and Password either through config or
 command line options"""
            sys.exit(2)

        xmpp = DictBot(jid, password, logger)

        if xmpp.connect():
            xmpp.process(block=True)
        else:
            print "Unable to connect"
    else:
        accounts = configdict.get('jabber')
        for acnt in accounts:
            jid = acnt.get('jid')
            password = acnt.get('password')
            p = Process(target=spawn_newbot, args=(jid, password, logger))
            p.start()
            p.join(15)
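
For reference, `configdict` is expected to have roughly this shape once loaded
(a sketch with hypothetical values; the keys follow the lookups in main() above):

config_sketch = {
    'debug': 1,  # optional; 1 enables DEBUG-level logging
    'jabber': [
        {'jid': 'dictbot@example.org', 'password': 'secret'},
        # with more than one account, each entry is spawned as its own process
    ],
}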
Example #14
0
class Worker():
    """
    Sweep worker. Its main duty is to construct the PRR model
    """
    def get_pos_interval(self, m, n, c):
        """
        Computes the positive interval for the Bernoulli distribution according to
        https://arxiv.org/pdf/1105.1486.pdf
        :param m: the positive cases (number of received packets)
        :param n: total cases (number of packets sent)
        :param c: c * 100% = confidence interval
        :return: With c*100% confidence, the mean will be smaller than the returned value (upper bound)
        """
        return ncephes.cprob.incbi(m + 1, n - m + 1, 0.5 * (1 + c))

    def get_neg_interval(self, m, n, c):
        """
        Computes the negative interval for the Bernoulli distribution according to
        https://arxiv.org/pdf/1105.1486.pdf
        :param m: the positive cases (number of received packets)
        :param n: total cases (number of packets sent)
        :param c: c * 100% = confidence interval
        :return: With c*100% confidence, the mean will be larger than the returned value (lower bound)
        """
        return ncephes.cprob.incbi(m + 1, n - m + 1, 0.5 * (1 - c))
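
    # Worked example (hypothetical numbers): with m = 8 packets received out of
    # n = 10 sent and confidence c = 0.9, the PRR posterior is Beta(m+1, n-m+1) =
    # Beta(9, 3), so get_neg_interval(8, 10, 0.9) = incbi(9, 3, 0.05) ~= 0.53 and
    # get_pos_interval(8, 10, 0.9) = incbi(9, 3, 0.95) ~= 0.92, i.e. with 90%
    # confidence the true PRR lies roughly in [0.53, 0.92].
    # (scipy.special.betaincinv(9, 3, q) would give the same quantiles if
    # ncephes is not available.)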

    def compute_mean_uncertainty(self):
        """
        Computes the mean uncertainty in the estimation of the PRR
        The uncertainty is computed as the difference between the positive interval and the negative interval
        :return: The mean uncertainty of the acquired PRR values
        """
        uncertainties = list()

        for key in self.margins.keys():
            # if np.isnan(self.model[key]):
            #     uncertainties.append(1)
            # else:
            #     uncertainties.append(self.margins[key][2] / 2)
            uncertainties.append(self.margins[key][2])

        return np.mean(uncertainties)

    def bound_upper_rest(self, tx_power, sf, cr, prr):
        """
        Applies the "bounding" technique to the acquired model in the "upper" direction.
        See the article for more information on this technique.
        :param tx_power: TX Power of the configuration for which we have a solid measure of the PRR
        :param sf: Spreading Factor of the configuration for which we have a solid measure of the PRR
        :param cr: Coding Rate of the configuration for which we have a solid measure of the PRR
        :param prr: The measured PRR for such a configuration
        :return: None
        """

        # If going with the brute force approach, bounding should not be done
        if self.brute_force:
            return

        for v in self.margins.keys():
            sf_key, cr_key, tx_power_key = v
            if tx_power_key >= tx_power and sf_key == sf and cr_key >= cr and np.isnan(self.model[v]):
                self.margins[v][0] = prr
                self.margins[v][1] = max(self.margins[v][1], prr)
                self.margins[v][2] = self.margins[v][1] - self.margins[v][0]

                assert self.margins[v][2] >= 0

    def bound_lower_rest(self, tx_power, sf, cr, prr):
        """
        Applies the "bounding" technique to the acquired model in the "lower" direction.
        See the article for more information on this technique.
        :param tx_power: TX Power of the configuration for which we have a solid measure of the PRR
        :param sf: Spreading Factor of the configuration for which we have a solid measure of the PRR
        :param cr: Coding Rate of the configuration for which we have a solid measure of the PRR
        :param prr: The measured PRR for such a configuration
        :return: None
        """

        # If going with the brute force approach, bounding should not be done
        if self.brute_force:
            return

        for v in self.margins.keys():
            sf_key, cr_key, tx_power_key = v
            if tx_power_key <= tx_power and sf_key == sf and cr_key <= cr and np.isnan(self.model[v]):
                self.margins[v][0] = min(self.margins[v][0], prr)
                self.margins[v][1] = prr
                self.margins[v][2] = self.margins[v][1] - self.margins[v][0]

                assert self.margins[v][2] >= 0
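
    # Worked example of the bounding technique (hypothetical numbers): if the PRR
    # is measured as 0.7 for (SF=9, CR=2, TXP=8), any still-untested config with
    # the same SF, CR >= 2 and TXP >= 8 can only perform at least as well, so
    # bound_upper_rest raises its lower margin to 0.7; symmetrically, configs
    # with CR <= 2 and TXP <= 8 can only perform at most as well, so
    # bound_lower_rest caps their upper margin at 0.7. Either way the margin
    # width shrinks without spending packets on those configs.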

    def get_greater_margin(self):
        """
        Gets the configuration for which we have the least information about its PRR.
        Information is measured in terms of uncertainty
        :return: A key in the format (SF, CR, TXP)
        """
        idx = None
        # max_margin = 0
        max_margin = self.certainty
        for key, val in self.margins.items():
            if val[2] > max_margin and np.isnan(self.model[key]):
                if val[1] > self.min_prr_testable:
                    max_margin = val[2]
                    idx = key
                else:
                    print("skipping not testable", key)

        return idx

    def update_params(self):
        """
        Considering the received and sent packets, computes the uncertainty over the current-configuration PRR
        Based on that uncertainty, this function decides whether to continue sensing such a configuration or
        move on to the next one (if there is any)
        :return: the new point (SF, CR, TXP) to scan and whether it is a new point or None if we are done
        """
        # formulas taken from https://arxiv.org/pdf/1105.1486.pdf
        changed = False
        m = self.hits.count(True)
        n = len(self.hits)
        x_pos = self.get_pos_interval(m, n, self.confidence)
        x_neg = self.get_neg_interval(m, n, self.confidence)
        assert x_pos >= x_neg
        certainty = abs(x_pos - x_neg)
        print(self.node_id, "m: {}, n: {}, certainty: {}".format(m, n, certainty))
        # e_x = (m + 1) / (n + 2)
        if certainty < self.certainty and len(self.hits) > 10:  # good enough precision
            self.model[(self.g_sf, self.g_cr, self.g_tx_power)] = m / n
            self.margins[(self.g_sf, self.g_cr, self.g_tx_power)] = [x_neg, x_pos, x_pos - x_neg]

            self.bound_upper_rest(self.g_tx_power, self.g_sf, self.g_cr, min(x_neg, m / n))
            self.bound_lower_rest(self.g_tx_power, self.g_sf, self.g_cr, max(x_pos, m / n))

            new_test_point = self.get_greater_margin()

            # partial saving :D
            pickle.dump(self.model, open('model_{}.p'.format(self.node_id), 'wb'))
            pickle.dump(self.margins, open('margins_{}.p'.format(self.node_id), 'wb'))
            self.lh.append_msg(
                "Current uncertainty is: {}. Time = '{}'".format(self.compute_mean_uncertainty(), self.now))

            if new_test_point is None:
                print(self.node_id, "[[[[[[[[[[[ENDED]]]]]]]]]]]")
                # mqtt_client.close()
                self.lh.close()
                self.am_done = True
                # exit(0)

            changed = True
            print(self.node_id, "on to the next point", new_test_point)
            # self.hits = MyList()
            # self.snrs = list()
        else:
            new_test_point = [self.g_sf, self.g_cr, self.g_tx_power]
            print(self.node_id, "not enough certainty, continuing with the same point".format(abs(x_pos - x_neg)),
                  new_test_point)

        return new_test_point, changed

    def is_done(self):
        """
        Getter for am_done.
        Indicates whether the PRR modeling phase has ended
        :return: self.am_done
        """
        return self.am_done

    def dr_to_sf(self, data_rate):
        """
        Converts from data_rate (0-5) to Spreading Factor (7-12)
        :param data_rate: The data rate at which the communication is taking place
        :return: The corresponding Spreading Factor as an integer from 7 to 12
        """
        return 7 + (5 - data_rate)

    def sf_to_dr(self, sf):
        """
        Converts from Spreading Factor (7-12) to data_rate (0-5)
        :param sf: The Spreading Factor at which the communication is taking place
        :return: The corresponding Data Rate
        """
        return 5 - (sf - 7)
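
    # For illustration: dr_to_sf(0) == 12 (slowest, most robust) and
    # dr_to_sf(5) == 7 (fastest); sf_to_dr is its inverse, e.g. sf_to_dr(12) == 0.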

    def fill_up_holes(self):
        """
        For those transmission configurations for which the PRR has not been computed, estimate it from the margins
        :return: None
        """
        for key, val in self.model.items():
            if np.isnan(val):
                self.model[key] = np.mean(self.margins[key][:2])
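
    # For illustration (hypothetical entry): if (9, 2, 8) was never measured and
    # its margins ended up as [0.4, 0.8, 0.4], the model entry becomes
    # np.mean([0.4, 0.8]) = 0.6.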

    def uplink_callback(self, msg, client):
        """
        Callback for each packet received
        :param msg: The message received
        :param client: The MQTT Client
        :return: None
        """
        assert self.node_id == msg.dev_id
        print(self.node_id, msg)
        self.lh.append_msg(str(msg))

        try:
            self.now = msg.metadata.time
        except Exception:  # if, somehow, the msg does not include such a field
            self.now = datetime.datetime.now().isoformat()

        if self.am_done:  # The PRR model is completed
            print(msg.dev_id, "Commanding node to stop as we are done")
            client.send(msg.dev_id, base64.b64encode(bytes([255, 0, 255])).decode('utf-8'), port=1, conf=False,
                        sched="replace")
            self.fill_up_holes()
            print(self.node_id, 'Sent')
            return

        paq_bytes = base64.b64decode(msg.payload_raw)
        if len(paq_bytes) == 3 and paq_bytes[0] == 0x00 and paq_bytes[1] == 0x01 and paq_bytes[2] == 0x02:
            # this kind of message is received when the node has not received any order yet,
            # so let's command it to start acquiring the PRR model
            print(msg.dev_id, "Switching mote to SWEEP mode with:")
            print(msg.dev_id,
                  "\n\tSF: {}\n\tCoding Rate: {}\n\tTX POWER: {}".format(self.g_sf, self.g_cr, self.g_tx_power))
            client.send(msg.dev_id, base64.b64encode(
                bytes([2, self.sf_to_dr(self.g_sf), self.g_cr, self.g_tx_power,  # now, fill it up to 9 bytes
                       0x00, 0x00, 0x00, 0x00, 0x00])).decode('utf-8'), port=1, conf=False,
                        sched="replace")
            print(self.node_id, 'Sent')

            return

        assert len(paq_bytes) == 5, "Incorrect number of bytes {}".format(len(paq_bytes))
        data_rate, coding_rate, tx_power, counter, req_ack = [int(v) for v in paq_bytes]
        spreading_factor = self.dr_to_sf(data_rate)

        packet_cr = int(msg.metadata.coding_rate[2]) - 4
        sf_str_pos = msg.metadata.data_rate.find('SF')
        bw_str_pos = msg.metadata.data_rate.find('BW')
        packet_sf = int(msg.metadata.data_rate[sf_str_pos + 2:bw_str_pos])
        if (spreading_factor != packet_sf or packet_cr != coding_rate) and packet_sf == 12 and (
                        packet_cr == 3 or packet_cr == 4):
            # this type of packet is received when the node has not heard from the GW in 20 transmissions;
            # it is called a "bomb" because it is the node's last resort
            self.lh.append_msg(
                "Node has sent a bomb for params = {}, {}, {}".format(spreading_factor, coding_rate, tx_power))
            print("Node has sent a bomb for params =", spreading_factor, coding_rate, tx_power)
            # self.bombed[(spreading_factor, coding_rate, tx_power)] = True

            # this is the first bomb for these specific parameters (the last response may have been lost)
            if self.last_bomb_cr != coding_rate or \
                    self.last_bomb_sf != spreading_factor or \
                    self.last_bomb_tx_power != tx_power:
                self.last_bomb_cr = coding_rate
                self.last_bomb_sf = spreading_factor
                self.last_bomb_tx_power = tx_power

                if self.hits.get_params() != (spreading_factor, coding_rate, tx_power):
                    self.hits = MyList(spreading_factor, coding_rate, tx_power)
                    lost = max(0, counter - (-1))
                else:
                    lost = max(0, counter - self.last_counter)

                self.last_counter = counter
                m = self.hits.count(True)
                n = len(self.hits) + lost
                x_pos = self.get_pos_interval(m, n, self.confidence)
                x_neg = self.get_neg_interval(m, n, self.confidence)
                print(self.node_id, "m: {}, n: {}, certainty: {}".format(m, n, x_pos - x_neg))
                self.model[(spreading_factor, coding_rate, tx_power)] = m / n
                self.margins[(spreading_factor, coding_rate, tx_power)] = [x_neg, x_pos, x_pos - x_neg]

                self.bound_upper_rest(tx_power, spreading_factor, coding_rate, min(x_neg, m / n))
                self.bound_lower_rest(tx_power, spreading_factor, coding_rate, max(x_pos, m / n))

                new_test_point = self.get_greater_margin()

                # partial saving :D
                pickle.dump(self.model, open('model_{}.p'.format(self.node_id), 'wb'))
                pickle.dump(self.margins, open('margins_{}.p'.format(self.node_id), 'wb'))
                self.lh.append_msg(
                    "Current uncertainty is: {}. Time = '{}'".format(self.compute_mean_uncertainty(), self.now))

                if new_test_point is None:
                    print(self.node_id, "[[[[[[[[[[[ENDED]]]]]]]]]]]")
                    self.lh.close()
                    self.am_done = True
                    return

                self.g_sf, self.g_cr, self.g_tx_power = new_test_point
                self.last_counter = -1
                self.hits = MyList(self.g_sf, self.g_cr, self.g_tx_power)  # spreading_factor, coding_rate, tx_power
                # self.snrs = list()
                print(
                    "Setting new params to SF: {}, CR: {}, TX_POWER: {}".format(self.g_sf, self.g_cr, self.g_tx_power))

            client.send(msg.dev_id,
                        base64.b64encode(bytes([self.sf_to_dr(self.g_sf), self.g_cr, self.g_tx_power])).decode('utf-8'),
                        conf=False,
                        port=1, sched="replace")
            return

        print(self.node_id, "Received uplink from", msg.dev_id, "with pkt counter", counter)

        if spreading_factor == 0 and data_rate == 0 and tx_power == 0:
            print(self.node_id, "Node did not received updating packet, sending it")
            client.send(msg.dev_id,
                        base64.b64encode(bytes([self.sf_to_dr(self.g_sf), self.g_cr, self.g_tx_power])).decode('utf-8'),
                        conf=False,
                        port=1, sched="replace")
            return

        if spreading_factor != self.g_sf or coding_rate != self.g_cr or tx_power != self.g_tx_power:
            print(self.node_id, "Last downlink got lost?")
            print(self.node_id,
                  "Globals: {}, {}, {} vs Received: {}, {}, {}".format(self.g_sf, self.g_cr, self.g_tx_power,
                                                                       spreading_factor, coding_rate, tx_power))

            client.send(msg.dev_id,
                        base64.b64encode(bytes([self.sf_to_dr(self.g_sf), self.g_cr, self.g_tx_power])).decode('utf-8'),
                        conf=False,
                        port=1, sched="replace")

            # _, _= update_params(hits, snrs) # gratuitous update
            # cannot re-update because g_sf, g_cr and g_tx_power already changed with the previous pkt, and those samples were lost =(
            return

        # first uplink for this transmission configuration
        if self.hits.get_params() == (None, None, None):
            self.hits.set_params(self.g_sf, self.g_cr, self.g_tx_power)

        if counter < self.last_counter:
            if (spreading_factor, coding_rate, tx_power) != self.hits.get_params():
                print("New config, reseting counters")
                self.hits = MyList(spreading_factor, coding_rate, tx_power)
                self.snrs = list()
                self.last_counter = -1
        else:
            assert (spreading_factor, coding_rate, tx_power) == self.hits.get_params()
            # if (spreading_factor, coding_rate, tx_power) != self.hits.get_params():
            #     print("WOT?")

        if counter == self.last_counter:
            print(self.node_id, "SAME COUNTER!!!")
            if hasattr(msg, 'is_retry') and msg.is_retry:
                print("MSG is retry")
            return

        # normal reception continues
        missed_packets = counter - self.last_counter - 1
        print(self.node_id, missed_packets, 'lost packets')
        for _ in range(missed_packets):
            self.hits.append(False)
            self.snrs.append(np.nan)

        print(self.node_id, 'Normal hit :D')
        self.hits.append(True)

        if self.gw_id is not None:
            idx_my_gw = [g.gtw_id for g in msg.metadata.gateways].index(self.gw_id)
        else:
            idx_my_gw = 0

        self.snrs.append(msg.metadata.gateways[idx_my_gw].snr)
        self.last_counter = counter

        assert len(self.hits) < msg.counter + 2, 'This should not happen'

        # the idea is: after we have collected enough evidence, either confirm that the current certainty is enough
        # (in that case, proceed with the next config) or command the node to continue with the current config
        # (the node should hear from the GW at least every 20 packets, or it will send a bomb)
        if counter >= 10:
            new_test_point, changed = self.update_params()

            if self.am_done:
                print(msg.dev_id, "Commanding node to stop as we are done")
                client.send(msg.dev_id, base64.b64encode(bytes([255, 0, 255])).decode('utf-8'), port=1, conf=False,
                            sched="replace")
                print(self.node_id, 'Sent')
            else:
                if (changed is True) or (req_ack == 1):  # (len(self.hits) % 10 == 0 and counter > 10) or
                    self.g_sf = new_test_point[0]
                    self.g_cr = new_test_point[1]
                    self.g_tx_power = new_test_point[2]

                    print(self.node_id, "Sending updated config to node")
                    client.send(msg.dev_id,
                                base64.b64encode(bytes([self.sf_to_dr(self.g_sf), self.g_cr, self.g_tx_power])).decode(
                                    'utf-8'),
                                port=1,
                                conf=False,
                                sched="replace")
                    print(self.node_id, 'Sent')
                    # msg -> MSG(app_id, dev_id, hardware_serial, port, counter, payload_raw, payload_fields, metadata)

    def __init__(self, node_id, sfs, crs, tx_powers, certainty=0.2, gw_id='gateway_under_test', prev_model=None,
                 prev_margins=None, min_prr_testable=0, brute_force=False):
        """
        Worker sweep. His main duty is to construct the PRR model
        :param node_id: The string by which the node will be identified
        :param sfs: Set of Spreading Factors that will be swept
        :param crs: Set of Coding Rates that will be swept
        :param tx_powers: Set of Transmission Power values that will be swept
        :param certainty: The minimum level of certainty to be achieved (the difference between upper and lower bound)
        :param gw_id: The gateway id if only packets from it must be listened
        :param prev_model: Previous model or None if want to build it from scratch
        :param prev_margins: Previous margins or None if want to build it from scratch
        :param min_prr_testable: The minimum acceptable PRR. If we have some evidences that certain config will not achieve it, such a config will not be tested
        :param brute_force: Whether we are going with the brute force approach or the bounding technique
        """
        print("[Sweep] Initializing node handler for node:", node_id)

        self.node_id = node_id
        self.last_counter = -1
        self.hits = MyList(None, None, None)
        self.snrs = list()
        self.certainty = certainty
        self.confidence = 0.9  # confidence interval
        self.gw_id = gw_id
        self.min_prr_testable = min_prr_testable
        self.brute_force = brute_force
        self.g_tx_power = 14
        self.g_sf = 12  # start from the most robust Spreading Factor
        self.g_cr = 1  # CR = 4/5 (packet_cr decoding below maps 1 <-> '4/5')

        self.last_bomb_sf = None
        self.last_bomb_cr = None
        self.last_bomb_tx_power = None
        self.now = datetime.datetime.now().isoformat()

        assert self.g_tx_power in tx_powers
        assert self.g_sf in sfs
        assert self.g_cr in crs

        all_vars = list(product(sfs, crs, tx_powers))
        assert max(tx_powers) <= 14
        assert min(tx_powers) >= 2
        all_vars = sorted(all_vars, key=lambda x: (x[0] - 7) * 100 + abs(1.4 - int((x[2] - 2) / 4)) * 10 + x[1],
                          reverse=True)
        """
        Lo de arriba proyecta el espacio de variables a una dimension ordenable
        El primer dígito se lo lleva el SF -> lo mapeo de 7, 12 a (0, 5) * 100 (ocupa el digito más significativo)
        El segundo dígitoo se lo lleva el TXP -> lo mapeo de 2, 12 a (0, 1, 2, 4) * 10 y luego hago que el centro quede
        en 1.4. Asi, los primeros valores lo toman TXP14 y TXP2, luego TXP10 y TXP6
        El tercer digito se lo lleva el CR -> lo mapeo de 5, 8 a 5, 8 (no lo toco)
        """

        if prev_model is None:
            self.model = {v: np.nan for v in all_vars}
        else:
            assert isinstance(prev_model, OrderedDict)
            self.model = prev_model

        if prev_margins is None:
            self.margins = {v: [0, 1, 1] for v in all_vars}
        else:
            assert isinstance(prev_margins, OrderedDict)
            self.margins = prev_margins

        if prev_margins is not None or prev_model is not None:
            self.lh = LogHandler('log_{}.csv'.format(self.node_id), append=True)
        else:
            self.lh = LogHandler('log_{}.csv'.format(self.node_id))

        self.am_done = False

    def close(self):
        self.lh.close()
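
A minimal end-to-end sketch for the sweep worker (hedged: it assumes the
now-retired TTN v2 Python SDK, whose ttn.HandlerClient exposes an MQTT client
with set_uplink_callback; the app id, access key and parameter sets are
hypothetical placeholders):

import ttn

worker = Worker('my-node-01',
                sfs=[7, 8, 9, 10, 11, 12],
                crs=[1, 2, 3, 4],
                tx_powers=[2, 6, 10, 14])

handler = ttn.HandlerClient('my-app-id', 'ttn-account-v2.XXXX')
mqtt_client = handler.data()
mqtt_client.set_uplink_callback(worker.uplink_callback)  # called as cb(msg, client)
mqtt_client.connect()
# ... run until worker.is_done() reports True, then worker.close()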