Example 1
    def req_tasks(self, max_tasks: int):
        """
        Requests tasks from the master node; if a task fails to be received,
        it is retried up to 5 times.
        TODO: make this request as many tasks as the node has CPU cores,
              using multiprocessing's cpu_count() (see the sketch after this example)

        :param max_tasks:
        :return List[Task] or None:
        """
        try:
            resp = self.session.get(
                f'http://{self.host}:{self.port}/'
                f'{endpoints.GET_TASKS}/{self.job_id}/{max_tasks}',
                timeout=5)
            tasks: List[Task] = pickle_loads(decompress(resp.content))
            return tasks

        except CompressionException as error:
            logger.log_error(f'Unable to decompress raw data\n{error}')
            return None
        except UnpicklingError as error:
            logger.log_error(f'Unable to unpickle decompressed tasks\n{error}')
            return None
        except Exception as error:
            logger.log_warn(f'Task data not received, trying again.\n{error}')
            return None
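
A minimal sketch of the TODO above, assuming the same req_tasks() signature; req_tasks_for_all_cores is a hypothetical helper name:

    from multiprocessing import cpu_count

    def req_tasks_for_all_cores(self):
        # request one task per available CPU core on this worker
        return self.req_tasks(max_tasks=cpu_count())
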
Example 2
def index():
    # if post
    if request.method == 'POST':
        # capture the incoming data
        incoming = request.data
        # get the remote host address
        remote_addr = request.remote_addr
        # check if we expected to receive something from that IP;
        # one_or_none() returns None instead of raising when no row matches
        client = Session.query(Client).filter(
            Client.ip_address == remote_addr).one_or_none()
        # if client is found
        if client is not None:
            # generate decrypter algorithm
            # look for last saved token
            decrypter = Fernet(client.execution[-1].token)
            # decrypt the incoming data
            d_statistics = decrypter.decrypt(incoming)
            # unpickle data
            statistics = pickle_loads(d_statistics)
            # associate statistics to execution
            save_statistics(client, statistics)
            handle_alerts(client, statistics)
            return str(statistics)

    return 'Works!'
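
For reference, a sketch of the client side this view expects, assuming the cryptography library's Fernet API; token and the statistics dict are illustrative, and pickle_dumps is pickle.dumps as elsewhere in these examples:

    from pickle import dumps as pickle_dumps
    from cryptography.fernet import Fernet

    token = Fernet.generate_key()  # in practice: the last saved execution token
    # pickle the statistics, then encrypt them with the shared Fernet token;
    # POSTing this payload lets decrypter.decrypt() above recover the pickle
    payload = Fernet(token).encrypt(pickle_dumps({"cpu": 0.42}))
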
Example 3
        def tasks_done(job_id: int):
            """
            arguments: job_id: int
            Receives completed tasks and passes their payload to the application.
            Returns 200 OK.

            :param job_id:
            :return Response:
            """
            try:
                job_check(job_id)
                raw_data = request.get_data()
                tasks: List[Task] = pickle_loads(decompress(raw_data))
                self.task_manager.tasks_finished(tasks)
                return Response(status=200)

            except JobNotInitialized:
                return Response(response="Job Not Initialized", status=403)

            except WrongJob:
                return Response(response="Wrong Master", status=403)

            except CompressionException as error:
                logger.log_error(f'Unable to decompress raw data\n{error}')
                return Response(status=500)

            except UnpicklingError as error:
                logger.log_error(
                    f'Unable to unpickle decompressed tasks\n{error}')
                return Response(status=500)

            except Exception as error:
                logger.log_error(f'{type(error)} {error}')
                return Response(status=501)
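
A sketch of the matching sender, assuming decompress above is zlib.decompress and that session, url and tasks are set up as in Example 1:

    from pickle import dumps as pickle_dumps
    from zlib import compress

    # pickle the finished tasks, compress them, and POST the raw bytes
    resp = session.post(url, data=compress(pickle_dumps(tasks)), timeout=5)
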
Example 4
	def dataReceived(self, data):
		peer = self.transport.getPeer().host
		
		if data == b"requestingList":
			print("[Birthday Reminder] sending birthday list to client", peer)
			
			data = self.parent.readRawFile()
			if data:
				self.transport.write(data)
		else: # should be a pickled birthday list...
			receivedList = None
			try: # let's see if it's pickled data
				receivedList = pickle_loads(data)
				print "[Birthday Reminder] received birthday list from", peer
			except:
				print "[Birthday Reminder] received unknown package from", peer
				
			if receivedList is None:
				return
				
			self.parent.writeRawFile(data)
			self.parent.load()
			self.parent.addAllTimers()
			self.parent.showReceivedMessage(len(receivedList), peer)
			
		self.transport.loseConnection()
Example 5
    def batch_get_headers(self, uids):
        self.log('debug', f'Batch get {len(uids)} headers')

        return {
            uid: pickle_loads(header.data)
            for uid, header in self.batch_get_header_items(uids).items()
        }
Example 6
def home():

    form = SimpleForm()
    if form.validate_on_submit():

        # Form Inputs
        user_inputs = pickle_dumps({
            "STOCK":request.form["Asset"],
            "A":int(request.form["A_Input"]),
            "V":int(request.form["V_Input"]),
            "S":int(request.form["S_Input"]),
            "R":int(request.form["R_Input"]),
            "Res_Type":request.form["Res_Type"]
            })

        # establish new connection
        try:
            client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            client.connect(ADDR)
            print(f"** >> GAE: going to EC2 via ELB... << **\n")
            res = communicate(user_inputs, client)
            res = pickle_loads(res)
        except (ConnectionRefusedError, UnboundLocalError):
            return render_template('home.html',form=form, error_="Server Down. Please try again in a few minutes.")


        if res == "FAILED":
            return render_template('home.html', form=form, error_="Parameter values too large.")

        else:
            return render_template('home.html', form=form, plot=res["plot"], table=res["table"], summary=res["summary"])

    return render_template('home.html', form=form)
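
The communicate() helper is not shown; a minimal sketch matching the length-prefixed protocol used by the server in Example 8 (HEADER and FORMAT are assumptions, e.g. 64 and 'utf-8'):

    def communicate(payload: bytes, client) -> bytes:
        # fixed-width header carrying the payload length, then the payload
        header = str(len(payload)).encode(FORMAT)
        header += b' ' * (HEADER - len(header))
        client.sendall(header + payload)
        # read the response framed the same way: length header, then body
        resp_len = int(client.recv(HEADER).decode(FORMAT))
        resp = b''
        while len(resp) < resp_len:
            resp += client.recv(resp_len - len(resp))
        return resp
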
Example 7
    def __getitem__(self, key):
        data = self.db.hget(self.hkey, key)

        if data is not None:
            self.db.expire(self.hkey, self.ttl)
            return pickle_loads(data)
        else:
            raise KeyError(key)
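
A hypothetical write-side counterpart, mirroring the hget/expire pattern above (pickle_dumps is pickle.dumps):

    def __setitem__(self, key, value):
        # pickle the value into the same Redis hash and refresh the TTL
        self.db.hset(self.hkey, key, pickle_dumps(value))
        self.db.expire(self.hkey, self.ttl)
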
Example 8
def handle_client(conn, addr):

    connected = True
    while connected:

        # receiving
        msg_length = conn.recv(HEADER).decode(FORMAT)

        if not msg_length: # ELB Health checks - Disconnect after ping
            print(f"[PING] {addr}")
            connected = False

        if msg_length: # first msg sent from client telling the server the length of upcoming msg
            print(f"[MESSAGE RECEIVED] {addr}")

            msg_length = int(msg_length)
            msg = b'' # user inputs from GAE
            while len(msg) < msg_length:
                msg += conn.recv(msg_length - len(msg)) # receive only the remaining bytes of the message

            user_inputs=pickle_loads(msg)

            # process received msg
            try:
                start_time = perf_counter()
                generated_res = ec2_process.generate_results(STOCK=user_inputs["STOCK"], A=user_inputs["A"], V=user_inputs["V"], S=user_inputs["S"], R=user_inputs["R"], Res_Type=user_inputs["Res_Type"]) # returns dict of pkls
                finish_time = perf_counter()
                print(f'[DONE CALCULATION] {addr} : Res_Type: {user_inputs["Res_Type"]}, R: {user_inputs["R"]}, Duration: {finish_time - start_time}')
                status = "OK"

            except Exception:
                print(f"[FAILED CALCULATION] {addr}")
                status = "FAILED"
                

            if status=="OK":
                # sending results back
                s_msg_length = len(generated_res)
                s_send_length = str(s_msg_length).encode(FORMAT)
                s_send_length += b' ' * (HEADER - len(s_send_length))
                conn.sendall(s_send_length)
                conn.sendall(generated_res)
                connected = False

            else:
                # sending failure msg
                fail_msg = pickle_dumps(status)
                s_msg_length = len(fail_msg)
                s_send_length = str(s_msg_length).encode(FORMAT)
                s_send_length += b' ' * (HEADER - len(s_send_length))
                conn.sendall(s_send_length)
                conn.sendall(fail_msg)
                connected = False

    conn.close()
Example 9
 def inner(*args, **kwargs):
     data = self.hvals(key_name)
     if data:
         return [pickle_loads(item) for item in data]
     else:
         data = func(*args, **kwargs)
         if data:
             self.hmset(
                 key_name, {
                     field_finder(item): pickle_dumps(item)
                     for item in data
                 })
         return data
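
inner() above is the body of a caching decorator; a hypothetical sketch of the enclosing wrapper on the same Redis-backed class (cached_list and its parameters are illustrative names):

    def cached_list(self, key_name, field_finder):
        # cache a function's list result in a Redis hash,
        # one field per item, keyed by field_finder(item)
        def decorator(func):
            def inner(*args, **kwargs):
                ...  # body as shown above
            return inner
        return decorator
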
Example 10
 def get(self, name, default=None, expire=MARKER):
     try:
         binary = self.get_binary(name, expire=expire)
     except KeyError:
         return default
     else:
         try:
             value = pickle_loads(binary)
         except EOFError:
             # Something went wrong! Delete the file to prevent further errors
             self.remove(name)
             raise
         else:
             return value
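
A hypothetical write-side counterpart; set_binary() is an assumed mirror of the get_binary() used above:

    def set(self, name, value, expire=MARKER):
        # pickle the value and hand it to the binary storage layer
        self.set_binary(name, pickle_dumps(value), expire=expire)
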
Example 11
def pull_data(problem, data_type):
    '''
        Pull the problem's data from the data server.
        data_type can be one of settings.META_TYPE.
    '''
    msg = {
        'problem': problem,
        'type': data_type,
        'authkey': FETCH_DATA_AUTHKEY
    }
    try:
        r = post(url=FETCH_DATA_URL, data=msg)
        return pickle_loads(r.content)
    except Exception:
        return None
Example 12
def pull_data(problem, data_type):
    '''
        Pull the problem's data from the data server.
        data_type can be one of settings.META_TYPE.
    '''
    msg = {
        'problem': problem,
        'type': data_type,
        'authkey': FETCH_DATA_AUTHKEY}
    try:
        r = post(
            url=FETCH_DATA_URL,
            data=msg)
        return pickle_loads(r.content)
    except Exception:
        return None
Example 13
    def on_message(self, message):
        """Callback fired /on_message/.

        This end of the web socket (Python side) will be
        fired whenever the async execution is
        completed.
        """
        if message is not None:
            msg = dict(pickle_loads(message))
            exec_output = None
            if EXEC_OUTPUT in msg:
                exec_output = msg.pop(EXEC_OUTPUT)

            # Look for modules to Import
            self._check_modules_import(msg)
            # Update Output History
            self._update_output_history(exec_output)
            self.ws_conn.close()
Example 14
	def dataReceived(self, data):
		peer = self.transport.getPeer().host
		
		receivedList = None
		try:
			receivedList = pickle_loads(data)
			print "[Birthday Reminder] received birthday list from", peer
		except:
			print "[Birthday Reminder] received unknown package from", peer
			
		if receivedList is None:
			return
			
		# save and load the received list
		self.parent.save(receivedList)
		self.parent.load()
		self.parent.addAllTimers()
		self.parent.showReceivedMessage(len(receivedList), peer)
Example 15
def remove_stale_headers():
    folder_id_to_uids = {}

    for folder in FolderCacheItem.query.all():
        if folder.uids:
            folder_id_to_uids[folder.id] = pickle_loads(folder.uids)

    all_headers = FolderHeaderCacheItem.query.all()
    headers_to_delete = []

    for header in all_headers:
        if header.uid not in folder_id_to_uids.get(header.folder_id, set()):
            logger.info(f'Deleting stale cache header: {header}')
            headers_to_delete.append(header)

    if headers_to_delete:
        delete_cache_items(*headers_to_delete)

    logger.info(f'Deleted {len(headers_to_delete)}/{len(all_headers)} cache headers')
Example 16
    def _handle_control(self, packet):
        """ This should be called from _service_io_thread
        """
        if packet[0] == b'UPDATE':
            # update service connections
            idx = packet.index(b'|')
            fresh_addrs = packet[1:idx]
            stale_addrs = packet[idx+1:]
            super(ThreadingRPCLoadBalancer, self)._update_connections(
                fresh_addrs, stale_addrs
            )

        elif packet[0] == b'SEND':
            # trigger sending of pending requests
            if len(packet) > 1:
                requests = pickle_loads(packet[1])
            else:
                requests = []
            super(ThreadingRPCLoadBalancer, self).send_requests(*requests)
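
For reference, the sender side implied by this parser would frame multipart packets as below (a sketch; fresh_addrs, stale_addrs and requests are assumptions, pickle_dumps is pickle.dumps):

    # control frames matching _handle_control() above
    update_packet = [b'UPDATE'] + fresh_addrs + [b'|'] + stale_addrs
    send_packet = [b'SEND', pickle_dumps(requests)]
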
Example 17
 def retrieve_model(self, uuid):
     filename = self.path_url + uuid + '.pkl'
     key = self.get_key(filename)
     pickled_obj = key.get_contents_as_string()
     return pickle_loads(pickled_obj)
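
A hypothetical write-side counterpart, assuming the classic boto Key API used above:

    def store_model(self, uuid, model):
        # pickle the model and store it under the same key layout
        key = self.get_key(self.path_url + uuid + '.pkl')
        key.set_contents_from_string(pickle_dumps(model))
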
Example 18
def create_target_http_protocol(raw_str: str, target_config: TargetConfig,
                                target_type: str) -> Iterator[Target]:
    """
    На основании ip адреса и настроек возвращает через yield экзэмпляр Target.
    Каждый экземпляр Target содержит всю необходимую информацию(настройки и параметры) для функции worker.
    """
    if target_type == 'network':
        target_ip = raw_str
        endpoints = target_config.endpoint.split(RESERVED_CHAR)
        for endpoint in endpoints:
            kwargs = target_config.as_dict()
            url = f'{target_config.scheme}://{target_ip}:{target_config.port}{endpoint}'
            if target_config.list_payloads:
                for payload in target_config.list_payloads:
                    _headers: Dict = target_config.headers
                    data_payload = payload
                    if (target_config.single_payload_type).lower() == 'data':
                        data_payload = payload
                    elif (target_config.single_payload_type).lower() == 'json':
                        pass
                    elif (target_config.single_payload_type
                          ).lower() == 'files':  # TODO add exception
                        # _target_payload_string = (payload).decode('utf-8')
                        # target_payload_dict = ujson_loads(_target_payload_string)
                        # no better approach found yet
                        # TODO: rethink this
                        try:
                            target_payload_dict = pickle_loads(payload)
                            assert isinstance(target_payload_dict, dict)
                        except Exception:
                            pass
                        else:
                            data_payload, _headers = encode_files_payload(
                                files=target_payload_dict,
                                data=None,
                                headers=target_config.headers)
                    additions = {
                        'data_payload': {
                            'payload_raw': b64encode(payload).decode('utf-8'),
                            'variables': []
                        }
                    }
                    # drop keys that are passed explicitly to Target below
                    kwargs.pop('headers', None)
                    kwargs.pop('endpoint', None)
                    target_instance = Target(ip=target_ip,
                                             url=url,
                                             headers=_headers,
                                             payload=data_payload,
                                             endpoint=endpoint,
                                             additions=additions,
                                             **kwargs)
                    yield target_instance
            elif target_config.python_payloads:
                payloads_generator = get_generator(target_config)
                for payload in payloads_generator(target_ip, target_type,
                                                  kwargs):
                    payload = payload['payload']
                    additions = payload['data_payload']
                    _headers: Dict = target_config.headers
                    data_payload = payload
                    if (target_config.single_payload_type).lower() == 'data':
                        data_payload = payload
                    elif (target_config.single_payload_type).lower() == 'json':
                        pass
                    elif (target_config.single_payload_type
                          ).lower() == 'files':  # TODO add exception
                        # no better approach found yet
                        # TODO: rethink this
                        try:
                            target_payload_dict = pickle_loads(payload)
                            assert isinstance(target_payload_dict, dict)
                        except Exception:
                            pass
                        else:
                            data_payload, _headers = encode_files_payload(
                                files=target_payload_dict,
                                data=None,
                                headers=target_config.headers)
                    kwargs.pop('headers', None)
                    kwargs.pop('endpoint', None)
                    target_instance = Target(ip=target_ip,
                                             url=url,
                                             payload=data_payload,
                                             headers=_headers,
                                             endpoint=endpoint,
                                             additions=additions,
                                             **kwargs)
                    yield target_instance
            else:
                kwargs.pop('endpoint', None)
                target_instance = Target(ip=target_ip,
                                         url=url,
                                         payload=None,
                                         additions=None,
                                         endpoint=endpoint,
                                         **kwargs)
                yield target_instance
    elif target_type == 'hostname':
        endpoints = target_config.endpoint.split(RESERVED_CHAR)
        for endpoint in endpoints:
            kwargs = target_config.as_dict()
            hostname = raw_str
            kwargs.pop('hostname', None)

            url = f'{target_config.scheme}://{hostname}:{target_config.port}{endpoint}'
            if target_config.list_payloads:
                for payload in target_config.list_payloads:
                    additions = {
                        'data_payload': {
                            'payload_raw': b64encode(payload).decode('utf-8'),
                            'variables': []
                        }
                    }
                    kwargs.pop('endpoint', None)
                    target_instance = Target(hostname=hostname,
                                             url=url,
                                             ip='',
                                             endpoint=endpoint,
                                             payload=payload,
                                             additions=additions,
                                             **kwargs)
                    yield target_instance
            elif target_config.python_payloads:
                payloads_generator = get_generator(target_config)
                for payload in payloads_generator(hostname, target_type,
                                                  kwargs):
                    payload = payload['payload']
                    additions = payload['data_payload']
                    kwargs.pop('endpoint', None)
                    target_instance = Target(hostname=hostname,
                                             url=url,
                                             ip='',
                                             payload=payload,
                                             additions=additions,
                                             endpoint=endpoint,
                                             **kwargs)
                    yield target_instance
            else:
                kwargs.pop('endpoint', None)
                target_instance = Target(hostname=hostname,
                                         ip='',
                                         url=url,
                                         payload=None,
                                         additions=None,
                                         endpoint=endpoint,
                                         **kwargs)
                yield target_instance
    elif target_type == 'url':
        kwargs = target_config.as_dict()
        url_line = urlparse(raw_str)
        url = raw_str
        hostname = url_line.hostname
        scheme: str = url_line.scheme
        port = url_line.port
        if port is None:
            # ParseResult.port is None when the URL has no explicit port
            if scheme == 'https':
                port = 443  # default port
            elif scheme == 'http':
                port = 80
        ip = ''
        _keys = ['url', 'ip', 'port', 'hostname']
        for k in _keys:
            kwargs.pop(k, None)
        _struct = {'hostname': hostname, 'ip': ip, 'port': port, 'url': url}
        if target_config.list_payloads:
            for payload in target_config.list_payloads:
                additions = {
                    'data_payload': {
                        'payload_raw': b64encode(payload).decode('utf-8'),
                        'variables': []
                    }
                }
                target_instance = Target(payload=payload,
                                         additions=additions,
                                         **_struct,
                                         **kwargs)
                yield target_instance
        elif target_config.python_payloads:
            payloads_generator = get_generator(target_config)
            for payload in payloads_generator(hostname, target_type, kwargs):
                payload = payload['payload']
                additions = payload['data_payload']
                target_instance = Target(payload=payload,
                                         additions=additions,
                                         **_struct,
                                         **kwargs)
                yield target_instance
        else:
            target_instance = Target(payload=None,
                                     additions=None,
                                     **_struct,
                                     **kwargs)
            yield target_instance
Example 19
 def get_uids(self):
     uids = self.get_folder_cache_item().uids
     if uids:
         return pickle_loads(uids)
Example 20
 def get_headers(self, uid):
     headers = self.get_header_cache_item(uid)
     if headers:
         return pickle_loads(headers.data)
Example 21
 def test_pickle(self):
     loc = Location(40.768721, -111.901673)
     pickled = pickle_dumps(loc)
     unpickled = pickle_loads(pickled)
     self.assertEqual(loc.coords, unpickled.coords)
Example 22
 def __next__(self):
     return pickle_loads(next(self.fh))
Example 23
def decode_sent(data):
  pickle_size = unpack(INT32_FORMAT, data[:INT32_SIZE])[0]
  return pickle_loads(data[INT32_SIZE:INT32_SIZE + pickle_size])
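
The inverse direction, assuming INT32_FORMAT is a struct format such as '!i' and pickle_dumps is pickle.dumps:

    from struct import pack

    def encode_sent(obj):
        # length-prefix the pickle with a 32-bit size field
        payload = pickle_dumps(obj)
        return pack(INT32_FORMAT, len(payload)) + payload
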
Example 24
def decode_sent(data):
    pickle_size = unpack(INT32_FORMAT, data[:INT32_SIZE])[0]
    return pickle_loads(data[INT32_SIZE : INT32_SIZE + pickle_size])
Example 25
    def _execute_as_forked_process(become: str, task: TaskInterface,
                                   temp: TempManager, ctx: ExecutionContext):
        """Execute task code as a separate Python process

        The communication between the processes uses serialized data and text files.
        One text file is a script; the task code is passed via stdin together with the whole context.
        The second text file holds the return value of the executed task - it can be a boolean or an exception.

        When an exception is returned by a task, it is re-raised here - so the original exception is shown
        without any proxies.
        """

        if not become:
            become = task.get_become_as()

        # prepare file with source code and context
        communication_file = temp.assign_temporary_file()
        task.io().debug('Assigning communication temporary file at "%s"' %
                        communication_file)

        context_to_pickle = {
            'task': task,
            'ctx': ctx,
            'communication_file': communication_file
        }

        try:
            task.io().debug('Serializing context')
            with open(communication_file, 'wb') as f:
                f.write(pickle_dumps(context_to_pickle))

        except (AttributeError, TypeError) as e:
            task.io().error(
                'Cannot fork, serialization failed. ' +
                'Hint: Tasks that are using internally inner-methods and ' +
                'lambdas cannot be used with become/fork')
            task.io().error(str(e))

            if task.io().is_log_level_at_least('debug'):
                task.io().error('Pickle trace: ' +
                                str(get_unpicklable(context_to_pickle)))

            return False

        # set permissions to temporary file
        if become:
            task.io().debug('Setting temporary file permissions')
            os.chmod(communication_file, 0o777)

            try:
                pwd.getpwnam(become)
            except KeyError:
                task.io().error('Unknown user "%s"' % become)
                return False

        task.io().debug('Executing python code')
        task.py(code=FORKED_EXECUTOR_TEMPLATE,
                become=become,
                capture=False,
                arguments=communication_file)

        # collect, process and pass result
        task.io().debug('Parsing subprocess results from serialized data')
        with open(communication_file, 'rb') as conn_file:
            task_return = pickle_loads(conn_file.read())

        if isinstance(task_return, Exception):
            task.io().debug('Exception was raised in subprocess, re-raising')
            raise task_return

        return task_return
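
A sketch of the child-process side of this protocol, following the docstring above; the execute() call is an assumption about the task API:

    # inside the forked process: unpickle the context, run the task, and
    # write back either a boolean result or the raised exception itself
    with open(communication_file, 'rb') as f:
        context = pickle_loads(f.read())

    try:
        result = context['task'].execute(context['ctx'])
    except Exception as e:
        result = e

    with open(communication_file, 'wb') as f:
        f.write(pickle_dumps(result))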