Example #1
File: master.py Project: catnapz/LANDS
        def get_tasks(job_id: int, num_tasks: int):
            """
            Fetch tasks from the queue and return them, formatted, to the slave.

            :param job_id: Integer
            :param num_tasks: Integer
            :return Any:
            """
            try:
                conn_id = request.cookies.get('id')
                job_check(job_id)
                tasks: List[Task] = self.task_manager.connect_available_tasks(
                    num_tasks, conn_id)
                pickled_tasks = pickle_dumps(tasks)
                compressed_data = compress(pickled_tasks)
                return create_binary_resp(compressed_data,
                                          f'tasks_job_{self.job.job_id}')

            except NoMoreTasks:
                if self.status_manager.is_job_done():
                    job_finished_task = Task(-1, "", [], None, "", "")
                    job_finished_task.set_message_type(TaskMessageType.JOB_END)
                    pickled_tasks = pickle_dumps([job_finished_task])
                    compressed_data = compress(pickled_tasks)
                    return create_binary_resp(compressed_data,
                                              f'job_{self.job.job_id}_done')

                logger.log_error('Unable to retrieve tasks from manager')
                return Response(status=500)

            except JobNotInitialized:
                return Response(response="Job Not Initialized", status=403)

            except WrongJob:
                return Response(response="Wrong Master", status=403)

            except PicklingError as error:
                logger.log_error(f'Unable to pickle tasks\n{error}')
                return Response(status=500)

            except CompressionException as error:
                logger.log_error(f'Unable to compress pickled tasks\n{error}')
                return Response(status=500)

            except Exception as error:
                logger.log_error(f'{type(error)} {error}')
                return Response(status=501)
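
For reference, the inverse of the pattern above: the slave has to decompress and unpickle the response body. A minimal sketch, assuming compress/decompress are zlib-style counterparts (the project's own helpers are not shown here):

from pickle import dumps as pickle_dumps, loads as pickle_loads
from zlib import compress, decompress

tasks = [{'task_id': 1, 'payload': 'work'}]  # stand-in for List[Task]
body = compress(pickle_dumps(tasks))         # what get_tasks returns
recovered = pickle_loads(decompress(body))   # what the slave would do
assert recovered == tasks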
Example #2
File: slave.py Project: catnapz/LANDS
    def send_tasks(self, tasks: List[Task]):
        """
        Sends (processed) tasks back to the master

        :param tasks: the processed tasks to return
        :return Boolean:
        """
        try:
            pickled_tasks = pickle_dumps(tasks)
            compressed_data = compress(pickled_tasks)
            response = self.session.post(
                f'http://{self.host}:{self.port}/'
                f'{endpoints.TASKS_DONE}/{self.job_id}',
                data=compressed_data)

            if response.status_code == 200:
                logger.log_info(
                    'Completed tasks sent back to master successfully')
            else:
                logger.log_error(
                    'Failed to send completed tasks back to master, '
                    f'response_code: {response.status_code}')
            return True
        except PicklingError as error:
            logger.log_error(f'Unable to pickle tasks\n{error}')
            return False
        except CompressionException as error:
            logger.log_error(f'Unable to compress pickled tasks\n{error}')
            return False
        except FileNotFoundError as error:
            logger.log_error(f'Send_tasks file not found\n{error}')
            return False
        except Exception as error:
            logger.log_error(f'Send_tasks broad exception\n{error}')
            return False
Example #3
def test_lconf_classes4():
   """ Tests: test_lconf_classes4
   """
   print('::: TEST: test_lconf_classes4()')

   obj_ = LconfRoot({
      'key1': 'value1',
      'key2': 'value2',
      'key3': 'value3'
   },
      ['key1', 'key2', 'key3'],
      {'key2': 'NOT-DEFINED', 'key3': 'NOT-DEFINED'}
   )
   obj_.set_class__dict__item('mydata', 'new value')

   dumps_result = pickle_dumps(obj_, protocol=P_HIGHEST_PROTOCOL)
   obj_from_pickle = LconfRoot.frompickle(dumps_result)

   eq_(obj_.mydata, 'new value', msg=None)
   eq_(obj_.key_order, ['key1', 'key2', 'key3'], msg=None)
   eq_(obj_.key_empty_replacementvalue, {'key3': 'NOT-DEFINED', 'key2': 'NOT-DEFINED'}, msg=None)

   eq_(obj_.mydata, obj_from_pickle.mydata, msg=None)
   eq_(obj_.key_order, obj_from_pickle.key_order, msg=None)
   eq_(obj_.key_empty_replacementvalue, obj_from_pickle.key_empty_replacementvalue, msg=None)
Example #4
def test_lconf_structure_classes13_expect_failure():
   """ Tests: test_lconf_structure_classes13_expect_failure
   """
   print('::: TEST: test_lconf_structure_classes13_expect_failure()')

   dumps_result = pickle_dumps([('key', 'value'), ('key1', 'value1')], protocol=P_HIGHEST_PROTOCOL)
   obj_from_pickle = Root.frompickle(dumps_result)
Example #5
def test_lconf_classes5_expect_failure():
    """ Tests: test_lconf_classes5_expect_failure
   """
    print('::: TEST: test_lconf_classes5_expect_failure()')

    obj_ = LconfRoot({
        'key1': 'value1',
        'key2': 'value2',
        'key3': 'value3'
    }, ['key1', 'key2', 'key3'], {
        'key2': 'NOT-DEFINED',
        'key3': 'NOT-DEFINED'
    })
    obj_.set_class__dict__item('mydata', 'new value')
    eq_(obj_.mydata, 'new value', msg=None)
    eq_(obj_.key_order, ['key1', 'key2', 'key3'], msg=None)
    eq_(obj_.key_empty_replacementvalue, {
        'key3': 'NOT-DEFINED',
        'key2': 'NOT-DEFINED'
    },
        msg=None)

    obj_.__reduce__()

    dumps_result = pickle_dumps([('key', 'value'), ('key1', 'value1')],
                                protocol=P_HIGHEST_PROTOCOL)
    obj_from_pickle = LconfRoot.frompickle(dumps_result)
Example #6
def home():

    form = SimpleForm()
    if form.validate_on_submit():

        # Form Inputs
        user_inputs = pickle_dumps({
            "STOCK":request.form["Asset"],
            "A":int(request.form["A_Input"]),
            "V":int(request.form["V_Input"]),
            "S":int(request.form["S_Input"]),
            "R":int(request.form["R_Input"]),
            "Res_Type":request.form["Res_Type"]
            })

        # establish new connection
        try:
            client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            client.connect(ADDR)
            print(f"** >> GAE: going to EC2 via ELB... << **\n")
            res = communicate(user_inputs, client)
            res = pickle_loads(res)
        except (ConnectionRefusedError, UnboundLocalError):
            return render_template('home.html', form=form, error_="Server Down. Please try again in a few minutes.")

        if res == "FAILED":
            return render_template('home.html', form=form, error_="Parameter values too large.")

        else:
            return render_template('home.html', form=form, plot=res["plot"], table=res["table"], summary=res["summary"])

    return render_template('home.html', form=form)
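
The communicate() helper used above is not shown. A hypothetical sketch of it, assuming the fixed-width HEADER/FORMAT length-prefix framing that handle_client in Example #10 expects:

HEADER = 64        # assumed header width
FORMAT = 'utf-8'   # assumed encoding

def communicate(payload: bytes, client) -> bytes:
    # send a fixed-width header carrying the payload length, then the payload
    send_length = str(len(payload)).encode(FORMAT)
    send_length += b' ' * (HEADER - len(send_length))
    client.sendall(send_length)
    client.sendall(payload)
    # read the response header, then exactly that many response bytes
    resp_length = int(client.recv(HEADER).decode(FORMAT))
    resp = b''
    while len(resp) < resp_length:
        resp += client.recv(resp_length - len(resp))
    return resp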
Example #7
def test_lconf_structure_classes12():
    """ Tests: test_lconf_structure_classes12
   """
    print('::: TEST: test_lconf_structure_classes12()')

    obj_ = Root([
        ('key1', 'value1'),
        ('key2', 'value2', None, 'NOT-DEFINED'),
        ('key3', '', None, 'NOT-DEFINED'),
    ])
    obj_.set_class__dict__item('mydata', 'new value')

    dumps_result = pickle_dumps(obj_, protocol=P_HIGHEST_PROTOCOL)
    obj_from_pickle = Root.frompickle(dumps_result)

    eq_(obj_.mydata, 'new value', msg=None)
    eq_(obj_.key_order, ['key1', 'key2', 'key3'], msg=None)
    eq_(obj_.key_empty_replacementvalue, {
        'key3': 'NOT-DEFINED',
        'key2': 'NOT-DEFINED'
    },
        msg=None)

    eq_(obj_.mydata, obj_from_pickle.mydata, msg=None)
    eq_(obj_.key_order, obj_from_pickle.key_order, msg=None)
    eq_(obj_.key_empty_replacementvalue,
        obj_from_pickle.key_empty_replacementvalue,
        msg=None)
Example #9
File: rpCache.py Project: brsynth/rpCache
 def store_cache_to_file(self, _attr_name, data, gzip=False):
     filename = self.cache_dir + '/' + _attr_name + '.pickle'
     pickle_obj = pickle_dumps(data)
     if gzip:
         filename += '.gz'
         with gzip_open(filename, "wb") as f:
             f.write(pickle_obj)
     else:
         with open(filename, "wb") as f:
             f.write(pickle_obj)
Example #10
def handle_client(conn, addr):

    connected = True
    while connected:

        # receiving
        msg_length = conn.recv(HEADER).decode(FORMAT)

        if not msg_length: # ELB Health checks - Disconnect after ping
            print(f"[PING] {addr}")
            connected = False

        if msg_length: # first msg sent from client telling the server the length of upcoming msg
            print(f"[MESSAGE RECEIVED] {addr}")

            msg_length = int(msg_length)
            msg = b'' # user inputs from GAE
            while len(msg) < msg_length:
                msg += conn.recv(msg_length) # receive the whole main msg as we know the size of it

            user_inputs = pickle_loads(msg)

            # process received msg
            try:
                start_time = perf_counter()
                generated_res = ec2_process.generate_results(STOCK=user_inputs["STOCK"], A=user_inputs["A"], V=user_inputs["V"], S=user_inputs["S"], R=user_inputs["R"], Res_Type=user_inputs["Res_Type"]) # returns dict of pkls
                finish_time = perf_counter()
                print(f'[DONE CALCULATION] {addr} : Res_Type: {user_inputs["Res_Type"]}, R: {user_inputs["R"]}, Duration: {finish_time - start_time}')
                status = "OK"

            except Exception:
                print(f"[FAILED CALCULATION] {addr}")
                status = "FAILED"

            if status == "OK":
                # sending results back
                s_msg_length = len(generated_res)
                s_send_length = str(s_msg_length).encode(FORMAT)
                s_send_length += b' ' * (HEADER - len(s_send_length))
                conn.sendall(s_send_length)
                conn.sendall(generated_res)
                connected = False

            else:
                # sending failure msg
                fail_msg = pickle_dumps(status)
                s_msg_length = len(fail_msg)
                s_send_length = str(s_msg_length).encode(FORMAT)
                s_send_length += b' ' * (HEADER - len(s_send_length))
                conn.sendall(s_send_length)
                conn.sendall(fail_msg)
                connected = False

    conn.close()
Example #11
    def send_requests(self, *requests):
        """ Triggers sending of pending requests

            This thread-safe version delegates the task to the io_thread.

            Note: it should be called from the _peer_refresher thread only.
        """
        packet = [b'SEND']
        if requests:
            packet.append(pickle_dumps(requests, protocol=-1))
        self._control.send_multipart(packet)
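
The io_thread's receiving side is not shown. A hypothetical sketch of how it might consume this control packet, assuming a pyzmq socket and the same frame layout:

from pickle import loads as pickle_loads

def handle_control(control_socket):
    """Return the unpickled requests carried by a control packet, if any."""
    frames = control_socket.recv_multipart()
    if frames[0] == b'SEND':
        # the requests frame is optional, mirroring send_requests() above
        return pickle_loads(frames[1]) if len(frames) > 1 else ()
    return None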
Example #12
 def inner(*args, **kwargs):
     data = self.hvals(key_name)
     if data:
         return [pickle_loads(item) for item in data]
     else:
         data = func(*args, **kwargs)
         if data:
             self.hmset(
                 key_name, {
                     field_finder(item): pickle_dumps(item)
                     for item in data
                 })
         return data
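
inner() above is the wrapper produced by a caching decorator. A hypothetical reconstruction of its surroundings, assuming a redis-py-style client exposing hvals/hmset (note that recent redis-py deprecates hmset in favour of hset(name, mapping=...)):

from pickle import dumps as pickle_dumps, loads as pickle_loads

def cached_in_hash(cache, key_name, field_finder):
    """Cache a list-returning function in a Redis hash, one field per item."""
    def decorator(func):
        def inner(*args, **kwargs):
            data = cache.hvals(key_name)
            if data:
                return [pickle_loads(item) for item in data]
            data = func(*args, **kwargs)
            if data:
                cache.hmset(key_name, {field_finder(item): pickle_dumps(item)
                                       for item in data})
            return data
        return inner
    return decorator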
Example #13
File: test_task.py Project: catnapz/LANDS
 def test_req_task(self, mock_session: Session, mock_resp: Response):
     # Arrange
     expected_tasks: List[Task] = [Task(1, "", [], None, "", "")]
     pickled_tasks = pickle_dumps(expected_tasks)
     compressed_data = compress(pickled_tasks)
     mock_resp.status_code = 69
     mock_resp.content = compressed_data
     mock_session.return_value = mock_session
     mock_session.get.return_value = mock_resp
     self.slave.session = mock_session
     # Act
     actual_tasks = self.slave.req_tasks(1)
     # Assert
     assert expected_tasks[0].task_id == actual_tasks[0].task_id
Example #14
    def set_headers(self, uid, headers):
        self.log('debug', f'Set headers for UID {uid}: {headers}')

        headers_data = pickle_dumps(headers)

        headers = self.get_header_cache_item(uid)
        if headers:
            headers.data = headers_data
        else:
            headers = FolderHeaderCacheItem(
                folder_id=self.get_folder_cache_item().id,
                uid=uid,
                data=headers_data,
            )

        save_cache_items(headers)
Example #15
File: main.py Project: grwlf/galaxy-lang
def loadsection(si: int) -> List[Example]:
  print(f"Loading section {si}")
  acc: list = []
  try:
    with open(mklens(load3(si,False)).out_examples.syspath, 'rb') as ef:
      _next=fd2examples(ef)
      e=_next()
      print(type(e))
      s=pickle_dumps(e, protocol=HIGHEST_PROTOCOL)
      print(s)
      # while True:
      #   acc.append(_next())
  except KeyboardInterrupt:
    raise
  except LookupError:
    pass
  return acc[:10]
Example #16
 def _pack_namespace(self):
     """Collect all the /pickable/ objects from the namespace
     so to pass them to the async execution environment."""
     white_ns = dict()
     white_ns.setdefault('import_modules', list())
     for k, v in self.shell.user_ns.items():
         if k not in DEFAULT_BLACKLIST:
             try:
                 if inspect_ismodule(v):
                     white_ns['import_modules'].append((k, v.__name__))
                 else:
                     _ = pickle_dumps({k: v})
                     white_ns[k] = v
             except PicklingError:
                 continue
             except Exception:
                 continue
     white_ns['connection_id'] = self.connection_id
     return white_ns
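
The key trick above is using pickle_dumps as a picklability probe. A minimal standalone version of the same filter:

from pickle import dumps as pickle_dumps

def is_picklable(value) -> bool:
    try:
        pickle_dumps(value)
        return True
    except Exception:  # PicklingError, TypeError, ...
        return False

sample = {'n': 1, 'fn': lambda x: x}   # the lambda is not picklable
assert [k for k, v in sample.items() if is_picklable(v)] == ['n']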
Example #17
    def on_connected(self, f):
        """Callback fired /on_connection/ established.

        Once the connection to the websocket has been established,
        the current namespace is pickled and written to the
        corresponding web_socket connection.
        """
        try:
            ws_conn = f.result()
            self.ws_conn = ws_conn
            data = {
                'connection_id': self.connection_id,
                'nb_code_to_run_async': self.cell_source,
            }
            msg = json.dumps(data)
            ws_conn.write_message(message=msg)
            white_ns = self._pack_namespace()
            ws_conn.write_message(message=pickle_dumps(white_ns), binary=True)
        except PicklingError as e:
            print(str(e))
Example #18
    def batch_set_headers(self, uid_to_headers):
        self.log('debug', f'Batch set {len(uid_to_headers)} headers')

        existing_headers = self.batch_get_header_items(uid_to_headers.keys())
        items_to_save = []

        for uid, headers in uid_to_headers.items():
            headers_data = pickle_dumps(headers)

            existing_header = existing_headers.get(uid)
            if existing_header:
                existing_header.data = headers_data
                items_to_save.append(existing_header)
            else:
                items_to_save.append(FolderHeaderCacheItem(
                    folder_id=self.get_folder_cache_item().id,
                    uid=uid,
                    data=headers_data,
                ))

        save_cache_items(*items_to_save)
Example #19
File: views.py Project: qscqesze/Lutece
def fetch_data(request):
    return HttpResponse(pickle_dumps(process(request), 2),
                        content_type='application/json')
Example #20
 def ident(self):
     m = sha256()
     m.update(pickle_dumps(self))
     return urlsafe_b64encode(m.digest())
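
A caveat worth noting: pickle output is not guaranteed to be byte-identical across Python versions or protocols, so digests like this are best compared within a single environment. A standalone sketch of the same pattern:

from base64 import urlsafe_b64encode
from hashlib import sha256
from pickle import dumps as pickle_dumps

def ident(obj) -> bytes:
    return urlsafe_b64encode(sha256(pickle_dumps(obj)).digest())

assert ident((1, 'a')) == ident((1, 'a'))  # equal objects, equal digests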
Example #21
def generate_results(STOCK, A, V, S, R, Res_Type):

    # AWS S3 - Download Required csv
    if f"{STOCK}.csv" not in listdir("."):
        S3_CLIENT.download_file(BUCKET_NAME, f"{STOCK}.csv", f'./{STOCK}.csv')

    df = processData(stock=STOCK, A=A)

    df = applyStrategy(df, V=V)

    signalsWithGen = getGenerators(df, V=V)

    closedTrades = closeTrades(signals=signalsWithGen, df=df)

    # VAR calculation
    if Res_Type == "ec2":
        closedTrades["vars"] = closedTrades.apply(
            getVAR, axis=1,
            args=(S, R))  # calculate VAR on ec2 using multi-processing
        closedTrades["VAR_95"] = closedTrades["vars"].apply(lambda x: x[0])
        closedTrades["VAR_99"] = closedTrades["vars"].apply(lambda x: x[1])
        table = closedTrades[[
            "Signal", "Open_Date", "Close_Date", "Open_Price", "Close_Price",
            "P&L", "Cumulative_Profit", "VAR_95", "VAR_99"
        ]]

    elif Res_Type == "lambda":

        f_vars = []
        for index, row in closedTrades.iterrows():
            temp_vars = []
            json_trade = json_dumps({
                "signal": row["Signal"],
                "open_price": row["Open_Price"],
                "mu": row["generators(mu, std)"][0],
                "std": row["generators(mu, std)"][1],
                "units": UNITS,
                "n": int(S / R),
            })

            threads = []
            for _ in range(R):
                t = Thread(
                    target=connect_lambda,
                    args=[LAMBDA_API, LAMBDA_PATH, json_trade, temp_vars])
                t.start()
                threads.append(t)

            for thread in threads:
                thread.join()

            # average values of multiple threads
            sum_v95, sum_v99 = 0, 0
            for tv in temp_vars:
                sum_v95 += tv[0]
                sum_v99 += tv[1]

            f_vars.extend([[sum_v95 / R, sum_v99 / R]])

        v95, v99 = [], []
        for fv in f_vars:
            v95.append(fv[0])
            v99.append(fv[1])

        closedTrades["VAR_95"] = v95
        closedTrades["VAR_99"] = v99
        table = closedTrades[[
            "Signal", "Open_Date", "Close_Date", "Open_Price", "Close_Price",
            "P&L", "Cumulative_Profit", "VAR_95", "VAR_99"
        ]]

    # summary table
    sum_df = DataFrame(
        {
            "Total Trades": len(table),
            "Avg P&L": table["P&L"].mean(),
            "Total P&L": table["P&L"].sum(),
            "Avg. VAR 95": table["VAR_95"].mean(),
            "Avg. VAR 99": table["VAR_99"].mean(),
        },
        index=["Value"]).T

    return pickle_dumps({
        "table": drawTable(table),
        "plot": drawPlot(df, A=A, STOCK=STOCK),
        "summary": drawSumTable(sum_df)
    })
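
The connect_lambda() helper used above is not shown. A hypothetical sketch, assuming the Lambda endpoint accepts the JSON trade as a POST body and answers with a JSON pair [var_95, var_99]:

import requests

def connect_lambda(api, path, json_trade, results):
    resp = requests.post(f'https://{api}/{path}', data=json_trade,
                         headers={'Content-Type': 'application/json'},
                         timeout=30)
    v95, v99 = resp.json()       # assumed response shape
    results.append((v95, v99))   # list.append is thread-safe in CPython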
Example #22
 def test_configuration_pickable_without_error(self):
     pickle_dumps(self.buildmaster.get_config())
Example #23
 def store_model(self, model_obj):
     pickled_obj = pickle_dumps(model_obj)
     filename = self.get_filename_from_model_obj(model_obj)
     key = self.get_key(filename)
     key.set_contents_from_string(pickled_obj)
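
The matching read path, as a sketch under the same boto2-style key API (get_contents_as_string is boto2's counterpart of set_contents_from_string; load_model and the pickle_loads import are assumptions here):

 def load_model(self, filename):
     key = self.get_key(filename)
     pickled_obj = key.get_contents_as_string()
     return pickle_loads(pickled_obj)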
Example #24
def api_buildmaster_config(request, identifier):
    master = get_object_or_404(Buildmaster, pk=identifier)
    return HttpResponse(pickle_dumps(master.get_config()))
Example #25
def create_dcs_tasks_in_redis(
    redis_pool,
    spider_name: str,
    target_list: (list, tuple),
    base_name='fzhook',
    task_serializer: str = 'pickle',
    key_expire: (float, int) = 60 * 60,
    nx: bool = True,
    decode_responses: bool = False,
    encoding='utf-8',
    max_connections=None,
    logger=None,
) -> None:
    """
    Create distributed tasks from target_list and insert them into Redis.
    :param redis_pool: instance of: from redis import ConnectionPool as RedisConnectionPool
    :param spider_name: the spider name
    :param target_list: eg: [{'unique_id': 'xxx', 'value': 'xxxx',}, ...]
    :param base_name:
    :param task_serializer: task serialization format; supported: 'pickle'
    :param key_expire: in seconds, defaults to 60 minutes
    :param nx: bool, if True the set only executes when name does not already exist
    :param decode_responses: False returns bytes, True returns strings; defaults to bytes, encoding='utf-8'
    :param encoding:
    :param max_connections:
    :return:
    """
    try:
        redis_cli = StrictRedis(
            connection_pool=redis_pool,
            decode_responses=decode_responses,
            encoding=encoding,
            max_connections=max_connections,
        )
    except Exception as e:
        _print(msg='Encountered an error:', logger=logger, log_level=2, exception=e)
        return None

    for item in target_list:
        try:
            unique_id = item.get('unique_id', '')
            assert unique_id != ''
            value = item.get('value')
            assert value is not None

            if task_serializer == 'pickle':
                # value = pickle_dumps(value)
                # Avoids an error on retrieval: 'utf-8' codec can't decode byte 0x80 in position 0: invalid start byte
                # Storing: Python object -> bytes -> latin-1 string -> utf-8 bytes kept in Redis
                # Retrieving: utf-8 bytes -> decode to string -> encode via latin-1 back to bytes -> Python object
                # eg: pickle.loads(r.get(sid).encode('latin1'))
                value = pickle_dumps(value).decode('latin1')
            else:
                raise ValueError('Invalid task_serializer value!')

            name = '{base_name}:{spider_name}:{unique_id}'.format(
                base_name=base_name,
                spider_name=spider_name,
                unique_id=unique_id,
            )
            _print(msg='insert name: {} ...'.format(name), logger=logger)
            redis_cli.set(
                name=name,
                value=value,
                ex=key_expire,
                # only set when the name does not already exist
                nx=nx,
            )
        except (AssertionError, Exception) as e:
            _print(msg='Encountered an error:', logger=logger, log_level=2, exception=e)
            continue

    return None
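
The round trip implied by the comments above, as a sketch: latin-1 maps every byte value 0-255 to a code point, so the decode/encode pair is lossless.

from pickle import dumps as pickle_dumps, loads as pickle_loads

original = {'unique_id': 'xxx', 'value': [1, 2, 3]}
stored = pickle_dumps(original).decode('latin1')    # what goes into Redis
recovered = pickle_loads(stored.encode('latin1'))   # what comes back out
assert recovered == original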
Example #26
 def set_uids(self, uids):
     self.log('debug', f'Saving {len(uids)} UIDs')
     folder_cache_item = self.get_folder_cache_item()
     folder_cache_item.uids = pickle_dumps(uids)
     save_cache_items(folder_cache_item)
Example #27
 def put(self, name, info, expire=MARKER):
     info = pickle_dumps(info)
     self.put_binary(name, info, expire=expire)
Example #28
 def test_pickle(self):
     loc = Location(40.768721, -111.901673)
     pickled = pickle_dumps(loc)
     unpickled = pickle_loads(pickled)
     self.assertEqual(loc.coords, unpickled.coords)
Example #29
File: dsutil.py Project: eBay/accelerator
 def write(self, o):
     self.fh.write(pickle_dumps(o, 4))
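
Pickles written back to back like this can be read with repeated load() calls until EOF, since each pickle is self-delimiting. A minimal reader sketch:

from pickle import load as pickle_load

def read_all(fh):
    while True:
        try:
            yield pickle_load(fh)
        except EOFError:
            return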
Example #30
    def _execute_as_forked_process(become: str, task: TaskInterface,
                                   temp: TempManager, ctx: ExecutionContext):
        """Execute task code as a separate Python process

        The processes communicate via serialized data and text files.
        The task code is passed over stdin together with the whole context;
        a second text file carries the return value of the executed task,
        which can be a boolean or an exception.

        When a task returns an exception, it is re-raised here so that the
        original exception is shown without any proxies.
        """

        if not become:
            become = task.get_become_as()

        # prepare file with source code and context
        communication_file = temp.assign_temporary_file()
        task.io().debug('Assigning communication temporary file at "%s"' %
                        communication_file)

        context_to_pickle = {
            'task': task,
            'ctx': ctx,
            'communication_file': communication_file
        }

        try:
            task.io().debug('Serializing context')
            with open(communication_file, 'wb') as f:
                f.write(pickle_dumps(context_to_pickle))

        except (AttributeError, TypeError) as e:
            task.io().error(
                'Cannot fork, serialization failed. ' +
                'Hint: Tasks that are using internally inner-methods and ' +
                'lambdas cannot be used with become/fork')
            task.io().error(str(e))

            if task.io().is_log_level_at_least('debug'):
                task.io().error('Pickle trace: ' +
                                str(get_unpicklable(context_to_pickle)))

            return False

        # set permissions to temporary file
        if become:
            task.io().debug('Setting temporary file permissions')
            os.chmod(communication_file, 0o777)

            try:
                pwd.getpwnam(become)
            except KeyError:
                task.io().error('Unknown user "%s"' % become)
                return False

        task.io().debug('Executing python code')
        task.py(code=FORKED_EXECUTOR_TEMPLATE,
                become=become,
                capture=False,
                arguments=communication_file)

        # collect, process and pass result
        task.io().debug('Parsing subprocess results from a serialized data')
        with open(communication_file, 'rb') as conn_file:
            task_return = pickle_loads(conn_file.read())

        if isinstance(task_return, Exception):
            task.io().debug('Exception was raised in subprocess, re-raising')
            raise task_return

        return task_return
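
FORKED_EXECUTOR_TEMPLATE itself is not shown. A hypothetical sketch of what such a child-side script does, assuming it receives the communication file path as its argument and that the task exposes an execute(ctx) entry point (both assumptions):

import sys
from pickle import dumps as pickle_dumps, loads as pickle_loads

communication_file = sys.argv[1]
with open(communication_file, 'rb') as f:
    context = pickle_loads(f.read())

try:
    result = context['task'].execute(context['ctx'])  # assumed entry point
except Exception as e:
    result = e  # pickled back and re-raised by the parent

with open(communication_file, 'wb') as f:
    f.write(pickle_dumps(result))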
Example #31
 def __setitem__(self, key, value):
     log.debug("session set key: %s %s %r", self.hkey, key, value)
     self.db.hset(self.hkey, key, pickle_dumps(value))
     self.db.expire(self.hkey, self.ttl)
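
The matching read path for such a session store, as a sketch (assuming pickle_loads is imported alongside pickle_dumps):

 def __getitem__(self, key):
     data = self.db.hget(self.hkey, key)
     if data is None:
         raise KeyError(key)
     return pickle_loads(data)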