Example 1
    def fetch_ohlcv(self, bin_size, start_time, end_time):
        """
        Fetch OHLCV data between start_time and end_time.
        :param bin_size: time frame of the candles
        :param start_time: start time
        :param end_time: end time
        :return: resampled OHLCV data frame
        """
        self.__init_client()        
        fetch_bin_size = allowed_range[bin_size][0]
        left_time = start_time
        right_time = end_time
        data = to_data_frame([])

        while True:
            if left_time > right_time:
                break
            
            left_time_to_timestamp = int(datetime.timestamp(left_time)*1000)
            right_time_to_timestamp = int(datetime.timestamp(right_time)*1000)   

            logger.info(f"fetching OHLCV data - {left_time}")         

            source = retry(lambda: self.client.futures_klines(symbol=self.pair, interval=fetch_bin_size,
                                                                              startTime=left_time_to_timestamp, endTime=right_time_to_timestamp,
                                                                              limit=1500))
            if len(source) == 0:
                break
            
            source_to_object_list =[]
           
            for s in source:   
                timestamp_to_datetime = datetime.fromtimestamp(s[6]/1000).astimezone(UTC)               
                source_to_object_list.append({
                        "timestamp" : timestamp_to_datetime,
                        "high" : float(s[2]),
                        "low" : float(s[3]),
                        "open" : float(s[1]),
                        "close" : float(s[4]),
                        "volume" : float(s[5])
                    })
                                   
            source = to_data_frame(source_to_object_list)

            data = pd.concat([data, source])
                       
            if right_time > source.iloc[-1].name + delta(fetch_bin_size):
                left_time = source.iloc[-1].name + delta(fetch_bin_size)
                time.sleep(2)                
            else:                
                break
        
        return resample(data, bin_size)        
Example 2
        def objective(args):
            logger.info(f"Params : {args}")
            try:
                self.params = args
                self.exchange = BitMexBackTest()
                self.exchange.on_update(self.bin_size, self.strategy)
                profit_factor = self.exchange.win_profit / self.exchange.lose_loss
                logger.info(f"Profit Factor : {profit_factor}")
                ret = {'status': STATUS_OK, 'loss': 1 / profit_factor}
            except Exception as e:
                logger.info(f"Strategy evaluation failed: {e}")
                ret = {'status': STATUS_FAIL}

            return ret
Example 3
 def __on_update_margin(self, action, margin):
     """
     Update margin.
     """
     if self.margin is not None:
         self.margin[0] = {
                             "asset": self.quote_asset,
                             "balance": float(margin['wb']),
                             "crossWalletBalance": float(margin['cw'])
                          }
     else:
         self.get_margin()
     notify(f"Balance: {self.margin[0]['balance']}")
     logger.info(f"Balance: {self.margin[0]['balance']} Cross Balance: {self.margin[0]['crossWalletBalance']}")     
Example 4
 def cancel_all(self):
     """
     Cancel all open orders.
     """
     self.__init_client()
     orders = retry(
         lambda: self.private_client.Order.Order_cancelAll().result())
     for order in orders:
         logger.info(
             f"Cancel Order : (orderID, orderType, side, orderQty, limit, stop) = "
             f"({order['orderID']}, {order['ordType']}, {order['side']}, {order['orderQty']}, "
             f"{order['price']}, {order['stopPx']})")
     logger.info("Cancelled all orders")
Example 5
 def upload_file(self, folder_name=None, file_path=None, file_key=None):
     kwargs = {'Bucket': self.properties['input_bucket']}
     kwargs['Key'] = self.get_file_key(folder_name, file_path, file_key)
     if file_path:
         kwargs['Body'] = utils.read_file(file_path, 'rb')
     if folder_name and not file_path:
         logger.info("Folder '{0}' created in bucket '{1}'".format(
             kwargs['Key'], kwargs['Bucket']))
     else:
         logger.info(
             "Uploading file '{0}' to bucket '{1}' from '{2}'".format(
                 kwargs['Key'], kwargs['Bucket'], file_path))
     self.client.upload_file(**kwargs)
Example 6
def add_job():
    """
    Enqueues an addition job on the celery worker.
    The worker computes a result of the form:
    {
        "sum": "<sum of a and b>"
    }
    """
    logger.info("Serving add endpoint.")
    data = request.json
    logger.info(f"Calling celery worker with arguments {data}.")
    add.delay(data)
    return jsonify({'status': 'created'}), 201
Example 7
 def cancel_all(self):
     """
     Cancel all open orders for this pair.
     """
     self.__init_client()
     orders = retry(lambda: self.private_client.Order.Order_cancelAll(
         symbol=self.pair).result())
     for order in orders:
         logger.info(
             f"Cancel Order : (orderID, orderType, side, orderQty, limit, stop) = "
             f"({order['orderID']}, {order['ordType']}, {order['side']}, {order['orderQty']}, "
             f"{order['price']}, {order['stopPx']})")
     logger.info("Cancelled all orders")
Example 8
 def upload_file_to_s3(self, bucket_name, bucket_folder, file_path):
     file_data = utils.get_file_as_byte_array(file_path)
     file_name = os.path.basename(file_path)
     file_key = file_name
     if bucket_folder:
         if bucket_folder.endswith("/"):
             file_key = "{0}{1}".format(bucket_folder, file_name)
         else:
             file_key = "{0}/{1}".format(bucket_folder, file_name)
     logger.info(
         "Uploading file '{0}' to bucket '{1}' with key '{2}'".format(
             file_path, bucket_name, file_key))
     self.s3.upload_file(bucket_name, file_key, file_data)
Example 9
 def delete_job_definitions(self, name):
     job_definitions = []
     # Get IO definitions (if any)
     kwargs = {"jobDefinitionName" : '{0}-io'.format(name)}
     io_job_info = self.client.describe_job_definitions(**kwargs)
     job_definitions.extend(self.get_job_definitions(io_job_info))
     # Get main job definition
     kwargs = {"jobDefinitionName" : name}
     job_info = self.client.describe_job_definitions(**kwargs)
     job_definitions.extend(self.get_job_definitions(job_info))
     for job_def in job_definitions:
         kwargs = {"jobDefinition" : job_def}
         self.client.deregister_job_definition(**kwargs)
     logger.info("Job definitions deleted")            
Example 10
    def compute_reward_function_metrics(self, epoch, episode_count):
        # Compute and log reward function metrics
        if self.rank == 0:
            if self.params['conditions']['reward_function'] in [
                    'learned_randomforest', 'pretrained_lstm', 'learned_lstm'
            ]:
                save_header = False
                if epoch == 0:
                    save_header = True
                if len(self.reward_function.recent_metrics_record) > 0:
                    with open(
                            os.path.join(self.logdir,
                                         'reward_func_metrics.csv'), 'a') as f:
                        df = self.reward_function.recent_metrics_record[
                            -1].reset_index()
                        df['epoch'] = epoch
                        logger.info(df)
                        df.to_csv(f, header=save_header, index=False)

            # Save stats confusion matrix
            stats_confusion_rew_func = [
                np.zeros([2, 2])
                for _ in range(len(self.params['train_descriptions']))
            ]
            stats_confusion = self.data_processor.stats_confusion_rew_func
            for i in range(len(self.params['train_descriptions'])):
                if len(stats_confusion[i][0]) < 20:
                    stats_confusion_rew_func[i][0, 0] = 0.5
                    stats_confusion_rew_func[i][0, 1] = 0.5
                else:
                    stats_confusion_rew_func[i][0, 0] = 1 - np.mean(
                        stats_confusion[i][0])
                    stats_confusion_rew_func[i][0, 1] = np.mean(
                        stats_confusion[i][0])
                if len(stats_confusion[i][1]) < 20:
                    stats_confusion_rew_func[i][1, 0] = 0.5
                    stats_confusion_rew_func[i][1, 1] = 0.5
                else:
                    stats_confusion_rew_func[i][1, 0] = 1 - np.mean(
                        stats_confusion[i][1])
                    stats_confusion_rew_func[i][1, 1] = np.mean(
                        stats_confusion[i][1])
                for j in range(2):
                    for k in range(2):
                        if np.isnan(stats_confusion_rew_func[i][j, k]):
                            stats_confusion_rew_func[i][j, k] = 0.5
            with open(
                    self.logdir + 'goal_info/stats_confusion_rew_func_' +
                    str(episode_count) + '.pk', 'wb') as f:
                pickle.dump(stats_confusion_rew_func, f)
Example 11
    def eval_exit(self):
        """
        Evaluate take-profit, stop-loss and trailing-stop exits.
        """
        if self.get_position_size() == 0:
            return

        unrealised_pnl = self.get_position()['unrealisedPnl']

        # if a trailing offset is set
        if self.get_exit_order()['trail_offset'] > 0 and self.get_trail_price() > 0:
            if self.get_position_size() > 0 and \
                    self.get_market_price() - self.get_exit_order()['trail_offset'] < self.get_trail_price():
                logger.info(f"Loss cut by trailing stop: {self.get_exit_order()['trail_offset']}")
                self.close_all()
            elif self.get_position_size() < 0 and \
                    self.get_market_price() + self.get_exit_order()['trail_offset'] > self.get_trail_price():
                logger.info(f"Loss cut by trailing stop: {self.get_exit_order()['trail_offset']}")
                self.close_all()

        # if a stop loss is set
        if unrealised_pnl < 0 and \
                0 < self.get_exit_order()['loss'] < abs(unrealised_pnl / 100000000):
            logger.info(f"Loss cut by stop loss: {self.get_exit_order()['loss']}")
            self.close_all()

        # if a take profit is set
        if unrealised_pnl > 0 and \
                0 < self.get_exit_order()['profit'] < abs(unrealised_pnl / 100000000):
            logger.info(f"Take profit by stop profit: {self.get_exit_order()['profit']}")
            self.close_all()
Example 12
 def put(self):
     """
     Edit (update) a configuration entry for the given category.
     """
     schema = Schema({
         "user_code": And(str),
         "category": And(Use(int), lambda x: x in list(ConfigNameMap.name.keys())),
     }, ignore_extra_keys=True)
     req, error = validate_schema(schema, request.json)
     if error:
         return error, 400
     user = TbUser.query.filter_by(code=request.json.pop("user_code")).first()
     # Release the lock
     release_lock(req, user)
     request.json.pop("category")
     category = ConfigNameMap.name[req["category"]]
     if not user:
         return Msg.USER_NOT_EXISTS, 400
     # Handle VERSION
     version = get_version(category)
     # Work around Python floating-point precision issues
     request.json["VERSION"] = round(version + 0.1, 2)
     # The support config must not include SQLALCHEMY_DATABASE_URI
     if category == ConfigNameMap.SUPPORT and request.json.get("SQLALCHEMY_DATABASE_URI"):
         request.json.pop("SQLALCHEMY_DATABASE_URI")
     data = {
         "config": request.json,
         "creator_name": user.name,
         "creator_code": user.code,
         "create_time": utc_timestamp(),
         "category": category,
         "is_sync": False
     }
     msg = "SUPPORT | MONITOR | EDIT_CONFIG | SUCCESS | USER: {} {} | CATEGORY: {}".format(
         user.code, user.name, category)
     try:
         support_config_storage.insert_one(data)
         operation = TbOperation(
             operator_code=user.code,
             content=msg,
             operate_time=utc_timestamp(),
             category=category,
             type="EDIT"
         )
         db.session.add(operation)
         db.session.commit()
         logger.info(msg)
         return {}, 200
     except Exception as e:
         logger.warning("SUPPORT | MONITOR | EDIT_CONFIG | FAILED | CATEGORY: {} | ERROR: {}".format(category, str(e)))
         return Msg.UPDATE_CONFIG_FAILED, 400
Example 13
def scrap_users(website_name, num_users_to_scrap):
    """
    Receives a website name and scrapes individual users' data (via the class generators in the related files);
    the scraped information is inserted into the database.
    The function picks a random user for a sanity check and logs information that can be verified manually.
    When the user index reaches the last user needed (per website), the function finishes.
    In Multi Process mode, this function runs concurrently on different websites.
    :param website_name: domain name of the website being scraped (str)
    :param num_users_to_scrap: number of users to scrape in the following session
    :return: None
    """

    first_instance_to_scrap, index_first_page, index_first_instance_in_first_page = general.arrange_first_user_to_scrap(
        website_name)

    # commit website data enrichment from the API
    database.commit_website_to_DB(WebsiteAPI(website_name).website_info)

    logger.info(
        config.WEBSITE_SCRAPP_INFO.format(
            website_name, first_instance_to_scrap,
            first_instance_to_scrap + num_users_to_scrap - 1))

    random_user_to_check = random.randint(0, num_users_to_scrap - 1)

    # create user link generator
    user_page = UserAnalysis(website_name, index_first_page,
                             index_first_instance_in_first_page)
    user_links_generator = user_page.generate_users_links()
    for num_user, link in enumerate(
            tqdm(user_links_generator,
                 desc=f"{website_name}",
                 total=num_users_to_scrap,
                 position=1,
                 leave=False)):
        # create a new user
        user = UserScraper(link, website_name, first_instance_to_scrap)
        # insert user to the database
        database.insert_user_to_DB(user)

        # log a sanity check for a random user
        if num_user == random_user_to_check:
            logger.info(
                config.SANITY_CHECK_STRING.format(
                    link, website_name,
                    user._rank // config.NUM_INSTANCES_IN_PAGE,
                    user._reputation_now))
        # stop when reached to requested number of users
        if num_user == num_users_to_scrap - 1:
            break
Example 14
 def undo_task(self, taskroom_id, task_id):
     """
     Reactivate the task with the given id (clears its archived and deleted flags).
     :param taskroom_id:
     :param task_id:
     :return:
     """
     email = get_jwt_identity()
     payload = update_common_payload(prefix="tasks.$")
     payload["tasks.$.meta.is_archived"], payload["tasks.$.meta.is_deleted"] = False, False
     result = base_obj.update(COLLECTIONS['ROOMS']
         , {'_id': ObjectId(taskroom_id), "tasks._id": ObjectId(task_id), "users": email}
         , {"$set": payload})
     logger.info("payload: {}, result: {}".format(payload, result))
Example 15
    def eval_exit(self):
        """
        Evaluation of profit target, stop loss and trailing stop.
        """
        if self.get_position_size() == 0:
            return

        unrealised_pnl = float(self.get_position()['unRealizedProfit'])

        # trail asset
        if self.get_exit_order()['trail_offset'] > 0 and self.get_trail_price() > 0:
            if self.get_position_size() > 0 and \
                    self.get_market_price() - self.get_exit_order()['trail_offset'] < self.get_trail_price():
                logger.info(f"Loss cut by trailing stop: {self.get_exit_order()['trail_offset']}")
                self.close_all()
            elif self.get_position_size() < 0 and \
                    self.get_market_price() + self.get_exit_order()['trail_offset'] > self.get_trail_price():
                logger.info(f"Loss cut by trailing stop: {self.get_exit_order()['trail_offset']}")
                self.close_all()

        # stop loss
        if unrealised_pnl < 0 and \
                0 < self.get_exit_order()['loss'] < abs(unrealised_pnl):
            logger.info(f"Loss cut by stop loss: {self.get_exit_order()['loss']}")
            self.close_all()

        # profit take
        if unrealised_pnl > 0 and \
                0 < self.get_exit_order()['profit'] < abs(unrealised_pnl):
            logger.info(f"Take profit by stop profit: {self.get_exit_order()['profit']}")
            self.close_all()
Example 16
 def update_task(self, taskroom_id, task_id, payload):
     """
     Update the task id with payload, all users under "users" can update tasks
     :param taskroom_id:
     :param task_id:
     :param payload:
     :return:
     """
     email = get_jwt_identity()
     payload = custom_marshal(payload, task_request, 'update', prefix="tasks.$")
     result = base_obj.update(COLLECTIONS['ROOMS']
         , {'_id': ObjectId(taskroom_id), 'tasks._id':ObjectId(task_id), 'users': email}
         , {"$set": payload})
     logger.info("payload: {}, result: {}".format(payload, result))
Example 17
    def __crawler_run(self):
        """
        Get the data and execute the strategy.
        """
        start = time.time()

        for i in range(self.ohlcv_len):
            self.balance_history.append(
                (self.get_balance() -
                 self.start_balance))  #/100000000*self.get_market_price())
            self.draw_down_history.append(self.max_draw_down_session_perc)

        for i in range(len(self.df_ohlcv) - self.ohlcv_len):
            self.data = self.df_ohlcv.iloc[i:i + self.ohlcv_len, :]
            index = self.data.iloc[-1].name
            self.timestamp = self.data.iloc[-1][0]
            close = self.data['close'].values
            open = self.data['open'].values
            high = self.data['high'].values
            low = self.data['low'].values
            volume = self.data['volume'].values

            if self.get_position_size() > 0 and low[-1] > self.get_trail_price():
                self.set_trail_price(low[-1])
            if self.get_position_size() < 0 and high[-1] < self.get_trail_price():
                self.set_trail_price(high[-1])

            self.market_price = close[-1]
            self.OHLC = {
                'open': open,
                'high': high,
                'low': low,
                'close': close
            }
            # self.time = timestamp.tz_convert('Asia/Tokyo')
            self.index = index
            #self.eval_sltp()
            self.strategy(open, close, high, low, volume)

            self.balance_history.append(
                (self.get_balance() -
                 self.start_balance))  #/ 100000000 * self.get_market_price())
            #self.eval_exit()
            #self.eval_sltp()

        self.close_all()
        logger.info(f"Back test time : {time.time() - start}")
Example 18
 def post():
     try:
         email, password = (
             request.json.get("email").strip(),
             request.json.get("password").strip(),
         )
     except Exception as e:
         logger.info(f"Email or password is missing or malformed: {e}")
         return error.INVALID_INPUT
     if email is None or password is None:
         return error.INVALID_INPUT
     user = User.query.filter_by(email=email, password=password).first()
     if user is None:
         return error.UNAUTHORIZED
     return {"token": user.generate_auth_token(0).decode()}
Example 19
def judge_version(now_config: dict, wait_to_sync_config: dict) -> bool:
    """
    Check whether the config waiting to be synced has a newer version than the one currently in use.
    """
    if not now_config.get("VERSION"):
        # If the current config has no VERSION, initialize it to 1.0
        now_config["VERSION"] = 1.0
    using_version = now_config.get("VERSION") or 1.0
    wait_to_sync_config_version = float(
        wait_to_sync_config["config"]["VERSION"])
    if float(using_version) >= float(wait_to_sync_config_version):
        logger.info(
            "WRONG VERSION | USING CONFIG VERSION: {} | WAIT_TO_SYNC_CONFIG_VERSION: {}"
            .format(using_version, wait_to_sync_config_version))
        return False
    return True
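A minimal usage sketch for judge_version; the config dicts below are hypothetical values chosen only to illustrate the comparison, and the module-level logger is assumed to be configured as in the surrounding examples.

now_config = {"VERSION": 1.1}
newer = {"config": {"VERSION": "1.2"}}   # waiting config is newer -> syncing allowed
stale = {"config": {"VERSION": "1.0"}}   # waiting config is not newer -> rejected (and logged)

assert judge_version(now_config, newer) is True
assert judge_version(now_config, stale) is False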
Example 20
 def delete_folder(self, folder_name):
     url = 'https://{0}{1}{2}/{3}/'.format(self.oneprovider_host,
                                           self.cdmi_path,
                                           self.onedata_space, folder_name)
     headers = {**self.cdmi_version_header, **self.onedata_auth_header}
     try:
         r = requests.delete(url, headers=headers)
         if r.status_code == 204:
             logger.info(
                 f'Folder "{folder_name}" deleted successfully in space "{self.onedata_space}"'
             )
         else:
             raise Exception(r.status_code)
     except Exception as e:
         logger.warning(
             f'Unable to delete folder "{folder_name}". Error: {e}')
Example 21
def get_latent(vae: "VAE", data_loader):
    from src.data.dataset import _ct

    z_loc, z_scale = None, None
    logger.info("Encoding observations into latent space")
    with torch.no_grad():
        for d in data_loader:
            if z_loc is None:
                z_loc, z_scale = vae.encode(_ct(d.diurnality), _ct(d.viirs))
            else:
                z_loc_i, z_scale_i = vae.encode(_ct(d.diurnality),
                                                _ct(d.viirs))
                z_loc = np.concatenate((z_loc, z_loc_i), axis=1)
                z_scale = np.concatenate((z_scale, z_scale_i), axis=1)

    return z_loc.swapaxes(0, 1), z_scale.swapaxes(0, 1)
Example 22
def eval_mse(f_12, f_24, viirs_12, viirs_24):
    def _mse(m1, m2):
        return ((m1 - m2)**2).mean()

    logger.info(f"f_12.shape: {f_12.shape}")
    logger.info(f"f_24.shape: {f_24.shape}")
    logger.info(f"viirs_12.shape: {viirs_12.shape}")
    logger.info(f"viirs_24.shape: {viirs_24.shape}")
    return _mse(f_12, viirs_12), _mse(f_24, viirs_24)
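A quick sanity-check sketch for eval_mse using small hypothetical arrays; it assumes the module-level logger is configured, since the function logs the input shapes.

import numpy as np

f_12 = np.array([[1.0, 2.0], [3.0, 4.0]])
viirs_12 = np.array([[1.0, 2.0], [3.0, 6.0]])   # one cell differs by 2 -> MSE = 4 / 4 = 1.0
f_24 = np.zeros((2, 2))
viirs_24 = np.ones((2, 2))                      # every cell differs by 1 -> MSE = 1.0

mse_12, mse_24 = eval_mse(f_12, f_24, viirs_12, viirs_24)
assert mse_12 == 1.0 and mse_24 == 1.0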
Example 23
    def close_all(self):
        """
        market close opened position for this pair
        """
        self.__init_client()
        position_size = self.get_position_size()
        if position_size == 0:
            return

        side = False if position_size > 0 else True
        
        self.order("Close", side, abs(position_size))
        position_size = self.get_position_size()
        if position_size == 0:
            logger.info(f"Closed {self.pair} position")
        else:
            logger.info(f"Failed to close the entire {self.pair} position; {position_size} still remaining")
Example 24
    def entry(self,
              id,
              long,
              qty,
              limit=0,
              stop=0,
              post_only=False,
              reduce_only=False,
              when=True):
        """
        places an entry order, works as equivalent to tradingview pine script implementation
        https://tradingview.com/study-script-reference/#fun_strategy{dot}entry
        :param id: Order id
        :param long: Long or Short
        :param qty: Quantity
        :param limit: Limit price
        :param stop: Stop limit
        :param post_only: Post only
        :param reduce_only: Reduce Only means that your existing position cannot be increased, only reduced, by this order
        :param when: Condition for placing the order - the order is only submitted when True
        :return:
        """
        self.__init_client()

        # if self.get_margin()['excessMargin'] <= 0 or qty <= 0:
        #     return

        if not when:
            return

        pos_size = self.get_position_size()
        logger.info(f"pos_size: {pos_size}")

        if long and pos_size > 0:
            return

        if not long and pos_size < 0:
            return

        ord_qty = qty + abs(pos_size)

        trailing_stop = 0
        activationPrice = 0

        self.order(id, long, ord_qty, limit, stop, post_only, reduce_only,
                   trailing_stop, activationPrice, when)
Example 25
    def __init__(self,
                 style_filename: str,
                 num_hidden_relu: int = 10,
                 image_size: int = 512,
                 block_indices: Optional[List[int]] = None):
        super().__init__()
        if block_indices is None:
            block_indices = [1, 2, 3, 4, 5, 6, 7, 8]

        self._block_indices = block_indices[:]
        self._losses = nn.ModuleDict()
        self._loaded = False
        logger.info(
            f"Style loss with indices={str(self._block_indices)} created")
        self.load_style_from_file(style_filename=style_filename,
                                  num_hidden_relu=num_hidden_relu,
                                  image_size=image_size)
Example 26
    def __get_auth(self):
        """
        Set up authentication credentials.
        """
        api_key = os.environ.get("BITMEX_TEST_APIKEY") if self.testnet else os.environ.get("BITMEX_APIKEY")
        api_secret = os.environ.get("BITMEX_TEST_SECRET") if self.testnet else os.environ.get("BITMEX_SECRET")

        if api_key and api_secret:
            nonce = generate_nonce()
            return [
                "api-nonce: " + str(nonce),
                "api-signature: " + generate_signature(api_secret, 'GET', '/realtime', nonce, ''),
                "api-key:" + api_key
            ]
        else:
            logger.info("Connecting to WebSocket without authentication.")
            return []
Example 27
 def __get_auth(self):
     """
     get auth info
     """        
     api_key =  conf['bitmex_test_keys'][self.account]['API_KEY'] if self.testnet else conf['bitmex_keys'][self.account]['API_KEY']       
     api_secret = conf['bitmex_test_keys'][self.account]['SECRET_KEY'] if self.testnet else conf['bitmex_keys'][self.account]['SECRET_KEY']       
     
     if api_key and api_secret:
         nonce = generate_nonce()
         return [
             "api-nonce: " + str(nonce),
             "api-signature: " + generate_signature(api_secret, 'GET', '/realtime', nonce, ''),
             "api-key:" + api_key
         ]
     else:
         logger.info("Connecting to WebSocket without authentication.")
         return []
Example 28
    def strategy(self, action, open, close, high, low, volume):
        lot = self.exchange.get_lot()

        variant_type = self.input(defval=5, title="variant_type", type=int)
        basis_len = self.input(defval=19,  title="basis_len", type=int)
        resolution = self.input(defval=2, title="resolution", type=int)
        sma_len = self.input(defval=9, title="sma_len", type=int)
        div_threshold = self.input(defval=3.0, title="div_threshold", type=float)

        source = self.exchange.security(str(resolution) + 'm')

        if self.eval_time is not None and \
                self.eval_time == source.iloc[-1].name:
            return

        series_open = source['open'].values
        series_close = source['close'].values

        variant = self.variants[variant_type]

        val_open = variant(series_open,  basis_len)
        val_close = variant(series_close, basis_len)

        if val_open[-1] > val_close[-1]:
            high_val = val_open[-1]
            low_val = val_close[-1]
        else:
            high_val = val_close[-1]
            low_val = val_open[-1]

        sma_val = sma(close, sma_len)
        logger.info("lagging log")
        self.exchange.plot('val_open', val_open[-1], 'b')
        self.exchange.plot('val_close', val_close[-1], 'r')

        self.exchange.entry("Long", True,   lot, stop=math.floor(low_val), when=(sma_val[-1] < low_val))
        self.exchange.entry("Short", False, lot, stop=math.ceil(high_val), when=(sma_val[-1] > high_val))

        open_close_div = sma(numpy.abs(val_open - val_close), sma_len)

        if open_close_div[-1] > div_threshold and \
                open_close_div[-2] > div_threshold:
            self.exchange.close_all()

        self.eval_time = source.iloc[-1].name
Example 29
    def __on_update_position(self, action, position):
        """
        Update position
        """    

        if len(position) > 0:
            position = [p for p in position if p["s"].startswith(self.pair)]   
            if len(position) == 0:
                # logger.info(f"Some other pair was traded!")
                return
        else:
            return         
            
        # Was the position size changed?
        is_update_pos_size = self.get_position_size() != float(position[0]['pa'])        

        # Reset trail to current price if position size changes
        if is_update_pos_size and float(position[0]['pa']) != 0:
            self.set_trail_price(self.market_price)
        
        if is_update_pos_size:
            logger.info(f"Updated Position\n"
                        f"Price: {self.position[0]['entryPrice']} => {position[0]['ep']}\n"
                        f"Qty: {self.position[0]['positionAmt']} => {position[0]['pa']}\n"
                        f"Balance: {self.get_balance()} {self.quote_asset}")
            notify(f"Updated Position\n"
                   f"Price: {self.position[0]['entryPrice']} => {position[0]['ep']}\n"
                   f"Qty: {self.position[0]['positionAmt']} => {position[0]['pa']}\n"
                   f"Balance: {self.get_balance()} {self.quote_asset}")
       
        if self.position is not None:
            self.position[0] = {
                                "entryPrice": position[0]['ep'],
                                "marginType": position[0]['mt'],
                                "positionAmt": position[0]['pa'],
                                "symbol": position[0]['s'],
                                "unRealizedProfit": position[0]['up'],
                                "positionSide": position[0]['ps'],
                                }

        self.position_size = float(self.position[0]['positionAmt'])
        self.entry_price = float(self.position[0]['entryPrice'])        
    
        # Evaluation of profit and loss
        self.eval_exit()
        self.eval_sltp()
Example 30
    def create(args):
        """
        Creates the bot from the given arguments.
        :param args: strategy's args
        :return: Bot
        """
        try:
            strategy_module = importlib.import_module("src.strategies."+args.strategy)
            cls = getattr(strategy_module, args.strategy)
            bot = cls()
            bot.test_net  = args.demo
            bot.back_test = args.test
            bot.stub_test = args.stub
            bot.hyperopt  = args.hyperopt
            bot.account = args.account
            bot.exchange_arg = args.exchange
            bot.pair = args.pair

            STRATEGY_FILENAME = os.path.join(os.getcwd(), f"src/strategies/{args.strategy}.py")
            symlink(STRATEGY_FILENAME, 'html/data/strategy.py', overwrite=True)
            
            if args.session is not None:
                try:
                    bot.session_file_name = args.session
                    bot.session_file = open(args.session,"r+")
                except Exception as e:
                    logger.info("Session file not found - Creating!")
                    bot.session_file = open(args.session,"w")
                
                try:
                    # vars = pickle.load(bot.session_file)
                    vars = json.load(bot.session_file)
                    # vars = jsonpickle.decode(bot.session_file.read())

                    use_stored_session = query_yes_no("Session Found. Do you want to use it?", "no")
                    if use_stored_session:
                        bot.set_session(vars)
                except Exception as _:
                    logger.info("Session file is empty!")
            else:
                bot.session_file = None

            return bot
        except Exception as _:
            raise Exception(f"Strategy not found: {args.strategy}")