def analyze(self, share):
    """Flag notable daily price events for *share*.

    Returns a one-entry dict mapping an event label to the triggering
    price (e.g. ``{"aired": {"price": ...}}``), or ``None`` when the
    share is skipped or nothing notable happened today.
    """
    history = share.daily_history
    # Skip rights issues, and shares whose oldest recorded day falls
    # inside the threshold window (history too short to judge).
    if share.is_rights_issue or share.day_history(0)['date'] >= \
            Share.get_today() - timedelta(days=self.threshold):
        return
    last = share.last_day_history
    # All-time extremes take precedence over the shorter windows.
    if history['high'].max() == last['high']:
        return {"aired": {"price": last['high']}}
    if history['low'].min() == last['low']:
        return {"dumped": {"price": last['low']}}
    # Windowed lower bounds, shortest period first — the two checks were
    # previously duplicated inline; a loop keeps them in sync.
    for days, label in ((30, "monthly lower bound"),
                        (180, "half year lower bound")):
        window = history[history['date'] >=
                         Share.get_today() - timedelta(days=days)]
        if window['low'].min() == last['low']:
            return {label: {"price": last['low']}}
def search_share(keyword):
    """Search tsetmc for *keyword* and create/update matching Share rows.

    Returns ``None``; persists results via bulk_create / bulk_update and
    logs when new shares were added.
    """
    response = submit_request(
        'http://www.tsetmc.com/tsev2/data/search.aspx',
        params=(('skey', keyword), ),
        headers=get_headers(None,
                            'http://www.tsetmc.com/Loader.aspx?ParTree=15'),
        timeout=25)
    if len(response.text) == 0:
        return
    # Response columns (semicolon-terminated CSV rows):
    # ticker, description, id, ?, ?, ?, bazaar type, enable, bazaar, bazaar
    df = pd.read_csv(StringIO(response.text),
                     sep=',',
                     lineterminator=';',
                     header=None)
    df = df.where((pd.notnull(df)), None)
    new_list, update_list = [], []
    for index, row in df.iterrows():
        share_id = row[2]  # renamed from `id` — avoid shadowing the builtin
        try:
            share = Share.objects.get(id=share_id)
        except Share.DoesNotExist:
            # The same id can appear more than once in one response;
            # reuse the pending new Share instead of creating a duplicate.
            # NOTE(review): a reused pending share then lands in
            # update_list while still unsaved — bulk_update on it is a
            # silent no-op in Django; verify this is intended.
            for share in new_list:
                if share.id == share_id:
                    break
            else:
                share = Share()
        (update_list if share.id else new_list).append(share)
        share.ticker = characters.ar_to_fa(str(row[0])).strip()
        share.description = characters.ar_to_fa(row[1]).strip()
        share.id = share_id
        share.bazaar_type = row[6]
        share.enable = bool(row[7])
        share.strike_date, share.option_strike_price, share.base_share = \
            share.parse_data()
    Share.objects.bulk_create(new_list, batch_size=100)
    Share.objects.bulk_update(update_list, [
        'ticker', 'description', 'bazaar_type', 'enable',
        'option_strike_price', 'strike_date', 'base_share'
    ], batch_size=100)
    if new_list:
        logger.info(
            f"update share list, {len(new_list)} added ({new_list}), {len(update_list)} updated."
        )
def handle(self, *args, **options):
    """Run every daily analyzer over all shares and render the HTML report."""
    Share.DAY_OFFSET = options.get('days', 0)
    report_rows = []
    for share in Share.objects.all().order_by('ticker'):
        # Only consider shares with history whose last entry is recent.
        has_fresh_history = (
            share.history_size > 0
            and share.last_day_history['date'] >=
            Share.get_today() - timedelta(days=1))
        if not has_fresh_history:
            continue
        findings = {}
        for analyzer in self.daily_analyzers:
            outcome = analyzer.analyze(share)
            if outcome:
                findings.update(
                    {key: str(value) for key, value in outcome.items()})
        if not findings:
            continue
        ticker_link = f'<a href="http://www.tsetmc.com/Loader.aspx?ParTree=151311&i={share.id}">{share.ticker}</a>'
        report_rows.append({"ticker": ticker_link, **findings})
        logger.info(f"{share.ticker}: {findings}")
    if not report_rows:
        return
    df = pd.DataFrame(report_rows)
    df.sort_values(by=list(df), inplace=True)
    # Do not truncate cell contents (links must stay intact).
    pd.set_option('display.max_colwidth', None)
    from django.template import loader
    template = loader.get_template('daily_report.html')
    html_out = template.render({
        'date': Share.get_today(),
        'daily_report_dataframe': df.to_html(escape=False)
    })
    with open(settings.BASE_DIR + "/report.html", 'w') as f:
        f.write(html_out)
def analyze(self, share):
    """Scan *share*'s active options for butterfly-style mispricing.

    Options sharing a ticker-prefix letter and strike date are sorted by
    strike price; each middle option's close is compared against the
    average of its two neighbours' closes, and deviations above 10% on
    today's row are logged.
    """
    active_options = share.options.filter(enable=True)
    if not active_options.exists():
        return
    # 'ض' / 'ط' — option ticker prefix letters (presumably call/put on
    # the Tehran exchange; TODO confirm against the naming convention).
    for letter in ['ض', 'ط']:
        similar_options = active_options.filter(ticker__startswith=letter)
        for d in set(similar_options.values_list('strike_date', flat=True)):
            # Same expiry, ordered by strike so i-1 / i / i+1 are
            # adjacent strikes.
            options = list(
                similar_options.filter(
                    strike_date=d).order_by('option_strike_price'))
            for i in range(1, len(options) - 1):
                # All three legs need price history to compare.
                if options[i].history_size == 0 or options[
                        i + 1].history_size == 0 or options[
                            i - 1].history_size == 0:
                    continue
                # Inner-join the two neighbours' histories on date, then
                # join the middle option in — only dates present in all
                # three survive.
                df = pd.merge(options[i + 1].daily_history,
                              options[i - 1].daily_history,
                              left_on='date',
                              right_on='date',
                              how='inner',
                              suffixes=('_nxt', '_prv'))
                df = pd.merge(options[i].daily_history,
                              df,
                              left_on='date',
                              right_on='date',
                              how='inner')
                # Ratio of the middle close to the neighbours' average
                # close; 1.0 means perfectly in line.
                df['arbitrage'] = df['close'] / (df['close_nxt'] +
                                                 df['close_prv']) * 2
                # Log only when the latest joined row is today and the
                # deviation exceeds 10%.
                if df.shape[0] > 0 and abs(
                        1 - df.iloc[-1]['arbitrage']
                ) > 0.1 and df.iloc[-1]['date'] == Share.get_today():
                    logger.info(
                        f'{options[i].ticker}, {df[["date", "arbitrage"]]}'
                    )
def update_share_list(batch_size=100):
    """Refresh the Share table from the tsetmc market-watch feed.

    Parameters
    ----------
    batch_size : int
        Batch size for both ``bulk_create`` and ``bulk_update``.
    """
    text = get_watch_list()
    # Part 2 of the '@'-separated payload holds per-share rows,
    # terminated by ';'.
    df = pd.read_csv(StringIO(text.split("@")[2]),
                     sep=',',
                     lineterminator=';',
                     header=None)
    df = df.where((pd.notnull(df)), None)
    new_list, update_list = [], []
    for index, row in df.iterrows():
        try:
            share = Share.objects.get(id=row[0])
        except Share.DoesNotExist:
            share = Share()
        (update_list if share.id else new_list).append(share)
        share.enable = True
        share.id = row[0]
        share.ticker = characters.ar_to_fa(str(row[2])).strip()
        share.description = characters.ar_to_fa(row[3]).strip()
        share.eps = row[14]
        share.base_volume = row[15]
        share.bazaar_type = row[17]
        share.group = ShareGroup.objects.get(id=row[18])
        share.total_count = row[21]
        share.bazaar_group = row[22]
        share.strike_date, share.option_strike_price, share.base_share = \
            share.parse_data()
    Share.objects.bulk_create(new_list, batch_size=batch_size)
    # BUG FIX: honour the batch_size argument (was hard-coded to 100
    # here while bulk_create already used the parameter).
    Share.objects.bulk_update(update_list, [
        'enable', 'ticker', 'description', 'eps', 'base_volume',
        'bazaar_type', 'group', 'total_count', 'bazaar_group',
        'option_strike_price', 'strike_date', 'base_share'
    ], batch_size=batch_size)
    logger.info(
        f"update share list, {len(new_list)} ({new_list}) added, {len(update_list)} updated."
    )
def search_stock(keyword):
    """Search tsetmc for *keyword* and create/update matching Share rows.

    Exceptions are logged and re-raised.
    """
    try:
        headers = {
            'User-Agent':
            'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:74.0) Gecko/20100101 Firefox/74.0',
            'Accept': '*/*',
            'Accept-Language': 'en-US,en;q=0.5',
            'X-Requested-With': 'XMLHttpRequest',
            'DNT': '1',
            'Connection': 'keep-alive',
            'Referer': 'http://www.tsetmc.com/Loader.aspx?ParTree=15',
        }
        response = requests.get(
            'http://www.tsetmc.com/tsev2/data/search.aspx?skey={}'.format(
                keyword),
            headers=headers)
        if response.status_code != 200:
            raise Exception("Http Error: {}".format(response.status_code))
        if len(response.text) == 0:
            return
        # Response columns (semicolon-terminated CSV rows):
        # ticker, description, id, ?, ?, ?, bazaar type, enable, bazaar, bazaar
        df = pd.read_csv(StringIO(response.text),
                         sep=',',
                         lineterminator=';',
                         header=None)
        df = df.where((pd.notnull(df)), None)
        new_list, update_list = [], []
        for index, row in df.iterrows():
            share_id = row[2]  # renamed from `id` — avoid shadowing builtin
            try:
                share = Share.objects.get(id=share_id)
            except Share.DoesNotExist:
                share = Share()
            (update_list if share.id else new_list).append(share)
            share.ticker = characters.ar_to_fa(str(row[0]))
            share.description = characters.ar_to_fa(row[1])
            share.id = share_id
            share.bazaar_type = row[6]
            # BUG FIX: coerce to bool like the sibling search_share does,
            # instead of storing the raw CSV cell.
            share.enable = bool(row[7])
        if new_list:
            logger.info("new stocks: {}".format(new_list))
        Share.objects.bulk_create(new_list, batch_size=100)
        Share.objects.bulk_update(
            update_list, ['ticker', 'description', 'bazaar_type', 'enable'],
            batch_size=100)
        if new_list:
            logger.info("update stock list, {} added, {} updated.".format(
                len(new_list), len(update_list)))
    except Exception as e:
        logger.exception(e)
        # BUG FIX: bare raise preserves the original traceback
        # (`raise e` re-raises from here).
        raise
def update_stock_list(batch_size=100):
    """Refresh the Share table from tsetmc's MarketWatchInit endpoint.

    Parameters
    ----------
    batch_size : int
        Batch size for both ``bulk_create`` and ``bulk_update``.

    Raises
    ------
    Exception
        On any non-200 HTTP status from the endpoint.
    """
    headers = HEADERS.copy()
    headers['Referer'] = 'http://www.tsetmc.com/Loader.aspx?ParTree=15131F'
    params = (
        ('h', '0'),
        ('r', '0'),
    )
    response = requests.get(
        'http://www.tsetmc.com/tsev2/data/MarketWatchInit.aspx',
        headers=headers,
        params=params)
    if response.status_code != 200:
        raise Exception("Http Error: {}".format(response.status_code))
    # The payload is '@'-separated:
    #   part 0: ?
    #   part 1: general bazaar info
    #     ['date and time of last_transaction', 'boorse_status',
    #      'boorse_index', 'boorse_index_diff', 'boorse_market cap',
    #      'boorse_volume', 'boorse_value', 'boorse_count',
    #      'faraboorse_status', 'faraboorse_volume', 'faraboorse_value',
    #      'faraboorse_count', 'moshtaghe_status', 'moshtaghe_volume',
    #      'moshtaghe_value', 'moshtaghe_count']
    #   part 2: ['id', 'IR', 'ticker', 'description', '?', 'first',
    #      'tomorrow', 'last', 'count', 'volume', 'value', 'low', 'high',
    #      'yesterday', 'eps', 'base volume', '', 'bazaar type', 'group',
    #      'max_price_possible', 'min_price_possible', 'number of stock',
    #      'bazaar group']
    #   part 3: ['id', 'order', 'sell_count', 'buy_count', 'buy_price',
    #      'sell_price', 'buy_volume', 'sell_volume']
    #   part 4: ?
    df = pd.read_csv(StringIO(response.text.split("@")[2]),
                     sep=',',
                     lineterminator=';',
                     header=None)
    df = df.where((pd.notnull(df)), None)
    new_list, update_list = [], []
    for index, row in df.iterrows():
        try:
            share = Share.objects.get(id=row[0])
        except Share.DoesNotExist:
            share = Share()
        (update_list if share.id else new_list).append(share)
        share.enable = True
        share.id = row[0]
        share.ticker = characters.ar_to_fa(str(row[2]))
        share.description = characters.ar_to_fa(row[3])
        share.eps = row[14]
        share.base_volume = row[15]
        share.bazaar_type = row[17]
        # NOTE(review): raw group id is stored here while the sibling
        # update_share_list resolves it via ShareGroup — confirm which
        # is correct for this model.
        share.group = row[18]
        share.total_count = row[21]
        share.bazaar_group = row[22]
    if new_list:
        logger.info("new stocks: {}".format(new_list))
    # BUG FIX: honour the batch_size argument in both bulk calls
    # (both were hard-coded to 100).
    Share.objects.bulk_create(new_list, batch_size=batch_size)
    Share.objects.bulk_update(update_list, [
        'enable', 'ticker', 'description', 'eps', 'base_volume',
        'bazaar_type', 'group', 'total_count', 'bazaar_group'
    ], batch_size=batch_size)
    logger.info("update stock list, {} added, {} updated.".format(
        len(new_list), len(update_list)))