def saveUserDuringOrdersTask(user_id, update_from=None, update_to=None, status=None):
    """Download all Taobao trades sold by `user_id` created inside
    [update_from, update_to] (optionally filtered by `status`) and save any
    trade not already stored locally.

    Celery task: on any failure the whole run is retried after 60 seconds.
    """
    update_tids = []  # tids saved during this run (collected, not returned)
    try:
        # The Taobao API expects datetimes as formatted strings.
        update_from = format_datetime(update_from) if update_from else None
        update_to = format_datetime(update_to) if update_to else None
        has_next = True
        cur_page = 1
        # Local import to avoid a circular dependency at module load time.
        from shopback.trades.service import TradeService, OrderService
        while has_next:
            response_list = apis.taobao_trades_sold_get(
                tb_user_id=user_id, page_no=cur_page, use_has_next='true',
                fields='tid,modified', page_size=settings.TAOBAO_PAGE_SIZE,
                start_created=update_from, end_created=update_to, status=status)
            order_list = response_list['trades_sold_get_response']
            if order_list.has_key('trades'):  # page may contain no trades (py2 idiom)
                for trade in order_list['trades']['trade']:
                    # Only fetch/persist trades we have never seen before.
                    if not Trade.objects.filter(id=trade['tid']).exists():
                        trade_dict = OrderService.getTradeFullInfo(user_id, trade['tid'])
                        order_trade = OrderService.saveTradeByDict(user_id, trade_dict)
                        update_tids.append(trade['tid'])
            has_next = order_list['has_next']
            cur_page += 1
    except Exception, exc:
        logger.error(u'淘宝订单批量下载错误:%s' % exc.message, exc_info=True)
        # Celery: Task.retry() itself raises; the explicit raise keeps intent clear.
        raise saveUserDuringOrdersTask.retry(exc=exc, countdown=60)
def saveUserIncrementOrdersTask(user_id, update_from=None, update_to=None):
    """Incrementally download Taobao trades modified inside
    [update_from, update_to] for `user_id` and merge each accepted trade into
    the local order store. Per-trade failures are logged and skipped.
    """
    s_dt_f = format_datetime(update_from)
    s_dt_t = format_datetime(update_to)
    has_next = True
    cur_page = 1
    # Local import to avoid a circular dependency at module load time.
    from shopback.trades.service import TradeService, OrderService
    while has_next:
        response_list = apis.taobao_trades_sold_increment_get(
            tb_user_id=user_id, page_no=cur_page, fields='tid,modified',
            page_size=settings.TAOBAO_PAGE_SIZE, use_has_next='true',
            start_modified=s_dt_f, end_modified=s_dt_t)
        trade_list = response_list['trades_sold_increment_get_response']
        if trade_list.has_key('trades'):  # page may be empty (py2 idiom)
            for trade in trade_list['trades']['trade']:
                modified = parse_datetime(trade['modified']) if trade.get('modified', None) else None
                # presumably filters out trades already up-to-date locally —
                # confirm against TradeService.isValidPubTime
                if TradeService.isValidPubTime(user_id, trade['tid'], modified):
                    try:
                        trade_dict = OrderService.getTradeFullInfo(user_id, trade['tid'])
                        order_trade = OrderService.saveTradeByDict(user_id, trade_dict)
                        OrderService.createMergeTrade(order_trade)
                    except Exception, exc:
                        # Best-effort per trade: log and continue with the rest.
                        logger.error(u'淘宝订单下载失败:%s' % exc.message, exc_info=True)
        has_next = trade_list['has_next']
        cur_page += 1
def saveUserOrdersLogisticsTask(user_id, update_from=None, update_to=None):
    """Download logistics (shipping) detail records created inside the given
    window and persist them. Without an explicit window, defaults to
    yesterday 00:00 .. today 00:00 (local time).
    """
    if not (update_from and update_to):
        dt = datetime.datetime.now()
        update_from = datetime.datetime(dt.year, dt.month, dt.day, 0, 0, 0) - datetime.timedelta(1, 0, 0)
        update_to = datetime.datetime(dt.year, dt.month, dt.day, 0, 0, 0)
    update_from = format_datetime(update_from)
    update_to = format_datetime(update_to)
    has_next = True
    cur_page = 1
    while has_next:
        response_list = apis.taobao_logistics_orders_detail_get(
            tb_user_id=user_id, page_no=cur_page,
            page_size=settings.TAOBAO_PAGE_SIZE,
            start_created=update_from, end_created=update_to)
        logistics_list = response_list['logistics_orders_detail_get_response']
        if logistics_list['total_results'] > 0:
            for logistics in logistics_list['shippings']['shipping']:
                Logistics.save_logistics_through_dict(user_id, logistics)
        # Manual pagination: this endpoint reports a total instead of has_next.
        total_nums = logistics_list['total_results']
        cur_nums = cur_page * settings.TAOBAO_PAGE_SIZE
        has_next = cur_nums < total_nums
        cur_page += 1
def saveUserIncrementPurchaseOrderTask(seller_id, update_from=None, update_to=None):
    """Download fenxiao (distribution) purchase orders updated inside
    [update_from, update_to] for `seller_id` and merge the valid ones into the
    local trade store. No-op for sellers without fenxiao enabled.
    """
    seller = Seller.getSellerByVisitorId(seller_id)
    if not seller.has_fenxiao:
        return
    update_from = format_datetime(update_from)
    update_to = format_datetime(update_to)
    # Local import to avoid a circular dependency at module load time.
    from shopback.trades.service import TradeService, PurchaseOrderService
    has_next = True
    cur_page = 1
    # The fenxiao endpoint is queried with half the normal page size; the same
    # value must drive the has_next arithmetic below. The original computed
    # cur_nums with the FULL page size while requesting half-size pages, so
    # the loop terminated before all pages were fetched and orders were lost.
    page_size = settings.TAOBAO_PAGE_SIZE / 2
    while has_next:
        response_list = apis.taobao_fenxiao_orders_get(
            tb_user_id=seller_id, page_no=cur_page,
            time_type='update_time_type', page_size=page_size,
            start_created=update_from, end_created=update_to)
        orders_list = response_list['fenxiao_orders_get_response']
        if orders_list['total_results'] > 0:
            for o in orders_list['purchase_orders']['purchase_order']:
                modified = datetime.datetime.strptime(o['modified'], '%Y-%m-%d %H:%M:%S')
                if TradeService.isValidPubTime(seller_id, o['id'], modified):
                    purchase_order = PurchaseOrderService.savePurchaseOrderByDict(seller_id, o)
                    PurchaseOrderService.createMergeTrade(purchase_order)
        # Manual pagination: the endpoint reports a total instead of has_next.
        total_nums = orders_list['total_results']
        cur_nums = cur_page * page_size
        has_next = cur_nums < total_nums
        cur_page += 1
def complete_staff_event(request, id):
    """Mark the StaffEvent with the given id as finished and return a JSON
    payload describing it; returns an error payload if the event is unknown.
    """
    try:
        event = StaffEvent.objects.get(id=id)
    except StaffEvent.DoesNotExist:
        payload = {'code': 1, 'response_error': '未找到该事件'}
    else:
        event.is_finished = True
        event.save()
        content = {
            'id': event.id,
            'creator': event.creator.username,
            'executor': event.executor.username,
            'start': format_datetime(event.start),
            'end': format_datetime(event.end),
            'interval_day': event.interval_day,
            'title': event.title,
            'type': event.type,
            'created': format_datetime(event.created),
            'modified': format_datetime(event.modified),
            'is_finished': event.is_finished,
        }
        payload = {'code': 0, 'response_content': content}
        # audit-log the change only on success
        log_action(request.user.id, event, CHANGE, u'完成事件')
    return HttpResponse(json.dumps(payload), content_type="application/json")
def _get_cve_details(synopsis):
    """Look up a single CVE by name and shape its metadata (plus associated
    rules) into the API response dict.

    Raises ApplicationException(404) when no row matches `synopsis`.
    """
    try:
        data = (CveMetadata.select(CveImpact.name.alias('impact'),
                                   CveMetadata.cve.alias('synopsis'),
                                   CveMetadata.description,
                                   CveMetadata.cvss2_metrics,
                                   CveMetadata.cvss2_score,
                                   CveMetadata.cvss3_score,
                                   CveMetadata.cvss3_metrics,
                                   CveMetadata.modified_date,
                                   CveMetadata.public_date,
                                   CveMetadata.redhat_url,
                                   CveMetadata.secondary_url,
                                   CveMetadata.id)
                .join(CveImpact, on=(CveMetadata.impact_id == CveImpact.id))
                .where(CveMetadata.cve == synopsis)).dicts()[0]  # IndexError if absent
        rules_map = get_rules_for_cves([data['id']])
        return {
            "cvss2_metrics": str_or_none(data['cvss2_metrics']),
            "cvss2_score": str_or_none(data['cvss2_score']),
            "cvss3_metrics": str_or_none(data['cvss3_metrics']),
            "cvss3_score": str_or_none(data['cvss3_score']),
            "description": data['description'],
            "impact": data['impact'],
            "public_date": format_datetime(data['public_date']),
            "modified_date": format_datetime(data['modified_date']),
            "redhat_url": data['redhat_url'],
            "rules": rules_map.get(data['id'], []),
            "secondary_url": data['secondary_url'],
            "synopsis": data['synopsis'],
        }
    except IndexError:
        raise ApplicationException('No such CVE ID', 404)
def decorate(*args, **kwargs):
    """Invoke a JD (360buy) open-platform API method.

    `api_method`, `func_args`, `func_defaults` and `method` are closed over
    from the enclosing decorator factory. Application-level arguments are
    normalised, packed into the `360buy_param_json` field, signed, then sent
    via GET or POST and parsed by `raise_except_or_ret_json`.
    """
    params = {
        'method': api_method,
        'app_key': settings.JD_APP_KEY,
        'timestamp': format_datetime(datetime.datetime.now()),
        'v': '2.0'}
    app_params = {}
    if func_defaults:
        # right-align declared defaults against the argument names
        app_params.update(dict(zip(reversed(func_args), reversed(list(func_defaults)))))
    app_params.update(dict(zip(func_args, args)))
    app_params.update(kwargs)
    from shopback.users.models import User
    # refresh user taobao session
    if not app_params.has_key('access_token'):
        jd_user_id = app_params.pop('jd_user_id')
        user = User.objects.get(visitor_id=jd_user_id)
        access_token = refreshAccessToken(user)
    else:
        access_token = app_params.pop('access_token')
    params['access_token'] = access_token
    # Normalise values: drop Nones, stringify bools/unicode/datetimes.
    params_copy = dict(app_params)
    for k, v in params_copy.iteritems():  # iterate a copy: app_params is mutated
        if v == None:
            app_params.pop(k)  # remove the field with value None
        elif type(v) == bool:
            app_params[k] = v and 'true' or 'false'
        elif type(v) == unicode:
            app_params[k] = v.encode('utf8')
        elif type(v) == datetime.datetime:
            app_params[k] = format_datetime(v)
        elif type(v) == datetime.date:
            app_params[k] = format_date(v)
    params['360buy_param_json'] = json.dumps(app_params)
    params['sign'] = getJDSignature(params, settings.JD_APP_SECRET, both_side=True)
    params = urllib.urlencode(params)
    url = settings.JD_API_ENDPOINT
    if method == 'GET':
        uri = '%s?%s' % (url, params)
        req = urllib2.urlopen(uri)
        content = req.read()
    else:
        rst = urllib2.Request(url)
        req = urllib2.urlopen(rst, params)
        content = req.read()
    return raise_except_or_ret_json(content)
def getYundaYJSWData(self, obj):
    """Assemble the Yunda YJSW upload row for a trade object."""
    customer_id = '199886'
    trade_weight = self.parseTradeWeight(obj.weight)
    weighted_at = format_datetime(obj.weighted)
    return [
        obj.valid_code,
        obj.out_sid,
        None,
        '20',
        trade_weight,
        '0',
        customer_id,
        None,
        customer_id,
        None,
        None,
        200000,
        None,
        customer_id,
        '0',
        '14',
        weighted_at,
    ]
def _getYundaYJSWData(self, obj):
    """Assemble the Yunda YJSW upload row from a package dict."""
    cus_id = self.yd_account.cus_id
    row = [obj.get('valid_code', ''), obj['package_id'], None, '20']
    row.append('%.2f' % obj['upload_weight'])
    row.extend(['0', cus_id, None, cus_id, None, None, 200000, None, cus_id, '0', '14'])
    row.append(format_datetime(obj['weighted']))
    return row
def _get_cve_details(synopsis, args):
    """Look up a single CVE by name and build the API response dict,
    optionally expanding advisory details when the request args ask for them.

    Raises ApplicationException(404) when no row matches `synopsis`.
    """
    try:
        data = (CveMetadata.select(
            CveImpact.name.alias('impact'), CveMetadata.cve.alias('synopsis'),
            CveMetadata.description, CveMetadata.cvss2_metrics,
            CveMetadata.cvss2_score, CveMetadata.cvss3_score,
            CveMetadata.cvss3_metrics, CveMetadata.modified_date,
            CveMetadata.celebrity_name, CveMetadata.public_date,
            CveMetadata.redhat_url, CveMetadata.secondary_url, CveMetadata.id,
            CveMetadata.advisories_list, CveMetadata.exploits).join(
                CveImpact, on=(CveMetadata.impact_id == CveImpact.id)).where(
                    CveMetadata.cve == synopsis)).dicts()[0]  # IndexError if absent
        rules_map = get_rules_for_cves([data['id']], connexion.context['user'])
        retval = {
            "celebrity_name": str_or_none(data['celebrity_name']),
            "cvss2_metrics": str_or_none(data['cvss2_metrics']),
            "cvss2_score": str_or_none(data['cvss2_score']),
            "cvss3_metrics": str_or_none(data['cvss3_metrics']),
            "cvss3_score": str_or_none(data['cvss3_score']),
            "description": data['description'],
            "impact": data['impact'],
            "public_date": format_datetime(data['public_date']),
            "modified_date": format_datetime(data['modified_date']),
            "redhat_url": data['redhat_url'],
            "rules": rules_map.get(data['id'], []),
            "secondary_url": data['secondary_url'],
            "synopsis": data['synopsis'],
            "known_exploit": bool(data['exploits'])
        }
        # advisories_list may be NULL in the DB; normalise to []
        advisories_list = [] if data.get(
            'advisories_list') is None else data['advisories_list']
        if 'show_advisories_details' in args and args[
                'show_advisories_details'] is True and advisories_list:
            # expand advisory names into full advisory detail objects
            retval["advisories_list"] = GetCves._patch_request_advisories(
                advisories_list)
        else:
            retval["advisories_list"] = advisories_list
    except IndexError as exc:
        raise ApplicationException('No such CVE ID', 404) from exc
    return retval
def get(self, request, *args, **kwargs):
    """Refund statistics view.

    For the date range [dt_f, dt_t] (end day included), counts successfully
    refunded orders and classifies the distinct trades they belong to as
    full vs. partial refunds, further split by whether the trade had been
    consigned (shipped). Also returns the raw rows of `gen_refund_sql`.
    """
    dt_f = kwargs.get('dt_f')
    dt_t = kwargs.get('dt_t')
    dt_f = parse_date(dt_f)
    dt_t = parse_date(dt_t) + datetime.timedelta(1, 0, 0)  # include the whole end day
    # NOTE(review): `created__lte` also matches the exact midnight after the
    # end day; `created__lt` was probably intended — confirm before changing.
    queryset = Order.objects.filter(created__gte=dt_f, created__lte=dt_t,
                                    refund_status=pcfg.REFUND_SUCCESS)
    total_refund_num = queryset.count()
    full_refunds_num = 0
    part_refunds_num = 0
    consign_full_refunds_num = 0
    consign_part_refunds_num = 0
    # distinct trade ids that contain at least one refunded order
    refund_orders = queryset.values_list('trade', flat=True).distinct()
    for trade in refund_orders:
        trade = Trade.objects.get(id=trade)  # rebind: id -> Trade instance
        # Orders of this trade that were NOT successfully refunded: if any
        # exist the refund was partial, otherwise the whole trade refunded.
        refunds = Order.objects.filter(trade=trade).exclude(refund_status=pcfg.REFUND_SUCCESS)
        if refunds.count() > 0:
            part_refunds_num += 1
            if trade.consign_time:
                consign_part_refunds_num += 1
        else:
            full_refunds_num += 1
            if trade.consign_time:
                consign_full_refunds_num += 1
    cursor = connection.cursor()
    cursor.execute(self.gen_refund_sql(format_datetime(dt_f), format_datetime(dt_t)))
    result = cursor.fetchall()
    ret_dict = {
        'result': result,
        'total_refunds': total_refund_num,
        'full_refunds': full_refunds_num,
        'part_refunds': part_refunds_num,
        'consign_part_refunds': consign_part_refunds_num,
        'consign_full_refunds': consign_full_refunds_num,
    }
    return Response({"object": ret_dict})
def _get_shipment_cycle_info(self, rows, jsf_state, context, number_type, number, jsessionid, current=True):
    """Parse the tracking-history table rows into a list of shipment event
    dicts. For 'Vessel' rows (when `current` is set) an extra JSF partial-ajax
    POST resolves vessel details, which are merged into the event; rows with
    an empty mode inherit the last vessel info seen.
    """
    from common.utils import format_datetime
    shipment_cycle = []
    last_vessel_info = None
    for row in rows:
        cols = row.find_all(attrs={'class': 'labelTextMyFocus'})
        if cols:
            status = cols[0].getText()
            location = cols[1].getText()
            time = cols[2].getText()
            mode = cols[3].getText().strip()
            shipment = {
                'status': status,
                'location': location,
                # site times are Hong Kong local; normalise to UTC
                'time': format_datetime(time, 'Hongkong', 'UTC'),
                'mode': mode,
            }
            a_tag = cols[3].find_parent(name='a')
            if current and mode == 'Vessel' and a_tag:
                a_id = a_tag.get('id')
                data = self._get_common_post_data(number_type, number, jsf_state)
                data['cntrNum'] = number
                data['cntrStatus'] = status
                data['containerHistorySize'] = len(rows)
                data['containerSize'] = 1
                data['issueTime'] = time
                data[a_id] = a_id  # JSF: the clicked component submits its own id
                data['javax.faces.partial.render'] = 'vesselInfoField'
                data['javax.faces.source'] = a_id
                data['numberType'] = number_type
                post_response = self._post(context, data, number, number_type, jsessionid, jsf_state)
                # keep the JSF view state fresh for subsequent posts
                jsf_state = self._get_updated_value(
                    "javax.faces.ViewState", post_response)
                vessel_html = self._get_updated_value(
                    "vesselInfoField", post_response)
                vessel_info = self._parse_vessel_info(vessel_html)
                last_vessel_info = vessel_info
                shipment.update(vessel_info)
            if mode == '' and last_vessel_info:
                shipment.update(last_vessel_info)
            shipment_cycle.append(shipment)
    return shipment_cycle
def saveUserRefundOrderTask(user_id, update_from=None, update_to=None):
    """Page through Taobao received-refund records modified inside the window
    and persist each one locally via the Refund model."""
    start_str = format_datetime(update_from)
    end_str = format_datetime(update_to)
    page_no = 1
    while True:
        resp = apis.taobao_refunds_receive_get(
            tb_user_id=user_id, page_no=page_no,
            page_size=settings.TAOBAO_PAGE_SIZE,
            start_modified=start_str, end_modified=end_str)
        payload = resp['refunds_receive_get_response']
        total = payload['total_results']
        if total > 0:
            for entry in payload['refunds']['refund']:
                record, _created = Refund.objects.get_or_create(refund_id=entry['refund_id'])
                record.save_refund_through_dict(user_id, entry)
        # manual pagination: stop once this page reaches the reported total
        if page_no * settings.TAOBAO_PAGE_SIZE >= total:
            break
        page_no += 1
def decorate(*args, **kwargs):
    """Invoke a Taobao TOP API method on behalf of a shop user.

    `api_method`, `func_args`, `func_defaults` and `method` are closed over
    from the enclosing decorator factory. Arguments are merged with declared
    defaults, normalised to strings, authenticated with the user's stored
    `top_session`, and sent via GET or POST.
    """
    params = {
        'method': api_method,
        'format': 'json',
        'v': '2.0',
    }
    if func_defaults:
        # right-align declared defaults against the argument names
        params.update(
            dict(
                zip(reversed(func_args), reversed(list(func_defaults)))))
    params.update(dict(zip(func_args, args)))
    params.update(kwargs)
    from shopback.users.models import User
    # refresh user taobao session
    tb_user_id = params.pop('tb_user_id')
    user = User.objects.get(visitor_id=tb_user_id)
    # refresh_session(user,settings.APPKEY,settings.APPSECRET,settings.REFRESH_URL)
    params['access_token'] = user.top_session
    # Normalise values: drop Nones, stringify bools/unicode/datetimes.
    params_copy = dict(params)
    for k, v in params_copy.iteritems():  # iterate a copy: params is mutated
        if v == None:
            params.pop(k)  # remove the field with value None
        elif type(v) == bool:
            params[k] = v and 'true' or 'false'
        elif type(v) == unicode:
            params[k] = v.encode('utf8')
        elif type(v) == datetime.datetime:
            params[k] = format_datetime(v)
        elif type(v) == datetime.date:
            params[k] = format_date(v)
    params_copy = None
    url = settings.TAOBAO_API_ENDPOINT
    if method == 'GET':
        uri = '%s?%s' % (url, urllib.urlencode(params))
        req = urllib2.urlopen(uri)
        content = req.read()
    else:
        rst = urllib2.Request(url)
        req = urllib2.urlopen(rst, urllib.urlencode(params))
        content = req.read()
    return raise_except_or_ret_json(content)
def _get_container_info(self, soup, number_type):
    """Extract container summary fields from the tracking page; returns an
    empty dict for anything other than a CONTAINER query."""
    if number_type != 'CONTAINER':
        return {}
    from common.utils import format_datetime
    panel = soup.find(id='CargoTracking1') \
        .find(attrs={'class': 'Containerkuang3'})
    table_rows = panel.find(name='table').findChildren(name='tr')
    # the last row holds the container summary cells
    cells = table_rows[-1].find_all(attrs={'class': 'labelTextMyFocus'})
    texts = [cell.getText() for cell in cells[:6]]
    return {
        'container_num': texts[0],
        'container_size': texts[1],
        'seal_no': texts[2],
        'location': texts[3],
        'status': texts[4],
        # site times are Hong Kong local; normalise to UTC
        'datetime': format_datetime(texts[5], 'Hongkong', 'UTC'),
    }
def getTradeMessageByMobile(self, mobile):
    """Build a pretty-printed JSON text response describing the latest trade
    associated with the given mobile number."""
    trade = self.getLatestTradeByMobile(mobile)
    item_names = [o.getSimpleName()
                  for o in trade.merge_orders.filter(sys_status=pcfg.IN_EFFECT)]
    fields = [
        (u'昵称', trade.buyer_nick),
        # pay_time may be unset; then the field stays falsy instead of formatted
        (u'成交时间', trade.pay_time and format_datetime(trade.pay_time)),
        (u'订单状态', self.getTrade2BuyerStatus(trade.status, trade.sys_status)),
        (u'订单详细', item_names),
    ]
    return self.genTextRespJson(self.formatJsonToPrettyString(fields))
def check_ohlcs(broker_id, market_id, timeframe, from_date, to_date):
    """Stream stored OHLC candles for a market and report anomalies: gaps
    between consecutive candles, non-positive prices and negative volumes.
    Progress is reported roughly every 1% of the requested time range.
    """
    last_ohlcs = {}
    ohlc_streamer = Database.inst().create_ohlc_streamer(broker_id, market_id, timeframe,
                                                         from_date=from_date, to_date=to_date, buffer_size=100)
    timestamp = from_date.timestamp()
    to_timestamp = to_date.timestamp()
    progression = 0.0
    prev_update = timestamp
    count = 0
    total_count = 0
    progression_incr = (to_timestamp - timestamp) * 0.01
    tts = 0.0
    prev_tts = 0.0
    while not ohlc_streamer.finished():
        ohlcs = ohlc_streamer.next(timestamp + timeframe * 100)  # per 100
        count = len(ohlcs)
        total_count += len(ohlcs)
        for ohlc in ohlcs:
            tts = ohlc.timestamp
            if not prev_tts:
                prev_tts = tts
            # Fix: compute the display date for every candle. The original
            # assigned `date` only inside the gap branch, so the price/volume
            # warnings below could reference an unbound or stale value.
            date = format_datetime(tts)
            gap_duration = tts - prev_tts - timeframe
            if gap_duration != 0:
                Terminal.inst().warning("Ohlc gap of %s on %s !" % (format_delta(gap_duration), date))
            if ohlc.bid_open <= 0.0:
                Terminal.inst().warning("Bid open price is lesser than 0 %s on %s !" % (ohlc.bid_open, date))
            if ohlc.bid_high <= 0.0:
                Terminal.inst().warning("Bid high price is lesser than 0 %s on %s !" % (ohlc.bid_high, date))
            if ohlc.bid_low <= 0.0:
                # Fix: original message said "Bid close" here (copy-paste).
                Terminal.inst().warning("Bid low price is lesser than 0 %s on %s !" % (ohlc.bid_low, date))
            if ohlc.bid_close <= 0.0:
                Terminal.inst().warning("Bid close price is lesser than 0 %s on %s !" % (ohlc.bid_close, date))
            if ohlc.ofr_open <= 0.0:
                Terminal.inst().warning("Ofr open price is lesser than 0 %s on %s !" % (ohlc.ofr_open, date))
            if ohlc.ofr_high <= 0.0:
                Terminal.inst().warning("Ofr high price is lesser than 0 %s on %s !" % (ohlc.ofr_high, date))
            if ohlc.ofr_low <= 0.0:
                Terminal.inst().warning("Ofr low price is lesser than 0 %s on %s !" % (ohlc.ofr_low, date))
            if ohlc.ofr_close <= 0.0:
                Terminal.inst().warning("Ofr close price is lesser than 0 %s on %s !" % (ohlc.ofr_close, date))
            if ohlc.volume < 0.0:
                Terminal.inst().warning("Volume quantity is lesser than 0 %s on %s !" % (ohlc.volume, date))
            prev_tts = tts
            timestamp = tts
            if tts > to_timestamp:
                break
            if tts - prev_update >= progression_incr:
                progression += 1
                Terminal.inst().info("%i%% on %s, %s for last 100 candles, current total of %s..." % (
                    progression, format_datetime(timestamp), count, total_count))
                prev_update = timestamp
                count = 0
        if tts > to_timestamp:
            break
        if len(ohlcs) == 0:
            # no results, inc from one step
            timestamp += timeframe * 100
    if progression < 100:
        Terminal.inst().info("100%% on %s, %s for last 100 candles, current total of %s..." % (
            format_datetime(timestamp), count, total_count))
    Terminal.inst().info("Last candle datetime is %s" % (format_datetime(tts),))
def export_ohlcs_siis_1_0_0(broker_id, market_id, timeframe, from_date, to_date, dst):
    """Export stored OHLC candles to `dst` as SIIS 1.0.0 tab-separated rows.

    One line per candle: "YYYYMMDD HHMMSS" UTC datetime followed by bid OHLC,
    ofr OHLC and volume. Progress is reported roughly every 1% of the range.
    """
    last_ohlcs = {}
    ohlc_streamer = Database.inst().create_ohlc_streamer(broker_id, market_id, timeframe,
                                                         from_date=from_date, to_date=to_date, buffer_size=100)
    timestamp = from_date.timestamp()
    to_timestamp = to_date.timestamp()
    progression = 0.0
    prev_update = timestamp
    count = 0
    total_count = 0
    progression_incr = (to_timestamp - timestamp) * 0.01
    tts = 0.0
    prev_tts = 0.0
    while not ohlc_streamer.finished():
        ohlcs = ohlc_streamer.next(timestamp + timeframe * 100)  # per 100
        count = len(ohlcs)
        total_count += len(ohlcs)
        for ohlc in ohlcs:
            ohlc_dt = datetime.utcfromtimestamp(
                ohlc.timestamp).strftime("%Y%m%d %H%M%S")
            dst.write("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % (
                ohlc_dt, ohlc.bid_open, ohlc.bid_high, ohlc.bid_low, ohlc.bid_close,
                ohlc.ofr_open, ohlc.ofr_high, ohlc.ofr_low, ohlc.ofr_close, ohlc.volume))
            tts = ohlc.timestamp
            if not prev_tts:
                prev_tts = tts
            prev_tts = tts
            timestamp = tts
            if timestamp > to_timestamp:
                break
            if timestamp - prev_update >= progression_incr:
                progression += 1
                Terminal.inst().info("%i%% on %s, %s for last 100 candles, current total of %s..." % (
                    progression, format_datetime(timestamp), count, total_count))
                prev_update = timestamp
                count = 0
        if timestamp > to_timestamp:
            break
        if len(ohlcs) == 0:
            # Fix: advance only when the batch is empty. The original tested
            # `count == 0`, but `count` is zeroed after each progress report,
            # so a non-empty batch could wrongly jump the stream forward by
            # 100 timeframes and silently drop candles.
            timestamp += timeframe * 100
    if progression < 100:
        Terminal.inst().info("100%% on %s, %s for last 100 candles, current total of %s..." % (
            format_datetime(timestamp), count, total_count))
    Terminal.inst().info("Last candle datetime is %s" % (format_datetime(tts),))
def check_ticks(broker_id, market_id, from_date, to_date):
    """Stream stored ticks for a market and report anomalies: out-of-order
    timestamps (broken file), gaps longer than 60 seconds, non-positive
    prices and negative volumes. Progress is reported roughly every 1% of
    the requested time range.
    """
    last_ticks = []
    tick_streamer = Database.inst().create_tick_streamer(broker_id, market_id,
                                                         from_date=from_date, to_date=to_date)
    timestamp = from_date.timestamp()
    to_timestamp = to_date.timestamp()
    progression = 0.0
    prev_update = timestamp
    count = 0
    total_count = 0
    progression_incr = (to_timestamp - timestamp) * 0.01
    tts = 0.0
    prev_tts = 0.0
    while not tick_streamer.finished():
        # return any ticks until last time to 1 min more
        ticks = tick_streamer.next(timestamp + Instrument.TF_1M)
        count = len(ticks)
        total_count += len(ticks)
        for data in ticks:
            tts = data[0]
            bid = data[1]
            ofr = data[2]
            vol = data[3]
            if not prev_tts:
                prev_tts = tts
            # Fix: compute the display date from the tick's own timestamp for
            # every tick. The original assigned `date` only inside the gap
            # branches (and from the window start, not the tick), so the
            # price/volume warnings could use an unbound or stale value.
            date = format_datetime(tts)
            gap_duration = tts - prev_tts
            if tts != prev_tts and gap_duration < 0.0:
                Terminal.inst().error("Tick timestamp is before previous of %s on %s ! Broken file !" % (
                    format_delta(gap_duration), date))
            if gap_duration > 60.0:
                Terminal.inst().warning("Tick gap of %s on %s !" % (format_delta(gap_duration), date))
            if bid <= 0.0:
                Terminal.inst().warning("Bid price is lesser than 0 %s on %s !" % (bid, date))
            if ofr <= 0.0:
                Terminal.inst().warning("Ofr price is lesser than 0 %s on %s !" % (ofr, date))
            if vol < 0.0:
                Terminal.inst().warning("Volume quantity is lesser than 0 %s on %s !" % (vol, date))
            prev_tts = tts
            if tts > to_timestamp:
                break
            if tts - prev_update >= progression_incr:
                progression += 1
                Terminal.inst().info("%i%% on %s, %s ticks/trades for 1 minute, current total of %s..." % (
                    progression, format_datetime(timestamp), count, total_count))
                prev_update = tts
                count = 0
        if tts > to_timestamp:
            break
        timestamp += Instrument.TF_1M  # by step of 1m
    if progression < 100:
        Terminal.inst().info("100%% on %s, %s ticks/trades for 1 minute, current total of %s..." % (
            format_datetime(timestamp), count, total_count))
    Terminal.inst().info("Last tick datetime is %s" % (format_datetime(tts),))
def get_rules_for_cves(cves: list, rh_account=None) -> dict:
    """Return associated Insights rules for each CVE id in `cves`.

    The result maps cve_id -> list of rule-detail dicts. When `rh_account`
    resolves to an account id, each rule detail additionally carries a
    `systems_affected` count restricted to that account's non-stale,
    opted-in, not-deleted systems.
    """
    # pylint: disable=singleton-comparison, unsubscriptable-object
    rules_map = {}
    rule_cve_mapping = {}
    rh_account_id = None
    if rh_account is not None:
        try:
            rh_account_id = RHAccount.select(
                RHAccount.id).where(RHAccount.name == rh_account)[0]
        except IndexError:
            # unknown account name: fall through to the account-less query
            pass
    if rh_account_id is not None:
        # Vulnerable systems of this account; used below to count affected
        # systems per (rule, CVE) pair.
        subquery = (
            SystemVulnerabilities.select(
                SystemVulnerabilities.id,
                SystemVulnerabilities.rule_id,
                SystemVulnerabilities.cve_id).join(
                    SystemPlatform,
                    on=((SystemVulnerabilities.system_id == SystemPlatform.id) &
                        (SystemPlatform.when_deleted.is_null(True)) &
                        (SystemPlatform.stale == False) &  # noqa: E712
                        (SystemPlatform.opt_out == False)))  # noqa: E712
            .where(SystemVulnerabilities.cve_id.in_(cves)).where(
                (SystemVulnerabilities.rh_account_id == rh_account_id) &
                (SystemVulnerabilities.mitigation_reason.is_null(True))))
        subquery = cyndi_join(subquery)
    # Without an account: plain rule/CVE mapping. With an account: the same
    # mapping left-joined against the subquery and grouped, to also count
    # systems_affected per rule.
    mapping = (CveRuleMapping.select(CveRuleMapping.cve_id, InsightsRule.name, InsightsRule.description_text,
                                     InsightsRule.summary_text, InsightsRule.reboot_required,
                                     InsightsRule.playbook_count, InsightsRule.change_risk,
                                     InsightsRule.kbase_node_id, InsightsRule.active, CveMetadata.cve,
                                     InsightsRule.rule_impact, InsightsRule.publish_date)
               .join(InsightsRule, on=(CveRuleMapping.rule_id == InsightsRule.id))
               .join(CveMetadata, on=(CveRuleMapping.cve_id == CveMetadata.id))
               .where((InsightsRule.active == True) & (~InsightsRule.rule_only))  # noqa: E712
               .where(CveRuleMapping.rule_id.in_(CveRuleMapping.select(CveRuleMapping.rule_id).where(CveRuleMapping.cve_id.in_(cves)))).dicts()) \
        if rh_account_id is None else \
        (CveRuleMapping.select(CveRuleMapping.cve_id, InsightsRule.name, InsightsRule.description_text,
                               InsightsRule.summary_text, InsightsRule.reboot_required,
                               InsightsRule.playbook_count, InsightsRule.change_risk,
                               InsightsRule.kbase_node_id, InsightsRule.active, CveMetadata.cve,
                               InsightsRule.rule_impact, InsightsRule.publish_date,
                               fn.COUNT(subquery.c.id).alias('systems_affected'))
         .join(InsightsRule, on=(CveRuleMapping.rule_id == InsightsRule.id))
         .join(CveMetadata, on=(CveRuleMapping.cve_id == CveMetadata.id))
         .join(subquery, JOIN.LEFT_OUTER, on=((subquery.c.rule_id == InsightsRule.id) &
                                              (subquery.c.cve_id == CveRuleMapping.cve_id)))
         .where((InsightsRule.active == True) & (~InsightsRule.rule_only))  # noqa: E712
         .where(CveRuleMapping.rule_id.in_(CveRuleMapping.select(CveRuleMapping.rule_id).where(CveRuleMapping.cve_id.in_(cves))))
         .group_by(CveRuleMapping.cve_id, InsightsRule.name, InsightsRule.description_text,
                   InsightsRule.summary_text, InsightsRule.reboot_required, InsightsRule.playbook_count,
                   InsightsRule.change_risk, InsightsRule.kbase_node_id, InsightsRule.active,
                   CveMetadata.cve, InsightsRule.rule_impact, InsightsRule.publish_date)
         .dicts())
    # First pass: rule name -> every CVE it is associated with.
    for row in mapping:
        rule_cve_mapping.setdefault(row['name'], []).append(row['cve'])
    # Second pass: build the per-CVE rule-detail entries.
    for row in mapping:
        rule_detail = {
            'rule_id': row['name'],
            'description': row['description_text'],
            'summary': row['summary_text'],
            'reboot_required': row['reboot_required'],
            'playbook_count': row['playbook_count'],
            'change_risk': row['change_risk'],
            'kbase_node_id': row['kbase_node_id'],
            'associated_cves': rule_cve_mapping[row['name']],
            'rule_impact': row['rule_impact'],
            'publish_date': format_datetime(row['publish_date'])
        }
        if rh_account_id is not None:
            rule_detail['systems_affected'] = row['systems_affected']
        rules_map.setdefault(row['cve_id'], []).append(rule_detail)
    return rules_map
def export_ticks_siis_1_0_0(broker_id, market_id, from_date, to_date, dst):
    """Export stored ticks to `dst` as SIIS 1.0.0 tab-separated rows.

    One line per tick: "YYYYMMDD HHMMSSffffff" UTC datetime, bid, ofr and
    volume. Progress is reported roughly every 1% of the requested range.
    """
    last_ticks = []
    tick_streamer = Database.inst().create_tick_streamer(broker_id, market_id,
                                                         from_date=from_date, to_date=to_date)
    timestamp = from_date.timestamp()
    to_timestamp = to_date.timestamp()
    progression = 0.0
    prev_update = timestamp
    count = 0
    total_count = 0
    progression_incr = (to_timestamp - timestamp) * 0.01
    tts = 0.0
    prev_tts = 0.0
    while not tick_streamer.finished():
        ticks = tick_streamer.next(timestamp + Instrument.TF_1M)
        count = len(ticks)
        total_count += len(ticks)
        for data in ticks:
            # Fix: track the tick's own timestamp. The original never assigned
            # `tts` from `data` and formatted `ohlc.timestamp` — an undefined
            # name copy-pasted from the OHLC exporter (NameError at runtime).
            tts = data[0]
            tick_dt = datetime.utcfromtimestamp(tts).strftime("%Y%m%d %H%M%S%f")
            dst.write("%s\t%s\t%s\t%s\n" % (tick_dt, data[1], data[2], data[3]))
            if not prev_tts:
                prev_tts = tts
            prev_tts = tts
            if tts > to_timestamp:
                break
            if timestamp - prev_update >= progression_incr:
                progression += 1
                Terminal.inst().info("%i%% on %s, %s ticks/trades for 1 minute, current total of %s..." % (
                    progression, format_datetime(timestamp), count, total_count))
                prev_update = timestamp
                count = 0
        if timestamp > to_timestamp:
            break
        timestamp += Instrument.TF_1M  # by step of 1m
    if progression < 100:
        Terminal.inst().info("100%% on %s, %s ticks/trades for 1 minute, current total of %s..." % (
            format_datetime(timestamp), count, total_count))
    Terminal.inst().info("Last tick datetime is %s" % (format_datetime(tts),))