def draw_price(ax, instrument, end_date, levels, start, colors):
    """Plot the price curve for *instrument* and overlay the per-level
    detail lines produced by ``DrawPrice``."""
    from v2_19.test import DrawPrice

    params = get_params(instrument, end_date, 0)
    data_path = os.path.join(DATA_DIR, instrument)
    curr_price = PriceCurves(params, data_path, start)
    x = curr_price.get_point_x()
    y = curr_price.get_point_y()
    graph = Graph(ax, x, y)
    # Template is handed to DrawPrice as-is; presumably it fills in the
    # two %s slots itself -- TODO confirm.
    log_path = '/home/zj/detail_logs/%s-%s.log'
    painter = DrawPrice(ax, (x, y), instrument, levels, log_path)
    graph.draw_line(painter.draw(colors))
def draw_price(ax, instrument, end_date, level, start):
    """Plot the price curve plus, for each entry in *level*, the four trade
    point sets (open/close x long/short) in a per-level color."""
    params = get_params(instrument, end_date, 0)
    data_path = os.path.join(DATA_DIR, instrument)
    curr_price = PriceCurves(params, data_path, start)
    x = curr_price.get_point_x()
    y = curr_price.get_point_y()
    graph = Graph(ax, x, y)
    # Base curve first; marker sets are appended level by level below.
    lines = [Line(ax, x, y, 'black', sz='0.5', sp='--', zo=1)]
    # (data key, marker shape) in the fixed draw order.
    marker_specs = (('o_l', 'o'), ('c_l', 'x'), ('o_s', 's'), ('c_s', '^'))
    for idx, lvl in enumerate(level):
        pos_x, pos_y = get_data_from_sigle_file(
            curr_price, lvl, instrument, end_date)
        shade = colors[idx % 10]  # cycle through the 10-color palette
        lines.extend(
            Line(ax, pos_x[key], pos_y[key], shade,
                 sz='5', sp=marker, zo=2, ap=0.3)
            for key, marker in marker_specs)
    graph.draw_line(lines)
def draw_price(ax, instrument, end_date, level, start):
    """Plot the price curve and the open/close long/short trade points for
    a single *level* (long in red, short in green)."""
    params = get_params(instrument, end_date, 0)
    data_path = os.path.join(DATA_DIR, instrument)
    curr_price = PriceCurves(params, data_path, start)
    x = curr_price.get_point_x()
    y = curr_price.get_point_y()
    graph = Graph(ax, x, y)
    pos_x, pos_y = get_data_from_file(curr_price, level, instrument, end_date)
    lines = [Line(ax, x, y, 'b', sp='-')]
    # (data key, color, marker) for each trade-point series, in draw order.
    for key, color, marker in (('o_l', 'r', 'o'), ('c_l', 'r', '^'),
                               ('o_s', 'g', 'o'), ('c_s', 'g', '^')):
        lines.append(Line(ax, pos_x[key], pos_y[key], color, sp=marker))
    graph.draw_line(lines)
def illust_data(cls, response: HtmlResponse):
    """Return the preloaded illust record addressed by the ``illust_id``
    query parameter of *response*'s URL."""
    page = cls.page_data(response)
    query = get_params(response.url)
    return page['preload']['illust'][int(query['illust_id'])]
def author_data(cls, response: HtmlResponse):
    """Return the preloaded user (author) record addressed by the ``id``
    query parameter of *response*'s URL.

    Fix: the local was previously named ``illust_id`` (copied from the
    sibling ``illust_data``), but the ``id`` parameter here keys into
    ``preload['user']`` and is a user id, not an illust id.
    """
    page_data = cls.page_data(response)
    params = get_params(response.url)
    user_id = int(params['id'])
    return page_data['preload']['user'][user_id]
gc.enable() # if len(sys.argv) < 3: # print 'Usage: python dl6.py <instrument> [<level>...]\n' # exit(0) # instrument = sys.argv[1].encode('ascii') # levels = list(set(sys.argv[2::])) # levels.sort() instrument = 'ni888' levels = ['270'] end_date = '20171108' o_l, o_s, c_l, c_s = load_trade_point_data(levels, instrument, end_date) params = get_params(instrument, end_date, 0) data_path = os.path.join(DATA_DIR, instrument) curr_price = PriceCurves(params, data_path, 2099200) curr_price.get_point_y() gom = Gom() file_name = '%s-%s-%s.log' % (instrument, end_date, '_'.join(levels)) for point in curr_price.get_point_x(): price = curr_price.get_y_from_x(point) # print 'curr price is %s' % price if point in c_l: gom.close_interest(instrument, DIRECTION_TYPE.LONG, c_l.count(point), price, time.time(), point) # print 'close long' # print 'volumn is %d' % o_s.count(point) # exit(0)
def main():
    """CLI entry point: discover valid HTTP parameters for a target URL.

    Flow: parse args -> baseline requests -> heuristic extraction of
    candidate names from the page -> binary-search narrowing in chunks ->
    threaded per-parameter confirmation via ``bruter`` -> report / JSON.
    """
    # Banner (color codes come from module-level green/white/end).
    print('''%s _ /_| _ ' ( |/ /(//) %sv1.3%s _/ %s''' % (green, white, green, end))
    parser = argparse.ArgumentParser()  # defines the parser
    # Arguments that can be supplied
    parser.add_argument('-u', help='target url', dest='url', required=True)
    parser.add_argument('-d', help='request delay', dest='delay', type=int)
    parser.add_argument('-t', help='number of threads', dest='threads', type=int)
    parser.add_argument('-f', help='file path', dest='file')
    parser.add_argument('-o', help='Path for the output file', dest='output_file')
    parser.add_argument('--get', help='use get method', dest='GET', action='store_true')
    parser.add_argument('--post', help='use post method', dest='POST', action='store_true')
    parser.add_argument('--headers', help='http headers prompt', dest='headers', action='store_true')
    parser.add_argument('--include', help='include this data in every request', dest='include')
    args = parser.parse_args()  # arguments to be parsed
    url = args.url
    params_file = args.file or './db/params.txt'  # default wordlist
    headers = args.headers
    delay = args.delay or 0
    include = args.include or {}
    threadCount = args.threads or 2
    if headers:
        # --headers: interactively prompt for raw headers, then parse them.
        headers = extract_headers(prompt())
    else:
        headers = {}
    if args.GET:
        GET = True
    else:
        GET = False
    # Normalize --include (string or {}) into a params dict sent every request.
    include = get_params(include)
    # Load the candidate parameter wordlist, one name per line.
    paramList = []
    try:
        with open(params_file, 'r') as params_file:
            for line in params_file:
                paramList.append(line.strip('\n'))
    except FileNotFoundError:
        print('%s The specified file doesn\'t exist' % bad)
        quit()
    url = stabilize(url)
    # Baseline 1: plain request with only the --include data.
    print('%s Analysing the content of the webpage' % run)
    firstResponse = requester(url, include, headers, GET, delay)
    # Baseline 2: request with a random non-existent parameter, to measure
    # how the target responds to unknown params (reflections, length, code).
    print('%s Now lets see how target deals with a non-existent parameter' % run)
    originalFuzz = random_string(6)
    data = {originalFuzz: originalFuzz[::-1]}
    data.update(include)
    response = requester(url, data, headers, GET, delay)
    # Times the reversed fuzz value is reflected in the response body.
    reflections = response.text.count(originalFuzz[::-1])
    print('%s Reflections: %s%i%s' % (info, green, reflections, end))
    originalResponse = response.text
    originalCode = response.status_code
    print('%s Response Code: %s%i%s' % (info, green, originalCode, end))
    newLength = len(response.text)
    plainText = remove_tags(originalResponse)
    plainTextLength = len(plainText)
    print('%s Content Length: %s%i%s' % (info, green, newLength, end))
    print('%s Plain-text Length: %s%i%s' % (info, green, plainTextLength, end))
    # Comparison factors for bruter: does an unknown param leave the HTML
    # (or at least its plain text) identical to the plain baseline?
    factors = {'sameHTML': False, 'samePlainText': False}
    if len(firstResponse.text) == len(originalResponse):
        factors['sameHTML'] = True
    elif len(remove_tags(firstResponse.text)) == len(plainText):
        factors['samePlainText'] = True
    # Mine additional candidate names out of the page itself (mutates paramList).
    print('%s Parsing webpage for potential parameters' % run)
    heuristic(firstResponse.text, paramList)
    fuzz = random_string(8)
    data = {fuzz: fuzz[::-1]}
    data.update(include)
    # Narrow the wordlist: split into chunks of 25, then repeatedly halve
    # chunks until single surviving params are extracted into foundParams.
    print('%s Performing heuristic level checks' % run)
    toBeChecked = slicer(paramList, 25)
    foundParams = []
    while True:
        toBeChecked = narrower(toBeChecked)
        toBeChecked = unity_extracter(toBeChecked, foundParams)
        if not toBeChecked:
            break
    if foundParams:
        print('%s Heuristic found %i potential parameters.' % (info, len(foundParams)))
        paramList = foundParams
    finalResult = []
    jsonResult = []
    # Confirm each surviving candidate concurrently with bruter.
    threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
    futures = (threadpool.submit(bruter, param, originalResponse, originalCode, factors, include, reflections, delay, headers, url, GET) for param in foundParams)
    for i, result in enumerate(concurrent.futures.as_completed(futures)):
        if result.result():
            finalResult.append(result.result())
        print('%s Progress: %i/%i' % (info, i + 1, len(paramList)), end='\r')
    print('%s Scan Completed' % info)
    # Each result is a {param: reason} mapping.
    for each in finalResult:
        for param, reason in each.items():
            print('%s Valid parameter found: %s%s%s' % (good, green, param, end))
            print('%s Reason: %s' % (info, reason))
            jsonResult.append({"param": param, "reason": reason})
    # Finally, export to json
    if args.output_file and jsonResult:
        print("Saving output to JSON file in %s" % args.output_file)
        with open(str(args.output_file), 'w') as json_output:
            json.dump(
                {"results": jsonResult},
                json_output,
                sort_keys=True,
                indent=4,
            )