def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-q", "--quiet", help="Display only error messages",
                        action="store_true", required=False)
    parser.add_argument("-d", "--debug", help="Display debug messages",
                        action="store_true", required=False)
    args = parser.parse_args()

    init_log("service")
    if args.quiet:
        log.setLevel(logging.WARN)
    elif args.debug:
        log.setLevel(logging.DEBUG)

    s = service()
    try:
        s.run()
    except KeyboardInterrupt:
        log.warning("Keyboard interrupt, stopping...")
        s.stop()
def setUp(self):
    self.url = "http://pycm.baidu.com:8081"
    self.out_dir = "output_here"
    self.url_content = \
"""<!DOCTYPE html>
<html>
<head>
<meta charset=utf8>
<title>Crawl Me</title>
</head>
<body>
<ul>
<li><a href=page1.html>page 1</a></li>
<li><a href="page2.html">page 2</a></li>
<li><a href='page3.html'>page 3</a></li>
<li><a href='mirror/index.html'>mirror</a></li>
<li><a href='javascript:location.href="page4.html"'>page 4</a></li>
</ul>
</body>
</html>
"""
    gl_value.URL_LIST_FILE = "./urls"
    gl_value.OUTPUT_DIRECTORY = "../output"
    gl_value.MAX_DEPTH = float(8)
    gl_value.CRAWL_INTERVA = 0.1
    gl_value.CRAWL_TIMEOUT = float(1)
    gl_value.THREAD_COUNT = 12
    gl_value.TARGET_URL = ".*.(gif|png|jpg|bmp)$"
    log.init_log('%s/test.log' % gl_value.OUTPUT_DIRECTORY)
def get_weibo_by_coordinate(session, coordinate, starttime, endtime,
                            range=2000, sort=0, count=20, page=1, offset=0):
    # Roll the log file over when the year changes.
    if log_date.log_date.year != datetime.datetime.now().year:
        log_date.change_log_date()
        init_log()
    num = 0
    pd_403 = [0] * len(APP_SOURCE_LIST)
    end_403 = [1] * len(APP_SOURCE_LIST)
    while True:
        try:
            # Pick a random app source to spread requests across tokens.
            app_id = random.randint(0, len(APP_SOURCE_LIST) - 1)
            url = "http://api.weibo.com/2/place/nearby_timeline.json?"
            url += "source=" + APP_SOURCE_LIST[app_id]
            url += "&lat=" + coordinate['latitude'] + "&long=" + coordinate['longitude']
            url += "&starttime=" + str(starttime) + "&range=" + str(range) + "&sort=" + str(sort)
            url += "&count=" + str(count) + "&page=" + str(page) + "&offset=" + str(offset)
            text = session.get(url)
            if text.status_code == 403:
                # Mark this source as rate limited; back off much longer once all sources are blocked.
                pd_403[app_id] = 1
                if pd_403 == end_403:
                    sleep_time = 15600
                else:
                    sleep_time = random.randint(12, 30)
                wait_time(sleep_time)
                continue
            break
        except ConnectionError as err:
            num += 1
            lg_warning(err)
            lg_debug('connect fail ' + str(num))
            sleep_time = random.randint(6, 10)
            wait_time(sleep_time)
            continue
        except Exception as err:
            num += 1
            print('Connection reset by peer error')
            lg_warning(err)
            lg_debug('Connection reset by peer ' + str(num))
            sleep_time = random.randint(10, 20)
            wait_time(sleep_time)
            continue

    text_dict = None
    text_list_dict = None
    try:
        text_dict = text.json()
        if 'statuses' in text_dict:
            text_list_dict = text_dict['statuses']
            lg_debug('success catch the info_list')
        else:
            lg_debug("get_weibo_by_coordinate: No Json")
    except Exception as err:
        lg_warning(err)
        lg_debug("get_weibo_by_coordinate: No Json")
    return text_list_dict
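# Hedged usage sketch for get_weibo_by_coordinate; the coordinate and the
# Unix-style timestamps below are placeholders, not values from the project.
statuses = get_weibo_by_coordinate(session,
                                   {'latitude': '39.9042', 'longitude': '116.4074'},
                                   starttime=1430409600, endtime=1430496000)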
def setUp(self): """ Before run test, init gloable varibles. """ self.lock = threading.Lock() self.url_queue = Queue.Queue() self.crawed_urls = set() gl_value.URL_LIST_FILE = "./urls" gl_value.OUTPUT_DIRECTORY = "./output_here" gl_value.MAX_DEPTH = float(8) gl_value.CRAWL_INTERVA = 0.1 gl_value.CRAWL_TIMEOUT = float(1) gl_value.THREAD_COUNT = 12 gl_value.TARGET_URL = ".*.(gif|png|jpg|bmp)$" log.init_log('%s/test.log' % gl_value.OUTPUT_DIRECTORY)
def main(): """main func: parse sys.argv firstly,and read conf to set global variable. start threads by conf to spider,then stop thread when over. """ log.init_log('./log/spider') conf = opt_parser.opt_parser(sys.argv) """init global variables""" try: conf_parser.conf_parser(conf) except UnboundLocalError as msg: logging.error("Read conf fail. Message: %s" % msg) return """init queue by url file""" lock = threading.Lock() url_queue = Queue.Queue() crawed_urls = set() try: fp = open(gl_value.URL_LIST_FILE) except IOError as msg: logging.error("Open url file %s fail. Message: %s" % (gl_value.URL_LIST_FILE,msg)) return for start_point in fp.readlines(): if not start_point.startswith('http'): break start_url = url_info.Url(start_point.strip('/\n\r')) url_queue.put(start_url) threads = [] """start thread""" for i in xrange(gl_value.THREAD_COUNT): thread = SpiderThread(url_queue,lock,crawed_urls) threads.append(thread) time.sleep(1) thread.start() logging.info("Staring spider thread...") """stop thread""" for thread in threads: thread.join() logging.info("Spider work is done!") print "Spider work is done!"
def main():
    usage = 'usage: python ./openswitch.py [options]... '
    parser = OptionParser(usage=usage)
    parser.add_option('-u', '--gobpg-url', dest='gobgp_url',
                      default='127.0.0.1', help='specifying an url')
    parser.add_option('-p', '--gobgp-port', dest='gobgp_port',
                      default=50051, help='specifying a port')
    parser.add_option('-o', '--ovsdb-sock', dest='ovsdb',
                      default='unix:/var/run/openvswitch/db.sock',
                      help='specifying the connection destination of the ovsdb '
                           'Example '
                           ' - unix:<socket file path> '
                           ' - tcp:<address>:<port>')
    parser.add_option('-l', '--log-level', dest='log_level',
                      default='info', help='specifying a log level')
    parser.add_option('-f', '--log-file', dest='log_file', default=None,
                      help='specifying the output destination of the log file')
    (options, args) = parser.parse_args()

    log.init_log(options.log_level, options.log_file)

    signal.signal(signal.SIGINT, utils.receive_signal)

    # connection with each
    ops = OpsConnection(options.ovsdb)
    ops.connect()
    gobgp = GobgpConnection(options.gobgp_url, int(options.gobgp_port))
    gobgp.connect()

    # get handler
    ops_hdr = ops.get_handler()
    gobgp_hdr = gobgp.get_handler()

    # set each other's handler
    ops.hdr.set_handler(gobgp_hdr)
    gobgp.hdr.set_handler(ops_hdr)

    # run thread
    threads = []
    threads.append(ops.start())
    threads.append(gobgp.start())
    for th in threads:
        while th.isAlive():
            time.sleep(1)
        th.join()
def initialize(self):
    # load settings
    # settings = config.settings
    self.set_working_dir()
    parse_options()

    # create app and update settings
    init_log()
    from app.app import make_app
    self.app = make_app(options.COOKIE_SECRET, options.DEBUG)

    # send ping to web-socket-connected users
    BeatPing()
    self._get_loop()
    from lib.heartbeat import heartbeat
    self.loop.call_later(0.5, heartbeat.ticker, app=self.app)
    self.loop.run_sync(self.app.mq.connect)
    self._init_subscribe()
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-q", "--quiet", help="Display only error messages",
                        action="store_true", required=False)
    parser.add_argument("-d", "--debug", help="Display debug messages",
                        action="store_true", required=False)
    args = parser.parse_args()

    init_log("remote_service")
    if args.quiet:
        log.setLevel(logging.WARN)
    elif args.debug:
        log.setLevel(logging.DEBUG)

    s = remote_service()
    try:
        s.run()
    except KeyboardInterrupt:
        log.warning("Keyboard interrupt, stopping...")
        s.stop()
def ac_monitor(request):
    action = request.GET.get('action')
    service = request.GET.get('service')
    idc = request.GET.get('idc')
    log.init_log(os.path.join(os.path.dirname(os.path.realpath(__file__)),
                              'lib/log/block'))
    respons_data = {}
    if action == 'block':
        rtcode, msg = block.block_monitor(idc)
        respons_data['status'] = rtcode
        respons_data['msg'] = msg
        return HttpResponse(json.dumps(respons_data),
                            content_type="application/json")
    elif action == 'unblock':
        rtcode, msg = block.unblock_monitor(idc)
        respons_data['status'] = rtcode
        respons_data['msg'] = msg
        return HttpResponse(json.dumps(respons_data),
                            content_type="application/json")
    else:
        respons_data['status'] = 1
        respons_data['msg'] = "Invalid action '%s', must be block or unblock!" % action
        return HttpResponse(json.dumps(respons_data),
                            content_type="application/json")
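# Hedged usage sketch for the view above; the URL path and the idc value are
# assumptions, since the project's urls.py is not shown here.
from django.test import Client

client = Client()
resp = client.get('/ac_monitor', {'action': 'block', 'idc': 'idc01'})
print(resp.status_code, resp.content)   # expects a JSON body like {"status": 0, "msg": "..."}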
import json
import random
from datetime import datetime

from user.login import get_session, wblogin
from settings.settings import USERNAME, PASSWORD, START_NUM, QUERY_COORDINATE_LIST
from lib.base import init_xls, init_env
from lib.log import lg_debug, lg_info, lg_warning, init_log
from lib.lib_func import convert_time, wait_time, arbitrary_precision_compare
from official.weibo_api import get_weibo_by_coordinate
from save import save_data_by_db
from spider import get_info_history

if __name__ == '__main__':
    init_env()
    # Set up logging before the first lg_info call.
    init_log()
    lg_info(json.dumps(wblogin(USERNAME, PASSWORD), ensure_ascii=False))
    num = START_NUM
    init_xls()
    session = get_session()
    get_info_history(session)

    # Search by keyword:
    # for i in KEY_WORDS:
    #     for j in range(0, 3):
    #         start_time = '2015-5-' + str(3 * j + 1)
    #         end_time = '2015-5-' + str(3 * j + 3)
    #         num = search_info_by_id(session, KEY_WORDS[0], start_time, end_time, num, 0)

    # Iterate by date:
    # for y in range(2015, 2016):
    #     for m in range(1, 2):
    #         for d in range(1, DAY_NUM[m]):
    print model
    if model.table_exists():
        model.drop_table()
    model.create_table()
    logging.info('created table:%s' % model._meta.db_table)


def runserver():
    http_server = HTTPServer(Application(), xheaders=True)
    http_server.listen(options.port)
    # http_server.bind(options.port)
    # http_server.start(num_processes=0)
    loop = tornado.ioloop.IOLoop.instance()
    # periodically refresh the proxy url list
    tornado.ioloop.PeriodicCallback(save_proxylist, 10 * 1000).start()
    logging.info("Server running on http://0.0.0.0:%d" % (options.port))
    # sys.excepthook = log.log_exception
    loop.start()


if __name__ == '__main__':
    parse_command_line()
    log.init_log(settings['log_name'])
    if options.cmd == 'syncdb':
        syncdb()
    else:
        runserver()
import json
import base64
import binascii
import rsa
import requests
import logging
# Excel read/write helpers
from openpyxl import Workbook, load_workbook

from lib.log import lg_info, lg_warning, lg_debug, init_log
from lib.lib_func import wait_time

# logging.basicConfig(level=logging.DEBUG)
init_log()

WBCLIENT = 'ssologin.js(v1.4.5)'
user_agent = (
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.11 (KHTML, like Gecko) '
    'Chrome/20.0.1132.57 Safari/536.11')

session = requests.session()
session.headers['User-Agent'] = user_agent

PROXIES = {
    'http': 'http://1.202.150.116:8080',
    'https': 'http://180.109.171.64:8118',
}


def encrypt_passwd(passwd, pubkey, servertime, nonce):
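# The body of encrypt_passwd is truncated above. Hedged sketch only: ssologin.js(v1.4.5)
# style logins typically RSA-encrypt "servertime \t nonce \n password" with the
# server-supplied hex modulus and exponent 0x10001, then hex-encode the result.
# This may not match this project's exact implementation.
def encrypt_passwd_sketch(passwd, pubkey, servertime, nonce):
    key = rsa.PublicKey(int(pubkey, 16), 0x10001)
    message = '%s\t%s\n%s' % (servertime, nonce, passwd)
    return binascii.b2a_hex(rsa.encrypt(message.encode('utf-8'), key))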
""" import getopt import logging import os import sys import threading from lib import common from lib import conf_manager from lib import crawl_thread from lib import log from lib import usage # init log log.init_log("./log/min_crawl") def main(): """main process""" # analysis the command line parameters try: opts, args = getopt.getopt(sys.argv[1:], "hc:", ["help"]) for opt, param in opts: if opt in ("-h", "--help"): usage.usage() sys.exit(0) if opt == "-c": conf_name = param conf_file_path_name = os.path.join(conf_manager.CONF_PATH, conf_name) if not os.path.isfile(conf_file_path_name):
def main(): parser = argparse.ArgumentParser() parser.add_argument("-p","--print_config",help="Print current config", action="store_true", required=False) parser.add_argument("-ls","--list_servers",help="List all cuckoo servers information", action="store_true", required=False) parser.add_argument("-lr","--list_remote_src",help="List all remote sources", action="store_true", required=False) parser.add_argument("-ll","--list_local_src",help="List all local sources", action="store_true", required=False) parser.add_argument("-r","--download_report",metavar="<report_id>",type=int,help="Download task reports (overwrite existing)", action="store", required=False) parser.add_argument("-s","--submit_task",metavar="<task_id>",type=str,help="Submit new file for analysis", action="store", required=False) parser.add_argument("-t","--print_task_status",metavar="<task_id>",type=int,help="Display task information", action="store", required=False) parser.add_argument("-d","--debug",help="Display debug messages", action="store_true", required=False) parser.add_argument("--add_remote_src",metavar="<remote ip address>",type=str,help="Add remote source", action="store", required=False) parser.add_argument("--set_remote_src",nargs=2,metavar=("<src id>","<on/off>"),type=str,help="Set remote source state", action="store", required=False) parser.add_argument("--add_local_src",metavar="<local folder>",type=str,help="Add local source (folder)", action="store", required=False) parser.add_argument("--set_local_src",nargs=2,metavar=("<src id>","<on/off>"),type=str,help="Set local source state (on/off)", action="store", required=False) parser.add_argument("--add_server",metavar=("<ip address>","<ssh port>","<login>","<password>","<cuckoo path>"),nargs=5,type=str,help="Add cuckoo server", action="store", required=False) parser.add_argument("--set_server",metavar=("<server id>","<on/off>"),type=str,nargs=2,help="Set cuckoo server state", action="store", required=False) parser.add_argument("--set_reports_autodownload",type=str,metavar="<on/off>",choices=["on","off"],help="Set reports autodownload", action="store", required=False) parser.add_argument("--set_usermode_analysis",type=str,metavar="<on/off>",choices=["on","off"],help="Set usermode analysis", action="store", required=False) parser.add_argument("--set_kernelmode_analysis",metavar="<on/off>",type=str,choices=["on","off"],help="Set kernelmode analysis (on/off)", action="store", required=False) parser.add_argument("--set_usermode_warn_limit",metavar="<score>",type=int,help="Set usermode analysis warning score", action="store", required=False) parser.add_argument("--set_kernelmode_warn_limit",metavar="<score>",type=int,help="Set kernelmode analysis warning score", action="store", required=False) parser.add_argument("--set_usermode_alert_limit",metavar="<score>",type=int,help="Set usermode analysis alert score", action="store", required=False) parser.add_argument("--set_kernelmode_alert_limit",metavar="<score>",type=int,help="Set kernelmode analysis alert score", action="store", required=False) parser.add_argument("--set_usermode_timeout",metavar="<timeout, seconds>",type=int,help="Set usermode analysis timeout (seconds)", action="store", required=False) parser.add_argument("--set_kernelmode_timeout",metavar="<timeout, seconds>",type=int,help="Set kernelmode analysis timeout (seconds)", action="store", required=False) parser.add_argument("--set_sampling",metavar="<rate, percent>",type=int,help="Set sampling feature (analyze only x percent files)", action="store", 
required=False) parser.add_argument("--download_all_reports",help="Download all tasks reports (overwrite existing)", action="store_true", required=False) args = parser.parse_args() init_log("query") obj = cmdline() if args.debug: log.setLevel(logging.DEBUG) if args.print_config: obj.print_configuration() elif args.list_remote_src: obj.list_remote_src() elif args.add_remote_src: obj.add_remote_src(args.add_remote_src) elif args.set_remote_src: obj.set_remote_src(args.set_remote_src) elif args.list_local_src: obj.list_local_src() elif args.add_local_src: obj.add_local_src(args.add_local_src) elif args.set_local_src: obj.set_local_src(args.set_local_src) elif args.list_servers: obj.list_servers() elif args.add_server: obj.add_server(args.add_server) elif args.set_server: obj.set_server(args.set_server) elif args.set_reports_autodownload: obj.set_reports_autodownload(args.set_reports_autodownload) elif args.set_usermode_analysis: obj.set_usermode_analysis(args.set_usermode_analysis) elif args.set_usermode_warn_limit: obj.set_usermode_warn_limit(args.set_usermode_warn_limit) elif args.set_usermode_alert_limit: obj.set_usermode_alert_limit(args.set_usermode_alert_limit) elif args.set_usermode_timeout: obj.set_usermode_timeout(args.set_usermode_timeout) elif args.set_kernelmode_analysis: obj.set_kernelmode_analysis(args.set_kernelmode_analysis) elif args.set_kernelmode_warn_limit: obj.set_kernelmode_warn_limit(args.set_kernelmode_warn_limit) elif args.set_kernelmode_alert_limit: obj.set_kernelmode_alert_limit(args.set_kernelmode_alert_limit) elif args.set_kernelmode_timeout: obj.set_kernelmode_timeout(args.set_kernelmode_timeout) elif args.submit_task: obj.submit_task(args.submit_task) elif args.set_sampling: obj.set_sampling(args.set_sampling) elif args.download_all_reports: obj.download_all_reports() elif args.download_report: obj.download_report(args.download_report) elif args.print_task_status: obj.print_task(args.print_task_status)
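# Hedged invocation examples for the CLI above; the script filename "query.py" is
# an assumption taken from init_log("query"), not stated in the source.
#   python query.py -p                      # print current config
#   python query.py -s /tmp/sample.exe -d   # submit a file, with debug logging
#   python query.py --set_sampling 10       # analyze only 10 percent of files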
def main(): parser = argparse.ArgumentParser() parser.add_argument("-p", "--print_config", help="Print current config", action="store_true", required=False) parser.add_argument("-ls", "--list_servers", help="List all cuckoo servers information", action="store_true", required=False) parser.add_argument("-lr", "--list_remote_src", help="List all remote sources", action="store_true", required=False) parser.add_argument("-ll", "--list_local_src", help="List all local sources", action="store_true", required=False) parser.add_argument("-r", "--download_report", metavar="<report_id>", type=int, help="Download task reports (overwrite existing)", action="store", required=False) parser.add_argument("-s", "--submit_task", metavar="<task_id>", type=str, help="Submit new file for analysis", action="store", required=False) parser.add_argument("-t", "--print_task_status", metavar="<task_id>", type=int, help="Display task information", action="store", required=False) parser.add_argument("-d", "--debug", help="Display debug messages", action="store_true", required=False) parser.add_argument("--add_remote_src", metavar="<remote ip address>", type=str, help="Add remote source", action="store", required=False) parser.add_argument("--set_remote_src", nargs=2, metavar=("<src id>", "<on/off>"), type=str, help="Set remote source state", action="store", required=False) parser.add_argument("--add_local_src", metavar="<local folder>", type=str, help="Add local source (folder)", action="store", required=False) parser.add_argument("--set_local_src", nargs=2, metavar=("<src id>", "<on/off>"), type=str, help="Set local source state (on/off)", action="store", required=False) parser.add_argument("--add_server", metavar=("<ip address>", "<ssh port>", "<login>", "<password>", "<cuckoo path>"), nargs=5, type=str, help="Add cuckoo server", action="store", required=False) parser.add_argument("--set_server", metavar=("<server id>", "<on/off>"), type=str, nargs=2, help="Set cuckoo server state", action="store", required=False) parser.add_argument("--set_reports_autodownload", type=str, metavar="<on/off>", choices=["on", "off"], help="Set reports autodownload", action="store", required=False) parser.add_argument("--set_usermode_analysis", type=str, metavar="<on/off>", choices=["on", "off"], help="Set usermode analysis", action="store", required=False) parser.add_argument("--set_kernelmode_analysis", metavar="<on/off>", type=str, choices=["on", "off"], help="Set kernelmode analysis (on/off)", action="store", required=False) parser.add_argument("--set_usermode_warn_limit", metavar="<score>", type=int, help="Set usermode analysis warning score", action="store", required=False) parser.add_argument("--set_kernelmode_warn_limit", metavar="<score>", type=int, help="Set kernelmode analysis warning score", action="store", required=False) parser.add_argument("--set_usermode_alert_limit", metavar="<score>", type=int, help="Set usermode analysis alert score", action="store", required=False) parser.add_argument("--set_kernelmode_alert_limit", metavar="<score>", type=int, help="Set kernelmode analysis alert score", action="store", required=False) parser.add_argument("--set_usermode_timeout", metavar="<timeout, seconds>", type=int, help="Set usermode analysis timeout (seconds)", action="store", required=False) parser.add_argument("--set_kernelmode_timeout", metavar="<timeout, seconds>", type=int, help="Set kernelmode analysis timeout (seconds)", action="store", required=False) parser.add_argument( "--set_sampling", metavar="<rate, percent>", 
type=int, help="Set sampling feature (analyze only x percent files)", action="store", required=False) parser.add_argument("--download_all_reports", help="Download all tasks reports (overwrite existing)", action="store_true", required=False) args = parser.parse_args() init_log("query") obj = cmdline() if args.debug: log.setLevel(logging.DEBUG) if args.print_config: obj.print_configuration() elif args.list_remote_src: obj.list_remote_src() elif args.add_remote_src: obj.add_remote_src(args.add_remote_src) elif args.set_remote_src: obj.set_remote_src(args.set_remote_src) elif args.list_local_src: obj.list_local_src() elif args.add_local_src: obj.add_local_src(args.add_local_src) elif args.set_local_src: obj.set_local_src(args.set_local_src) elif args.list_servers: obj.list_servers() elif args.add_server: obj.add_server(args.add_server) elif args.set_server: obj.set_server(args.set_server) elif args.set_reports_autodownload: obj.set_reports_autodownload(args.set_reports_autodownload) elif args.set_usermode_analysis: obj.set_usermode_analysis(args.set_usermode_analysis) elif args.set_usermode_warn_limit: obj.set_usermode_warn_limit(args.set_usermode_warn_limit) elif args.set_usermode_alert_limit: obj.set_usermode_alert_limit(args.set_usermode_alert_limit) elif args.set_usermode_timeout: obj.set_usermode_timeout(args.set_usermode_timeout) elif args.set_kernelmode_analysis: obj.set_kernelmode_analysis(args.set_kernelmode_analysis) elif args.set_kernelmode_warn_limit: obj.set_kernelmode_warn_limit(args.set_kernelmode_warn_limit) elif args.set_kernelmode_alert_limit: obj.set_kernelmode_alert_limit(args.set_kernelmode_alert_limit) elif args.set_kernelmode_timeout: obj.set_kernelmode_timeout(args.set_kernelmode_timeout) elif args.submit_task: obj.submit_task(args.submit_task) elif args.set_sampling: obj.set_sampling(args.set_sampling) elif args.download_all_reports: obj.download_all_reports() elif args.download_report: obj.download_report(args.download_report) elif args.print_task_status: obj.print_task(args.print_task_status)