                sys.exit()
            if maxlinks < 1:
                print("Maximum links must be at least 1")
                sys.exit(1)
    else:
        print("Invalid number of arguments")
        sys.exit(1)
    # Start with no logger so the generic exception handler below can test it safely.
    log = None
    try:
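        # Handle Ctrl+C (SIGINT) with the module's signal handler, then set up logging.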
        signal.signal(signal.SIGINT, signal_handler)
        CrawlerLogger.init()
        log = CrawlerLogger.getlogger()
        if not url.endswith('/'):
            url += '/'
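        # Set up the crawler: start URL, backing database file, output links file, and link limit.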
        crawler = Crawler(url, 'crawler.db', 'links.txt', maxlinks)
        print('Crawling ...')
        res = crawler.Crawl()
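        # If the crawl succeeded, open links.txt so the results are visible.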
        if res:
            webbrowser.open("links.txt")
        
    except CrawlerError as ce:
        print(ce)
        
    except Exception as e:
        if log is not None:
            log.error(e, exc_info=True)
            print("Unexpected error occurred. Check the log file for details")
        else:
            print("Unexpected error occurred")
            
    removelock()