Example 1
def parse_refs(data):
    try:
        fetch_heads = re.findall(r'([a-z0-9]{40})\trefs/heads/(.+?)\n', data,
                                 re.M)
        FETCH_HEAD = ""
        for index in fetch_heads:
            writeFile(
                os.path.join(paths.GITHACK_DIST_TARGET_GIT_PATH,
                             "refs/remotes/origin/%s" % str(index[1])),
                "%s\n" % (index[0]))
            FETCH_HEAD += "%s\tnot-for-merge\t'%s' of %s\n" % (
                index[0], index[1], target.TARGET_GIT_URL[:-5])

        config = """[core]
        repositoryformatversion = 0
        filemode = true
        bare = false
        logallrefupdates = true
        ignorecase = true
        precomposeunicode = true
    [remote "origin"]
        url = %s
        fetch = +refs/heads/*:refs/remotes/origin/*
    """ % (target.TARGET_GIT_URL[:-1])
        writeFile(os.path.join(paths.GITHACK_DIST_TARGET_GIT_PATH, "config"),
                  config)
    except:
        logger.warning("Parse refs Fail")
Example 2
def method_a():
    logger.info("Try to Clone straightly")
    git_dir = os.path.join(paths.GITHACK_DIST_TARGET_PATH, ".git")
    if os.path.exists(git_dir):
        logger.warning("[Skip][First Try] %s already exists." % (git_dir))
        return valid_git_repo()
    return clone()
Example 3
def DNSzoneTransfer():
    path = os.path.join(paths.OUTPUT_PATH, "DNS-zoneTransfer.txt")
    logger.info("Target domain: " + conf.TARGET)
    if zonetransfer_poc(conf.TARGET, path):
        logger.warning("Vulnerable!")
        logger.info("Save results to %s" % path)
    else:
        logger.info("Not vulnerable.")
Example 4
def DNSzoneTransfer():
    path = os.path.join(paths.OUTPUT_PATH, 'DNS-zoneTransfer.txt')
    logger.info('Target domain: ' + conf.TARGET)
    if zonetransfer_poc(conf.TARGET, path):
        logger.warning('Vulnerable!')
        logger.info('Save results to %s' % path)
    else:
        logger.info('Not vulnerable.')
Example 5
def method_c():
    logger.info("Try to clone with Cache")
    git_dir = os.path.join(paths.GITHACK_DIST_TARGET_PATH, ".git")
    if not os.path.exists(git_dir):
        init()
    clone_from_cache()
    if not valid_git_repo():
        logger.warning("Clone With Cache end. But missed some files.")
    return True
Example 6
def request_data(url):
    for i in range(3):
        data = None
        try:
            request = urllib2.Request(url, None, {'User-Agent': randomAgent()})
            data = urllib2.urlopen(request).read()
            if data:
                return data
        except Exception as e:
            if DEBUG:
                logger.warning("Request Exception: %s" % str(e))
Example 7
def clone():
    logger.info("Clone")
    cmd = "git clone %s %s" % (target.TARGET_GIT_URL, paths.GITHACK_DIST_TARGET_PATH)
    # process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # stdout, stderr = process.communicate()
    # process.wait()
    ret = os.system(cmd)
    if ret:
        mkdir_p(paths.GITHACK_DIST_TARGET_PATH)
        logger.warning("Clone Error")
        return False
    return True
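The commented-out lines hint at a subprocess-based alternative; a sketch of that variant (same cmd string, capturing output instead of writing it to the terminal):

import subprocess

process = subprocess.Popen(cmd, shell=True,
                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()  # blocks until git exits
if process.returncode != 0:
    logger.warning("Clone Error")
    # fall back to mkdir_p(paths.GITHACK_DIST_TARGET_PATH) as above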
Example 8
def parse_commit(data, commithash):
    obj = None
    try:
        de_data = zlib.decompress(data)
        m = re.search(
            'commit \d+?\x00tree ([a-z0-9]{40})\n', de_data, re.M | re.S | re.I)
        if m:
            obj = m.group(1)
        parents = re.findall('parent ([a-z0-9]{40})\n', de_data, re.M | re.S | re.I)
    except:
        parents = []
        if DEBUG:
            logger.warning("Decompress Commit(%s) Fail" % commithash)
    return (obj, parents)
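A decompressed loose commit object is "commit <size>\0" followed by tree/parent lines, which is exactly what the two patterns pull out. A synthetic round trip (made-up hashes, run under Python 2 like the surrounding code):

import zlib

body = "tree " + "a" * 40 + "\nparent " + "b" * 40 + "\n\nmessage\n"
raw = zlib.compress("commit %d\x00%s" % (len(body), body))
tree, parents = parse_commit(raw, "b" * 40)
# tree == "aaa...a", parents == ["bbb...b"]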
Example 9
def method_b():
    logger.info("Try to Clone with Directory Listing")
    if isdirlist():
        try:
            git_dir = os.path.join(paths.GITHACK_DIST_TARGET_PATH, ".git")
            if not os.path.exists(git_dir):
                init()
            clone_from_list("/")
            refresh_files()
            if not valid_git_repo():
                logger.warning(
                    "Clone With Directory Listing end. But missed some files.")
            return True
        except:
            return False
    logger.warning("[Skip][First Try] Target is not support Directory Listing")
    return False
Example 10
    def execute(self, request: Request, response: Response):
        self.target = ''
        self.requests = request
        self.response = response
        output = None
        try:
            output = self.audit()
        except NotImplementedError:
            logger.error('Plugin: {0} not defined "{1}" mode'.format(
                self.name, 'audit'))

        except ConnectTimeout:
            retry = RETRY
            while retry > 0:
                logger.debug('Plugin: {0} timeout, start it over.'.format(
                    self.name))
                try:
                    output = self.audit()
                    break
                except ConnectTimeout:
                    logger.debug('POC: {0} time-out retry failed!'.format(
                        self.name))
                retry -= 1
            else:
                msg = "connect target '{0}' failed!".format(self.target)
                logger.error(msg)

        except HTTPError as e:
            logger.warning(
                'Plugin: {0} HTTPError occurs, start it over.'.format(
                    self.name))

        except ConnectionError as e:
            msg = "connect target '{0}' failed!".format(self.target)
            logger.error(msg)

        except TooManyRedirects as e:
            logger.error(str(e))

        except Exception as e:
            logger.error(str(e))

        return output
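The retry branch relies on Python's while ... else: the else block runs only when the loop finishes without a break, i.e. when every retry failed. A minimal illustration:

def attempt():
    return False  # force the failure path for this illustration

retry = 3
while retry > 0:
    if attempt():
        break      # success: skips the else block
    retry -= 1
else:
    print("all retries exhausted")  # reached only because no break happened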
Example 11
def cache_objects():
    for entry in parse_index(os.path.join(paths.GITHACK_DIST_TARGET_GIT_PATH, "index")):
        if 'sha1' in entry.keys():
            try:
                data = get_objects(entry["sha1"])
                if data:
                    data = zlib.decompress(data)
                    data = re.sub('blob \d+\00', '', data)
                    target_dir = os.path.join(paths.GITHACK_DIST_TARGET_PATH, os.path.dirname(entry["name"]))
                    if target_dir and not os.path.exists(target_dir):
                        os.makedirs(target_dir)
                    with open(os.path.join(paths.GITHACK_DIST_TARGET_PATH, entry["name"]), 'wb') as f:
                        f.write(data)
                    # logger.error(data[:4])
                    # if data[:4] == 'tree':
                    #     objs = parse_tree(data[11:])
                    #     for obj in objs:
                    #         cache_commits(obj)
            except:
                logger.warning("Clone Objects(%s) Fail" % (entry["sha1"]))
Example 12
def parse_tree(text, strict=False):
    count = 0
    retVal = []
    l = len(text)
    while count < l:
        mode_end = text.index(b' ', count)
        mode_text = text[count:mode_end]
        if strict and mode_text.startswith(b'0'):
            logger.warning("Invalid mode '%s'" % mode_text)
            break
        try:
            mode = int(mode_text, 8)
        except ValueError:
            logger.warning("Invalid mode '%s'" % mode_text)
            break
        name_end = text.index(b'\0', mode_end)
        name = text[mode_end + 1:name_end]
        count = name_end + 21
        sha = text[name_end + 1:count]
        if len(sha) != 20:
            logger.warning("Sha has invalid length")
            break
        hexsha = sha_to_hex(sha)
        # print (name, mode, hexsha)
        retVal.append(hexsha)
    return retVal
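Each tree entry is "<mode> <name>\0" followed by the raw 20-byte sha, laid end to end; sha_to_hex is a project helper, assumed here to hexlify the raw digest. A small self-contained check with a stand-in for that helper:

import binascii

def sha_to_hex(sha):
    # stand-in for the project's helper
    return binascii.hexlify(sha)

entry = b'100644 README.md\x00' + b'\xaa' * 20
print(parse_tree(entry))  # one entry: the sha printed as 40 hex characters of 'a'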
Example 13
def loadPlugin(url, poc=None):
    """load all plugins.
    """
    if "://" not in url:
        url = "http://" + url
    url = url.strip("/")
    logger.info("Target url: %s" % url)

    plugin_path = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))),"plugins")
    if not os.path.isdir(plugin_path):
        logger.warning("%s is not a directory! " % plugin_path)
        raise EnvironmentError
    logger.info("Plugin path: %s " % plugin_path)
    
    items = os.listdir(plugin_path)
    if poc:
        logger.info('Loading plugins with "%s" key words.' % poc)
    else:
        poc=""
    for item in items:
        if item.endswith(".py") and not item.startswith('__'):
            plugin_name = item[:-3]
            if poc in plugin_name:
                logger.info("Loading plugin: %s" % plugin_name)

                module = importlib.import_module("plugins." + plugin_name)

                try:
                    result = module.run(url)
                    if result:
                        logger.success(result)
                    else:
                        logger.error("Not Vulnerable %s " % plugin_name)
                except:
                    logger.warning("ConnectionError ")
            else:
                continue

    logger.info("Finished")
Example 14
    async def _request(self, method, url, **kwargs):
        headers = kwargs.get('headers')
        if headers is None:
            headers = {}

        if 'Accept' not in headers.keys():
            headers[
                "Accept"] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'

        if 'Accept-Charset' not in headers.keys():
            headers["Accept-Charset"] = 'GB2312,utf-8;q=0.7,*;q=0.7'

        if 'Accept-Encoding' not in headers.keys():
            headers["Accept-Encoding"] = 'gzip, deflate, sdch, br'

        headers['Referer'] = url
        if 'User-Agent' not in headers or 'aiohttp' in headers["User-Agent"]:
            headers["User-Agent"] = random.choice(USER_AGENTS)

        # random_ip = random_IP()
        # if 'Client_IP' not in headers.keys():
        #     headers['Client_IP'] = random_ip
        # if 'X-Forwarded-For' not in headers.keys():
        #     headers['X-Forwarded-For'] = random_ip

        kwargs.setdefault('headers', headers)
        kwargs.setdefault('verify_ssl', False)

        if self._limit:
            await self._limit.wait_available()
        total = self._max_fail_retries if method.lower() == 'get' else 0

        timeout = int(conf['config']['basic']['timeout'])

        if 'timeout' not in kwargs.keys():
            kwargs.setdefault('timeout', timeout)

        for count in range(total):
            resp = None
            try:
                resp = await super()._request(method, url, **kwargs)
                return resp
            except Exception as ex:
                pass
            logger.warning(
                'request to {url} failed, retrying ({count} / {total})...'.
                format(url=url, count=count, total=total))
            if resp:
                resp.close()
            await asyncio.sleep(self._retry_interval)
        try:
            return await super()._request(method, url, **kwargs)
        except TooManyRedirects:
            kwargs.setdefault('max_redirects', 3)
            try:
                return await self._request(method, url, **kwargs)
            except:
                return None
        except (ClientOSError, ClientResponseError, ClientConnectorError,
                ServerDisconnectedError):
            return None
        except Exception as e:
            if str(e).strip() != '':
                # errmsg = traceback.format_exc()
                # logger.error(errmsg)
                logger.error("Curl error: %s for %s" % (str(e), url))
            return None