Example #1
    def poll(self):
        self.log.info("Downloading {0}".format(self.feed_url))
        try:
            info, fileobj = yield utils.fetch_url(self.feed_url)
        except utils.FetchUrlFailed as fuf:
            raise bot.PollSkipped("failed to download {0} ({1})".format(
                self.feed_url, fuf))
        self.log.info("Downloaded")

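        # Emit one event per feed line, storing the host as either a domain name or an IP.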
        for line in fileobj:
            url, netloc = parseURL(line)
            if url is None:
                continue
            event = events.Event()
            event.add("url", url)
            if i_am_a_name(netloc):
                event.add("domain name", netloc)
            else:
                event.add("ip", netloc)
            event.add("feeder", "siri urz")
            event.add("feed", "vxvault")
            event.add("feed url", self.feed_url)
            event.add("type", "malware url")
            event.add("description",
                      "This host is most likely hosting a malware URL.")
            yield idiokit.send(event)
Example #2
    def _poll(self, url):
        self.log.info("Downloading %s" % url)
        try:
            info, fileobj = yield utils.fetch_url(url)
        except utils.FetchUrlFailed as fuf:
            raise bot.PollSkipped("failed to download {0!r} ({1})".format(
                url, fuf))
Example #3
    def poll(self):
        self.log.info("Downloading updates from {0!r}".format(self.url))
        try:
            info, fileobj = yield utils.fetch_url(self.url)
        except utils.FetchUrlFailed as fuf:
            raise bot.PollSkipped("Downloading {0!r} failed ({1})".format(self.url, fuf))
        self.log.info("Updates downloaded from {0!r}".format(self.url))

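        # Parse the CSV response into events and run each one through the normalizer.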
        yield idiokit.pipe(
            utils.csv_to_events(fileobj, columns=self._columns),
            idiokit.map(self._normalize))
Example #4
    def poll(self):
        url = self.feed_url % self.application_key

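        # Check the ETag with a HEAD request first so unchanged data is not downloaded again.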
        try:
            self.log.info("Checking if {0!r} has new data".format(url))
            info, _ = yield utils.fetch_url(HeadRequest(url))

            etag = info.get("etag", None)
            if etag is not None and self._etag == etag:
                raise bot.PollSkipped(
                    "no new data detected (ETag stayed the same)")

            self.log.info("Downloading data from {0!r}".format(url))
            _, fileobj = yield utils.fetch_url(url)
        except utils.FetchUrlFailed as error:
            raise bot.PollSkipped("failed to download {0!r} ({1})".format(
                url, error))

        self.log.info("Downloaded data from {0!r}".format(url))

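        # The response is bzip2-compressed XML; parse it incrementally and process each completed <entry>.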
        reader = BZ2Reader(fileobj)
        try:
            depth = 0
            sites = dict()

            for event, element in etree.iterparse(reader,
                                                  events=("start", "end")):
                if event == "start" and element.tag == "entry":
                    depth += 1

                if event == "end" and element.tag == "entry":
                    yield self._handle_entry(element, sites)
                    depth -= 1

                if event == "end" and depth == 0:
                    element.clear()
        except SyntaxError as error:
            raise bot.PollSkipped("syntax error in report {0!r} ({1})".format(
                url, error))
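        # Remember the ETag only when the whole report parsed successfully.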
        else:
            self._etag = etag
Example #5
    def poll(self):
        self.log.info("Downloading {0}".format(self.feed_url))
        try:
            info, fileobj = yield utils.fetch_url(self.feed_url)
        except utils.FetchUrlFailed as fuf:
            raise bot.PollSkipped("Download failed: {0}".format(fuf))

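        # Skip blank lines and comment lines in the feed.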
        lines = []
        for line in fileobj:
            line = line.strip()

            if line and not line.startswith("#"):
                lines.append(line)

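        # Parse the remaining lines as CSV using the charset reported by the server,
        # then pipe the resulting events through _parse().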
        yield idiokit.pipe(
            utils.csv_to_events(tuple(lines),
                                columns=COLUMNS,
                                charset=info.get_param("charset", None)),
            _parse())