Example #1
        def run(self, argv):
            # The real thing starts here
            # FIXME: Maybe this should be done on __init__?
            self.config = configlib.ConfigAgent()
            self.config.parseCmdLine(sys.argv[1:]) # We need some config data before..
            self.config.parseConfigFile()
            # But we also need to give priority to parseCmdLine so it's parsed again...
            self.config.parseCmdLine(sys.argv[1:])
            self.logAgent = logger.LogAgent()
            self.loadReplyList()

            self.checkLock()
            self.createLock()


            # Prevent the list growing too much
            if len(self.repliedList) > 500:
                for i in xrange(len(self.repliedList) - 500):
                    del self.repliedList[0]

            # Set the timeout
            # FIXME: Duplicated in configlib.py!!!
            if self.config.alarmtime != 0:
                # 60 by default
                setDefaultSocketTimeout(self.config.alarmtime)

            self.CLock = thread.allocate_lock()
            self.repLock = thread.allocate_lock()
            # Main thread will create per-server threads
            self.mainThread = threading.Thread(target=self.main)
            self.mainThread.start()
            return 0
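The double parseCmdLine() call above is a deliberate precedence trick: argv is parsed once so the config-file location and other bootstrap options are known, the file is loaded (possibly overwriting them), and argv is parsed again so command-line flags win over file values. A minimal sketch of the same pattern, with a hypothetical ConfigAgent standing in for configlib:

import sys

class ConfigAgent:
    # Hypothetical stand-in for configlib.ConfigAgent, for illustration.
    def __init__(self):
        self.options = {'configfile': 'app.conf'}

    def parseCmdLine(self, argv):
        # Flags come in "--key value" pairs.
        for flag, value in zip(argv[::2], argv[1::2]):
            self.options[flag.lstrip('-')] = value

    def parseConfigFile(self):
        # File values overwrite whatever is set, which is why the
        # caller re-parses argv afterwards to restore flag priority.
        for line in open(self.options['configfile']):
            line = line.strip()
            if line and not line.startswith('#'):
                key, value = line.split(None, 1)
                self.options[key] = value

config = ConfigAgent()
config.parseCmdLine(sys.argv[1:])   # bootstrap: may set 'configfile'
config.parseConfigFile()            # file values overwrite flags...
config.parseCmdLine(sys.argv[1:])   # ...so apply the flags again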
Example #2
 def exploit(self):

     # win32 Python requires an explicit default socket timeout
     timeoutsocket.setDefaultSocketTimeout(5)

     # solaris 8: initial return-location guess, stepped 1024 bytes
     # per attempt
     retloc = 0x544a0

     for i in range(0, 30):
         self.setup()
         self.ttdb_makesploit()

         #raw_input("attach")

         if self.ISucceeded():
             self.log("Succeeded!")
             return 1

         try:
             #self.ttdb_dequeue_msgs()
             #self.ttdb_addsession()
             self.log("Attacking: %d/%d" % (i + 1, 30))
             self.ttdb_create_obj_args(retloc, self.scbuf)
         except Exception, msg:
             print str(msg)

         time.sleep(2)
         # check if my shell listener accepted a connection
         if self.ISucceeded():
             self.log("Succeeded!")
             return 1

         retloc += 1024
Example #3
File: ftptool.py Project: sniku/ftptool
 def connect(cls, host, port=21, user=None, password=None, account=None,
         ftp_client=ftplib.FTP, debuglevel=0, timeout=None):
     """Connect to host, using port. If user is given, login with given
     user, password and account. The two latter can be None, in which case
     ftplib will set the password to 'anonymous@'. You can choose which
     class to instance by means of ftp_client.
     """
     ftp_obj = ftp_client()
     ftp_obj.set_debuglevel(debuglevel)
     # Hack alert: Make socket.socket be the timeoutsocket instead of the
     # real socket.socket when a timeout is specified.
     if timeout and not timeoutsocket:
         raise ImportError("previously failed to import timeoutsocket")
     elif timeout and timeoutsocket:
         timeoutsocket.setDefaultSocketTimeout(timeout)
         socket.socket = timeoutsocket.timeoutsocket
     ftp_obj.connect(host, port)
     # Then restore the real socket.socket and its default timeout; the
     # saved original only exists if timeoutsocket was actually imported.
     if timeoutsocket:
         socket.socket = socket._no_timeoutsocket
         timeoutsocket.setDefaultSocketTimeout(None)
     # And log in.
     if user:
         ftp_obj.login(user, password, account)
     return cls(ftp_obj)
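A short usage sketch for the classmethod above; the host and credentials are placeholders, and current_directory is assumed to be an attribute of ftptool's FTPHost as in the upstream package:

from ftptool import FTPHost

# Passing timeout=30 exercises the timeoutsocket branch above, so a
# stalled TCP connect raises instead of hanging forever.
a_host = FTPHost.connect("ftp.example.com", user="anonymous",
                         password="guest@example.com", timeout=30)
print a_host.current_directory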
Example #4
def set_socket_timeout(n):
	"""Set the system socket timeout."""
	if hasattr(socket, "setdefaulttimeout"):
		socket.setdefaulttimeout(n)
	else:
		# Python 2.2 and earlier need to use an external module.
		import timeoutsocket
		timeoutsocket.setDefaultSocketTimeout(n)
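set_socket_timeout() is the usual compatibility shim from that era: Python 2.3 gained socket.setdefaulttimeout(), while older interpreters needed the external timeoutsocket module. A small usage sketch with a placeholder URL:

import urllib

set_socket_timeout(15)  # every socket created from here on times out

try:
    page = urllib.urlopen("http://example.com/").read()
except Exception, e:
    # Depending on which branch ran, a stall surfaces as
    # socket.timeout or timeoutsocket.Timeout.
    print "fetch failed:", e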
Example #5
File: cia.py Project: vmiklos/vmexam
def cmd_cia(data, server, witem):
	"""data - contains the parameters for /dict
server - the active server in window
witem - the active window item (eg. channel, query)
        or None if the window is empty"""
	timeoutsocket.setDefaultSocketTimeout(20)
	try:
		sock = urllib.urlopen("http://cia.vc/stats/author/Miklos%20Vajna%20%3Cvmiklos%40frugalware.org%3E")
		data = sock.read()
		sock.close()
	except timeoutsocket.Timeout, s:
		print s
		return
Example #6
File: mu.py Project: tsondt/Canvas
    def run(self):
        import timeoutsocket
        timeoutsocket.setDefaultSocketTimeout(self.timeout)

        self.s.listen(1)
        self.s.set_timeout(self.timeout)
        try:
            (c, addr) = self.s.accept()
            self.accepted = 1
            self.buf = self.recvstuff(c)
        except timeoutsocket.Timeout, msg:
            self.error = 1
            self.buf = "Timeout has been reached: %s" % str(msg)
            self.lock = 0
Example #7
def print_valid_candidates():
    timeoutsocket.setDefaultSocketTimeout(10)
    first_line = 1  #Used to keep track of commas
    for url in candidates:
        wrote = 0
        try:
            #f=urlopen(url+"?hostfile=1")
            f = urlopen(url + "?client=TEST&version=GWC0.8.5&hostfile=1")
            if valid_file(f):
                wrote = print_candidate(url, 1, first_line)
            else:
                wrote = print_candidate(url, 0, first_line, "malformed")
            f.close()
        except:
            wrote = print_candidate(url, 0, first_line, "unreachable")
        first_line = first_line and not wrote
Example #8
def print_valid_candidates():
    timeoutsocket.setDefaultSocketTimeout(60)
    first_line = 1  # Used to keep track of commas
    for url in candidates:
        wrote = 0
        try:
            # f=urlopen(url+"?hostfile=1")
            f = urlopen(url + "?client=TEST&version=GWC0.8.5&hostfile=1")
            if valid_file(f):
                wrote = print_candidate(url, 1, first_line)
            else:
                wrote = print_candidate(url, 0, first_line, "malformed")
            f.close()
        except:
            wrote = print_candidate(url, 0, first_line, "unreachable")
        first_line = first_line and not wrote
Example #9
    def exploit(self):

        timeoutsocket.setDefaultSocketTimeout(5)
        self.ypbind_makesploit()
        self.setup()

        self.result = self.ypbindproc_domain(self.domain)
        if self.result == 2:
            self.log("RPC call failed.")
        elif self.result == 1:
            self.log("RPC call succeeded. Exploit most likely failed.")
        else:
            self.log("Unexpected response: %d" % self.result)

        time.sleep(2)

        if self.ISucceeded():
            self.log("Succeeded!")
            return 1

        return 0
Example #10
def http_get(url, proxy=None, header=None):
	#if proxy:
	#	log_debug('http_get(): url=%s, header=%s, proxy=%s:%s' % (url, header, proxy['ip'], proxy['port']))
	#else:
	#	log_debug('http_get(): url=%s, header=%s' % (url, header))
	s = ''
	timeoutsocket.setDefaultSocketTimeout(SOCKET_TIMEOUT)
	req = urllib2.Request(url)
	try:
		if proxy:
			proxy_handler = urllib2.ProxyHandler({'http': 'http://%s:%s/' % (proxy['ip'], proxy['port'])})
			opener = urllib2.build_opener(proxy_handler)
		else:
			opener = urllib2.build_opener()
		if header:
			opener.addheaders = header
		response = opener.open(url)
		s = response.read()
	except timeoutsocket.Timeout:
		pass
	except urllib2.URLError, e:
		pass
	return s
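A usage sketch for http_get() above; SOCKET_TIMEOUT is the module-level constant the function expects, and the proxy address is a placeholder:

SOCKET_TIMEOUT = 20

# Plain fetch.
body = http_get('http://example.com/')

# The same fetch through a hypothetical local proxy, with an extra header.
body = http_get('http://example.com/',
                proxy={'ip': '127.0.0.1', 'port': '8080'},
                header=[('User-Agent', 'Mozilla/5.0')])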
Example #11
    def exploit(self):

        # win32 Python requires an explicit default socket timeout
        timeoutsocket.setDefaultSocketTimeout(5)
        self.setInfo("%s attacking %s:%d" % (NAME, self.host, self.port))

        self.setup()
        self.xdmi_makesploit()
        try:
            self.xdmi_addcomponent(self.expbuf, self.scbuf)
        except:
            pass

        time.sleep(8)
        #check if my shell listener has accepted a connection
        if self.ISucceeded():
            self.setInfo("%s attacking %s:%d - done (succeeded!)" %
                         (NAME, self.host, self.port))
            self.log("Succeeded!")
            return 1

        self.setInfo("%s attacking %s:%d - done (failed!)" %
                     (NAME, self.host, self.port))
        return 0
Example #12
    def pwn_it(self, alternate_fname=None):
        """
        Attempts to exploit the LFI.
        """

        ret = 0
        vhost = self.vhost
        uri = self.path

        self.setInfo("Running %s against %s [vhost:%s]" % (NAME, self.host, self.vhost))

        # First of all we need to change the default socket timeout
        # as Immunity's spike is _very_ slow. The original value will be
        # restored before ending the function.
        old_timeout = timeoutsocket.getDefaultSocketTimeout()
        timeoutsocket.setDefaultSocketTimeout(BIG_TIMEOUT)

        if alternate_fname:
            logging.info('Attempting to fetch %s' % alternate_fname)
            if alternate_fname.startswith("../"):
                logging.warning('Relative path detected, only working against Windows!')
                ret, content = self.perform_lfi(vhost, uri, alternate_fname, absolute_path=False)
            else:
                # To allow users to use Windows path we need a few modifications.
                if alternate_fname[1:3] == ':\\':
                    logging.warning('Detected a Windows path, normalizing it.')
                    alternate_fname = '/' + alternate_fname.replace('\\', '/')
                    logging.info('New path is now: %s' % alternate_fname)
                ret, content = self.perform_lfi(vhost, uri, alternate_fname, absolute_path=True)
            if not ret:
                logging.info('Server returned:')
                logging.info('\n' + content)
                timeoutsocket.setDefaultSocketTimeout(old_timeout)
                return 1
            # The LFI failed: restore the old timeout before reporting it.
            timeoutsocket.setDefaultSocketTimeout(old_timeout)
            return 0
        else:
            ret = self.get_confluence_database(self.vhost, uri)
            timeoutsocket.setDefaultSocketTimeout(old_timeout)
            if not ret:
                return 1
            else:
                return 0
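The save/restore around BIG_TIMEOUT above has to be repeated on every exit path, which is easy to miss. With the same timeoutsocket API, a try/finally gives a single restore point; a minimal sketch:

def with_big_timeout(func, *args):
    """Run func(*args) under BIG_TIMEOUT and restore the previous
    default timeout on every exit path, return or exception."""
    old_timeout = timeoutsocket.getDefaultSocketTimeout()
    timeoutsocket.setDefaultSocketTimeout(BIG_TIMEOUT)
    try:
        return func(*args)
    finally:
        timeoutsocket.setDefaultSocketTimeout(old_timeout)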
Example #13
	def getArduinoTemp(self, ip_addr, port, timeout, retry):
		temp = None
		conn = None
		for i in range(retry):
			try:
				#conn = http.client.HTTPConnection(ip_addr, port, timeout)

				if self.logger:
					self.logger.info (strftime("[%H:%M:%S]: ", localtime()) + "getArduinoTemp HTTP connection")

				# timeout must be passed by keyword: the third positional
				# parameter of httplib.HTTPConnection is 'strict'
				conn = httplib.HTTPConnection(ip_addr, port, timeout=timeout)

				if self.logger:
					self.logger.info (strftime("[%H:%M:%S]: ", localtime()) + "getArduinoTemp request")

				conn.request("GET", "/temp")
				conn.sock.settimeout(timeout)
				socket.setdefaulttimeout(timeout)
				timeoutsocket.setDefaultSocketTimeout(timeout)

				if self.logger:
					self.logger.info (strftime("[%H:%M:%S]: ", localtime()) + "getArduinoTemp get response")

				try:
					r1 = conn.getresponse()
				except socket.timeout:
					print (strftime("[%H:%M:%S]: EXCEPTION ", localtime()) + traceback.format_exc())

					if self.logger:
						self.logger.error((strftime("[%H:%M:%S]: EXCEPTION ", localtime()) + traceback.format_exc()), exc_info=True)

					temp = None
					continue

				if self.logger:
					self.logger.info (strftime("[%H:%M:%S]: ", localtime()) + "getArduinoTemp check response")

				if (r1.status == 200):
					lines = r1.read()
					line = lines.split('\n')
					temp = None
					if (len(lines.split()) == 3):
						temp = float(line[2])
					if (temp == self.ARDUINO_NOT_READY):
						temp = None
					else:
						break
				else:
					print (r1.status, r1.reason)
					temp = None
	
			except:
				print (strftime("[%H:%M:%S]: EXCEPTION ", localtime()) + traceback.format_exc())

				if self.logger:
					self.logger.error((strftime("[%H:%M:%S]: EXCEPTION ", localtime()) + traceback.format_exc()), exc_info=True)

				temp = None
				sleep(2)	#wait for device to get ready again
		
		if conn:
			conn.close()

		return temp
Example #14
def spiderPlanet(only_if_new=False):
    """ Spider (fetch) an entire planet """
    log = planet.logger

    global index
    index = True

    timeout = config.feed_timeout()
    try:
        socket.setdefaulttimeout(float(timeout))
        log.info("Socket timeout set to %d seconds", timeout)
    except:
        try:
            import timeoutsocket
            timeoutsocket.setDefaultSocketTimeout(float(timeout))
            log.info("Socket timeout set to %d seconds", timeout)
        except:
            log.warning("Timeout set to invalid value '%s', skipping", timeout)

    from Queue import Queue
    from threading import Thread

    fetch_queue = Queue()
    parse_queue = Queue()

    threads = {}
    http_cache = config.http_cache_directory()
    # Should this be done in config?
    if http_cache and not os.path.exists(http_cache):
        os.makedirs(http_cache)

    if int(config.spider_threads()):
        # Start all the worker threads
        for i in range(int(config.spider_threads())):
            threads[i] = Thread(target=httpThread,
                                args=(i, fetch_queue, parse_queue, log))
            threads[i].start()
    else:
        log.info("Building work queue")

    # Load the fetch and parse work queues
    for uri in config.subscriptions():
        # read cached feed info
        sources = config.cache_sources_directory()
        feed_source = filename(sources, uri)
        feed_info = feedparser.parse(feed_source)

        if feed_info.feed and only_if_new:
            log.info("Feed %s already in cache", uri)
            continue
        if feed_info.feed.get('planet_http_status', None) == '410':
            log.info("Feed %s gone", uri)
            continue

        if threads and _is_http_uri(uri):
            fetch_queue.put(item=(uri, feed_info))
        else:
            parse_queue.put(item=(uri, feed_info, uri))

    # Mark the end of the fetch queue
    for thread in threads.keys():
        fetch_queue.put(item=(None, None))

    # Process the results as they arrive
    feeds_seen = {}
    while fetch_queue.qsize() or parse_queue.qsize() or threads:
        while parse_queue.qsize():
            (uri, feed_info, feed) = parse_queue.get(False)
            try:

                if not hasattr(feed, 'headers') or int(feed.headers.status) < 300:
                    options = {}
                    if hasattr(feed_info, 'feed'):
                        options['etag'] = \
                            feed_info.feed.get('planet_http_etag',None)
                        try:
                            modified = time.strptime(
                                feed_info.feed.get('planet_http_last_modified',
                                                   None))
                        except:
                            pass

                    data = feedparser.parse(feed, **options)
                else:
                    data = feedparser.FeedParserDict({
                        'version': None,
                        'headers': feed.headers,
                        'entries': [],
                        'feed': {},
                        'href': feed.url,
                        'bozo': 0,
                        'status': int(feed.headers.status)
                    })

                # duplicate feed?
                id = data.feed.get('id', None)
                if not id: id = feed_info.feed.get('id', None)

                href = uri
                if data.has_key('href'): href = data.href

                duplicate = None
                if id and id in feeds_seen:
                    duplicate = id
                elif href and href in feeds_seen:
                    duplicate = href

                if duplicate:
                    feed_info.feed['planet_message'] = \
                        'duplicate subscription: ' + feeds_seen[duplicate]
                    log.warn('Duplicate subscription: %s and %s' %
                             (uri, feeds_seen[duplicate]))
                    if href: feed_info.feed['planet_http_location'] = href

                if id: feeds_seen[id] = uri
                if href: feeds_seen[href] = uri

                # complete processing for the feed
                writeCache(uri, feed_info, data)

            except Exception, e:
                import sys, traceback
                type, value, tb = sys.exc_info()
                log.error('Error processing %s', uri)
                for line in (traceback.format_exception_only(type, value) +
                             traceback.format_tb(tb)):
                    log.error(line.rstrip())

        time.sleep(0.1)

        for index in threads.keys():
            if not threads[index].isAlive():
                del threads[index]
                if not threads:
                    log.info("Finished threaded part of processing.")
Example #15
File: cia.py Project: vmiklos/darcs-hooks
def callback(patch):
	global config
	files = []
	repo = os.path.split(os.getcwd())[-1]
	patchname = patch.getElementsByTagName("name")[0].firstChild.toxml()
	try:
		patchlog = patch.getElementsByTagName("comment")[0].firstChild.toxml()
	except IndexError:
		patchlog = None
	hash = saxutils.unescape(patch.attributes['hash'].firstChild.toxml())
	if patchlog:
		log = "* %s\n%s" % (patchname, patchlog)
	else:
		log = "* %s" % patchname
	url = ""
	if config.darcsweb_url:
		url = "<url>%s?r=%s;a=darcs_commitdiff;h=%s;</url>" % (config.darcsweb_url, repo, hash)

	patchdata = getpatch(hash)
	for i in patchdata:
		i = i.strip().replace("./", "")
		if i.startswith("adddir "):
			files.append(re.sub(r"^adddir (.*)", r"\1", i))
		elif i.startswith("addfile "):
			files.append(re.sub(r"^addfile (.*)", r"\1", i))
		elif i.startswith("rmdir "):
			files.append(re.sub(r"^rmdir (.*)", r"\1", i))
		elif i.startswith("rmfile "):
			files.append(re.sub(r"^rmfile (.*)", r"\1", i))
		elif i.startswith("binary "):
			files.append(re.sub(r"^binary (.*)", r"\1", i))
		elif i.startswith("hunk "):
			files.append(re.sub(r"^hunk (.*) [0-9]+", r"\1", i))
		elif i.startswith("move "):
			files.append(re.sub(r"^move (.*) .*", r"\1", i))
			files.append(re.sub(r"^move .* (.*)", r"\1", i))
	# remove duplicates; order is not preserved, but that does not
	# matter here
	uniq = {}
	map(uniq.__setitem__, files, [])
	files = uniq.keys()

	msg = """<?xml version="1.0" ?>
<message>
	<generator>
		<name>CIA plugin for darcs-hooks.py</name>
		<version>%(version)s</version>
		<url>http://vmiklos.hu/project/darcs-hooks</url>
	</generator>
	<source>
		<project>%(project)s</project>
		<module>%(module)s</module>
	</source>
	<timestamp>%(timestamp)s</timestamp>
	<body>
		<commit>
			<author>%(author)s</author>
			<files>
				<file>%(files)s</file>
			</files>
			<log>%(log)s</log>
			%(url)s
		</commit>
	</body>
</message>
	""" % {
		'version': __version__,
		'project': config.project,
		'module': repo,
		'timestamp': str(int(time.time())),
		'author': patch.attributes['author'].firstChild.toxml(),
		'files': "</file>\n<file>".join(files),
		'log': log,
		'url': url
	}

	if config.post:
		timeoutsocket.setDefaultSocketTimeout(20)
		xmlrpclib.ServerProxy(config.rpc_uri).hub.deliver(msg)
	else:
		print msg
Example #16
class MirrorError(Exception):
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)


try:
    sys.argv[1]
except IndexError:
    print "Usage: " + sys.argv[0] + " <mirror-database>"
    sys.exit(0)

# If a mirror doesn't respond in this time, consider it dead.
timeoutsocket.setDefaultSocketTimeout(timeout)

if verbose:
    print "Opening mirror database " + sys.argv[1] + "..."

f = open(sys.argv[1])
lines = f.readlines()

for line in lines:
    line = string.strip(line)
    splitline = string.split(line)

    mcountry = splitline[0]

    p = re.compile(r'^(f|ht)tp://(?P<hn>[^/]*)/?.*$')
    m = p.search(splitline[1])
Example #17
	SQLLogBase='/var/data1/LOG'
	SQLLogBaseDOS='I:/LOG'
	Slash="/"
	EXE_PREFIX = ''


SQLLogBaseDOSP='C:/SIGMA'

OK_SYNCHRO_COUNT=8
TIMEOUT_TIME=45
# store log directory
SQLLogBaseP=''
fVPN=0
cRemoteHostName='0.0.0.0'
cServerHostName='192.168.77.1'
timeoutsocket.setDefaultSocketTimeout(TIMEOUT_TIME)
TRY_LOOPS=1

LogKnjigovodstvo=0
LogProdavnica=0
# bookkeeping site
#k_site='2'
# store site
#p_site='35'

# site par: 0 = peer
#           1 = bookkeeping site
#           2 = store site
#           3 = bookkeeping host
#           4 = store host
#           5 = bookkeeping privpath
Example #18
                elif command == '<postfilter>' or command == '<postfiltro>':
                    print _("Config file error (line %d): <postfilter> inside <global>") % linecount
                    sys.exit(1)

                elif command == '</postfilter>' or command == '</postfiltro>':
                    print _("Config file error (line %d): </postfilter> inside <global>") % linecount
                    sys.exit(1)

                elif command == 'smtpserver' or command == 'servidorsmtp':
                    self.smtpserver = value

                elif command == 'timeout':
                    self.alarmtime = int(value)
                    if self.alarmtime != 0:
                        import timeoutsocket
                        timeoutsocket.setDefaultSocketTimeout(self.alarmtime)

                elif command == 'reconnect' or command == 'reconectar':
                    if lowval == 'yes' or lowval == 'si':
                        self.reconnect = 1
                    else:
                        self.reconnect = 0

                elif command == 'waitbeforereconnection' or command == 'esperaantesreconectar':
                    self.reconnect_wait = int(value)

                elif command == 'moretime' or command == 'mastiempo':
                    self.more_time = int(value)

                elif command == 'smtpport' or command == 'puertosmtp':
                    self.smtpport = int(value)
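The parser above accepts each keyword in either English or Spanish (timeout, reconnect/reconectar, smtpserver/servidorsmtp, and so on). A hypothetical config-file fragment it would accept:

smtpserver smtp.example.com
smtpport 25
timeout 60
reconnect yes
waitbeforereconnection 30
moretime 5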
Example #19
"""$Id: __init__.py 699 2006-09-25 02:01:18Z rubys $"""

__author__ = "Sam Ruby <http://intertwingly.net/> and Mark Pilgrim <http://diveintomark.org/>"
__version__ = "$Revision: 699 $"
__date__ = "$Date: 2006-09-25 02:01:18 +0000 (Mon, 25 Sep 2006) $"
__copyright__ = "Copyright (c) 2002 Sam Ruby and Mark Pilgrim"

import socket
if hasattr(socket, 'setdefaulttimeout'):
  socket.setdefaulttimeout(10)
  Timeout = socket.timeout
else:
  import timeoutsocket
  timeoutsocket.setDefaultSocketTimeout(10)
  Timeout = timeoutsocket.Timeout

import urllib2
import logging
from logging import *
from xml.sax import SAXException
from xml.sax.xmlreader import InputSource
import re
import xmlEncoding
import mediaTypes
from httplib import BadStatusLine

MAXDATALENGTH = 200000

def _validate(aString, firstOccurrenceOnly, loggedEvents, base, encoding, selfURIs=None):
  """validate RSS from string, returns validator object"""
  from xml.sax import make_parser, handler
Example #20
import re
import string
import traceback
import SocketServer
import timeoutsocket
from time import strftime, localtime
from xml.dom.minidom import parse, parseString
from SocketServer import BaseRequestHandler, ThreadingTCPServer

##########################################
# import modules needed for testExec
##########################################
import os
import process

timeoutsocket.setDefaultSocketTimeout(None)


xoom_session = None
default_xoom_server_port = 50002
xoom_server_port = 50002  # global
USER_PORT = False
import threading
import logging
import sys
import Queue
from utilLib import OutFile

# globalXoomQueue is just an instance of the python Queue object
gxq = Queue.Queue()
Example #21
	def timeout(self, timeout = 10):
		timeoutsocket.setDefaultSocketTimeout(timeout)
Example #22
ISO_8601_DATETIME = '%Y-%m-%dT%H:%M:%SZ'

USER_AGENT = 'pydelicious.py/%s %s' % (__version__, __url__)

DEBUG = 0
if 'DLCS_DEBUG' in os.environ:
    DEBUG = int(os.environ['DLCS_DEBUG'])

# Taken from FeedParser.py
# timeoutsocket allows feedparser to time out rather than hang forever on ultra-slow servers.
# Python 2.3 now has this functionality available in the standard socket library, so under
# 2.3 you don't need to install anything.  But you probably should anyway, because the socket
# module is buggy and timeoutsocket is better.
try:
    import timeoutsocket  # http://www.timo-tasi.org/python/timeoutsocket.py
    timeoutsocket.setDefaultSocketTimeout(DLCS_REQUEST_TIMEOUT)
except ImportError:
    import socket
    if hasattr(socket, 'setdefaulttimeout'):
        socket.setdefaulttimeout(DLCS_REQUEST_TIMEOUT)
if DEBUG:
    print >> sys.stderr, 'Set socket timeout to %s seconds' \
        % DLCS_REQUEST_TIMEOUT


### Utility classes

class _Waiter:

    """Waiter makes sure a certain amount of time passes between
    successive calls of `Waiter()`.
Example #23
from sha import sha
#from string import substring
import re
import binascii
import sys,os
import urlparse,urllib

from BitTornado.bencode import *
from BitTornado.zurllib import urlopen as zurlopen
from BitCrawler.aurllib import urlopen as aurlopen

import timeoutsocket
import policy
timeout = policy.get_policy().get(policy.DEFAULT_SOCKET_TIMEOUT)
timeoutsocket.setDefaultSocketTimeout(timeout)
del timeout
del policy

def announce_to_scrape(url):
    items = list(urlparse.urlparse(url))
    path = items[2]
    return urlparse.urljoin(url,
                            os.path.basename(path).replace('announce','scrape'))

def get_scrape_by_announce_url(announce_url,infohash):
    seeder,peerer = '?','?'
    if not announce_url:
        return seeder,peerer
    scrape_url = announce_to_scrape(announce_url)

    len_hash = len(infohash)
Example #24
USER_AGENT = "pydelicious.py/%s %s" % (__version__, __url__)

DEBUG = 0
if "DLCS_DEBUG" in os.environ:
    DEBUG = int(os.environ["DLCS_DEBUG"])


# Taken from FeedParser.py
# timeoutsocket allows feedparser to time out rather than hang forever on ultra-slow servers.
# Python 2.3 now has this functionality available in the standard socket library, so under
# 2.3 you don't need to install anything.  But you probably should anyway, because the socket
# module is buggy and timeoutsocket is better.
try:
    import timeoutsocket  # http://www.timo-tasi.org/python/timeoutsocket.py

    timeoutsocket.setDefaultSocketTimeout(DLCS_REQUEST_TIMEOUT)
except ImportError:
    import socket

    if hasattr(socket, "setdefaulttimeout"):
        socket.setdefaulttimeout(DLCS_REQUEST_TIMEOUT)
if DEBUG:
    print >> sys.stderr, "Set socket timeout to %s seconds" % DLCS_REQUEST_TIMEOUT


### Utility classes


class _Waiter:
    """Waiter makes sure a certain amount of time passes between
    successive calls of `Waiter()`.
Example #25
  def fetch_http (self, host, port, path, realm = None, method = "GET", body = None, ctype = None, oheaders=[]):
    if not port:
      port = 80
    else:
      try:
        port = int (port)
      except ValueError:
        port = 80

    cookie = self.getCookie (host, path)

    timeoutsocket.setDefaultSocketTimeout(20)
    
    h = httplib.HTTP (host, port)
    if method:
      h.putrequest (method, path)
    else:
      h.putrequest ("GET", path)
    h.putheader ("Host", host)
    h.putheader ("User-Agent", self._user_agent)
    h.putheader ("Accept", string.join (self._types, ", "))
    h.putheader ("Accept-Language", string.join (self._languages, ", "))
    if self._last_url:
      h.putheader ("Referer", self._last_url)
    if cookie:
      h.putheader ("Cookie", cookie)
    if realm:
      auth = self.getAuth (host, port, realm)
      h.putheader ("Authorization", "Basic %s" % auth)
    if ctype:
      h.putheader ("Content-Type", ctype)
    if body:
      h.putheader ("Content-Length", str(len(body)))
    for (k, v) in oheaders:
      h.putheader (k, v)
    for (k, v) in self._extra_headers:
      h.putheader (k, v)
      
    h.endheaders ()

    if self._ul_callback is not None:
      while 1:
        buf = self._ul_callback()
        if buf is None: break
        h.send(buf)
    elif body:
      h.send(body)

    errcode, errmsg, headers = h.getreply()

    f = h.getfile()
    if self._dl_callback is not None:
        self._dl_callback(f)
        page = ""
    else:
        page = f.read()
    f.close()

    if errcode == -1:
      return errcode, errmsg, page, headers

    if headers.has_key ("set-cookie"):
      # This isn't actually "headers", it's a MimeMessage (rfc822.Message).
      # When a header appears more than once you have to ask for all
      # matching instances and decode them yourself.
      for line in headers.getallmatchingheaders ("set-cookie"):
        header, value = string.split (line, ':', 1)
        m = Cookie.Morsel (string.strip(value))
        self.addCookie (m, host)

    if errcode == 302:
      self._last_url = self._current_url
      newurl = headers['location']
      return self.fetchpage(newurl)

    if errcode == 401:
      if headers.has_key('www-authenticate'):
        auth_header = headers['www-authenticate']
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', auth_header)
        if match:
          scheme, realm = match.groups()
          headers['virtualbrowser-auth-scheme'] = scheme
          headers['virtualbrowser-auth-realm'] = realm
          if string.lower(scheme) == 'basic':
            # Only attempt a retry if we have an authorization to send;
            # pass the original method, body and headers through.
            if self.getAuth (host, port, realm):
              return self.fetch_http (host, port, path, realm, method,
                                      body, ctype, oheaders)
            

    return errcode, errmsg, page, headers