Example no. 1
def api_request(url,
                headers,
                body=None,
                timeout=5,
                use_proxy=False,
                proxy_addr=None):
    opener = None
    if use_proxy:
        opener = urllib2.build_opener(
            urllib2.ProxyHandler({'http': proxy_addr}))
    else:
        opener = urllib2.build_opener(urllib2.BaseHandler())
    urllib2.install_opener(opener)

    #if body:
    #    if headers.get('Content-Type', None) == 'application/json':
    #        body = json.dumps(body)
    #    else:
    #        body = urllib.urlencode(body)
    request = urllib2.Request(url, body, headers)
    try:
        f = urllib2.urlopen(request, timeout=timeout)
        coding = f.info().get('Content-Encoding')
        if coding == 'gzip':
            result = uncompress(f.read())
        else:
            result = f.read()
        return result
    except Exception:
        import traceback
        logging.error('exception while trying to fetch url: %s, reason: %s' %
                      (url, traceback.format_exc()))
        return None
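
The uncompress helper is assumed by this excerpt; a minimal Python 2 sketch
for gzip-encoded bodies (the name comes from the example above, the body is
an assumption):

import gzip
import StringIO

def uncompress(data):
    # GzipFile needs a file-like object, so wrap the raw bytes first.
    return gzip.GzipFile(fileobj=StringIO.StringIO(data)).read()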
Example no. 2
    def search(self, what, cat='all'):
        """search the torrent parsing the site"""
        # Sign in:
        if self.search_auth:
            self._sign_in()
            opener = self.opener
        else:
            opener = urllib2.build_opener(urllib2.BaseHandler())
        ret = []
        page = 0
        while page < self.PAGE_NUMBER:
            results = []
            parser = self.FilelistParser(results, self.url)
            url = self.url + '/browse.php?search=%s&cat=%s&searchin=0&sort=0&page=%d' % (
                what, self.supported_categories[cat], page)
            f = opener.open(url)
            dat = f.read().decode('iso-8859-1', 'replace')
            results_re = re.compile("(?s)<div class='cblock-innercontent'>.*")
            for match in results_re.finditer(dat):
                res_tab = match.group(0)
                parser.feed(res_tab)
                parser.close()
                break
            if len(results) <= 0:
                break
            page += 1
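
Note that what is interpolated into the query string unescaped; a hedged
variant would escape it first with Python 2's urllib.quote (the helper name
below is hypothetical):

import urllib

def build_search_url(base, what, cat_id, page):
    # Escape the user-supplied term before putting it into the query string.
    return base + '/browse.php?search=%s&cat=%s&searchin=0&sort=0&page=%d' % (
        urllib.quote(what), cat_id, page)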
Example no. 3
def openUrl(url, timeout=_DEFAULT_TIMEOUT, modified=None, agent=_CLIENT_VERSION):
    response = None
    try:
        try:
            request = urllib2.Request(url)
            request.add_header('User-Agent', agent)
            if modified:
                request.add_header('If-Modified-Since', formatdate(modified))
                urlOpener = urllib2.build_opener(_NotModifiedHandler())
                response = urlOpener.open(request, timeout=timeout)
            else:
                urlOpener = urllib2.build_opener(urllib2.BaseHandler())
                response = urlOpener.open(request, timeout=timeout)
            return _HttpResponse(response)
        except urllib2.HTTPError as e:
            LOG_WARNING('urllib2.HTTPError', e.code, url)
        except urllib2.URLError as e:
            LOG_WARNING('urllib2.URLError', e.reason, url)
        except Exception as e:
            LOG_ERROR("Client couldn't download file", e, url)

    finally:
        if response:
            response.close()

    return _HttpResponse(response)
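
Several of these examples pass a NotModifiedHandler (or _NotModifiedHandler)
to build_opener without showing it. A minimal sketch, assuming the well-known
recipe that turns a 304 response into a normal result object whose code
attribute the caller can inspect:

import urllib2

class NotModifiedHandler(urllib2.BaseHandler):
    def http_error_304(self, req, fp, code, message, headers):
        # Wrap the 304 so it is returned like a normal response with a
        # .code attribute instead of raising HTTPError.
        addinfourl = urllib2.addinfourl(fp, headers, req.get_full_url())
        addinfourl.code = code
        return addinfourl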
Example no. 4
    def __run_download(self, url, modified_time, callback, **params):
        startTime = time.time()
        try:
            try:
                fh = remote_file = None
                last_modified = expires = None
                req = urllib2.Request(url)
                req.add_header('User-Agent', _getClientVersion())
                headers = params.get('headers') or {}
                for name, value in headers.iteritems():
                    req.add_header(name, value)

                if modified_time and isinstance(modified_time, str):
                    req.add_header('If-Modified-Since', modified_time)
                    opener = urllib2.build_opener(NotModifiedHandler())
                    fh = opener.open(req, timeout=10)
                    headers = fh.info()
                    if hasattr(fh, 'code'):
                        code = fh.code
                        if code in (304, 200):
                            info = fh.info()
                            last_modified = info.getheader('Last-Modified')
                            expires = info.getheader('Expires')
                        if code == 200:
                            remote_file = fh.read()
                else:
                    opener = urllib2.build_opener(urllib2.BaseHandler())
                    fh = opener.open(req, timeout=10)
                    info = fh.info()
                    last_modified = info.getheader('Last-Modified')
                    expires = info.getheader('Expires')
                    remote_file = fh.read()
                if expires is None:
                    expires = makeHttpTime(time.gmtime())
                else:
                    ctime = getSafeDstUTCTime()
                    expiresTmp = parseHttpTime(expires)
                    if expiresTmp > ctime + _MAX_LIFE_TIME or expiresTmp < ctime:
                        expires = makeHttpTime(
                            time.gmtime(time.time() + _MAX_LIFE_TIME))
            except urllib2.HTTPError as e:
                LOG_WARNING('Http error. Code: %d, url: %s' % (e.code, url))
            except urllib2.URLError as e:
                LOG_WARNING('Url error. Reason: %s, url: %s' %
                            (str(e.reason) if isinstance(e.reason, basestring)
                             else 'unknown', url))
            except ValueError as e:
                LOG_WARNING('Value error. Reason: %s, url: %s' % (e, url))
            except Exception as e:
                LOG_ERROR("Client couldn't download file.", e, url)

        finally:
            if fh:
                fh.close()

        _LOG_EXECUTING_TIME(startTime, '__run_download', 10.0)
        callback(remote_file, last_modified, expires)
        return
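
makeHttpTime, parseHttpTime and getSafeDstUTCTime are helpers assumed by the
example; a hedged sketch of the two converters, under the assumption that
they map between HTTP date strings and epoch seconds (getSafeDstUTCTime is
not reconstructed here):

import calendar
from email.utils import formatdate, parsedate

def makeHttpTime(timeStruct):
    # Format a time.struct_time as an RFC 1123 HTTP date string.
    return formatdate(calendar.timegm(timeStruct), usegmt=True)

def parseHttpTime(value):
    # Parse an HTTP date string back to epoch seconds (None on failure).
    parsed = parsedate(value)
    return calendar.timegm(parsed) if parsed is not None else None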
Example no. 5
    def download_torrent(self, url):
        opener = urllib2.build_opener(urllib2.BaseHandler())
        file, path = tempfile.mkstemp(".torrent")
        file = os.fdopen(file, "wb")
        dat = opener.open(url).read()
        file.write(dat)
        file.close()

        print path + " " + url
Example no. 6
    def record_video(self, record_path, record_time=10):

        if os.path.exists(record_path):
            os.remove(record_path)

        data = {
            "type": "request",
            "seq": "1",
            "params": {
                "method": "get",
                "preview": {
                    #"channels":[1,2,3],
                    #"resolutions":["VGA","HD","HD"],
                    #"audio":["enable","disable","default"]
                    "channels": [0],
                    "resolutions": ["VGA"],
                    "audio": ["default"]
                }
            }
        }

        data = json.dumps(data)
        data = "\r\n".join([
            "--record_video", "Content-Type:application/json",
            "Content-Length:%s" % len(data), "", data
        ])

        handler = urllib2.BaseHandler()
        req = urllib2.Request(url="http://%s:%s/stream" %
                              (self.ip, self.vhttpd_port),
                              data=data)
        req.add_header("Content-Type", "multipart/mixed;boundary=record_video")
        req.add_header("Connection", "keep-alive")
        opener = urllib2.build_opener(handler)
        f = opener.open(req, timeout=10)

        if record_time is None:
            record_time = 10
        start_time = time.time()
        data = ""
        while time.time() - start_time < record_time:
            rd_data = f.read(40240)
            if rd_data:
                data = "%s%s" % (data, rd_data)
            else:
                break
        record_file = open("%s.tmp" % record_path, "wb")
        record_file.write(data)
        record_file.close()

        data = self.__get_record_file_from_multipart_resp("%s.tmp" %
                                                          record_path)
        record_file = open(record_path, "wb")
        record_file.write(data)
        record_file.close()
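
A hypothetical invocation; the class name and address below are illustrative
only, since the excerpt shows just that the instance carries ip and
vhttpd_port:

cam = CameraClient(ip="192.168.1.10", vhttpd_port=8080)  # hypothetical class
cam.record_video("/tmp/preview.bin", record_time=5)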
Example no. 7
    def __run_download(self, url, modified_time, callback, **params):
        startTime = time.time()
        try:
            fh = file = None
            last_modified = expires = None
            req = urllib2.Request(url)
            req.add_header('User-Agent', _CLIENT_VERSION)
            if modified_time and isinstance(modified_time, str):
                req.add_header('If-Modified-Since', modified_time)
                opener = urllib2.build_opener(NotModifiedHandler())
                fh = opener.open(req, timeout=10)
                headers = fh.info()
                if hasattr(fh, 'code'):
                    code = fh.code
                    if code in (304, 200):
                        info = fh.info()
                        last_modified = info.getheader('Last-Modified')
                        expires = info.getheader('Expires')
                    if code == 200:
                        file = fh.read()
            else:
                opener = urllib2.build_opener(urllib2.BaseHandler())
                fh = opener.open(req, timeout=10)
                info = fh.info()
                last_modified = info.getheader('Last-Modified')
                expires = info.getheader('Expires')
                file = fh.read()
            if expires is None:
                expires = makeHttpTime(time.gmtime())
            else:
                ctime = getSafeDstUTCTime()
                expiresTmp = parseHttpTime(expires)
                if expiresTmp > ctime + _MAX_LIFE_TIME or expiresTmp < ctime:
                    expires = makeHttpTime(time.gmtime(time.time() + _MAX_LIFE_TIME))
        except urllib2.HTTPError as e:
            LOG_WARNING('Http error. Code: %d, url: %s' % (e.code, url))
        except urllib2.URLError as e:
            LOG_WARNING('Url error. Reason: %s, url: %s' % (str(e.reason), url))
        except Exception as e:
            LOG_ERROR("Client couldn't download file.", e, url)
        finally:
            if fh:
                fh.close()

        _LOG_EXECUTING_TIME(startTime, '__run_download', 10.0)
        callback(file, last_modified, expires)
        return
Example no. 8
    def download_torrent(self, url):
        # Sign in:
        if self.download_auth:
            self._sign_in()
            opener = self.opener
        else:
            opener = urllib2.build_opener(urllib2.BaseHandler())
        # Create a temporary file to write the torrent file into
        file, path = tempfile.mkstemp(".torrent")
        file = os.fdopen(file, "wb")
        # Download the torrent
        dat = opener.open(url).read()
        # Write it to the file
        file.write(dat)
        file.close()
        # Logging:
        logMsg = path + "; from: " + url + "\n"
        self.log(logMsg)
        print(path + " " + url)
Example no. 9
    def search(self, what, cat="all"):
        opener = urllib2.build_opener(urllib2.BaseHandler())
        ret = []
        i = 1
        while i < 35:
            results = []
            parser = self.SimpleSGMLParser(results, self.url)
            dat = ""
            for subcat in self.supported_categories[cat]:
                dat += opener.open(
                    self.url +
                    "/?page=search&term=%s&cat=%s&voir=%d&ordre=sd" %
                    (what, subcat, i)).read().decode(
                        "iso-8859-1",
                        "replace").replace("<b><font color=\"#474747\">",
                                           "").replace("</font></b>", "")
            parser.feed(dat)
            parser.close()
            if len(results) <= 0:
                break
            i += 1
Example no. 10
    def Login(self):

        loginurl = 'http://10.4.12.22/server/'
        headers_ = {
            'User-Agent':
            'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0',
            'Host': 'jwc.hqu.edu.cn'
        }
        data_ = urllib.urlencode({
            'UserName': '******',
            'UserPass': '******'
        })
        request = urllib2.Request(loginurl, data=data_, headers=headers_)
        print request.get_method()
        opener = urllib2.build_opener(urllib2.BaseHandler())

        try:
            response = opener.open(request)
        except urllib2.URLError as e:
            if hasattr(e, 'code'):
                print e.code
            if hasattr(e, 'reason'):
                print e.reason
            return None
        else:
            html = response.read()

            try:
                # Find the page's declared encoding.
                pattern = r'<meta.*?charset=([\w-]+)'
                encoding = re.search(pattern, html).group(1)
            except AttributeError:
                pass
            else:
                html = html.decode(encoding).encode('utf-8')
            return {'response': response, 'html': html}
Example no. 11
    def do_both(self, isGET):
        host = self.path.partition("://")[2].partition("/")[0]
        port = self.path.partition("://")[2].rpartition(":")[2]
        file = ""
        if port != "":
            try:
                int(port)
                port = ":" + port
                file = "/" + self.path.partition("://")[2].partition(
                    "/")[2].rpartition(":")[0]
            except ValueError:
                file = "/" + self.path.partition("://")[2].partition("/")[2]
                port = ""

        print "Host: " + host + "\nPort: " + port + "\nFile: " + file

        print filesCache

        finalFile = None

        #check for local copies
        if filesCache.contains(host + file):
            print "File found in cache!"
            f = filesCache.get(host + file)
            if not f.isExpired():
                print "Not Expired!"
                if self.isConditionalGet(f):
                    print "Is Conditional"
                    finalFile = CachedFile("temp/temp.html", 304,
                                           MessageHeaders(f.getHeaders()), '')
                    finalFile.isCachy = False
                else:
                    print "Not conditional"
                    finalFile = f
            else:
                print "File is expired!"
                #send conditional get to server
                # The port belongs between the host and the path.
                req = urllib2.Request("http://" + host + port + file)
                for h in self.headers:
                    if not self.isHopHeader(h):
                        req.add_header(h, self.headers.getheader(h))
                self.addHopHeaders(req)
                req.add_header("If-Modified-Since", f.getDate())
                url_handle = urllib2.build_opener(
                    urllib2.BaseHandler()).open(req)
                if hasattr(url_handle, 'code') and url_handle.code == 304:
                    print "File has not been modified."
                    #just return cached copy, updating the headers
                    f.updateHeaders(200, url_handle.info())
                    if self.isConditionalGet(f):
                        finalFile = CachedFile("temp/temp.html", 304,
                                               MessageHeaders(f.getHeaders()),
                                               '')
                        finalFile.isCachy = False
                    else:
                        finalFile = f
                else:
                    print "File has been modified."
                    filesCache.remove(f)
                    finalFile = CachedFile(host + file, url_handle.code,
                                           url_handle.info(),
                                           url_handle.read())
                    filesCache.add(finalFile)
        else:
            print "File not found in cache"
            # The port belongs between the host and the path.
            req = urllib2.Request("http://" + host + port + file)
            for h in self.headers:
                if not self.isHopHeader(h):
                    req.add_header(h, self.headers.getheader(h))
            self.addHopHeaders(req)
            try:
                url_handle = urllib2.build_opener(
                    urllib2.BaseHandler()).open(req)
                finalFile = CachedFile(host + file, url_handle.code,
                                       url_handle.info(), url_handle.read())
                filesCache.add(finalFile)
            except urllib2.HTTPError:
                if '304' in str(sys.exc_info()[1]):  #file not modified
                    temp = sys.exc_info()[1]
                    finalFile = CachedFile("temp/temp.html", 304, temp.info(),
                                           '')
                    finalFile.isCachy = False
                elif '404' in str(sys.exc_info()[1]):
                    temp = sys.exc_info()[1]
                    finalFile = CachedFile("temp/temp.html", 404, temp.info(),
                                           'File Not Found')
                    finalFile.isCachy = False
                elif '403' in str(sys.exc_info()[1]):
                    temp = sys.exc_info()[1]
                    finalFile = CachedFile("temp/temp.html", 403, temp.info(),
                                           'Forbidden')
                    finalFile.isCachy = False
                else:
                    print str(sys.exc_info()[1])
                    return

        #send the final file
        self.send_response(finalFile.getStatus())
        for header in finalFile.getHeaders():
            if not self.isHopHeader(header[0]):
                self.send_header(header[0], header[1])
        temp = self.headers.getheader('connection')
        if temp is not None:
            self.send_header('connection', temp)
        self.end_headers()
        if (isGET):
            self.wfile.write(finalFile.getBody())
        if not filesCache.contains(finalFile.name):
            finalFile.delete()
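
A hypothetical harness; ProxyHandler stands for the
BaseHTTPServer.BaseHTTPRequestHandler subclass that defines do_both, with
do_GET and do_HEAD delegating to it:

import BaseHTTPServer

# Hypothetical wiring: serve the caching proxy on local port 8000.
server = BaseHTTPServer.HTTPServer(('', 8000), ProxyHandler)
server.serve_forever()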
Example no. 12
    def __onReadRemoteFile(self, url, showImmediately, modified_time, headers):
        startTime = time.time()
        try:
            try:
                fh = remote_file = None
                last_modified = expires = None
                req = urllib2.Request(url)
                req.add_header('User-Agent', _getClientVersion())
                headers = headers or {}
                for name, value in headers.iteritems():
                    req.add_header(name, value)

                if modified_time and isinstance(modified_time, str):
                    req.add_header('If-Modified-Since', modified_time)
                    opener = urllib2.build_opener(NotModifiedHandler())
                    fh = opener.open(req, timeout=_DEFAULT_REQUEST_TIMEOUT)
                    headers = fh.info()
                    if hasattr(fh, 'code'):
                        code = fh.code
                        if code in (304, 200):
                            info = fh.info()
                            last_modified = info.getheader('Last-Modified')
                            expires = info.getheader('Expires')
                        if code == 200:
                            remote_file = fh.read()
                else:
                    opener = urllib2.build_opener(urllib2.BaseHandler())
                    fh = opener.open(req, timeout=_DEFAULT_REQUEST_TIMEOUT)
                    info = fh.info()
                    last_modified = info.getheader('Last-Modified')
                    expires = info.getheader('Expires')
                    remote_file = fh.read()
                if expires is None:
                    expires = makeHttpTime(time.gmtime())
                else:
                    ctime = getSafeDstUTCTime()
                    expiresTmp = parseHttpTime(expires)
                    if expiresTmp > ctime + _MAX_LIFE_TIME or expiresTmp < ctime:
                        expires = makeHttpTime(
                            time.gmtime(time.time() + _MAX_LIFE_TIME))
            except urllib2.HTTPError as e:
                LOG_WARNING('Http error. Code: %d, url: %s' % (e.code, url))
            except urllib2.URLError as e:
                LOG_WARNING('Url error. Reason: %s, url: %s' %
                            (str(e.reason) if isinstance(e.reason, basestring)
                             else 'unknown', url))
            except ValueError as e:
                LOG_WARNING('Value error. Reason: %s, url: %s' % (e, url))
            except Exception as e:
                LOG_ERROR("Client couldn't download file.", e, url)

        finally:
            if fh:
                fh.close()

        _LOG_EXECUTING_TIME(startTime, '__onReadRemoteFile', 10.0)
        if remote_file is None and last_modified is None:
            if showImmediately:
                LOG_DEBUG(
                    '__onReadRemoteFile, Error occurred. Release callbacks.',
                    url)
                self.__processedCache.pop(url, None)
            else:
                self.__postTask(url, None, True)
            return
        else:
            file_hash = base64.b32encode(url)
            ctime = getSafeDstUTCTime()
            fileChanged = False
            try:
                self.__mutex.acquire()
                cache = self.__cache
                if remote_file is None and last_modified is not None:
                    value = cache.get(file_hash, None)
                    if value is None:
                        LOG_WARNING(
                            'File is expected in cache, but there is no file')
                        self.__postTask(url, None, True)
                        return
                    crc, remote_file = value[2:4]
                else:
                    crc = binascii.crc32(remote_file)
                    fileChanged = True
                packet = (expires, ctime, crc, remote_file, _CACHE_VERSION,
                          last_modified)
                cache[file_hash] = packet
            finally:
                self.__mutex.release()

            LOG_DEBUG('writeCache', url, last_modified, expires)
            self.__writeCache(file_hash, packet)
            if showImmediately and not fileChanged:
                LOG_DEBUG(
                    '__onReadRemoteFile, showImmediately = True. Release callbacks.',
                    url)
                self.__processedCache.pop(url, None)
            else:
                self.__get(url, False, True)
            return
Example no. 13
from lib.core.exception import sqlmapGenericException
from lib.core.exception import sqlmapSyntaxException
from lib.core.exception import sqlmapUnsupportedDBMSException
from lib.core.optiondict import optDict
from lib.core.settings import MSSQL_ALIASES
from lib.core.settings import MYSQL_ALIASES
from lib.core.settings import SITE
from lib.core.settings import SUPPORTED_DBMS
from lib.core.settings import VERSION_STRING
from lib.core.update import update
from lib.parse.configfile import configFileParser
from lib.parse.queriesfile import queriesParser
from lib.request.proxy import ProxyHTTPSHandler
from lib.utils.google import Google

authHandler = urllib2.BaseHandler()
proxyHandler = urllib2.BaseHandler()


def __urllib2Opener():
    """
    This function creates the urllib2 OpenerDirector.
    """

    global authHandler
    global proxyHandler

    debugMsg = "creating HTTP requests opener object"
    logger.debug(debugMsg)

    conf.cj = cookielib.LWPCookieJar()
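
The function is cut off here; downstream, sqlmap swaps the BaseHandler
placeholders for real proxy and auth handlers and installs one opener built
from them, roughly like this (a sketch, not sqlmap's exact code):

opener = urllib2.build_opener(proxyHandler, authHandler)
urllib2.install_opener(opener)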