示例#1
0
def cReq(proxy=None):
    """Build and configure a curl.Curl handle, optionally behind a proxy.

    When the module-level ``flUseExistCurl`` flag is set, one handle is
    cached per thread in ``local.fcReq`` and returned as-is on later calls.

    :param proxy: optional "host:port" or "host:port:user:pwd" string
    :return: a configured curl.Curl instance
    """
    global local

    # Complain in the error log when a proxy is expected but none was given.
    if ProxyFunc and (not proxy or proxy.strip() == ''):
        app.Log('cReq. NO PROXY CALL', 'ERROR.log')

    if flUseExistCurl:
        # Reuse the thread-local handle when it already exists.
        if hasattr(local, 'fcReq'):
            return local.fcReq
        handle = local.fcReq = curl.Curl()
    else:
        handle = curl.Curl()

    handle.set_option(
        curl.pycurl.USERAGENT,
        'Mozilla/5.0 (Windows; U; Windows NT 5.2; ru; rv:1.8.1.6) Gecko/20070725 Firefox/2.0.0.6'
    )
    handle.set_option(curl.pycurl.FOLLOWLOCATION, True)

    if proxy:
        parts = proxy.strip().split(':')
        host, port = parts[:2]
        handle.set_option(curl.pycurl.PROXY, host)
        handle.set_option(curl.pycurl.PROXYPORT, int(port))
        # Four fields mean credentials were appended: host:port:user:pwd.
        if len(parts) == 4:
            handle.set_option(curl.pycurl.PROXYUSERPWD,
                              '%s:%s' % tuple(parts[2:]))

    if local.CookieFile:
        # The same file persists cookies and replays them on requests.
        handle.set_option(curl.pycurl.COOKIEJAR, local.CookieFile)
        handle.set_option(curl.pycurl.COOKIEFILE, local.CookieFile)

    if UseSsl:
        handle.set_option(curl.pycurl.CAINFO, app.AbsPath('ca-bundle.crt'))
        handle.set_option(curl.pycurl.SSLCERT, app.AbsPath('cacert.pem'))
        handle.set_option(curl.pycurl.CAPATH, app.AbsPath('/'))
        handle.set_option(curl.pycurl.SSLVERSION, 3)
        handle.set_option(curl.pycurl.SSLCERTTYPE, 'PEM')
        handle.set_option(curl.pycurl.SSL_VERIFYPEER, 1)
        handle.set_option(curl.pycurl.SSL_VERIFYHOST, 2)

    handle.set_timeout(CurlTimeOut)
    handle.set_option(curl.pycurl.CONNECTTIMEOUT, CurlConnectTimeOut)
    return handle
示例#2
0
    def get(url, timeout=8, params=None, return_codeing="utf8"):
        '''Issue an HTTP GET via curl, log the request, and return the body.

        Note: the (truncated) response body is written to the log, so avoid
        this helper for large payloads.

        @params: timeout request timeout in seconds
        @params: params request parameters forwarded to curl's get()
        @params: return_codeing response encoding; any value other than
                 "utf8" triggers an explicit decode of the body
        @return: the response body
        @raise ews.EwsError: STATUS_REQUEST_TIMEOUT on any failure
                 (original exception is logged with its traceback)
        '''
        # Pre-bind cc so the finally block cannot hit a NameError when
        # curl.Curl() (or time.time()) itself raises.
        cc = None
        try:
            t1 = time.time()
            cc = curl.Curl()
            cc.set_timeout(timeout)
            resp = cc.get(str(url), params)
            if return_codeing != "utf8":
                resp = resp.decode(return_codeing)

            if not params:
                params = {}

            t2 = time.time()
            Log.logger.info(
                "curlHttpRequest|url=%s|timeout=%s|params=%s|resp=%s|spendtime=%f",
                url, timeout, urllib.urlencode(params), resp[:200], (t2 - t1))
            return resp
        except Exception:
            # Was a bare except: narrowed so SystemExit/KeyboardInterrupt
            # are no longer swallowed and re-raised as EwsError.
            Log.logger.error("curl tcm api interface error %s |params %s", url,
                             params)
            Log.logger.error(traceback.format_exc())
            raise ews.EwsError(ews.STATUS_REQUEST_TIMEOUT)
        finally:
            if cc:
                cc.close()
示例#3
0
def GetImage(url, fd):
    """Fetch *url* with curl and write the response body to descriptor *fd*.

    Fixes two defects in the original: the file object is now closed even
    when the write fails (the descriptor was leaked on error), and the file
    is opened in binary mode so image bytes are not subject to newline
    translation on platforms that distinguish text mode.
    """
    c = curl.Curl()
    c.get(url)
    with os.fdopen(fd, 'wb') as img_write:
        img_write.write(c.body())
        img_write.flush()
示例#4
0
def get_data(url='http://10.4.0.215'):
    print url
    c = curl.Curl(url)
    results = []
    while True:
        c.perform()
        i = c.info()
        yield (i['starttransfer-time'] - i['pretransfer-time']) * 1000
示例#5
0
def get_data(url):
    print url
    c = curl.Curl(url)
    results = []
    for x in range(COUNT):
        c.get()
        i = c.info()
        yield (i['starttransfer-time'] - i['pretransfer-time']) * 1000
示例#6
0
 def __init__(self, email, password):
     # Store credentials and prepare a curl session rooted at fshare.vn.
     self.email = email
     self.password = password
     self.fshare = curl.Curl(base_url="https://www.fshare.vn")
     # Relative endpoints, resolved against base_url by the curl wrapper.
     self.login_url = "site/login"
     self.download_url = "download/get"
     # Fetch the login page up front to scrape the CSRF token from its
     # <meta name="csrf-token"> tag; presumably required by the later
     # login POST -- confirm against the caller.
     # NOTE(review): this performs a network request inside __init__.
     get_reponse = self.fshare.get(url=self.login_url).decode()
     self.fs_csrf = BeautifulSoup(get_reponse, 'html.parser').find("meta", attrs={'name': 'csrf-token'})\
         .get("content")
     self.isLogin = False
示例#7
0
 def cache_load(self, page):
     """Fetch *page* from the device and memoize its body in self.pagecache.

     Raises LinksysError on an authorization failure (401 answer) or when
     the page's expected check string is absent (the cache entry is dropped
     first).  The curl handle is now closed on every path; the original
     leaked it whenever either error was raised.
     """
     if page not in self.pagecache:
         fetch = curl.Curl(self.host)
         try:
             fetch.set_verbosity(self.verbosity)
             fetch.get(page)
             self.pagecache[page] = fetch.body()
             if fetch.answered("401"):
                 raise LinksysError("authorization failure.", True)
             elif not fetch.answered(LinksysSession.check_strings[page]):
                 del self.pagecache[page]
                 raise LinksysError("check string for page %s missing!" % os.path.join(self.host, page), False)
         finally:
             fetch.close()
def checkURL(url):
    """ Return the status code returned from the web server, False if no success at all """
    #Todo/Extra: only show successes, or better still, return the list
    #and let the caller decide what to do
    c = curl.Curl()
    response = c.get(url)
    code = c.info()["http-code"]  #returns the code 200 or 404 I think
    if code != 404:  #200 meaning it works (I think)
        print "!!!",
        print "Request for %s gives a code of %d" % (url, code)

    return
示例#9
0
 def __init__(self):
     """Initialize a curl-backed HTTP client with default request state."""
     self.http = curl.Curl()
     # Give up on connections that take longer than 15 seconds to establish.
     self.http.set_option(pycurl.CONNECTTIMEOUT, 15)
     # Per-request state, filled in by later calls.
     self.content = ""
     self.response_header = ""
     self.response_code = ""
     # Overall request timeout, in seconds.
     self.timeout = 30
     self.cookie_list = {}
     self.cookie_str = ""
     self.request_proxy = None
     self.header = {}
     self.request_header = []  # request headers
     self.user_agent = "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1)"  # default user-agent
示例#10
0
 def __init__(self, email, password):
     """Create an fshare.vn client session and scrape its CSRF token.

     A cookie file stored beside this module backs the session; the login
     page is fetched immediately so the csrf-token <meta> tag can be read.
     The actual login happens later, so ``isLogin`` starts out False.
     """
     self.email = email
     self.password = password
     self.user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:76.0) Gecko/20100101 Firefox/76.0"
     session = curl.Curl(base_url="https://www.fshare.vn")
     cookie_path = os.path.join(os.path.dirname(__file__), 'fshare.cookie')
     session.set_option(pycurl.COOKIEFILE, cookie_path)
     session.set_option(pycurl.USERAGENT, self.user_agent)
     self.fshare = session
     self.login_url = "site/login"
     self.download_url = "download/get"
     login_page = self.fshare.get(url=self.login_url).decode()
     token_tag = BeautifulSoup(login_page, 'html.parser').find("meta", attrs={'name': 'csrf-token'})
     self.fs_csrf = token_tag.get("content")
     self.isLogin = False
示例#11
0
def cReq(CookieIdent='cookie.txt'):
    """Create and configure a curl.Curl handle with cookies and SSL set up.

    CookieIdent: cookie file name stored into the thread-local
    ``local.CookieFile`` when none is set yet; it serves as both the
    cookie jar and the cookie source.  Returns the configured handle.
    """
    global local
    # Both switches are hard-coded here, so the flUseExistCurl branch
    # below is effectively dead and a fresh handle is built per call.
    UseSsl = 1
    flUseExistCurl = 0
    if not getattr(local, 'CookieFile', ''):
        local.CookieFile = CookieIdent

    if flUseExistCurl:
        # Dead with flUseExistCurl = 0 above; mirrors the proxy-enabled
        # cReq variant elsewhere in this file.
        if not hasattr(local, 'fcReq'):
            ret = local.fcReq = curl.Curl()
        else:
            return local.fcReq
    else:
        ret = curl.Curl()

    ret.set_option(
        curl.pycurl.USERAGENT,
        'Mozilla/5.0 (Windows; U; Windows NT 5.2; ru; rv:1.8.1.6) Gecko/20070725 Firefox/2.0.0.6'
    )
    # Follow HTTP redirects automatically.
    ret.set_option(curl.pycurl.FOLLOWLOCATION, True)

    if local.CookieFile:
        # Same file persists cookies (JAR) and replays them (FILE).
        ret.set_option(curl.pycurl.COOKIEJAR, local.CookieFile)
        ret.set_option(curl.pycurl.COOKIEFILE, local.CookieFile)

    if UseSsl:
        ret.set_option(curl.pycurl.CAINFO, 'ca-bundle.crt')
        ret.set_option(curl.pycurl.SSLCERT, 'cacert.pem')
        ret.set_option(curl.pycurl.CAPATH, '/')
        # NOTE(review): SSLVERSION 3 pins SSLv3, which is long deprecated
        # and rejected by modern servers -- confirm this is still needed.
        ret.set_option(curl.pycurl.SSLVERSION, 3)
        ret.set_option(curl.pycurl.SSLCERTTYPE, 'PEM')
        ret.set_option(curl.pycurl.SSL_VERIFYPEER, 1)
        ret.set_option(curl.pycurl.SSL_VERIFYHOST, 2)

    # Overall transfer timeout and connect timeout, in seconds.
    ret.set_timeout(60)
    ret.set_option(curl.pycurl.CONNECTTIMEOUT, 13)
    return ret
示例#12
0
def get_json_data():
    """
    Load past events from http://2015.yearbeast.com/ (reddit powah!)

    Returns the mapping produced by parse_json_data; presumably keyed by
    timezone-aware datetimes, given the comparisons below -- confirm
    against parse_json_data.
    """
    c = curl.Curl()
    data = c.get("http://2015.yearbeast.com/history.json")
    data = parse_json_data(data)
    if DEBUG:
        # Print every event that is already in the past, in reverse key order.
        for fh in reversed(data.keys()):
            if fh < datetime.datetime.now(tz.tzlocal()):
                print "-->", fh.strftime("%d/%m/%Y %H:%M")
    # NOTE(review): keys()[0] relies on the mapping preserving a meaningful
    # order (e.g. an OrderedDict); a plain Python 2 dict yields an
    # arbitrary "first" key -- verify parse_json_data's return type.
    fh = data.keys()[0]
    if fh >= datetime.datetime.now(tz.tzlocal()):
        print "==>", fh.strftime("%d/%m/%Y %H:%M")
    return data
示例#13
0
def testGf3():
    """Smoke-test the 'gf3' distribution packaging.

    Unzips the built distribution into a scratch 'test' directory, drives
    the server console via pexpect, starts the server in the background,
    waits for it, then fetches the front page to verify it answers.
    Changes the working directory during the run and restores it at the end.
    """
    print "## Testing 'gf3' packaging"
    flush()

    clean()
    #mvn("install package -P gf3")
    print "Testing result of 'package -P gf3'"

    zipfile = getZipFileFrom("nuxeo-distribution-gf3/target/")

    # Unpack the distribution into a scratch directory and enter it.
    os.mkdir("test")
    os.chdir("test")
    system("unzip -q ../nuxeo-distribution-gf3/target/" + zipfile)
    os.chdir("nxserver")

    # First run: interactive console sanity check.
    p = pexpect.spawn("sh nxserver.sh -console", timeout=120)
    consoleTest(p)

    time.sleep(10)
    print "Starting server"
    flush()

    # Second run: start the server with output redirected, then wait
    # until it is reachable.
    cmd = "sh nxserver.sh -console > server.log 2>&1"
    p = pexpect.spawn("sh", ["-c", cmd], timeout=1200)
    waitForServer(timeout=1200)

    print "Browsing a few pages"
    flush()

    c = curl.Curl()
    c.set_timeout(60)

    data = c.get("http://localhost:8080/")
    assert "Welcome to Nuxeo WebEngine." in data

    # FIXME: disabled for now.
    #data = c.get("http://localhost:8080/help")
    #assert "Nuxeo WebEngine Documentation" in data

    #data = c.get("http://localhost:8080/about")
    #assert "License:" in data
    #assert "Team:" in data
    #assert "Modules:" in data

    # Shut the console down and return to the original directory.
    p.sendline("quit")
    p.close(force=True)
    os.chdir("../..")
    print "done"
示例#14
0
 def configure(self):
     """Write configuration changes to the Linksys.

     Pending (page, field, value) actions are validated against the cached
     pages (missing fields are reported and skipped), then posted in a
     single Gozila.cgi request.  The curl handle is now closed even when
     the request raises; the original leaked it on error.
     """
     if self.actions:
         fields = []
         self.cache_flush()
         for (page, field, value) in self.actions:
             self.cache_load(page)
             if self.pagecache[page].find(field) == -1:
                 print_stderr("linksys: field %s not found where expected in page %s!" % (field, os.path.join(self.host, page)))
                 continue
             else:
                 fields.append((field, value))
         # Clearing the action list before fieldsping is deliberate.
         # Otherwise we could get permanently wedged by a 401.
         self.actions = []
         transaction = curl.Curl(self.host)
         try:
             transaction.set_verbosity(self.verbosity)
             transaction.get("Gozila.cgi", tuple(fields))
         finally:
             transaction.close()
示例#15
0
 def post(url, timeout=8, params=None):
     """Issue an HTTP POST via curl, logging the request, response and timing.

     @params: timeout request timeout in seconds
     @params: params POST parameters (defaults to an empty dict)
     @return: the response body
     @raise ews.EwsError: STATUS_REQUEST_TIMEOUT on any failure
              (original exception is logged with its traceback)
     """
     if params is None:
         # Fresh dict per call: the original `params={}` default was a
         # single shared mutable object that could leak state between calls.
         params = {}
     cc = None
     try:
         t1 = time.time()
         cc = curl.Curl()
         cc.set_timeout(timeout)
         resp = cc.post(str(url), params)
         t2 = time.time()
         t = t2 - t1
         Log.logger.info(
             "curlHttpPost|url=%s|params=%s|resp=%s|spendTime=%f", url,
             params, resp, t)
         return resp
     except Exception:
         # Was a bare except: narrowed so SystemExit/KeyboardInterrupt
         # are not converted into EwsError.
         Log.logger.error("curl tcm api interface error %s |params %s", url,
                          params)
         Log.logger.error(traceback.format_exc())
         raise ews.EwsError(ews.STATUS_REQUEST_TIMEOUT)
     finally:
         if cc:
             cc.close()
示例#16
0
 def post(url, timeout=8, params=None):
     """Issue an HTTP POST via curl, logging the request, response and timing.

     @params: timeout request timeout in seconds
     @params: params POST parameters (defaults to an empty dict)
     @return: the response body
     @raise requests.exceptions.ConnectTimeout: on any failure
              (original exception is logged with its traceback)
     """
     if params is None:
         # Fresh dict per call: the original `params={}` default was a
         # single shared mutable object that could leak state between calls.
         params = {}
     cc = None
     try:
         t1 = time.time()
         cc = curl.Curl()
         cc.set_timeout(timeout)
         resp = cc.post(str(url), params)
         t2 = time.time()
         t = t2 - t1
         Log.logger.info(
             "curlHttpPost|url=%s|params=%s|resp=%s|spendTime=%f", url,
             params, resp, t)
         return resp
     except Exception:
         # Was a bare except: narrowed so SystemExit/KeyboardInterrupt
         # are not converted into ConnectTimeout.
         Log.logger.error("curl tcm api interface error %s |params %s", url,
                          params)
         Log.logger.error(traceback.format_exc())
         raise requests.exceptions.ConnectTimeout
     finally:
         if cc:
             cc.close()
示例#17
0
import curl, re
import sys, os, os.path, platform

# Platform-specific setup: on Windows use the local working folder and
# route traffic through the hard-coded proxy; elsewhere use a home path.
if platform.system() == 'Windows':
    default_folder = 'd:\\Works\\DOCs\\транспортный эмулятор\\raspisanie\\2016-08-21\\'
    os.environ['HTTP_PROXY'] = "http://161.8.100.200:8080"
    os.environ['HTTPS_PROXY'] = "http://161.8.100.200:443"
else:
    default_folder = '/home/ant/Документы/МГТ/raspisanie/2016-08-21/'

# Fetch the timetable index page and scrape the .xls links from its body.
bu = "http://www.maggortrans.ru/raspisanie/"
r = curl.Curl(bu)
r.get()
b = r.body()
#print (b)
h = re.findall(b'href="(\w+.xls)"', b)
# NOTE(review): re.findall returns a list, never None, so this branch is
# dead; an empty match list falls through to the else with no output.
if h is None:
    print('No matches')
    exit()
else:
    # Show the first matched file URL as a quick sanity check.
    for f in h:
        print(bu + f.strip().decode('utf-8'))
        break

# Download every timetable file that is not already present locally.
# NOTE(review): the loop body appears truncated in this chunk -- a handle
# is created for the file but the download/save step is not visible here.
cnt = 0
for f in h:
    fn = f.strip().decode('utf-8')
    if os.path.isfile(default_folder + fn):
        print(fn)
        continue
    r = curl.Curl(bu + fn)
def download(url):
    """Fetch *url* with an 8-second timeout and return the response body."""
    client = curl.Curl()
    client.set_timeout(8)
    client.get(url)
    return client.body()
示例#19
0
#!/usr/bin/env python
#encoding:utf8
#author: zhuqiyu
import pymysql as db 
import curl

# Try to connect to MySQL up to 5 times before giving up.
# NOTE(review): if every attempt fails, `conn` is never bound and the
# conn.cursor() call below raises NameError -- confirm this is acceptable.
_conn_status = True
_conn_retries_count = 0
while _conn_retries_count < 5 and _conn_status:
    try:
        conn = db.connect(host="192.168.115.20", user="******", passwd="123", db="ad",\
                          charset="utf8", connect_timeout=3)
        _conn_status = False
    except Exception as e:
        _conn_retries_count += 1
curs = conn.cursor()
try:
    #curs.execute("INSERT INTO ad.ad_ruleresult(data,host,time) VALUES(%s,%s,%s);",(data,host,time_stamp))
    # For every rule index id, ping the monitor endpoint over HTTP.
    curs.execute("select id from ad_ruleindex")
    curl_check = curl.Curl()
    for item in curs.fetchall():
       curl_check.get("http://192.168.115.1:8000/ad/monitor/message/%d/" % item[0])
    conn.commit()
except Exception as e:
    # Any failure rolls the transaction back; the error itself is dropped.
    conn.rollback()
finally:
    curs.close()
    conn.close()

示例#20
0
 def setUp(self):
     # Fresh curl session per test, bound to port 8380 on `localhost`
     # (a module-level variable, not the literal hostname -- defined
     # elsewhere in the file).
     self.curl = curl.Curl('http://%s:8380/' % localhost)
示例#21
0
import bitmex
import curl
import  pandas as pd
# Pull hourly XBTUSD quote buckets from the BitMEX testnet.  range(1)
# fetches a single 500-row page; raising the range would page through
# history 500 rows at a time via the `start` offset.
c = curl.Curl()
with open('/home/arturx/btchourly.json', 'w') as file:
    for i in range(1):
        d = c.get(url='https://testnet.bitmex.com/api/v1/quote/bucketed',
                  params={'binSize': '1h', 'symbol': 'XBTUSD', 'partial': 'false', 'start': 500 * i, 'count': 500,
                          '_format': 'csv'})

        # NOTE(review): the response is fetched but never written -- the
        # cleanup/write steps below are commented out, so the output file
        # is created empty.
        # d = d.replace('b\'', '')
        # d = d.replace(']\'b\'[', ',')
        # # d = d.replace('[', '')
        # # d = d.replace(']', '')
        # file.writelines(d)
示例#22
0
def get_curl_modules(n_x, n_y, n_y_active, n_z, output_type, output_shape,
                     prior_size_placeholder, encoder_kwargs, decoder_kwargs):
    """Gets the training and testing model

    Args:
        n_x: flatten dimension of the input, int
        n_y: maximum number of clusters, int
        n_y_active: currently active number of clusters, int
        n_z: dimension of the latent space
        output_type: output distribution type, tf.distributions
        output_shape: shape of the output image, List[int]
        prior_size_placeholder: placeholder passed to construct_prior_probs
            to size the prior's probability tensor
        encoder_kwargs: arguments for the encoder, dict
        decoder_kwargs: arguments for the decoder, dict

    Returns:
        model_train: Curl model built with is_training=True
        model_eval: Curl model built with is_training=False, sharing all
            submodules with model_train except its own prior
    """

    # Encoder/decoder submodules are built once and shared by both models.
    shared_encoder = curl_skeleton.SharedEncoder(name='shared_encoder',
                                                 **encoder_kwargs)
    latent_encoder = functools.partial(curl_skeleton.latent_encoder_fn,
                                       n_y=n_y,
                                       n_z=n_z)
    latent_encoder = snt.Module(latent_encoder, name='latent_encoder')
    latent_decoder = functools.partial(curl_skeleton.latent_decoder_fn,
                                       n_z=n_z)
    latent_decoder = snt.Module(latent_decoder, name='latent_decoder')
    cluster_encoder = functools.partial(curl_skeleton.cluster_encoder_fn,
                                        n_y_active=n_y_active,
                                        n_y=n_y)
    cluster_encoder = snt.Module(cluster_encoder, name='cluster_encoder')
    data_decoder = functools.partial(curl_skeleton.data_decoder_fn,
                                     output_type=output_type,
                                     output_shape=output_shape,
                                     n_x=n_x,
                                     n_y=n_y,
                                     **decoder_kwargs)
    data_decoder = snt.Module(data_decoder, name='data_decoder')

    # Uniform prior over y.
    prior_train_probs = construct_prior_probs(prior_size_placeholder, n_y,
                                              n_y_active)
    prior_train = snt.Module(
        lambda: tfp.distributions.OneHotCategorical(probs=prior_train_probs),
        name='prior_unconditional_train')
    prior_test_probs = construct_prior_probs(prior_size_placeholder, n_y,
                                             n_y_active)
    prior_test = snt.Module(
        lambda: tfp.distributions.OneHotCategorical(probs=prior_test_probs),
        name='prior_unconditional_test')

    # Train and eval models differ only in their prior module and the
    # is_training flag.
    model_train = model.Curl(prior_train,
                             latent_decoder,
                             data_decoder,
                             shared_encoder,
                             cluster_encoder,
                             latent_encoder,
                             n_y_active,
                             is_training=True,
                             name='curl_train')
    model_eval = model.Curl(prior_test,
                            latent_decoder,
                            data_decoder,
                            shared_encoder,
                            cluster_encoder,
                            latent_encoder,
                            n_y_active,
                            is_training=False,
                            name='curl_test')

    return model_train, model_eval
示例#23
0
 def setUp(self):
     # Fresh curl session per test, aimed at the local test server.
     self.curl = curl.Curl('http://localhost:8380/')
示例#24
0
def retrieve_html(url):
    """Download *url* and return the raw response body."""
    fetcher = curl.Curl()
    fetcher.get(url)
    return fetcher.body()
示例#25
0
def get_marathon_config(addr, app):
    """GET *addr* and return the response body.

    The *app* argument is accepted for interface compatibility with the
    callers but is not used by the request itself.
    """
    client = curl.Curl()
    return client.get(addr)
示例#26
0
    def run(self,
            cmd,
            data='',
            upload_file=None,
            sign=True,
            exit_on_error=True):
        """Send *cmd* (with *data*) to the server and return its unwrapped reply.

        The payload is wrapped into a temp file (optionally signed with the
        private key), POSTed through curl as a multipart form, and the
        response is written to disk and unwrapped (optionally verified with
        the public key).

        cmd -- server command name; the reply must carry a '<cmd>.return' key.
        data -- payload associated with the command.
        upload_file -- optional file path attached as the 'package' field.
        sign -- when True, sign the request and verify the response.
        exit_on_error -- when True, terminate via sys.exit() when the server
            reports an application-level error.

        Returns the value under '<cmd>.return', or an {'errmfs': ...} dict on
        transport/setup failures.  May call sys.exit() on protocol errors.
        """
        logging.debug('URL base: %s', self._url_base)
        logging.debug('URL command: %s', cmd)
        logging.debug('URL data: %s', data)
        logging.debug('URL upload file: %s', upload_file)
        logging.debug('Sign request: %s', sign)
        logging.debug('Exit on error: %s', exit_on_error)

        # Make sure the scratch directory exists before writing anything.
        if not os.path.exists(TMP_PATH):
            try:
                os.makedirs(TMP_PATH, 0777)
            except OSError:
                _msg = 'Error creating %s directory' % TMP_PATH
                logging.exception(_msg)
                return {
                    'errmfs': {
                        'info': _msg,
                        'code': server_errors.GENERIC
                    }
                }

        # Wrap (and optionally sign) the payload into the request file.
        _filename = os.path.join(TMP_PATH,
                                 '%s.%s' % (self._filename_pattern, cmd))
        if self._debug:
            print(_filename)
        if sign:
            secure.wrap(_filename, {cmd: data},
                        key=os.path.join(self._path_keys, self._private_key))
        else:
            secure.wrap(_filename, {cmd: data})

        # Build the multipart POST: the message file plus an optional upload.
        _post = [('message', (pycurl.FORM_FILE, _filename))]
        if upload_file:
            _post.append(('package', (pycurl.FORM_FILE, upload_file)))

        logging.debug('Post data: %s', _post)

        _curl = curl.Curl(
            self._url_base,
            _post,
            proxy=self._proxy,
            cert=self._cert,
        )
        _curl.run()
        # The request file is kept around in debug mode for inspection.
        if not self._debug:
            os.remove(_filename)

        # Transport-level failure: surface curl's error to the caller.
        if _curl.error:
            # NOTE(review): `_` here is presumably gettext -- confirm.
            _msg = _('Curl error: %s') % _curl.error
            logging.error(_msg)
            print(_msg)

            return {'errmfs': {'info': _msg, 'code': _curl.errno}}

        # HTTP-level failure: optionally dump the body for debugging.
        if _curl.http_code >= 400:
            print(_('HTTP error code: %s') % _curl.http_code)
            if self._debug:
                _file = os.path.join(
                    TMP_PATH, 'response.%s.%s.html' % (_curl.http_code, cmd))
                utils.write_file(_file, str(_curl.body))
                print(_file)

            return {
                'errmfs': {
                    'info': str(_curl.body),
                    'code': server_errors.GENERIC
                }
            }

        # evaluate response: persist the body, then unwrap (and verify).
        _response = '%s.return' % _filename
        utils.write_file(_response, str(_curl.body))
        if sign:
            _ret = secure.unwrap(_response,
                                 key=os.path.join(self._path_keys,
                                                  self._public_key))
        else:
            _ret = secure.unwrap(_response)

        if not self._debug:
            os.remove(_response)
        else:
            print(_response)

        # A well-formed reply is a dict carrying a '<cmd>.return' key;
        # anything else is a protocol violation and aborts the process.
        if not isinstance(_ret, dict) or not ('%s.return' % cmd) in _ret:
            if 'errmfs' in _ret:
                _msg = server_errors.error_info(_ret['errmfs']['code'])
                logging.error(_msg)
                print(_msg)

            _msg = 'url_request unexpected response: %s. Expected: %s'
            if self._debug:
                print(_msg % (_ret, '%s.return' % cmd))

            logging.critical(_msg, _ret, '%s.return' % cmd)
            sys.exit(errno.EACCES)

        _ret = _ret['%s.return' % cmd]  # unwrapping cmd response
        if isinstance(_ret, dict) and 'errmfs' in _ret:
            # The server executed the command but reported an application
            # error; log it and optionally abort the process.
            if _ret['errmfs']['code'] != server_errors.ALL_OK:
                _error = server_errors.error_info(_ret['errmfs']['code'])
                if self._debug:
                    print(_('Error: %s') % _error)
                    if _ret['errmfs']['info']:
                        print(_('Information: %s') % _ret['errmfs']['info'])

                logging.error('url_request server error response code: %s',
                              _error)
                logging.error('url_request server error response info: %s',
                              _ret['errmfs']['info'])

                if exit_on_error:
                    print(_('Error: %s') % _error)
                    sys.exit(errno.EACCES)

        return _ret