Exemple #1
0
    def __init__(self, investor_id, api_key, api_version='v1', 
                 host='api.lendingclub.com', path='/api/investor'):
        """Connection to the LendingClub API.

        Each client requires an `investor_id` and `api_key`.  All other
        arguments are optional.

        Args:
          investor_id (int): The account's Investor Id; this can be found on
            the Account Summary page on the LendingClub website after
            logging in.
          api_key (str): Account authorization token found under Settings.
          api_version (str, optional): API version endpoint.
          host (str, optional): Host name of the API endpoint.
          path (str, optional): Base path of the API endpoint.
        
        """
        # No data has been fetched yet.
        self._last_update = None
        self._host = host
        # The API addresses accounts by the investor id as a string path part.
        investor_id = str(investor_id)
        self._investor_id = investor_id
        # NOTE(review): urljoin here takes 2-3 positional parts, so it is a
        # project helper (os.path.join-like), not urllib's 2-argument
        # urljoin -- confirm against the module imports.
        self._base_path = urljoin(path, api_version)
        self._acct_path = urljoin(self._base_path, 'accounts', investor_id) 
        self._loan_path = urljoin(self._base_path, 'loans')

        # Headers sent with every request; the api_key is the raw
        # Authorization token.
        self._default_headers = {'Authorization': api_key,
                                 'Accept': 'application/json',
                                 'Content-type': 'application/json'}

        # TLS connection with server certificate verification enabled.
        ssl_ctx = create_default_context(Purpose.SERVER_AUTH)
        self._conn = HTTPSConnection(host, context=ssl_ctx)
        # NOTE(review): debug level 10 echoes every request/response to
        # stdout -- looks like leftover debugging; confirm before release.
        self._conn.set_debuglevel(10)

        self._conn.connect()
Exemple #2
0
def main():
    """Read a site/satellite CO2 comparison file and draw a scatter plot."""
    # Command-line arguments: <time-range> <type1> <type2> <rate>
    xArg = sys.argv[1:]

    # Exactly 4 arguments are expected.
    args_nums = len(xArg)
    if args_nums != 4:
        print 'input args num error'
        return

    # Processing time range, the two file types, and the rate.
    Ftime = xArg[0]
    Ftype1 = xArg[1]
    Ftype2 = xArg[2]
    Frate = xArg[3]

    # Resolve input/output directories from the configuration.
    group = '%s-%s' % (Ftype1, Ftype2)
    I_DIR = Cfg[group]['I_DIR']
    O_DIR = Cfg[group]['O_DIR']

    date_s, date_e = ymd2date(Ftime)
    I_FILE = urljoin(I_DIR, group + '_' + Frate + '.txt')
    # BUGFIX: the output image path was built under I_DIR, which left the
    # configured O_DIR entirely unused; write the plot into O_DIR instead.
    O_FILE = urljoin(O_DIR, group + '_' + Frate)
    DATA = ReadFile(I_FILE, date_s, date_e)
    print DATA

    DictTitle = {'xlabel': 'Site CO2', 'ylabel': 'Sate CO2', 'title': '%s-%s' % (Ftype1, Ftype2)}
    ds_PUB_LIB.draw_Scatter(DATA['SiteCo2'], DATA['SateCo2'], O_FILE, DictTitle, '', '')
Exemple #3
0
 def __init__(self, host="http://127.0.0.1:7796/", key=None, verify=False):
     """Store connection settings and precompute the query/update endpoints."""
     self.host = host
     self.key = key
     self.verify = verify
     self.delim = '\t'
     # Endpoints for querying and updating the SynthDB instance.
     self.api = urljoin(host, 'preql/q')
     self.update = urljoin(host, 'preql/update')
     # Pieces of the error message shown when no instance answers here.
     connect_hint = "SynthDB.connect('{}')".format(self.host)
     not_found = "No instance of SynthDB found at {}".format(self.host)
     self.error_params = [connect_hint, not_found, self.host]
def main():
    # Command-line arguments: <time-range> <file-type>
    xArg = sys.argv[1:]

    # Exactly 2 arguments are expected.
    # NOTE(review): the original comment claimed 3 arguments; the check
    # below enforces 2.
    args_nums = len(xArg)
    if args_nums != 2:
        print ' args nums error'
        return

    # Processing time range and file type.
    Ftime = xArg[0]
    Ftype = xArg[1]

    # Resolve input/output directories from the configuration.
    I_DIR = Cfg[Ftype]['I_DIR']
    O_DIR = Cfg[Ftype]['O_DIR']

    SiteList = urljoin(O_DIR, Ftype, 'SiteList.txt')

    if os.path.exists(SiteList):
        fp = open(SiteList, 'r')
        SiteList = fp.readlines()
        fp.close()
    else:
        # Site list file does not exist.
        print u'站点列表文件不存在'
        return

    for siteLine in SiteList:
        if siteLine.strip() == '':
            continue
        # Columns: site name first, per-site start/end dates in columns
        # 3 and 4 (YYYY-MM-DD).
        siteName = siteLine.strip().split()[0]
        start_time = siteLine.strip().split()[3].replace('-', '')
        end_time = siteLine.strip().split()[4].replace('-','')
        # NOTE(review): the Ftime argument from argv is overwritten per
        # site here.
        Ftime = start_time + '-' + end_time
        print siteName, start_time, end_time
        date_s, date_e = ymd2date(Ftime)

        while date_s <= date_e:
            ymd = date_s.strftime('%Y%m%d')
            FULL_I_DIR =  urljoin(I_DIR, Ftype, siteName)
            # Output goes under the site name, one file per day.
            O_FILE = urljoin(O_DIR, Ftype, siteName, ymd[:4], ymd + '.txt')

            # Instantiate a TCCON reader for this day.
            tccon = TCCON()
            # Seed the reader's state with the day and site.
            tccon.YMD = date_s
            tccon.SiteName = siteName
            # Find, read, filter and write one day of data.
            tccon.FindFile(FULL_I_DIR)
            tccon.ReadFile()
            tccon.Filter()
            print  ymd,len(tccon.FileLine), len(tccon.FileData)
            tccon.Write(O_FILE)
            date_s = date_s + relativedelta(days=1)
Exemple #5
0
def main():
    """Extract GGA data per site and per day for the requested time range."""
    # Command-line arguments: <time-range> <file-type>
    argv = sys.argv[1:]
    if len(argv) != 2:
        print ' args nums error'
        return

    Ftime, Ftype = argv

    # Input/output directories come from the configuration.
    in_dir = Cfg[Ftype]['I_DIR']
    out_dir = Cfg[Ftype]['O_DIR']

    site_list_path = urljoin(out_dir, Ftype, 'SiteList.txt')

    if not os.path.exists(site_list_path):
        # Site list file does not exist.
        print u'站点列表文件不存在'
        return
    fp = open(site_list_path, 'r')
    site_lines = fp.readlines()
    fp.close()

    for line in site_lines:
        if line.strip() == '':
            continue
        date_s, date_e = ymd2date(Ftime)
        site = line.strip().split()[0]
        print site
        day = date_s
        while day <= date_e:
            ymd = day.strftime('%Y%m%d')
            src_dir = urljoin(in_dir, Ftype, site)
            dst_file = urljoin(out_dir, Ftype, site, ymd[:4], ymd + '.txt')

            # One GGA reader per day, seeded with the day and site.
            gga = GGA()
            gga.YMD = day
            gga.SiteName = site

            # Find, read, filter and write this day's data.
            gga.FindFile(src_dir)
            gga.ReadFile()
            gga.Filter()
            gga.Write(dst_file)
            print ymd, len(gga.FileData)
            day = day + relativedelta(days=1)
def show_hours(date_s, Ftype):
    """Plot hourly mean/variance curves for every area on day `date_s`.

    Args:
        date_s: datetime of the day to plot (24 hourly points).
        Ftype: configuration key selecting directories and the area map.
    """
    IDIR = Cfg[Ftype]['I_DIR']
    ODIR = Cfg[Ftype]['O_DIR_H']
    AERA = Cfg[Ftype]['L2S']

    # 24 hourly timestamps for the x axis.
    TimeList = []
    DictData = {}
    for i in range(24):
        dateT1 = date_s + relativedelta(hours=i)
        TimeList.append(dateT1)

    for aera in AERA.keys():
        Line = []
        shortname = Cfg[Ftype]['L2S'][aera].decode('utf-8')
        ymd = date_s.strftime('%Y%m%d')
        FileName = urljoin(IDIR, aera, 'hours', ymd + '.txt')
        print FileName
        if os.path.isfile(FileName) and os.path.getsize(FileName) != 0:
            ary = np.loadtxt(FileName, dtype=dtype, skiprows=1).reshape((-1,))
        else:
            # No data for this area/day: build 24 NaN placeholder rows.
            # NOTE(review): every placeholder carries the same '00:00:00'
            # time stamp -- confirm whether the hour should advance with i.
            for i in range(24):
                t_str = '%s %s %f %f %f %d' % (ymd, '00:00:00', np.nan, np.nan, np.nan, 9999)
                Line.append([each.strip() for each in t_str.split()])
            ary = np.core.records.fromarrays(np.array(Line).transpose(),
                names=','.join(dname_Nan),
                formats=','.join(dtype_Nan))
        DictData[shortname] = ary
    # print DictData

    if not os.path.isdir(ODIR):
        os.makedirs(ODIR)
    # Mean values per area.
    DictMean = {}
    # NOTE(review): color_dict is never used in this function.
    color_dict = {}
    for eachkey in DictData.keys():
        DictMean[eachkey] = DictData[eachkey]['mean']
        print 'TimeList', len(TimeList), eachkey, len(DictMean[eachkey])

    # Variance per area.
    DictVar = {}
    for eachkey in DictData.keys():
        DictVar[eachkey] = DictData[eachkey]['var']
        print 'TimeList', len(TimeList), eachkey, len(DictVar[eachkey])

    DictTitle1 = {'xlabel': 'x %s' % ymd, 'ylabel': 'mean', 'title': 'gga'}
    DictTitle2 = {'xlabel': 'x %s' % ymd, 'ylabel': 'Var', 'title': 'gga'}
    # ymd = time.strftime('%Y%m%d', time.localtime())

    Outimg1 = urljoin(ODIR, ymd + '_mean')
    Outimg2 = urljoin(ODIR, ymd + '_var')

    ds_PUB_LIB.draw_time_fig(TimeList, DictMean, Outimg1, DictTitle1, 'H')
    ds_PUB_LIB.draw_time_fig(TimeList, DictVar, Outimg2, DictTitle2, 'H')
def show_day(Ftype, Ftime):
    """Plot daily mean/variance curves for every area over a time range.

    Args:
        Ftype: configuration key selecting directories and the area map.
        Ftime: time range string understood by ymd2date().
    """
    IDIR = Cfg[Ftype]['I_DIR']
    ODIR = Cfg[Ftype]['O_DIR_D']
    AERA = Cfg[Ftype]['L2S']

    # One timestamp per day for the x axis.
    TimeList = []
    DictData = {}
    date_s, date_e = ymd2date(Ftime)
    while date_s <= date_e:
        TimeList.append(date_s)
        date_s = date_s + relativedelta(days=1)
    for aera in AERA.keys():
        shortname = Cfg[Ftype]['L2S'][aera].decode('utf-8')
        print shortname
        FileList = []
        # Re-derive the range: date_s was consumed by the loop above.
        date_s, date_e = ymd2date(Ftime)
        while date_s <= date_e:
            ymd = date_s.strftime('%Y%m%d')
            FileName = urljoin(IDIR, aera, 'days', ymd + '.txt')
            FileList.append(FileName)
            date_s = date_s + relativedelta(days=1)
        data = combine_day(FileList)
        DictData[shortname] = data

    if not os.path.isdir(ODIR):
        os.makedirs(ODIR)
    print DictData
    # Mean values per area.
    DictMean = {}
    for eachkey in DictData.keys():
        DictMean[eachkey] = DictData[eachkey]['mean']
        print 'TimeList', len(TimeList), eachkey, len(DictMean[eachkey])

        print len(TimeList), len(DictData[eachkey])

    # Variance per area.
    DictVar = {}
    for eachkey in DictData.keys():
        DictVar[eachkey] = DictData[eachkey]['var']
        print 'TimeList', len(TimeList), eachkey, len(DictVar[eachkey])

    # NOTE(review): ymd below is the value leaked from the last loop
    # iteration (the end date) -- confirm that is the intended label.
    DictTitle1 = {'xlabel': 'x %s' % ymd, 'ylabel': 'mean', 'title': 'gga'}
    DictTitle2 = {'xlabel': 'x %s' % ymd, 'ylabel': 'Var', 'title': 'gga'}
    # ymd = time.strftime('%Y%m%d', time.localtime())

    Outimg1 = urljoin(ODIR, ymd + '_mean')
    Outimg2 = urljoin(ODIR, ymd + '_var')

    ds_PUB_LIB.draw_time_fig(TimeList, DictMean, Outimg1, DictTitle1, 'D')
    ds_PUB_LIB.draw_time_fig(TimeList, DictVar, Outimg2, DictTitle2, 'D')
Exemple #8
0
    def retrive_save_document(self, analysis_id):
        """Fetch an analysis combo from the backend, mirror every referenced
        file into local storage, rewrite the ids, and save the document.

        Args:
            analysis_id: backend id of the analysiscombo resource.

        Returns:
            The id of the analysiscombo document inserted into the local db.
        """
        combo_resource_url = urljoin(BACKEND_HOST, "api/v1/analysiscombo/{}/?format=json".format(analysis_id))
        retrive_headers = {'Authorization': 'ApiKey {}:{}'.format(API_USER,API_KEY)}
        logger.debug("Fetching resource from {}".format(combo_resource_url))

        # Retry the fetch until it succeeds; connection errors are retried
        # after SLEEP_TIME_ERROR seconds.
        r = False
        while not r:
            try:
                r = requests.get(combo_resource_url, headers = retrive_headers)
            except requests.exceptions.ConnectionError:
                logger.debug("Got a requests.exceptions.ConnectionError exception, will try again in {} seconds".format(SLEEP_TIME_ERROR))
                time.sleep(SLEEP_TIME_ERROR)
        response = r.json()
        logger.debug(response)
        # Download the file behind each location and swap in the local id.
        for x in response["locations"]:
            if x['content_id'] != None:
                download_url = urljoin(BACKEND_HOST, "api/v1/location/", x['content_id'], "file/")
                new_fs_id = self.fetch_save_file(download_url)
                # Rewrite the id in the response document.
                x['location_id'] = new_fs_id
        # Same for samples.
        for x in response["samples"]:
            download_url = urljoin(BACKEND_HOST, "api/v1/sample/", x['sample_id'], "file/")
            new_fs_id = self.fetch_save_file(download_url)
            # Rewrite the id in the response document.
            x['sample_id'] = new_fs_id
        # Same for pcaps, skipping entries without content.
        for x in response["pcaps"]:
            if x['content_id'] is None:
                continue
            download_url = urljoin(BACKEND_HOST, "api/v1/pcap/", x['content_id'], "file/")
            new_fs_id = self.fetch_save_file(download_url)
            # Rewrite the id in the response document.
            x['content_id'] = new_fs_id
        # Point vt/andro/etc. sample_ids at the new gridfs ids.
        # NOTE(review): the samples' sample_id fields were already
        # rewritten above, so these lookups run against the NEW ids --
        # original author flagged this too ("check for issues in this").
        for x in response["virustotal"]:
            x['sample_id'] = search_samples_dict_list(x['sample_id'],response["samples"])
        for x in response["honeyagent"]:
            x['sample_id'] = search_samples_dict_list(x['sample_id'],response["samples"])
        for x in response["androguard"]:
            x['sample_id'] = search_samples_dict_list(x['sample_id'],response["samples"])
        for x in response["peepdf"]:
            x['sample_id'] = search_samples_dict_list(x['sample_id'],response["samples"])
        # Drop the backend _id from all samples before inserting locally.
        for x in response["samples"]:
            x.pop("_id")
        frontend_analysis_id = db.analysiscombo.insert(response)
        return frontend_analysis_id
def main():
    """Combine each site's TCCON data into a single time-series file."""
    # Command-line arguments: <file-type>
    xArg = sys.argv[1:]

    # Exactly 1 argument is expected.
    args_nums = len(xArg)
    if args_nums != 1:
        print ' args nums error'
        return

    # File type to process.
    Ftype = xArg[0]

    # Resolve input/output directories from the configuration.
    I_DIR = Cfg[Ftype]['I_DIR']
    O_DIR = Cfg[Ftype]['O_DIR']

    SiteList = urljoin(O_DIR, Ftype, 'SiteList.txt')

    if os.path.exists(SiteList):
        fp = open(SiteList, 'r')
        SiteList = fp.readlines()
        fp.close()
    else:
        # Site list file does not exist.
        print u'站点列表文件不存在'
        return

    for siteLine in SiteList:
        if siteLine.strip() == '':
            continue
        # Split once per line instead of once per field (the original
        # re-split the line for every column and recomputed siteName twice).
        cols = siteLine.strip().split()
        siteName = cols[0]
        # Per-site start/end dates (columns 3 and 4, YYYY-MM-DD); comment
        # these out when driving the time range manually.
        start_time = cols[3].replace('-', '')
        end_time = cols[4].replace('-', '')
        Ftime = start_time + '-' + end_time

        print siteName, start_time, end_time
        date_s, date_e = ymd2date(Ftime)
        FULL_I_DIR = urljoin(I_DIR, Ftype, siteName)
        oFile = urljoin(O_DIR, Ftype, siteName, 'time_series.txt')

        # Find, read, combine and write this site's series.
        tccon = TCCON()
        tccon.FindFile(FULL_I_DIR)
        tccon.ReadFile()
        tccon.Combine_S(date_s, date_e)
        tccon.Write_S(oFile)
        print oFile
Exemple #10
0
def request(session, base_path, method, path, **kwargs):
    """Send an HTTP request built from `base_path` and `path`.

    :param requests.Session session:
    :param str base_path:
    :param str method: Method for the :class:`requests.Request` object.
    :param str path: (optional) The path to join with :attr:`CouchDB.url`.
    :param kwargs: (optional) Arguments that :meth:`requests.Session.request` takes.
    :rtype: requests.Response
    """
    # CouchDB wants booleans in the query string as JSON ("true"/"false"),
    # so serialize any bool params on a copy of the caller's dict.
    params = kwargs.get('params')
    if isinstance(params, dict):
        encoded = params.copy()
        for key, val in iteritems(encoded):
            if isinstance(val, bool):
                encoded[key] = json.dumps(val)
        kwargs['params'] = encoded

    # Absolute URLs pass through untouched; relative paths are resolved
    # against the base path.
    if compat.urlparse(path).scheme:
        url = path
    else:
        url = urljoin(base_path, path).strip('/')

    r = session.request(method, url, **kwargs)
    if not (200 <= r.status_code < 300):
        # Non-2xx responses become exceptions.
        utils.raise_http_exception(r)

    return r
Exemple #11
0
def insert_att(doc_id, doc_rev, att_id, att, att_type, **kwargs):
    """Create or update an existing attachment.

    http://docs.couchdb.org/en/stable/api/document/attachments.html#put--db-docid-attname

    :param str doc_id: The attachment document.
    :param doc_rev: (optional) The document revision.
    :param str att_id: The attachment name.
    :param att: The dictionary, bytes, or file-like object to insert.
    :param str att_type: The attachment MIME type.
    :param kwargs: (optional) Arguments that :meth:`requests.Session.request` takes.
    :rtype: (str, str, dict)
    """
    # Record the revision (when given), ensuring 'params' is a dict first.
    if doc_rev:
        if not isinstance(kwargs.get('params'), dict):
            kwargs['params'] = {}
        kwargs['params']['rev'] = doc_rev

    # Ensure 'headers' is a dict and stamp the attachment's MIME type on it.
    if not isinstance(kwargs.get('headers'), dict):
        kwargs['headers'] = {}
    kwargs['headers']['Content-Type'] = att_type
    kwargs['data'] = att

    path = urljoin(utils.encode_document_id(doc_id), utils.encode_attachment_id(att_id))
    return 'PUT', path, kwargs
Exemple #12
0
    def actions(self, appfolder, controller, skipped, skip_auth=False):
        """
        Generator.
        Parse codelines from controller code and yield '<controller>/<action>' strings.

        Args:
            appfolder: application folder
            controller: name of the controller (without .py) ['<controller>/*' will be converted into 'controller']
            skipped: list of actions we want to skip/ignore them
            skip_auth: if True, all @auth. decorated actions will be skipped too (useful for unlogged user)
        """
        controller = controller.split('/')[0]  # <controller> or <controller/*> can be used
        codelines = self.get_controller_codelines(appfolder, controller)
        skip_next = False
        for ln in codelines:
            ln = ln.rstrip()  # drop trailing whitespace / newline

            # A zero-argument top-level 'def name():' is a Web2py action
            # candidate, unless the previous line flagged it for skipping.
            if ln[:4] == 'def ':
                if not skip_next and ln[-3:] == '():':
                    action = ln[4:-3].strip()
                    if action[:2] != '__':                 # double-underscore names are private, not actions
                        url = urljoin(controller, action)
                        if url not in skipped:
                            yield url
                skip_next = False
            # NOTE(review): because 'and' binds tighter than 'or', the
            # '# ajax' comment match sets skip_next regardless of
            # skip_auth -- confirm that is intended.
            elif skip_auth and ln[:6] == '@auth.' or re.match('^#\s*ajax$', ln):   #  unlogged user + need authorize --or-- # ajax
                skip_next = True
                skip_next = True
Exemple #13
0
 def controllers(appfolder):
     """Yield the names of all controllers (without .py), except appadmin."""
     controllers_dir = urljoin(appfolder, 'controllers')
     for fname in os.listdir(controllers_dir):
         if fname[-3:] == '.py' and fname != 'appadmin.py':
             yield fname[:-3]
Exemple #14
0
    def set_temporary_password(self, client_id, client_secret, username, new_password):
        """ Changes password on behalf of the user with client credentials

        Args:
            client_id: The oauth client id that this code was generated for
            client_secret: The secret for the client_id above
            username: the userName of the user to act on
            new_password: the users desired password

        Raises:
            UAAError: there was an error changing the password

        Returns:
            boolean: True when a matching user was found and the password
            was set; False when no user matched `username`
        """
        # Look the user up by exact userName to obtain their id.
        list_filter = 'userName eq "{0}"'.format(username)
        userList = self.client_users(client_id, client_secret, list_filter=list_filter)

        if len(userList['resources']) > 0:
            # Use the first match; a client (not user) token authorizes
            # the password change.
            user_id = userList['resources'][0]['id']
            token = self._get_client_token(client_id, client_secret)
            self._request(
                urljoin('/Users', user_id, 'password'),
                'PUT',
                body={
                    'password': new_password
                },
                headers={
                    'Authorization': 'Bearer ' + token
                }
            )
            return True

        return False
Exemple #15
0
    def FindFile(self, dir):
        """Recursively collect gga data files for self.YMD and the day before.

        Matching file paths are appended to self.FileList.

        Args:
            dir: directory to search (recursed into).
        """
        # Date as it appears in the file names, for the current day...
        StrYmd1 = self.YMD.strftime('%d%b%Y')
        # ...and for the previous day.
        yesterday = self.YMD - relativedelta(days=1)
        StrYmd2 = yesterday.strftime('%d%b%Y')
        # Full-match patterns for the two days' file names.
        patFile1 = '\Agga_%s_f000(\d{1}).txt\Z' % StrYmd1
        patFile2 = '\Agga_%s_f000(\d{1}).txt\Z' % StrYmd2
        # Walk the directory entries (reverse-sorted), recursing into
        # subdirectories and keeping every file matching either pattern.
        Lst = sorted(os.listdir(dir), reverse=True)
        for Line in Lst:
            FullPath = urljoin(dir, Line)
            # Recurse into subdirectories.
            if os.path.isdir(FullPath):
                self.FindFile(FullPath)
            # Keep files whose name matches either day's pattern.
            elif os.path.isfile(FullPath):
                FileName = os.path.split(FullPath)[1]
                R1 = re.match(patFile1, FileName)
                R2 = re.match(patFile2, FileName)
                if R1:
                    self.FileList.append(FullPath)
                if R2:
                    self.FileList.append(FullPath)
def set_piazza(course_id, piazza_id):
    """Uses the backend API to add a course to the course list"""
    endpoint = urljoin(URL, str(course_id), 'course/setpiazza/')
    response = post(endpoint, data={'piazza_cid': piazza_id})
    if response.status_code != 200:
        raise ValueError('Unable to add piazza id %s' % piazza_id)
    return response
Exemple #17
0
def request(name, cookies=USE_DEFAULT, game_number=USE_DEFAULT, json=True, extra_opts=None, **data):
	"""Do a request with given name and form data.

	Args:
		name: request type; also sent as the 'type' form field.
		cookies: cookie dict, a cookie string, or USE_DEFAULT to load and
			cache them from DEFAULT_COOKIE_PATH.
		game_number: game to act on, or USE_DEFAULT to read NP_GAME_NUMBER
			from the environment.
		json: when False, return the raw response text instead of the
			decoded report.
		extra_opts: (optional) extra keyword arguments for requests.post.
		**data: form fields for the request.
	"""
	# BUGFIX: extra_opts was a mutable default argument ({}), shared
	# across calls; use None as the sentinel (backward compatible).
	if extra_opts is None:
		extra_opts = {}

	if cookies == USE_DEFAULT:
		global default_cookies
		if not default_cookies:
			with open(os.path.expanduser(DEFAULT_COOKIE_PATH)) as f:
				default_cookies = parse_cookies(f.read().strip())
		cookies = default_cookies
	elif isinstance(cookies, basestring):
		cookies = parse_cookies(cookies)

	if game_number == USE_DEFAULT:
		game_number = os.environ['NP_GAME_NUMBER']

	url = urljoin(BASE_URL, name)
	data['type'] = name
	if game_number: data['game_number'] = game_number

	resp = requests.post(url, data=data, cookies=cookies, **extra_opts)
	resp.raise_for_status()
	if not json: return resp.text
	resp_obj = decode_json(resp.text)
	report = resp_obj.get('report', None)

	# The server signals auth failure inside the JSON body, not the status.
	if report == 'must_be_logged_in':
		raise RequestError(report)

	return report
Exemple #18
0
    def __init__(self, url, create_db=True):
        """Initialize the database object.

        :param str url: The Database URL.
        :param bool create_db: (optional) Create the database.
        """
        # Raise exception on an invalid URL (the prepared request itself
        # is discarded; only the validation side effect is wanted).
        Request('HEAD', url).prepare()

        #: Database host
        self.host = utils.get_database_host(url)

        #: Database name
        self.name = utils.get_database_name(url)

        #: Database URL
        # FIXME: Converts "https://test.db/a/b/index.html?e=f" to "https://test.db/index.html"
        self.url = urljoin(self.host, self.name)

        #: Database initialization
        self.create_db = create_db

        #: Default :class:`requests.Session`
        self.session = Session()
        # Ask CouchDB for JSON responses on every request, per
        # http://docs.couchdb.org/en/stable/api/basics.html#request-headers
        self.session.headers['Accept'] = 'application/json'
Exemple #19
0
def main():
    # Command-line arguments: <file-type>
    xArg = sys.argv[1:]

    # Exactly 1 argument is expected.
    args_nums = len(xArg)
    if args_nums != 1:
        print ' args nums error'
        return

    # File type to process.
    Ftype = xArg[0]

    # Resolve input/output directories from the configuration.
    I_DIR = Cfg[Ftype]['I_DIR']
    O_DIR = Cfg[Ftype]['O_DIR']

    # Flat-list de-duplication trick, kept for reference:
    # l1 = ['b','c','d','b','c','a']
    # l2 = {}.fromkeys(l1).keys()

    SiteList = urljoin(O_DIR, Ftype,  'SiteList.txt')
    if os.path.exists(SiteList):
        fp = open(SiteList, 'r')
        SiteList = fp.readlines()
        fp.close()
    else:
        # Site list file does not exist.
        print u'站点列表文件不存在'
        return

    for siteLine in SiteList:
        if siteLine.strip() == '':
            continue
        siteName =  siteLine.strip().split()[0]
        # Per-site start/end dates (columns 3 and 4, YYYY-MM-DD); comment
        # out when driving the time range manually.
        start_time = siteLine.strip().split()[3].replace('-', '')
        end_time = siteLine.strip().split()[4].replace('-','')
        Ftime = start_time + '-' + end_time
        print siteName, start_time, end_time
        date_s, date_e = ymd2date(Ftime)
        # Combine all of this site's WDCGG data into one time-series file.
        wdcgg = WDCGG()
        wdcgg.SiteName = siteName
        oFile = urljoin(O_DIR, Ftype, wdcgg.SiteName, 'time_series.txt')
        wdcgg.FindFile(I_DIR)
        wdcgg.ReadFile()
        wdcgg.Combine_S(date_s, date_e)
        wdcgg.Write_S(oFile)
def add_root(course_id, root_id):
    """Uses the backend API to add a root to the tree"""
    root_id, course_id = str(root_id), str(course_id)
    endpoint = urljoin(URL, course_id, 'root/set', root_id, '')
    response = post(endpoint, data={})
    if response.status_code != 200:
        raise ValueError('Unable to add root %s' % root_id)
    return response
def delete_node(course_id, node_id):
    """Uses the backend API to delete a node that already exists"""
    node_id, course_id = str(node_id), str(course_id)
    endpoint = urljoin(URL, course_id, 'node/delete', node_id, '')
    response = post(endpoint, data={})
    if response.status_code != 200:
        raise ValueError('Unable to delete node %s' % node_id)
    return response
Exemple #22
0
    def summary(self):
        """Return a dict object summarizing account data"""
        # GET <account path>/summary and decode the JSON body.
        url = urljoin(self._acct_path, 'summary')
        self._conn.request('GET', url, None, self._default_headers)
        response = self._conn.getresponse()
        return json.load(response)
def create_node(course_id, contents='foo', renderer='bar'):
    """Uses the backend API to create a node"""
    payload = {'contents': contents, 'renderer': renderer}
    endpoint = urljoin(URL, str(course_id), 'node/add/')
    response = post(endpoint, data=payload)
    if response.status_code != 200:
        raise ValueError('Unable to create node')
    return response
Exemple #24
0
def main():
    # Command-line arguments: <time-range> <file-type>
    xArg = sys.argv[1:]

    # Exactly 2 arguments are expected.
    # NOTE(review): the original comment claimed 3 arguments; the check
    # below enforces 2.
    args_nums = len(xArg)
    if args_nums != 2:
        print ' args nums error'
        return

    # Processing time range and file type.
    Ftime = xArg[0]
    Ftype = xArg[1]

    # Resolve input/output directories from the configuration.
    I_DIR = Cfg[Ftype]['I_DIR']
    O_DIR = Cfg[Ftype]['O_DIR']

    date_s, date_e = ymd2date(Ftime)

    # One output file per day in the range.
    while date_s <= date_e:
        ymd = date_s.strftime('%Y%m%d')
        FULL_I_DIR = urljoin(I_DIR, Ftype)
        O_FILE = urljoin(O_DIR, Ftype, ymd[:4], ymd + '.txt')

        # German product: NetCDF files.
        if Ftype == 'GOSAT_GER':
            pat = '.+%s.+nc\Z' % ymd
            gosat = GOSAT()
            gosat.FindFile(FULL_I_DIR, pat)
            gosat.ReadGer()
            gosat.Write(O_FILE)
            print O_FILE

        # Japanese product: HDF5 files.
        elif Ftype == 'GOSAT_JAP':
            pat = '.+%s.+h5\Z' % ymd
            gosat = GOSAT()
            gosat.FindFile(FULL_I_DIR, pat)
            gosat.ReadJap()
            gosat.Write(O_FILE)
            print O_FILE

        else:
            print 'File Type Error'
            return
        date_s = date_s + relativedelta(days=1)
Exemple #25
0
def main():
    # Command-line arguments: <time-range> <file-type> <rate>
    xArg = sys.argv[1:]

    # Exactly 3 arguments are expected.
    args_nums = len(xArg)
    if args_nums != 3:
        print 'input error: args nums is three!'
        return

    # Processing time range, file type and rate.
    Ftime = xArg[0]
    Ftype = xArg[1]
    Frate = xArg[2]

    I_DIR = Cfg[Ftype]['I_DIR']
    O_DIR = Cfg[Ftype]['O_DIR']
    AERA = Cfg[Ftype]['L2S']

    # Load every site's time series for the requested rate.
    DictData = {}
    for siteName in AERA.keys():
        date_s, date_e = ymd2date(Ftime)
        ShortName = Cfg[Ftype]['L2S'][siteName].decode('utf-8')
        I_FILE = urljoin(I_DIR, Ftype, siteName, 'time_series_' + Frate + '.txt')
        SiteData = ReadFile(I_FILE, date_s, date_e)
        print siteName
        if len(SiteData) != 0:
            DictData[ShortName] = SiteData
    # Mean values per site.
    DictMean = {}
    for eachkey in DictData.keys():
        DictMean[eachkey] = DictData[eachkey]['mean']
    print DictMean
    stime = date_s.strftime('%Y%m%d')
    etime = date_e.strftime('%Y%m%d')
    DictTitle = {'xlabel': '%s-%s' % (stime, etime), 'ylabel': 'mean', 'title': '%s' % Ftype}

    O_FILE = urljoin(O_DIR, Ftype, stime + '_' + etime)
    # NOTE(review): TimeList is built from SiteData as leaked from the LAST
    # iteration of the loop above -- this assumes every site shares the
    # same time axis; confirm.
    TimeList = []
    for x in xrange(len(SiteData)):
        dateT = datetime.strptime(SiteData['time'][x], "%Y-%m-%d %H:%M:%S")
        print dateT
        TimeList.append(dateT)

    ds_PUB_LIB.draw_time_fig(TimeList, DictMean, O_FILE, DictTitle, Frate)
Exemple #26
0
 def ensure_users(self, usr=None):
     """Visit the plugin_splinter ensure_users page so the testing database
     contains the configured users.

     Args:
         usr: when given, restrict to the single matching user (matched on
             the part before '#'); otherwise all of self.users are ensured.
     """
     if usr:
         # NOTE(review): this yields `usr` (not the full `eusr` entry) --
         # confirm the '#'-suffix part is meant to be dropped here.
         users = [usr for eusr in self.users if eusr.split('#')[0] == usr]  # 1 item only
     else:
         users = self.users
     ensure_users_encoded = [base64.b32encode(eusr) for eusr in users]  # if without encode: Web2py args failure?
     suburl = URL(a='x', c='plugin_splinter', f='ensure_users', args=ensure_users_encoded, vars=MORE_AUTH_USER_FIELDS)[3:]
     self.br.visit(urljoin(self.url, suburl))  # prepare user from [splinter]ensure_users= setting inside the testing database
     self.br.is_text_present(self.usual_text)
def test_request_with_path(mocker):
    """request() joins the base URL with a relative path before sending."""
    mock_request = mocker.patch.object(Session, 'request', autospec=True)
    mock_request.return_value.status_code = 200
    path = '_design/someid'
    session = Session()

    result = time2relax.request(session, TEST_URL, 'HEAD', path)

    assert result == mock_request.return_value
    expected_url = urljoin(TEST_URL, path)
    mock_request.assert_called_with(session, 'HEAD', expected_url)
def get_node(course_id, node_id):
    """Uses the backend API to request a node"""
    node_id, course_id = str(node_id), str(course_id)
    full_url = urljoin(URL, course_id, 'node/get', node_id, '')
    response = get(full_url)
    if response.status_code != 200:
        raise ValueError('Unable to access node %s' % node_id)
    return response
Exemple #29
0
    def test_get_user(self):
        """get_user() makes a GET request to /Users/<id>"""

        uaac = UAAClient('http://example.com', 'foo', False)
        mock_request = Mock()
        uaac._request = mock_request

        uaac.get_user('foo')
        expected_path = urljoin('/Users', 'foo')
        mock_request.assert_called_with(expected_path, 'GET')
Exemple #30
0
def _ddoc(method, ddoc_id, func_type, func_id, _path=None, **kwargs):
    """Apply or execute a design document function.

    :param str method: Method for the :class:`requests.Request` object.
    :param str ddoc_id: The design document name.
    :param str func_type: The design function type.
    :param str func_id: The design function name.
    :param str _path: (internal)
    :param kwargs: (optional) Arguments that :meth:`requests.Session.request` takes.
    :rtype: (str, str, dict)
    """
    # Path shape: "<encoded _design/ddoc_id>/<func_type>/<func_id>[/_path]"
    encoded = utils.encode_document_id(urljoin('_design', ddoc_id))
    func_path = urljoin(encoded, func_type, func_id)
    if _path:
        func_path = urljoin(func_path, _path)
    return method, func_path, kwargs
Exemple #31
0
 def _refresh_token(self):
     """POST the refresh token and store the newly issued access token
     (and refresh token, when the server rotates it) in the headers.

     :raises OasisException: when the server rejects the refresh request.
     :return: the raw response object.
     """
     self.headers['authorization'] = 'Bearer {}'.format(self.tkn_refresh)
     resp = super(APISession, self).post(
         urljoin(self.url_base, 'refresh_token/'), timeout=self.timeout)
     if resp.status_code != status.ok:
         raise OasisException('Token refresh error: {}'.format(resp.text))
     body = resp.json()
     self.tkn_access = body['access_token']
     if 'refresh_token' in body:
         self.tkn_refresh = body['refresh_token']
     self.headers['authorization'] = 'Bearer {}'.format(self.tkn_access)
     return resp
Exemple #32
0
    def get_playlist(self, id: int = None, name: str = None) -> Playlist:
        """
        Get a single playlist in the current project, by id or by name.

        :param id: Specify the playlist to get by its id
        :param name: Retrieve a playlist by its unique name within project
        :return: A single playlist
        :raises: `PhenotypeError` if the project does not exist
        :raises: ServerError
        """
        if name and id:
            raise TypeError("id and name cannot both be supplied")
        if not self.project:
            raise PhenotypeError("Project does not exist.")

        # Build the URL once: lookups by name query the collection endpoint,
        # lookups by id address the playlist directly.  (Previously the id
        # URL was built unconditionally, producing a bogus ".../playlists/None"
        # that was then discarded whenever name was supplied.)
        if name:
            url = urljoin(self.session.url_from_endpoint('projects'),
                          self.project_name, 'playlists')
        else:
            url = urljoin(
                self.session.url_from_endpoint("projects"),
                self.project_name,
                "playlists",
                str(id),
            )

        try:
            resp = self.session.get(url, data={'name': name})
        except ServerError as ex:
            if ex.response and ex.response["code"] == codes.not_found:
                raise PhenotypeError("Playlist not found") from None
            else:
                raise

        if name:
            # name lookups return a list; take the single match
            data = resp.json()['playlists'][0]
        else:
            data = resp.json()["playlist"]
        return Playlist(self.session, data)
Exemple #33
0
    def got_dir(self, req, res, dt=0):
        """Populate the widget with images for a directory listing.

        :param req: the request that fetched the listing, or None when
            replaying a cached response.
        :param res: the directory listing payload.
        :param dt: scheduling delta; unused.  # NOTE(review): presumably here
            so this can be used as a Clock callback — confirm
        """
        if req:
            # Identical cached response: nothing to rebuild.
            if res == rescache.get(req.url):
                return
            else:
                self.clear_widgets()
                rescache.set(req.url, res)

        sdir, direntries = get_direntries(res)

        # Keep only plain files; direntry tuples are (name, orientation, type).
        files = [de for de in direntries if de[2] == FILE]

        # jurl serves server-generated JPEG previews; url serves originals.
        jurl = self.server_url + urljoin('jpeg',
                                         quote(sdir.encode('utf-8')), '')
        url = self.server_url + urljoin(quote(sdir.encode('utf-8')), '')

        index = i = 0
        for (fn, orig_orientation, file_type) in files:
            fn = quote(fn.encode('utf-8'))
            if fn[-4:].lower() in (".jpg", "jpeg"):
                # Already a JPEG: load the original file directly.
                file_url = url + fn
            else:
                # Other formats go through the server-side JPEG converter.
                file_url = jurl + fn + '.jpg'

            orientation = orig_orientation
            if platform == 'android':
                # Remap EXIF orientation values for Android display.
                orientation = {1: 8, 3: 6, 6: 6, 8: 8}[orig_orientation]

            image = CachedImage(source=file_url, orientation=orientation,
                                load=False, allow_scale=True)
            image.orig_orientation = orig_orientation
            image.bind(image_scale=self.on_image_scale)
            self.add_widget(image)

            # Remember the position of the currently selected file.
            if fn == self.filename:
                index = i
            i +=1

        self.index = index
Exemple #34
0
 def on_path(self, widget, path):
     """Kick off a directory fetch whenever the path property changes."""
     if not self.server_url:
         return
     self.clear_widgets()
     target = urljoin(self.server_url, quote(path.encode('utf-8')), "")
     UrlRequest(target, on_success=self.got_dir)
     cached = rescache.get(target)
     if cached:
         # Build from cached data, but defer: this may run from the
         # parent's init, before this object is fully initialized.
         Clock.schedule_once(partial(self.got_dir, None, cached), 0)
Exemple #35
0
    def test_filter_restless_count(self, test_client, session, workspace,
                                second_workspace, host_factory):
        """The restless filter must count only hosts of the queried workspace."""
        # 30 Unix hosts in the target workspace should be counted...
        host_factory.create_batch(30, workspace=workspace, os='Unix')
        # ...while these 5 live in another workspace and must be excluded.
        host_factory.create_batch(5, workspace=second_workspace, os='Unix')
        session.commit()

        query = ('filter?q={"filters":[{"name": "os", "op":"eq", "val":"Unix"}],'
                 '"offset":0, "limit":20}')
        res = test_client.get(urljoin(self.url(), query))

        assert res.status_code == 200
        assert res.json['count'] == 30
Exemple #36
0
 def test_invalid_json_on_executorData_breaks_the_api(
         self, csrf_token, session, test_client):
     """Malformed JSON in executorData must be rejected with HTTP 400."""
     agent = AgentFactory.create(workspaces=[self.workspace])
     session.add(agent)
     session.commit()

     res = test_client.post(
         self.check_url(urljoin(self.url(agent), 'run/')),
         json={'csrf_token': csrf_token, 'executorData': '[][dassa'},
     )
     assert res.status_code == 400
Exemple #37
0
 def test_run_agent(self, session, csrf_token, test_client):
     """Running an agent with empty executorData is a bad request (400)."""
     agent = AgentFactory.create(workspaces=[self.workspace])
     session.add(agent)
     session.commit()

     run_url = self.check_url(urljoin(self.url(agent), 'run/'))
     payload = {'csrf_token': csrf_token, 'executorData': ''}
     assert test_client.post(run_url, json=payload).status_code == 400
Exemple #38
0
    def get_resource(self, resource_path):
        """Fetch *resource_path* from the manager file server.

        The deployment-specific folder is tried first; on a 404 the lookup
        falls back to the blueprint folder.  Any other HTTP error is
        propagated unchanged.
        """
        deployment_url = urljoin(
            self._manager_file_server_url,
            constants.FILE_SERVER_DEPLOYMENTS_FOLDER,
            self._tenant,
            self._deployment_id,
            resource_path,
        )
        try:
            return self._get_resource_by_url(deployment_url)
        except exceptions.HTTPException as e:
            # only "not found" triggers the blueprint fallback
            if e.code != 404:
                raise

        blueprint_url = urljoin(
            self._manager_file_server_url,
            constants.FILE_SERVER_BLUEPRINTS_FOLDER,
            self._tenant,
            self._blueprint_id,
            resource_path,
        )
        return self._get_resource_by_url(blueprint_url)
Exemple #39
0
    def health_check(self):
        """
        Checks the health of the server.

        :raises OasisException: when the healthcheck endpoint is unreachable.
        """
        try:
            return self.get(urljoin(self.url_base, 'healthcheck/'))
        except (TypeError, AttributeError, BytesWarning, HTTPError,
                ConnectionError, ReadTimeout):
            raise OasisException(
                'Health check failed: Unable to connect to {}'.format(
                    self.url_base))
Exemple #40
0
    def delete_sensor_measurements(
        self,
        box_id,
        sensor_id,
        timestamps=None,
        from_date=None,
        to_date=None,
        all=None,
        really=False,
    ):
        """
        Issue a request to delete measurements from a sensor

        Args:
            box_id (str): the senseBox id
            sensor_id (str): the sensor id
            to_date, from_date (datetime.datetime, optional): start and end
                dates to delete the data
            timestamps (list of datetime.datetime, optional): timestamps to
                delete
            all (bool, optional): delete all measurements?
            really (bool, optional): really delete the measurements?

        Returns:
            bool : whether the deletion worked

        Raises:
            OpenSenseMapAPIError: when the API does not answer with 200 OK.
        """
        # deliberate safety latch: deletion is destructive
        assert really, ("Refusing to delete measurements from sensor "
                        "'{}' without really=True").format(sensor_id)
        d = {}
        if from_date is not None:
            d.update({"from-date": date2str(from_date)})
        if to_date is not None:
            d.update({"to-date": date2str(to_date)})
        if all is not None:
            d.update({"deleteAllMeasurements": bool(all)})
        if timestamps is not None:
            d.update({"timestamps": [date2str(x) for x in timestamps]})
        logger.debug("Request payload:\n{}".format(pretty_json(d)))
        response = self.request(
            "delete",
            urljoin(self.api, paths.BOXES, box_id, sensor_id, "measurements"),
            headers=self.authorization_header,
            json=d,
        )
        response_json = response.json()
        message = response_json.get("message", "")
        if not response.status_code == requests.codes.OK:  # pragma: no cover
            # BUGFIX: the conditional expression previously bound to the whole
            # concatenation ("A + B if message else ''"), so an empty message
            # raised OpenSenseMapAPIError("") with no context.  It also
            # formatted box_id where the text says "sensor".
            raise OpenSenseMapAPIError(
                "Could not delete measurements from sensor '{}'".format(
                    sensor_id)
                + (": {}".format(message) if message else ""))
        return True
Exemple #41
0
    def post_measurement(
        self,
        box_id,
        sensor_id,
        value,
        time=None,
        lat=None,
        lon=None,
        height=None,
    ):
        """
        Issue a request to upload a new measurement

        Args:
            box_id (str) : the senseBox id
            sensor_id (str) : the sensor's id
            value (float) : the current measurement value
            time (datetime.datetime, optional) : the time of the measurement
            lat, lon, height (float,optional) : the current position

        Returns:
            True : on success

        Raises:
            OpenSenseMapAPIError: when the response cannot be parsed or the
                API answers with an error object.
        """
        assert box_id is not None, "box_id must  be defined"
        assert sensor_id is not None, "sensor_id must  be defined"
        d = {}
        d["value"] = float(value)
        if time:
            d["createdAt"] = date2str(time)
        try:
            # location is optional; incomplete coordinates are just skipped
            d["location"] = location_dict(lat, lon, height)
        except ValueError:
            pass
        logger.debug("Sending Request with JSON:\n{}".format(pretty_json(d)))
        response = self.request("post",
                                urljoin(self.api, paths.BOXES, box_id,
                                        sensor_id),
                                json=d)
        try:
            response_json = response.json()
        except compat.JSONDecodeError:  # pragma: no cover
            raise OpenSenseMapAPIError(
                "Posting measurement didn't work: {}".format(response.text))
        if hasattr(response_json, "get"):  # is a dict
            # A dict response is treated as an error here; the success path
            # below matches a plain-string body.
            message = response_json.get("message")
            # BUGFIX: the old '": " + message or ""' raised TypeError when
            # message was None; use a parenthesized conditional instead.
            raise OpenSenseMapAPIError(
                "Posting measurement didn't work{}".format(
                    ": {}".format(message) if message else ""))
        else:  # no dict
            if re.search(r"measurement\s+saved\s+in\s+box", response_json):
                return True
Exemple #42
0
 def mkdir(self, path, mode=0o777, specific_remote_path=None):
     """
     Create a directory (and any missing parents) on the SFTP server.

     :param path: directory path, relative to the remote base
     :param mode: permission bits for newly created directories.
         BUGFIX: the default was ``o777`` — an undefined name that raised
         NameError whenever mode was not passed — instead of the octal
         literal ``0o777``.
     :param specific_remote_path: explicit remote base directory; when None,
         config['remotePath'] or the home directory is used
     :return: 0 on success, -1 on failure
     """
     if specific_remote_path is not None:
         remote_path = specific_remote_path
     else:
         remote_path = self.config.get('remotePath', self.home)
     target_path = urljoin(remote_path, path)
     try:
         self.sftp.chdir(target_path)
     except IOError:
         # Target missing: try a direct mkdir first.
         try:
             self.sftp.mkdir(target_path, mode=mode)
         except Exception:
             # Direct mkdir failed (e.g. missing parents): walk the path and
             # create each missing component in turn.
             base_path = '/'
             for p in target_path.split('/'):
                 if p:
                     base_path = urljoin(base_path, p)
                     try:
                         self.sftp.chdir(base_path)
                     except IOError:
                         try:
                             self.sftp.mkdir(base_path, mode=mode)
                         except Exception as e:
                             self.logger.error('mkdir {} error: {}'.format(base_path, e))
                             return -1
     try:
         self.sftp.chdir(target_path)
         return 0
     except Exception as e:
         self.logger.error('mkdir->chdir {} error {}'.format(target_path, e))
         return -1
Exemple #43
0
    def test_put_user(self):
        """put_user() makes a PUT request to /Users/<id> with appropriate headers"""

        client = UAAClient("http://example.com", "foo", False)
        request_mock = Mock()
        client._request = request_mock

        user = {"id": "foo", "meta": {"version": "123"}}
        client.put_user(user)

        request_mock.assert_called_with(
            urljoin("/Users", "foo"), "PUT", body=user, headers={"If-Match": "123"}
        )
    def _get_auth_server(self):
        """Locate the reachable keycloak auth server and verify the realm.

        Returns the auth server URL.  Raises ServerError when keycloak is
        unreachable and AuthServerError when the realm does not exist.
        """
        auth_server = urljoin(self.root_url, "auth")
        r = requests.get(auth_server)
        # ! temporary hack because services are split between xxx.wuxi and xxx-cluster.wuxi
        if r.status_code == codes.not_found:
            if "-cluster" not in auth_server:
                # NOTE(review): this rebuilds from root_url and drops the
                # "auth" path component the non-cluster URL had — confirm
                # the cluster host really serves auth at its root.
                lst = self.root_url.split(".", 1)
                auth_server = lst[0] + "-cluster." + lst[1]
            else:
                auth_server = auth_server.replace("-cluster", "")

        # test if realm is available
        realm_url = urljoin(auth_server, "realms", self.realm)
        try:
            resp = requests.get(realm_url)
        except requests.exceptions.ConnectionError:
            raise ServerError(f"Keycloak server {realm_url} is not reachable")
        if resp.status_code == requests.codes.not_found:
            raise AuthServerError(
                f"Realm '{self.realm}' was not found on keycloak server {auth_server}"
            )

        return auth_server
Exemple #45
0
    def submit(self, pcap):
        """Upload *pcap* to the IDS API and return the polling URL.

        :param pcap: GridFS document descriptor with an ``_id`` key.
        :return: the URL (Location header) to poll for results.
        :raises RuntimeError: when the module is unconfigured, the API is
            unreachable, or the upload is not accepted with HTTP 201.
        """
        try:
            api_url = self.config_dict["api_url"]
            api_key = self.config_dict["api_key"]
            username = self.config_dict["username"]
        except KeyError:
            raise RuntimeError("IDS module not properly configured, skip")

        log.debug("Will run with API user: {}, key: {}, URL: {}".format(
            username, api_key, api_url))

        self.headers = {
            'Authorization': 'ApiKey {}:{}'.format(username, api_key),
            'Content-Type': 'application/json'
        }

        with fs.get(pcap['_id']) as i:
            payload = {
                'pcap_file': {
                    "name": os.path.basename(self.pcap_path),
                    "file": base64.b64encode(i.read()),
                    "content_type": "application/vnd.tcpdump.pcap",
                }
            }

        # Retry the POST on connection errors, up to MAX_ATTEMPTS times.
        response = None
        attempt = 0
        while response is None and attempt < MAX_ATTEMPTS:
            try:
                log.debug("Trying to POST pcap")
                response = requests.post(urljoin(api_url, 'task/'),
                                         data=json.dumps(payload),
                                         headers=self.headers)
            except requests.exceptions.ConnectionError:
                log.debug("Got requests.exceptions.ConnectionError, "
                          "trying again in one second")
                attempt += 1
                time.sleep(1)

        # BUGFIX: requests.Response is falsy for 4xx/5xx statuses, so the
        # old "if not response" misreported HTTP errors as connection
        # failures.  Test identity against None instead.
        if response is None:
            raise RuntimeError("Unable to contact the IDS APIs")

        if response.status_code == 201:
            retrieve_url = response.headers['Location']
            log.debug("Will need to poll URL: {}".format(retrieve_url))
        else:
            raise RuntimeError("Unable to create new Extracted File, "
                               "got status code: {}".format(
                                   response.status_code))
        return retrieve_url
Exemple #46
0
    def direntry_selected(self, direntry):
        """Descend into *direntry*: fetch its listing and navigate down."""
        Logger.debug("%s: on_direntry_selected %s" % (
            APP, direntry.encode('ascii','replace')))

        # TODO: cancel earlier requests if possible — the server can get
        # stuck producing thumbnails before answering a directory change.

        self.remove_widget(self.content)
        self.navigation.append(self.content)
        new_path = urljoin(self.content.path, direntry, '')
        self.fetch_dir(path=new_path)
        self.dispatch('on_navigate_down')
Exemple #47
0
 def get_covariate(self, id):
     """
     Get a single covariate by its id

     :param id: the covariate id
     :raises PhenotypeError: when the server reports 404 for the covariate
     :raises ServerError: for any other server failure
     """
     url = urljoin(self.links['self'], 'covariates', str(id))
     try:
         resp = self.session.get(url)
     except ServerError as ex:
         if ex.response and ex.response['code'] == codes.not_found:
             # BUGFIX: the f-string had no placeholder; include the id so
             # the caller knows which lookup failed.
             raise PhenotypeError(f"Covariate {id} not found") from None
         else:
             # bare raise preserves the original traceback
             raise
     data = resp.json()['covariate']
     return data
Exemple #48
0
    def __init__(self, mode='fw', sort_by='by-signature', branch_name=''):
        """
        Could download firmware or bootloder files from stable or specified branch.

        :param mode: firmware or bootloader, defaults to 'fw'
        :type mode: str, optional
        :param sort_by: files on remote server are stored by project_name or device_signature, defaults to 'by-signature'
        :type sort_by: str, optional
        :param branch_name: looking for fw/bootloader from specified branch (instead of stable), defaults to None
        :type branch_name: str, optional
        """
        self.mode = mode
        # Probe the root URL first to fail fast without connectivity.
        try:
            url_handler.urlopen(CONFIG['ROOT_URL'])
        except (URLError, HTTPError) as e:
            logging.error('Check internet connection')
            die(e)
        self.parent_url_path = urljoin(CONFIG['ROOT_URL'], mode, sort_by)
        self.branch_name = branch_name
        # Stable source by default; a branch name switches to unstable/<branch>.
        self.fw_source = (urljoin('unstable', branch_name) if branch_name
                          else CONFIG['DEFAULT_SOURCE'])
Exemple #49
0
def test_connection(ibcontext, **kwargs):
    """Verify that the configured B2SHARE community endpoint is reachable."""
    logger = logging.getLogger('ipublish')
    cfg = ibcontext['b2share'].get_config(kwargs)
    logger.debug(cfg)

    community = cfg.get('b2share_community')
    url = urljoin(cfg.get('b2share_api_url'), 'communities', community)
    logger.info('checking community %s, url: %s', community, url)

    req = requests.get(
        url, params={'access_token': cfg.get('b2share_access_token')})
    req.raise_for_status()
    # also make sure the body parses as JSON
    req.json()
Exemple #50
0
    def create_phenotype(
        self,
        name: str,
        result_type: str,
        description: Optional[str] = None,
        url: Optional[str] = None,
        category: Optional[str] = None,
        query: Optional[str] = None,
        tags: Optional[List[str]] = None,
    ) -> Phenotype:
        """
        Create a new phenotype in the current project

        :param name: Unique (lowercase) phenotype name in the project
        :param result_type: Must be one of SET, QT or CATEGORY
        :param description: Free text description of the phenotype (optional)
        :param url: Reference URL for the phenotype (to dataset or other reference)
        :param category: Enter the category for the phenotype (must be defined in the project - see get_categories) (optional)
        :param query: NOR query that defines this phenotype (optional)
        :param tags: list of tags to add to this phenotype (optional) e.g. ['tag1','tag2']
        :raises: PhenotypeError, ServerError
        """
        # Validate before touching the network/session at all.
        result_type = result_type.upper()
        if result_type not in SUPPORTED_RESULT_TYPES:
            raise PhenotypeError(
                f"Result type {result_type} not supported. Use one of {', '.join(SUPPORTED_RESULT_TYPES)}"
            )

        uri = urljoin(
            self.session.url_from_endpoint("root"),
            "projects",
            self.project_name,
            "phenotypes",
        )
        payload = {
            "name": name,
            "result_type": result_type,
            "description": description,
            "url": url,
            "category": category,
            "query": query,
            # BUGFIX: the default used to be the mutable literal [] shared
            # across calls; None is now normalized to an empty list here.
            "tag_list": tags if tags is not None else [],
        }
        resp = self.session.post(uri, json=payload)
        resp.raise_for_status()
        data = resp.json()

        # if the project did not already exist, initialize the service
        if not self.project:
            self._init_project(self.project_name)
        return Phenotype(self.session, data["phenotype"])
def test_batch_delete(table, mock_records):
    """batch_delete() should DELETE each record and collect the responses."""
    ids = [rec['id'] for rec in mock_records]
    with Mocker() as mock:
        for record_id in ids:
            mock.delete(
                urljoin(table.url_table, record_id),
                status_code=201,
                json={'delete': True, 'id': record_id},
            )
        resp = table.batch_delete(ids)
    assert resp == [{'delete': True, 'id': record_id} for record_id in ids]
Exemple #52
0
 def test_get_organizations(self, status, results):
     """get_organizations() returns the payload on 200 and [] otherwise."""
     discovery_url = urljoin(settings.DISCOVERY_BASE_URL,
                             DISCOVERY_API_TPL.format('organizations', ''))
     responses.add(
         responses.GET,
         discovery_url,
         status=status,
         json=self.get_multi_response(results),
     )

     orgs = DiscoveryServiceClient.get_organizations()
     expected = results if status == 200 else []
     self.assertEqual(expected, orgs)
Exemple #53
0
 def get_user_client_roles(self, user_name: str,
                           client_name: str) -> List[str]:
     """
     Get the specified client roles for this user
     """
     user_id = self.get_user(user_name)["id"]
     client_id = self.get_client(client_name)["id"]
     mapping_url = urljoin(self.realm_url, "users", str(user_id),
                           "role-mappings", "clients", client_id)
     resp = self.session.get(mapping_url)
     resp.raise_for_status()
     return [role["name"] for role in resp.json()]
Exemple #54
0
def test_delete_by_field(table, mock_response_single):
    """delete_by_field() should look up the record, then DELETE it by id."""
    record_id = mock_response_single["id"]
    expected = {"delete": True, "id": record_id}
    match_url = "{}?{}".format(
        table.url_table, urlencode({"FilterByFormula": "{Value}='abc'"}))
    with Mocker() as mock:
        mock.get(
            match_url,
            status_code=200,
            json={"records": [mock_response_single, mock_response_single]},
        )
        mock.delete(urljoin(table.url_table, record_id),
                    status_code=201, json=expected)
        resp = table.delete_by_field("value", "abc")
    assert resp == expected
Exemple #55
0
    def test_activity_feed(self, session, test_client):
        """The activity_feed endpoint credits object counts to the command
        that created the objects, not to commands that merely reference them."""
        # First command: created by the factory along with one vulnerability.
        command = self.factory.create()
        # Second command re-references the same vuln and host.
        another_command = EmptyCommandFactory.create(workspace=command.workspace)
        vuln = session.query(Vulnerability).get(command.command_objects[0].object_id)
        session.flush()
        CommandObjectFactory.create(
            command=another_command,
            object_type='vulnerability',
            object_id=vuln.id,
            workspace=command.workspace
        )
        CommandObjectFactory.create(
            command=another_command,
            object_type='host',
            object_id=vuln.host.id,
            workspace=command.workspace
        )
        session.commit()

        res = test_client.get(urljoin(self.url(workspace=command.workspace), 'activity_feed'))
        assert res.status_code == 200

        # The creating command is credited with the host and the vulnerability.
        assert list(filter(lambda stats: stats['_id'] == command.id, res.json)) == [
            {'_id': command.id,
             'command': command.command,
             'import_source': 'shell',
             'user': command.user,
             'date': time.mktime(command.start_date.timetuple()) * 1000,
             'params': command.params,
             'tool': command.tool,
             'hosts_count': 1,
             'services_count': 0,
             'vulnerabilities_count': 1,
             'criticalIssue': 0}]

        # The referencing command shows zero counts for the same objects.
        assert list(filter(lambda stats: stats['_id'] == another_command.id,
                           res.json)) == [{
            '_id': another_command.id,
            'command': another_command.command,
            'import_source': 'shell',
            'tool': another_command.tool,
            'user': another_command.user,
            'date': time.mktime(
                another_command.start_date.timetuple()) * 1000,
            'params': another_command.params,
            'hosts_count': 0,
            'services_count': 0,
            'vulnerabilities_count': 0,
            'criticalIssue': 0}]
Exemple #56
0
    def test_vuln_count(self,
                        vulnerability_factory,
                        host_factory,
                        service_factory,
                        workspace_factory,
                        test_client,
                        session,
                        querystring):
        """Per-host vuln totals must combine host-level and service-level
        vulns, and exclude hosts that were not part of the query."""

        workspace1 = workspace_factory.create()
        workspace2 = workspace_factory.create()
        session.add(workspace1)
        session.add(workspace2)
        session.commit()

        # Only the first batch is queried; the other two batches are noise
        # (same workspace but unqueried, and a second workspace entirely).
        hosts_to_query = host_factory.create_batch(HOST_TO_QUERY_AMOUNT, workspace=workspace1)
        hosts_not_to_query = host_factory.create_batch(HOST_NOT_TO_QUERY_AMOUNT, workspace=workspace1)
        hosts_not_to_query_w2 = host_factory.create_batch(HOST_NOT_TO_QUERY_AMOUNT, workspace=workspace2)
        hosts = hosts_to_query + hosts_not_to_query + hosts_not_to_query_w2

        services = []
        vulns = []

        session.add_all(hosts)

        # Each host gets direct vulns plus services that carry their own vulns.
        for host in hosts:
            services += service_factory.create_batch(SERVICE_BY_HOST, host=host, workspace=host.workspace)
            vulns += vulnerability_factory.create_batch(VULN_BY_HOST, host=host, service=None, workspace=host.workspace)

        session.add_all(services)

        for service in services:
            vulns += vulnerability_factory.create_batch(VULN_BY_SERVICE, service=service, host=None,
                                                        workspace=service.workspace)

        session.add_all(vulns)
        session.commit()

        # querystring is parametrized and receives the comma-separated host ids.
        url = urljoin(
            self.url(workspace=workspace1),
            querystring.format(",".join(map(lambda x: str(x.id), hosts_to_query)))
        )
        res = test_client.get(url)

        assert res.status_code == 200

        # Expected total per host: its own vulns plus one batch per service.
        for host in hosts_to_query:
            assert res.json['hosts'][str(host.id)]['total'] == VULN_BY_HOST + VULN_BY_SERVICE * SERVICE_BY_HOST
            assert str(host.id) in res.json['hosts']
Exemple #57
0
 def test_invalid_executor(self, test_client, session, csrf_token):
     """Requesting a run with an unknown executor name yields HTTP 400."""
     agent = AgentFactory.create(workspaces=[self.workspace])
     session.add(agent)
     session.commit()

     payload = {
         'csrf_token': csrf_token,
         'executorData': {
             "args": {"param1": True},
             "executor": "executor_name",
         },
     }
     res = test_client.post(urljoin(self.url(agent), 'run'), json=payload)
     assert res.status_code == 400
Exemple #58
0
    def get_user(self, user_id):
        """Retrieve a user from UAA by their user id

        Args:
            user_id: The id of the user to retrieve

        Raises:
            UAAError: There was an error getting the user

        Returns:
            dict:  An object representing the user

        """
        user_url = urljoin('/Users', user_id)
        return self._request(user_url, 'GET')
Exemple #59
0
    def get_covariates(self, limit=100):
        """Return every covariate in the current project, fetched in pages
        of *limit* items."""
        covariates_url = urljoin(self.links['self'], 'covariates')

        def fetch_page(offset=0):
            # one page of results, starting at the given offset
            resp = self.session.get(
                covariates_url, data={'limit': limit, 'offset': offset})
            return resp.json()['covariates']

        return _get_paginated_results(fetch_page, limit)
Exemple #60
0
    def __init__(self, server, webservice_name='GenericTicketConnector', ssl_context=None):
        """ @param server : the http(s) URL of the root installation of OTRS
                            (e.g: https://tickets.example.net)

            @param webservice_name : the name of the installed webservice
                   (choosen by the otrs admin).
        """
        self.endpoint = urljoin(
            server, 'otrs/nph-genericinterface.pl/Webservice/',
            webservice_name)
        # Credentials and session id are populated later, at login time.
        self.login = self.password = self.session_id = None
        self.ssl_context = ssl_context