    def _get_projects_for_upload(cls, hub_headers, username, rememberme=False):

        try:
            r = cls.hub_pool.request('POST',
                                     '/dynamic/upload/projects',
                                     headers=hub_headers,
                                     redirect=False)

        except (HTTPError, SSLError) as e:
            LOG.error(e)
            response.status_int = 500
            return {'ok': False, 'msg': str(e)}

        if r.status != 200:
            if r.status == 503:
                response.status_int = 503
                # pylint: disable=E1103
                return {'ok': False, 'msg': json_loads(r.data).get('msg', 'Service currently unavailable.')}
                # pylint: enable=E1103
            response.status_int = 500
            return {'ok': False, 'msg': 'Wrong Hub answer.'}

        response.headers['Cache-Control'] = 'no-store, no-cache, max-age=0'

        return {
            'ok': True,
            'cookie': hub_headers.get('Cookie') if rememberme else None,
            'user': username,
            # pylint: disable=E1103
            'projects': json_loads(r.data).get('projects', [])
            # pylint: enable=E1103
        }
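On success, the handler above returns a payload shaped like the following (field names are taken from the code; the project entry is illustrative):

    example_response = {
        'ok': True,
        'cookie': 'hub-session=abc123',  # None unless rememberme is truthy
        'user': 'alice',
        'projects': [{'slug': 'my-game', 'versions': []}],
    }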
Example #2
    def _get_projects_for_upload(cls, hub_headers, username, rememberme=False):

        try:
            r = cls.hub_pool.request('POST',
                                     '/dynamic/upload/projects',
                                     headers=hub_headers,
                                     redirect=False)

        except (HTTPError, SSLError) as e:
            LOG.error(e)
            response.status_int = 500
            return {'ok': False, 'msg': str(e)}

        if r.status != 200:
            if r.status == 503:
                response.status_int = 503
                # pylint: disable=E1103
                return {'ok': False, 'msg': json_loads(r.data).get('msg', 'Service currently unavailable.')}
                # pylint: enable=E1103
            response.status_int = 500
            return {'ok': False, 'msg': 'Wrong Hub answer.'}

        response.headers['Cache-Control'] = 'no-store, no-cache, max-age=0'

    # pylint: disable=E1103
        return {
            'ok': True,
            'cookie': hub_headers.get('Cookie') if rememberme else None,
            'user': username,
            'projects': json_loads(r.data).get('projects', [])
        }
Example #3
    def request_hashes(self, project):
        try:
            min_version = 2
            r = self.hub_pool.urlopen(
                'GET',
                '/dynamic/upload/list?version=%d&project=%s' %
                (min_version, project),
                headers={
                    'Cookie': self.hub_cookie,
                    'Accept-Encoding': 'gzip'
                },
                redirect=False,
                assert_same_host=False)
            if r.status == 200:
                response = json_loads(r.data)
                # pylint: disable=E1103
                if response.get('version', 1) >= min_version:
                    return response['hashes']
                # pylint: enable=E1103

        except (HTTPError, SSLError, TypeError, ValueError):
            pass
        except Exception as e:
            LOG.error(str(e))
        return []
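The helper above gates on a version field before trusting the hashes; a sketch of the response shape it accepts (inferred from the code, values illustrative):

    hub_response = {'version': 2, 'hashes': ['d3b07384d113edec49eaa6238ad5ff00']}
    assert hub_response.get('version', 1) >= 2  # anything older falls through to []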
Example #4
def json_request(method, url, **kwargs):
    """Send a request with a JSON-encoded body and return the parsed JSON response."""
    kwargs.setdefault("headers", {})
    if "body" in kwargs:
        kwargs["headers"]["Content-Type"] = "application/json"
        kwargs["body"] = json_dumps(kwargs["body"])
    parsed, conn = http_connection(url)
    conn.request(method, parsed.path, **kwargs)
    resp = conn.getresponse()
    body = resp.read()
    http_log((url, method), kwargs, resp, body)
    if body:
        try:
            body = json_loads(body)
        except ValueError:
            body = None
    if not body or resp.status < 200 or resp.status >= 300:
        raise ClientException(
            "Auth GET failed",
            http_scheme=parsed.scheme,
            http_host=conn.host,
            http_port=conn.port,
            http_path=parsed.path,
            http_status=resp.status,
            http_reason=resp.reason,
        )
    return resp, body
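A hypothetical call; the URL and payload are placeholders, and the body keyword is JSON-encoded by the function itself:

    resp, body = json_request('POST', 'https://auth.example.com/v2.0/tokens',
                              body={'auth': {'passwordCredentials': {'username': 'u',
                                                                     'password': 'p'}}})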
Example #5
def login(connection, options):
    username = options.user
    password = options.password

    if not options.silent:
        log('Login as "%s".' % username)

    credentials = {"login": username, "password": password, "source": "/tool"}

    try:
        r = connection.request("POST", "/dynamic/login", fields=credentials, retries=1, redirect=False)
    except (HTTPError, SSLError):
        error("Connection to Hub failed!")
        exit(-1)

    if r.status != 200:
        if r.status == 301:
            redirect_location = r.headers.get("location", "")
            end_domain = redirect_location.find("/dynamic/login")
            error('Login is being redirected to "%s". Please verify the Hub URL.' % redirect_location[:end_domain])
        else:
            error("Wrong user login information!")
        exit(-1)

    cookie = r.headers.get("set-cookie", None)
    login_info = json_loads(r.data)

    # pylint: disable=E1103
    if not cookie or HUB_COOKIE_NAME not in cookie or login_info.get("source") != credentials["source"]:
        error("Hub login failed!")
        exit(-1)
    # pylint: enable=E1103

    return cookie
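A hypothetical invocation, assuming an urllib3 connection pool and any object exposing user, password, and silent attributes (all names illustrative):

    from urllib3 import connection_from_url

    class Options(object):
        user = 'alice'
        password = 'secret'
        silent = False

    connection = connection_from_url('https://hub.example.com', timeout=8.0)
    cookie = login(connection, Options())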
Example #6
def json_request(method, url, **kwargs):
    """Takes a request in json parse it and return in json"""
    kwargs.setdefault('headers', {})
    if 'body' in kwargs:
        kwargs['headers']['Content-Type'] = 'application/json'
        kwargs['body'] = json_dumps(kwargs['body'])
    parsed, conn = http_connection(url)
    conn.request(method, parsed.path, **kwargs)
    resp = conn.getresponse()
    body = resp.read()
    http_log((
        url,
        method,
    ), kwargs, resp, body)
    if body:
        try:
            body = json_loads(body)
        except ValueError:
            body = None
    if not body or resp.status < 200 or resp.status >= 300:
        raise ClientException('Auth GET failed',
                              http_scheme=parsed.scheme,
                              http_host=conn.host,
                              http_port=conn.port,
                              http_path=parsed.path,
                              http_status=resp.status,
                              http_reason=resp.reason)
    return resp, body
Example #7
    def test_search_cell(self):
        (lat, lon), all_cells = random.choice(self.TOWER_DATA)

        expected_lat = int(lat * 1000)
        expected_lon = int(lon * 1000)

        query_data = {"radio": '', "cell": []}
        for cell_data in all_cells:
            radio_name = RADIO_MAP[cell_data['radio']]
            if query_data['radio'] == '':
                query_data['radio'] = radio_name
            query_data['cell'].append(dict(radio=radio_name,
                                           cid=cell_data['cid'],
                                           mcc=cell_data['mcc'],
                                           mnc=cell_data['mnc'],
                                           lac=cell_data['lac']))
            jdata = json.dumps(query_data)
            res = self.session.post(HOST + '/v1/search?key=test', jdata)
            self.assertEqual(res.status_code, 200)
            jdata = json_loads(res.content)
            if jdata['status'] != 'not_found':
                actual_lat = int(jdata['lat'] * 1000)
                actual_lon = int(jdata['lon'] * 1000)
                self.assertEquals(actual_lat, expected_lat)
                self.assertEquals(actual_lon, expected_lon)
Example #8
def parse_state(state):
    if isinstance(state, str):
        state = json_loads(state)
    items = []
    assert isinstance(state, dict)
    for k, v in state.items():
        if isinstance(v, dict):
            is_meta_obj = '__value' in v or '__check' in v or '__watchdog' in v
            if is_meta_obj:
                items.append({
                    'key': k,
                    'value': v.get('__value'),
                    'unit': v.get('__unit'),
                    'counter': v.get('__counter'),
                    'check': v.get('__check'),
                    'watchdog': v.get('__watchdog'),
                })
            else:
                items.append({
                    'key': k,
                    'items': parse_state(v),
                })
        else:
            items.append({
                'key': k,
                'value': v,
            })
    return items
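A short example of what parse_state produces for a nested state document (input and expected output are illustrative; item order follows dict iteration order):

    state = {
        'cpu': {'__value': 42, '__unit': '%', '__check': 'ok'},
        'disks': {'sda': {'__value': 120, '__unit': 'GB'}},
        'hostname': 'node-1',
    }
    items = parse_state(state)
    # -> [{'key': 'cpu', 'value': 42, 'unit': '%', 'counter': None, 'check': 'ok', 'watchdog': None},
    #     {'key': 'disks', 'items': [{'key': 'sda', 'value': 120, 'unit': 'GB', ...}]},
    #     {'key': 'hostname', 'value': 'node-1'}]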
Example #9
def JSONLocationDictDecoder(dct):
    if '__JSONTupleKeyedDict__' in dct:
        tmp = {}
        for k, v in dct['dict'].items():
            tmp[tuple(json_loads(k))] = v
        return tmp
    return dct
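The decoder reverses a convention in which tuple keys are serialized as JSON arrays; a round-trip sketch under that assumption (the encoding side is inferred, it is not shown in the source):

    from json import dumps as json_dumps, loads as json_loads

    original = {(1, 2): 'a', (3, 4): 'b'}
    encoded = json_dumps({
        '__JSONTupleKeyedDict__': True,
        'dict': dict((json_dumps(list(k)), v) for k, v in original.items()),
    })
    decoded = json_loads(encoded, object_hook=JSONLocationDictDecoder)
    assert decoded == original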
Example #10
def _postupload_progress(deploy_info, connection, cookie, silent, verbose):
    if silent:
        sleep_step = 1.0
    elif verbose:
        log('Post processing:')
        sleep_step = 0.2
    else:
        log('Post processing files...')
        sleep_step = 0.4

    if not deploy_info.hub_session:
        error('No deploy session found.')
        return -1

    old_progress = 0

    while True:
        sleep(sleep_step)

        if deploy_info.error:
            error(deploy_info.error)
            return -1

        try:
            r = connection.request('POST',
                                   '/dynamic/upload/progress/%s' %
                                   deploy_info.hub_session,
                                   headers={'Cookie': cookie},
                                   redirect=False)
        except (HTTPError, SSLError) as e:
            error(e)
            error('Post-upload progress check failed.')
            return -1

        if r.status != 200:
            error('Wrong Hub answer.')
            return -1

        r_data = json_loads(r.data)
        # pylint: disable=E1103
        current_progress = int(r_data.get('progress', -1))
        error_msg = str(r_data.get('error', ''))
        # pylint: enable=E1103

        if error_msg:
            error('Post-upload processing failed: %s' % error_msg)
            return -1
        if -1 == current_progress:
            error('Invalid post-upload progress.')
            return -1

        if verbose and not silent:
            if old_progress != current_progress:
                log('Progress: %u%%' % current_progress)
            old_progress = current_progress

        if 100 <= current_progress:
            if not silent:
                log('Post processing completed.')
            return 0
Example #11
def JSONLocationDictDecoder(dct):
    if '__JSONTupleKeyedDict__' in dct:
        tmp = {}
        for k, v in dct['dict'].items():
            tmp[tuple(json_loads(k))] = v
        return tmp
    return dct
Example #12
    def test_search_cell(self):
        (lat, lon), all_cells = random.choice(self.TOWER_DATA)

        expected_lat = int(lat * 1000)
        expected_lon = int(lon * 1000)

        query_data = {"radio": '', "cell": []}
        for cell_data in all_cells:
            radio_name = RADIO_MAP[cell_data['radio']]
            if query_data['radio'] == '':
                query_data['radio'] = radio_name
            query_data['cell'].append(
                dict(radio=radio_name,
                     cid=cell_data['cid'],
                     mcc=cell_data['mcc'],
                     mnc=cell_data['mnc'],
                     lac=cell_data['lac']))
            jdata = json.dumps(query_data)
            res = self.session.post(HOST + '/v1/search?key=test', jdata)
            self.assertEqual(res.status_code, 200)
            jdata = json_loads(res.content)
            if jdata['status'] != 'not_found':
                actual_lat = int(jdata['lat'] * 1000)
                actual_lon = int(jdata['lon'] * 1000)
                self.assertEquals(actual_lat, expected_lat)
                self.assertEquals(actual_lon, expected_lon)
Example #13
def write_to_file(options, data, filename=None, output_path=None, force_overwrite=False):
    if not filename:
        filename = "%s-%s-%s.json" % (options.project, options.type, options.daterange.filename_str())

    try:
        if not output_path:
            output_path = normpath(path_join(options.outputdir, filename))

        if path_exists(output_path):
            if options.overwrite or force_overwrite:
                if not options.silent:
                    warning("Overwriting existing file: %s" % output_path)
            elif not options.silent:
                warning("Skipping existing file: %s" % output_path)
                return

        indentation = None
        if options.indent:
            indentation = 4
            if isinstance(data, str):
                data = json_loads(data)

        with open(output_path, "wb") as fout:
            if isinstance(data, str):
                fout.write(data)
            else:
                json_dump(data, fout, indent=indentation)

        if options.verbose:
            log("Finished writing to: %s" % output_path)

    except (IOError, OSError) as e:
        error(e)
        exit(-1)
Example #14
def get_auth(url, user, key, region, snet=False):
    """
    Get authentication/authorization credentials.

    The snet parameter is used for Rackspace's ServiceNet internal network
    implementation. In this function, it simply adds *snet-* to the beginning
    of the host name for the returned storage URL. With Rackspace Cloud Files,
    use of this network path causes no bandwidth charges but requires the
    client to be running on Rackspace's ServiceNet network.

    :param url: authentication/authorization URL
    :param user: user to authenticate as
    :param key: key or password for authorization
    :param region: service region [dfw, ord, syd, iad, etc]
    :param snet: use SERVICENET internal network (see above), default is False
    :returns: tuple of (storage URL, auth token)
    :raises ClientException: HTTP GET request to auth URL failed
    """
    swift_service = 'object-store'
    parsed, conn = http_connection(url)
    params = json_dumps({
        "auth": {
            "RAX-KSKEY:apiKeyCredentials": {
                "username": user,
                "apiKey": key
            }
        }
    })
    conn.request('POST', parsed.path, params, {
        'Accept': 'application/json',
        'Content-Type': 'application/json'
    })
    resp = conn.getresponse()
    data = json_loads(resp.read())
    if resp.status < 200 or resp.status >= 300:
        raise ClientException('Auth POST failed',
                              http_scheme=parsed.scheme,
                              http_host=conn.host,
                              http_port=conn.port,
                              http_path=parsed.path,
                              http_status=resp.status,
                              http_reason=resp.reason)

    try:
        token = data['access']['token']['id']
        for service in data['access']['serviceCatalog']:
            if service['type'] == swift_service:
                for points in service['endpoints']:
                    if points['region'] == region:
                        if snet:
                            storage_url = points['internalURL']
                        else:
                            storage_url = points['publicURL']
                        return storage_url, token
                raise ClientException('Region %s not found' % region)
        raise ClientException('Service Type %s not found' % swift_service)
    except KeyError:
        raise ClientException(
            'Inconsistent Service Catalog back from auth: %s' % data)
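An illustrative call; the endpoint, credentials, and region are placeholders:

    storage_url, token = get_auth('https://identity.api.example.com/v2.0/tokens',
                                  'username', 'api-key', region='dfw')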
Example #15
def get_account(url, token, marker=None, limit=None, prefix=None,
                http_conn=None, full_listing=False):
    """
    Get a listing of containers for the account.

    :param url: storage URL
    :param token: auth token
    :param marker: marker query
    :param limit: limit query
    :param prefix: prefix query
    :param http_conn: HTTP connection object (If None, it will create the
                      conn object)
    :param full_listing: if True, return a full listing, else returns a max
                         of 10000 listings
    :returns: a tuple of (response headers, a list of containers) The response
              headers will be a dict and all header names will be lowercase.
    :raises ClientException: HTTP GET request failed
    """
    if not http_conn:
        http_conn = http_connection(url)
    if full_listing:
        rv = get_account(url, token, marker, limit, prefix, http_conn)
        listing = rv[1]
        while listing:
            marker = listing[-1]['name']
            listing = \
                get_account(url, token, marker, limit, prefix, http_conn)[1]
            if listing:
                rv[1].extend(listing)
        return rv
    parsed, conn = http_conn
    qs = 'format=json'
    if marker:
        qs += '&marker=%s' % quote(marker)
    if limit:
        qs += '&limit=%d' % limit
    if prefix:
        qs += '&prefix=%s' % quote(prefix)
    full_path = '%s?%s' % (parsed.path, qs)
    headers = {'X-Auth-Token': token}
    method = 'GET'
    conn.request(method, full_path, '', headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(("%s?%s" % (url, qs), method,), {'headers': headers}, resp, body)

    resp_headers = {}
    for header, value in resp.getheaders():
        resp_headers[header.lower()] = value
    if resp.status < 200 or resp.status >= 300:
        raise ClientException('Account GET failed', http_scheme=parsed.scheme,
                              http_host=conn.host, http_port=conn.port,
                              http_path=parsed.path, http_query=qs,
                              http_status=resp.status, http_reason=resp.reason,
                              http_response_content=body)
    if resp.status == 204:
        return resp_headers, []
    return resp_headers, json_loads(body)
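Typical use with the credentials returned by get_auth (illustrative):

    headers, containers = get_account(storage_url, token, limit=100)
    for container in containers:
        print(container['name'])  # each entry is a dict from the JSON listing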
Example #16
def inline_array_events_s3(options, today_log, array_files_list, enc_key,
                           connection):

    verbose = options.verbose
    to_sort = set()

    try:
        for index, filename in enumerate(array_files_list):
            # Format: 'https://bucket.s3.amazonaws.com/gamefolder/arrayevents/date(seconds)/objectid.bin?
            #          AWSAccessKeyId=keyid&Expires=timestamp&Signature=signature'
            # The objectid doesn't correspond to a database entry but is used for uniqueness and its timestamp
            filename_cleaned = filename.split('?', 1)[0].rsplit('/', 1)[-1]
            event_objectid = filename_cleaned.split('.', 1)[0]
            timestamp = get_objectid_timestamp(event_objectid)
            formatted_timestamp = strftime('%Y-%m-%d %H:%M:%S',
                                           gmtime(timestamp))

            if verbose:
                log('Requesting events file ' + str(index + 1) +
                    ' submitted at ' + formatted_timestamp)
            r = connection.request('GET', filename, redirect=False)

            # pylint: disable=E1103
            if r.status != 200:
                error_msg = 'Couldn\'t download event %d.' % (index + 1)
                if r.data.get('msg', None):
                    error_msg += ' ' + r.data['msg']
                error(str(r.status) + error_msg)
                exit(-1)
            # pylint: enable=E1103

            r_data = decrypt_data(r.data, enc_key)
            r_data = json_loads(zlib_decompress(r_data))

            if not isinstance(r_data, list):
                r_data = [r_data]

            for event in r_data:
                slug = event['slug']
                del event['slug']
                event['time'] = strftime('%Y-%m-%d %H:%M:%S',
                                         gmtime(event['time']))

                if slug not in today_log:
                    today_log[slug] = {'playEvents': [], 'customEvents': []}

                today_log[slug]['customEvents'].append(event)
                # Maintain a list of slugs so the customEvents can be sorted by date, keeping added array
                # events in order without unnecessarily sorting large lists that received no array events
                to_sort.add(slug)

        for slug in to_sort:
            today_log[slug]['customEvents'].sort(key=lambda k: k['time'])

        return today_log

    except (HTTPError, SSLError) as e:
        error(e)
        exit(-1)
Example #17
def get_account(url, token, marker=None, limit=None, prefix=None,
                http_conn=None, full_listing=False):
    """
    Get a listing of containers for the account.

    :param url: storage URL
    :param token: auth token
    :param marker: marker query
    :param limit: limit query
    :param prefix: prefix query
    :param http_conn: HTTP connection object (If None, it will create the
                      conn object)
    :param full_listing: if True, return a full listing, else returns a max
                         of 10000 listings
    :returns: a tuple of (response headers, a list of containers) The response
              headers will be a dict and all header names will be lowercase.
    :raises ClientException: HTTP GET request failed
    """
    if not http_conn:
        http_conn = http_connection(url)
    if full_listing:
        rv = get_account(url, token, marker, limit, prefix, http_conn)
        listing = rv[1]
        while listing:
            marker = listing[-1]['name']
            listing = \
                get_account(url, token, marker, limit, prefix, http_conn)[1]
            if listing:
                rv[1].extend(listing)
        return rv
    parsed, conn = http_conn
    qs = 'format=json'
    if marker:
        qs += '&marker=%s' % quote(marker)
    if limit:
        qs += '&limit=%d' % limit
    if prefix:
        qs += '&prefix=%s' % quote(prefix)
    full_path = '%s?%s' % (parsed.path, qs)
    headers = {'X-Auth-Token': token}
    conn.request('GET', full_path, '',
                 headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(("%s?%s" % (url, qs), 'GET',), {'headers': headers}, resp, body)

    resp_headers = {}
    for header, value in resp.getheaders():
        resp_headers[header.lower()] = value
    if resp.status < 200 or resp.status >= 300:
        raise ClientException('Account GET failed', http_scheme=parsed.scheme,
                              http_host=conn.host, http_port=conn.port,
                              http_path=parsed.path, http_query=qs,
                              http_status=resp.status, http_reason=resp.reason,
                              http_response_content=body)
    if resp.status == 204:
        return resp_headers, []
    return resp_headers, json_loads(body)
Example #18
def _postupload_progress(deploy_info, connection, cookie, silent, verbose):
    if silent:
        sleep_step = 1.0
    elif verbose:
        log('Post processing:')
        sleep_step = 0.2
    else:
        log('Post processing files...')
        sleep_step = 0.4

    if not deploy_info.hub_session:
        error('No deploy session found.')
        return -1

    old_progress = 0

    while True:
        sleep(sleep_step)

        if deploy_info.error:
            error(deploy_info.error)
            return -1

        try:
            r = connection.request('POST',
                                   '/dynamic/upload/progress/%s' % deploy_info.hub_session,
                                   headers={'Cookie': cookie},
                                   redirect=False)
        except (HTTPError, SSLError) as e:
            error(e)
            error('Post-upload progress check failed.')
            return -1

        if r.status != 200:
            error('Wrong Hub answer.')
            return -1

        r_data = json_loads(r.data)
        # pylint: disable=E1103
        current_progress = int(r_data.get('progress', -1))
        error_msg = str(r_data.get('error', ''))
        # pylint: enable=E1103

        if error_msg:
            error('Post-upload processing failed: %s' % error_msg)
            return -1
        if -1 == current_progress:
            error('Invalid post-upload progress.')
            return -1

        if verbose and not silent:
            if old_progress != current_progress:
                log('Progress: %u%%' % current_progress)
            old_progress = current_progress

        if 100 <= current_progress:
            if not silent:
                log('Post processing completed.')
            return 0
Example #19
def loads(s, *args, **kwargs):
    """Helper to log actual value which failed to be parsed"""
    try:
        return json_loads(s, *args, **kwargs)
    except:
        lgr.error("Failed to load content from %r with args=%r kwargs=%r" %
                  (s, args, kwargs))
        raise
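The wrapper behaves exactly like json_loads but logs the offending payload before re-raising; lgr is assumed to be a module-level logger:

    loads('{"ok": true}')  # -> {'ok': True}
    loads('not json')      # logs the failing value via lgr.error, then raises ValueError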
Example #20
    def __get_config_data(self, inConfigPath):
        """
        Given a path, return a Python object
        built from a JSON object.
        """
        with open(inConfigPath) as config_file:
            config_string = config_file.read()
            config_data = json_loads(config_string)
            return config_data
Example #21
    def _get_config_data(self, inConfigPath):
        """
        Given a file path to a JSON config file, open it and
        convert the contents to a Python object.
        """
        with open(inConfigPath) as config_file:
            config_string = config_file.read()
            config_data = json_loads(config_string)
            return config_data
Example #22
def get_account(url, token, marker=None, limit=None, prefix=None, http_conn=None, full_listing=False):
    """
    Get a listing of containers for the account.

    :param url: storage URL
    :param token: auth token
    :param marker: marker query
    :param limit: limit query
    :param prefix: prefix query
    :param http_conn: HTTP connection object (If None, it will create the
                      conn object)
    :param full_listing: if True, return a full listing, else returns a max
                         of 10000 listings
    :returns: a tuple of (response headers, a list of containers) The response
              headers will be a dict and all header names will be lowercase.
    :raises ClientException: HTTP GET request failed
    """
    if not http_conn:
        http_conn = http_connection(url)
    if full_listing:
        rv = get_account(url, token, marker, limit, prefix, http_conn)
        listing = rv[1]
        while listing:
            marker = listing[-1]["name"]
            listing = get_account(url, token, marker, limit, prefix, http_conn)[1]
            if listing:
                rv[1].extend(listing)
        return rv
    parsed, conn = http_conn
    qs = "format=json"
    if marker:
        qs += "&marker=%s" % quote(marker)
    if limit:
        qs += "&limit=%d" % limit
    if prefix:
        qs += "&prefix=%s" % quote(prefix)
    conn.request("GET", "%s?%s" % (parsed.path, qs), "", {"X-Auth-Token": token})
    resp = conn.getresponse()
    resp_headers = {}
    for header, value in resp.getheaders():
        resp_headers[header.lower()] = value
    if resp.status < 200 or resp.status >= 300:
        resp.read()
        raise ClientException(
            "Account GET failed",
            http_scheme=parsed.scheme,
            http_host=conn.host,
            http_port=conn.port,
            http_path=parsed.path,
            http_query=qs,
            http_status=resp.status,
            http_reason=resp.reason,
        )
    if resp.status == 204:
        resp.read()
        return resp_headers, []
    return resp_headers, json_loads(resp.read())
Example #23
def _check_project(connection, options, cookie):
    project = options.project
    projectversion = options.projectversion
    projectversion_title = options.projectversiontitle

    try:
        r = connection.request('POST',
                               '/dynamic/upload/projects',
                               headers={'Cookie': cookie},
                               redirect=False)
    except (HTTPError, SSLError) as e:
        error(e)
        exit(-1)

    if r.status != 200:
        error('Wrong Hub answer!')
        exit(-1)

    # pylint: disable=E1103
    projects = json_loads(r.data).get('projects', [])
    # pylint: enable=E1103

    upload_access = False
    new_version = True
    for project_info in projects:
        if project_info['slug'] == project:
            upload_access = True
            for version_info in project_info['versions']:
                if version_info['version'] == projectversion:
                    new_version = False
                    # Use the supplied project version title or the existing one as a fallback
                    existingversion_title = version_info['title']
                    projectversion_title = projectversion_title or existingversion_title
                    break

    # If projectversion_title is still unset this is a new version with no supplied title, default to the version
    projectversion_title = projectversion_title or projectversion

    if not upload_access:
        error(
            'Project "%s" does not exist or you are not authorized to upload new versions!'
            % project)
        exit(-1)

    if not options.silent:
        if new_version:
            log('Uploading to new version "%s" on project "%s".' %
                (projectversion, project))
        else:
            log('Uploading to existing version "%s" on project "%s".' %
                (projectversion, project))
            if projectversion_title != existingversion_title:
                log('Changing project version title from "%s" to "%s".' %
                    (existingversion_title, projectversion_title))

    return (project, projectversion, projectversion_title)
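The /dynamic/upload/projects response consumed above is expected to look like this (field names from the code; values illustrative):

    projects_response = {
        'projects': [
            {'slug': 'my-game',
             'versions': [{'version': '1.0', 'title': 'First release'}]},
        ],
    }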
Example #24
def loads(s, *args, **kwargs):
    """Helper to log actual value which failed to be parsed"""
    try:
        return json_loads(s, *args, **kwargs)
    except:
        lgr.error(
            "Failed to load content from %r with args=%r kwargs=%r"
            % (s, args, kwargs)
        )
        raise
Example #25
def inline_array_events_s3(options, today_log, array_files_list, enc_key, connection):

    verbose = options.verbose
    to_sort = set()

    try:
        for index, filename in enumerate(array_files_list):
            # Format: 'https://bucket.s3.amazonaws.com/gamefolder/arrayevents/date(seconds)/objectid.bin?
            #          AWSAccessKeyId=keyid&Expires=timestamp&Signature=signature'
            # The objectid doesn't correspond to a database entry but is used for uniqueness and its timestamp
            filename_cleaned = filename.split("?", 1)[0].rsplit("/", 1)[-1]
            event_objectid = filename_cleaned.split(".", 1)[0]
            timestamp = get_objectid_timestamp(event_objectid)
            formatted_timestamp = strftime("%Y-%m-%d %H:%M:%S", gmtime(timestamp))

            if verbose:
                log("Requesting events file " + str(index + 1) + " submitted at " + formatted_timestamp)
            r = connection.request("GET", filename, redirect=False)

            # pylint: disable=E1103
            if r.status != 200:
                error_msg = "Couldn't download event %d." % (index + 1)
                if r.data.get("msg", None):
                    error_msg += " " + r.data["msg"]
                error(str(r.status) + error_msg)
                exit(-1)
            # pylint: enable=E1103

            r_data = decrypt_data(r.data, enc_key)
            r_data = json_loads(zlib_decompress(r_data))

            if not isinstance(r_data, list):
                r_data = [r_data]

            for event in r_data:
                slug = event["slug"]
                del event["slug"]
                event["time"] = strftime("%Y-%m-%d %H:%M:%S", gmtime(event["time"]))

                if slug not in today_log:
                    today_log[slug] = {"playEvents": [], "customEvents": []}

                today_log[slug]["customEvents"].append(event)
                # Maintain a list of slugs so the customEvents can be sorted by date, keeping added array
                # events in order without unnecessarily sorting large lists that received no array events
                to_sort.add(slug)

        for slug in to_sort:
            today_log[slug]["customEvents"].sort(key=lambda k: k["time"])

        return today_log

    except (HTTPError, SSLError) as e:
        error(e)
        exit(-1)
Example #26
def inline_array_events_local(options, today_log, array_files_list, enc_key):

    verbose = options.verbose
    to_sort = set()

    try:
        index = 0
        for index, filename in enumerate(array_files_list):
            # Format: 'eventlogspath/gamefolder/arrayevents/date(seconds)/objectid.bin'
            # The objectid doesn't correspond to a database entry but is used for uniqueness and its timestamp

            filename = filename.replace('\\', '/')
            event_objectid = filename.rsplit('/', 1)[-1].split('.', 1)[0]
            timestamp = get_objectid_timestamp(event_objectid)
            formatted_timestamp = strftime('%Y-%m-%d %H:%M:%S',
                                           gmtime(timestamp))

            if verbose:
                log('Retrieving array events file ' + str(index + 1) +
                    ' submitted at ' + formatted_timestamp)

            with open(filename, 'rb') as fin:
                file_content = fin.read()
            file_content = decrypt_data(file_content, enc_key)
            file_content = json_loads(zlib_decompress(file_content))

            if not isinstance(file_content, list):
                file_content = [file_content]
            for event in file_content:
                slug = event['slug']
                del event['slug']
                # Some older files had no timestamp in the file data itself in which case we use the timestamp
                # on the filename
                if 'time' in event:
                    event['time'] = strftime('%Y-%m-%d %H:%M:%S',
                                             gmtime(event['time']))
                else:
                    event['time'] = formatted_timestamp

                if slug not in today_log:
                    today_log[slug] = {'playEvents': [], 'customEvents': []}

                today_log[slug]['customEvents'].append(event)
                # Maintain a list of slugs so the customEvents can be sorted by date, keeping added array
                # events in order without unnecessarily sorting large lists that received no array events
                to_sort.add(slug)

        for slug in to_sort:
            today_log[slug]['customEvents'].sort(key=lambda k: k['time'])

        return today_log

    except (IOError, OSError) as e:
        error(e)
        exit(-1)
Example #27
    def batch_check_files(self, files, checked_queue_put):
        urlopen = self.hub_pool.urlopen
        base_url = self._base_check_url
        url_format = self._check_url_format
        get_upload_token = _get_upload_file_token
        timeout = self.hub_timeout
        if self._batch_checks:
            query = '&'.join((url_format % (get_upload_token(i, f[1]), f[3], f[2])) for i, f in enumerate(files))
            r = urlopen('GET',
                        base_url + query,
                        redirect=False,
                        assert_same_host=False,
                        timeout=timeout)
            if r.status == 200:
                # pylint: disable=E1103
                missing_files = set(json_loads(r.data).get('missing', []))
                # pylint: enable=E1103
                for i, f in enumerate(files):
                    if get_upload_token(i, f[1]) in missing_files:
                        # Update meta data cache and upload
                        checked_queue_put(f)
                    else:
                        # Only needs to update meta data cache
                        checked_queue_put((f[1], f[2], f[3], f[4], f[5]))
                return

            else:
                f = files.pop(0)
                if r.status == 304:
                    # First one only needs to update meta data cache
                    checked_queue_put((f[1], f[2], f[3], f[4], f[5]))
                elif r.status == 404:
                    # First one needs to update meta data cache and to upload
                    checked_queue_put(f)
                else:
                    raise Exception(r.reason)
                if len(files) == 1:
                    return
                # Legacy format, check one by one...
                self._batch_checks = False
                r = None

        for f in files:
            query = url_format % (basename(f[1]), f[3], f[2])
            if urlopen('GET',
                       base_url + query,
                       redirect=False,
                       assert_same_host=False,
                       timeout=timeout).status == 304:
                # Only needs to update meta data cache
                checked_queue_put((f[1], f[2], f[3], f[4], f[5]))
            else:
                # Update meta data cache and upload
                checked_queue_put(f)
Example #28
    def get_configuration(self, user):
        """
        Should be replaced by an intelligent proxy object.
        """
        try:
            configuration = json_loads(user.configuration, parse_float=Decimal)
        except:
            configuration = {}
        if not isinstance(configuration, dict):
            configuration = {}
        return configuration
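Passing parse_float=Decimal keeps fractional configuration values exact instead of converting them to binary floats; a quick illustration:

    from decimal import Decimal
    from json import loads as json_loads

    json_loads('{"ratio": 1.10}', parse_float=Decimal)  # {'ratio': Decimal('1.10')}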
Example #29
    def login(cls):
        """
        Start deploying the game.
        """
        response.headers['Cache-Control'] = 'no-store, no-cache, max-age=0'

        hub_pool = connection_from_url(cls.base_url, maxsize=8, timeout=8.0)
        if not hub_pool or not cls.cookie_name:
            response.status_int = 500
            return {'ok': False, 'msg': 'Wrong deployment configuration.'}

        cls.hub_pool = hub_pool

        form = request.params
        try:
            login_name = form['login']
            credentials = {
                'login': login_name,
                'password': form['password'],
                'source': '/local'
            }
        except KeyError:
            response.status_int = 400
            return {'ok': False, 'msg': 'Missing user login information.'}

        try:
            r = hub_pool.request('POST',
                                 '/dynamic/login',
                                 fields=credentials,
                                 retries=1,
                                 redirect=False)
        except (HTTPError, SSLError) as e:
            LOG.error(e)
            response.status_int = 500
            return {'ok': False, 'msg': str(e)}

        if r.status != 200:
            response.status_int = 400
            return {'ok': False, 'msg': 'Wrong user login information.'}

        cookie = r.headers.get('set-cookie', None)
        login_info = json_loads(r.data)

        # pylint: disable=E1103
        if not cookie or cls.cookie_name not in cookie or login_info.get(
                'source') != credentials['source']:
            response.status_int = 400
            return {'ok': False, 'msg': 'Wrong user login information.'}
        # pylint: enable=E1103

        hub_headers = {'Cookie': cookie}

        return cls._get_projects_for_upload(hub_headers, login_name,
                                            form.get('rememberme'))
Example #30
    def batch_check_files(self, files, checked_queue_put):
        urlopen = self.hub_pool.urlopen
        base_url = self._base_check_url
        url_format = self._check_url_format
        get_upload_token = _get_upload_file_token
        timeout = self.hub_timeout
        if self._batch_checks:
            query = '&'.join((url_format % (get_upload_token(i, f[1]), f[3], f[2])) for i, f in enumerate(files))
            r = urlopen('GET',
                        base_url + query,
                        redirect=False,
                        assert_same_host=False,
                        timeout=timeout)
            if r.status == 200:
                # pylint: disable=E1103
                missing_files = set(json_loads(r.data).get('missing', []))
                # pylint: enable=E1103
                for i, f in enumerate(files):
                    if get_upload_token(i, f[1]) in missing_files:
                        # Update meta data cache and upload
                        checked_queue_put(f)
                    else:
                        # Only needs to update meta data cache
                        checked_queue_put((f[1], f[2], f[3], f[4], f[5]))
                return

            else:
                f = files.pop(0)
                if r.status == 304:
                    # First one only needs to update meta data cache
                    checked_queue_put((f[1], f[2], f[3], f[4], f[5]))
                elif r.status == 404:
                    # First one needs to update meta data cache and to upload
                    checked_queue_put(f)
                else:
                    raise Exception(r.reason)
                if len(files) == 1:
                    return
                # Legacy format, check one by one...
                self._batch_checks = False
                r = None

        for f in files:
            query = url_format % (basename(f[1]), f[3], f[2])
            if urlopen('GET',
                       base_url + query,
                       redirect=False,
                       assert_same_host=False,
                       timeout=timeout).status == 304:
                # Only needs to update meta data cache
                checked_queue_put((f[1], f[2], f[3], f[4], f[5]))
            else:
                # Update meta data cache and upload
                checked_queue_put(f)
Example #31
    def _get_config_data(self, path):
        """
        Given a file path to a JSON config file, open and
        convert to a python object.

        :param str path: full pathway to a JSON-formatted config file.
        """
        with open(path) as config_file:
            config_string = config_file.read()
            config_data = json_loads(config_string)
            return config_data
Example #32
def load_pack_info(filename, scon=None, file=None):
    if not file:
        f = scon.get_object(filename)
    else:
        f = file
    if not f:
        return None
    try:
        return json_loads(zlib.decompress(f.read()))
    finally:
        f.close()
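Either a Swift-like connection (scon) or an already-open file object can supply the data, which must be zlib-compressed JSON; a sketch using a local file (name illustrative):

    info = load_pack_info('pack-1234.info', file=open('pack-1234.info', 'rb'))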
Example #33
    def load_session(self, session_id):
        assert isinstance(session_id, str)
        hashed_id = get_hashed_session_id(session_id)
        with self._engine.begin() as conn:
            r = conn.execute(
                select([t_sessions]).where(t_sessions.c.id_hash == hashed_id))
            row = r.first()
            if row:
                assert row[t_sessions.c.id_hash] == hashed_id
                return json_loads(row[t_sessions.c.data_json])
            else:
                return None
Example #34
def inline_array_events_local(options, today_log, array_files_list, enc_key):

    verbose = options.verbose
    to_sort = set()

    try:
        index = 0
        for index, filename in enumerate(array_files_list):
            # Format: 'eventlogspath/gamefolder/arrayevents/date(seconds)/objectid.bin'
            # The objectid doesn't correspond to a database entry but is used for uniqueness and its timestamp

            filename = filename.replace('\\', '/')
            event_objectid = filename.rsplit('/', 1)[-1].split('.', 1)[0]
            timestamp = get_objectid_timestamp(event_objectid)
            formatted_timestamp = strftime('%Y-%m-%d %H:%M:%S', gmtime(timestamp))

            if verbose:
                log('Retrieving array events file ' + str(index + 1) + ' submitted at ' + formatted_timestamp)

            with open(filename, 'rb') as fin:
                file_content = fin.read()
            file_content = decrypt_data(file_content, enc_key)
            file_content = json_loads(zlib_decompress(file_content))

            if not isinstance(file_content, list):
                file_content = [file_content]
            for event in file_content:
                slug = event['slug']
                del event['slug']
                # Some older files had no timestamp in the file data itself in which case we use the timestamp
                # on the filename
                if 'time' in event:
                    event['time'] = strftime('%Y-%m-%d %H:%M:%S', gmtime(event['time']))
                else:
                    event['time'] = formatted_timestamp

                if slug not in today_log:
                    today_log[slug] = { 'playEvents': [], 'customEvents': [] }

                today_log[slug]['customEvents'].append(event)
                # Maintain a list of slugs so the customEvents can be sorted by date, keeping added array
                # events in order without unnecessarily sorting large lists that received no array events
                to_sort.add(slug)

        for slug in to_sort:
            today_log[slug]['customEvents'].sort(key=lambda k: k['time'])

        return today_log

    except (IOError, OSError) as e:
        error(e)
        exit(-1)
Example #35
def _check_project(connection, options, cookie):
    project = options.project
    projectversion = options.projectversion
    projectversion_title = options.projectversiontitle

    try:
        r = connection.request('POST',
                               '/dynamic/upload/projects',
                               headers={'Cookie': cookie},
                               redirect=False)
    except (HTTPError, SSLError) as e:
        error(e)
        exit(-1)

    if r.status != 200:
        error('Wrong Hub answer!')
        exit(-1)

    # pylint: disable=E1103
    projects = json_loads(r.data).get('projects', [])
    # pylint: enable=E1103

    upload_access = False
    new_version = True
    for project_info in projects:
        if project_info['slug'] == project:
            upload_access = True
            for version_info in project_info['versions']:
                if version_info['version'] == projectversion:
                    new_version = False
                    # Use the supplied project version title or the existing one as a fallback
                    existingversion_title = version_info['title']
                    projectversion_title = projectversion_title or existingversion_title
                    break

    # If projectversion_title is still unset this is a new version with no supplied title, default to the version
    projectversion_title = projectversion_title or projectversion

    if not upload_access:
        error('Project "%s" does not exist or you are not authorized to upload new versions!' % project)
        exit(-1)

    if not options.silent:
        if new_version:
            log('Uploading to new version "%s" on project "%s".' % (projectversion, project))
        else:
            log('Uploading to existing version "%s" on project "%s".' % (projectversion, project))
            if projectversion_title != existingversion_title:
                log('Changing project version title from "%s" to "%s".' % (existingversion_title,
                                                                           projectversion_title))

    return (project, projectversion, projectversion_title)
Example #36
    def login(cls):
        """
        Start deploying the game.
        """
        response.headers['Cache-Control'] = 'no-store, no-cache, max-age=0'

        hub_pool = connection_from_url(cls.base_url, maxsize=8, timeout=8.0)
        if not hub_pool or not cls.cookie_name:
            response.status_int = 500
            return {'ok': False, 'msg': 'Wrong deployment configuration.'}

        cls.hub_pool = hub_pool

        form = request.params
        try:
            login_name = form['login']
            credentials = {
                'login': login_name,
                'password': form['password'],
                'source': '/local'
            }
        except KeyError:
            response.status_int = 400
            return {'ok': False, 'msg': 'Missing user login information.'}

        try:
            r = hub_pool.request('POST',
                                 '/dynamic/login',
                                 fields=credentials,
                                 retries=1,
                                 redirect=False)
        except (HTTPError, SSLError) as e:
            LOG.error(e)
            response.status_int = 500
            return {'ok': False, 'msg': str(e)}

        if r.status != 200:
            response.status_int = 400
            return {'ok': False, 'msg': 'Wrong user login information.'}

        cookie = r.headers.get('set-cookie', None)
        login_info = json_loads(r.data)

        # pylint: disable=E1103
        if not cookie or cls.cookie_name not in cookie or login_info.get('source') != credentials['source']:
            response.status_int = 400
            return {'ok': False, 'msg': 'Wrong user login information.'}
        # pylint: enable=E1103

        hub_headers = {'Cookie': cookie}

        return cls._get_projects_for_upload(hub_headers, login_name, form.get('rememberme'))
Example #37
def analyse_json(filename):
    """Utility to return the ratio of key size, punctuation size, and leaf value size."""

    unique_keys = {}

    def __get_size(j):
        """Recurse to generate size."""
        (keys, punctuation, key_count) = (0, 0, 0)
        if isinstance(j, list):
            punctuation += 1  # [
            punctuation += (len(j) - 1)  # ,
            for v in j:
                sub_k, sub_p, sub_count = __get_size(v)
                keys += sub_k
                punctuation += sub_p
                key_count += sub_count
            punctuation += 1  # ]
        elif isinstance(j, dict):
            punctuation += 1  # {
            if len(j.keys()) > 1:
                punctuation += (len(j.keys()) - 1)  # ,
            for k, v in j.iteritems():
                if k not in unique_keys:
                    unique_keys[k] = True
                key_count += 1
                punctuation += 1  # "
                keys += len(k)
                punctuation += 1  # "
                punctuation += 1  # :
                sub_k, sub_p, sub_count = __get_size(v)
                keys += sub_k
                punctuation += sub_p
                key_count += sub_count
            punctuation += 1  # }
        elif isinstance(j, (str, unicode)):
            punctuation += 1  # "
            punctuation += 1  # "
        return (keys, punctuation, key_count)

    total_size = path_getsize(filename)
    with open(filename, 'r') as f:
        data = f.read()
        j = json_loads(data)

        (keys, punctuation, key_count) = __get_size(j)
        values = total_size - (keys + punctuation)
        unique_count = len(unique_keys.keys())
        compressed_size = len(zlib_compress(data, 6))

        return (keys, punctuation, values, key_count, unique_count, total_size,
                compressed_size)
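A quick way to read the ratios (the file name is illustrative; note the helper is Python 2 code, given its use of iteritems and unicode):

    keys, punct, values, key_count, unique_count, total, compressed = analyse_json('data.json')
    print 'keys: %d bytes, punctuation: %d bytes, values: %d bytes' % (keys, punct, values)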
Example #38
def inline_array_events_s3(options, today_log, array_files_list, enc_key, connection):

    verbose = options.verbose
    to_sort = set()

    try:
        for index, filename in enumerate(array_files_list):
            # Format: 'https://bucket.s3.amazonaws.com/gamefolder/arrayevents/date(seconds)/objectid.bin?
            #          AWSAccessKeyId=keyid&Expires=timestamp&Signature=signature'
            # The objectid doesn't correspond to a database entry but is used for uniqueness and its timestamp
            filename_cleaned = filename.split('?', 1)[0].rsplit('/', 1)[-1]
            event_objectid = filename_cleaned.split('.', 1)[0]
            timestamp = get_objectid_timestamp(event_objectid)
            formatted_timestamp = strftime('%Y-%m-%d %H:%M:%S', gmtime(timestamp))

            if verbose:
                log('Requesting array event ' + str(index + 1) + ' occurring at ' + formatted_timestamp)
            r = connection.request('GET', filename, redirect=False)

            # pylint: disable=E1103
            if r.status != 200:
                error_msg = 'Couldn\'t download event %d.' % (index + 1)
                if r.data.get('msg', None):
                    error_msg += ' ' + r.data['msg']
                error(str(r.status) + error_msg)
                exit(-1)
            # pylint: enable=E1103

            r_data = decrypt_data(r.data, enc_key)
            r_data = json_loads(zlib_decompress(r_data))

            slug = r_data['slug']
            del r_data['slug']
            r_data['time'] = formatted_timestamp

            if slug not in today_log:
                today_log[slug] = { 'playEvents': [], 'customEvents': [] }

            today_log[slug]['customEvents'].append(r_data)
            # Maintain a list of slugs so the customEvents can be sorted by date, keeping added array
            # events in order without unnecessarily sorting large lists that received no array events
            to_sort.add(slug)

        for slug in to_sort:
            today_log[slug]['customEvents'].sort(key=lambda k: k['time'])

        return today_log

    except (HTTPError, SSLError) as e:
        error(e)
        exit(-1)
Example #39
def json2yaml(source_filename, dest_filename, is_mapping_table):

    json_filename = '%s.json' % source_filename
    yaml_filename = '%s.yaml' % dest_filename

    result = 0

    json_file = None
    yaml_file = None

    try:
        json_file = open(json_filename, 'r')
        yaml_file = open(yaml_filename, 'w')
    except IOError as e:
        print str(e)
        result = 1
    else:
        json = json_file.read()
        try:
            json_dict = json_loads(json)
        except JSONDecodeError as e:
            print ('Failed to decode response for: %s' % json)
            result = 1
        else:
            if is_mapping_table:
                try:
                    mapping_version = json_dict['version']
                    if mapping_version == 1.0:
                        json_dict = json_dict['urnmapping']
                        if json_dict:
                            yaml.dump(json_dict, yaml_file, default_flow_style=False)
                        else:
                            print ('Cannot find urnmapping data')
                            result = 1
                    else:
                        print ('Mapping table version not recognized: %s' % mapping_version)
                        result = 1
                except KeyError:
                    print 'No version information in mapping table'
                    result = 1
            else:
                yaml.dump(json_dict, yaml_file, default_flow_style=False)

    if json_file is not None:
        json_file.close()

    if yaml_file is not None:
        yaml_file.close()

    return result
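The function appends the file extensions itself and returns a shell-style status code, 0 on success and 1 on failure; the base names are illustrative:

    result = json2yaml('config', 'config', is_mapping_table=False)  # reads config.json, writes config.yaml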
Example #40
def get_auth(url, user, key, region, snet=False):
    """
    Get authentication/authorization credentials.

    The snet parameter is used for Rackspace's ServiceNet internal network
    implementation. In this function, it simply adds *snet-* to the beginning
    of the host name for the returned storage URL. With Rackspace Cloud Files,
    use of this network path causes no bandwidth charges but requires the
    client to be running on Rackspace's ServiceNet network.

    :param url: authentication/authorization URL
    :param user: user to authenticate as
    :param key: key or password for authorization
    :param region: service region [dfw, ord, syd, iad, etc]
    :param snet: use SERVICENET internal network (see above), default is False
    :returns: tuple of (storage URL, auth token)
    :raises ClientException: HTTP GET request to auth URL failed
    """
    swift_service = 'object-store'
    parsed, conn = http_connection(url)
    params = json_dumps({"auth": {"RAX-KSKEY:apiKeyCredentials":
                                  {"username": user, "apiKey": key}}})
    conn.request('POST', parsed.path, params,
                 {'Accept': 'application/json',
                  'Content-Type': 'application/json'})
    resp = conn.getresponse()
    data = json_loads(resp.read())
    if resp.status < 200 or resp.status >= 300:
        raise ClientException(
            'Auth POST failed', http_scheme=parsed.scheme,
            http_host=conn.host, http_port=conn.port,
            http_path=parsed.path, http_status=resp.status,
            http_reason=resp.reason)

    try:
        token = data['access']['token']['id']
        for service in data['access']['serviceCatalog']:
            if service['type'] == swift_service:
                for points in service['endpoints']:
                    if points['region'] == region:
                        if snet:
                            storage_url = points['internalURL']
                        else:
                            storage_url = points['publicURL']
                        return storage_url, token
                raise ClientException('Region %s not found' % region)
        raise ClientException('Service Type %s not found' % swift_service)
    except KeyError:
        raise ClientException(
            'Inconsistent Service Catalog back from auth: %s' % data)
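A hedged usage sketch for get_auth; the endpoint, credentials and region below are placeholders, not values from the original source:

# Hypothetical Rackspace-style identity endpoint and credentials.
storage_url, token = get_auth('https://identity.example.com/v2.0',
                              'demo-user', 'demo-api-key', 'dfw', snet=False)
# snet=True would select the endpoint's internalURL instead of publicURL.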
        def myapp(environ, start_response):

            path_info = environ['PATH_INFO']
            query_string = environ['QUERY_STRING']

            # Rebuilt on every request; this could be computed once and cached.
            self.pattern_cache = [(re.compile('/path_xml'), [], self.path_xml),
                                  (re.compile('/transit_path'), [], self.transit_path),
                                  (re.compile('/getUrbanExplorerBlob'), [], self.getUrbanExplorerBlob)]

            for ppath, pargs, pfunc in self.pattern_cache:
                if ppath.match(path_info):

                    args = cgi.parse_qs(query_string)
                    args = dict((k, v[0]) for k, v in args.items())

                    try:
                        # Use json_loads to coerce args to native types, leaving
                        # 'jsoncallback' untouched and ignoring the '_' cache-buster.
                        arglist = []
                        for k, v in args.items():
                            if k == 'jsoncallback':
                                arglist.append((k, v))
                            elif k != '_':
                                arglist.append((k, json_loads(v)))
                        args = dict(arglist)

                        mime = pfunc.mime if hasattr(pfunc, 'mime') else self.DEFAULT_MIME

                        start_response('200 OK', [('Content-type', mime)])
                        return pfunc(**args)
                    except Exception:
                        problem = traceback.format_exc()
                        start_response('500 Internal Error', [('Content-type', 'text/plain'), ('Content-Length', str(len(problem)))])
                        return [problem]

            # no match:
            problem = "No method corresponds to path '%s'" % path_info
            start_response('404 Not Found', [('Content-type', 'text/plain'), ('Content-Length', str(len(problem)))])
            return [problem]
Exemple #42
0
    def get_container_objects(self):
        """Retrieve objects list in a container

        :return: A list of dict that describe objects
                 or None if container does not exist
        """
        qs = '?format=json'
        path = self.base_path + qs
        ret = self.httpclient.request('GET', path)
        if ret.status_code == 404:
            return None
        if ret.status_code < 200 or ret.status_code >= 300:
            raise SwiftException('GET request failed with error code %s'
                                 % ret.status_code)
        content = ret.read()
        return json_loads(content)
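A short usage sketch, where container stands in for an instance of the class this method belongs to:

# None means the container itself is missing; an empty list means it is empty.
objects = container.get_container_objects()
if objects is not None:
    for obj in objects:
        print(obj['name'])  # Swift listings also carry 'bytes', 'hash', etc.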
Exemple #43
0
def inline_array_events_local(options, today_log, array_files_list, enc_key):

    verbose = options.verbose
    to_sort = set()

    try:
        index = 0
        for index, filename in enumerate(array_files_list):
            # Format: 'eventlogspath/gamefolder/arrayevents/date(seconds)/objectid.bin'
            # The objectid doesn't correspond to a database entry but is used for uniqueness and timestamp
            filename = filename.replace("\\", "/")
            event_objectid = filename.rsplit("/", 1)[-1].split(".", 1)[0]
            timestamp = get_objectid_timestamp(event_objectid)
            formatted_timestamp = strftime("%Y-%m-%d %H:%M:%S", gmtime(timestamp))

            if verbose:
                log("Retrieving events file " + str(index + 1) + " submitted at " + formatted_timestamp)

            with open(filename, "rb") as fin:
                file_content = fin.read()
            file_content = decrypt_data(file_content, enc_key)
            file_content = json_loads(zlib_decompress(file_content))

            if not isinstance(file_content, list):
                file_content = [file_content]
            for event in file_content:
                slug = event["slug"]
                del event["slug"]
                event["time"] = strftime("%Y-%m-%d %H:%M:%S", gmtime(event["time"]))

                if slug not in today_log:
                    today_log[slug] = {"playEvents": [], "customEvents": []}

                today_log[slug]["customEvents"].append(event)
                # Maintain a list of slugs so that customEvents are sorted by date and the
                # added array events appear in order, without unnecessarily sorting large
                # lists that no array event was added to
                to_sort.add(slug)

        for slug in to_sort:
            today_log[slug]["customEvents"].sort(key=lambda k: k["time"])

        return today_log

    except (IOError, OSError) as e:
        error(e)
        exit(-1)
def _check_project(connection, options, cookie):
    project = options.project
    projectversion = options.projectversion

    try:
        r = connection.request('POST',
                               '/dynamic/upload/projects',
                               headers={'Cookie': cookie},
                               redirect=False)
    except (HTTPError, SSLError) as e:
        error(e)
        exit(-1)

    if r.status != 200:
        error('Wrong Hub answer!')
        exit(-1)

    # pylint: disable=E1103
    projects = json_loads(r.data).get('projects', [])
    # pylint: enable=E1103

    upload_access = False
    new_version = True
    projectversion_title = projectversion
    for project_info in projects:
        if project_info['slug'] == project:
            upload_access = True
            for version_info in project_info['versions']:
                if version_info['version'] == projectversion:
                    new_version = False
                    projectversion_title = version_info['title']
                    break

    if not upload_access:
        error('Project "%s" does not exist or you are not authorized to upload new versions!' % project)
        exit(-1)

    if not options.silent:
        if new_version:
            log('Uploading to new version "%s" on project "%s".' % (projectversion, project))
        else:
            log('Uploading to existing version "%s" on project "%s".' % (projectversion, project))

    return (project, projectversion, projectversion_title)
Exemple #45
0
def loads(s,
          encoding=None,
          object_hook=None,
          parse_float=None,
          parse_int=None,
          parse_constant=None,
          object_pairs_hook=None,
          use_decimal=False,
          **kw):
    return json_loads(s,
                      encoding=encoding,
                      cls=JsJSONDecoder,
                      object_hook=object_hook,
                      parse_float=parse_float,
                      parse_int=parse_int,
                      parse_constant=parse_constant,
                      object_pairs_hook=object_pairs_hook,
                      use_decimal=use_decimal,
                      **kw)
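A quick sketch of calling the wrapper; the literal input is illustrative, and anything beyond standard JSON depends on JsJSONDecoder, defined elsewhere in the module:

# With a JSON-compatible decoder this yields {'x': 1, 'y': [2, 3]}.
data = loads('{"x": 1, "y": [2, 3]}')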
Exemple #46
0
    def swift_auth_v2(self):
        self.tenant, self.user = self.user.split(';')
        auth_dict = {}
        auth_dict['auth'] = {
            'passwordCredentials': {
                'username': self.user,
                'password': self.password,
            },
            'tenantName': self.tenant
        }
        auth_json = json_dumps(auth_dict)
        headers = {'Content-Type': 'application/json'}
        auth_httpclient = HTTPClient.from_url(
            self.auth_url,
            connection_timeout=self.http_timeout,
            network_timeout=self.http_timeout,
        )
        path = urlparse.urlparse(self.auth_url).path
        if not path.endswith('tokens'):
            path = posixpath.join(path, 'tokens')
        ret = auth_httpclient.request('POST',
                                      path,
                                      body=auth_json,
                                      headers=headers)

        if ret.status_code < 200 or ret.status_code >= 300:
            raise SwiftException('AUTH v2.0 request failed on ' +
                                 '%s with error code %s (%s)' %
                                 (str(auth_httpclient.get_base_url()) + path,
                                  ret.status_code, str(ret.items())))
        auth_ret_json = json_loads(ret.read())
        token = auth_ret_json['access']['token']['id']
        catalogs = auth_ret_json['access']['serviceCatalog']
        object_store = [
            o_store for o_store in catalogs
            if o_store['type'] == 'object-store'
        ][0]
        endpoints = object_store['endpoints']
        endpoint = [
            endp for endp in endpoints if endp["region"] == self.region_name
        ][0]
        return endpoint[self.endpoint_type], token
def _request_data(options):
    daterange = options.daterange
    params = {
        'start_time': daterange.start,
        'end_time': daterange.end,
        'version': __version__
    }

    connection = connection_from_url(options.hub, timeout=8.0)
    cookie = login(connection, options)

    try:
        r = connection.request('GET',
                               DATATYPE_URL[options.type] % options.project,
                               headers={
                                   'Cookie': cookie,
                                   'Accept-Encoding': 'gzip'
                               },
                               fields=params,
                               redirect=False)
    except (HTTPError, SSLError) as e:
        error(e)
        exit(-1)

    # pylint: disable=E1103
    r_data = json_loads(r.data)
    if r.status != 200:
        error_msg = 'Wrong Hub answer.'
        if r_data.get('msg', None):
            error_msg += ' ' + r_data['msg']
        if r.status == 403:
            error_msg += ' Make sure the project you\'ve specified exists and you have access to it.'
        error(error_msg)
        exit(-1)
    # pylint: enable=E1103

    if options.verbose:
        log('Data received from the hub')
        log('Logging out')
    logout(connection, cookie)

    return r_data
def login(connection, options):
    username = options.user
    password = options.password

    if not options.silent:
        log('Login as "%s".' % username)

    credentials = {'login': username, 'password': password, 'source': '/tool'}

    try:
        r = connection.request('POST',
                               '/dynamic/login',
                               fields=credentials,
                               retries=1,
                               redirect=False)
    except (HTTPError, SSLError):
        error('Connection to Hub failed!')
        exit(-1)

    if r.status != 200:
        if r.status == 301:
            redirect_location = r.headers.get('location', '')
            end_domain = redirect_location.find('/dynamic/login')
            error(
                'Login is being redirected to "%s". Please verify the Hub URL.'
                % redirect_location[:end_domain])
        else:
            error('Wrong user login information!')
        exit(-1)

    cookie = r.headers.get('set-cookie', None)
    login_info = json_loads(r.data)

    # pylint: disable=E1103
    if not cookie or HUB_COOKIE_NAME not in cookie or login_info.get(
            'source') != credentials['source']:
        error('Hub login failed!')
        exit(-1)
    # pylint: enable=E1103

    return cookie
Exemple #50
0
def write_to_file(options, data, filename=None, output_path=None, force_overwrite=False):
    if not filename:
        start_date = options.daterange[0]
        end_date = options.daterange[-1]

        filename = '%s-%s-%s' % (options.project, options.datatype, start_date)
        if start_date != end_date:
            filename += '_-_' + end_date
        filename += '.json'

    try:
        if not output_path:
            output_path = normpath(path_join(options.outputdir, filename))

        if path_exists(output_path):
            if options.overwrite or force_overwrite:
                if not options.silent:
                    warning('Overwriting existing file: %s' % output_path)
            elif not options.silent:
                warning('Skipping existing file: %s' % output_path)
                return

        indentation = None
        if options.indent:
            indentation = 4
            if isinstance(data, str):
                data = json_loads(data)

        with open(output_path, 'wb') as fout:
            if isinstance(data, str):
                fout.write(data)
            else:
                json_dump(data, fout, indent=indentation)

        if options.verbose:
            log('Finished writing to: %s' % output_path)

    except (IOError, OSError) as e:
        error(e)
        exit(-1)
Exemple #53
0
    def update_session(self, session_id, changes):
        assert isinstance(session_id, str)
        assert isinstance(changes, dict)
        hashed_id = get_hashed_session_id(session_id)
        with self._engine.begin() as conn:
            r = conn.execute(
                select([t_sessions]).where(t_sessions.c.id_hash == hashed_id))
            row = r.first()
            if row:
                assert row[t_sessions.c.id_hash] == hashed_id
                orig_data = json_loads(row[t_sessions.c.data_json])
                new_data = {**orig_data, **changes}
                # Every changed key must have won the merge.
                assert all(new_data[k] == changes[k] for k in changes)
                conn.execute(t_sessions.update().where(
                    t_sessions.c.id_hash == hashed_id).values(
                        last_used=datetime.utcnow(),
                        data_json=json_dumps(new_data)))
            else:
                conn.execute(t_sessions.insert().values(
                    id_hash=hashed_id,
                    last_used=datetime.utcnow(),
                    data_json=json_dumps(changes)))
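A hedged usage sketch; store stands in for an instance of the session class above, and the session id is illustrative:

# First call inserts a new row; the second merges keys into the stored JSON.
store.update_session('abc123', {'user_id': 42})
store.update_session('abc123', {'theme': 'dark'})  # user_id is preserved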
Exemple #54
0
    def try_login(cls):
        """
        Try to login automatically and return deployable projects.
        """
        response.headers['Cache-Control'] = 'no-store, no-cache, max-age=0'

        hub_pool = connection_from_url(cls.base_url, maxsize=8, timeout=8.0)
        if not hub_pool or not cls.cookie_name:
            response.status_int = 500
            return {'ok': False, 'msg': 'Wrong deployment configuration.'}

        cls.hub_pool = hub_pool

        try:
            hub_headers = {'Cookie': request.params['cookie']}
            r = hub_pool.request('POST',
                                 '/dynamic/user',
                                 headers=hub_headers,
                                 retries=1,
                                 redirect=False)
            # pylint: disable=E1103
            username = json_loads(r.data).get('username')
            # pylint: enable=E1103

            status = r.status

        except (HTTPError, SSLError) as e:
            LOG.error(e)
            response.status_int = 500
            return {'ok': False, 'msg': str(e)}
        except KeyError:
            status = 400

        if status != 200:
            response.status_int = 401
            return {'ok': False, 'msg': 'Wrong user login information.'}

        return cls._get_projects_for_upload(hub_headers, username, True)
Exemple #55
0
    def test_search_wifi(self):
        """
        Grab 3 keys for a lat lon
        """
        (lat, lon), ap_data = random.choice(self.AP_DATA)

        expected_lat = int(lat * 1000)
        expected_lon = int(lon * 1000)
        if len(ap_data) >= 3:
            wifi_data = ap_data[:3]
            if random.random() >= 0.5:
                # Throw in some garbage
                wifi_data.append({'key': INVALID_WIFI_KEY})
            jdata = json.dumps({'wifi': wifi_data})
            res = self.session.post(HOST + '/v1/search?key=test', jdata)

            self.assertEqual(res.status_code, 200)
            jdata = json_loads(res.content)
            if jdata['status'] != 'not_found':
                actual_lat = int(jdata['lat'] * 1000)
                actual_lon = int(jdata['lon'] * 1000)
                self.assertEqual(actual_lat, expected_lat)
                self.assertEqual(actual_lon, expected_lon)
Exemple #56
0
async def test_stream_snapshots_get_latest(
    model,
    sample_snapshot_loaded,
):
    stream, = await model.streams.list_all()
    snapshot = await model.stream_snapshots.get_latest(stream_id=stream.id)
    await snapshot.load_state()
    assert snapshot.stream_id == stream.id
    assert snapshot.date.isoformat() == '2019-04-01T00:30:00+00:00'
    assert yaml_dump(json_loads(snapshot.state_json)) == dedent('''\
        disk_free:
          __check:
            color: green
          __unit: bytes
          __value: 10000000
        load: 1.2
        uptime:
          __unit: seconds
          __value: 3600
        watchdog:
          __watchdog:
            deadline: 1554079810123
    ''')
    def _send_request(self: "RPCProxy", method: str, *args: Any,
                      **kwargs: Any) -> Any:
        if not self.username or not self.password:
            auth = None
        else:
            auth = (self.username, self.password)
        arg: Union[dict, tuple]
        if args:
            arg = args
        elif kwargs:
            arg = kwargs
        else:
            arg = ()
        dict_to_send = {"id": 0, "method": method, "params": arg}
        if self.xpub:
            dict_to_send["xpub"] = self.xpub
        response = self.session.post(
            self.url,
            headers={"content-type": "application/json"},
            json=dict_to_send,
            auth=auth,
            verify=self.verify,
        )
        response.raise_for_status()
        json = response.json()
        if json["error"]:
            raise ValueError("Error from server: {}".format(json["error"]))
        if json["id"] != 0:
            warnings.warn("ID mismatch!")
        result = json.get("result", {})
        if isinstance(result, str):
            try:
                result = json_loads(result)
            except Exception:
                pass
        return result
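A sketch of the call shape; proxy stands in for a configured RPCProxy instance and the method name is illustrative:

# Sends {"id": 0, "method": "getinfo", "params": []} with optional auth/xpub.
info = proxy._send_request('getinfo')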
Exemple #58
0
    def postupload_progress(cls):
        response.headers['Cache-Control'] = 'no-store, no-cache, max-age=0'

        form = request.params
        try:
            hub_project = form['project']
            hub_version = form['version']
        except KeyError:
            response.status_int = 400
            return {'ok': False, 'msg': 'Wrong project information.'}

        deploy_key = hub_project + hub_version
        deploy_info = cls._deploying.get(deploy_key, None)
        if not deploy_info:
            response.status_int = 404
            return {'ok': False, 'msg': 'Unknown deploy session.'}

        if deploy_info.error:
            LOG.error(deploy_info.error)
            response.status_int = 400
            return {'ok': False, 'msg': deploy_info.error}

        if not deploy_info.done:
            return {'ok': True, 'data': {'total': 1, 'processed': 0}}

        if not deploy_info.hub_session:
            response.status_int = 404
            return {'ok': False, 'msg': 'No deploy session found.'}

        try:
            r = cls.hub_pool.request(
                'POST',
                '/dynamic/upload/progress/%s' % deploy_info.hub_session,
                headers={'Cookie': deploy_info.hub_cookie},
                redirect=False)
        except (HTTPError, SSLError) as e:
            LOG.error(e)
            response.status_int = 500
            return {'ok': False, 'msg': 'Post-upload progress check failed.'}

        if r.status != 200:
            response.status_int = 500
            return {'ok': False, 'msg': 'Wrong Hub answer.'}

        r_data = json_loads(r.data)
        # pylint: disable=E1103
        progress = int(r_data.get('progress', -1))
        upload_info = str(r_data.get('info', ''))
        failed = r_data.get('failed', False)
        # pylint: enable=E1103

        if failed:
            response.status_int = 500
            return {
                'ok': False,
                'msg': 'Post-upload processing failed: %s' % upload_info
            }
        if -1 == progress:
            response.status_int = 500
            return {'ok': False, 'msg': 'Invalid post-upload progress.'}
        if 100 <= progress:
            del cls._deploying[deploy_key]

            try:
                cls.hub_pool.request(
                    'POST',
                    '/dynamic/logout',
                    headers={'Cookie': deploy_info.hub_cookie},
                    redirect=False)
            except (HTTPError, SSLError) as e:
                LOG.error(e)

            try:
                game = form['local']
            except KeyError:
                response.status_int = 400
                return {'ok': False, 'msg': 'Wrong request.'}

            game = get_game_by_slug(game)
            if game:
                game.set_deployed()

        return {
            'ok': True,
            'data': {
                'total': 100,
                'processed': progress,
                'msg': upload_info
            }
        }