Example #1
    def new_token(self,
                  user_id,
                  username,
                  domain=None,
                  tenant_id=None,
                  expire=None):
        """Create Token.

        This part of step 1 during the authentication after validation.

        Args:
            user_id (str): User ID
            username (str): Username.
            email (str): User email address.
            token (str): Unique token for specific user.
            domain_id (str): Current domain id.
            tenant_id (str): Current tenant id.
        """
        self._token = {}
        if user_id is None:
            raise ValueError('Require user_id for new_token')
        if username is None:
            raise ValueError('Require username for new_token')

        # These are only set during valid login.
        # Unique user id.
        self._token['user_id'] = user_id

        # Unique username.
        self._token['username'] = username

        # Token creation datetime, format YYYY/MM/DD HH:MM:SS.
        self._token['creation'] = now()

        # Token expire datetime, format YYYY/MM/DD HH:MM:SS.
        if expire is None:
            expire = (now() + timedelta(seconds=self._token_expire))
            self._token['expire'] = expire.strftime("%Y/%m/%d %H:%M:%S")
        else:
            self._token['expire'] = expire

        # Scope domain.
        self._token['domain'] = domain
        self._token['domain_id'] = domain_id(domain)

        # Scope tenant.
        self._token['tenant_id'] = tenant_id

        # Scope roles.
        self._token['roles'] = user_roles(user_id, domain, tenant_id)

        # Token Signature
        private_key = g.app.app_root.rstrip('/') + '/token.key'
        bytes_token = if_unicode_to_bytes(js.dumps(self._token))
        self._token_sig = pki.sign(private_key, base64.b64encode(bytes_token))
        return self._token_sig
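
A minimal standalone sketch of the expiry-string construction above, using the standard-library UTC clock in place of now() and a hypothetical 3600-second lifetime for self._token_expire:

from datetime import datetime, timedelta, timezone

token_lifetime = 3600  # hypothetical lifetime in seconds

# Compute the expiry timestamp in the same "YYYY/MM/DD HH:MM:SS" format.
expire = datetime.now(timezone.utc) + timedelta(seconds=token_lifetime)
print(expire.strftime("%Y/%m/%d %H:%M:%S"))  # e.g. 2024/05/01 13:00:00
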
Example #2
def acct(msg):
    try:
        pkt = msg['attributes']
    except KeyError:
        return
    try:
        pkt = decode_packet(pkt)
    except Exception:
        return
    try:
        nas_session_id = pkt.get('Acct-Session-Id', [None])[0]
        unique_session_id = pkt.get('Acct-Unique-Session-Id', [None])[0]
        status = pkt.get('Acct-Status-Type', [''])[0].lower()
        username = pkt.get('User-Name', [None])[0]
        client = pkt.get('Client-IP-Address', [None])[0]
        nas = pkt.get('NAS-IP-Address', ['0.0.0.0'])[0]
    except IndexError:
        return True

    dt = utc(parse_datetime(msg.get('datetime', None)))
    diff = (now()-dt).total_seconds()

    if diff > 60:
        log.error('Processing radius accounting message older' +
                  ' than 60 seconds. Age(%s)' % diff)

    with db() as conn:
        with dbw() as connw:
            with conn.cursor() as crsr:
                user = get_user(crsr,
                                client,
                                nas,
                                username)
                crsr.commit()
                if not user:
                    log.debug("user '%s' not found"
                              % username)
                    return False

                input_octets, output_octets = do_acct(connw,
                                                      pkt,
                                                      client,
                                                      nas,
                                                      nas_session_id,
                                                      unique_session_id,
                                                      dt,
                                                      user,
                                                      status)
                usage(connw,
                      pkt,
                      client,
                      nas,
                      nas_session_id,
                      unique_session_id,
                      user,
                      input_octets,
                      output_octets,
                      status)

    return True
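
The pkt.get('X', [default])[0] pattern above reads the first value of a multi-valued RADIUS attribute. A small hypothetical helper, first_attr, that tolerates both missing and empty attributes:

def first_attr(pkt, name, default=None):
    # RADIUS attributes decode to lists of values; return the first or a default.
    values = pkt.get(name) or []
    return values[0] if values else default

pkt = {'User-Name': ['alice'], 'Acct-Status-Type': []}
print(first_attr(pkt, 'User-Name'))                   # 'alice'
print(first_attr(pkt, 'NAS-IP-Address', '0.0.0.0'))   # '0.0.0.0'
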
Example #3
File: acct.py  Project: Vuader/calabiyau
def acct(msg):
    fr = parse_fr(msg.get('fr', ()))
    status = fr.get('Acct-Status-Type', 'start').lower()
    dt = utc(parse_datetime(msg.get('datetime', None)))
    diff = (now() - dt).total_seconds()

    if diff > 60:
        log.error('Processing radius accounting message older' +
                  ' than 60 seconds. Age(%s)' % diff)

    if not require_attributes('accounting', fr, [
            'User-Name', 'NAS-IP-Address', 'Acct-Status-Type',
            'Acct-Session-Id', 'Acct-Unique-Session-Id', 'Acct-Input-Octets64',
            'Acct-Output-Octets64'
    ]):
        return False

    with db() as conn:
        with dbw() as connw:
            user = get_user(conn, fr['NAS-IP-Address'], fr['User-Name'])
            if not user:
                log.debug("user '%s' not found" % (fr['User-Name'], ))
                return False

            input_octets, output_octets = do_acct(connw, fr, dt, user, status)
            usage(connw, fr, user, input_octets, output_octets, status)

            if not user['static_ip4'] and user['pool_id']:
                update_ip(connw, user, fr)

    return True
Example #4
 def validate(self):
     if not self.authenticated:
         raise AccessDeniedError("Credentials token missing") from None
     elif 'expire' in self._credentials:
         utc_expire = utc(self._credentials['expire'])
         if now() > utc_expire:
             self.clear()
             raise AccessDeniedError('Credentials token expired') from None
Example #5
 def parse_token(self, token):
     self._initial()
     token = if_unicode_to_bytes(token)
     signature, token = token.split(b'!!!!')
     self._token_sig = self._check_token(signature, token)
     self._token = js.loads(base64.b64decode(token))
     self._token_sig = signature
     utc_now = now()
     utc_expire = utc(self._token['expire'])
     if utc_now > utc_expire:
         raise AccessDenied('Token Expired')
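
parse_token splits the wire format signature + b'!!!!' + base64(JSON payload) and verifies the signature before trusting the expiry. A self-contained sketch of the same framing, substituting an HMAC for the project's RSA signing so it runs without key files:

import base64
import hashlib
import hmac
import json

SECRET = b'demo-secret'  # stands in for the project's RSA key pair

def make_token(credentials):
    # Serialize, base64-encode, sign, and join with the '!!!!' separator.
    b64 = base64.b64encode(json.dumps(credentials).encode())
    sig = hmac.new(SECRET, b64, hashlib.sha256).hexdigest().encode()
    return sig + b'!!!!' + b64

def parse_token(token):
    # Split on the separator, verify the signature, then decode the payload.
    sig, b64 = token.split(b'!!!!')
    expected = hmac.new(SECRET, b64, hashlib.sha256).hexdigest().encode()
    if not hmac.compare_digest(sig, expected):
        raise ValueError('Invalid token signature')
    return json.loads(base64.b64decode(b64))

print(parse_token(make_token({'user_id': '123', 'expire': '2024/05/01 13:00:00'})))
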
Example #6
    def json(self):
        # Return json token.
        if not self.authenticated:
            raise AccessDeniedError("Credentials token missing")

        utc_expire = utc(self._credentials['expire'])
        if now() > utc_expire:
            raise AccessDeniedError('Auth Token Expired')

        credentials = {}
        credentials['token'] = self.token
        credentials.update(self._credentials)
        return js.dumps(credentials)
Example #7
    def add_user_role(self, req, resp, id, role, domain=None, tenant_id=None):
        """
        Associate role to a user.

        Args:
            id (str): UUID of user.
            role (str): UUID of role.
            domain (str): Name of domain (defaults to None).
                          Use the text "none" to indicate global domain
                          when tenant_id is supplied.
            tenant_id (str): UUID of tenant (defaults to None).

        Example return data:

        .. code-block:: json

            {
                "id": "e729af96-5672-4669-b4a1-6251493a67fa",
                "user_id": "e95ec7b1-4f0f-4c70-991f-4bb1bec6a524",
                "role_id": "08034650-1438-4e56-b5a8-674ede74fe83",
                "domain": "default",
                "tenant_id": null
            }
        """
        if domain is not None and domain.lower() == "none":
            domain = None
        with db() as conn:
            check_context_auth(conn, id, domain, tenant_id)
            # Even though we have unique constraint, sqlite
            # does not consider null as unique. ref:
            # https://goo.gl/JmjT5G
            # So need to manually check that.
            check_unique(conn, id, role, domain, tenant_id)

            sql = "INSERT INTO luxon_user_role " \
                  "(`id`,`role_id`,`tenant_id`,`user_id`," \
                  "`domain`,`creation_time`) " \
                  "VALUES (?,?,?,?,?,?)"
            user_role_id = str(uuid4())
            conn.execute(sql,
                         (user_role_id, role, tenant_id, id, domain, now()))
            conn.commit()
            user_role = {
                "id": user_role_id,
                "user_id": id,
                "role_id": role,
                "domain": domain,
                "tenant_id": tenant_id
            }
            return json.dumps(user_role, indent=4)
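
The manual check_unique call is needed because SQLite treats NULLs as distinct inside a UNIQUE constraint, so duplicate role assignments with a NULL tenant_id or domain would otherwise be accepted. A standalone demonstration of that behaviour:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE user_role (user_id TEXT, role_id TEXT, tenant_id TEXT, '
             'UNIQUE (user_id, role_id, tenant_id))')
# Both inserts succeed: NULL tenant_id values are considered distinct.
conn.execute("INSERT INTO user_role VALUES ('u1', 'r1', NULL)")
conn.execute("INSERT INTO user_role VALUES ('u1', 'r1', NULL)")
print(conn.execute('SELECT COUNT(*) FROM user_role').fetchone()[0])  # 2
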
Example #8
File: main.py  Project: Carlo15139/luxon
def clean_sessions(args):
    """Removes all expired session files"""
    path = args.path.rstrip('/')
    tmp_path = os.path.join(path, 'tmp')
    config = Config()
    config.load(path + '/settings.ini')
    expire = config.getint('sessions', 'expire', fallback=86400)
    if exists(tmp_path):
        files = ls(tmp_path)
        for file in files:
            if file[2].startswith('session_'):
                modified = file[8]
                expired = now() - timedelta(seconds=expire)
                if modified <= expired:
                    rm(file[1])
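
Here file[1], file[2] and file[8] are fields of luxon's ls() listing, used above as the path, name and modification time. A rough standard-library equivalent of the same expiry sweep, assuming modification time is the ageing criterion:

import os
from datetime import datetime, timedelta, timezone

def clean_sessions(tmp_path, expire=86400):
    """Remove session_* files whose mtime is older than `expire` seconds."""
    cutoff = datetime.now(timezone.utc) - timedelta(seconds=expire)
    for entry in os.scandir(tmp_path):
        if entry.name.startswith('session_'):
            modified = datetime.fromtimestamp(entry.stat().st_mtime, timezone.utc)
            if modified <= cutoff:
                os.remove(entry.path)
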
Example #9
    def load(self, key):
        """Loads cached data from key

        Args:
            key (str): key for required data
        """
        try:
            value, expire = self._cache.pop(key)
            if expire > now():
                self._cache[key] = (
                    value,
                    expire,
                )
                return pickle.loads(value)
        except KeyError:
            return None
Example #10
    def token(self, token):
        # Load existing token.
        token = if_unicode_to_bytes(token)
        signature, b64_token = token.split(b'!!!!')

        try:
            self._rsakey.verify(signature, b64_token)
        except ValueError as e:
            raise AccessDeniedError('Invalid Auth Token. %s' % e)

        decoded = js.loads(base64.b64decode(b64_token))
        utc_expire = utc(decoded['expire'])

        if now() > utc_expire:
            raise AccessDeniedError('Auth Token Expired')

        self._credentials = decoded
Example #11
    def store(self, key, value, expire):
        """Stores data

        Args:
            key (str): key associated with cached data
            value (obj): data to be cached
            expire (int): time to expire (s)
        """
        if sys.getsizeof(value, 0) <= self._max_obj_size:
            try:
                self._cache.pop(key)
            except KeyError:
                if len(self._cache) >= self._max_objs:
                    self._cache.popitem(last=False)
            self._cache[key] = (
                pickle.dumps(value),
                now() + timedelta(seconds=expire),
            )
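
Examples #9 and #11 together form a small bounded in-memory cache: store() evicts the oldest entry once _max_objs is reached, and load() re-inserts a hit so recently used keys stay newest. A self-contained sketch of the same idea with hypothetical size limits:

import pickle
import sys
from collections import OrderedDict
from datetime import datetime, timedelta, timezone

class MemoryCache:
    def __init__(self, max_objs=64, max_obj_size=4096):
        self._cache = OrderedDict()
        self._max_objs = max_objs
        self._max_obj_size = max_obj_size

    def store(self, key, value, expire):
        # Skip oversized objects; evict the oldest entry when full.
        if sys.getsizeof(value, 0) <= self._max_obj_size:
            self._cache.pop(key, None)
            if len(self._cache) >= self._max_objs:
                self._cache.popitem(last=False)
            expires_at = datetime.now(timezone.utc) + timedelta(seconds=expire)
            self._cache[key] = (pickle.dumps(value), expires_at)

    def load(self, key):
        # Pop and re-insert on hit so recently used keys stay newest.
        try:
            value, expires_at = self._cache.pop(key)
        except KeyError:
            return None
        if expires_at > datetime.now(timezone.utc):
            self._cache[key] = (value, expires_at)
            return pickle.loads(value)
        return None

cache = MemoryCache()
cache.store('answer', 42, expire=60)
print(cache.load('answer'))  # 42
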
Example #12
def calc_next_expire(metric, span, expired=None):
    if expired is None:
        expired = now()

    if metric == 'days':
        new_expire = add_date(expired, days=span)
        return new_expire

    elif metric == 'weeks':
        new_expire = add_date(expired, weeks=span)
        return new_expire

    elif metric == 'months':
        new_expire = add_date(expired, months=span)
        return new_expire

    else:
        return expired
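
add_date adds calendar-aware offsets to a datetime. Assuming it behaves like dateutil's relativedelta, the rollover logic can be sketched as:

from datetime import datetime, timezone
from dateutil.relativedelta import relativedelta  # assumed equivalent of add_date

def calc_next_expire(metric, span, expired=None):
    if expired is None:
        expired = datetime.now(timezone.utc)
    deltas = {'days': relativedelta(days=span),
              'weeks': relativedelta(weeks=span),
              'months': relativedelta(months=span)}
    # Unknown metrics fall back to the unchanged expiry, as in the code above.
    return expired + deltas.get(metric, relativedelta())

print(calc_next_expire('months', 1, datetime(2024, 1, 31, tzinfo=timezone.utc)))
# 2024-02-29 00:00:00+00:00
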
Example #13
    def token(self):
        # Return serialized token.
        if not self.authenticated:
            raise AccessDeniedError("Credentials token missing")

        utc_expire = utc(self._credentials['expire'])
        if now() > utc_expire:
            raise AccessDeniedError('Auth Token Expired')

        bytes_token = if_unicode_to_bytes(
            js.dumps(self._credentials, indent=None))
        b64_token = base64.b64encode(bytes_token)
        token_sig = if_unicode_to_bytes(self._rsakey.sign(b64_token))
        token = if_bytes_to_unicode(token_sig + b'!!!!' + b64_token)
        if len(token) > 1280:
            raise ValueError("Auth Token exceeded 10KB" +
                             " - Revise Assignments for credentials")

        return token
Example #14
    def new(self, user_id, username=None, domain=None, roles=None):

        self.clear()

        if 'expire' not in self._credentials:
            # Create New Token
            expire = (now() + timedelta(seconds=self._token_expire))
            self._credentials['expire'] = expire.strftime("%Y/%m/%d %H:%M:%S")

        self._credentials['user_id'] = user_id

        if username is not None:
            self._credentials['username'] = username

        if domain is not None:
            self._credentials['user_domain'] = domain

        if roles is not None:
            self.roles = roles
Example #15
from uuid import uuid4

from luxon import register
from luxon import SQLModel
from luxon.utils.timezone import now

from infinitystone.models.domains import luxon_domain
from infinitystone.models.tenants import luxon_tenant

USERS = [
    ('00000000-0000-0000-0000-000000000000', 'tachyonic', None, None, 'root',
     '$2b$12$QaWa.Q3gZuafYXkPo3EJRuSJ1wGuutShb73RuH1gdUVri82CU6V5q', None,
     'Default Root User', None, None, None, None, 1, now()),
]


@register.model()
class luxon_user(SQLModel):
    id = SQLModel.Uuid(default=uuid4, internal=True)
    tag = SQLModel.String(hidden=True, max_length=30, null=False)
    domain = SQLModel.Fqdn(internal=True)
    tenant_id = SQLModel.Uuid(internal=True)
    username = SQLModel.Username(max_length=100, null=False)
    password = SQLModel.String(max_length=100, null=True)
    email = SQLModel.Email(max_length=255)
    name = SQLModel.String(max_length=100)
    phone_mobile = SQLModel.Phone()
    phone_office = SQLModel.Phone()
Example #16
File: http.py  Project: Carlo15139/luxon
def request(client,
            method,
            url,
            params={},
            data=None,
            headers={},
            stream=False,
            **kwargs):

    with Timer() as elapsed:
        method = method.upper()
        headers = headers.copy()
        params = params.copy()

        try:
            _cache_engine = Cache()
        except NoContextError:
            _cache_engine = None

        try:
            if g.current_request.user_token:
                headers['X-Auth-Token'] = g.current_request.user_token
            if g.current_request.context_domain:
                headers['X-Domain'] = g.current_request.context_domain
            if g.current_request.context_tenant_id:
                headers['X-Tenant-Id'] = g.current_request.context_tenant_id
        except NoContextError:
            pass

        for kwarg in kwargs:
            headers[kwarg] = kwargs[kwarg]

        if data is not None:
            if hasattr(data, 'json'):
                data = data.json
            elif isinstance(data, (dict, list, OrderedDict)):
                data = js.dumps(data)
            data = if_unicode_to_bytes(data)

        if isinstance(data, bytes):
            headers['Content-Length'] = str(len(data))

        cached = None
        if (_cache_engine and stream is False and method == 'GET'
                and data is None):

            if isinstance(params, dict):
                cache_params = list(orderdict(params).values())

            if isinstance(headers, dict):
                cache_headers = list(orderdict(headers).values())

            cache_key = (method, url, cache_params, cache_headers)
            cache_key = str(md5sum(pickle.dumps(cache_key)))
            cached = _cache_engine.load(cache_key)
            if cached is not None:
                cache_control = parse_cache_control_header(
                    cached.headers.get('Cache-Control'))
                max_age = cache_control.max_age
                date = cached.headers.get('Date')
                etag = cached.headers.get('Etag')
                date = utc(date)
                current = now()
                diff = (current - date).total_seconds()
                if cache_control.no_cache:
                    # If no-cache revalidate.
                    headers['If-None-Match'] = etag
                elif max_age and diff < int(max_age):
                    # If not expired, use cache.
                    _debug(method, url, params, data, headers,
                           cached.headers, cached.content, cached.status_code,
                           elapsed(), 'Memory')
                    return cached
                else:
                    # If expired, revalidate.
                    headers['If-None-Match'] = etag

        try:
            response = Response(
                client._s.request(method.upper(),
                                  url,
                                  params=params,
                                  data=data,
                                  headers=headers,
                                  stream=stream))
            if (_cache_engine and cached is not None
                    and response.status_code == 304):

                _debug(method, url, params, data, headers,
                       cached.headers, cached.content, cached.status_code,
                       elapsed(), 'Validated (304)')
                return cached

            if response.status_code >= 400:

                try:
                    title = None
                    description = None
                    if 'error' in response.json:
                        error = response.json['error']
                        try:
                            title = error.get('title')
                            description = error.get('description')
                        except AttributeError:
                            pass

                    raise HTTPError(response.status_code, description, title)
                except HTTPClientContentDecodingError:
                    raise HTTPError(response.status_code)

            if _cache_engine and stream is False and method == 'GET':
                if response.status_code == 200:
                    cache_control = parse_cache_control_header(
                        response.headers.get('Cache-Control'))
                    if (not cache_control.no_store and cache_control.max_age
                            and response.headers.get('Etag')
                            and response.headers.get('Date') and data is None):
                        _cache_engine.store(cache_key, response, 604800)

        except requests.exceptions.InvalidHeader as e:
            raise HTTPClientInvalidHeader(e)
        except requests.exceptions.InvalidURL as e:
            raise HTTPClientInvalidURL(e)
        except requests.exceptions.InvalidSchema as e:
            raise HTTPClientInvalidSchema(e)
        except requests.exceptions.MissingSchema as e:
            raise HTTPClientMissingSchema(e)
        except requests.exceptions.ConnectionError as e:
            raise HTTPClientConnectionError(e)
        except requests.exceptions.ProxyError as e:
            raise HTTPClientProxyError(e)
        except requests.exceptions.SSLError as e:
            raise HTTPClientSSLError(e)
        except requests.exceptions.Timeout as e:
            raise HTTPClientTimeoutError(e)
        except requests.exceptions.ConnectTimeout as e:
            raise HTTPClientConnectTimeoutError(e)
        except requests.exceptions.ReadTimeout as e:
            raise HTTPClientReadTimeoutError(e)
        except requests.exceptions.HTTPError as e:
            raise HTTPError(e.response.status_code, e)

        _debug(method, url, params, data, headers, response.headers,
               response.content, response.status_code, elapsed())

    return response
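
The GET caching above keys entries on an md5 of the pickled request parts and then decides between serving from cache and revalidating with If-None-Match. A condensed sketch of that decision, with md5sum and orderdict replaced by standard-library equivalents:

import hashlib
import pickle
from datetime import datetime, timezone

def cache_key(method, url, params, headers):
    # A stable digest of the ordered request parts, as in request() above.
    key = (method, url, sorted(params.items()), sorted(headers.items()))
    return hashlib.md5(pickle.dumps(key)).hexdigest()

def cache_decision(no_cache, max_age, response_date, current=None):
    """Return 'use', 'revalidate', or 'revalidate-expired' for a cached GET."""
    if current is None:
        current = datetime.now(timezone.utc)
    age = (current - response_date).total_seconds()
    if no_cache:
        return 'revalidate'           # always send If-None-Match
    if max_age and age < int(max_age):
        return 'use'                  # fresh enough, serve from cache
    return 'revalidate-expired'       # stale, send If-None-Match

print(cache_decision(False, 300, datetime.now(timezone.utc)))  # 'use'
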
Example #17
def github(req, resp):
    root_path = g.app.path
    mkdir(joinpath(root_path, 'github'))
    mkdir(joinpath(root_path, 'docs'))

    try:
        projects = load(root_path + '/projects.pickle')
    except FileNotFoundError:
        projects = {}

    username = g.app.config.get('github', 'username')
    password = g.app.config.get('github', 'password')

    tachyonic = GitHub(auth=(username, password))

    while True:
        try:
            teams = {}
            github_teams = tachyonic.teams('TachyonicProject')
            for github_team in github_teams:
                team = github_team['name']
                if team == "Author":
                    continue
                teams[team] = {}
                github_members = tachyonic.team_members(github_team['id'])
                for github_member in github_members:
                    login = github_member['login']
                    teams[team][login] = {}
                    teams[team][login]['github'] = github_member['html_url']
                    teams[team][login]['avatar'] = github_member['avatar_url']
            save(teams, root_path + '/team.pickle', perms=664)

            save(tachyonic.projects('TachyonicProject'),
                 root_path + '/planning.pickle',
                 perms=664)

            found = []
            log.info("Getting Repos")
            repos = tachyonic.repos('TachyonicProject')
            for repo in repos:
                name = repo['name']
                found.append(name)
                description = repo['description']
                if name not in projects:
                    projects[name] = {}
                log.info("Scanning Repo " + name)
                updated_at = utc(repo['updated_at'])
                created_at = utc(repo['created_at'])
                pushed_at = utc(repo['pushed_at'])

                if (('updated_at' not in projects[name])
                        or ('updated_at' in projects[name]
                            and updated_at != projects[name]['updated_at'])
                        or ('pushed_at' not in projects[name])
                        or ('pushed_at' in projects[name]
                            and pushed_at != projects[name]['pushed_at'])):

                    projects[name]['created_at'] = created_at
                    projects[name]['description'] = description
                    projects[name]['clone_url'] = repo['clone_url']
                    log.info("Getting Branches for %s" % name)
                    branches = tachyonic.branches('TachyonicProject', name)
                    branches = [branch['name'] for branch in branches]
                    projects[name]['branches'] = branches
                    log.info("Getting Tags for %s" % name)
                    tags = tachyonic.tags('TachyonicProject', name)
                    tags = [tag['name'] for tag in tags]
                    projects[name]['tags'] = tags
                    projects[name]['refs'] = version_order(branches + tags)
                    projects[name]['doc_refs'] = {}
                else:
                    log.info("Project %s Already up-to-date (%s)" % (
                        name,
                        updated_at,
                    ))

                projects[name]['updated_at'] = updated_at
                projects[name]['pushed_at'] = pushed_at

                if 'updated_doc' not in projects[name]:
                    projects[name]['updated_doc'] = {}

                for ref in projects[name]['refs']:
                    current_datetime = now()
                    if ref in projects[name]['updated_doc']:
                        commits = tachyonic.commits(
                            'TachyonicProject',
                            name,
                            sha=ref,
                            since=format_iso8601(
                                projects[name]['updated_doc'][ref]))
                        if len(commits) == 0:
                            log.info("Documentation" + " '%s/%s'" % (
                                name,
                                ref,
                            ) + " Already up-to-date (%s)" % updated_at)
                            continue

                    venv_dir = "%s/github/%s_%s" % (
                        root_path,
                        name,
                        ref,
                    )
                    doc_dir = "%s/docs/%s_%s" % (
                        root_path,
                        name,
                        ref,
                    )
                    src_path = venv_dir + '/' + name
                    log.info("Creating Virtual Environment '%s'" % venv_dir)
                    create_env(str(venv_dir), wipe=True, site_packages=False)

                    clone(projects[name]['clone_url'], src_path)

                    if (exists(src_path + '/docs/source/conf.py')
                            and exists(src_path + '/docs/Makefile')):
                        log.info("Bulding '%s/%s'" % (
                            name,
                            ref,
                        ))
                        projects[name]['doc_refs'][ref] = True
                        info = build_doc(root_path, venv_dir, src_path, ref,
                                         doc_dir, name)
                        updated(name, ref, info)
                    else:
                        projects[name]['doc_refs'][ref] = False
                        log.warning("No Sphinx docs found '%s/%s'" % (
                            name,
                            ref,
                        ))

                    projects[name]['updated_doc'][ref] = current_datetime

                save(projects, root_path + '/projects.pickle', perms=664)

            events = []
            events_ordered = []
            git_events = tachyonic.events('TachyonicProject')
            for pj in projects.copy():
                if pj not in found:
                    del projects[pj]
                else:
                    for event in git_events:
                        type = event['type']
                        payload = event['payload']
                        if type == 'PullRequestEvent':
                            pr = payload['pull_request']
                            merged = pr['merged']
                            base = pr['base']
                            ref = base['ref']
                            if merged is True:
                                merged_at = utc(pr['merged_at'])
                                events.append((merged_at, "Code Updated",
                                               "Repo " + pj + "/" + ref + ""))
            for item in sorted(events, key=operator.itemgetter(0)):
                events_ordered.append(item)
            events_ordered = list(reversed(events_ordered))
            save(events_ordered[0:10], root_path + '/events.pickle', perms=664)

            save(projects, root_path + '/projects.pickle', perms=664)
            log.info('Infinite loop sleeping 5 Minutes')
            sleep(300)

        except KeyboardInterrupt:
            print("Control-C closed / Killed")
            break
        except ExecuteError as e:
            handle_error(e.title, e.description)
        except Exception as e:
            trace = str(traceback.format_exc())
            error = '%s: %s' % (object_name(e), e)
            handle_error(error, trace)
Example #18
from uuid import uuid4

from luxon import register
from luxon import SQLModel
from luxon.utils.timezone import now

from infinitystone.models.domains import luxon_domain
from infinitystone.models.tenants import luxon_tenant
from infinitystone.models.roles import luxon_role

USER_ROLES = [
    ('00000000-0000-0000-0000-000000000000',
     '00000000-0000-0000-0000-000000000000', None, None,
     '00000000-0000-0000-0000-000000000000', now()),
]


@register.model()
class luxon_user_role(SQLModel):
    id = SQLModel.Uuid(default=uuid4, internal=True)
    role_id = SQLModel.Uuid()
    domain = SQLModel.Fqdn(internal=True)
    tenant_id = SQLModel.String()
    user_id = SQLModel.Uuid()
    creation_time = SQLModel.DateTime(readonly=True, default=now)
    unique_user_role = SQLModel.UniqueIndex(role_id, tenant_id, user_id)
    user_role_id_ref = SQLModel.ForeignKey(role_id, luxon_role.id)
    user_role_domain_ref = SQLModel.ForeignKey(domain, luxon_domain.name)
    user_role_tenant_ref = SQLModel.ForeignKey(tenant_id, luxon_tenant.id)
Example #19
File: acct.py  Project: Vuader/calabiyau
def usage(db, fr, user, input_octets=0, output_octets=0, status="start"):
    # Return Values
    # 0 All good.
    # 1 Deactivate Subscriber
    unique_session_id = fr['Acct-Unique-Session-Id']
    user_id = user['id']
    nas_secret = user['nas_secret']

    # Combined input/output usage for session
    combined = input_octets + output_octets

    utc_datetime = now()

    with db.cursor() as crsr:
        ####################
        # GET USER SESSION #
        ####################
        crsr.execute(
            "SELECT" + " id," + " ctx" + " FROM calabiyau_session" +
            ' WHERE acctuniqueid = %s' + ' LIMIT 1' + ' FOR UPDATE',
            (unique_session_id, ))
        session = crsr.fetchone()
        session_ctx = session['ctx']

        if user['package_span'] and user['package_span'] > 0:
            if (utc(user['package_expire'])
                    and utc_datetime > utc(user['package_expire'])):
                if session_ctx != 1:
                    applyctx(crsr, user, 1, fr, nas_secret, status)
                crsr.commit()
                return 1

        crsr.execute(
            'SELECT * FROM calabiyau_subscriber' + ' WHERE id = %s' +
            ' FOR UPDATE', (user_id, ))
        locked_user = crsr.fetchone()
        if user and locked_user:
            # IF DATA PLAN NOT UNCAPPED
            if user['plan'] == 'data':
                ######################
                # CHECK PACKAGE DATA #
                ######################
                volume_used_bytes = locked_user['volume_used_bytes'] + combined
                pkg_volume_used = locked_user['volume_used']
                if user['volume_gb']:
                    package_volume_bytes = (user['volume_gb'] * 1024 * 1024 *
                                            1024)
                else:
                    package_volume_bytes = 0

                if utc(locked_user['volume_expire']) < utc_datetime:
                    if user['volume_repeat']:
                        log.info('Package data reloaded (%s)' %
                                 user['username'])
                        new_expire = calc_next_expire(user['volume_metric'],
                                                      user['volume_span'],
                                                      utc_datetime)
                        crsr.execute(
                            "UPDATE calabiyau_subscriber" +
                            " SET volume_expire = %s," +
                            " volume_used_bytes = 0," + " volume_used = 0," +
                            " ctx = 0" + " WHERE id = %s", (
                                new_expire,
                                user['id'],
                            ))
                        pkg_volume_used = 0
                        if session_ctx != 0:
                            applyctx(crsr, user, 0, fr, nas_secret, status)
                        crsr.commit()
                        return 0
                    else:
                        crsr.execute(
                            "UPDATE calabiyau_subscriber" +
                            " SET volume_used_bytes = 0," + " volume_used = 1," +
                            " ctx = 1" + " WHERE id = %s", (user['id'], ))
                        pkg_volume_used = 1
                        log.info('Package data expired (%s)' %
                                 user['username'])

                if (not pkg_volume_used
                        and volume_used_bytes > package_volume_bytes):
                    crsr.execute(
                        "UPDATE calabiyau_subscriber" +
                        " SET volume_used_bytes = 0," + " volume_used = 1," +
                        " ctx = 1" + " WHERE id = %s", (user_id, ))
                    log.info('Package data depleted (%s)' % user['username'])
                elif (not pkg_volume_used
                      and volume_used_bytes <= package_volume_bytes):
                    crsr.execute(
                        "UPDATE calabiyau_subscriber" +
                        " SET volume_used_bytes = " +
                        " volume_used_bytes + %s," + " ctx = 0" +
                        " WHERE id = %s", (
                            combined,
                            user_id,
                        ))
                    if session_ctx != 0:
                        applyctx(crsr, user, 0, fr, nas_secret, status)
                    crsr.commit()
                    return 0

                ####################
                # CHECK TOPUP DATA #
                ####################
                crsr.execute(
                    'SELECT * FROM calabiyau_topup' + ' WHERE user_id = %s' +
                    ' ORDER BY creation_time asc' + ' FOR UPDATE', (user_id, ))
                topups = crsr.fetchall()
                for topup in topups:
                    if topup['volume_gb']:
                        topup_volume_bytes = (topup['volume_gb'] * 1024 *
                                              1024 * 1024)
                    else:
                        topup_volume_bytes = 0

                    if utc(topup['volume_expire']) < utc_datetime:
                        if topup['volume_repeat']:
                            log.auth('Topup renew (%s, %s Gb, %s)' % (
                                user['username'],
                                topup['volume_gb'],
                                topup['creation_time'],
                            ))
                            new_expire = calc_next_expire(
                                topup['volume_metric'], topup['volume_span'],
                                utc_datetime)

                            crsr.execute(
                                "UPDATE calabiyau_topup" +
                                " SET volume_expire = %s" + " WHERE id = %s", (
                                    new_expire,
                                    topup['id'],
                                ))
                            crsr.execute(
                                "UPDATE calabiyau_subscriber" +
                                " SET volume_used_bytes = 0," + " ctx = 0" +
                                " WHERE id = %s", (user_id, ))
                            if session_ctx != 0:
                                applyctx(crsr, user, 0, fr, nas_secret, status)
                            crsr.commit()
                            return 0
                        else:
                            log.auth('Topup expired (%s, %s Gb, %s)' % (
                                user['username'],
                                topup['volume_gb'],
                                topup['creation_time'],
                            ))
                            crsr.execute(
                                "UPDATE calabiyau_subscriber" +
                                " SET volume_used_bytes = 0," + " ctx = 0" +
                                " WHERE id = %s", (user_id, ))
                            crsr.execute(
                                'DELETE FROM' + ' calabiyau_topup' +
                                ' WHERE id = %s', (topup['id'], ))
                    else:
                        if volume_used_bytes < topup_volume_bytes:
                            crsr.execute(
                                "UPDATE calabiyau_subscriber" +
                                " SET volume_used_bytes = " +
                                " volume_used_bytes + %s," + " ctx = 0" +
                                " WHERE id = %s", (
                                    combined,
                                    user_id,
                                ))
                            if session_ctx != 0:
                                applyctx(crsr, user, 0, fr, nas_secret, status)
                            crsr.commit()
                            return 0
                        else:
                            log.auth('Topup depleted (%s, %s Gb, %s)' % (
                                user['username'],
                                topup['volume_gb'],
                                topup['creation_time'],
                            ))
                            crsr.execute(
                                "UPDATE calabiyau_subscriber" +
                                " SET volume_used_bytes = 0," + " ctx = 0" +
                                " WHERE id = %s", (user_id, ))
                            crsr.execute(
                                'DELETE FROM' + ' calabiyau_topup' +
                                ' WHERE id = %s', (topup['id'], ))

                if session_ctx != 1:
                    applyctx(crsr, user, 1, fr, nas_secret, status)
                crsr.commit()
                return 1
            else:
                if session_ctx != 0:
                    applyctx(crsr, user, 0, fr, nas_secret, status)
                crsr.commit()
                return 0
        if session_ctx != 1:
            applyctx(crsr, user, 1, fr, nas_secret, status)
        crsr.commit()
        return 1
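
The cap checks above convert the package size from gigabytes to bytes and compare it against accumulated plus current-session usage; in outline (a zero or empty volume_gb means any usage depletes the cap, as in the code above):

def data_depleted(volume_gb, used_bytes, session_bytes):
    """True once accumulated usage plus the current session exceeds the cap."""
    cap_bytes = (volume_gb or 0) * 1024 ** 3   # package size in bytes
    return used_bytes + session_bytes > cap_bytes

print(data_depleted(10, 9 * 1024 ** 3, 2 * 1024 ** 3))  # True
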
Example #20
def request(client,
            method,
            url,
            params={},
            data=None,
            headers={},
            stream=False,
            endpoint=None,
            **kwargs):

    if endpoint is None:
        endpoint = url

    with Timer() as elapsed:
        method = method.upper()
        headers = headers.copy()
        params = params.copy()

        try:
            _cache_engine = Cache()
        except NoContextError:
            _cache_engine = None

        for kwarg in kwargs:
            # NOTE(cfrademan):
            # Generally headers have '-' not '_'. Also kwargs
            # cannot contain '-'.
            if kwargs[kwarg] is not None:
                header = kwarg.replace('_', '-')
                headers[header] = str(kwargs[kwarg])

        if data is not None:
            if hasattr(data, 'json'):
                data = data.json
            elif isinstance(data, (dict, list, OrderedDict)):
                data = js.dumps(data)
            data = if_unicode_to_bytes(data)

            if isinstance(data, bytes):
                headers['Content-Length'] = str(len(data))

        cached = None
        if (_cache_engine and stream is False and method == 'GET'
                and data is None):

            if isinstance(params, dict):
                cache_params = list(orderdict(params).values())

            if isinstance(headers, dict):
                cache_headers = list(orderdict(headers).values())

            cache_key = (method, url, cache_params, cache_headers)
            cache_key = str(md5sum(pickle.dumps(cache_key)))
            cached = _cache_engine.load(cache_key)
            if cached is not None:
                cache_control = parse_cache_control_header(
                    cached.headers.get('Cache-Control'))
                max_age = cache_control.max_age
                date = cached.headers.get('Date')
                etag = cached.headers.get('Etag')
                date = utc(date)
                current = now()
                diff = (current - date).total_seconds()
                if cache_control.no_cache:
                    # If no-cache revalidate.
                    headers['If-None-Match'] = etag
                elif max_age and diff < int(max_age):
                    # If not expired, use cache.
                    _debug(method, url, params, data, headers,
                           cached.headers, cached.content, cached.status_code,
                           elapsed(), 'Memory')
                    return cached
                else:
                    # If expired, revalidate.
                    headers['If-None-Match'] = etag

        try:
            # response = Response(client._s.request(method.upper(),
            #                                      url,
            #                                      params=params,
            #                                      data=data,
            #                                      headers=headers,
            #                                      stream=stream))

            # NOTE(cfrademan): Using prepared requests because we need no
            # chunked Transfer-Encoding and expect a Content-Length header.
            # Chunked encoding is not well supported when uploading to a WSGI app.
            prepped = client._s.prepare_request(
                requests.Request(method.upper(),
                                 url,
                                 params=params,
                                 data=data,
                                 headers=headers))

            if 'Content-Length' in prepped.headers:
                if 'Transfer-Encoding' in prepped.headers:
                    del prepped.headers['Transfer-Encoding']

            response = Response(client._s.send(prepped, stream=stream))

            if (_cache_engine and cached is not None
                    and response.status_code == 304):

                _debug(method, url, params, data, headers,
                       cached.headers, cached.content, cached.status_code,
                       elapsed(), 'Validated (304)')
                return cached

            if response.status_code >= 400:
                if 'X-Expired-Token' in response.headers:
                    raise TokenExpiredError()

                try:
                    title = None
                    description = None
                    if ('json' in response.content_type.lower()
                            and 'error' in response.json):
                        error = response.json['error']
                        try:
                            title = error.get('title')
                            description = error.get('description')
                            if endpoint is not None:
                                title += " (%s)" % endpoint
                        except AttributeError:
                            if endpoint is not None:
                                description = " Endpoint: %s" % endpoint
                    else:
                        if endpoint is not None:
                            description = " Endpoint: %s" % endpoint

                    if stream is True:
                        _debug(method, url, params, data, headers,
                               response.headers, None, response.status_code,
                               elapsed())
                    else:
                        _debug(method, url, params, data, headers,
                               response.headers, response.content,
                               response.status_code, elapsed())
                    raise HTTPError(response.status_code, description, title)
                except HTTPClientContentDecodingError:
                    if endpoint is not None:
                        description = 'Endpoint: %s' % endpoint
                        raise HTTPError(response.status_code,
                                        description=description) from None
                    else:
                        raise HTTPError(response.status_code) from None

            if _cache_engine and stream is False and method == 'GET':
                if response.status_code == 200:
                    cache_control = parse_cache_control_header(
                        response.headers.get('Cache-Control'))
                    if (not cache_control.no_store and cache_control.max_age
                            and response.headers.get('Etag')
                            and response.headers.get('Date') and data is None):
                        _cache_engine.store(cache_key, response, 604800)

        except requests.exceptions.InvalidHeader as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidHeader(e)
        except requests.exceptions.InvalidURL as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidURL(e)
        except requests.exceptions.InvalidSchema as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientInvalidSchema(e)
        except requests.exceptions.MissingSchema as e:
            e = append_to_error(e, endpoint)
            raise HTTPClientMissingSchema(e)
        except requests.exceptions.ConnectionError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientConnectionError(
                "API Connection error to '%s' (%s)" % (
                    url,
                    endpoint,
                ))
        except requests.exceptions.ProxyError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientProxyError("API proxy error to '%s' (%s)" % (
                url,
                endpoint,
            ))
        except requests.exceptions.SSLError as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientSSLError("API SSL error to '%s' (%s)" % (
                url,
                endpoint,
            ))
        except requests.exceptions.Timeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientTimeoutError(
                "API connection timeout to '%s' (%s)" % (
                    url,
                    endpoint,
                ))
        except requests.exceptions.ConnectTimeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientConnectTimeoutError(
                "API connect timeout to '%s' (%s)" % (
                    url,
                    endpoint,
                ))
        except requests.exceptions.ReadTimeout as e:
            e = append_to_error(e, endpoint)
            log.critical(e)
            raise HTTPClientReadTimeoutError("API read timeout to '%s' (%s)" %
                                             (
                                                 url,
                                                 endpoint,
                                             ))
        except requests.exceptions.HTTPError as e:
            e = append_to_error(e, endpoint)
            raise HTTPError(e.response.status_code, e)

        if stream is True:
            _debug(method, url, params, data, headers, response.headers, None,
                   response.status_code, elapsed())
        else:
            _debug(method, url, params, data, headers, response.headers,
                   response.content, response.status_code, elapsed())

    return response
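
The prepared-request note above is about avoiding chunked uploads: when the body is bytes, requests sets a Content-Length header on the prepared request, and any Transfer-Encoding header can be dropped before sending. A short sketch against a hypothetical URL:

import requests

session = requests.Session()
req = requests.Request('POST', 'https://api.example.com/v1/users',  # hypothetical URL
                       data=b'{"name": "alice"}',
                       headers={'Content-Type': 'application/json'})
prepped = session.prepare_request(req)

# With a bytes body the prepared request carries an explicit Content-Length,
# which is what a WSGI backend expects; drop Transfer-Encoding if present.
prepped.headers.pop('Transfer-Encoding', None)
print(prepped.headers['Content-Length'])  # '17'
# response = session.send(prepped, stream=False)
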
Example #21
from uuid import uuid4

from luxon import register
from luxon import SQLModel
from luxon.utils.timezone import now

ROLES = [
    ('00000000-0000-0000-0000-000000000000', 'Root', None, now()),
    (str(uuid4()), 'Operations', None, now()),
    (str(uuid4()), 'Administrator', None, now()),
    (str(uuid4()), 'Account Manager', None, now()),
    (str(uuid4()), 'Billing', None, now()),
    (str(uuid4()), 'Support', None, now()),
    (str(uuid4()), 'Customer', None, now()),
    (str(uuid4()), 'Wholesale', None, now()),
    (str(uuid4()), 'Minion', None, now()),
]


@register.model()
class luxon_role(SQLModel):
    id = SQLModel.Uuid(default=uuid4, internal=True)
    name = SQLModel.String(max_length=64, null=False)
Example #22
File: users.py  Project: secnam/luxon
from uuid import uuid4

from luxon import String
from luxon import Text
from luxon import DateTime
from luxon import Boolean
from luxon import Email
from luxon import Phone
from luxon import Enum
from luxon import Index
from luxon import ForeignKey
from luxon import UniqueIndex
from luxon import Username
from luxon import Fqdn
from luxon.utils.timezone import now

ROLES = [
    ('00000000-0000-0000-0000-000000000000', 'Root', None, now()),
    (str(uuid4()), 'Operations', None, '0000-00-00 00:00:00'),
    (str(uuid4()), 'Administrator', None, '0000-00-00 00:00:00'),
    (str(uuid4()), 'Account Manager', None, '0000-00-00 00:00:00'),
    (str(uuid4()), 'Billing', None, '0000-00-00 00:00:00'),
    (str(uuid4()), 'Customer', None, '0000-00-00 00:00:00'),
    (str(uuid4()), 'Support', None, '0000-00-00 00:00:00'),
]


@database_model()
class luxon_role(SQLModel):
    id = Uuid(default=uuid4, internal=True)
    name = String(max_length=64, null=False)
    description = Text()
    creation_time = DateTime(default=now, internal=True)
Example #23
from uuid import uuid4

from luxon import register
from luxon import SQLModel
from luxon.utils.timezone import now

DOMAINS = [
    ('00000000-0000-0000-0000-000000000000', 'default', None, 1, now()),
]


@register.model()
class luxon_domain(SQLModel):
    id = SQLModel.Uuid(default=uuid4, internal=True)
    name = SQLModel.Fqdn(null=False)
    description = SQLModel.Text()
    enabled = SQLModel.Boolean(default=True)
    creation_time = SQLModel.DateTime(default=now, readonly=True)
    primary_key = id
    unique_domain = SQLModel.UniqueIndex(name)
    db_default_rows = DOMAINS
    domains = SQLModel.Index(name)