Example #1
class Single:
  def __init__(self, client):
    self.model = Token(client)

  def on_put(self, req, resp, token_id):
    if authorize_as(req.auth, 'developer'):
      body = loads(req.stream.read().decode('utf-8'))
      resource = self.model.update(body, token_id)
      if resource.modified_count == 1:
        resp.status = HTTP_204
      else:
        raise HTTPBadRequest('failed to update resource',
            'a resource with id: ' + token_id + ' was not found')
    else:
      raise HTTPUnauthorized('unauthorized', 'unauthorized')

  def on_delete(self, req, resp, token_id):
    if authorize_as(req.auth, 'developer'):
      result = self.model.delete(token_id)
      if result.deleted_count == 1:
        resp.status = HTTP_204
      else:
        raise HTTPBadRequest('failed to delete resource',
            'a resource with id: ' + token_id + ' was not found')
    else:
      raise HTTPUnauthorized('unauthorized', 'unauthorized')
Example #2
def generate_token(db, user_id):
    token = Token()
    token.id = _generate_uuid_token()
    token.expires = datetime.datetime.now() + datetime.timedelta(seconds=conf.token_expires)
    token.user_id = user_id
    db.add(token)
    db.commit()
    return token
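The helper _generate_uuid_token is called above but not shown. A minimal, hypothetical sketch of such a helper, assuming the token id is simply a random UUID rendered as a hex string (not necessarily the project's actual implementation):

import uuid

def _generate_uuid_token():
    # Hypothetical helper: return a random 32-character hex token id.
    return uuid.uuid4().hex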
Example #3
    def updateToken(self, address):
        # The 0x...0001 address stands for the native TOMO coin and gets
        # hard-coded metadata; any other address is queried from its TRC21 contract.
        if address != '0x0000000000000000000000000000000000000001':
            t = self.web3.eth.contract(
                address=self.web3.toChecksumAddress(address),
                abi=self.TRC21ABI)
            total_supply = t.functions.totalSupply().call()
            symbol = t.functions.symbol().call()
            name = symbol
            decimals = t.functions.decimals().call()
            is_major = False
        else:
            total_supply = '1000000000000000000000000000000'
            symbol = 'TOMO'
            name = 'TomoChain'
            decimals = '18'
            is_major = True

        if symbol in ('BTC', 'USDT', 'TOMO', 'ETH'):
            is_major = True

        logger.info('Token address %s symbol %s major %s', address, symbol,
                    is_major)
        # Upsert: insert a new row, or just refresh is_major if the address already exists.
        rl = (Token.insert(address=address,
                           name=name,
                           symbol=symbol,
                           decimals=decimals,
                           total_supply=total_supply,
                           is_major=is_major).on_conflict(
                               conflict_target=(Token.address, ),
                               update={
                                   Token.is_major: is_major
                               }).execute())
Example #4
 def work(self, body):
     data = parse(body, DataSet)
     print "Tagging " + data.id
     for sent in data.sentences:
         tokens = [t.word for t in sent.tokens]
         sent.tokens = [Token(t[0], t[1]) for t in pos_tag(tokens)]
     self.write(as_json(data))
Example #5
    async def get(self):
        redis = self.application.redis
        keys = await redis.hlen('public_res')
        logger.debug('Length = %d', keys)

        if keys:
            cached = await redis.hgetall('public_res', encoding='utf-8')
            return self.json_response(
                {k: json.loads(v)
                 for k, v in cached.items()})

        relayers = [
            model_to_dict(relayer or {}) for relayer in Relayer.select()
        ]
        contracts = [
            model_to_dict(c or {})
            for c in Contract.select().where(Contract.obsolete == False)
        ]
        tokens = [model_to_dict(token or {}) for token in Token.select()]
        self.json_response({
            'Relayers': relayers,
            'Contracts': contracts,
            'Tokens': tokens
        })
        # Cache the freshly built payload in Redis for subsequent requests.
        await redis.hmset_dict('public_res',
                               Relayers=json.dumps(relayers),
                               Contracts=json.dumps(contracts),
                               Tokens=json.dumps(tokens))
Example #6
 def __init__(self, database, hash_method):
   self.collection = database.user
   self.token = Token(database)
   self.hash_method = hash_method
   self.create_keys = ['user_name', 'password', 'position', 'location_id']
   self.update_keys = ['password', 'score', 'stars', 'money',
                       'currency', 'position', 'location_id']
   self.admin_keys = ['user_name', 'trainer_number', 'level']
Example #7
class Base:
  def __init__(self, client):
    self.model = Token(client)

  def on_get(self, req, resp):
    if authorize_as(req.auth, 'developer'):
      resp.body = dumps(self.model.all())
    else:
      raise HTTPUnauthorized('unauthorized', 'unauthorized')

  def on_post(self, req, resp):
    if authorize_as(req.auth, 'developer'):
      body = loads(req.stream.read().decode('utf-8'))
      created = self.model.create(body)
      resp.status = HTTP_201
      resp.body = dumps({'id': created.inserted_id})
    else:
      raise HTTPUnauthorized('unauthorized', 'unauthorized')
Example #8
    async def get(self):
        relayers = [model_to_dict(relayer or {}) for relayer in Relayer.select()]
        tokens = [model_to_dict(token or {}) for token in Token.select()]
        domains = [model_to_dict(domain or {}) for domain in Domain.select()]
        contracts = Blockchain.contracts

        self.json_response({
            'Relayers': relayers,
            'Contracts': contracts,
            'Tokens': tokens,
            'Domains': domains
        })
Example #9
    async def post(self, user=None):
        """Add new tokens"""
        tokens = self.request_body

        if not tokens:
            raise InvalidValueException('Invalid empty payload')

        b = Blockchain()
        if not isinstance(tokens, list):
            token = tokens
            address = b.web3.toChecksumAddress(token['address'])
            b.updateToken(address)
            obj = Token.select().where(Token.address == address).get()
            return self.json_response(model_to_dict(obj))

        result = []
        for token in tokens:
            address = b.web3.toChecksumAddress(token['address'])
            b.updateToken(address)
            obj = Token.select().where(Token.address == address).get()
            result.append(model_to_dict(obj))

        self.json_response(result)
Example #10
 async def get(self):
     relayers = [
         model_to_dict(relayer or {}) for relayer in Relayer.select()
     ]
     contracts = [
         model_to_dict(c or {})
         for c in Contract.select().where(Contract.obsolete == False)
     ]
     tokens = [model_to_dict(token or {}) for token in Token.select()]
     self.json_response({
         'Relayers': relayers,
         'Contracts': contracts,
         'Tokens': tokens
     })
Example #11
 def get_tokens(self, tagged_tokens):
     tagConversionDict = {
         'NN': wn.NOUN,
         'JJ': wn.ADJ,
         'VB': wn.VERB,
         'RB': wn.ADV
     }
     tokens = []
     for index, tagged_token in enumerate(tagged_tokens):
         token = Token.Token(tagged_token[0], index, tagged_token[1])
         if token.penn_tag[:2] in tagConversionDict:
             token.wn_tag = tagConversionDict[token.penn_tag[:2]]
             token.lemma = self.lemmatizer.lemmatize(
                 token.token, token.wn_tag)
         tokens.append(token)
     return tokens
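A hedged, stand-alone sketch of the same Penn-Treebank-to-WordNet tag mapping used by get_tokens above; the sample sentence, the required NLTK data packages, and the printed output are illustrative assumptions only:

import nltk
from nltk.corpus import wordnet as wn
from nltk.stem import WordNetLemmatizer

# Map the first two letters of a Penn Treebank tag to a WordNet POS constant,
# then lemmatize with that POS, mirroring what get_tokens does per Token.
tag_map = {'NN': wn.NOUN, 'JJ': wn.ADJ, 'VB': wn.VERB, 'RB': wn.ADV}
lemmatizer = WordNetLemmatizer()
for word, penn_tag in nltk.pos_tag(nltk.word_tokenize("The dogs were running quickly")):
    wn_tag = tag_map.get(penn_tag[:2])
    lemma = lemmatizer.lemmatize(word, wn_tag) if wn_tag else word
    print(word, penn_tag, lemma)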
Example #12
 def __init__(self, client):
   self.model = Token(client)
Example #13
class User:
  def __init__(self, database, hash_method):
    self.collection = database.user
    self.token = Token(database)
    self.hash_method = hash_method
    self.create_keys = ['user_name', 'password', 'position', 'location_id']
    self.update_keys = ['password', 'score', 'stars', 'money',
                        'currency', 'position', 'location_id']
    self.admin_keys = ['user_name', 'trainer_number', 'level']

  def all(self):
    return self.collection.find()

  def find(self, user_id):
    if isinstance(user_id, str):
      user_id = ObjectId(user_id)
    return self.collection.find_one({'_id': user_id})

  def find_by_name(self, user_name):
    return self.collection.find_one({'user_name': user_name})

  def create(self, content):
    # NOTE: __user_name_used returns True when the name is still available.
    used = self.__user_name_used(content['user_name'])
    token = self.__check_token(content['token'])
    if used is False or token is False:
      return False
    insert = insert_object(self.create_keys, content, True)
    insert['password'] = self.hash_method.encrypt(insert['password'])
    insert['score'] = 0
    insert['stars'] = 0
    insert['money'] = 3000.0
    insert['level'] = token['level']
    insert['trainer_card'] = token['key']

    created = self.collection.insert_one(insert)
    new_user = self.find(created.inserted_id)
    self.token.update({'used': True}, token['_id'])
    new_user['goodies'] = token['goodies']
    new_user['token'] = sign_token(new_user['user_name'], new_user['level'])
    return new_user

  def update(self, content, user_id, is_dev=False):
    if is_dev:
      keys = list(set(self.update_keys) | set(self.admin_keys))
    else:
      keys = self.update_keys
    insert = insert_object(keys, content)
    if 'password' in insert:
      insert['password'] = self.hash_method.encrypt(insert['password'])
    resource = self.collection.update_one({'_id': ObjectId(user_id)},
                                          {'$set': insert})
    return resource

  def __user_name_used(self, name):
    check_user = self.collection.find_one({'user_name': name})
    return check_user is None

  def __check_token(self, token_key):
    token = self.token.find(token_key)
    if token is None or token['used']:
      return False
    else:
      return token
Example #14
from model import Token
from archives import DFA_ex

# token_stream = Lexer.scan('../examples/example2.dcf', True)
# print('\nmmmm token:', token_stream[0].token)

# x = x
token_stream = []
token_stream.append(Token.Token('', 'x', '', 0))
token_stream.append(Token.Token('', '=', '', 0))
token_stream.append(Token.Token('', 'x', '', 0))

token_stream.append(Token.Token('$', '$', '', 0))

input_length = len(token_stream)

for i in range(0, input_length):
    print(token_stream[i].token)

state_stack = [1]
token_stack = []

state_type = ''
state = 0
dfa_input = ''
i = 0

successful_parsing = False

while i < input_length:
    if i == 0:
Example #15
 def get_token_info(self, token):
     token = self.collection.find_one({'token': token})
     if token is None:
         return None
     target_token = Token(token)
     return target_token
Example #16
 def get(self, user=None):
     """Return all available tokens for trading"""
     tokens = [model_to_dict(token or {}) for token in Token.select()]
     self.json_response(tokens)
Example #17
from bson.objectid import ObjectId
from model import Token
from pymongo import MongoClient
from pytest import yield_fixture


client = MongoClient()
collection = client.trainer_card.token_test
token = Token(client)
# use a test database
token.collection = collection

@yield_fixture(autouse=True)
def tear_down_db():
  yield
  collection.remove({})

def test_create_token():
  resource = token.create({'level': 'developer', 'goodies': 1})
  inserted = collection.find_one({'_id': resource.inserted_id})
  assert isinstance(resource.inserted_id, ObjectId)
  assert isinstance(inserted['key'], str)
  assert isinstance(inserted['used'], bool)

def test_get_all():
  for x in range(5):
    collection.insert_one({'level': 'player', 'goodies': x})
  assert token.all().count() == 5

def test_find_by_key():
  resource = token.create({'level': 'developer', 'goodies': 1})
Example #18
import cgi
import cgitb
import sys
import os

sys.path.insert(0, '')

from local_cfg import cfg
token_file = cfg['token_file']

from model import Token
from dbutils import get_local_db_session

if __name__ == "__main__":
    print "Content-type: text/html\n\n"
    
    form = cgi.FieldStorage()
    data = str(form.getfirst('data', 'None'))

    session = get_local_db_session()
    
    cgitb.enable()

    token, submission = Token.get_new(session)
    if token is None:
        Token.generate_tokens(session)
        token, submission = Token.get_new(session)
    submission.dialogue_id = data
    session.commit()

    print token.number 
Example #19
File: Lexer.py  Project: armi3/compipower
def tokenize(lexemes, line_nums, d):
    token_stream = []

    count = 0
    for lexeme in lexemes:
        token = ''
        token_type = 0
        # only 1 comparison
        if lexeme == '!':
            token = 'NEGATION'
            token_type = 1

        elif lexeme == ';':
            token = 'SEMICOLON'
            token_type = 2

        elif lexeme == ',':
            token = 'COMMA'
            token_type = 2

        elif lexeme == '{':
            token = 'BRACE_L'
            token_type = 3

        elif lexeme == '}':
            token = 'BRACE_R'
            token_type = 3

        elif lexeme == '(':
            token = 'PARENTHESIS_L'
            token_type = 4

        elif lexeme == ')':
            token = 'PARENTHESIS_R'
            token_type = 4

        elif lexeme == '[':
            token = 'BRACKET_L'
            token_type = 5

        elif lexeme == ']':
            token = 'BRACKET_R'
            token_type = 5

        elif lexeme[0] == '"':
            token = 'STRING_LITERAL'
            token_type = 6

        # simple comparisons
        elif lexeme[0:2] == '//':
            token = 'COMMENT'
            token_type = 7

        elif lexeme == '=':
            token = 'ASSIGN'
            token_type = 8

        elif lexeme == '+=':
            token = 'PLUS_ASSIGN'
            token_type = 8

        elif lexeme == '-=':
            token = 'MINUS_ASSIGN'
            token_type = 8

        elif lexeme == '%':
            token = 'MOD'
            token_type = 9

        elif lexeme == '/':
            token = 'DIV'
            token_type = 9

        elif lexeme == '*':
            token = 'MULT'
            token_type = 9

        elif lexeme == '-':
            token = 'MINUS'
            token_type = 9

        elif lexeme == '+':
            token = 'SUM'
            token_type = 9

        elif lexeme == '>':
            token = 'GREATER_THAN'
            token_type = 10

        elif lexeme == '<':
            token = 'LESS_THAN'
            token_type = 10

        elif lexeme == '>=':
            token = 'GREATER_EQUALS_THAN'
            token_type = 10

        elif lexeme == '<=':
            token = 'LESS_EQUALS_THAN'
            token_type = 10

        elif lexeme == '==':
            token = 'EQUALS'
            token_type = 11

        elif lexeme == '!=':
            token = 'NOT_EQUALS'
            token_type = 11

        elif lexeme == '&&':
            token = 'AND'
            token_type = 12

        elif lexeme == '||':
            token = 'OR'
            token_type = 12

        # complex comparisons
        elif lexeme == 'class':
            token = 'RW_CLASS'
            token_type = 13

        elif lexeme == 'void':
            token = 'RW_VOID'
            token_type = 13

        elif lexeme == 'if':
            token = 'RW_IF'
            token_type = 13

        elif lexeme == 'else':
            token = 'RW_ELSE'
            token_type = 13

        elif lexeme == 'for':
            token = 'RW_FOR'
            token_type = 13

        elif lexeme == 'return':
            token = 'RW_RETURN'
            token_type = 13

        elif lexeme == 'break':
            token = 'RW_BREAK'
            token_type = 13

        elif lexeme == 'continue':
            token = 'RW_CONTINUE'
            token_type = 13

        elif lexeme == 'callout':
            token = 'RW_CALLOUT'
            token_type = 13

        elif lexeme == 'main':
            token = 'RW_MAIN'
            token_type = 13

        elif lexeme == 'int':
            token = 'VT_INTEGER'
            token_type = 14

        elif lexeme == 'boolean':
            token = 'VT_BOOLEAN'
            token_type = 14

        elif lexeme == 'true':
            token = 'TRUE_LITERAL'
            token_type = 15

        elif lexeme == 'false':
            token = 'FALSE_LITERAL'
            token_type = 15

        elif is_float(lexeme):
            token = 'DECIMAL_LITERAL'
            token_type = 16

        elif lexeme[0:2] in ('0x', '0X') and is_hex(lexeme):
            token = 'HEXADECIMAL_LITERAL'
            token_type = 17

        else:
            token = 'ID'
            token_type = 18

        tokenized = Token.Token(lexeme, token, token_type, line_nums[count])
        if d:
            print('\nlexeme:       ', tokenized.lexeme,
                  '\ntoken:        ', tokenized.token,
                  '\ntoken_type:   ', tokenized.token_type,
                  '\nline_num:     ', tokenized.line_num,
                  '\nobj:          ', type(tokenized))
        token_stream.append(tokenized)
        count += 1
        
    return token_stream
Example #20
 def get_all_tokens(self):
     tokens = self.collection.find()
     target_tokens = []
     for token in tokens:
         target_tokens.append(Token(token))
     return target_tokens
Example #21
def index_tokens(tokens):
    indexs = defaultdict(list)
    for token in tokens:
        tk = Token(token)
        indexs[tk.lineno].append(tk)
    return indexs