Example 1
def article(title):
    """Display an article."""

    article_path = os.path.join('./article', (title + '.md'))
    meta_path = os.path.join('./article_meta', (title + '.yml'))

    # if markdown file doesn't exist, return 404
    if not os.path.isfile(article_path):
        abort(404, "No such article.")

    meta = {'title': title}
    # if yml file exists, parse it. otherwise use defaults
    # ideally, defaults should be used on a per-value basis
    if os.path.isfile(meta_path):
        with open(meta_path, 'r') as stream:
            try:
                meta = yaml.safe_load(stream)
            except yaml.YAMLError as ex:
                abort(500, ex)

    with open(article_path, 'r') as stream:
        article_body = stream.read()

        # merge the meta, SETTINGS and content together into one dict
        article_content = meta.copy()
        article_content.update(SETTINGS)
        article_content['markdown'] = markdown.markdown(article_body)
        return template('article', article_content)
Example 2
def show_file_list(path='.'):
    path = request.params.get('p', path)
    resp_format = request.params.get('f', '')
    try:
        path, relpath, dirs, file_list, readme = files.get_dir_contents(path)
    except files.DoesNotExist:
        if path == '.':
            if resp_format == 'json':
                response.content_type = 'application/json'
                # the directory lookup failed, so return an empty listing
                return json.dumps(dict(
                    dirs=[],
                    files=[],
                    readme=''
                ))
            return dict(path='.', dirs=[], files=[], up='.', readme='')
        abort(404)
    except files.IsFileError as err:
        if resp_format == 'json':
            fstat = os.stat(path)
            response.content_type = 'application/json'
            return json.dumps(dict(
                name=os.path.basename(path),
                size=fstat[stat.ST_SIZE],
            ))
        return static_file(err.path, root=files.get_file_dir())
    up = os.path.normpath(os.path.join(path, '..'))
    if resp_format == 'json':
        response.content_type = 'application/json'
        return json.dumps(dict(
            dirs=dirs,
            files=dictify_file_list(file_list),
            readme=readme
        ))
    return dict(path=relpath, dirs=dirs, files=file_list, up=up, readme=readme)
Example 3
def handle_websocket():
    wsock = request.environ.get('wsgi.websocket')
    if not wsock:
        abort(400, 'Expected WebSocket request.')

    while True:
        try:
            message = wsock.receive()
            message_dict = eval(str(message))
            try:
                game_id = message_dict['gID']
                user_id = message_dict['uID']
                cmd = message_dict['cmd']
                data = message_dict['dat']
            except KeyError:
                print 'malformed message!'
                continue

            except TypeError as e:
                if e.message == "'NoneType' object has no attribute '__getitem__'":
                    if user_id is not None:
                        USERS.getUserByToken(user_id).signOut()
                else:
                    raise

            user = USERS.getUserByToken(user_id)
            print "received :", cmd, 'from', user_id
            webSocketParser.parse(cmd, data, user, wsock, USERS, GAMES.games[0].OOIs)

        except geventwebsocket.WebSocketError:
            print 'client disconnected'
            break
Example 4
 def error_handler(*args, **kwargs):
     try:
         return func(*args,**kwargs)
     except disc_exceptions.ServiceUnavailable:
         bottle.abort(503, 'Service Unavailable')
     except Exception:
         raise
Example 5
File: init.py Project: silky/umad
def index():
	url = request.query.url or ''

	if not url:
		human_method = { 'GET':"index", 'DELETE':"delete" }.get(request.method, 'something-something-action')
		abort(400, "Y U DO DIS? I can't {0} something unless you give me 'url' as a query parameter".format(human_method))
	debug(u"URL to index: %s" % url)

	human_action = { 'GET':"indexing", 'DELETE':"deletion" }.get(request.method, 'something-something-action')

	try:
		if request.method == 'DELETE':
			queue_name  = 'umad_deletion_queue'
		else:
			queue_name  = 'umad_indexing_queue'

		# Throw URLs into Redis. We're using this idiom to provide what is
		# effectively a "BSPOP" (blocking pop from a set), on a sorted set.
		# cf. Event Notification: http://redis.io/commands/blpop
		# I-It's not like I wanted the set to be sorted or anything! I'm
		# keeping input timestamps, just so you know.
		pipeline = teh_redis.pipeline()
		pipeline.zadd(queue_name, time.time(), url)
		pipeline.lpush('barber', 'dummy_value')
		pipeline.execute() # will return something like:   [ {0|1}, num_dummies ]
		debug(u"Successful insertion of {0} for {1}".format(url, human_action))
	except Exception as e:
		abort(500, "Something went boom while inserting {0}: {1}".format(url, e))


	return u"Success, enqueued URL for {0}: '{1}'".format(human_action, url)
Example 6
def searchUsers(authKey):
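	# check the caller's rights, then search for users matching the JSON body; a None result aborts with 404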
	currentUser = checkRights(authKey, True)
	givenUser = util.RequestParser(request).parseJson()
	results = connection.searchUser(givenUser)
	if results is None:
		abort(404, errors['missingDetails'])
	return json.dumps(Model().listAsSerialised(results))
Example 7
def currentBorrowed(authKey, id):
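	# only an Admin or the user themselves may list that user's borrowed items; anyone else gets 403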
	currentUser = checkRights(authKey, False)
	if isinstance(currentUser, Admin) or currentUser.id == id:
		user = connection.getUser(id)
		return json.dumps(user.listAsSerialised(user.getBorrowedItems()))
	else:
		abort(403, errors['wrongUser'])
Example 8
def update_book(book_id):
  """this function handles updating the title of an existing book"""
  if book_id < len(books):
    book_title = request.forms.get('title')
    books[book_id] = book_title
    redirect('/books/%i' % book_id)
  abort(404)
Example 9
        def wrapper(*args, **kwargs):

            def expire(when):
                for t in list(_times.keys()):
                    if (when - t) > timeout:
                        del _cache[_times[t]]
                        del _times[t]

            now = time.time()
            try:
                item = _cache[request.urlparts]
                if 'If-Modified-Since'  in request.headers:
                    try:
                        since = time.mktime(email.utils.parsedate(request.headers['If-Modified-Since']))
                    except (TypeError, ValueError, OverflowError):
                        since = now
                    if item['mtime'] >= since:
                        expire(now)
                        abort(304,'Not modified')
                for h in item['headers']:
                    response.set_header(str(h), item['headers'][h])
                body = item['body']
                response.set_header('X-Source', 'Worker Cache')
            except KeyError:
                body = callback(*args, **kwargs)
                item = {
                    'body': body,
                    'headers': response.headers,
                    'mtime': int(now)
                }
                _cache[request.urlparts] = item
                _times[now] = request.urlparts

            expire(now)
            return body
Example 10
    def heartbeat(self, sig):
        # self.syslog('heartbeat from "%s"' % sig)
        self._debug['msg_hbt'] += 1
        info = sig.split(':')
        if len(info) != 2:
            self.syslog('Unable to parse heartbeat cookie %s' % sig)
            bottle.abort(404, 'Unable to parse heartbeat')

        service_type = info[1]
        service_id = info[0]
        entry = self._db_conn.lookup_service(service_type, service_id)
        if not entry:
            self.syslog('Received stray heartbeat with cookie %s' % (sig))
            self._debug['hb_stray'] += 1
            bottle.abort(404, 'Publisher %s not found' % sig)

        # update heartbeat timestamp in database
        entry['heartbeat'] = int(time.time())

        # insert entry if timed out by background task
        if entry['sequence'] == -1:
            self._db_conn.insert_service(
                service_type, entry['service_id'], entry)
        self._db_conn.update_service(
                service_type, entry['service_id'], entry)

        m = sandesh.dsHeartBeat(
            publisher_id=sig, service_type=service_type,
            sandesh=self._sandesh)
        m.trace_msg(name='dsHeartBeatTraceBuf', sandesh=self._sandesh)
        return '200 OK'
Example 11
def handle_websocket():
    wsock = request.environ.get("wsgi.websocket")
    if not wsock:
        abort(400, "Expected WebSocket request.")

    while True:
        try:
            message = wsock.receive()
            mesDict = eval(str(message))
            try:
                gameID = mesDict["gID"]
                userID = mesDict["uID"]
                cmd = mesDict["cmd"]
                data = mesDict["dat"]
            except KeyError:
                print "malformed message!"
                continue

            except TypeError as e:
                if e.message == "'NoneType' object has no attribute '__getitem__'":
                    # it's likely that pesky onclose message I can't fix... ignore for now
                    print "connection closed"
                else:
                    raise
            # TODO: call message parser sort of like:
            # game_manager.parseMessage(message,wsock)
            # NOTE: message parser should probably be an attribute of the game
            print "received :", cmd, "from", userID
            webSocketParser.parse(cmd, data, USERS.getUserByToken(userID), wsock, GAMES.games[0].OOIs)
        except WebSocketError:
            print "client disconnected"
            break
Example 12
def api_specific_energy_providers(id, db):
    """
    Returns the current energy provider,
    or the specified energy provider.
    """
    if id == "current":
        provider = (db.query(database.Provider)
                    .filter_by(current=1)
                    .first())
    else:
        try:
            id = int(id)
        except ValueError:
            abort(400, "Invalid parameter.")

        provider = (db.query(database.Provider)
                    .filter_by(id=id)
                    .first())

    if not provider:
        provider = None
    else:
        provider = tools.to_dict(provider)
        if provider["day_slope_watt_euros"] != provider["night_slope_watt_euros"]:
            session = session_manager.get_session()
            user = db.query(database.User).filter_by(login=session["login"]).first()
            start_night_rate = ("%02d" % (user.start_night_rate // 3600) + ":" +
                                "%02d" % ((user.start_night_rate % 3600) // 60))
            end_night_rate = ("%02d" % (user.end_night_rate // 3600) + ":" +
                              "%02d" % ((user.end_night_rate % 3600) // 60))
            provider["start_night_rate"] = start_night_rate
            provider["end_night_rate"] = end_night_rate

    return {"data": provider}
Example 13
def http_get_datacenters(tenant_id):
    #check valid tenant_id
    if tenant_id != 'any':
        if not nfvo.check_tenant(mydb, tenant_id): 
            print 'httpserver.http_get_datacenters () tenant %s not found' % tenant_id
            bottle.abort(HTTP_Not_Found, 'Tenant %s not found' % tenant_id)
            return
    select_,where_,limit_ = filter_query_string(bottle.request.query, None,
            ('uuid','name','vim_url','type','created_at') )
    if tenant_id != 'any':
        where_['nfvo_tenant_id'] = tenant_id
        if 'created_at' in select_:
            select_[ select_.index('created_at') ] = 'd.created_at as created_at'
        if 'created_at' in where_:
            where_['d.created_at'] = where_.pop('created_at')
        result, content = mydb.get_table(FROM='datacenters as d join tenants_datacenters as td on d.uuid=td.datacenter_id',
                                      SELECT=select_,WHERE=where_,LIMIT=limit_)
    else:
        result, content = mydb.get_table(FROM='datacenters',
                                      SELECT=select_,WHERE=where_,LIMIT=limit_)
    if result < 0:
        print "http_get_datacenters Error", content
        bottle.abort(-result, content)
    else:
        #change_keys_http2db(content, http2db_tenant, reverse=True)
        convert_datetime2str(content)
        data={'datacenters' : content}
        return format_out(data)
Example 14
def get_transactions(arg=None):
    """
    /transactions/<hex>          return transaction by hexhash
    """
    try:
        tx_hash = arg.decode('hex')
    except TypeError:
        bottle.abort(500, 'No hex  %s' % arg)
    try: # index
        tx, blk = chain_manager.index.get_transaction(tx_hash)
    except KeyError:
        # try miner
        txs = chain_manager.miner.get_transactions()
        found = [tx for tx in txs if tx.hex_hash() == arg]
        if not found:
            return bottle.abort(404, 'Unknown Transaction  %s' % arg)
        tx, blk = found[0], chain_manager.miner.block
    # response
    tx = tx.to_dict()
    tx['block'] = blk.hex_hash()
    if not chain_manager.in_main_branch(blk):
        tx['confirmations'] = 0
    else:
        tx['confirmations'] = chain_manager.head.number - blk.number
    return dict(transactions=[tx])
Example 15
def api_get_ids(sensor, watt_euros, id1, id2, db):
    """
    Returns measures between ids <id1> and <id2> from sensor <sensor> in
    watts or euros.

    If id1 and id2 are negative, counts from the end of the measures.

    * If `watt_euros` is watts, returns the list of measures.
    * If `watt_euros` is kwatthours, returns the total energy for all the
    measures (dict).
    * If `watt_euros` is euros, returns the cost of all the measures (dict).

    Returns measures in ascending order of timestamp.

    Returns null if no measures were found.
    """
    if (id2 - id1) > config.get("max_returned_values"):
        abort(403,
              "Too many values to return. " +
              "(Maximum is set to %d)" % config.get("max_returned_values"))
    elif id2 < id1 or id2 * id1 < 0:
        abort(400, "Invalid parameters")
    else:
        data = cache.do_cache_ids(sensor, watt_euros, id1, id2, db)

    return {"data": data, "rate": get_rate_type(db)}
Example 16
def delete(uuid):
    # TODO: Exception handling
    path = bottle.request.query.path
    if not path:
        bottle.abort(400)
    folders.delete(uuid, path)
    return "File/folder deleted"
Example 17
def track_points(track_id, db):
    bottle.response.set_header("Content-Type", "application/json")
    # As json does not support trailing comma (thanks IE team btw...) we need to know how many
    # points we are going to enumerate
    # TODO: make a look-ahead iterator to detect it without an extra request
    pt_count = db.execute("SELECT count(*) FROM point WHERE track_id=?", (track_id,)).fetchone()
    if pt_count is None:
        bottle.abort(404, "No such track.")

    pt_count = pt_count[0]
    points = db.execute(
        """
    SELECT timestamp, lat, lon, ele, distance_from_prev, speed 
    FROM point 
    WHERE track_id=? 
    ORDER BY seq""",
        (track_id,),
    )
    encoder = DateJSONEncoder()
    yield "["
    for p in points:
        yield encoder.encode(dict(p))
        pt_count -= 1
        if pt_count > 0:
            yield ","
    yield "]"
Example 18
def get_star(name):
	star = stardb.find_one({'_id':ObjectId(name)})
	print name
	if not star:
		abort(404, 'No document with id %s' % name)
	webpath = app.config.get('webpath')
	return dict(name=star['name'],webpath=webpath)
Example 19
def add_star():
	data = request.body.readline()
	_id = request.params.get('_id')
	name = request.params.get('name')
	surname = request.params.get('surname')
	blogs = request.params.get('blog')
	fb = request.params.get('fb')
	gender = request.params.get('gender')
	occupanys = request.params.get('occupation')
	country = request.params.get('country')
	occupant = []

	blog_list = []
	blog_split = blogs.split(';')
	print blog_split
	for blog in blog_split:
		blog_list.append(blog)
	print blog_list


	if not data:
		abort(400, 'No data received')

	try:
		stardb.update({'_id':ObjectId(_id)},{'$set':{'name':name,'surname':surname,
		'blog':blog_list,'fb':fb, 'gender':gender, 'occupant':occupant, 'country':country}},upsert=False)
	except Exception as ve:
		abort(400, str(ve))
Example 20
def serve(layer, zoom=None, x=None, z=None, fmt=None):
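    # collect the tile request parameters; non-numeric x/z values lead to a 404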
    try:
        obj = {}
        obj['layer'] = layer
        if zoom is not None: obj['zoom'] = zoom
        if x is not None: obj['x'] = int(x)
        if z is not None: obj['z'] = int(z)
        if fmt is not None: obj['format'] = fmt
    except ValueError:
        abort(404)
    
    try:
        if obj.get('format') == 'json':
            return serve_json(obj)
        elif obj.get('format') == 'png':
            return serve_tile(obj)
        else:
            obj = profile.load(obj)
            if obj is not None and 'raw_data' in obj:
                return obj['raw_data']
    except RuntimeError as e:
        logging.error(e)
    except IOError as e:
        if e.filename is not None and e.strerror is not None:
            logging.error('%s: %s' % (e.filename, e.strerror))
        else:
            logging.error(e)
    
    abort(404, 'Object not found')
Example 21
def createUser():
   print 'create User ------> moo.signUp'
   jsonData = json.loads(request.body.read())
   if not jsonData:
       abort(400, 'No data received')
   print jsonData
   return room.createUser(jsonData)
Example 22
def show_special_page(page_name, login_data=False):
    page = entries.get_page(page_name)
    if not page:
        abort(404, "Not Found")
    page_variables = generate_pagevars(login_data, page['title'],
                                       page['title'].replace(' ', ', '))
    return template('special', page_variables, page=page)
Example 23
def delete_channel(url):
	try:
		c = Channel.get(Channel.url == url)
		Item.delete().where(Item.channel == c).execute()	
		Channel.delete().where(Channel.url == url).execute()			
	except Channel.DoesNotExist:
		abort(404)	
Example 24
def upload_image(session):
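    # session['in'] presumably marks a signed-in user; anyone else gets a 404 instead of an upload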
    if not session.get('in'):
        bottle.abort(404)
    upload = bottle.request.files.get('upload')
    upload.filename = bottle.request.forms.get('filename')
    upload.save('./uploads')
    return 'Done!'
Example 25
def getfileref():
    """Returns a URL to the static file indicated by the query parameters."""
    if not settings.ALLOW_STATIC_FILE_ACCESS:
        abort(404)
    response.content_type = 'text/plain; charset=utf-8'
    return "http://%s:%d/static/%s" % (settings.HOST, settings.PORT,
                                       pathname2url(resolve_file()))
Example 26
def urls(mongodb):
    try:
        auth.require(role='access_normalized')
    except AAAException as e:
        return HTTPError(401, e.message)

    query_keys = request.query.keys()
    query_dict = {}

    if 'limit' in query_keys:
        limit = int(request.query.limit)
    else:
        limit = 50

    if 'url_regex' in query_keys:
        query_dict['url'] = {'$regex': request.query.url_regex}

    if 'hash' in query_keys:
        hash_length = len(request.query['hash'])
        if hash_length == 128:
            query_dict['extractions.hashes.sha512'] = request.query['hash']
        elif hash_length == 40:
            query_dict['extractions.hashes.sha1'] = request.query['hash']
        elif hash_length == 32:
            query_dict['extractions.hashes.md5'] = request.query['hash']
        else:
            abort(400, '{0} could not be recognized as a supported hash. '
                       'Currently supported hashes are: SHA1, SHA512 and MD5.'.format(request.query['hash']))

    result = list(mongodb['url'].find(query_dict, fields={'_id': False}).limit(limit))
    return jsonify({'urls': result}, response)
Example 27
def post_bundle_info(uuid):
    '''
    Save metadata information for a bundle.
    '''
    service = BundleService()
    bundle_info = service.get_bundle_info(uuid)
    # Save only if we're the owner.
    if bundle_info['edit_permission']:
        # TODO(klopyrev): The Content-Type header is not set correctly in
        # editable_field.jsx, so we can't use request.json.
        data = json.loads(request.body.read())
        new_metadata = data['metadata']

        # TODO: do this generally based on the CLI specs.
        # Remove generated fields.
        for key in ['data_size', 'created', 'time', 'time_user', 'time_system', 'memory', 'disk_read', 'disk_write', 'exitcode', 'actions', 'started', 'last_updated']:
            if key in new_metadata:
                del new_metadata[key]

        # Convert to arrays
        for key in ['tags', 'language', 'architectures']:
            if key in new_metadata and isinstance(new_metadata[key], basestring):
                new_metadata[key] = new_metadata[key].split(',')

        # Convert to ints
        for key in ['request_cpus', 'request_gpus', 'request_priority']:
            if key in new_metadata:
                new_metadata[key] = int(new_metadata[key])

        service.update_bundle_metadata(uuid, new_metadata)
        bundle_info = service.get_bundle_info(uuid)
        return bundle_info
    else:
        abort(httplib.FORBIDDEN, 'Can\'t save unless you\'re the owner')
Example 28
def track(id):
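  # return the consignment's tracking details as JSON, or 404 if nothing is found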
  details = Tracker().track(id)
  if details is not None:
    response.set_header('Content-Type', 'application/json')
    return json.dumps(details, cls=DateTimeEncoder, sort_keys=True, indent=4, separators=(',', ': '))
  else:
    abort(404, 'Consignment details not Found')
Example 29
def get_document():
    cursor = db['announcementcollection'].find()
    if not cursor:
        abort(404, 'No documents found')
    response.content_type = 'application/json'
    entries = [entry for entry in cursor]
    return MongoEncoder().encode(entries)
Example 30
 def _fix_json(self, js={}, via=False):
     # Validate / Patch JSON
     if not js:
         try:    
             js = request._json
             if not js:
                 abort(400, "Empty JSON")
         except Exception as e:
             abort(400, "JSON is not well formed: {0}".format(e))
     if '_id' in js:
         del js['_id']
     if 'id' in js:
         # Record old IRI in via
         if via:
             if 'via' in js:
                 v = js['via']
                 if not isinstance(v, list):
                     v = [v]
                 if js['id'] not in v:
                     v.append(js['id'])
                     js['via'] = v
             else:
                 js['via'] = js['id']
         del js['id']
     return js
Example 31
def _exists(jobname):
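    # abort with 400 when a job file with this name (plain or starred) already exists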
    namepath = os.path.join(conf['confdir'], jobname.strip('/\\'))
    if os.path.exists(namepath+'.dba') or os.path.exists(namepath+'.dba.starred'):
        bottle.abort(400, "File name exists.")
Example 32
    def base_tree(self, path_on_disk):
        connection = sqlite3.connect(path_on_disk)
        cursor = connection.cursor()
        base_tree = []

        try:
            cursor.execute("SELECT * FROM sqlite_master WHERE type='table';")
            cursor.fetchone()
        except sqlite3.DatabaseError:
            abort(
                500,
                'File does not have a SQLite Master table. The file might be corrupt or not a SQLite file.'
            )

        # Master Table
        base_tree.append({
            'title': u'Master Table (1)',
            'key': u'master',
            'folder': True,
            'lazy': True
        })

        # Tables
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
        tables = cursor.fetchall()
        base_tree.append({
            'title': u'Tables (' + unicode(len(tables)) + u')',
            'key': u'table',
            'folder': True,
            'lazy': True
        })

        # Views
        cursor.execute("SELECT name FROM sqlite_master WHERE type='view';")
        views = cursor.fetchall()
        base_tree.append({
            'title': u'Views (' + unicode(len(views)) + u')',
            'key': u'view',
            'folder': True,
            'lazy': True
        })

        # Indexes
        cursor.execute("SELECT name FROM sqlite_master WHERE type='index';")
        indexes = cursor.fetchall()
        base_tree.append({
            'title': u'Indexes (' + unicode(len(indexes)) + u')',
            'key': u'index',
            'folder': True,
            'lazy': True
        })

        # Triggers
        cursor.execute("SELECT name FROM sqlite_master WHERE type='trigger';")
        triggers = cursor.fetchall()
        base_tree.append({
            'title': u'Triggers (' + unicode(len(triggers)) + u')',
            'key': u'trigger',
            'folder': True,
            'lazy': True
        })
        response.content_type = 'application/json'

        connection.close()
        return json.dumps(base_tree)
Example 33
def check_run_permission(bundle):
    """
    Checks whether the current user can run the bundle.
    """
    if not check_bundle_have_run_permission(local.model, request.user.user_id, bundle):
        abort(http.client.FORBIDDEN, "User does not have permission to run bundle.")
Example 34
def restricted():
    abort(401, "Sorry, access denied.")  # or redirect to a funny page
Example 35
 def test():
     bottle.abort(401)
Example 36
        def start_model(project):
            '''
                Manually launches one of the model processes (train, inference, both, etc.),
                depending on the provided flags.
            '''
            if not self.loginCheck(project=project, admin=True):
                abort(401, 'forbidden')
            try:
                params = request.json
                doTrain = 'train' in params and params['train'] is True
                doInference = 'inference' in params and params[
                    'inference'] is True

                if 'minNumAnnoPerImage' in params:
                    minNumAnnoPerImage = int(params['minNumAnnoPerImage'])
                else:
                    minNumAnnoPerImage = 0  #TODO
                if 'maxNum_train' in params:
                    maxNumImages_train = int(params['maxNum_train'])
                else:
                    maxNumImages_train = -1  #TODO
                if 'maxNum_inference' in params:
                    maxNumImages_inference = int(params['maxNum_inference'])
                else:
                    maxNumImages_inference = -1  #TODO

                if doTrain:
                    if doInference:
                        status = self.middleware.start_train_and_inference(
                            project=project,
                            minTimestamp='lastState',
                            minNumAnnoPerImage=minNumAnnoPerImage,
                            maxNumWorkers_train=1,  #TODO
                            forceUnlabeled_inference=False,
                            maxNumImages_inference=maxNumImages_inference,
                            maxNumWorkers_inference=-1)  #TODO
                    else:
                        #TODO: expand to other tasks and requests
                        if self.middleware.task_ongoing(
                                project,
                            ('AIController.start_training',
                             'AIWorker.call_train',
                             'AIWorker.call_average_model_states')):
                            raise Exception(
                                'A training process is already ongoing for project "{}".'
                                .format(project))

                        status = self.middleware.start_training(
                            project=project,
                            numEpochs=1,
                            minTimestamp='lastState',
                            minNumAnnoPerImage=minNumAnnoPerImage,
                            maxNumImages=maxNumImages_train,
                            maxNumWorkers=1)  #TODO
                else:
                    status = self.middleware.start_inference(
                        project=project,
                        forceUnlabeled=False,
                        maxNumImages=maxNumImages_inference,
                        maxNumWorkers=-1)  #TODO

                return {'status': status}
            except Exception as e:
                abort(400, 'bad request')
Example 37
 def _decorator(*args, **kwargs):
         if driveboard.connected():
             return func(*args, **kwargs)
         else:
             bottle.abort(400, "No machine.")
Example 38
def offsety(y):
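    # apply a y offset, refused with 400 unless the machine reports ready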
    if not driveboard.status()['ready']:
        bottle.abort(400, "Machine not ready.")
    driveboard.offset(y=y)
    return '{}'
Example 39
 def aide_exec():
     '''
         Reserve for future implementations that require
         unauthorized but token-protected services.
     '''
     abort(404, 'not found')
Example 40
def offset(x, y, z):
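    # apply an x/y/z offset (absoffset), refused with 400 unless the machine reports ready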
    if not driveboard.status()['ready']:
        bottle.abort(400, "Machine not ready.")
    driveboard.absoffset(x, y, z)
    return '{}'
Example 41
 def flowmodHandler():
     if 'flowmod' in restHandlers:
         data = json.load(request.body)
         return restHandlers['flowmod'](data)
     else:
         abort(404, "Not found: '/flowmod'")
Example 42
def nofound():
    # raise a 404 error
    abort(404)
Example 43
    def outer(*args, **kwargs):
        response.content_type = CONTENT_TYPE
        if request.get_header("Origin"):
            response.set_header("Access-Control-Allow-Origin", "*")

        if request.get_header("Access-Control-Request-Headers"):
            response.set_header(
                "Access-Control-Allow-Headers",
                request.get_header("Access-Control-Request-Headers"))

        if request.get_header("Access-Control-Request-Method"):
            response.set_header("Access-Control-Allow-Methods",
                                "GET, POST, PATCH, DELETE, OPTIONS")

        try:
            return fn(*args, **kwargs)

        except ValueError:
            logging.error("".join(traceback.format_exception(*sys.exc_info())))
            abort(400, "Could not parse request. Be sure to use valid JSON.")

        except IntegrityError as e:
            if str(e).startswith("NOT NULL constraint"):
                field = str(e).split(": ")[1]
                if field.endswith("_id"):
                    field = field[:-3]

                abort(400, field + " cannot be null")

            if "UNIQUE constraint" in str(e):
                abort(409, "This id is already taken.")

            abort(400, e)

        except AssertionError as e:
            abort(400, e)

        except JsonAPIException as e:
            abort(e.status, e)

        except Exception as e:
            if e.__class__.__name__.endswith("DoesNotExist"):
                abort(404, "The requested resource does not exist.")

            if str(e).startswith("Instance matching query"):
                abort(400, "Trying to set relationship with invalid resource.")

            # log this unknown error
            logging.error("".join(traceback.format_exception(*sys.exc_info())))

            abort(500, e)
Example 44
 def StatHandler():
     if 'stat' in restHandlers:
         return restHandlers['stat'](request)
     else:
         abort(404, "Not found: '/info/%s'" % request)
Example 45
def home():
    abort(418)
Example 46
 def restRouter(request):
     if request in restHandlers:
         return restHandlers[request]()
     else:
         abort(404, "Not found: '/info/%s'" % request)
Example 47
def get_flow():
    """Get special values from Nmap & View databases

    :query str q: query (including limit/skip, orderby, etc.)
    :query str callback: callback to use for JSONP results
    :query str action: can be set to "details"
    :status 200: no error
    :status 400: invalid referer
    :>json object: results

    """
    callback = request.params.get("callback")
    action = request.params.get("action", "")
    if callback is None:
        response.set_header('Content-Disposition',
                            'attachment; filename="IVRE-results.json"')
    else:
        yield callback + "(\n"
    utils.LOGGER.debug("Params: %r", dict(request.params))
    query = json.loads(request.params.get('q', '{}'))
    limit = query.get("limit", config.WEB_GRAPH_LIMIT)
    skip = query.get("skip", config.WEB_GRAPH_LIMIT)
    mode = query.get("mode", "default")
    count = query.get("count", False)
    orderby = query.get("orderby", None)
    timeline = query.get("timeline", False)
    try:
        before = datetime.datetime.strptime(query["before"], "%Y-%m-%d %H:%M")
    except (TypeError, ValueError) as e:
        utils.LOGGER.warning(str(e))
        before = None
    except KeyError:
        before = None
    try:
        after = datetime.datetime.strptime(query["after"], "%Y-%m-%d %H:%M")
    except (TypeError, ValueError) as e:
        utils.LOGGER.warning(str(e))
        after = None
    except KeyError:
        after = None

    utils.LOGGER.debug("Action: %r, Query: %r", action, query)
    if action == "details":
        # TODO: error
        if query["type"] == 'node':
            res = db.flow.host_details(query["id"])
        else:
            res = db.flow.flow_details(query["id"])
        if res is None:
            abort(404, "Entity not found")
    else:
        cquery = db.flow.from_filters(query,
                                      limit=limit,
                                      skip=skip,
                                      orderby=orderby,
                                      mode=mode,
                                      timeline=timeline,
                                      after=after,
                                      before=before)
        if count:
            res = db.flow.count(cquery)
        else:
            res = db.flow.to_graph(cquery,
                                   limit=limit,
                                   skip=skip,
                                   orderby=orderby,
                                   mode=mode,
                                   timeline=timeline,
                                   after=after,
                                   before=before)
    yield json.dumps(res, default=utils.serialize)
    if callback is not None:
        yield ");\n"
Example 48
def get_image(image_id):
    # parts = image_id.split('.')
    # if len(parts) != 1 and len(parts) != 2:
    #     abort(400, 'invalid request!')
    # if len(parts[0]) != 32:
    #     abort(400, 'invalid ID!')
    #
    # image_id = parts[0]
    #
    #     ext = ''
    # if len(parts) > 1:
    #     ext = parts[1].lower()
    #     if ext not in ALLOWED_EXTENSIONS:
    #         abort(400, 'invalid format!')
    #     if ext == 'jpg': ext = 'jpeg'

    image_id = image_id.lower()
    re_id = re.match(RE_ID, image_id)
    re_id_ext = re.match(RE_ID_EXT, image_id)
    if re_id:
        ext = ''
    elif re_id_ext:
        image_id = re_id_ext.group(1)
        ext = re_id_ext.group(2)
        if ext not in ALLOWED_EXTENSIONS:
            abort(400, 'invalid format!')
    else:
        abort(400, 'invalid request!')
    if ext == 'jpg': ext = 'jpeg'

    addr = id2address(image_id)
    path = os.path.join(addr, image_id)
    h = int(request.query.h or 0)  # limit on parameter sizes?
    w = int(request.query.w or 0)
    s = request.query.s
    a = int(request.query.a or 0)
    filter = request.query.f
    ratio = request.query.r
    quality = request.query.q
    watermark = int(request.query.watermark or 0)
    position = request.query.p
    watermark_text = request.query.text

    if s and s not in SETTING_OPTIONS:
        abort(400, 'invalid options!')
    if a >= 360:
        abort(400, 'invalid angle!')
    if ratio and (int(ratio) > 100 or int(ratio) < 0):
        abort(400, 'invalid ratio!')
    if quality and (int(quality) > 100 or int(quality) < 0):
        abort(400, 'invalid quality!')
    if watermark:
        if not position:
            position = 'right_bottom'
        elif position not in POSITIONS:
            abort(
                400,
                'invalid position! valid positions:[centre, left_top, left_bottom, right_top, right_bottom]'
            )

    cache_id = gen_cache_key(image_id, w, h, ext, s, a, filter, ratio, quality,
                             watermark, position, watermark_text)
    data = cache.get(cache_id)
    # data = None
    if data is None:
        data = cache.get(gen_cache_key(image_id))
        if data is None:
            with open(path, 'rb') as f:
                data = f.read()
            logging.info('open image in file: %s', image_id)
        else:
            logging.info('get origin cache: %s', gen_cache_key(image_id))

        need_resize, need_save, need_rotate, need_filter, need_reduce, need_watermark = False, False, False, False, False, False
        if h or w or ratio or a or filter or ext or quality or watermark:
            imgfile = io.BytesIO(data)
            imgfile.seek(0)
            im = Image.open(imgfile)
            fmt = im.format

            if a: need_rotate = True
            if filter: need_filter = True
            if h or w: need_resize = True
            if ratio:
                ratio = int(ratio)
                need_reduce = True
            if watermark: need_watermark = True
            if need_resize or need_rotate or need_filter or ext != fmt or quality or need_watermark:
                need_save = True

            if need_resize and need_reduce:
                abort(
                    400,
                    "invalid request! can't reduce and resize a image in the same time!"
                )

            if need_resize:
                if s == 'fill':
                    if not h or not w:
                        abort(400, 'fill option needs 2 arguments!')
                    im = fill(im, w, h)
                    logging.info('[fill] (%s) width: %s height: %s', image_id,
                                 w, h)
                elif s == 'fit':
                    if not h or not w:
                        abort(400, 'fit option needs 2 arguments!')
                    im = fit(im, w, h)
                    logging.info('[fit] (%s) width: %s height: %s', image_id,
                                 w, h)
                else:
                    im = resize_image(im, w, h)
                    logging.info('resize (%s) width: %s height: %s', image_id,
                                 w, h)

            if need_reduce:
                im = reduce_image(im, ratio)
                logging.info('reduce (%s) in %d%% ', image_id, ratio)

            if need_rotate:
                im = im.rotate(a)
                logging.info('rotate (%s) angle: %s', image_id, a)

            if need_filter:
                if filter == 'b':
                    im = im.convert('1')
                    logging.info('covert (%s) to black', image_id)
                elif filter == 'bl':
                    im = im.filter(ImageFilter.BLUR)
                    logging.info('filter (%s) with BLUR ', image_id)
                else:
                    abort(400, 'invalid filter!')

            if need_watermark:
                if watermark_text:
                    print('watermark_text:', watermark_text)
                    # watermark_im = text2watermark(watermark_text)
                    watermark_im = text2watermark2(watermark_text)
                    logging.info('defined watermark (%s)', image_id)
                else:
                    watermark_im = Image.open(WATER_MARK_DEFAULT)
                    logging.info('default watermark (%s)', image_id)
                im = watermark_image(im, watermark_im, position)

            if need_save:
                if ext == '': ext = fmt
                buf = io.BytesIO()
                if quality:
                    im.save(buf, ext, quality=int(quality))
                    logging.info('save (%s) as %s, quality:%s', image_id, ext,
                                 quality)
                else:
                    im.save(buf, ext)
                    logging.info('save (%s) as %s', image_id, ext)
                data = buf.getvalue()

            mtype = 'image/' + ext
        else:
            mtype = magic.from_buffer(data, mime=True)
            logging.info('get origin image (%s)', image_id)
        cache.set(cache_id, data)
        cache.expire(cache_id, EXPIRE_TIME)
        d = cache.get(cache_id)
        if d is not None:
            logging.info('set cache: %s', cache_id)
    else:
        logging.info('get cache: %s', cache_id)
        mtype = magic.from_buffer(data, mime=True)

    res = HTTPResponse(body=data)
    res.set_header('Content-Type', mtype)
    return res
Example 49
 def get_project_settings():
     if self.loginCheck():
         settings = {'settings': self.middleware.getProjectSettings()}
         return settings
     else:
         abort(401, 'not logged in')
Example 50
 def not_implemented(self, *args, **kwargs):
     """Returns not implemented status."""
     abort(501)
Example 51
def passwords():
    return bottle.abort(201, 'AccessDenied')
Example 52
 def get_class_definitions():
     if self.loginCheck():
         classDefs = {'classes': self.middleware.getClassDefinitions()}
         return classDefs
     else:
         abort(401, 'not logged in')
Example 53
def callback():
    """Takes the response from the provider and verify the identity of the logged in user

    Returns:
        Redirect to the wanted page after authentication
    """
    session = request.environ.get("beaker.session")
    data = request.query.get("signedAttempt")

    if not data:
        return abort(400, "signedAttempt parameter is missing")

    data = j.data.serializers.json.loads(data)

    if "signedAttempt" not in data:
        return abort(400, "signedAttempt value is missing")

    username = data["doubleName"]

    if not username:
        return abort(400, "DoubleName is missing")

    res = requests.get(f"https://login.threefold.me/api/users/{username}",
                       {"Content-Type": "application/json"})
    if res.status_code != 200:
        return abort(400, "Error getting user pub key")
    pub_key = res.json()["publicKey"]

    user_pub_key = VerifyKey(j.data.serializers.base64.decode(pub_key))

    # verify data
    signedData = data["signedAttempt"]

    verifiedData = user_pub_key.verify(
        j.data.serializers.base64.decode(signedData)).decode()

    data = j.data.serializers.json.loads(verifiedData)

    if "doubleName" not in data:
        return abort(400, "Decrypted data does not contain (doubleName)")

    if "signedState" not in data:
        return abort(400, "Decrypted data does not contain (state)")

    if data["doubleName"] != username:
        return abort(400, "username mismatch!")

    # verify state
    state = data["signedState"]
    if state != session["state"]:
        return abort(400, "Invalid state. not matching one in user session")

    nonce = j.data.serializers.base64.decode(data["data"]["nonce"])
    ciphertext = j.data.serializers.base64.decode(data["data"]["ciphertext"])

    try:
        priv = j.core.identity.me.nacl.private_key
        box = Box(priv, user_pub_key.to_curve25519_public_key())
        decrypted = box.decrypt(ciphertext, nonce)
    except nacl.exceptions.CryptoError:
        return abort(400, "Error decrypting data")

    try:
        result = j.data.serializers.json.loads(decrypted)
    except JSONDecodeError:
        return abort(400, "3Bot login returned faulty data")

    if "email" not in result:
        return abort(400, "Email is not present in data")

    email = result["email"]["email"]

    sei = result["email"]["sei"]
    res = requests.post(
        "https://openkyc.live/verification/verify-sei",
        headers={"Content-Type": "application/json"},
        json={"signedEmailIdentifier": sei},
    )

    if res.status_code != 200:
        return abort(400, "Email is not verified")

    session["username"] = username
    session["email"] = email
    session["authorized"] = True
    session["signedAttempt"] = signedData
    try:
        tid = j.sals.reservation_chatflow.reservation_chatflow.validate_user({
            "username":
            username,
            "email":
            email
        }).id
        session["tid"] = tid
        session["explorer"] = j.core.identity.me.explorer_url
    except Exception as e:
        j.logger.warning(
            f"Error in validating user: {username} with email: {email} in explorer: {j.core.identity.me.explorer_url}\n from {str(e)}"
        )

    return redirect(session.get("next_url", "/"))
Example 54
 def hi():
     bottle.abort(420, 'Enhance Your Calm')
Example 55
def get(urlid):
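    # resolve the short-URL id to its stored URL and redirect there; unknown ids get a 404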
    s = Storage()
    url = s.get(ConvertID.to_rowid(urlid))
    if url is None:
        abort(404, "No such URL ID")
    redirect(tob(url))
Example 56
def error():
    ''' Cause a common HTTP error '''
    abort(401, 'Access denied')
Example 57
def buildbot():
    logger = g.logger.getChild('buildbot')

    response.content_type = 'text/plain'

    for row in json.loads(request.forms.packets):
        if row['event'] == 'buildFinished':
            info = row['payload']['build']
            lazy_debug(logger, lambda: 'info: {}'.format(info))
            props = dict(x[:2] for x in info['properties'])

            if 'retry' in info['text']:
                continue

            if not props['revision']:
                continue

            try:
                state, repo_label = find_state(props['revision'])
            except ValueError:
                lazy_debug(logger,
                           lambda: 'Invalid commit ID from Buildbot: {}'.
                           format(props['revision']))  # noqa
                continue

            lazy_debug(logger, lambda: 'state: {}, {}'.format(
                state, state.build_res_summary()))  # noqa

            if info['builderName'] not in state.build_res:
                lazy_debug(logger, lambda: 'Invalid builder from Buildbot: {}'.
                           format(info['builderName']))  # noqa
                continue

            repo_cfg = g.repo_cfgs[repo_label]

            if request.forms.secret != repo_cfg['buildbot']['secret']:
                abort(400, 'Invalid secret')

            build_succ = 'successful' in info['text'] or info['results'] == 0

            url = '{}/builders/{}/builds/{}'.format(
                repo_cfg['buildbot']['url'],
                info['builderName'],
                props['buildnumber'],
            )

            if 'interrupted' in info['text']:
                step_name = ''
                for step in reversed(info['steps']):
                    if 'interrupted' in step.get('text', []):
                        step_name = step['name']
                        break

                if step_name:
                    try:
                        url = (
                            '{}/builders/{}/builds/{}/steps/{}/logs/interrupt'  # noqa
                        ).format(
                            repo_cfg['buildbot']['url'],
                            info['builderName'],
                            props['buildnumber'],
                            step_name,
                        )
                        res = requests.get(url)
                    except Exception as ex:  # noqa
                        logger.warn('/buildbot encountered an error during '
                                    'github logs request')
                        # probably related to
                        # https://gitlab.com/pycqa/flake8/issues/42
                        lazy_debug(
                            logger,
                            lambda: 'buildbot logs err: {}'.format(ex))  # noqa
                        abort(502, 'Bad Gateway')

                    mat = INTERRUPTED_BY_HOMU_RE.search(res.text)
                    if mat:
                        interrupt_token = mat.group(1)
                        if getattr(state, 'interrupt_token',
                                   '') != interrupt_token:
                            state.interrupt_token = interrupt_token

                            if state.status == 'pending':
                                state.set_status('')

                                desc = (':snowman: The build was interrupted '
                                        'to prioritize another pull request.')
                                state.add_comment(desc)
                                state.change_labels(LabelEvent.INTERRUPTED)
                                utils.github_create_status(state.get_repo(),
                                                           state.head_sha,
                                                           'error',
                                                           url,
                                                           desc,
                                                           context='homu')

                                g.queue_handler()

                        continue

                else:
                    logger.error('Corrupt payload from Buildbot')

            report_build_res(build_succ, url, info['builderName'], state,
                             logger, repo_cfg)

        elif row['event'] == 'buildStarted':
            info = row['payload']['build']
            lazy_debug(logger, lambda: 'info: {}'.format(info))
            props = dict(x[:2] for x in info['properties'])

            if not props['revision']:
                continue

            try:
                state, repo_label = find_state(props['revision'])
            except ValueError:
                pass
            else:
                if info['builderName'] in state.build_res:
                    repo_cfg = g.repo_cfgs[repo_label]

                    if request.forms.secret != repo_cfg['buildbot']['secret']:
                        abort(400, 'Invalid secret')

                    url = '{}/builders/{}/builds/{}'.format(
                        repo_cfg['buildbot']['url'],
                        info['builderName'],
                        props['buildnumber'],
                    )

                    state.set_build_res(info['builderName'], None, url)

            if g.buildbot_slots[0] == props['revision']:
                g.buildbot_slots[0] = ''

                g.queue_handler()

    return 'OK'
Example 58
 def decorator(*args, **kwargs):
     session = request.environ.get("beaker.session")
     if j.core.config.get_config().get("threebot_connect", True):
         if not session.get("authorized", False):
             return abort(401)
     return handler(*args, **kwargs)
Example 59
def github():
    logger = g.logger.getChild('github')

    response.content_type = 'text/plain'

    payload = request.body.read()
    info = request.json

    lazy_debug(logger, lambda: 'info: {}'.format(
        utils.remove_url_keys_from_json(info)))  # noqa

    owner_info = info['repository']['owner']
    owner = owner_info.get('login') or owner_info['name']
    repo_label = g.repo_labels[owner, info['repository']['name']]
    repo_cfg = g.repo_cfgs[repo_label]

    hmac_method, hmac_sig = request.headers['X-Hub-Signature'].split('=')
    if hmac_sig != hmac.new(
            repo_cfg['github']['secret'].encode('utf-8'),
            payload,
            hmac_method,
    ).hexdigest():
        abort(400, 'Invalid signature')

    event_type = request.headers['X-Github-Event']

    if event_type == 'pull_request_review_comment':
        action = info['action']
        original_commit_id = info['comment']['original_commit_id']
        head_sha = info['pull_request']['head']['sha']

        if action == 'created' and original_commit_id == head_sha:
            pull_num = info['pull_request']['number']
            body = info['comment']['body']
            username = info['sender']['login']

            state = g.states[repo_label].get(pull_num)
            if state:
                state.title = info['pull_request']['title']
                state.body = info['pull_request']['body']

                if parse_commands(
                        g.cfg,
                        body,
                        username,
                        repo_cfg,
                        state,
                        g.my_username,
                        g.db,
                        g.states,
                        realtime=True,
                        sha=original_commit_id,
                ):
                    state.save()

                    g.queue_handler()

    elif event_type == 'pull_request':
        action = info['action']
        pull_num = info['number']
        head_sha = info['pull_request']['head']['sha']

        if action == 'synchronize':
            state = g.states[repo_label][pull_num]
            state.head_advanced(head_sha)

            state.save()

        elif action in ['opened', 'reopened']:
            state = PullReqState(pull_num, head_sha, '', g.db, repo_label,
                                 g.mergeable_que, g.gh,
                                 info['repository']['owner']['login'],
                                 info['repository']['name'],
                                 repo_cfg.get('labels', {}), g.repos)
            state.title = info['pull_request']['title']
            state.body = info['pull_request']['body']
            state.head_ref = info['pull_request']['head']['repo']['owner'][
                'login'] + ':' + info['pull_request']['head']['ref']  # noqa
            state.base_ref = info['pull_request']['base']['ref']
            state.set_mergeable(info['pull_request']['mergeable'])
            state.assignee = (info['pull_request']['assignee']['login']
                              if info['pull_request']['assignee'] else '')

            found = False

            if action == 'reopened':
                # FIXME: Review comments are ignored here
                for c in state.get_repo().issue(pull_num).iter_comments():
                    found = parse_commands(
                        g.cfg,
                        c.body,
                        c.user.login,
                        repo_cfg,
                        state,
                        g.my_username,
                        g.db,
                        g.states,
                    ) or found

                status = ''
                for info in utils.github_iter_statuses(state.get_repo(),
                                                       state.head_sha):
                    if info.context == 'homu':
                        status = info.state
                        break

                state.set_status(status)

            state.save()

            g.states[repo_label][pull_num] = state

            if found:
                g.queue_handler()

        elif action == 'closed':
            state = g.states[repo_label][pull_num]
            if hasattr(state, 'fake_merge_sha'):

                def inner():
                    utils.github_set_ref(
                        state.get_repo(),
                        'heads/' + state.base_ref,
                        state.merge_sha,
                        force=True,
                    )

                def fail(err):
                    state.add_comment(':boom: Failed to recover from the '
                                      'artificial commit. See {} for details.'
                                      ' ({})'.format(state.fake_merge_sha,
                                                     err))

                utils.retry_until(inner, fail, state)

            del g.states[repo_label][pull_num]

            db_query(g.db, 'DELETE FROM pull WHERE repo = ? AND num = ?',
                     [repo_label, pull_num])
            db_query(g.db, 'DELETE FROM build_res WHERE repo = ? AND num = ?',
                     [repo_label, pull_num])
            db_query(g.db, 'DELETE FROM mergeable WHERE repo = ? AND num = ?',
                     [repo_label, pull_num])

            g.queue_handler()

        elif action in ['assigned', 'unassigned']:
            state = g.states[repo_label][pull_num]
            state.assignee = (info['pull_request']['assignee']['login']
                              if info['pull_request']['assignee'] else '')

            state.save()

        else:
            lazy_debug(logger, lambda: 'Invalid pull_request action: {}'.
                       format(action))  # noqa

    elif event_type == 'push':
        ref = info['ref'][len('refs/heads/'):]

        for state in list(g.states[repo_label].values()):
            if state.base_ref == ref:
                state.set_mergeable(
                    None,
                    cause={
                        'sha': info['head_commit']['id'],
                        'title':
                        info['head_commit']['message'].splitlines()[0],
                    })

            if state.head_sha == info['before']:
                if state.status:
                    state.change_labels(LabelEvent.PUSHED)
                state.head_advanced(info['after'])

                state.save()

    elif event_type == 'issue_comment':
        body = info['comment']['body']
        username = info['comment']['user']['login']
        pull_num = info['issue']['number']

        state = g.states[repo_label].get(pull_num)

        if 'pull_request' in info['issue'] and state:
            state.title = info['issue']['title']
            state.body = info['issue']['body']

            if parse_commands(
                    g.cfg,
                    body,
                    username,
                    repo_cfg,
                    state,
                    g.my_username,
                    g.db,
                    g.states,
                    realtime=True,
            ):
                state.save()

                g.queue_handler()

    elif event_type == 'status':
        try:
            state, repo_label = find_state(info['sha'])
        except ValueError:
            return 'OK'

        status_name = ""
        if 'status' in repo_cfg:
            for name, value in repo_cfg['status'].items():
                if 'context' in value and value['context'] == info['context']:
                    status_name = name
        if status_name is "":
            return 'OK'

        if info['state'] == 'pending':
            return 'OK'

        for row in info['branches']:
            if row['name'] == state.base_ref:
                return 'OK'

        report_build_res(info['state'] == 'success', info['target_url'],
                         'status-' + status_name, state, logger, repo_cfg)

    return 'OK'
Example 60
def get_equipement(id):
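    # look up a single document in the 'equipements' collection; 404 if the id is unknown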
    entity = db['equipements'].find_one({'_id':id})
    if not entity:
        abort(404, 'No equipement with id %s' % id)
    return entity