Example #1
 def execute(self, test=False, *args, **kwargs):
     self.target = self.config.driverlicense.collection.data
     self.gfs = GridFS(self.target.connection[self.target.database])
     self.download(test)
Example #2
def get_app(config):
    """
    :param config: the configuration dict
    :return: A new app
    """
    config = _put_configuration_defaults(config)

    task_directory = config["tasks_directory"]
    download_directory = config.get("download_directory", "lti_download")
    default_allowed_file_extensions = config['allowed_file_extensions']
    default_max_file_size = config['max_file_size']

    appli = web.application((), globals(), autoreload=False)

    zmq_context, asyncio_thread = start_asyncio_and_zmq()

    # Init the different parts of the app
    plugin_manager = PluginManager()

    mongo_client = MongoClient(
        host=config.get('mongo_opt', {}).get('host', 'localhost'))
    database = mongo_client[config.get('mongo_opt',
                                       {}).get('database', 'INGInious')]
    gridfs = GridFS(database)

    course_factory, task_factory = create_factories(task_directory,
                                                    plugin_manager,
                                                    FrontendCourse,
                                                    FrontendTask)

    #
    # Allow the user config to override the username stored in Mongo.
    # Most LMSs, such as Moodle, send ext_user_username: it is the user's
    # "login name", which is typically the same name that would be
    # authenticated when logging into the course via LDAP.
    #
    lti_user_name = config.get('lti_user_name', 'user_id')
    if lti_user_name not in ['user_id', 'ext_user_username']:
        lti_user_name = 'user_id'

    user_manager = UserManager(
        CustomSession(appli, MongoStore(database, 'sessions')), database,
        lti_user_name)

    update_pending_jobs(database)

    client = create_arch(config, task_directory, zmq_context)

    lis_outcome_manager = LisOutcomeManager(database, user_manager,
                                            course_factory, config["lti"])

    submission_manager = LTISubmissionManager(
        client, user_manager, database, gridfs, plugin_manager,
        config.get('nb_submissions_kept', 5), lis_outcome_manager)

    template_helper = TemplateHelper(plugin_manager, 'frontend/lti/templates',
                                     'frontend/lti/templates/layout',
                                     config.get('use_minified_js', True))

    # Update the database
    update_database(database)

    # Add some helpers for the templates
    template_helper.add_to_template_globals("get_homepath",
                                            lambda: web.ctx.homepath)
    template_helper.add_to_template_globals("user_manager", user_manager)
    template_helper.add_to_template_globals("default_allowed_file_extensions",
                                            default_allowed_file_extensions)
    template_helper.add_to_template_globals("default_max_file_size",
                                            default_max_file_size)

    # Not found page
    appli.notfound = lambda: web.notfound(template_helper.get_renderer().
                                          notfound('Page not found'))

    # Insert the needed singletons into the application, to allow pages to call them
    appli.plugin_manager = plugin_manager
    appli.course_factory = course_factory
    appli.task_factory = task_factory
    appli.submission_manager = submission_manager
    appli.user_manager = user_manager
    appli.template_helper = template_helper
    appli.database = database
    appli.gridfs = gridfs
    appli.default_allowed_file_extensions = default_allowed_file_extensions
    appli.default_max_file_size = default_max_file_size
    appli.consumers = config["lti"]
    appli.download_directory = download_directory
    appli.download_status = {}
    appli.webterm_link = config.get("webterm", None)

    # Init the mapping of the app
    appli.init_mapping(urls)

    # Loads plugins
    plugin_manager.load(client, appli, course_factory, task_factory, database,
                        user_manager, submission_manager,
                        config.get("plugins", []))

    # Start the Client
    client.start()

    return appli.wsgifunc(), lambda: _close_app(appli, mongo_client, client,
                                                lis_outcome_manager)
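Not shown in the source: a minimal sketch of how a factory like this is typically consumed, assuming `config` is the configuration dict described in the docstring; wsgiref is used purely for illustration, real deployments sit behind a proper WSGI server.

# Sketch only: serving the WSGI app returned by get_app().
from wsgiref.simple_server import make_server

wsgi_app, close_app = get_app(config)  # config: the dict described in the docstring
try:
    make_server('', 8080, wsgi_app).serve_forever()
finally:
    close_app()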
Example #3
 def _init(self):
     self._client = MongoClient(self._conn_str)
     self._db = self._client.get_database(self._db_name)
     self._collection = self._db[self._coll_name]
     self._gridfs = GridFS(self._db, self._coll_name)
     self._gridfs_bucket = GridFSBucket(self._db, self._coll_name)
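A hedged aside on why `_init` keeps both handles: `GridFS` exposes the classic put/get/delete API, while `GridFSBucket` exposes the newer streaming API over the same collection. A minimal sketch, assuming `store` is an instance of the class above after `_init` has run:

# Sketch: both handles address the same underlying bucket.
file_id = store._gridfs.put(b'payload', filename='example.bin')    # classic API
data = store._gridfs_bucket.open_download_stream(file_id).read()   # streaming API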
Example #4
 def _get_gridfs(self, model_instance):
     # XXX shouldn't we use the model's collection here?
     return GridFS(
         connections[model_instance.__class__.objects.db].database)
Example #5
 def __init__(self, *args, **kwargs):
     self.db = Connection(
         host=settings.GRIDFS_HOST,
         port=settings.GRIDFS_PORT)[settings.GRIDFS_DATABASE_NAME]
     self.fs = GridFS(self.db)
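Note that `Connection` has long been removed from PyMongo; a minimal sketch of the equivalent setup with `MongoClient`, assuming the same `settings` attributes used above:

from gridfs import GridFS
from pymongo import MongoClient

# Sketch: same GridFS handle, current PyMongo API; `settings` is the same
# configuration object referenced in the example above.
client = MongoClient(host=settings.GRIDFS_HOST, port=settings.GRIDFS_PORT)
db = client[settings.GRIDFS_DATABASE_NAME]
fs = GridFS(db)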
Example #6
def get_fs():
    fs = getattr(g, 'fs', None)
    if fs is None:
        g.fs = GridFS(get_db())
    return g.fs
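A hedged usage sketch for the helper above, assuming a Flask `app` object and that `get_db()` returns a PyMongo database; the `/upload` route and form field name are illustrative only:

from flask import request


@app.route('/upload', methods=['POST'])
def upload_file():
    # Sketch only: store an uploaded file through the request-scoped GridFS handle.
    f = request.files['file']
    file_id = get_fs().put(f.read(), filename=f.filename,
                           content_type=f.content_type)
    return str(file_id)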
Example #7
def update_mongo(model, db, engine, limit=10):
    Session = sessionmaker(bind=engine)
    session = Session()
    fs = GridFS(db)
    cursor = monta_filtro(db, session, limit)
    score_soma = 0.
    contagem = 0.001
    max_uploadDate = datetime(2000, 1, 1)
    for ind, registro in enumerate(cursor):
        s0 = time.time()
        _id = ObjectId(registro['_id'])
        sql = f'select isocode_group from ajna_conformidade where id_imagem="{str(_id)}"'
        isocode_group = session.execute(sql).scalar()
        if registro['uploadDate'] > max_uploadDate:
            max_uploadDate = registro['uploadDate']
        if isocode_group is None or isocode_group[0] != 'R':
            logging.info(f'Pulando registro {_id} por não ser reefer')
            continue
        grid_out = fs.get(_id)
        img_str = grid_out.read()
        nparr = np.frombuffer(img_str, np.uint8)
        image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
        # size = pil_image.size
        s1 = time.time()
        logging.info(f'Elapsed retrieve time {s1 - s0}')
        pred_boxes, pred_classes, pred_scores = model.get_preds(image)
        if len(pred_boxes) == 0 or pred_scores[0] < .9:
            class_label = 2
            if len(pred_boxes) == 0:
                preds = [0, 0, image.shape[0], image.shape[1]]
                score = 0.
            else:
                preds = pred_boxes[0]
                score = pred_scores[0]
        else:
            preds = pred_boxes[0]
            class_label = pred_classes[0]
            score = pred_scores[0]
        if score > 0.:
            score_soma += score
            contagem += 1.
        if class_label is None:
            logging.info(f'Pulando registro {_id} porque classe veio vazia...')
            continue
        s2 = time.time()
        logging.info(f'Elapsed model time {s2 - s1}. SCORE {score} SCORE MÉDIO {score_soma / contagem}')
        # new_preds = normalize_preds(preds, size)
        new_predictions = [{'reefer_bbox': preds, 'reefer_class': class_label, 'reefer_score': score}]
        logging.info({'_id': _id, 'metadata.predictions.0.reefer': new_predictions})
        db['fs.files'].update_one(
            {'_id': _id},
            {'$set': {'metadata.predictions.0.reefer': new_predictions}}
        )
        s3 = time.time()
        logging.info(f'Elapsed update time {s3 - s2} - registro {ind}')
    sql = 'INSERT INTO ajna_modelos (nome, uploadDate) ' + \
          'VALUES  ("motor_reefer", :uploadDate) ON DUPLICATE KEY UPDATE ' + \
          'uploadDate = :uploadDate'
    logging.info(f'Fazendo UPSERT no uploadDate para {max_uploadDate}: {sql}')
    session.execute(sql, {'uploadDate': max_uploadDate})
    session.commit()
Example #8
 def test_remove_file(self):
     fs = GridFS(self.conn['test'], 'test')
     id = fs.put("test file", filename="test.txt", encoding='utf8')
     assert_soon(lambda: sum(1 for _ in self._search()) == 1)
     fs.delete(id)
     assert_soon(lambda: sum(1 for _ in self._search()) == 0)
Example #9
 def setGridFS(self):
     self.fs = GridFS(self.db)
Example #10
# The top of this listing was cut off; the Flask / Dropzone / PyMongo imports
# it relies on are assumed to be the usual ones and are restored here.
from flask import Flask, render_template, request
from flask_bootstrap import Bootstrap
from flask_dropzone import Dropzone
from gridfs import GridFS
from pymongo import MongoClient
from werkzeug.utils import secure_filename
from PIL import Image
from pytesseract import image_to_string

app = Flask(__name__)
Bootstrap(app)

app.config.update(DROPZONE_ALLOWED_FILE_TYPE='image',
                  DROPZONE_MAX_FILE_SIZE=10,
                  DROPZONE_MAX_FILES=120,
                  DROPZONE_REDIRECT_VIEW='converted',
                  DROPZONE_UPLOAD_MULTIPLE=True,
                  DROPZONE_UPLOAD_ON_CLICK=True)
dropzone = Dropzone(app)

DB = MongoClient(host=['mongodb:27017']).gridfs
FS = GridFS(DB)


@app.route("/")
def index():
    return render_template('index.html')


@app.route('/uploads', methods=['POST'])
def upload():
    i = 0
    for key, f in request.files.items():
        i += 1
        if key.startswith('file'):
            filename = secure_filename(f.filename).split('.')[0]
            filename += str(i)
Example #11
import time
import urllib.request

from gridfs import GridFS
from lxml import etree
import pymongo

# Scrape the listing pages

max_page = 1

url = '192.168.0.109'
# client = pymongo.MongoClient(host='192.168.100.109', port=27017)
client = pymongo.MongoClient(host=url, port=27017)
db = client.kaichecc
article_db = db.article

db = pymongo.MongoClient(host=url, port=27017).pics
fs = GridFS(db, collection='pic')

headers = {
    'User-Agent':
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20100101 Firefox/23.0'
}
for i in range(1, max_page + 1):
    print("第" + str(i) + "页")
    time.sleep(5)
    try:
        req = urllib.request.Request(
            url="http://www.kaichecc.com/category/guochan/page/%d" % i,
            headers=headers)
        response = urllib.request.urlopen(req, timeout=30)
        if response.status != 200:
            print(
Example #12
 def get_gfs(self, name=None):
     name = name or __conf__.GFS_NAME
     return GridFS(self.get_mongo(name))
Example #13
def ImageAnalysis(nx, ny, imageName):
    """ ImageAnalysis is calling the octave file calculating the probability that
    there is labware(s) in the actual picture. Because the x/y system of the image is
    different from the x/y system of the platform, corrections are made and add the necessary
    mm if nx and ny are larger than 1.
    INPUT : square numbers on the deck (nx/and ny are provided by planner) and the imageName
    OUTPUT: Data set is store in the DB


    This function also recalculates x-y position on the platform using nx and ny.
         x-y values calculated from pictures are in the standard format
         Those values are, on the platform, y-x because of the design. Below
         is the code to correct that.
            ny = number of square in y [0:3]
            nx = number of square in x [0:4]

         y_plateforme = round(item["y"]+yPhotoRes*ny,3)
         x_plateforme = round(xPhotoRes*(nx+1)-item["x"],3)
        
    """
    croptedPicFolder = '/home/ubuntu/biobot_ros_jtk/src/ros_3d_cartography/src/croptedPictures/'
    fileToData = '/home/ubuntu/biobot_ros_jtk/src/ros_3d_cartography/src/data'
    # dataFolder = '/home/ubuntu/.ros'
    # wsFolder = '/home/ubuntu/biobot_ros_jtk/src/ros_3d_cartography/src/object_detection'

    onePixelInMM = float((1 / 17) * 10)
    yPhotoRes = float(640 * onePixelInMM)
    xPhotoRes = float(480 * onePixelInMM)
    labware = []
    # Database --------------------------------------------------
    client = pymongo.MongoClient()
    biobot = client['biobot']
    fs = GridFS(biobot)
    #------------------------------------------------------------
    # Transform the data to .mat --------------------------------
    SplitRosFile(ws_dir + '/' + imageName + '.txt', fileToData)
    print(imageName)
    octave.addpath(fileToData)
    print("Create mat")
    octave.CreateMatFile(fileToData)
    source = os.path.join('/home', 'ubuntu', '.ros', 'example.mat')
    destination = os.path.join('/home', 'ubuntu', 'biobot_ros_jtk', 'src',
                               'ros_3d_cartography', 'src', 'object_detection')
    # Move the PCL file to /ros_3d_cartography in order to modify it without it being rewritten
    time.sleep(5)
    shutil.copy(source, destination)
    #------------------------------------------------------------
    print("Done .mat file")
    octave.addpath(ws_dir)
    # print(dataFolder)
    print(imageName)

    answer = octave.ObjectDetection(imageName)
    print("Done ObjectDetection")

    try:
        answer = dict(answer)
        for v in answer.values():
            try:
                item = {'type': v[0]}
                item['mod2DJPG'] = str(v[1])  # cropped picture name
                item['mod2D'] = v[2]  # RGB matrices of the cropped picture
                item['x'] = float(v[3])
                item['y'] = float(v[4])
                item['z'] = round(
                    float(v[5]), 3
                )  # Not send to DB, uses highest mod. instead (To be calculated)
                labware.append(item)
            except Exception:
                print("Received NaN values")
        """ x-y values calculated from pictures are in the standard format
         Those values are, on the platform, y-x because of the design. Below
         is the code to correct that.
            ny = number of square in y [0:3]
            nx = number of square in x [0:4]
        """
        print("Labware result: ")
        print(labware)
        for item in labware:
            try:
                y_plateforme = round(item["y"] + yPhotoRes * ny, 3)
                x_plateforme = round(xPhotoRes * (nx + 1) - item["x"], 3)
                item["y"] = y_plateforme
                item["x"] = x_plateforme
                CP = os.path.join(croptedPicFolder, item["mod2DJPG"] + '.jpg')
                print(["saved image: ", CP])
                scipy.misc.imsave(str(CP), item["mod2D"])
            except Exception as e:
                print("Nan values passed: {}".format(e))

            # Send each labware to the database
            with open(CP, 'rb') as f:
                data = f.read()

            uid = uuid.uuid4().hex
            filename = "{}.jpg".format(uid)
            image_id = fs.put(data, filename=filename)
            item = {'type': item["type"], 'carto_x': item["x"], \
                    'carto_y': item["y"], 'uuid': uid, 'filename': filename, \
                    'validated': False, 'image_id': image_id, 'source': '3d_cartography'}
            biobot.deck.insert_one(item)
    except Exception:
        print('Nothing was detected')
Example #14
from flask import Flask, render_template, redirect, url_for, request, flash, jsonify
from flask_pymongo import PyMongo
from gridfs import GridFS
# from werkzeug import secure_filename

app = Flask(__name__)
app.config['SECRET_KEY'] = '59d3ca27e6701d3fd06eb960ca5866a5'
app.config["MONGO_URI"] = "mongodb://*****:*****@app.route("/")
@app.route("/home", methods=["POST", "GET"])
def home():
    return render_template("Dashboard.html")


@app.route("/createLab", methods=["POST", "GET"])
def createLab():
    data = request.form.to_dict(flat=False)
    if (data):
        assignments = mongo.db.assignments
        assignments.insert({
            "section": data['section'],
            "lab_name": data['lab_name'],
            "start_time": data['start_time'],
            "end_time": data['end_time'],
            "lab_type": data['lab_type'],
            "q_id": data['q_ids']
Example #15
    def lookup_tables_as_collection_and_gridfs(cfg, maindb):
        """Import lookup tables (from txt file) as Collection and GridFS
        Args:
            cfg: SEIMS config object
            maindb: workflow model database
        """
        for tablename, txt_file in list(
                cfg.paramcfgs.lookup_tabs_dict.items()):
            # import each lookup table as a collection and GridFS file.
            c_list = maindb.collection_names()
            if not StringClass.string_in_list(tablename.upper(), c_list):
                maindb.create_collection(tablename.upper())
            else:
                maindb.drop_collection(tablename.upper())
            # initial bulk operator
            bulk = maindb[tablename.upper()].initialize_ordered_bulk_op()
            # delete if the tablename gridfs file existed
            spatial = GridFS(maindb, DBTableNames.gridfs_spatial)
            if spatial.exists(filename=tablename.upper()):
                x = spatial.get_version(filename=tablename.upper())
                spatial.delete(x._id)

            # read data items
            data_items = read_data_items_from_txt(txt_file)
            field_names = data_items[0][0:]
            item_values = list()  # import as gridfs file
            for i, cur_data_item in enumerate(data_items):
                if i == 0:
                    continue
                data_import = dict()  # import as Collection
                item_value = list()  # import as gridfs file
                for idx, fld in enumerate(field_names):
                    if MathClass.isnumerical(cur_data_item[idx]):
                        tmp_value = float(cur_data_item[idx])
                        data_import[fld] = tmp_value
                        item_value.append(tmp_value)
                    else:
                        data_import[fld] = cur_data_item[idx]
                bulk.insert(data_import)
                if len(item_value) > 0:
                    item_values.append(item_value)
            MongoUtil.run_bulk(bulk,
                               'No operations during import %s.' % tablename)
            # begin import gridfs file
            n_row = len(item_values)
            # print(item_values)
            if n_row >= 1:
                n_col = len(item_values[0])
                for i in range(n_row):
                    if n_col != len(item_values[i]):
                        raise ValueError(
                            'Please check %s to make sure each item has '
                            'the same numeric dimension. The size of first '
                            'row is: %d, and the current data item is: %d' %
                            (tablename, n_col, len(item_values[i])))
                    else:
                        item_values[i].insert(0, n_col)

                metadic = {
                    ModelParamDataUtils.item_count: n_row,
                    ModelParamDataUtils.field_count: n_col
                }
                cur_lookup_gridfs = spatial.new_file(
                    filename=tablename.upper(), metadata=metadic)
                header = [n_row]
                fmt = '%df' % 1
                s = pack(fmt, *header)
                cur_lookup_gridfs.write(s)
                fmt = '%df' % (n_col + 1)
                for i in range(n_row):
                    s = pack(fmt, *item_values[i])
                    cur_lookup_gridfs.write(s)
                cur_lookup_gridfs.close()
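For reference, a hedged read-back sketch that mirrors the packed format written above (one float holding the row count, then n_col + 1 floats per row, the first of which repeats the column count); the helper name is illustrative, not part of the source:

from struct import unpack

from gridfs import GridFS


def read_lookup_table(maindb, tablename, gridfs_collection):
    # Sketch only: decode a lookup table written by the import code above.
    spatial = GridFS(maindb, gridfs_collection)
    grid_out = spatial.get_version(filename=tablename.upper())
    n_row = int(unpack('1f', grid_out.read(4))[0])
    rows = []
    for _ in range(n_row):
        n_col = int(unpack('1f', grid_out.read(4))[0])
        rows.append(list(unpack('%df' % n_col, grid_out.read(4 * n_col))))
    return rows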
Example #16
 def __init__(self):
     self.client = MongoClient("mongodb://" + cfg.MONGODB_ADDR)
     self.img_db = self.client["image"]
     self.fs = GridFS(database=self.img_db, collection="fs")
Example #17
 def _fs(cls):
     return GridFS(session(cls).impl.db, cls._root_collection())
Example #18
import io
import os
from datetime import date, datetime, timedelta

from bson import ObjectId
from gridfs import GridFS
from PIL import Image
from pymongo import MongoClient

MIN_RATIO = 1.8

today = date.today()
str_today = datetime.strftime(today, '%d/%m/%Y')
yesterday = today - timedelta(days=1)
str_yesterday = datetime.strftime(yesterday, '%d/%m/%Y')

MONGODB_URI = os.environ.get('MONGODB_URI')
if MONGODB_URI:
    DATABASE = ''.join(MONGODB_URI.rsplit('/')[-1:])
else:
    DATABASE = 'test'

conn = MongoClient(host=MONGODB_URI)
mongodb = conn[DATABASE]
fs = GridFS(mongodb)


def parse_datas(inicio, fim):
    return datetime.strptime(inicio, '%d/%m/%Y'), \
           datetime.strptime(fim + ' 23:59:59', '%d/%m/%Y %H:%M:%S')


def get_image(row, crop=False, min_ratio=MIN_RATIO):
    """Retrieve image content from Mongo, crop on bbox if crop is True."""
    oid = ObjectId(row['_id'])
    if fs.exists(oid):
        grid_out = fs.get(oid)
        image = Image.open(io.BytesIO(grid_out.read()))
        xfinal, yfinal = image.size
        if xfinal / yfinal < min_ratio:
Example #19
 def test_remove_file(self):
     fs = GridFS(self.conn["test"], "test")
     id = fs.put("test file", filename="test.txt", encoding="utf8")
     assert_soon(lambda: self._count() == 1)
     fs.delete(id)
     assert_soon(lambda: self._count() == 0)
Example #20
    def export_scenario_to_gtiff(self, outpath=None):
        # type: (Optional[str]) -> None
        """Export scenario to GTiff.

        TODO: Read Raster from MongoDB should be extracted to pygeoc.
        """
        if not self.export_sce_tif:
            return
        dist = self.bmps_info[self.cfg.bmpid]['DISTRIBUTION']
        dist_list = StringClass.split_string(dist, '|')
        if len(dist_list) >= 2 and dist_list[0] == 'RASTER':
            dist_name = '0_' + dist_list[1]  # prefix 0_ means the whole basin
            # read dist_name from MongoDB
            client = ConnectMongoDB(self.modelcfg.host, self.modelcfg.port)
            conn = client.get_conn()
            maindb = conn[self.modelcfg.db_name]
            spatial_gfs = GridFS(maindb, DBTableNames.gridfs_spatial)
            # read file from mongodb
            if not spatial_gfs.exists(filename=dist_name):
                print('WARNING: %s is not existed, export scenario failed!' %
                      dist_name)
                return
            try:
                slpposf = maindb[DBTableNames.gridfs_spatial].files.find(
                    {'filename': dist_name}, no_cursor_timeout=True)[0]
            except (NetworkTimeout, Exception):
                # In case of an unexpected raise
                client.close()
                return

            ysize = int(slpposf['metadata'][RasterMetadata.nrows])
            xsize = int(slpposf['metadata'][RasterMetadata.ncols])
            xll = slpposf['metadata'][RasterMetadata.xll]
            yll = slpposf['metadata'][RasterMetadata.yll]
            cellsize = slpposf['metadata'][RasterMetadata.cellsize]
            nodata_value = slpposf['metadata'][RasterMetadata.nodata]
            srs = slpposf['metadata'][RasterMetadata.srs]
            if is_string(srs):
                srs = str(srs)
            srs = osr.GetUserInputAsWKT(srs)
            geotransform = [0] * 6
            geotransform[0] = xll - 0.5 * cellsize
            geotransform[1] = cellsize
            geotransform[3] = yll + (ysize - 0.5) * cellsize  # yMax
            geotransform[5] = -cellsize

            slppos_data = spatial_gfs.get(slpposf['_id'])
            total_len = xsize * ysize
            fmt = '%df' % (total_len, )
            slppos_data = unpack(fmt, slppos_data.read())
            slppos_data = numpy.reshape(slppos_data, (ysize, xsize))

            v_dict = dict()
            for unitidx, geneidx in viewitems(self.cfg.unit_to_gene):
                v_dict[unitidx] = self.gene_values[geneidx]
            # Deprecated and replaced by using self.cfg.unit_to_gene. 03/14/2019. ljzhu.
            # for idx, gene_v in enumerate(self.gene_values):
            #     v_dict[self.cfg.gene_to_unit[idx]] = gene_v

            for k, v in v_dict.items():
                slppos_data[slppos_data == k] = v
            if outpath is None:
                outpath = self.scenario_dir + os.path.sep + 'Scenario_%d.tif' % self.ID
            RasterUtilClass.write_gtiff_file(outpath, ysize, xsize,
                                             slppos_data, geotransform, srs,
                                             nodata_value)
            client.close()
Example #21
        model_config = micro_to_partial(model_config)

    info = {
        "node_input": 148,
        "edge_input": 3,
        "global_input": 4
    }

    model_config.update(info)

    if 'layers' in model_config:
        model_config = partial_to_model_bs(model_config, 45)
        model_config.update(info)

    print("Upload model")
    fs = GridFS(db)
    file = fs.new_file(model=config['name'],
                       dataset_key=dataset,
                       app_type='torch_model',
                       encoding="utf-8")

    try:
        with open(args.model, "rb") as i:
            shutil.copyfileobj(i, file)
    finally:
        file.close()

    insert = {
        'experiment': config['name'],
        'dataset': dataset,
        'competition': '2018',
Example #22
 def fs(self):
     return GridFS(self.db, collection='fs')
Example #23
def get_app(config):
    """
    :param config: the configuration dict
    :return: A new app
    """
    config = _put_configuration_defaults(config)

    mongo_client = MongoClient(
        host=config.get('mongo_opt', {}).get('host', 'localhost'))
    database = mongo_client[config.get('mongo_opt',
                                       {}).get('database', 'INGInious')]
    gridfs = GridFS(database)

    # Init database if needed
    db_version = database.db_version.find_one({})
    if db_version is None:
        database.submissions.ensure_index([("username", pymongo.ASCENDING)])
        database.submissions.ensure_index([("courseid", pymongo.ASCENDING)])
        database.submissions.ensure_index([("courseid", pymongo.ASCENDING),
                                           ("taskid", pymongo.ASCENDING)])
        database.submissions.ensure_index([("submitted_on", pymongo.DESCENDING)
                                           ])  # sort speed
        database.user_tasks.ensure_index([("username", pymongo.ASCENDING),
                                          ("courseid", pymongo.ASCENDING),
                                          ("taskid", pymongo.ASCENDING)],
                                         unique=True)
        database.user_tasks.ensure_index([("username", pymongo.ASCENDING),
                                          ("courseid", pymongo.ASCENDING)])
        database.user_tasks.ensure_index([("courseid", pymongo.ASCENDING),
                                          ("taskid", pymongo.ASCENDING)])
        database.user_tasks.ensure_index([("courseid", pymongo.ASCENDING)])
        database.user_tasks.ensure_index([("username", pymongo.ASCENDING)])

    appli = CookieLessCompatibleApplication(MongoStore(database, 'sessions'))

    # Init gettext
    available_languages = {
        "en": "English",
        "fr": "Français",
        "es": "Español",
        "de": "Deutsch",
        "pt": "Português",
    }

    for lang in available_languages.keys():
        appli.add_translation(
            lang,
            gettext.translation('messages',
                                get_root_path() + '/frontend/i18n', [lang]))

    builtins.__dict__['_'] = appli.gettext

    if config.get("maintenance", False):
        template_helper = TemplateHelper(PluginManager(), None,
                                         'frontend/templates',
                                         'frontend/templates/layout',
                                         'frontend/templates/layout_lti',
                                         config.get('use_minified_js', True))
        template_helper.add_to_template_globals("get_homepath",
                                                appli.get_homepath)
        template_helper.add_to_template_globals("_", _)
        appli.template_helper = template_helper
        appli.init_mapping(urls_maintenance)
        return appli.wsgifunc(), appli.stop

    default_allowed_file_extensions = config['allowed_file_extensions']
    default_max_file_size = config['max_file_size']

    zmq_context, __ = start_asyncio_and_zmq(config.get('debug_asyncio', False))

    # Init the different parts of the app
    plugin_manager = PluginManager()

    # Create the FS provider
    if "fs" in config:
        fs_provider = filesystem_from_config_dict(config["fs"])
    else:
        task_directory = config["tasks_directory"]
        fs_provider = LocalFSProvider(task_directory)

    default_problem_types = {
        problem_type.get_type(): problem_type
        for problem_type in [
            DisplayableCodeProblem, DisplayableCodeSingleLineProblem,
            DisplayableFileProblem, DisplayableMultipleChoiceProblem,
            DisplayableMatchProblem
        ]
    }

    course_factory, task_factory = create_factories(fs_provider,
                                                    default_problem_types,
                                                    plugin_manager,
                                                    WebAppCourse, WebAppTask)

    user_manager = UserManager(appli.get_session(), database,
                               config.get('superadmins', []))

    update_pending_jobs(database)

    client = create_arch(config, fs_provider, zmq_context)

    lti_outcome_manager = LTIOutcomeManager(database, user_manager,
                                            course_factory)

    submission_manager = WebAppSubmissionManager(client, user_manager,
                                                 database, gridfs,
                                                 plugin_manager,
                                                 lti_outcome_manager)

    template_helper = TemplateHelper(plugin_manager, user_manager,
                                     'frontend/templates',
                                     'frontend/templates/layout',
                                     'frontend/templates/layout_lti',
                                     config.get('use_minified_js', True))

    # Init web mail
    smtp_conf = config.get('smtp', None)
    if smtp_conf is not None:
        web.config.smtp_server = smtp_conf["host"]
        web.config.smtp_port = int(smtp_conf["port"])
        web.config.smtp_starttls = bool(smtp_conf.get("starttls", False))
        web.config.smtp_username = smtp_conf.get("username", "")
        web.config.smtp_password = smtp_conf.get("password", "")
        web.config.smtp_sendername = smtp_conf.get("sendername",
                                                   "*****@*****.**")

    # Add some helpers for the templates
    template_helper.add_to_template_globals("_", _)
    template_helper.add_to_template_globals("str", str)
    template_helper.add_to_template_globals("available_languages",
                                            available_languages)
    template_helper.add_to_template_globals("get_homepath", appli.get_homepath)
    template_helper.add_to_template_globals(
        "allow_registration", config.get("allow_registration", True))
    template_helper.add_to_template_globals("user_manager", user_manager)
    template_helper.add_to_template_globals("default_allowed_file_extensions",
                                            default_allowed_file_extensions)
    template_helper.add_to_template_globals("default_max_file_size",
                                            default_max_file_size)
    template_helper.add_other(
        "course_admin_menu",
        lambda course, current: course_admin_utils.get_menu(
            course, current, template_helper.get_renderer(False),
            plugin_manager, user_manager))
    template_helper.add_other(
        "preferences_menu", lambda current: preferences_utils.get_menu(
            appli, current, template_helper.get_renderer(False),
            plugin_manager, user_manager))

    # Not found page
    appli.notfound = lambda: web.notfound(template_helper.get_renderer().
                                          notfound('Page not found'))

    # Enable stacktrace display if logging is at level DEBUG
    if config.get('log_level', 'INFO') == 'DEBUG':
        appli.internalerror = debugerror

    # Insert the needed singletons into the application, to allow pages to call them
    appli.plugin_manager = plugin_manager
    appli.course_factory = course_factory
    appli.task_factory = task_factory
    appli.submission_manager = submission_manager
    appli.user_manager = user_manager
    appli.template_helper = template_helper
    appli.database = database
    appli.gridfs = gridfs
    appli.default_allowed_file_extensions = default_allowed_file_extensions
    appli.default_max_file_size = default_max_file_size
    appli.backup_dir = config.get("backup_directory", './backup')
    appli.webterm_link = config.get("webterm", None)
    appli.lti_outcome_manager = lti_outcome_manager
    appli.allow_registration = config.get("allow_registration", True)
    appli.allow_deletion = config.get("allow_deletion", True)
    appli.available_languages = available_languages
    appli.welcome_page = config.get("welcome_page", None)
    appli.static_directory = config.get("static_directory", "./static")

    # Init the mapping of the app
    appli.init_mapping(urls)

    # Loads plugins
    plugin_manager.load(client, appli, course_factory, task_factory, database,
                        user_manager, submission_manager,
                        config.get("plugins", []))

    # Start the inginious.backend
    client.start()

    return appli.wsgifunc(), lambda: _close_app(appli, mongo_client, client)
Example #24
def view(id):
    file = GridFS(getDBConnection().upload).get(ObjectId(id))
    return render_template("upload-view.html",
                           title="View file",
                           bread=get_bread(),
                           file=file)
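A hedged companion sketch: instead of handing the GridOut to a template, the same lookup can stream the stored bytes back to the client. The `download` helper and the response headers are illustrative, not from the source:

from flask import Response


def download(id):
    # Sketch only: return the raw file content stored in GridFS.
    grid_out = GridFS(getDBConnection().upload).get(ObjectId(id))
    return Response(grid_out.read(),
                    mimetype='application/octet-stream',
                    headers={'Content-Disposition':
                             'attachment; filename="%s"' % grid_out.filename})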
Example #25
 def _get_gridfs(self, model_instance):
     model = model_instance.__class__
     return GridFS(connections[model.objects.db].database,
                   model._meta.db_table)
Example #26
def upload():

    stream = None
    fn = None
    content_type = None
    if request.form['url'] != "":
        stream = urlopen(request.form['url'])
        fn = path.basename(urlparse(request.form['url']).path)
        content_type = stream.info().gettype()
    else:
        stream = request.files['file']
        fn = request.files['file'].filename
        content_type = request.files['file'].content_type

    metadata = {
        "name": request.form['name'],
        "full_description": request.form['full_description'],
        "related_to": request.form['related_to'],
        "data_format": request.form['data_format'],
        "creator": request.form['creator'],
        "reference": request.form['reference'],
        "bibtex": request.form['bibtex'],
        "comments": request.form['comments'],
        "uploader": current_user.name,
        "uploader_id": current_user.id,
        "time": datetime.datetime.utcnow(),
        "original_file_name": fn,
        "status": "unmoderated",
        "version": "1",
        "file_url": request.form['url'],
        "content_type": content_type
    }
    flask.flash(
        "Received file '%s' and awaiting moderation from an administrator" %
        fn)

    upload_db = getDBConnection().upload
    upload_fs = GridFS(upload_db)
    db_id = upload_fs.put(stream.read(), metadata=metadata, filename=fn)

    logging.info("file '%s' receieved and data with id '%s' stored" %
                 (fn, db_id))

    if fn[-4:] == ".tgz" or fn[-4:] == ".tar" or fn[-7:] == ".tar.gz":
        child_index = []
        tar = tarfile.open(fileobj=upload_fs.get(ObjectId(db_id)))
        for tarinfo in tar:
            if tarinfo.isfile():
                metadata2 = copy.copy(metadata)
                metadata2['parent_archive_id'] = db_id
                metadata2['parent_archive_filename'] = fn
                metadata2['status'] = "unmoderatedchild"
                metadata2['original_file_name'] = fn + "/" + tarinfo.name
                metadata2['related_to'] = ""
                metadata2['content_type'] = ""
                id = upload_fs.put(tar.extractfile(tarinfo).read(),
                                   metadata=metadata2,
                                   filename=fn + "/" + tarinfo.name)
                child_index.append([id, tarinfo.name])
        upload_db.fs.files.update(
            {"_id": db_id}, {"$set": {
                "metadata.child_index": child_index
            }})

    return flask.redirect("/upload/view/" + str(db_id))
Example #27
def get_app(config):
    """
    :param config: the configuration dict
    :return: A new app
    """
    config = _put_configuration_defaults(config)

    appli = web.application((), globals(), autoreload=False)

    if config.get("maintenance", False):
        template_helper = TemplateHelper(PluginManager(),
                                         'frontend/webapp/templates',
                                         'frontend/webapp/templates/layout',
                                         config.get('use_minified_js', True))
        template_helper.add_to_template_globals("get_homepath",
                                                lambda: web.ctx.homepath)
        appli.template_helper = template_helper
        appli.init_mapping(urls_maintenance)
        return appli.wsgifunc(), appli.stop

    task_directory = config["tasks_directory"]
    default_allowed_file_extensions = config['allowed_file_extensions']
    default_max_file_size = config['max_file_size']

    zmq_context, _ = start_asyncio_and_zmq()

    # Init the different parts of the app
    plugin_manager = PluginManager()

    mongo_client = MongoClient(
        host=config.get('mongo_opt', {}).get('host', 'localhost'))
    database = mongo_client[config.get('mongo_opt',
                                       {}).get('database', 'INGInious')]
    gridfs = GridFS(database)

    course_factory, task_factory = create_factories(task_directory,
                                                    plugin_manager,
                                                    WebAppCourse, WebAppTask)

    user_manager = UserManager(
        web.session.Session(appli, MongoStore(database, 'sessions')), database,
        config.get('superadmins', []))

    update_pending_jobs(database)

    client = create_arch(config, task_directory, zmq_context)

    submission_manager = WebAppSubmissionManager(client, user_manager,
                                                 database, gridfs,
                                                 plugin_manager)

    template_helper = TemplateHelper(plugin_manager,
                                     'frontend/webapp/templates',
                                     'frontend/webapp/templates/layout',
                                     config.get('use_minified_js', True))

    # Init web mail
    smtp_conf = config.get('smtp', None)
    if smtp_conf is not None:
        web.config.smtp_server = smtp_conf["host"]
        web.config.smtp_port = int(smtp_conf["port"])
        web.config.smtp_starttls = bool(smtp_conf.get("starttls", False))
        web.config.smtp_username = smtp_conf.get("username", "")
        web.config.smtp_password = smtp_conf.get("password", "")
        web.config.smtp_sendername = smtp_conf.get("sendername",
                                                   "*****@*****.**")

    # Update the database
    update_database(database, gridfs, course_factory, user_manager)

    # Add some helpers for the templates
    template_helper.add_to_template_globals("get_homepath",
                                            lambda: web.ctx.homepath)
    template_helper.add_to_template_globals("user_manager", user_manager)
    template_helper.add_to_template_globals("default_allowed_file_extensions",
                                            default_allowed_file_extensions)
    template_helper.add_to_template_globals("default_max_file_size",
                                            default_max_file_size)
    template_helper.add_other(
        "course_admin_menu",
        lambda course, current: course_admin_utils.get_menu(
            course, current, template_helper.get_renderer(False),
            plugin_manager, user_manager))

    # Not found page
    appli.notfound = lambda: web.notfound(template_helper.get_renderer().
                                          notfound('Page not found'))

    # Enable stacktrace display if logging is at level DEBUG
    if config.get('log_level', 'INFO') == 'DEBUG':
        appli.internalerror = debugerror

    # Insert the needed singletons into the application, to allow pages to call them
    appli.plugin_manager = plugin_manager
    appli.course_factory = course_factory
    appli.task_factory = task_factory
    appli.submission_manager = submission_manager
    appli.user_manager = user_manager
    appli.template_helper = template_helper
    appli.database = database
    appli.gridfs = gridfs
    appli.default_allowed_file_extensions = default_allowed_file_extensions
    appli.default_max_file_size = default_max_file_size
    appli.backup_dir = config.get("backup_directory", './backup')
    appli.webterm_link = config.get("webterm", None)

    # Init the mapping of the app
    appli.init_mapping(urls)

    # Loads plugins
    plugin_manager.load(client, appli, course_factory, task_factory, database,
                        user_manager, submission_manager,
                        config.get("plugins", []))

    # Start the inginious.backend
    client.start()

    return appli.wsgifunc(), lambda: _close_app(appli, mongo_client, client)
Example #28
app = Flask(__name__)
app.config['SECRET_KEY'] = 'b36e44b9f68e5e32fe9f5d1be56c561e'

client = MongoClient('mongodb://*****:*****')  # connection string redacted in the source


@app.route('/', methods=['GET', 'POST'])
def home():
    return render_template('Home.html',
                           title='Welcome to Lab Management System!')


@app.route('/slogin', methods=['GET', 'POST'])
def slog():
    if 'username' in session:
        regno = session['username']
        return redirect(url_for('studash', regno=regno))
    stdata = db.student_details
    form = Studlog()
Example #29
import sys

from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_safe

import pymongo
from bson.objectid import ObjectId
from django.core.exceptions import PermissionDenied
from gridfs import GridFS

sys.path.append(settings.CUCKOO_PATH)

from lib.cuckoo.core.database import Database, TASK_PENDING
from lib.cuckoo.common.constants import CUCKOO_ROOT
import modules.processing.network as network

results_db = pymongo.MongoClient(settings.MONGO_HOST,
                                 settings.MONGO_PORT)[settings.MONGO_DB]
fs = GridFS(results_db)


@require_safe
def index(request):
    db = Database()
    tasks_files = db.list_tasks(limit=50,
                                category="file",
                                not_status=TASK_PENDING)
    tasks_urls = db.list_tasks(limit=50,
                               category="url",
                               not_status=TASK_PENDING)

    analyses_files = []
    analyses_urls = []
Example #30
import os

from gridfs import GridFS

import utils.sys.config

from utils.gadget.general import SysUtils

# Collection (GridFS bucket) holding the contents of unpacked files
from utils.gadget.my_file import MyFile
from utils.gadget.my_path import MyPath

# GridFS bucket collection for firmware files
fw_files_storage = GridFS(utils.sys.config.g_firmware_db_full,
                          collection='fw_files_storage')


class FwFilesStorage:
    @staticmethod
    def save(file_id, file_name, file_path, content_type, contents):
        # Write the file contents into the GridFS bucket
        fw_files_storage.put(contents,
                             content_type=content_type,
                             filename=file_id,
                             aliases=[file_name, file_path])
        # fw_files_storage.put(content.encode(encoding="utf-8"), content_type=content_type, filename=file_id,
        #                      aliases=[file_name, file_path])

    @staticmethod
    def fetch(file_id):
        grid_out = fw_files_storage.find_one({'filename': file_id})
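The listing is cut off at this point. A hedged sketch of how such a read helper typically finishes, assuming it should return the stored bytes; the helper name is illustrative, not from the source:

def fetch_file_bytes(file_id):
    # Sketch only: read back the newest file stored under this file_id
    # from the fw_files_storage bucket defined above; None if missing.
    grid_out = fw_files_storage.find_one({'filename': file_id})
    return grid_out.read() if grid_out is not None else None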