Example #1
0
File: upload.py  Project: philippkraft/odmf
 def getindex(self, dir):
     """
     Build the index text for a data directory and render it as markdown.

     Concatenates the content of ``index.html`` (if present) with a
     bullet list generated from the ``.import.hist`` log (if present).

     :param dir: directory below the data path to describe
     :return: the combined text rendered via ``web.markdown``
     """
     index = Path(op.join(datapath, dir, 'index.html'))
     io = StringIO()
     if index.exists():
         # BUGFIX: use a context manager so the file handle is closed
         # deterministically (the original leaked it until GC).
         # NOTE(review): `.absolute` is accessed as an attribute here, so the
         # project's Path class presumably exposes it as a property — kept as-is.
         with open(index.absolute) as infile:
             io.write(infile.read())
     imphist = Path(op.join(datapath, dir, '.import.hist'))
     if imphist.exists():
         io.write('\n')
         # Each history line has up to 4 comma-separated fields; together
         # with the directory they fill the 5 placeholders below.
         with open(imphist.absolute) as histfile:
             for line in histfile:
                 fields = line.split(',', 3)
                 io.write(' * file:%s/%s imported by user:%s at %s into %s\n' %
                          tuple([imphist.up()] + fields))
     return web.markdown(io.getvalue())
Example #2
0
File: upload.py  Project: philippkraft/odmf
    def logimport(self, filename, kwargs, import_with_class=LogbookImport):
        """
        Import a logbook-style file into the database.

        :param filename: path of the file to import, relative to the site root
        :param kwargs: request parameters; the presence of the key
            ``'commit'`` triggers an actual database commit
        :param import_with_class: importer class to instantiate
            (defaults to ``LogbookImport``)
        :return: the rendered ``logimport.html`` page
        :raises web.HTTPRedirect: redirect to the download page after a
            successful commit
        """
        # BUGFIX: removed unused local import `dataimport.importlog as il`
        # (the alias was never referenced in this method).
        t0 = time.time()

        absfile = web.abspath(filename.strip('/'))
        path = Path(absfile)

        import dataimport as di
        # checkimport returns a markdown report of problems with the file
        error = web.markdown(di.checkimport(path.absolute))

        config = None
        if import_with_class == ManualMeasurementsImport:
            # Manual measurements need a per-file configuration
            config = ManualMeasurementsImport.from_file(path.absolute)
            print("path = %s;\nabsfile = %s" % (path, absfile))

        from cherrypy import log
        log("Import with class %s" % import_with_class.__name__)

        li = import_with_class(absfile, web.user(), config=config)
        # TODO: Sometimes this is causing a delay
        logs, cancommit = li('commit' in kwargs)
        # TODO: REFACTORING FOR MAINTAINABILITY

        t1 = time.time()

        log("Imported in %.2f s" % (t1 - t0))

        if 'commit' in kwargs and cancommit:
            di.savetoimports(absfile, web.user(), ["_various_as_its_manual"])
            raise web.HTTPRedirect('/download?dir=' + escape(path.up()))
        else:
            return web.render('logimport.html', filename=path, logs=logs,
                              cancommit=cancommit, error=error)\
                .render('html', doctype='html')
Example #3
0
File: upload.py  Project: philippkraft/odmf
    def instrumentimport(self, filename, kwargs):
        """
        Load instrument data using a .conf file.

        :param filename: path of the data file to import, relative to the
            site root
        :param kwargs: request parameters; recognized keys are
            ``startdate``, ``enddate``, ``site``, ``instrument``,
            ``loadstat`` and ``importdb``
        :return: the rendered ``dbimport.html`` page
        """
        t0 = time.time()

        # Collects user-facing problems to show on the result page
        errorstream = StringIO()

        # TODO: Major refactoring of this code logic, when to load gaps, etc.
        path = Path(web.abspath(filename.strip('/')))
        print("path = %s" % path)
        import dataimport as di
        error = web.markdown(di.checkimport(path.absolute))
        startdate = kwargs.get('startdate')
        enddate = kwargs.get('enddate')
        siteid = web.conv(int, kwargs.get('site'))
        instrumentid = web.conv(int, kwargs.get('instrument'))
        config = di.getconfig(path.absolute)

        # BUGFIX: valuetype was only bound when a config exists, but it is
        # used unconditionally in the finddateGaps call below — without a
        # config (and with siteid/instrumentid set) that raised NameError.
        valuetype = None
        if config:
            valuetype = [e.valuetype for e in config.columns]
            config.href = Path(config.filename).href
        else:
            errorstream.write(
                "No config available. Please provide a config for"
                " computing a decent result.")

        if startdate:
            startdate = web.parsedate(startdate)
        if enddate:
            enddate = web.parsedate(enddate)

        stats = gaps = datasets = None
        sites = []
        possible_datasets = []

        if startdate and enddate:
            gaps = [(startdate, enddate)]

        if siteid and (instrumentid or config):
            absfile = web.abspath(filename.strip('/'))
            adapter = di.get_adapter(absfile, web.user(), siteid, instrumentid,
                                     startdate, enddate)
            adapter.errorstream = errorstream
            if 'loadstat' in kwargs:
                # Derive the date range from the per-dataset statistics
                stats = adapter.get_statistic()
                startdate = min(v.start for v in stats.values())
                enddate = max(v.end for v in stats.values())
            if 'importdb' in kwargs and startdate and enddate:
                gaps = None
                datasets = di.importfile(absfile, web.user(), siteid,
                                         instrumentid, startdate, enddate)
            else:
                gaps = di.finddateGaps(siteid, instrumentid, valuetype,
                                       startdate, enddate)
                error = adapter.errorstream.getvalue()

            adapter.errorstream.close()

        t1 = time.time()

        log("Imported in %.2f s" % (t1 - t0))

        return web.render('dbimport.html', di=di, error=error,
                          filename=filename, instrumentid=instrumentid,
                          dirlink=path.up(), siteid=siteid, gaps=gaps,
                          stats=stats, datasets=datasets, config=config,
                          sites=sites, possible_datasets=possible_datasets)\
            .render('html', doctype='html')