Code Example #1
def getSavedPlots():
    globalPlots = json.load(open("savedPlots.json"))
    userPlots = {}
    if (os.path.isfile('data/' + getUserPath() + '/userPlots.json')):
        userPlots = json.load(open('data/' + getUserPath() +
                                   '/userPlots.json'))
    #Merge an empty placeholder entry, the global plots and the user plots;
    #entries merged later (user plots) override earlier ones with the same name.
    allPlots = {"": "", **globalPlots, **userPlots}
    return allPlots
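A minimal sketch (not from the project, dataset names invented) illustrating the merge behaviour above: when the same plot name appears in both dictionaries, the user's entry wins because it is merged last.

globalPlots = {"benchmark": ["dataset_a"], "shared": ["dataset_b"]}
userPlots = {"shared": ["dataset_b", "my_upload"]}
allPlots = {"": "", **globalPlots, **userPlots}
assert allPlots["shared"] == ["dataset_b", "my_upload"]  # user entry overrides the global one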
Code Example #2
def savePlot():
    savedPlotName = request.form['name']
    selected_datasets = request.form['data'].split(",")
    userPlots = {}
    #Create the user directory if it does not exist - TODO: do this at account setup time
    directory = 'data/' + getUserPath()
    if not os.path.exists(directory):
        os.makedirs(directory)
    if os.path.isfile(os.path.join(directory, 'userPlots.json')):
        userPlots = json.load(open(os.path.join(directory, 'userPlots.json')))
    userPlots[savedPlotName] = selected_datasets

    with open(os.path.join(directory, 'userPlots.json'), 'w') as f:
        json.dump(userPlots, f)
    return redirect(url_for('main.dmplotter'))
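For reference, a sketch (names invented) of what userPlots.json ends up holding after a few saves: each saved plot name maps to the list of dataset names that arrived in the comma-separated 'data' form field.

example_userPlots = {
    "my_first_plot": ["dataset_a", "dataset_b"],
    "comparison": ["dataset_a", "dataset_c"],
}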
Code Example #3
File: plotter.py Project: m-deon/DD2LHC
def dataset_names():
    datasets = glob('data/*.xml')
    datasets.extend(glob('data/' + getUserPath() + '/*.xml'))
    for dataset in datasets:
        dataset = ntpath.basename(dataset)
        dataset = dataset.replace(DATA_FILE_EXT, '')
        yield dataset
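Hypothetical usage of the generator above: materialise it when a full list is needed, e.g. for a sorted list of the available dataset names.

available_datasets = sorted(dataset_names())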
Code Example #4
def upload():
    form = UploadForm(CombinedMultiDict((request.files, request.form)))
    if form.validate_on_submit():
        f = form.data_file.data
        filename = secure_filename(f.filename)
        directory = os.path.join(app.config['UPLOAD_FOLDER'], getUserPath())
        #Validate the data before writing anything to disk
        if validateFile(f.read()):
            if not os.path.exists(directory):
                os.makedirs(directory)
            f.seek(0)  # rewind the stream: validateFile() consumed it via f.read()
            f.save(os.path.join(directory, filename))
            flash(u'File uploaded.', 'info')
        else:
            flash(u'Could not parse input file - check required fields.',
                  'error')
    return redirect(url_for('main.dmplotter'))
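A hypothetical sketch of an UploadForm compatible with this handler, assuming Flask-WTF; the project's actual form class may differ.

from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileRequired

class UploadForm(FlaskForm):
    data_file = FileField(validators=[FileRequired()])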
Code Example #5
File: plotter.py Project: m-deon/DD2LHC
def get_metadata(dataset):
    input_file = os.path.join(DATA_LOCATION, dataset + DATA_FILE_EXT)
    if not os.path.isfile(input_file):
        input_file = os.path.join(DATA_LOCATION + getUserPath(),
                                  dataset + DATA_FILE_EXT)

    #XML Parsing Test
    result = untangle.parse(input_file)

    dataComment = result.limit.data_comment.cdata
    dataLabel = result.limit.data_label.cdata
    dataReference = result.limit.data_reference.cdata
    dateOfAnnouncement = result.limit.date_of_announcement.cdata
    experiment = result.limit.experiment.cdata
    dataformat = result.limit.dataformat.cdata
    measurementType = result.limit.measurement_type.cdata
    resultType = result.limit.result_type.cdata
    spinDependency = result.limit.spin_dependency.cdata
    xRescale = result.limit.x_rescale.cdata
    xUnits = result.limit.x_units.cdata
    yRescale = result.limit.y_rescale.cdata
    yUnits = result.limit.y_units.cdata

    metadata = {
        'fileName': dataset,
        'dataComment': dataComment,
        'dataLabel': dataLabel,
        'dataReference': dataReference,
        'dateOfAnnouncement': dateOfAnnouncement,
        'experiment': experiment,
        'dataformat': dataformat,
        'measurementType': measurementType,
        'resultType': resultType,
        'spinDependency': spinDependency,
        'xRescale': xRescale,
        'xUnits': xUnits,
        'yRescale': yRescale,
        'yUnits': yUnits
    }
    #print (metadata)
    return metadata
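The attribute access pattern above implies an XML file with a <limit> root whose child elements carry the metadata as text; untangle exposes each child element as an attribute and its text via .cdata. A minimal sketch with invented values (only a subset of the fields read above):

import untangle

xml_snippet = """
<limit>
  <data_label>Example 2018</data_label>
  <experiment>EXAMPLE</experiment>
  <spin_dependency>SI</spin_dependency>
  <y_rescale>1e-36</y_rescale>
</limit>
"""

doc = untangle.parse(xml_snippet)  # untangle.parse accepts a filename, URL, or raw XML string
print(doc.limit.data_label.cdata)  # -> 'Example 2018'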
Code Example #6
File: plotter.py Project: m-deon/DD2LHC
def get_data(dataset, modifier=''):

    input_file = os.path.join(DATA_LOCATION, dataset + DATA_FILE_EXT)
    if not os.path.isfile(input_file):
        input_file = os.path.join(DATA_LOCATION + getUserPath(),
                                  dataset + DATA_FILE_EXT)
    if not os.path.isfile(input_file):
        return None

    #XML Parsing
    result = untangle.parse(input_file)
    dataValues = result.limit.data_values.cdata
    experiment = result.limit.experiment.cdata
    dataformat = result.limit.dataformat.cdata
    spinDependency = result.limit.spin_dependency.cdata
    yRescale = result.limit.y_rescale.cdata
    #print (dataValues)

    #remove leading {[, trailing ]}
    rawData = dataValues.replace("{[", "").replace("]}", "").replace("\n", "")
    data = StringIO(rawData)

    #dataset_type, names = parseExperimentType(experiment)
    dataset_type, names = parseDataformat(dataformat)
    if (dataset_type == ''):
        return None

    #parse
    df = pd.read_csv(data,
                     delim_whitespace=True,
                     lineterminator=';',
                     names=names)
    #apply yRescale, i.e. multiply every value in column index 1 by yRescale
    df.iloc[:, 1] = df.iloc[:, 1].apply(lambda x: x * float(yRescale))

    #add a column of labels
    label = os.path.basename(input_file).split('.')[0]
    df.insert(0, 'label', label)

    #convert
    if dataset_type == 'DD':
        df['type'] = 'DD'
        if spinDependency == 'SD':  #BP
            dd2lhc_SD(df)
        elif spinDependency == 'SI':
            dd2lhc_SI(df, modifier if modifier else si_modifier)
        else:
            return None

    elif dataset_type == 'LHC':
        df['type'] = 'LHC'
        if spinDependency == 'SD':
            lhc2dd_SD(df, modifier if modifier else sd_modifier)
        elif spinDependency == 'SI':
            lhc2dd_SI(df, modifier if modifier else si_modifier)
        else:
            return None
        #extrapolate LHC Data
        #note: not sure if appending rows in this fashion is the best way to extrapolate and expand the DataFrame
        extrap_mdm = range(1, int(min(df['m_DM'])))
        extrap_sigma = np.repeat(min(df['sigma']), len(extrap_mdm))
        extrap_mMed = np.repeat(max(df['m_med']), len(extrap_mdm))
        extrap_df = pd.DataFrame({
            'label': label,
            'm_DM': extrap_mdm,
            'm_med': extrap_mMed,
            'sigma': extrap_sigma,
            'type': 'LHC'
        })
        df = pd.concat([df, extrap_df], ignore_index=True)

    else:
        return None
    #Data has been read from the file, converted, extrapolated, and returned as a DataFrame. None is returned if conversion fails.
    return df
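Hypothetical usage (dataset names invented): fetch several datasets and stack them into a single frame for plotting; get_data() returns None on any failure, so filter those out before concatenating.

frames = [get_data(name) for name in ('example_DD_limit', 'example_LHC_limit')]
combined = pd.concat([f for f in frames if f is not None], ignore_index=True)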