Example no. 1
def getDatasetList():
    dataset_folder = _folder.data_abspath(_get_dataset_folder())
    if os.path.exists(dataset_folder):
        dataset_folder_contents = os.listdir(dataset_folder)
        dataset_names = [item for item in dataset_folder_contents if os.path.isdir(os.path.join(dataset_folder, item))]
        return dataset_names
    else:
        return []
Example no. 2
def getLabelsSessions(dataset):
    sessions = []

    folder = _folder.data_abspath(_get_labels_folder(), dataset)
    if os.path.exists(folder):
        for fn in os.listdir(folder):
            sessions.append(fn)

    return sessions
Example no. 3
def main(dataset, *, allfiles=False):
    # Delete the dataset folder
    dataset_folder = _helper.datasetDir(dataset)

    if not os.path.exists(dataset_folder):
        _helper.errorExit('The dataset does not exist ' + dataset_folder)

    shutil.rmtree(dataset_folder)
    print("Deleted dataset ", dataset_folder)

    if allfiles:

        # Delete the labels folder for the given dataset
        labels_folder = _folder.data_abspath(_helper._get_labels_folder(),
                                             dataset)

        if os.path.exists(labels_folder):
            shutil.rmtree(labels_folder)
            print("Deleted labels for the dataset ", labels_folder)

        # Delete exported labels files for the given dataset
        export_file = _helper.exportFilename(dataset)
        if os.path.exists(export_file):
            os.remove(export_file)
            print("Deleted exported labels file for the dataset ", export_file)

        # Delete all mturk submissions for the given dataset
        mturk_submit_folder = _folder.data_abspath('mturksubmit')
        if os.path.exists(mturk_submit_folder):
            mturk_session_ids = os.listdir(mturk_submit_folder)

            for session in mturk_session_ids:
                session_datasets = os.listdir(
                    os.path.join(mturk_submit_folder, session))
                if dataset in session_datasets:
                    dataset_folder = os.path.join(mturk_submit_folder, session,
                                                  dataset)
                    shutil.rmtree(dataset_folder)

            print("Deleted mturk submissions for the dataset ", dataset)
Example no. 4
def mturkGetSubmissions(session):
    submissions = []

    folder = _folder.data_abspath('mturksubmit', session)
    if os.path.exists(folder):
        for dataset in os.listdir(folder):
            submitfilename = mturkSubmitLabelsFilename(dataset, session)
            if os.path.exists(submitfilename):
                with open(submitfilename, 'rt') as dfile:
                    submission = json.loads(dfile.read())
                    submissions.append(submission)

    return submissions
Example no. 5
def checkPlaylogFileEmpty():
    if args.file is None:
        path = _folder.data_abspath('playlog')
    else:
        path = args.file
    # Check whether the log file exists and is empty.
    if os.path.exists(path):
        if os.path.getsize(path) > 0:
            return "[FAIL] Log file is not empty."
        else:
            return "[PASS] Log file exists and is empty."
    else:
        return "[FAIL] Log file doesn't exists."
Example no. 6
    def _process_request(self, path, vars):
        global _debug_delay
        if _debug_delay:
            time.sleep(_debug_delay)

        if path == '/signaligner.html':
            if 'dataset' in vars and ALNUMUN_RE.match(vars['dataset']):
                dataset = vars['dataset']
            else:
                dataset = 'null'

            if 'session' in vars and ALNUMUN_RE.match(vars['session']):
                session = vars['session']
            else:
                session = SESSION_ERROR

            def replace_data(data):
                data = replace_vars(data, session, False)
                return data

            self._send_header_and_file_data(
                _folder.file_abspath('signaligner/signaligner.html'), False,
                CTYPE_HTML, replace_data)

        elif path == '/signaligner.js':

            def replace_data(data):
                data = replace_mode_config(data)
                return data

            self._send_header_and_file_data(
                _folder.file_abspath('signaligner/signaligner.js'), False,
                CTYPE_JS, replace_data)

        elif path == '/fetchdatasetlist':
            datasets = _helper.getDatasetList()
            self._send_header(200, CTYPE_PLAIN)
            self._send_data(json.dumps(datasets), False)

        elif path == '/fetchdataset':
            if 'dataset' in vars and ALNUMUN_RE.match(vars['dataset']):
                dataset_name = vars['dataset']

                if 'type' in vars and vars['type'] == 'config':
                    file_path = _helper.datasetConfigFilename(dataset_name)
                elif ('type' in vars and vars['type'] == 'tile'
                      and 'id' in vars and ALNUMUN_RE.match(vars['id'])):
                    file_path = os.path.join(
                        _helper.datasetTileDir(dataset_name),
                        vars['id'] + '.json')
                else:
                    self._send_header(404, CTYPE_PLAIN)
                    return

                if not os.path.exists(file_path):
                    self._send_header(404, CTYPE_PLAIN)
                    return

                self._send_header_and_file_data(file_path, False, CTYPE_PLAIN)
            else:
                self._send_header(404, CTYPE_PLAIN)

        elif path == '/fetchlabels':
            if 'dataset' in vars and ALNUMUN_RE.match(vars['dataset']):
                dataset = vars['dataset']

                self._send_header(200, CTYPE_PLAIN)
                labels = _helper.getLabelsLatest(dataset)
                if labels:
                    self._send_data(json.dumps(labels), False)
            else:
                self._send_header(404, CTYPE_PLAIN)

        elif path == '/reportlabels':
            if 'data' in vars:
                data = json.loads(vars['data'])

                if ('dataset' in data and ALNUMUN_RE.match(data['dataset'])
                        and 'session' in data and ALNUMUN_RE.match(data['session'])):
                    dataset = data['dataset']
                    session = data['session']

                    with open(
                            _helper.ensureDirExists(
                                _helper.logLabelsFilename(dataset, session),
                                True), 'at') as dfile:
                        dfile.write(json.dumps(data) + '\n')

                    with open(
                            _helper.ensureDirExists(
                                _helper.latestLabelsFilename(dataset, session),
                                True), 'wt') as dfile:
                        dfile.write(json.dumps(data) + '\n')

                    with open(
                            _helper.ensureDirExists(
                                _helper.latestLabelsFilename(dataset, session),
                                True), 'rt') as dfile:
                        response = json.loads(dfile.read())

                    self._send_header(200, CTYPE_PLAIN)
                    self._send_data(json.dumps(response), False)

                else:
                    self._send_header(404, CTYPE_PLAIN)

            else:
                self._send_header(404, CTYPE_PLAIN)

        elif path == '/mturksubmit' or path == '/mturksubmissions':
            if 'data' in vars:
                data = json.loads(vars['data'])

                if ('dataset' in data and ALNUMUN_RE.match(data['dataset'])
                        and 'session' in data and ALNUMUN_RE.match(data['session'])):
                    dataset = data['dataset']
                    session = data['session']

                    if path == '/mturksubmit':
                        mturk_submit = _helper.mturkSubmitLabelsFilename(
                            dataset, session)
                        if not os.path.exists(mturk_submit):
                            with open(
                                    _helper.ensureDirExists(
                                        mturk_submit, True), 'wt') as dfile:
                                dfile.write(json.dumps(data) + '\n')

                    submissions = _helper.mturkGetSubmissions(session)

                    total = 0
                    datasets = []
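                    # Worked example with illustrative numbers: a non-tutorial submission with
                    # score 80 and 2 days of data earns (0.80 ** 2) * 2 = 1.28 dollars; a
                    # tutorial submission with score 100 and 1 day earns max(1.0, 0.20) * 0.05
                    # = 0.05 dollars, and the 0.20 floor keeps tutorial pay at or above 1 cent.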
                    for submission in submissions:
                        score = submission['score'] / 100.0
                        score = score**2
                        score *= submission['daysofdata']
                        # minimum of 1 cent for tutorial levels, 20 cents for challenge
                        score = max(score, 0.20)
                        if submission['istutorial']:
                            score *= 0.05
                        total += score
                        datasets.append(submission['dataset'])

                    total = int(total * 100)
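                    # Completion code: a 3-character per-session prefix, the zero-padded cent
                    # total, then the first 3 hex digits of the md5 of those characters as a
                    # checksum.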
                    if session not in _mturk_session_codes:
                        _mturk_session_codes[session] = _helper.makeId()[:3]

                    code = _mturk_session_codes[session]
                    code = code + ('%03d' % total).upper()
                    code = code + hashlib.md5(
                        code.encode('utf-8')).hexdigest()[:3].upper()

                    response = {
                        'amount': '$%d.%02d' % (total // 100, total % 100),
                        'code': code,
                        'datasets': datasets
                    }

                    self._send_header(200, CTYPE_PLAIN)
                    self._send_data(json.dumps(response), False)

                else:
                    self._send_header(404, CTYPE_PLAIN)

            else:
                self._send_header(404, CTYPE_PLAIN)

        elif path == '/log':
            if 'data' in vars:
                with open(
                        _helper.ensureDirExists(
                            _folder.data_abspath('playlog'), True),
                        'at') as dfile:
                    dfile.write(vars['data'] + '\n')

            self._send_header(200, CTYPE_PLAIN)

        elif HTML_RE.match(path):
            if path == '/mturk_start.html':
                global _mode
                if _mode != 'MTURK':
                    self._send_header(200, CTYPE_PLAIN)
                    self._send_data(
                        'mode must be MTURK to request mturk_start.html',
                        False)
                    return

            if 'session' in vars and ALNUMUN_RE.match(vars['session']):
                session = vars['session']
            else:
                session = SESSION_ERROR

            def replace_data(data):
                return replace_vars(data, session, True)

            self._send_header_and_file_data(
                _folder.file_abspath('static' + path), False, CTYPE_HTML,
                replace_data)

        elif PNG_RE.match(path):
            self._send_header_and_file_data(
                _folder.file_abspath('static' + path), True, CTYPE_PNG)

        elif JS_RE.match(path):
            self._send_header_and_file_data(
                _folder.file_abspath('static' + path), False, CTYPE_JS)

        elif CSS_RE.match(path):
            self._send_header_and_file_data(
                _folder.file_abspath('static' + path), False, CTYPE_CSS)

        else:
            self._send_header(404, CTYPE_PLAIN)
Example no. 7
def mturkSubmitLabelsFilename(dataset, session):
    return _folder.data_abspath('mturksubmit', session, dataset, 'labels.mturksubmit.json')
Example no. 8
def exportFilename(dataset):
    return _folder.data_abspath('export', dataset + '.csv')
Example no. 9
def mhealthfolder(dataset, signal):
    return _folder.data_abspath('algo', dataset, 'mhealth', signal)
Example no. 10
def latestLabelsFilename(dataset, session):
    return _folder.data_abspath(_get_labels_folder(), dataset, session, 'labels.latest.json')
Example no. 11
def _get_labels_folder():
    if os.path.exists(_folder.data_abspath('labels_custom')):
        return 'labels_custom'
    return 'labels'
Example no. 12
def datasetDir(dataset):
    common_dir = _folder.file_abspath('common', 'datasets', dataset)
    if os.path.exists(common_dir):
        return common_dir
    return _folder.data_abspath(_get_dataset_folder(), dataset)
Example no. 13
def _get_dataset_folder():
    if os.path.exists(_folder.data_abspath('datasets_custom')):
        return 'datasets_custom'
    return 'datasets'
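A minimal sketch of how these folder helpers compose, assuming file_abspath resolves against the repository root and data_abspath against the data directory; the dataset name 'example01' is hypothetical:

# Resolution order encoded by datasetDir() and _get_dataset_folder() above:
#   1. common/datasets/example01 bundled with the code, if present;
#   2. the data directory's datasets_custom/example01 when a datasets_custom folder exists;
#   3. the data directory's datasets/example01 otherwise.
print(datasetDir('example01'))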
Example no. 14
parser.add_argument(
    '--session',
    type=str,
    help='The session for which you want to find log info. (default: all)',
    default=None)
parser.add_argument('--file',
                    type=str,
                    help='Log file name. (default: playlog)',
                    default=None)
parser.add_argument('--stdout',
                    action='store_true',
                    help='Write output to stdout.')
args = parser.parse_args()

if args.file is None:
    path = _folder.data_abspath('playlog')
else:
    path = args.file

# Find times of all events for each session
session_event_times = {}
session_event_labels = {}
session_event_mturk_times = {}
session_event_zoom_levels = {}
session_event_article = {}
session_event_video = {}
session_event_slideshow = {}
session_event_submissions = {}

slideshow_pages = ["total"]
Example no. 15
def algofolder(dataset, signal):
    return _folder.data_abspath('algo', dataset, 'output', signal)
Example no. 16
def logLabelsFilename(dataset, session):
    return _folder.data_abspath(_get_labels_folder(), dataset, session, 'labels.log.jsons')
Example no. 17
# set up logging to files
class Logger:
    def __init__(self, stream1, stream2):
        self.stream1 = stream1
        self.stream2 = stream2

    def write(self, data):
        self.stream1.write(data)
        self.stream2.write(data)

    def flush(self):
        self.stream1.flush()
        self.stream2.flush()


logfilename = _folder.data_abspath(
    'log', 'signalauncher.' + str(os.getpid()) + '.txt')
logfile = open(_helper.ensureDirExists(logfilename, True), 'wt')
sys.stdout = Logger(sys.stdout, logfile)
sys.stderr = Logger(sys.stderr, logfile)


# utility functions
def datasetexists(dataset):
    out_folder = _helper.datasetDir(dataset)
    return os.path.exists(out_folder)


def mhealthfolder(dataset, signal):
    return _folder.data_abspath('algo', dataset, 'mhealth', signal)