import os


def main(args):
    if not os.path.exists(args.rootdir):
        print(
            'Oops, bad path given for the directory that should contain the files to upload.'
        )
        return
    if not os.path.exists(args.csvfile):
        print('Oops, bad path given for the csv input file.')
        return

    rows_to_upload = []

    for row in read_csv(args.csvfile):
        if not row_should_be_omitted(args, row):
            rows_to_upload.append(row)

    if args.dryrun:
        print('Dry run: stopping, would otherwise copy', len(rows_to_upload),
              'items.')
    else:
        connection = make_ftp_connection(args.ftpaddress, args.login, args.pwd,
                                         args.ftppath)
        upload_rows(args.rootdir, args.ftppath, rows_to_upload, connection)
        quit_ftp_connection(connection)
        print('Done uploading.')
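# The FTP helpers used above are project-specific and not shown here. A
# minimal sketch of what they might look like, built on the standard-library
# ftplib (assumed behavior, not the original implementations; upload_rows is
# likewise project-specific and omitted):
import ftplib


def make_ftp_connection(address, login, pwd, ftppath):
    # Connect, authenticate, and change into the target remote directory.
    connection = ftplib.FTP(address)
    connection.login(login, pwd)
    connection.cwd(ftppath)
    return connection


def quit_ftp_connection(connection):
    # End the session politely; fall back to close() if the server has
    # already dropped the connection.
    try:
        connection.quit()
    except ftplib.all_errors:
        connection.close()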
def report_new_additions_mutopia(csvfile):
    composer_lookup = {}
    for c in composer_list:
        # no dates in this listing, keep it simple
        composer_lookup[c[0]] = '{0} {1}'.format(c[2], c[1])

    count = 0
    missing_composers = set()

    for row in read_csv(csvfile):
        if row['mutopiacomposer'] not in composer_lookup:
            missing_composers.add(row['mutopiacomposer'])

        elif row['omit?'] != 'T' and row['new?'] == 'T':
            count += 1
            print(composer_lookup[row['mutopiacomposer']] + ' | ' +
                  row['cn-title'] + ' | ' + row['cn-instrument'])

    print(str(count) + ' new additions.')
    if len(missing_composers) > 0:
        print('PROBLEM: MISSING COMPOSERS:')
        print(missing_composers)
    else:
        print('No missing composers.')
    print('Done with new additions report.\n')
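# read_csv is used throughout these examples but is not defined here. A
# minimal sketch, assuming it returns the whole file as a list of dicts keyed
# by the header row (the examples index rows by column name, and one of them
# slices the returned data, so a list of csv.DictReader rows fits):
import csv


def read_csv(csvpath):
    with open(csvpath, newline='') as f:
        return list(csv.DictReader(f))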
Example #3
def main(args):
    rows_to_copy = []
    for row in read_csv(args.csvfile):
        if not row_should_be_omitted(args, row):
            rows_to_copy.append(row)

    if args.dryrun:
        print('Dry run: stopping, would otherwise copy', len(rows_to_copy),
              'items.')
    else:
        count = 0
        for row in rows_to_copy:
            count += 1
            try:
                if args.mode == 'mutopia':
                    to_print_out = copy_mutopia(args.rootdir, args.outdir, row)
                elif args.mode == 'thesession':
                    to_print_out = copy_session(args.rootdir, args.outdir, row)
                else:
                    raise ValueError(
                        "Oops! We need a valid mode argument, either 'mutopia' or 'thesession'."
                    )

                print(str(count) + ' ' + to_print_out)

            except ValueError as err:
                print(err)

        print('Done.', count, 'pieces copied.')
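# row_should_be_omitted belongs to the surrounding project. A hedged sketch of
# the filtering it appears to perform, based on the flag columns used in the
# other examples; the 'newonly' attribute is hypothetical, shown only to
# illustrate how args could drive the filter:
def row_should_be_omitted(args, row):
    # Skip rows flagged for omission in the csv.
    if row['omit?'] == 'T':
        return True
    # Optionally restrict the run to rows marked as new.
    if getattr(args, 'newonly', False) and row['new?'] != 'T':
        return True
    return False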
Example #4
def handle_csv(csvpath, rootdir, outdir, mode):
    for row in read_csv(csvpath):
        if (row['omit?'] != 'T' and row['cn-code'] == 'False'
                and row['new?'] == 'T'):
            if mode == 'mutopia':
                handle_row_mutopia(row, rootdir, outdir)
            elif mode == 'thesession':
                handle_row_session(row, rootdir, outdir)
Example #5
import os


def main(args):
    if not os.path.exists(args.indir):
        print(
            'Oops, bad path given for the directory that should contain the ly files.'
        )
        return
    if not os.path.exists(args.csvpath):
        print('Oops, bad path given for the csv input file.')
        return
    makedirs_filepath(args.logfile)
    makedirs_filepath(args.errorfile)

    error_summary = []
    muto = args.mode == 'mutopia'
    csv_data = read_csv(args.csvpath)
    rows = csv_data[:int(args.count)] if args.count else csv_data
    for row in rows:
        vsn = row['ly-version']

        if (vsn_compare(vsn, greater_than, args.lowvsn)
                and vsn_compare(vsn, less_than, args.highvsn)
                and row['omit?'] != 'T'):

            if muto:
                lypaths = get_ly_paths(os.path.join(args.indir, row['path']))
            else:
                lypaths = [
                    os.path.join(args.indir, row['path'], row['filename'])
                ]

            print('____________________________')
            print(row['id'])
            mutocomp = row['mutopiacomposer'] if muto else ''
            print(row['parse-order'], ': ', mutocomp, row['cn-title'])

            for filepath in lypaths:
                command = get_command(row['ly-version'], args.highvsn,
                                      filepath)
                returncode, console_out = run_command(command)

                console_out_returncode = console_out + [str(returncode)]
                log_lines(console_out_returncode, args.logfile)
                print_lines(console_out_returncode)

                if returncode == 1:
                    error_summary.append(filepath)
                    log_lines(console_out, args.errorfile)

    error_header = ['CONVERT-LY ERROR SUMMARY']
    print('Done. There were', len(error_summary), 'errors.')
    log_lines(error_header + error_summary, args.errorfile)
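# get_command and run_command are project helpers that are not shown here.
# run_command is expected to return (returncode, console_out) with console_out
# as a list of output lines, since it is concatenated with a list above. A
# sketch using the standard subprocess module, under that assumption:
import subprocess


def run_command(command):
    # command is assumed to be a list of arguments, e.g. a convert-ly call.
    result = subprocess.run(command, capture_output=True, text=True)
    console_out = (result.stdout + result.stderr).splitlines()
    return result.returncode, console_out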
def report_new_additions_session(csvfile):
    count = 0
    additions = set()

    for row in read_csv(csvfile):
        if row['omit?'] != 'T' and row['new?'] == 'T':
            additions.add(row['cn-title'])
            count += 1

    # TODO: distinguish new tune additions and new settings of existing tunes

    for a in sorted(additions):
        print(a)

    print(str(len(additions)) + ' new tunes. ' + str(count) + ' new settings.')
    print('Done with new additions report.\n')
def get_old_meta_data(old_csv, id_field_name):
    old_meta_data = {}

    for row in read_csv(old_csv):
        item_id = int(row[id_field_name])

        if row['new?'] == 'T':
            print(
                '\nOOPS! - There is an item marked as NEW in the OLD csv file... ID: '
                + str(item_id))

        old_meta_data[item_id] = {
            'omit?': row['omit?'],
            'omit-reason': row['omit-reason'],
            'new?': row['new?'],
            'error-status?': row['error-status?']
        }
    return old_meta_data
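# One way the returned mapping might be used: carrying the manual flags from
# an older csv forward onto a newer one. merge_old_flags is a hypothetical
# helper written for illustration, not part of the original code:
def merge_old_flags(new_csv, old_csv, id_field_name):
    old_meta = get_old_meta_data(old_csv, id_field_name)
    merged = []
    for row in read_csv(new_csv):
        item_id = int(row[id_field_name])
        if item_id in old_meta:
            row.update(old_meta[item_id])
        merged.append(row)
    return merged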
import json


def make_json_session(csvfile, jsfile):
    # dict/object has ids as keys that map to lists/arrays of data for each item
    items_dict = {}
    # a list/array of ids ordered into the default sort order for 'browsing'
    titles = []
    titles_to_ids = {}

    for row in read_csv(csvfile):
        if row['omit?'] != 'T':

            ID = row['tune-id']
            print('ID', ID)
            if ID in items_dict:
                items_dict[ID][4].append(int(row['setting-number']))
                items_dict[ID][4].sort()
            else:
                fname = regexes['session-filename'].search(
                    row['filename']).group(1)
                titles.append(row['cn-title'])
                titles_to_ids[row['cn-title']] = int(ID)
                items_dict[ID] = [
                    row['cn-title'], row['meter'], fname,
                    int(row['setting-id']), [int(row['setting-number'])]
                ]

    titles.sort()
    sorted_ids = []
    for title in titles:
        sorted_ids.append(titles_to_ids[title])

    print('sorted titles', titles)
    print('sorted ids', sorted_ids)
    print('titles to ids', titles_to_ids)

    print('CSV data parsed.')

    with open(jsfile, 'w') as f:
        f.write('var sessionItems = ' + json.dumps(items_dict))
        f.write('\nvar sessionIdsSorted = ' + json.dumps(sorted_ids))
        print('JS file saved.')
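# 'regexes' is a project-level dict of compiled patterns. The entry used above
# has to expose group(1) as the portion of the filename worth keeping; a
# plausible sketch (assumed pattern, not the original) that strips a trailing
# .ly extension:
import re

regexes = {
    'session-filename': re.compile(r'(.+)\.ly$'),
}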
Example #9
def triage_rows(args):
    rows = []
    for row in read_csv(args.oldcsv):
        if not row_should_be_omitted(args, row):
            rows.append(row)
    return rows
import json


def make_json_mutopia(csvfile, jsfile):
    instrument_tally = {}
    style_tally = {}
    composer_tally = {}
    # dict/object has ids as keys that map to lists/arrays of data for each item
    items_dict = {}
    # a list/array of ids ordered into the default sort order for 'browsing'
    items_sorted_ids = []

    for row in read_csv(csvfile):
        if row['omit?'] != 'T':

            ID = int(row['id'])
            items_sorted_ids.append(ID)

            # INSTRUMENTS
            # inst_classifier is assumed to return the instruments matched for
            # this row along with running collections of unmatched ids and
            # tokens, which are reported after the loop.
            insts, no_instrument_match, unrecognized_inst_tokens = inst_classifier(
                row['cn-instrument'], row['id'])
            for i in insts:
                if i in instrument_tally:
                    instrument_tally[i] += 1
                else:
                    instrument_tally[i] = 1

            # STYLES
            # handle style 'Popular / Dance' that contains forward slash
            stl = row['cn-style'].replace(' / ', '')
            if stl in style_tally:
                style_tally[stl] += 1
            else:
                style_tally[stl] = 1

            # COMPOSERS
            comp = row['mutopiacomposer']
            if comp in composer_tally:
                composer_tally[comp] += 1
            else:
                composer_tally[comp] = 1

            multifile = (1 < len(row['filename'].split(',,, ')))

            items_dict[ID] = [
                stl,
                comp,  # 1
                row['cn-title'],
                insts,  # 3
                row['path'],  # 4
                False if multifile else row['filename'][:-3],
                row['license-type'] + row['license-vsn'],  # 6
                row['cn-opus'],
                row['cn-poet'],  # 8
                row['date'],
                row['arranger'],  # 10
            ]

    json_out = json.dumps(items_dict)
    print('JSON parsed.')

    print(
        '\nEach piece has to have at least one instrument that is recognized.')
    print('Pieces with no instrument recognized (fix these):',
          sorted(no_instrument_match))

    print(
        '\nA piece may have other instruments that are not recognized.  (Optionally add these.)'
    )
    print('Unrecognized Instruments:', sorted(unrecognized_inst_tokens))

    with open(jsfile, 'w') as f:
        f.write('var mutopiaItems = ' + json_out)
        f.write('\nvar mutopiaIdsSorted = ' + json.dumps(items_sorted_ids))
        print('JSON saved.')

        # composer lookup table also goes in json data file
        composer_lookup = {}

        for c in composer_list:
            if c[0] in composer_tally:
                composer_lookup[c[0]] = [
                    '{0} {1}'.format(c[2], c[1]), '{0}'.format(c[3])
                ]

        composer_lookup_text = json.dumps(composer_lookup)
        f.write('\nvar mutopiaComposerLookup = ' + composer_lookup_text)

        print('\nComposerLookup saved in JS file.')

    return style_tally, instrument_tally, composer_tally
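# The three tallies returned above are plain {name: count} dicts. A small
# illustrative helper (not part of the original module) showing one way they
# could be printed as a summary, most frequent first:
def print_tally(label, tally):
    print(label)
    for name, count in sorted(tally.items(), key=lambda kv: -kv[1]):
        print('  {0}: {1}'.format(name, count))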