Exemplo n.º 1
0
def script2():
    """Switch the OpenOffice connection ports configured on the current site."""
    from collective.documentgenerator.utils import update_oo_config
    verbose('Updating ports on %s' % obj.absolute_url_path())
    # solr config update deliberately kept disabled:
    #from imio.dms.mail.utils import update_solr_config
    #update_solr_config()
    update_oo_config()
    transaction.commit()
Exemplo n.º 2
0
def patch_instance(inst='instance-debug'):
    """Patch a buildout instance interpreter script to also load ploneCustom.css.

    Inserts a ``_val = _val.replace(...)`` line just before the
    ``exec(_val)`` statement, reusing that statement's indentation.
    Idempotent: does nothing when the file already mentions
    'ploneCustom.css'.

    :param inst: buildout part name (default 'instance-debug')
    :return: False when the interpreter file is missing or has no
             ``exec(_val)`` anchor; None otherwise.
    """
    idp = 'parts/{}/bin/interpreter'.format(inst)
    if not os.path.exists(idp):
        error("'{}' doesn't exist: cannot patch it".format(idp))
        return False
    lines = read_file(idp)
    if 'ploneCustom.css' in ''.join(lines):
        verbose("=> Already patched: '{}'".format(idp))
        return
    # Locate the exec(_val) line: the patch must be inserted right before
    # it, with the same leading indentation.  The original code fell
    # through with a stale (or unbound) index when no line matched.
    anchor = None
    sp = 0
    for i, line in enumerate(lines):
        if 'exec(_val)' in line:
            anchor = i
            sp = len(line) - len(line.lstrip())
            break
    if anchor is None:
        error("'exec(_val)' not found in '{}': cannot patch it".format(idp))
        return False
    lines.insert(
        anchor, "{}{}".format(
            ' ' * sp,
            '_val = _val.replace("\'); from AccessControl.SpecialUsers import system '
            'as user;", "/ploneCustom.css\'); from AccessControl.SpecialUsers import '
            'system as user;")'))
    verbose("=> Patching: '{}'".format(idp))
    # with-statement guarantees the file handle is closed even on error
    with open(idp, 'w') as fh:
        fh.write('\n'.join(lines))
Exemplo n.º 3
0
def script1_2():
    """Grant the pst_editors group local roles on every projectspace and reindex."""
    verbose('Pst migration on %s' % obj.absolute_url_path())
    roles = ('Reader', 'Editor', 'Reviewer', 'Contributor')
    for brain in obj.portal_catalog(portal_type='projectspace'):
        project_space = brain.getObject()
        project_space.manage_addLocalRoles("pst_editors", roles)
        project_space.reindexObject()
        project_space.reindexObjectSecurity()
Exemplo n.º 4
0
def script1_3():
    """Reimport the faceted navigation config of operationalobjectives folders.

    For every projectspace holding an 'operationalobjectives' folder,
    reimports the faceted XML configuration, resets the default
    collection to the 'all' collection, then reruns the core 'viewlets'
    profile step and commits.
    """
    verbose('Pst dashboards migration on %s' % obj.absolute_url_path())
    catalog = obj.portal_catalog
    from collective.eeafaceted.collectionwidget.utils import _updateDefaultCollectionFor
    from imio.project.pst import add_path
    for brain in catalog(portal_type='projectspace'):
        ps = brain.getObject()
        if 'operationalobjectives' not in ps:
            continue
        folder = ps['operationalobjectives']
        xmlpath = add_path('faceted_conf/operationalobjective.xml')
        # close the xml file once imported (the original leaked the handle)
        with open(xmlpath) as xmlfile:
            folder.unrestrictedTraverse('@@faceted_exportimport').import_xml(
                import_file=xmlfile)
        _updateDefaultCollectionFor(folder, folder['all'].UID())
    obj.portal_setup.runImportStepFromProfile('imio.project.core:default',
                                              'viewlets',
                                              run_dependencies=False)
    transaction.commit()
Exemplo n.º 5
0
def script1_1():
    """Migrate projectspaces for the archive feature.

    Reloads the typeinfo profile step, marks each projectspace with
    IImioPSTProject, fills default budget_years when empty, grants
    pst_editors local roles, reloads the actions profile step and drops
    the pst_editors entry from the FTI static local-roles config of the
    'internally_published' state.
    """
    verbose('Pst archive migrations on %s' % obj.absolute_url_path())
    from imio.project.pst.interfaces import IImioPSTProject
    from zope.interface import alsoProvides
    # consider modified schema for projectspace
    obj.portal_setup.runImportStepFromProfile('imio.project.core:default',
                                              'typeinfo',
                                              run_dependencies=False)
    verbose('Typeinfo updated')
    # set marker interface
    catalog = obj.portal_catalog
    for brain in catalog(portal_type='projectspace'):
        ps = brain.getObject()
        alsoProvides(ps, IImioPSTProject)
        if not ps.budget_years:
            ps.budget_years = [2013, 2014, 2015, 2016, 2017, 2018]
        ps.manage_addLocalRoles("pst_editors", (
            'Reader',
            'Editor',
            'Reviewer',
            'Contributor',
        ))
        ps.reindexObject()
        ps.reindexObjectSecurity()
    verbose('Pstproject: marker added, years added, localroles added')
    # add archive action
    obj.portal_setup.runImportStepFromProfile('imio.project.pst:default',
                                              'actions',
                                              run_dependencies=False)
    verbose('Actions updated')
    # update dexterity type local roles
    from plone.dexterity.interfaces import IDexterityFTI
    from zope.component import getUtility
    fti = getUtility(IDexterityFTI, name='projectspace')
    lr = getattr(fti, 'localroles')
    lrsc = lr['static_config']
    if 'internally_published' in lrsc and 'pst_editors' in lrsc[
            'internally_published']:
        del (lrsc['internally_published']['pst_editors'])
        # flag the mapping as changed so the ZODB persists the deletion
        lr._p_changed = True
    verbose('Dexterity local roles removed')
Exemplo n.º 6
0
def script1():
    """Recompute budgets on every projectspace.

    For each projectspace: logs the budget state, calls
    clean_budget/delete (keeping non-empty budgets, empty_budget=False),
    re-triggers onModifyProject on every contained pstaction /
    operationalobjective / strategicobjective so values are
    re-aggregated, then logs the resulting state and commits.
    """
    verbose('Pst budget correction on %s' % obj.absolute_url_path())
    catalog = obj.portal_catalog
    from imio.project.core.events import onModifyProject
    for brain in catalog(portal_type='projectspace'):
        ps = brain.getObject()
        verbose(ps.absolute_url())
        ret = ps.restrictedTraverse('clean_budget/display')()
        verbose("Before: {}".format(ret.split('<br />\n')[0]))
        ps.restrictedTraverse('clean_budget/delete')(empty_budget=False)
        path = brain.getPath()
        pt = ('pstaction', 'operationalobjective', 'strategicobjective')
        # distinct loop variable: the original shadowed the outer 'brain'
        for sub_brain in catalog(portal_type=pt, path=path, sort_on='path'):
            onModifyProject(sub_brain.getObject(), None)
        ret = ps.restrictedTraverse('clean_budget/display')()
        verbose("After : {}".format(ret.split('<br />\n')[0]))
    transaction.commit()
Exemplo n.º 7
0
def generate(output_dir, domain):
    """ Generate taskjuggler report

    Runs the taskjuggler report command into a dated sub-folder of
    *output_dir*, collects command errors and generation errors, renders
    the index page and removes the dated folder when the run failed.

    :param output_dir: target directory of the generated report
    :param domain: value passed through to the index template
    """
    verbose("Begin of taskjuggler report")
    records_nb = 0
    output_dir = output_dir.rstrip('/')
    # one sub-folder per day (YYYY-MM-DD) with the shared stylesheet linked in
    DAY_DIR = os.path.join(output_dir, date.strftime(date.today(), "%Y-%m-%d"))
    rep_cmd = base_rep_cmd.replace('DAY_DIR', DAY_DIR)
    if not os.path.exists(DAY_DIR):
        os.makedirs(os.path.join(DAY_DIR, 'css'))
        os.symlink('%s/custom.css' % BUILD_PATH, '%s/css/custom.css' % DAY_DIR)
    outfiles['index']['file'] = os.path.join(output_dir,
                                             outfiles['index']['filename'])
    outfiles['error']['file'] = os.path.join(output_dir,
                                             outfiles['error']['filename'])
    # [error page filename, error count] passed to the template
    report_err = [outfiles['error']['filename'], 0]
    verbose("Running command: %s" % rep_cmd)
    (cmd_out, cmd_err) = runCommand(rep_cmd)
    errors = [err for err in cmd_err if 'Error: ' in err]
    if errors:
        errors_str = '\n'.join(errors)
        error("error running command %s : %s" % (rep_cmd, errors_str))
        write_to(outfiles, 'error', errors_str)
        report_err[1] = len(errors)
    # [generation errors page filename, error count]
    gen_err = ['generation_errors.html', 0]
    if os.path.exists(os.path.join(output_dir, gen_err[0])):
        lines = read_file(os.path.join(output_dir, gen_err[0]),
                          skip_empty=True)
        # Records number:
        for line in lines:
            # mo = records_pat.match(line)
            mo = tickets_pat.match(line)
            if mo:
                records_nb = mo.group(1)
                break
        if lines:
            # NOTE(review): assumes exactly one non-error header line — confirm
            gen_err[1] = len(lines) - 1
    olds = read_dir(output_dir, only_folders=True)
    template = env.get_template('index.html')
    rendered = template.render(report_err=report_err,
                               gen_err=gen_err,
                               olds=olds,
                               records_nb=records_nb,
                               domain=domain)
    write_to(outfiles, 'index', rendered.encode('utf8'))
    close_outfiles(outfiles)
    # delete wrong generated folder
    if report_err[1] and os.path.exists(DAY_DIR):
        shutil.rmtree(DAY_DIR, ignore_errors=True)
    verbose("End of taskjuggler report")
Exemplo n.º 8
0
def generate(output_dir, domain):
    """ Generate taskjuggler report """
    verbose("Begin of taskjuggler report")
    records_nb = 0
    output_dir = output_dir.rstrip('/')
    # one sub-folder per day, with the shared stylesheet symlinked in
    day_dir = os.path.join(output_dir, date.strftime(date.today(), "%Y-%m-%d"))
    report_cmd = base_rep_cmd.replace('DAY_DIR', day_dir)
    if not os.path.exists(day_dir):
        os.makedirs(os.path.join(day_dir, 'css'))
        os.symlink('%s/custom.css' % BUILD_PATH, '%s/css/custom.css' % day_dir)
    for key in ('index', 'error'):
        outfiles[key]['file'] = os.path.join(output_dir, outfiles[key]['filename'])
    report_err = [outfiles['error']['filename'], 0]
    verbose("Running command: %s" % report_cmd)
    cmd_out, cmd_err = runCommand(report_cmd)
    error_lines = [ln for ln in cmd_err if 'Error: ' in ln]
    if error_lines:
        joined = '\n'.join(error_lines)
        error("error running command %s : %s" % (report_cmd, joined))
        write_to(outfiles, 'error', joined)
        report_err[1] = len(error_lines)
    gen_err = ['generation_errors.html', 0]
    gen_err_path = os.path.join(output_dir, gen_err[0])
    if os.path.exists(gen_err_path):
        gen_lines = read_file(gen_err_path, skip_empty=True)
        # the first matching line carries the records count
        for ln in gen_lines:
            match = tickets_pat.match(ln)
            if match:
                records_nb = match.group(1)
                break
        if gen_lines:
            gen_err[1] = len(gen_lines) - 1
    olds = read_dir(output_dir, only_folders=True)
    rendered = env.get_template('index.html').render(
        report_err=report_err, gen_err=gen_err, olds=olds,
        records_nb=records_nb, domain=domain)
    write_to(outfiles, 'index', rendered.encode('utf8'))
    close_outfiles(outfiles)
    # a failed run leaves a bogus dated folder behind: drop it
    if report_err[1] and os.path.exists(day_dir):
        shutil.rmtree(day_dir, ignore_errors=True)
    verbose("End of taskjuggler report")
Exemplo n.º 9
0
def generate(dsn):
    """ Generate taskjuggler files from trac

    Reads ticket records from the trac database, builds the milestone
    ('mst'), ticket ('tkts') and resource structures in the module-level
    dicts (msts, msts_due, tkts, resources, tkts_links, leaves), then
    renders the trac.tjp, resources.tji, reports.tji and tasks.tji
    templates into outfiles.

    :param dsn: database connection string passed to the SQL helpers
    """
    now = datetime.now()
    # project starts at the next full hour
    prj_start = now - timedelta(minutes=now.minute) + timedelta(hours=1)
    # milestones due before tomorrow are clamped to this minimum date
    min_mst_due = prj_start + timedelta(days=1)
    min_mst_due = datetime.strftime(min_mst_due, "%Y-%m-%d")
    getBlockingTickets(dsn)
    records = selectWithSQLRequest(dsn, query, TRACE=TRACE)
    #verbose("Records number: %d" % len(records))
    print >> sys.stderr, "# Records number: %d<br />" % len(records)
    #("URBAN - DEV - Permis d'environnement classe 1", '2012-12-31', 5340, 'Ajouter le champ "Secteur d\'activit\xc3\xa9"',
    #'NOUVEAU', 'sdelcourt', 'Urbanisme communes (URBAN)', Decimal('0.0'), Decimal('0'), "data grid avec au moins ")
    tickets_nb = 0
    for rec in records:
        # NOTE(review): 'id' shadows the builtin here
        (mst, mst_due, id, summary, status, owner, prj, estimated, hours,
         description) = rec
        estimated = float(estimated)
        hours = float(hours)
        # milestone name format: "<project> - <work package>[ - ...]"
        try:
            mst_list = mst.split(' - ')
            (mst_prj, mst_wrk) = (mst_list[0], mst_list[1])
            if mst_prj not in PROJECTS:
                herror("Project '%s' not well extracted from '%s' (%s, %s)" %
                       (mst_prj, mst, owner, a_link(TICKET_URL, id)))
        # NOTE(review): bare except — hides any error, and mst_prj/mst_wrk
        # may keep the previous iteration's values afterwards
        except:
            herror("Project cannot be extracted from '%s' (%s, %s)" %
                   (mst, owner, a_link(TICKET_URL, id)))
        #due = datetime.strptime(mst_due, '%Y/%m/%d').date()
        # We skip unfollowed projects !!
        if mst_prj not in PROJECTS_TO_KEEP:
            continue

        tickets_nb += 1
        # nested structure: msts_due[project][work package][due date] -> [mst]
        if mst_prj not in msts_due:
            msts_due[mst_prj] = {}
        if mst_wrk not in msts_due[mst_prj]:
            msts_due[mst_prj][mst_wrk] = {}
        if mst_due not in msts_due[mst_prj][mst_wrk]:
            msts_due[mst_prj][mst_wrk][mst_due] = []
        mst = mst.decode('utf8')
        if mst not in msts:
            mstid = unique_slugify(mst, separator='_',
                                   unique_id=True).encode('utf8')
            # milestone record: tickets, per-owner efforts, dependencies...
            msts[mst] = {
                'prj': mst_prj,
                'due': (mst_due <= min_mst_due and min_mst_due or mst_due),
                't': [],
                'own': {},
                'wrk': mst_wrk,
                'dep': [],
                'id': mstid,
                'prty': 1
            }
            msts_due[mst_prj][mst_wrk][mst_due].append(mst)
        msts[mst]['t'].append(id)
        if id in tkts:
            herror("Ticket '%s' already found in dict %s (%s, %s)" %
                   (id, tkts[id], owner, a_link(TICKET_URL, id)))
            continue
        if not owner:
            herror("Ticket '%s' has no owner (%s)" %
                   (id, a_link(TICKET_URL, id)))
        tkts[id] = {
            'sum': summary,
            'status': status,
            'owner': owner,
            'prj': prj,
            'estim': estimated,
            'hours': hours,
            'mst': mst
        }
        if owner not in msts[mst]['own']:
            msts[mst]['own'][owner] = {'effort': 0.0, 't': [], 'done': 0.0}
        msts[mst]['own'][owner]['t'].append(id)
        msts[mst]['own'][owner]['done'] += hours

        if owner not in resources:
            resources[owner] = {'res': 'cust', 'prj': []}
        if mst_prj not in resources[owner]['prj']:
            resources[owner]['prj'].append(mst_prj)

        # remaining effort rules: no estimate -> skip; nothing done -> full
        # estimate; overrun -> estimate * factor; otherwise the remainder
        if estimated == 0:
            herror("Estimated hour not set for ticket (%s, %s)" %
                   (owner, a_link(TICKET_URL, id)))
            continue
        elif hours == 0:
            msts[mst]['own'][owner]['effort'] += estimated
        elif hours > estimated:
            msts[mst]['own'][owner]['effort'] += (estimated *
                                                  EFFORT_EXCEED_FACTOR)
        else:
            msts[mst]['own'][owner]['effort'] += (estimated - hours)

    # calculate mst order: set the priority
    for prj in msts_due:
        for wrk in msts_due[prj]:
            # earliest due date gets the highest priority, decreasing by 50
            p = 1000
            for due in sorted(msts_due[prj][wrk]):  # sorted by due date
                for mst in msts_due[prj][wrk][due]:
                    if p > 1:
                        msts[mst]['prty'] = p
                    else:
                        msts[mst]['prty'] = 1
                p -= 50
    # find blocking milestone from blocking tickets
    for mst in msts:
        for tkt in msts[mst]['t']:
            if tkt not in tkts_links:
                continue  # no blocking
            for blck in tkts_links[tkt]:
                if not blck in tkts:
                    herror(
                        "Blocking ticket '%s' not found in due milestone tickets"
                        % (a_link(TICKET_URL, blck)))
                    continue
                blck_mst = msts[tkts[blck]['mst']]['id']
                # skipping self milestone dependency
                if tkts[blck]['mst'] != mst and blck_mst not in msts[mst][
                        'dep']:
                    msts[mst]['dep'].append(blck_mst)
    # group resources
    resources_gp = {'dll': [], 'ext': [], 'cust': []}
    for usr in sorted(resources.keys()):
        res = resources[usr].pop('res')
        # non-dll resources without any followed project are dropped
        if res != 'dll' and not resources[usr]['prj']:
            continue
        resources_gp[res].append((usr, resources[usr]))

    verbose("Records number: %d, Tickets number: %d" %
            (len(records), tickets_nb))
    print >> sys.stderr, "# Tickets number: %d<br />" % tickets_nb

    # generate trac.tjp file
    template = env.get_template('trac.tjp')
    rendered = template.render(
        prj_start=datetime.strftime(prj_start, "%Y-%m-%d-%H:%M"))
    write_to(outfiles, 'tjp', rendered.encode('utf8'))
    # generate resources.tji
    getLeaves(dsn)
    template = env.get_template('resources.tji')
    rendered = template.render(leaves=leaves,
                               resources=resources_gp,
                               prjs=msts_due)
    write_to(outfiles, 'resources', rendered.encode('utf8'))
    # generate reports.tji
    template = env.get_template('reports.tji')
    rendered = template.render(prjs=msts_due)
    write_to(outfiles, 'reports', rendered.encode('utf8'))
    # generate tasks.tji
    template = env.get_template('tasks.tji')
    rendered = template.render(prjs=msts_due, msts=msts)
    write_to(outfiles, 'tasks', rendered.encode('utf8'))

    close_outfiles(outfiles)
Exemplo n.º 10
0
            onModifyProject(brain.getObject(), None)
        ret = ps.restrictedTraverse('clean_budget/display')()
        verbose("After : {}".format(ret.split('<br />\n')[0]))
    transaction.commit()


# usage text shown when the script-number argument is missing or unknown
info = [
    "You can pass following parameters (with the first one always script number):",
    "1: various"
]

# dispatch table: script number (sys.argv[3]) -> migration function
scripts = {'1': script1, '2': script2}

if len(sys.argv) < 4 or sys.argv[3] not in scripts:
    error("Bad script parameter")
    verbose('\n>> =>'.join(info))
    sys.exit(0)

# run the selected script as a privileged user (name redacted in this dump)
with api.env.adopt_user(username='******'):
    scripts[sys.argv[3]]()

### OLD scripts ###


def script1_1():
    verbose('Pst archive migrations on %s' % obj.absolute_url_path())
    from imio.project.pst.interfaces import IImioPSTProject
    from zope.interface import alsoProvides
    # consider modified schema for projectspace
    obj.portal_setup.runImportStepFromProfile('imio.project.core:default',
                                              'typeinfo',
Exemplo n.º 11
0
def add_archived():
    """Add archived columns to inforius exported file

    Reads a csv tree file and splits its mixed 'archived' column into
    two new columns: 'Archivé farde' (set when an archived line is a
    folder) and 'Archivé chemise' (set when it is a subfolder), then
    writes the result to a new '*_archived.csv' file.
    """
    parser = argparse.ArgumentParser(
        description='From mixed archived column, add 2 new columns')
    parser.add_argument('-p',
                        '--parts',
                        dest='parts',
                        help='Run parts: 1 (add archived), 2 (write)',
                        default='12')
    parser.add_argument('tree_file', help='Tree file (csv format)')
    parser.add_argument(
        '-c',
        '--config',
        dest='tree_conf',
        required=True,
        help=
        'Tree file configuration: "separator|archived col|sf id col|sf tit col" (starting at 0). '
        'Like: ;|1||')
    ns = parser.parse_args()
    verbose("Start of %s" % sys.argv[0])
    verbose("Reading tree file '{}'".format(ns.tree_file))
    tree_confs = ns.tree_conf.split('|')
    if len(tree_confs) != 4:
        error("config parameter not well formated: {}".format(ns.tree_conf))
        parser.print_help()
        sys.exit(1)
    sep, arc_col, id_col, tit_col = tree_confs[0], int(tree_confs[1]), int(
        tree_confs[2]), int(tree_confs[3])
    lines = read_csv(ns.tree_file, strip_chars=' ', delimiter=sep)
    # first line holds the column titles: extend it with the 2 new columns
    titles = lines.pop(0)
    titles.extend(['Archivé farde', 'Archivé chemise'])
    new_lines = [titles]
    if '1' in ns.parts:
        for i, line in enumerate(lines, start=1):
            # ln_nb: line number in the original file (titles were popped)
            ln_nb = i + 1
            archived = line[arc_col]  # '0', '1', 'VRAI', 'FAUX'
            f_arc = sf_arc = ''
            # normalize French booleans to '0'/'1'
            if archived == 'VRAI':
                archived = '1'
            elif archived == 'FAUX':
                archived = '0'
            if archived not in ('0', '1'):
                error("{}, bad archived value '{}'".format(ln_nb, archived))
            else:
                if line[id_col] or line[tit_col]:  # is subfolder
                    if archived == '1':
                        sf_arc = '1'
                else:  # is folder
                    if archived == '1':
                        f_arc = '1'
            line.extend([f_arc, sf_arc])
            new_lines.append(line)
    if '2' in ns.parts:
        new_file = ns.tree_file.replace('.csv', '_archived.csv')
        with open(new_file, 'wb') as csvfile:
            csvwriter = csv.writer(
                csvfile, delimiter=sep,
                quoting=csv.QUOTE_NONNUMERIC)  # csv.QUOTE_ALL
            for line in new_lines:
                csvwriter.writerow(line)
    verbose("End of %s" % sys.argv[0])
Exemplo n.º 12
0
def compare_tree_files():
    """Compares a tree file with a reference tree file

    Part 1 loads the reference csv into ref_dic {id: {'u': usage, 't':
    title}}, part 2 loads the tree csv into tree_dic {code: {'l': line,
    'c': column, 't': title}}, part 3 (only when parts == '123') reports
    ids present in the tree but missing from the reference, and title
    mismatches.
    """
    parser = argparse.ArgumentParser(
        description='Compare a tree file with a reference one')
    parser.add_argument(
        '-p',
        '--parts',
        dest='parts',
        help='Run parts: 1 (load ref), 2 (load tree), 3 (compare)',
        default='123')
    parser.add_argument('-r',
                        '--reference',
                        dest='ref_file',
                        help='Reference file (csv format)')
    parser.add_argument(
        '-rc',
        '--reference_config',
        dest='ref_conf',
        help=
        'Reference file configuration: "skip lines|separator|id col|title col" (starting at 0). '
        'Like: 1|;|0|1')
    parser.add_argument('-f',
                        '--file',
                        dest='tree_file',
                        help='Tree file (csv format)',
                        required=True)
    parser.add_argument(
        '-fc',
        '--file_config',
        dest='tree_conf',
        required=True,
        help=
        'Tree file configuration: "skip lines|separator|id cols|title col" (starting at 0). '
        'Like: 1|;|0,4|1')
    parser.add_argument('-u',
                        '--unicity',
                        action='store_true',
                        dest='check_unicity',
                        help='Check code unicity')
    ns = parser.parse_args()
    verbose("Start of %s" % sys.argv[0])
    if '1' in ns.parts:
        if not ns.ref_file or not ns.ref_conf:
            error("Missing -r or -rc parameters for part 1 !")
            parser.print_help()
            sys.exit(1)
        verbose("Reading ref file '{}'".format(ns.ref_file))
        ref_confs = ns.ref_conf.split('|')
        if len(ref_confs) != 4:
            error("rc parameter not well formated: {}".format(ns.ref_conf))
            parser.print_help()
            sys.exit(1)
        skip_lines, ref_id_col, ref_tit_col = int(ref_confs[0]), int(
            ref_confs[2]), int(ref_confs[3])
        lines = read_csv(ns.ref_file,
                         skip_lines=skip_lines,
                         delimiter=ref_confs[1])
        ref_dic = {}
        # enumerate from the first non-skipped file line number
        for i, line in enumerate(lines, start=skip_lines + 1):
            k = line[ref_id_col]
            if k in ref_dic:
                error("Ref id already exists: {} : {} <=> {}".format(
                    k, ref_dic[k]['t'], line[ref_tit_col]))
            else:
                if not re.match(decimal_identifier, k):
                    error("{}, bad ref identifier value '{}', '{}'".format(
                        i, k, line[ref_tit_col]))
                # 'u' tracks usage, filled during part 3 comparison
                ref_dic[k] = {'u': '', 't': line[ref_tit_col]}

    if '2' in ns.parts:
        verbose("Reading tree file '{}'".format(ns.tree_file))
        tree_confs = ns.tree_conf.split('|')
        if len(tree_confs) != 4:
            error("fc parameter not well formated: {}".format(ns.tree_conf))
            parser.print_help()
            sys.exit(1)
        # tree_tit_col is kept as a string: empty means "no title column"
        skip_lines, tree_tit_col = int(tree_confs[0]), tree_confs[3]
        tree_id_cols = [int(c) for c in tree_confs[2].split(',')]
        lines = read_csv(ns.tree_file,
                         skip_lines=skip_lines,
                         delimiter=tree_confs[1])
        tree_dic = OrderedDict()
        for i, line in enumerate(lines, start=skip_lines + 1):
            for j, id_col in enumerate(tree_id_cols):
                code = line[id_col]
                if not code:
                    continue
                # a cell may hold several comma-separated codes
                for k in code.split(','):
                    # title only kept for the first id column
                    v = tree_tit_col and j == 0 and line[int(
                        tree_tit_col)] or ''
                    if k not in tree_dic:
                        # A. = specific to comblain
                        if not k.startswith('A.') and not re.match(
                                decimal_identifier, k):
                            error(
                                "{},{}, bad tree identifier value '{}', '{}'".
                                format(i, id_col, k, v))
                        tree_dic[k] = {'l': i, 'c': id_col, 't': v}
                    elif ns.check_unicity:
                        error("{}, id '{}' already found line {}".format(
                            i, k, tree_dic[k]['l']))
    # comparison needs both dicts: only run when all three parts were asked
    if '123' == ns.parts:
        verbose("Comparing...")
        for k in sorted(tree_dic):
            tdk = tree_dic[k]
            o_k = k
            if k[0:1] in ('1', '2', '3', '4'):  # must begin with '-'
                k = '-{}'.format(k)
            if k in ref_dic:
                ref_dic[k]['u'] = 'd'  # direct usage
                if tdk['t'] and tdk['t'] != ref_dic[k]['t']:
                    print("{},{}, id '{}', different titles: '{}' <=> '{}'".
                          format(tdk['l'], tdk['c'], k, tdk['t'],
                                 ref_dic[k]['t']))
            elif not k.startswith('A.'):  # specific to comblain
                com = "{},{}, id '{}', not found in ref".format(
                    tdk['l'], tdk['c'], o_k)
                if tdk['t']:
                    com += " (tit='{}')".format(tdk['t'])
                print(com)
    verbose("End of %s" % sys.argv[0])
Exemplo n.º 13
0
def add_parent():
    """Add parent column

    Reads a csv tree file, computes for each line the parent code (from
    an explicit parent column, or derived from a decimal classification
    code) and writes a new '*_parent.csv' file with a 'Parent' column
    appended.
    """
    parser = argparse.ArgumentParser(description='Analyse code to find parent')
    parser.add_argument(
        '-p',
        '--parts',
        dest='parts',
        help='Run parts: 1 (load codes), 2 (get parent), 3 (write)',
        default='123')
    parser.add_argument('tree_file', help='Tree file (csv format)')
    parser.add_argument(
        '-c',
        '--config',
        dest='tree_conf',
        required=True,
        help=
        'Tree file configuration: "separator|code col|id col|id parent" (starting at 0). '
        'Like: ;|1||')
    parser.add_argument('-u',
                        '--unicity',
                        action='store_true',
                        dest='check_unicity',
                        help='Check code unicity')
    ns = parser.parse_args()
    verbose("Start of %s" % sys.argv[0])
    verbose("Reading tree file '{}'".format(ns.tree_file))
    tree_confs = ns.tree_conf.split('|')
    if len(tree_confs) != 4:
        error("config parameter not well formated: {}".format(ns.tree_conf))
        parser.print_help()
        sys.exit(1)
    # id_col / id_parent stay strings: empty means "column not present"
    sep, code_col, id_col, id_parent = tree_confs[0], int(
        tree_confs[1]), tree_confs[2], tree_confs[3]
    has_id = id_col != ''
    has_parent = id_parent != ''
    lines = read_csv(ns.tree_file, strip_chars=' ', delimiter=sep)
    code_ids = {}  # store code and id
    all_ids = {}  # store id and code
    titles = lines.pop(0)
    titles.append('Parent')
    cols_nb = len(titles)
    new_lines = [titles]
    if '1' in ns.parts or '2' in ns.parts or '3' in ns.parts:
        # part 1: index codes -> ids (from the id column or the line number)
        for i, line in enumerate(lines, start=1):
            ln_nb = i + 1
            code = line[code_col]
            if not code:
                continue
            if code in code_ids:
                if ns.check_unicity:
                    error("{}, code already found '{}'".format(ln_nb, code))
            else:
                cid = has_id and int(line[int(id_col)]) or i
                code_ids[code] = cid
                all_ids[cid] = code
    next_id = 1

    def get_next_id(nid):
        # find the first unused id
        while nid in all_ids:
            nid += 1
        # NOTE(review): all_ids is a dict, .append would raise
        # AttributeError — only reachable from the disabled 'elif False'
        # branch below, so the bug is currently dormant
        all_ids.append(nid)
        return nid

    if '2' in ns.parts or '3' in ns.parts:
        # part 2: compute the parent code for each line
        for i, line in enumerate(lines, start=1):
            ln_nb = i + 1
            code = line[code_col]
            if not code:
                parent_code = ''
            elif has_parent:
                # NOTE(review): this maps the code's own id back to the code
                # itself (all_ids[code_ids[code]] == code) — looks wrong for
                # an explicit parent column; confirm intent
                cid = code_ids[code]
                parent_code = all_ids[cid]
            elif re.match(decimal_identifier, code):
                parent_code = get_decimal_parent(code)
                if parent_code is None:
                    parent_code = ''
                # disabled branch: would create missing intermediate levels
                elif False:
                    if parent_code in code_ids:
                        parent_code = code_ids[parent_code]
                    else:
                        parents = get_parents(parent_code)
                        prev_parent_id = ''
                        for level in parents:
                            if level not in code_ids:
                                next_id = get_next_id(next_id)
                                code_ids[level] = next_id
                                new_line = [''] * cols_nb
                                new_line[code_col] = level
                                nid_col = not has_id and -2 or int(id_col)
                                new_line[nid_col] = next_id
                                new_line[-1] = prev_parent_id
                                new_lines.append(new_line)
                                verbose("{}, added decimal level '{}'".format(
                                    ln_nb, level))
                            else:
                                prev_parent_id = code_ids[level]
                        parent_code = code_ids[parent_code]
            else:
                error("{}, bad ref identifier value '{}'".format(ln_nb, code))
                parent_code = '!'
            line.append(str(parent_code))
            new_lines.append(line)
    if '3' in ns.parts:
        # part 3: write the result next to the input file
        new_file = ns.tree_file.replace('.csv', '_parent.csv')
        with open(new_file, 'wb') as csvfile:
            csvwriter = csv.writer(csvfile,
                                   delimiter=sep,
                                   quoting=csv.QUOTE_NONNUMERIC)
            for line in new_lines:
                csvwriter.writerow(line)
    verbose("End of %s" % sys.argv[0])