def upgrade():
    op.create_table(
        "runs",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("datetime", sa.DateTime(), nullable=True),
        sa.Column("job_trigger_id", sa.Integer(), nullable=True),
        sa.Column("srpm_build_id", sa.Integer(), nullable=True),
        sa.Column("copr_build_id", sa.Integer(), nullable=True),
        sa.Column("koji_build_id", sa.Integer(), nullable=True),
        sa.Column("test_run_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["copr_build_id"],
            ["copr_builds.id"],
        ),
        sa.ForeignKeyConstraint(
            ["job_trigger_id"],
            ["build_triggers.id"],
        ),
        sa.ForeignKeyConstraint(
            ["koji_build_id"],
            ["koji_builds.id"],
        ),
        sa.ForeignKeyConstraint(
            ["srpm_build_id"],
            ["srpm_builds.id"],
        ),
        sa.ForeignKeyConstraint(
            ["test_run_id"],
            ["tft_test_runs.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.add_column("tft_test_runs",
                  sa.Column("submitted_time", sa.DateTime(), nullable=True))

    # Start data migration
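    # The data migration below creates one RunModel row per pipeline
    # (job trigger + SRPM build + Copr/Koji build + test run) and, at the end
    # of upgrade(), drops the old direct foreign keys from the build and
    # test-run tables.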

    bind = op.get_bind()
    session = orm.Session(bind=bind)

    all_run_models = 0
    (
        deleted_copr_builds_for_no_srpm,
        all_copr_builds,
        fixed_srpm_matching_from_copr_build,
    ) = (0, 0, 0)
    (
        deleted_koji_builds_for_no_srpm,
        all_koji_builds,
        fixed_srpm_matching_from_koji_build,
    ) = (0, 0, 0)

    # Removing the builds without SRPMBuildModel set in JobTriggerModel.
    # Add matching between SRPMBuildModel and JobTriggerModel
    #     if we have srpm_build set as a build property.
    for job_trigger_model in session.query(JobTriggerModel).all():
        if not job_trigger_model.srpm_builds:
            for copr_build in job_trigger_model.copr_builds:
                if copr_build.srpm_build:
                    print(
                        f"Fixing SRPM matching: {copr_build.srpm_build} -> {copr_build.job_trigger}"
                    )
                    fixed_srpm_matching_from_copr_build += 1
                    copr_build.srpm_build.job_trigger = job_trigger_model
                    session.add(copr_build.srpm_build)
                else:
                    deleted_copr_builds_for_no_srpm += 1
                    all_copr_builds += 1
                    session.delete(copr_build)
            for koji_build in job_trigger_model.koji_builds:
                if koji_build.srpm_build:
                    print(
                        f"Fixing SRPM matching: {koji_build.srpm_build} -> {koji_build.job_trigger}"
                    )
                    fixed_srpm_matching_from_koji_build += 1
                    koji_build.srpm_build.job_trigger = job_trigger_model
                    session.add(koji_build.srpm_build)
                else:
                    deleted_koji_builds_for_no_srpm += 1
                    all_koji_builds += 1
                    session.delete(koji_build)

    # Remove the CoprBuildModel if there is no SRPMBuildModel set as a CoprBuildModel property.
    copr_builds_without_srpm = 0
    for copr_build in session.query(CoprBuildModel).all():
        all_copr_builds += 1
        if not copr_build.srpm_build:
            copr_builds_without_srpm += 1
            session.delete(copr_build)
            continue

        all_run_models += 1
        run_model = RunModel()
        run_model.job_trigger = copr_build.job_trigger
        run_model.srpm_build = copr_build.srpm_build
        run_model.copr_build = copr_build
        run_model.datetime = copr_build.srpm_build.build_submitted_time
        session.add(run_model)

    # Remove the KojiBuildModel if there is no SRPMBuildModel set as a KojiBuildModel property.
    koji_builds_without_srpm = 0
    for koji_build in session.query(KojiBuildModel).all():
        all_koji_builds += 1
        if not koji_build.srpm_build:
            koji_builds_without_srpm += 1
            session.delete(koji_build)
            continue

        all_run_models += 1
        run_model = RunModel()
        run_model.job_trigger = koji_build.job_trigger
        run_model.srpm_build = koji_build.srpm_build
        run_model.datetime = koji_build.srpm_build.build_submitted_time
        run_model.koji_build = koji_build
        session.add(run_model)

    all_test_runs = 0
    test_runs_deleted = 0
    test_runs_attached = 0

    number_of_builds_and_tests_differ = 0
    run_models_successful = 0

    for job_trigger_model in session.query(JobTriggerModel).order_by(
            JobTriggerModel.id):
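        # Pair each TFT test run with a successful Copr build that shares the
        # same (commit_sha, target); groups where the counts differ have their
        # test runs deleted rather than matched by guesswork.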
        copr_builds = defaultdict(list)
        for copr_build in job_trigger_model.copr_builds:
            if copr_build.status != "success":
                continue
            copr_builds[(copr_build.commit_sha,
                         copr_build.target)].append(copr_build)

        test_runs = defaultdict(list)
        for test_run in job_trigger_model.test_runs:
            all_test_runs += 1
            test_runs[(test_run.commit_sha, test_run.target)].append(test_run)

        for ((commit, target), test_group) in test_runs.items():
            matching_builds = copr_builds[(commit, target)]
            if len(matching_builds) != len(test_group):
                number_of_builds_and_tests_differ += 1
                for test_run in test_group:
                    test_runs_deleted += 1
                    session.delete(test_run)
            else:
                run_models_successful += 1
                for test, build in zip(test_group, matching_builds):
                    if len(build.runs) != 1:
                        raise PackitException(
                            f"Build {build} does not have exactly one run:\n"
                            f"{build.runs}")
                    test_runs_attached += 1
                    build.runs[-1].test_run = test
                    session.add(build.runs[-1])

    srpm_builds_removed_for_no_job_trigger = 0
    for srpm_build in session.query(SRPMBuildModel).all():
        if not srpm_build.job_trigger:
            srpm_builds_removed_for_no_job_trigger += 1
            session.delete(srpm_build)

    srpms_without_build = 0
    # Create RunModel for SRPMBuildModels without any build.
    for job_trigger_model in session.query(JobTriggerModel).all():
        if job_trigger_model.id == 5504:
            print(f"job_trigger_model={job_trigger_model}\n"
                  f"runs={job_trigger_model.runs}\n"
                  f"srpm_builds={job_trigger_model.srpm_builds}")
        if not job_trigger_model.copr_builds and not job_trigger_model.koji_builds:
            for srpm_build in job_trigger_model.srpm_builds:
                print(
                    f"Creating RunModel for SRPMBuildModel without any build: {srpm_build}"
                )
                all_run_models += 1
                srpms_without_build += 1
                run_model = RunModel()
                run_model.job_trigger = srpm_build.job_trigger
                run_model.datetime = srpm_build.build_submitted_time
                run_model.srpm_build = srpm_build
                session.add(run_model)
                assert srpm_build.runs

    srpms_without_run = 0
    for srpm_build in session.query(SRPMBuildModel).all():
        if not srpm_build.runs:
            print(
                f"Creating RunModel for SRPMBuildModel without any RunModel: {srpm_build}"
            )
            all_run_models += 1
            srpms_without_run += 1
            run_model = RunModel()
            run_model.job_trigger = srpm_build.job_trigger
            run_model.datetime = srpm_build.build_submitted_time
            run_model.srpm_build = srpm_build
            session.add(run_model)
            assert srpm_build.runs

    print("================================")
    print(f"SRPM models without any build: {srpms_without_build}")
    print(
        f"SRPM models without any run (RunModel created): {srpms_without_run}")
    print(f"SRPM models removed because of no connection to any job trigger: "
          f"{srpm_builds_removed_for_no_job_trigger}")
    print("================================")
    print(f"All Copr builds: {all_copr_builds}")
    print(
        f"Copr builds deleted for no SRPM for trigger: {deleted_copr_builds_for_no_srpm}"
    )
    print(f"Copr builds deleted for no SRPM set: {copr_builds_without_srpm}")
    print(f"Fixed SRPM matching to trigger model from Copr build: "
          f"{fixed_srpm_matching_from_copr_build}")
    print("================================")
    print(f"All Koji builds: {all_koji_builds}")
    print(
        f"Koji builds deleted for no SRPM for trigger: {deleted_koji_builds_for_no_srpm}"
    )
    print(f"Koji builds deleted for no SRPM set: {koji_builds_without_srpm}")
    print(f"Fixed SRPM matching to trigger model from Koji build: "
          f"{fixed_srpm_matching_from_koji_build}")
    print("================================")
    print(f"All Test runs: {all_test_runs}")
    print(f"Attached correctly to build: {test_runs_attached}")
    print(f"All Run models: {all_run_models}")
    print(
        f"Run models with different number of tests and builds: {number_of_builds_and_tests_differ}"
    )
    print(f"Run models with test run correctly set: {run_models_successful}")
    print("================================")

    # Check:
    for srpm_build in session.query(SRPMBuildModel).all():
        if not srpm_build.runs:
            raise PackitException(
                f"SRPMBuildModel without any run: {srpm_build}")

    for copr_build in session.query(CoprBuildModel).all():
        srpm_builds = {run.srpm_build for run in copr_build.runs}
        if len(srpm_builds) != 1:
            raise PackitException(
                f"More SRPM builds for one copr_build {copr_build}:\n{srpm_builds}"
            )

    for koji_build in session.query(KojiBuildModel).all():
        srpm_builds = {run.srpm_build for run in koji_build.runs}
        if len(srpm_builds) != 1:
            raise PackitException(
                f"More SRPM builds for one koji_build {koji_build}:\n{srpm_builds}"
            )

    run_model_count = 0
    for run_model in session.query(RunModel).all():
        run_model_count += 1
        if not run_model.srpm_build:
            raise PackitException(
                f"Run model does not have SRPM build set: {run_model}")

    session.commit()

    # Remove direct connections:

    op.drop_constraint("copr_builds_job_trigger_id_fkey",
                       "copr_builds",
                       type_="foreignkey")
    op.drop_constraint("copr_builds_srpm_build_id_fkey1",
                       "copr_builds",
                       type_="foreignkey")
    op.drop_column("copr_builds", "job_trigger_id")
    op.drop_column("copr_builds", "srpm_build_id")
    op.drop_constraint("koji_builds_srpm_build_id_fkey",
                       "koji_builds",
                       type_="foreignkey")
    op.drop_constraint("koji_builds_job_trigger_id_fkey",
                       "koji_builds",
                       type_="foreignkey")
    op.drop_column("koji_builds", "job_trigger_id")
    op.drop_column("koji_builds", "srpm_build_id")
    op.drop_constraint("srpm_builds_job_trigger_id_fkey",
                       "srpm_builds",
                       type_="foreignkey")
    op.drop_column("srpm_builds", "job_trigger_id")
    op.drop_constraint("tft_test_runs_job_trigger_id_fkey",
                       "tft_test_runs",
                       type_="foreignkey")
    op.drop_column("tft_test_runs", "job_trigger_id")
Example 2
def upgrade():
    Base.metadata.bind = op.get_bind()
    session = orm.Session(bind=Base.metadata.bind)

    # Reset auto-increment ids
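    # (The sequences presumably lag behind max(id) because earlier data
    # migrations inserted rows with explicit ids.)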
    op.get_bind().execute(
        """SELECT setval('event_id_seq', (SELECT max(id) FROM event));""")
    op.get_bind().execute(
        """SELECT setval('application_form_id_seq', (SELECT max(id) FROM application_form));"""
    )
    op.get_bind().execute(
        """SELECT setval('section_id_seq', (SELECT max(id) FROM section));""")
    op.get_bind().execute(
        """SELECT setval('question_id_seq', (SELECT max(id) FROM question));"""
    )

    # Add event
    DeepLearningIndaba2020 = Event(
        'Deep Learning Indaba 2020',
        'The Deep Learning Indaba 2020, Tunis, Tunisia',
        datetime.date(2020, 8, 23), datetime.date(2020, 8, 28),
        'indaba2020', 1,
        '*****@*****.**', 'http://www.deeplearningindaba.com/',
        datetime.date(2020, 3, 1), datetime.date(2020, 4, 17),
        datetime.date(2020, 4, 25), datetime.date(2020, 5, 15),
        datetime.date(2020, 5, 15), datetime.date(2020, 6, 1),
        datetime.date(2020, 6, 1), datetime.date(2020, 7, 31),
        datetime.date(2020, 6, 1), datetime.date(2020, 7, 31), 'EVENT')

    session.add(DeepLearningIndaba2020)
    session.commit()

    app_form = ApplicationForm(DeepLearningIndaba2020.id, False, True)
    session.add(app_form)
    session.commit()

    # Add Section
    main_section = Section(
        app_form.id, 'Deep Learning Indaba 2020 Application Form',
        'This is the official application form to apply for participation in the Deep Learning Indaba to be held 23-28 August 2020 in Tunis, Tunisia.',
        1)
    session.add(main_section)
    session.commit()

    main_q1 = Question(
        app_form.id,
        main_section.id,
        'multi-choice',
        'Application Category',
        'Category',
        1,
        None,
        description='Please select the option that best describes you',
        options=[{
            'label': 'An undergraduate student',
            'value': 'undergrad'
        }, {
            'label': 'A masters student',
            'value': 'masters'
        }, {
            'label': 'A PhD student',
            'value': 'phd'
        }, {
            'label': 'A Post-doc',
            'value': 'postdoc'
        }, {
            'label': 'Academic Faculty',
            'value': 'faculty'
        }, {
            'label': 'Industry Professional',
            'value': 'industry'
        }, {
            'label': 'Student at a coding academy',
            'value': 'student-coding-academy'
        }, {
            'label': 'Unemployed',
            'value': 'unemployed'
        }])
    session.add(main_q1)

    demographics = Section(app_form.id, 'Demographics', '', 2)
    session.add(demographics)
    session.commit()

    demographics_q1 = Question(app_form.id,
                               demographics.id,
                               'multi-choice',
                               'Country of nationality',
                               'Country of nationality',
                               1,
                               None,
                               options=get_country_list(session))
    demographics_q2 = Question(app_form.id,
                               demographics.id,
                               'multi-choice',
                               'Country of residence',
                               'Country of residence',
                               2,
                               None,
                               options=get_country_list(session))
    demographics_q3 = Question(app_form.id,
                               demographics.id,
                               'multi-choice',
                               'Gender',
                               'Gender',
                               3,
                               None,
                               options=[{
                                   'label': 'Male',
                                   'value': 'male'
                               }, {
                                   'label': 'Female',
                                   'value': 'female'
                               }, {
                                   'label': 'Transgender',
                                   'value': 'transgender'
                               }, {
                                   'label':
                                   'Gender variant/non-conforming',
                                   'value':
                                   'gender-variant/non-conforming'
                               }, {
                                   'label': 'Prefer not to say',
                                   'value': 'prefer-not-to-say'
                               }])
    demographics_q4 = Question(
        app_form.id,
        demographics.id,
        'multi-choice',
        'Disabilities',
        'Disabilities',
        4,
        None,
        description=
        'We collect this information to ensure that we provide suitable facilities at our venue.',
        options=[{
            "label": "No disabilities",
            "value": "none"
        }, {
            "label": "Sight disability",
            "value": "sight"
        }, {
            "label": "Hearing disability",
            "value": "hearing"
        }, {
            "label": "Communication disability",
            "value": "communication"
        }, {
            "label": "Physical disability(e.g. difficulty in walking)",
            "value": "physical"
        }, {
            "label":
            "Mental disability(e.g. difficulty in remembering or concentrating)",
            "value": "mental"
        }, {
            "label": "Difficulty in self-care",
            "value": "self-care"
        }, {
            "label": "Other",
            "value": "other"
        }])
    demographics_q5 = Question(
        app_form.id,
        demographics.id,
        'short-text',
        'Affiliation',
        'Affiliation',
        5,
        None,
        description='The university / institution / company you are based at')
    demographics_q6 = Question(
        app_form.id,
        demographics.id,
        'short-text',
        'Department',
        'Department',
        6,
        None,
        description=
        'The department or field of study you fall under at your affiliation')

    session.add_all([
        demographics_q1, demographics_q2, demographics_q3, demographics_q4,
        demographics_q5, demographics_q6
    ])

    about_you = Section(
        app_form.id, 'Tell Us a Bit About You', """
                         Please use this section to tell us a bit more about yourself and your intentions as a future Deep Learning Indaba ambassador
                         Take your time to fill in this section as it has the highest impact on whether or not you will be offered a place at the Indaba!""",
        3)
    session.add(about_you)
    session.commit()

    about_you_q1 = Question(
        app_form.id,
        about_you.id,
        'long-text',
        'Why is attending the Deep Learning Indaba 2020 important to you?',
        'Enter 100 to 200 words',
        1,
        validation_regex=r'^\s*(\S+(\s+|$)){100,200}$',
        validation_text='You must enter 100 to 200 words',
        description='Enter 100 to 200 words')

    about_you_q2 = Question(
        app_form.id,
        about_you.id,
        'long-text',
        'How will you share what you have learned after the Indaba?',
        'Enter 50 to 150 words',
        2,
        validation_regex=r'^\s*(\S+(\s+|$)){50,150}$',
        validation_text='You must enter 50 to 150 words',
        description='Enter 50 to 150 words')
    about_you_q2.depends_on_question_id = main_q1.id
    about_you_q2.show_for_values = [
        'undergrad', 'masters', 'phd', 'postdoc',
        'student-coding-academy', 'unemployed'
    ]

    about_you_q3 = Question(
        app_form.id,
        about_you.id,
        'long-text',
        'How will you use your experience at the Deep Learning Indaba to impact your teaching, research, supervision, and/or institution?',
        'Enter 50 to 150 words',
        3,
        validation_regex=r'^\s*(\S+(\s+|$)){50,150}$',
        validation_text='You must enter 50 to 150 words',
        description='Enter 50 to 150 words')
    about_you_q3.depends_on_question_id = main_q1.id
    about_you_q3.show_for_values = ['faculty']

    about_you_q4 = Question(
        app_form.id,
        about_you.id,
        'long-text',
        'Share with us a favourite machine learning resource you use: a paper, blog post, algorithm, result, or finding. Tell us why.',
        'Enter up to 80 words',
        4,
        validation_regex=r"^\s*(\S+(\s+|$)){0,80}$",
        validation_text='You must enter up to 80 words',
        description='Enter up to 80 words, remember to include *why*')
    about_you_q4.depends_on_question_id = main_q1.id
    about_you_q4.show_for_values = [
        'undergrad', 'masters', 'phd', 'postdoc',
        'student-coding-academy', 'unemployed', 'faculty'
    ]

    about_you_q5 = Question(
        app_form.id,
        about_you.id,
        'long-text',
        'Are you or have you been a tutor for any relevant course, or part of any machine learning or data science society or meetup? If yes, give details.',
        'Enter up to 80 words',
        5,
        validation_regex=r'^\s*(\S+(\s+|$)){0,80}$',
        validation_text='You must enter up to 80 words',
        description='Enter up to 80 words')
    about_you_q5.depends_on_question_id = main_q1.id
    about_you_q5.show_for_values = [
        'undergrad', 'masters', 'phd', 'postdoc',
        'student-coding-academy', 'unemployed'
    ]

    about_you_q6 = Question(
        app_form.id,
        about_you.id,
        'long-text',
        'Have you taught any Machine Learning courses at your institution or supervised any postgraduate students on Machine Learning projects?',
        'Enter up to 80 words',
        6,
        validation_regex=r'^\s*(\S+(\s+|$)){0,80}$',
        validation_text='You must enter up to 80 words',
        description='Enter up to 80 words')
    about_you_q6.depends_on_question_id = main_q1.id
    about_you_q6.show_for_values = ['faculty']

    about_you_q7 = Question(
        app_form.id,
        about_you.id,
        'multi-choice',
        'Are you currently actively involved in Machine Learning Research?',
        'Choose an option',
        7,
        None,
        description='Choosing no will not count against you',
        options=[{
            'label': 'Yes',
            'value': 'yes'
        }, {
            'label': 'No',
            'value': 'no'
        }])
    about_you_q8 = Question(
        app_form.id,
        about_you.id,
        'long-text',
        'Add a short abstract describing your current research',
        'Enter 150 to 250 words',
        8,
        validation_regex=r'^\s*(\S+(\s+|$)){150,250}$',
        validation_text='You must enter 150 to 250 words',
        description=
        'This can be completed research or research in progress. Remember to include a description of your methodology and any key results you have so far.'
    )
    about_you_q8.depends_on_question_id = about_you_q7.id
    about_you_q8.show_for_values = ['yes']

    about_you_q9 = Question(
        app_form.id,
        about_you.id,
        'multi-choice',
        'Would you be interested in submitting an extended abstract or paper to the Indaba Symposium if you are selected to attend the Indaba?',
        'Choose an option',
        9,
        None,
        description="We won't hold you to this, it's just to gauge interest.")
    about_you_q9.depends_on_question_id = about_you_q7.id
    about_you_q9.show_for_values = ['yes']

    about_you_q10 = Question(
        app_form.id,
        about_you.id,
        'long-text',
        'Have you worked on a project that uses machine learning? Give a short description.',
        'Enter up to 150 words',
        10,
        validation_regex=r'^\s*(\S+(\s+|$)){0,150}$',
        validation_text='You must enter up to 150 words')
    about_you_q10.depends_on_question_id = about_you_q7.id
    about_you_q10.show_for_values = ['no']

    about_you_q11 = Question(app_form.id,
                             about_you.id,
                             'file',
                             'Upload CV',
                             None,
                             11,
                             None,
                             is_required=True)
    about_you_q11.depends_on_question_id = main_q1.id
    about_you_q11.show_for_values = [
        'undergrad', 'masters', 'phd', 'postdoc',
        'student-coding-academy', 'unemployed'
    ]

    about_you_q12 = Question(
        app_form.id,
        about_you.id,
        'multi-choice',
        'May we add your CV and email address to a database for sharing with our sponsors?',
        'Choose an option',
        12,
        None,
        options=[{
            'label': 'Yes',
            'value': 'yes'
        }, {
            'label': 'No',
            'value': 'no'
        }])
    about_you_q12.depends_on_question_id = main_q1.id
    about_you_q12.show_for_values = [
        'undergrad', 'masters', 'phd', 'postdoc',
        'student-coding-academy', 'unemployed'
    ]

    session.add_all([
        about_you_q1, about_you_q2, about_you_q3, about_you_q4, about_you_q5,
        about_you_q6, about_you_q7, about_you_q8, about_you_q9, about_you_q10,
        about_you_q11, about_you_q12
    ])

    travel_support = Section(
        app_form.id, 'Travel Support',
        """ We may be able to sponsor the cost of travel and accommodation for some attendees. These travel awards are limited and highly competitive,
                               but are assessed independently of Indaba attendance: applying for a travel award neither enhances nor undermines your chances of being accepted.
                               To help as many people attend as possible, before applying for travel support, please check if your supervisor, department or university is able to support you in any way.
                               """, 4)
    travel_support.depends_on_question_id = main_q1.id
    travel_support.show_for_values = [
        'undergrad', 'masters', 'phd', 'postdoc',
        'student-coding-academy', 'unemployed', 'faculty'
    ]
    session.add(travel_support)
    session.commit()

    travel_support_q1 = Question(
        app_form.id,
        travel_support.id,
        'multi-choice',
        'Would you like to be considered for a travel grant?',
        'Choose an option',
        1,
        None,
        description=
        'Travel awards will be used to cover the costs of return flights to the host city and/or accommodation in shared dorm rooms close to the venue.',
        options=[{
            'label': 'Travel',
            'value': 'travel'
        }, {
            'label': 'Accommodation',
            'value': 'accommodation'
        }, {
            'label': 'Travel and Accommodation',
            'value': 'travel-and-accommodation'
        }, {
            'label': 'None',
            'value': 'none'
        }])
    travel_support_q2 = Question(
        app_form.id,
        travel_support.id,
        'short-text',
        'Please state your intended airport of departure.',
        'Airport of Departure',
        2,
        None,
        description=
        'Please note that we will only provide flights in the form of a return ticket to and from a single airport on the continent of Africa.'
    )
    travel_support_q2.depends_on_question_id = travel_support_q1.id
    travel_support_q2.show_for_values = ['travel', 'travel-and-accommodation']

    travel_support_q3 = Question(
        app_form.id,
        travel_support.id,
        'multi-choice',
        'If you do not receive a travel award from us, will you still be able to attend?',
        'Choose an option',
        3,
        None,
        options=[{
            'label': 'Yes',
            'value': 'yes'
        }, {
            'label': 'No',
            'value': 'no'
        }])
    travel_support_q3.depends_on_question_id = travel_support_q1.id
    travel_support_q3.show_for_values = [
        'travel', 'accommodation', 'travel-and-accommodation'
    ]

    travel_support_q4 = Question(
        app_form.id,
        travel_support.id,
        'long-text',
        'Would you like to be considered for a registration fee waiver? If so, please provide a motivation',
        'Enter up to 80 words',
        4,
        None,
        description='Enter up to 80 words',
        is_required=False)
    travel_support_q4.depends_on_question_id = main_q1.id
    travel_support_q4.show_for_values = ['faculty']

    session.add_all([
        travel_support_q1, travel_support_q2, travel_support_q3,
        travel_support_q4
    ])

    attendance = Section(
        app_form.id, 'Previous Attendance',
        'Help us quantify our progress by giving us some info on your previous Indaba experiences.',
        5)
    session.add(attendance)
    session.commit()

    attendance_q1 = Question(
        app_form.id,
        attendance.id,
        'multi-checkbox',
        'Did you attend a previous edition of the Indaba?',
        None,
        1,
        None,
        is_required=False,
        options=[{
            'label': 'Indaba 2017',
            'value': 'indaba-2017'
        }, {
            'label': 'Indaba 2018',
            'value': 'indaba-2018'
        }, {
            'label': 'Indaba 2019',
            'value': 'indaba-2019'
        }])
    attendance_q2 = Question(
        app_form.id,
        attendance.id,
        'long-text',
        'Tell us how your previous attendance has helped you grow or how you have used what you have learned.',
        'Enter up to 150 words',
        2,
        validation_regex=r'^\s*(\S+(\s+|$)){0,150}$',
        validation_text='You must enter up to 150 words',
        description='Enter up to 150 words')
    attendance_q2.depends_on_question_id = attendance_q1.id
    attendance_q2.show_for_values = [
        'indaba-2017', 'indaba-2018', 'indaba-2019', 'indaba-2017-indaba-2018',
        'indaba-2017-indaba-2019', 'indaba-2018-indaba-2019',
        'indaba-2017-indaba-2018-indaba-2019'
    ]

    session.add_all([attendance_q1, attendance_q2])

    additional_info = Section(app_form.id, 'Additional Information',
                              'Anything else you may want us to know', 6)
    session.add(additional_info)
    session.commit()

    info_q1 = Question(
        app_form.id,
        additional_info.id,
        'long-text',
        'Any additional comments or remarks for the selection committee',
        'Enter up to 80 words',
        1,
        validation_regex=r'^\s*(\S+(\s+|$)){0,80}$',
        validation_text='You must enter up to 80 words',
        description='Enter up to 80 words',
        is_required=False)
    info_q2 = Question(
        app_form.id,
        additional_info.id,
        'long-text',
        'Anything else you think relevant, for example links to personal webpage, papers, GitHub/code repositories, community and outreach activities.',
        'Enter up to 80 words',
        2,
        validation_regex=r'^\s*(\S+(\s+|$)){0,80}$',
        validation_text='You must enter up to 80 words',
        description='Enter up to 80 words',
        is_required=False)
    info_q3 = Question(
        app_form.id,
        additional_info.id,
        'multi-choice',
        'If you are selected to attend the Indaba, would you also like to attend the AI Hack Tunisia.',
        'Choose an option',
        3,
        None,
        description=
        'The AI Hack Tunisia will take place in the week of the 31st of August. Accepted Indaba attendees will automatically qualify for a place at the event.',
        options=[{
            'label': 'Yes',
            'value': 'yes'
        }, {
            'label': 'No',
            'value': 'no'
        }, {
            'label': 'Maybe',
            'value': 'maybe'
        }])
    session.add_all([info_q1, info_q2, info_q3])
Example 3
def upgrade():
    bind = op.get_bind()
    tables = Inspector.from_engine(bind).get_table_names()

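    # Create the new tables only when they are missing, so the migration
    # tolerates re-runs against a partially upgraded schema.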
    if "study_directions" not in tables:
        op.create_table(
            "study_directions",
            sa.Column("study_direction_id", sa.Integer(), nullable=False),
            sa.Column(
                "direction",
                sa.Enum("NOT_SET",
                        "MINIMIZE",
                        "MAXIMIZE",
                        name="studydirection"),
                nullable=False,
            ),
            sa.Column("study_id", sa.Integer(), nullable=False),
            sa.Column("objective", sa.Integer(), nullable=False),
            sa.ForeignKeyConstraint(
                ["study_id"],
                ["studies.study_id"],
            ),
            sa.PrimaryKeyConstraint("study_direction_id"),
            sa.UniqueConstraint("study_id", "objective"),
        )

    if "trial_intermediate_values" not in tables:
        op.create_table(
            "trial_intermediate_values",
            sa.Column("trial_intermediate_value_id",
                      sa.Integer(),
                      nullable=False),
            sa.Column("trial_id", sa.Integer(), nullable=False),
            sa.Column("step", sa.Integer(), nullable=False),
            sa.Column("intermediate_value", sa.Float(), nullable=False),
            sa.ForeignKeyConstraint(
                ["trial_id"],
                ["trials.trial_id"],
            ),
            sa.PrimaryKeyConstraint("trial_intermediate_value_id"),
            sa.UniqueConstraint("trial_id", "step"),
        )

    session = orm.Session(bind=bind)
    try:
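        # Copy each study's single direction into the new study_directions
        # table as objective 0.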
        studies_records = session.query(StudyModel).all()
        objects = [
            StudyDirectionModel(study_id=r.study_id,
                                direction=r.direction,
                                objective=0) for r in studies_records
        ]
        session.bulk_save_objects(objects)

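        # Move the per-step values out of trial_values into the new
        # trial_intermediate_values table before repurposing trial_values.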
        intermediate_values_records = session.query(
            TrialValueModel.trial_id, TrialValueModel.value,
            TrialValueModel.step).all()
        objects = [
            TrialIntermediateValueModel(trial_id=r.trial_id,
                                        intermediate_value=r.value,
                                        step=r.step)
            for r in intermediate_values_records
        ]
        session.bulk_save_objects(objects)

        session.query(TrialValueModel).delete()
        session.commit()

        with op.batch_alter_table("trial_values", schema=None) as batch_op:
            batch_op.add_column(
                sa.Column("objective", sa.Integer(), nullable=False))
            # The name of this constraint is manually determined.
            # In the future, the naming convention may be determined based on
            # https://alembic.sqlalchemy.org/en/latest/naming.html
            batch_op.create_unique_constraint(
                "uq_trial_values_trial_id_objective",
                ["trial_id", "objective"])

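        # Re-populate trial_values with a single row per trial holding its
        # final value as objective 0.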
        trials_records = session.query(TrialModel).all()
        objects = [
            TrialValueModel(trial_id=r.trial_id, value=r.value, objective=0)
            for r in trials_records
        ]
        session.bulk_save_objects(objects)

        session.commit()
    except SQLAlchemyError as e:
        session.rollback()
        raise e
    finally:
        session.close()

    with op.batch_alter_table("studies", schema=None) as batch_op:
        batch_op.drop_column("direction")

    with op.batch_alter_table("trial_values", schema=None) as batch_op:
        batch_op.drop_column("step")

    with op.batch_alter_table("trials", schema=None) as batch_op:
        batch_op.drop_column("value")
Example 4
def downgrade():
    # Get alembic DB bind
    connection = op.get_bind()
    session = orm.Session(bind=connection)

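    # Nothing to revert here: the session is opened and committed without
    # making any changes, so this downgrade is effectively a no-op.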
    session.commit()
Example 5
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('app_users',
                    sa.Column('id', neuroAPI.database.ext.GUID(), nullable=False, comment='User id'),
                    sa.Column('username', sa.String(length=64), nullable=False, comment='User name'),
                    sa.Column('salt', sa.CHAR(length=16), nullable=False, comment='Password salt'),
                    sa.Column('password', sa.CHAR(length=64), nullable=False,
                              comment='sha-256("salt"+":"+"user password")'),
                    sa.Column('user_status', sa.Enum('admin', 'basic', name='userstatus'), nullable=False,
                              comment='Sets privileges'),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('username'),
                    comment="Stores user data. Not named 'users' because of PostgreSQL keyword."
                    )
    op.create_table('content_types',
                    sa.Column('id', neuroAPI.database.ext.GUID(), nullable=False, comment='Content type id'),
                    sa.Column('name', sa.String(length=127), nullable=False, comment='MIME content type'),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('name'),
                    comment='Stores MIME content_types, e.g.: application/vnd.openxmlformats-officedocument.spreadsheetml.sheet (.xlsx), application/vnd.ms-excel (.xls), application/vnd.ms-excel.sheet.binary.macroEnabled.12 (.xlsb), text/csv (.csv)'
                    )
    op.create_table('cross_validations',
                    sa.Column('id', neuroAPI.database.ext.GUID(), nullable=False, comment='Cross-validation id'),
                    sa.Column('name', sa.String(length=64), nullable=False, comment='Cross-validation name'),
                    sa.PrimaryKeyConstraint('id'),
                    comment='Stores cross-validations.'
                    )
    op.create_table('deposits',
                    sa.Column('id', neuroAPI.database.ext.GUID(), nullable=False, comment='Deposit id'),
                    sa.Column('username', sa.String(length=64), nullable=False, comment='Deposit name'),
                    sa.PrimaryKeyConstraint('id'),
                    comment='Stores deposit data.'
                    )
    op.create_table('metrics',
                    sa.Column('id', neuroAPI.database.ext.GUID(), nullable=False, comment='Metric id'),
                    sa.Column('name', sa.String(length=64), nullable=False, comment='Metric name'),
                    sa.Column('description', sa.Text(), nullable=True, comment='Metric description, e.g. formulae'),
                    sa.Column('type', sa.Enum('class_stat', 'overall_stat', name='metrictype'), nullable=False,
                              comment='Metric type'),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('name'),
                    comment='Stores metrics.'
                    )
    op.create_table('deposit_borders',
                    sa.Column('deposit_id', neuroAPI.database.ext.GUID(), nullable=False, comment='Deposit id'),
                    sa.Column('point_type', sa.Enum('max', 'min', name='borderpointtype'), nullable=False,
                              comment='Border point type'),
                    sa.Column('x_value', sa.Numeric(), nullable=False, comment='Point value on x-axis'),
                    sa.Column('y_value', sa.Numeric(), nullable=False, comment='Point value on y-axis'),
                    sa.Column('z_value', sa.Numeric(), nullable=False, comment='Point value on z-axis'),
                    sa.ForeignKeyConstraint(['deposit_id'], ['deposits.id'], ),
                    sa.PrimaryKeyConstraint('deposit_id', 'point_type'),
                    comment='Stores deposit borders data.'
                    )
    op.create_table('deposit_owners',
                    sa.Column('deposit_id', neuroAPI.database.ext.GUID(), nullable=False, comment='Deposit id'),
                    sa.Column('user_id', neuroAPI.database.ext.GUID(), nullable=False, comment='User id'),
                    sa.ForeignKeyConstraint(['deposit_id'], ['deposits.id'], ),
                    sa.ForeignKeyConstraint(['user_id'], ['app_users.id'], ),
                    sa.PrimaryKeyConstraint('deposit_id', 'user_id'),
                    comment='Links users and owned deposits.'
                    )
    op.create_table('files',
                    sa.Column('id', neuroAPI.database.ext.GUID(), nullable=False, comment='File id'),
                    sa.Column('name', sa.String(length=255), nullable=False,
                              comment='original filename with extension, e.g. "text.xlsx"'),
                    sa.Column('data_type', neuroAPI.database.ext.GUID(), nullable=False, comment='MIME content type'),
                    sa.Column('description', sa.Text(), nullable=True, comment='Long description'),
                    sa.Column('date_added', sa.DateTime(), nullable=False, comment='When was created'),
                    sa.Column('content', sa.LargeBinary(), nullable=False, comment='File itself in binary'),
                    sa.ForeignKeyConstraint(['data_type'], ['content_types.id'], ),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('name', 'data_type'),
                    comment='Stores files.'
                    )
    op.create_index('file_index', 'files', ['name', 'data_type'], unique=False)
    op.create_table('neural_models',
                    sa.Column('id', neuroAPI.database.ext.GUID(), nullable=False, comment='Neural model id'),
                    sa.Column('deposit_id', neuroAPI.database.ext.GUID(), nullable=False, comment='Deposit id'),
                    sa.Column('block_size', sa.Numeric(), nullable=False, comment='Neural model block size'),
                    sa.Column('max_epochs', sa.Integer(), nullable=False, comment='Max epoch count'),
                    sa.Column('cross_validation_id', neuroAPI.database.ext.GUID(), nullable=True,
                              comment='Cross-validation grouping entity id'),
                    sa.Column('structure', sa.LargeBinary(), nullable=False, comment='NM structure'),
                    sa.Column('weights', sa.LargeBinary(), nullable=False, comment='NM weights'),
                    sa.ForeignKeyConstraint(['cross_validation_id'], ['cross_validations.id'], ),
                    sa.ForeignKeyConstraint(['deposit_id'], ['deposits.id'], ),
                    sa.PrimaryKeyConstraint('id'),
                    comment='Stores neural models.'
                    )
    op.create_table('rocks',
                    sa.Column('id', neuroAPI.database.ext.GUID(), nullable=False, comment='Rock id'),
                    sa.Column('deposit_id', neuroAPI.database.ext.GUID(), nullable=False, comment='Deposit id'),
                    sa.Column('index', sa.Integer(), autoincrement=True, nullable=False,
                              comment='Rock index in deposit'),
                    sa.Column('name', sa.String(length=64), nullable=False, comment='Rock name'),
                    sa.Column('color', sa.CHAR(length=7), nullable=True, comment='Rock hex color, e.g. "#FFFFFF"'),
                    sa.ForeignKeyConstraint(['deposit_id'], ['deposits.id'], ),
                    sa.PrimaryKeyConstraint('id'),
                    comment='Store rock data.'
                    )
    op.create_table('user_contact_information',
                    sa.Column('user_id', neuroAPI.database.ext.GUID(), nullable=False, comment='User id'),
                    sa.Column('contact_info_type',
                              sa.Enum('firstname', 'lastname', 'email', name='contactinformationtype'), nullable=False,
                              comment='CI type'),
                    sa.Column('contact_info_value', sa.String(length=320), nullable=True, comment='CI value'),
                    sa.ForeignKeyConstraint(['user_id'], ['app_users.id'], ),
                    sa.PrimaryKeyConstraint('user_id', 'contact_info_type'),
                    comment='Stores user contact information.'
                    )
    op.create_table('wells',
                    sa.Column('id', neuroAPI.database.ext.GUID(), nullable=False, comment='Well id'),
                    sa.Column('deposit_id', neuroAPI.database.ext.GUID(), nullable=False, comment='Deposit id'),
                    sa.Column('head_x', sa.Numeric(), nullable=False, comment='Head point value on x-axis'),
                    sa.Column('head_y', sa.Numeric(), nullable=False, comment='Head point value on y-axis'),
                    sa.Column('head_z', sa.Numeric(), nullable=False, comment='Head point value on z-axis'),
                    sa.Column('tail_x', sa.Numeric(), nullable=False, comment='Tail point value on x-axis'),
                    sa.Column('tail_y', sa.Numeric(), nullable=False, comment='Tail point value on y-axis'),
                    sa.Column('tail_z', sa.Numeric(), nullable=False, comment='Tail point value on z-axis'),
                    sa.ForeignKeyConstraint(['deposit_id'], ['deposits.id'], ),
                    sa.PrimaryKeyConstraint('id'),
                    comment='Store wells.'
                    )
    op.create_table('deposit_files',
                    sa.Column('deposit_id', neuroAPI.database.ext.GUID(), nullable=False, comment='Deposit id'),
                    sa.Column('file_id', neuroAPI.database.ext.GUID(), nullable=False, comment='File id'),
                    sa.ForeignKeyConstraint(['deposit_id'], ['deposits.id'], ),
                    sa.ForeignKeyConstraint(['file_id'], ['files.id'], ),
                    sa.PrimaryKeyConstraint('deposit_id', 'file_id'),
                    comment='Lists links to deposit’s files.'
                    )
    op.create_table('known_blocks',
                    sa.Column('id', neuroAPI.database.ext.GUID(), nullable=False, comment='Known block id'),
                    sa.Column('well_id', neuroAPI.database.ext.GUID(), nullable=False, comment='This block well id'),
                    sa.Column('size', sa.Numeric(), nullable=False, comment='Block size'),
                    sa.Column('center_x', sa.Numeric(), nullable=False, comment='Center point value on x-axis'),
                    sa.Column('center_y', sa.Numeric(), nullable=False, comment='Center point value on y-axis'),
                    sa.Column('center_z', sa.Numeric(), nullable=False, comment='Center point value on z-axis'),
                    sa.Column('content', neuroAPI.database.ext.GUID(), nullable=False, comment='Rock on this block'),
                    sa.ForeignKeyConstraint(['content'], ['rocks.id'], ),
                    sa.ForeignKeyConstraint(['well_id'], ['wells.id'], ),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('well_id', 'size', 'center_x', 'center_y', 'center_z'),
                    comment='Stores known blocks.'
                    )
    op.create_table('neural_models_excluded_wells',
                    sa.Column('neural_model_id', neuroAPI.database.ext.GUID(), nullable=False,
                              comment='Neural model id'),
                    sa.Column('well_id', neuroAPI.database.ext.GUID(), nullable=False, comment='Well id'),
                    sa.ForeignKeyConstraint(['neural_model_id'], ['neural_models.id'], ),
                    sa.ForeignKeyConstraint(['well_id'], ['wells.id'], ),
                    sa.PrimaryKeyConstraint('neural_model_id', 'well_id'),
                    comment='Lists excluded wells from training.'
                    )
    op.create_table('neural_models_metrics',
                    sa.Column('neural_model_id', neuroAPI.database.ext.GUID(), nullable=False,
                              comment='Neural model id'),
                    sa.Column('metric_id', neuroAPI.database.ext.GUID(), nullable=False, comment='Metric id'),
                    sa.Column('epoch', sa.Integer(), nullable=False, comment='Current epoch'),
                    sa.Column('rock_id', neuroAPI.database.ext.GUID(), nullable=True,
                              comment='Rock id (if metric.type = class_stat)'),
                    sa.Column('value', sa.Text(), nullable=False, comment='Metric value'),
                    sa.ForeignKeyConstraint(['metric_id'], ['metrics.id'], ),
                    sa.ForeignKeyConstraint(['neural_model_id'], ['neural_models.id'], ),
                    sa.ForeignKeyConstraint(['rock_id'], ['rocks.id'], ),
                    sa.PrimaryKeyConstraint('neural_model_id', 'metric_id', 'epoch'),
                    comment='Lists metric data.'
                    )
    op.create_table('well_intervals',
                    sa.Column('well_id', neuroAPI.database.ext.GUID(), nullable=False, comment='Well id'),
                    sa.Column('position', sa.Integer(), nullable=False, comment='Interval position from head'),
                    sa.Column('from_x', sa.Numeric(), nullable=False, comment='From point value on x-axis'),
                    sa.Column('from_y', sa.Numeric(), nullable=False, comment='From point value on y-axis'),
                    sa.Column('from_z', sa.Numeric(), nullable=False, comment='From point value on z-axis'),
                    sa.Column('to_x', sa.Numeric(), nullable=False, comment='To point value on x-axis'),
                    sa.Column('to_y', sa.Numeric(), nullable=False, comment='To point value on y-axis'),
                    sa.Column('to_z', sa.Numeric(), nullable=False, comment='To point value on z-axis'),
                    sa.ForeignKeyConstraint(['well_id'], ['wells.id'], ),
                    sa.PrimaryKeyConstraint('well_id', 'position'),
                    comment='Lists well’s intervals.'
                    )
    op.create_table('predicted_blocks',
                    sa.Column('id', neuroAPI.database.ext.GUID(), nullable=False, comment='Predicted block id'),
                    sa.Column('neural_model_id', neuroAPI.database.ext.GUID(), nullable=True,
                              comment='Neural model id'),
                    sa.Column('center_x', sa.Numeric(), nullable=False, comment='Center point value on x-axis'),
                    sa.Column('center_y', sa.Numeric(), nullable=False, comment='Center point value on y-axis'),
                    sa.Column('center_z', sa.Numeric(), nullable=False, comment='Center point value on z-axis'),
                    sa.Column('known_block_id', neuroAPI.database.ext.GUID(), nullable=True,
                              comment='paired Known block'),
                    sa.Column('content', neuroAPI.database.ext.GUID(), nullable=False, comment='Rock on this block'),
                    sa.ForeignKeyConstraint(['content'], ['rocks.id'], ),
                    sa.ForeignKeyConstraint(['known_block_id'], ['known_blocks.id'], ),
                    sa.ForeignKeyConstraint(['neural_model_id'], ['neural_models.id'], ),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('neural_model_id', 'center_x', 'center_y', 'center_z'),
                    comment='Stores predicted blocks.'
                    )
    op.create_index('known_block_index', 'predicted_blocks', ['known_block_id'], unique=False, postgresql_using='hash')
    op.create_table('predicted_blocks_outputs',
                    sa.Column('predicted_block_id', neuroAPI.database.ext.GUID(), nullable=False,
                              comment='Predicted block id'),
                    sa.Column('rock_id', neuroAPI.database.ext.GUID(), nullable=False, comment='Rock id'),
                    sa.Column('value', sa.Numeric(), nullable=False, comment='probability [0, 1]'),
                    sa.ForeignKeyConstraint(['predicted_block_id'], ['predicted_blocks.id'], ),
                    sa.ForeignKeyConstraint(['rock_id'], ['rocks.id'], ),
                    sa.PrimaryKeyConstraint('predicted_block_id', 'rock_id'),
                    comment='Lists predicted block outputs.'
                    )
    # ### end Alembic commands ###

    # ### omelched:INIT content_types

    from sqlalchemy import orm

    from neuroAPI.database.models import ContentType

    bind = op.get_bind()
    session = orm.Session(bind=bind)

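    # Seed content_types with the spreadsheet and CSV MIME types referenced
    # in the content_types table comment above.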
    xlsx = ContentType(name='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
    xls = ContentType(name='application/vnd.ms-excel')
    xlsb = ContentType(name='application/vnd.ms-excel.sheet.binary.macroEnabled.12')
    csv = ContentType(name='text/csv')

    session.add_all([xlsx, xls, xlsb, csv])

    session.commit()
Example 6
def update_question_data():
    Base.metadata.bind = op.get_bind()
    session = orm.Session(bind=Base.metadata.bind)
    update_question(session, 1, 'Enter 50 to 150 words',
                    r'^\W*(\w+(\W+|$)){50,150}$')
    update_question(session, 2, 'Enter 50 to 150 words',
                    r'^\W*(\w+(\W+|$)){50,150}$')
    update_question(session, 3,
                    'Enter up to 80 words, remember to include *why*',
                    r'^\W*(\w+(\W+|$)){0,80}$')
    update_question(session, 4, 'Enter up to 80 words',
                    r'^\W*(\w+(\W+|$)){0,80}$')
    update_question(session, 5, 'Enter up to 150 words',
                    r'^\W*(\w+(\W+|$)){0,150}$')
    update_question(session, 9, 'Enter up to 150 words',
                    r'^\W*(\w+(\W+|$)){0,150}$')
    update_question(session, 10, 'Departure Airport', None)
    update_question(session, 18, 'Enter up to 150 words',
                    r'^\W*(\w+(\W+|$)){0,150}$')

    cv_question = session.query(Question).filter(Question.id == 7).first()
    cv_question.placeholder = 'Select an option...'
    cv_question.options = [{
        'value': 'yes',
        'label': 'Yes'
    }, {
        'value': 'no',
        'label': 'No'
    }]

    travel_question = session.query(Question).filter(Question.id == 8).first()
    travel_question.placeholder = 'Select an option...'
    travel_question.options = [{
        'value': 'transport-accommodation',
        'label': 'Yes, transport and accommodation'
    }, {
        'value': 'transport',
        'label': 'Yes, transport only'
    }, {
        'value': 'accommodation',
        'label': 'Yes, accommodation only'
    }, {
        'value': 'none',
        'label': 'None'
    }]

    reward_question = session.query(Question).filter(Question.id == 11).first()
    reward_question.placeholder = 'Select an option...'
    reward_question.options = [{
        'value': 'yes',
        'label': 'Yes'
    }, {
        'value': 'no',
        'label': 'No'
    }]

    previous_question = session.query(Question).filter(
        Question.id == 15).first()
    previous_question.placeholder = 'Select an option...'
    previous_question.options = [{
        'value': 'indaba-2017',
        'label': 'Yes, I attended the 2017 Indaba'
    }, {
        'value': 'indaba-2018',
        'label': 'Yes, I attended the 2018 Indaba'
    }, {
        'value': 'indaba-2017-2018',
        'label': 'Yes, I attended both Indabas'
    }, {
        'value': 'none',
        'label': 'No'
    }]

    grow_question = session.query(Question).filter(Question.id == 16).first()
    grow_question.placeholder = 'Enter up to 150 words'
    grow_question.validation_regex = r'^\W*(\w+(\W+|$)){0,150}$'
    grow_question.description = 'Maximum 150 words.'

    # Remove the demographic information section
    session.query(Question).filter(Question.section_id == 4).delete()
    session.query(Section).filter(Section.id == 4).delete()

    session.commit()
    session.flush()
Example 7
def get_session(engine):
    session = orm.Session(engine)
    return session
Example 8
def upgrade():
    Base.metadata.bind = op.get_bind()
    session = orm.Session(bind=Base.metadata.bind)

    event = session.query(Event).filter_by(key='prc').first()
    form = session.query(ApplicationForm).filter_by(event_id=event.id).first()

    en = (session.query(SectionTranslation).filter_by(
        language='en',
        name='AI4D Call for Proposals: Scholarships program manager').first())

    fr = (session.query(SectionTranslation).filter_by(
        language='fr',
        name='Appel à propositions IAPD: Gestionnaire des programmes de bourses'
    ).first())

    en.description = """The International Development Research Centre (IDRC) and the Swedish International Development Agency (Sida) invite proposals from qualified institutions to manage the African Artificial Intelligence for Development (AI4D) Scholarships program.  

**The goal of this call is to identify a program and financial manager for the AI4D Scholarships program**. The host institution (“scholarships manager”) will design and administer a scholarships program that will foster the talent needed to meet a growing demand for responsible Artificial Intelligence (“AI”) for development research and innovation in African public universities. The program will support two scholarship activities: (i) the African AI4D PhD Scholarships and (ii) the African AI4D Advanced Scholars Program. The African AI4D Scholarships program will provide support to the next generation of AI and related disciplines (such as machine learning) academics, practitioners and students who are focused on responsible AI innovation for sustainable development. Responsible AI strives to be inclusive, rights-based and sustainable in its development and implementation, ensuring that AI applications are leveraged for public benefit.1  

For eligibility criteria, please see [www.ai4d.ai/calls](www.ai4d.ai/calls)
For the Scholarships Manager Call for Proposals, see: [https://bit.ly/3l5OJfN](https://bit.ly/3l5OJfN)
For the full AI4D background document, see: [https://resources.ai4d.ai/files/AI4D_Proposal.pdf](https://resources.ai4d.ai/files/AI4D_Proposal.pdf)

For all questions, email [email protected]

**The deadline for submission of the proposal online is 23:59 EST on December 8, 2020.**

"""

    fr.description = """**Le présent appel a pour objectif de désigner un gestionnaire de programme et directeur financier pour le programme de bourses d’études en IAPD**. L’institution d’accueil (« gestionnaire de bourses ») concevra et administrera un programme de bourses d’études qui favorisera le développement des talents nécessaires pour répondre à une demande croissante en recherche et en innovation dans le domaine de l’intelligence artificielle (« IA ») responsable pour le développement dans les universités publiques africaines. Le programme soutiendra deux activités d’allocation de bourses : (i) les bourses de doctorat africaines en IAPD et (ii) le programme de bourses avancées africaines en IAPD. Le programme de bourses d’études africaines en IAPD apportera un soutien à la prochaine génération d’universitaires, de praticiens et d’étudiants en IA et dans les disciplines connexes (telles que l’apprentissage automatique) qui se concentrent sur l’innovation en IA responsable pour le développement durable. L’IA responsable s’efforce d’être inclusive, fondée sur les droits et durable dans son développement et sa mise en oeuvre, en veillant à ce que les applications d’IA soient exploitées au profit du public. 

Pour les critères d'éligibilité, veuillez consulter www.ai4d.ai/calls
Appel de propositions pour désigner un gestionnaire de programme et directeur financier pour le programme de bourses d’études en IAPD:https://resources.ai4d.ai/files/2020/scholarships-call/AI4D_Scholarships_CFP_FR.pdf 

Proposition de programme IAPD Afrique: https://resources.ai4d.ai/files/AI4D_Proposal.pdf

Pour toute question, veuillez envoyer un e-mail à [email protected]

**La proposition et tous les documents à l’appui demandés doivent être soumis par l’intermédiaire du site baobab.ai4d.ai au plus tard le 8 decembre 2020, à 23 h 59 (HNE).**
"""

    session.commit()

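    # The deletions below are ordered child-first (answers, then question
    # translations, then the question itself) so that nothing still references
    # the question when it is removed.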
    q = session.query(QuestionTranslation).filter_by(
        language='en', headline='Telephone').first()
    if q:
        q_id = q.id
        op.execute("""DELETE FROM Answer WHERE question_id={}""".format(q_id))
        session.query(QuestionTranslation).filter_by(question_id=q_id).delete()
        session.query(Question).filter_by(id=q_id).delete()

    q = session.query(QuestionTranslation).filter_by(
        language='en', headline='Mobile (Optional)').first()
    if q:
        q_id = q.id
        op.execute("""DELETE FROM Answer WHERE question_id={}""".format(q_id))
        session.query(QuestionTranslation).filter_by(question_id=q_id).delete()
        session.query(Question).filter_by(id=q_id).delete()

    q = session.query(QuestionTranslation).filter_by(
        language='en', headline='Email Address').first()
    if q:
        q_id = q.id
        op.execute("""DELETE FROM Answer WHERE question_id={}""".format(q_id))
        session.query(QuestionTranslation).filter_by(question_id=q_id).delete()
        session.query(Question).filter_by(id=q_id).delete()

    en = session.query(QuestionTranslation).filter_by(
        language='en',
        headline='2. Summary of the proposed approach to the financial and administrative management of the AI4D scholarships'
    ).first()
    en.description = """Briefly outline your proposal to manage the AI4D scholarship program according to the requirements outlined in section 2.1 of the Call for Proposals Background Document. What approaches, disciplines and modalities will you draw upon to support this?"""

    session.commit()

    en = session.query(QuestionTranslation).filter_by(
        language='en', headline='2a.Communication strategies').first()
    en.description = """Discuss your plan for launching the African AI4D PhD Scholarship and the African AI4D Advanced Scholars Program. How does your plan ensure that the program will be as inclusive as possible?"""

    session.commit()

    en = session.query(QuestionTranslation).filter_by(
        language='en', headline='3a. Gender and Inclusion Considerations').first()
    en.description = """How will your proposed approach promote increased diversity and equity in AI research? What literature or experience will you draw upon to support gender equity and linguistic equity goals in the design and execution of the calls? How will your process integrate best practices in marketing and outreach, engagement, and support for recipients?"""

    session.commit()

    en = session.query(QuestionTranslation).filter_by(
        language='en', headline='3b. Selection process and evaluation').first()
    en.description = """Broadly discuss the evaluation process for the two scholarships activities. 
How will the evaluation process ensure that the projects funded will be relevant to the AI4D Africa program’s responsible AI mandate, address ethical concerns and gender dimensions, and assess the capacity of applicants to carry out the proposed research?"""

    session.commit()
Example no. 9
def upgrade():
    # After a downgrade-and-upgrade cycle these tables may no longer exist,
    # so drop them only if they are present to avoid failing the transaction.
    tables_names = get_tables_names()
    for table_name in [
            "roles_users",
            "roles",
            "report_preferences",
            "general_preferences",
    ]:
        if table_name in tables_names:
            op.drop_table(table_name)

    if "user_oauth" in tables_names:
        if "users" in tables_names:
            op.drop_table("users")
        op.rename_table("user_oauth", "users")

    op.create_table(
        "roles",
        sa.Column("id",
                  sa.Integer(),
                  primary_key=True,
                  autoincrement=True,
                  nullable=False),
        sa.Column("name",
                  sa.String(127),
                  unique=True,
                  index=True,
                  nullable=False),
        sa.Column("description", sa.String(255)),
        sa.Column("create_date",
                  sa.DateTime(),
                  nullable=False,
                  server_default=text("now()")),
    )

    op.create_table(
        "users_to_roles",
        sa.Column("user_id",
                  sa.BigInteger(),
                  sa.ForeignKey("users.id"),
                  index=True,
                  nullable=False),
        sa.Column("role_id",
                  sa.Integer(),
                  sa.ForeignKey("roles.id"),
                  index=True,
                  nullable=False),
        sa.Column("create_date",
                  sa.DateTime(),
                  nullable=False,
                  server_default=text("now()")),
        sa.PrimaryKeyConstraint("user_id", "role_id"),
    )

    from anyway.models import Roles, Users, users_to_roles

    bind = op.get_bind()
    session = orm.Session(bind=bind)

    role_admins = Roles(
        name=BackEndConstants.Roles2Names.Admins.value,
        description="This is the default admin role.",
        create_date=datetime.datetime.now(),
    )
    session.add(role_admins)

    res = session.query(Users).with_entities(
        Users.email).filter(Users.email == ADMIN_EMAIL).first()
    if res is None:
        user = Users(
            user_register_date=datetime.datetime.now(),
            user_last_login_date=datetime.datetime.now(),
            email=ADMIN_EMAIL,
            oauth_provider_user_name=ADMIN_EMAIL,
            is_active=True,
            oauth_provider="google",
            is_user_completed_registration=True,
            oauth_provider_user_id="unknown-manual-insert",
        )
        session.add(user)

    user_id = (session.query(Users).with_entities(
        Users.id).filter(Users.email == ADMIN_EMAIL).first())

    role_id = session.query(Roles).with_entities(
        Roles.id).filter(Roles.name == "admins").first()
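    # Note: the two lookups above can see rows added earlier in this function
    # (the admin role, and the user when it was just created) because the
    # Session autoflushes pending objects before executing a query, so no
    # explicit session.flush() is needed here.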

    insert_users_to_roles = users_to_roles.insert().values(
        user_id=user_id.id,
        role_id=role_id.id,
        create_date=datetime.datetime.now(),
    )
    session.execute(insert_users_to_roles)

    session.commit()
def upgrade():
    Base.metadata.bind = op.get_bind()
    session = orm.Session(bind=Base.metadata.bind)

    event = session.query(Event).filter_by(key='prc').first()
    form = session.query(ApplicationForm).filter_by(event_id=event.id).first()

    def get_question_by_en_headline(en_headline):
        en = (session.query(QuestionTranslation)
                .filter_by(headline=en_headline, language='en')
                .join(Question)
                .filter_by(application_form_id=form.id)
                .first())
        question = en.question
        fr = question.get_translation('fr')
        return question, en, fr
        
    # Remove organisation email question
    question, en, fr = get_question_by_en_headline('Email Address')
    session.query(Answer).filter_by(question_id=question.id).delete()
    session.query(QuestionTranslation).filter_by(id=en.id).delete()
    session.query(QuestionTranslation).filter_by(id=fr.id).delete()
    session.query(Question).filter_by(id=question.id).delete()

    # Update capitalization
    question, en, fr = get_question_by_en_headline('Name of Organisation')
    en.headline = 'Name of organization'

    question, en, fr = get_question_by_en_headline('Email Address of principal contact')
    en.headline = 'Email address of principal contact'

    question, en, fr = get_question_by_en_headline('Policy research')
    en.description = """How will you answer the proposed research questions in the most rigorous way possible? Please include a discussion of the conceptual and theoretical framework/s, user participation, data collection and analysis.
    
Maximum 750 words"""

    question, en, fr = get_question_by_en_headline('1. Policy engagement')
    en.description = """Please describe the kinds of engagement your organization has had with national governing institutions (e.g. government departments, bodies and agencies, commissions, parliaments, regulators, and other public-sector institutions) on any of the above topics (from Section I, question 3)? This can include a reflection on what you have done well with communications and what you have not done so well.

Please give up to three examples of this engagement, such as requests for input, sitting on policy steering committees and meetings with policy makers.

Maximum 350 words"""
    fr.description = """Veuillez décrire les types d’interactions que votre organisation a eus avec des institutions gouvernementales nationales (p. ex. ministères et organismes gouvernementaux, commissions, parlements, organismes de réglementation et autres institutions du secteur public) en ce qui concerne les sujets susmentionnés (à la section I, question 3). Vous pouvez notamment faire part de vos réflexions sur ce que vous avez bien fait en matière de communication, ainsi que sur les éléments que vous devez améliorer.
        
Veuillez fournir un maximum de trois exemples de ces interactions (p. ex. demandes de consultation, participation à des comités directeurs, réunions avec des décideurs politiques).

Maximum 350 mots"""

    question, en, fr = get_question_by_en_headline('3. Regional Engagement')
    en.headline = '3. Regional engagement'

    question, en, fr = get_question_by_en_headline('Regional Engagement')
    en.headline = 'Regional engagement'

    section_en = session.query(SectionTranslation).filter_by(
        name='Section IV: Budget').first()
    section_en.description = """Please submit a full three year budget with notes using the Excel template available on the IDRC public website: [https://www.idrc.ca/en/resources/guides-and-forms](https://www.idrc.ca/en/resources/guides-and-forms)
(Select the ‘Proposal budget’ and download.) 

Applications that do not submit a complete budget in this template will not be considered.
"""

    session.commit()
def downgrade():
    Base.metadata.bind = op.get_bind()
    session = orm.Session(bind=Base.metadata.bind)

    event = session.query(Event).filter_by(key='prc').first()
    form = session.query(ApplicationForm).filter_by(event_id=event.id).first()

    def get_question_by_en_headline(en_headline):
        en = (session.query(QuestionTranslation)
                .filter_by(headline=en_headline, language='en')
                .join(Question)
                .filter_by(application_form_id=form.id)
                .first())
        question = en.question
        fr = question.get_translation('fr')
        return question, en, fr

    def add_question(section, order, questionType, headlines,
        descriptions=None,
        placeholders=None,
        validation_regexs=None,
        validation_texts=None,
        options=None,
        show_for_values=None,
        is_required=True,
        depends_on_question_id=None):
        question = Question(form.id, section.id, order, questionType, is_required=is_required)

        if depends_on_question_id is not None:
            question.depends_on_question_id = depends_on_question_id

        session.add(question)
        session.commit()
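        # commit (and flush) here so question.id is assigned before the
        # translations below reference it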

        translations = []
        for language in headlines:
            translations.append(QuestionTranslation(question.id, language, 
                headline=headlines[language],
                description=None if descriptions is None else descriptions[language],
                placeholder=None if placeholders is None else placeholders[language],
                validation_regex=None if validation_regexs is None else validation_regexs[language],
                validation_text=None if validation_texts is None else validation_texts[language],
                options=None if options is None else options[language],
                show_for_values=None if show_for_values is None else show_for_values[language]))
        session.add_all(translations)
        session.commit()
        return question

    section_en = session.query(SectionTranslation).filter_by(name='Organization and lead applicant contact details').first()
    add_question(section_en.section, 9, 'short-text', {
        'en': 'Email Address',
        'fr': 'Adresse de courriel'
    })

    question, en, fr = get_question_by_en_headline('Name of organization')
    en.headline = 'Name of Organisation'

    question, en, fr = get_question_by_en_headline('Email address of principal contact')
    en.headline = 'Email Address of principal contact'

    question, en, fr = get_question_by_en_headline('3. Regional engagement')
    en.headline = '3. Regional Engagement'

    question, en, fr = get_question_by_en_headline('Regional engagement')
    en.headline = 'Regional Engagement'
    
    session.commit()
Example no. 12
def upgrade():
    try:
        op.create_table('tokenowner',
                        sa.Column('id', sa.Integer()),
                        sa.Column('token_id', sa.Integer(), nullable=True),
                        sa.Column('resolver',
                                  sa.Unicode(length=120),
                                  nullable=True),
                        sa.Column('user_id',
                                  sa.Unicode(length=320),
                                  nullable=True),
                        sa.Column('realm_id', sa.Integer(), nullable=True),
                        sa.ForeignKeyConstraint(
                            ['realm_id'],
                            ['realm.id'],
                        ),
                        sa.ForeignKeyConstraint(
                            ['token_id'],
                            ['token.id'],
                        ),
                        sa.PrimaryKeyConstraint('id'),
                        mysql_row_format='DYNAMIC')
        op.create_index(op.f('ix_tokenowner_resolver'),
                        'tokenowner', ['resolver'],
                        unique=False)
        op.create_index(op.f('ix_tokenowner_user_id'),
                        'tokenowner', ['user_id'],
                        unique=False)
    except Exception as exx:
        print("Can not create table 'tokenowner'. It probably already exists")
        print(exx)

    try:
        bind = op.get_bind()
        session = orm.Session(bind=bind)
        # For each token, that has an owner, create a tokenowner entry
        for token in session.query(Token).filter(Token.user_id != "",
                                                 Token.user_id.isnot(None)):
            token_realms = session.query(TokenRealm).filter(
                TokenRealm.token_id == token.id).all()
            realm_id = None
            if not token_realms:
                sys.stderr.write(
                    u"{serial!s}, {userid!s}, {resolver!s}, "
                    u"Error while migrating token assignment. "
                    u"This token has no realm assignments!\n".format(
                        serial=token.serial,
                        userid=token.user_id,
                        resolver=token.resolver))
            elif len(token_realms) == 1:
                realm_id = token_realms[0].realm_id
            elif len(token_realms) > 1:
                # The token has more than one realm.
                # In order to figure out the right realm, we first fetch the token's resolver
                resolver = session.query(Resolver).filter_by(
                    name=token.resolver).first()
                if not resolver:
                    sys.stderr.write(
                        u"{serial!s}, {userid!s}, {resolver!s}, "
                        u"The token is assigned, but the assigned resolver can not "
                        u"be found!\n".format(serial=token.serial,
                                              userid=token.user_id,
                                              resolver=token.resolver))
                else:
                    # Then, fetch the list of ``Realm`` objects in which the token resolver is contained.
                    resolver_realms = [r.realm for r in resolver.realm_list]
                    if not resolver_realms:
                        sys.stderr.write(
                            u"{serial!s}, {userid!s}, {resolver!s}, "
                            u"The token is assigned, but the assigned resolver is not "
                            u"contained in any realm!\n".format(
                                serial=token.serial,
                                userid=token.user_id,
                                resolver=token.resolver))
                    elif len(resolver_realms) == 1:
                        # The resolver is only in one realm, so this is the new realm of the token!
                        realm_id = resolver_realms[0].id
                    elif len(resolver_realms) > 1:
                        # The resolver is contained in more than one realm, so we have to
                        # match the realms in which the resolver is contained against the
                        # realms to which the token is assigned.
                        # More specifically, we find all realms which are both a token realm and
                        # a realm of the token resolver.
                        # If there is exactly one such realm, we have found our token owner realm.
                        # If there is more than one such realm, we cannot uniquely identify a token owner realm.
                        # If there is no such realm, we have an inconsistent database.
                        found_realm_ids = []
                        found_realm_names = []
                        for token_realm in token_realms:
                            if token_realm.realm in resolver_realms:
                                # A token realm that is also a resolver realm is used as the owner realm
                                found_realm_ids.append(token_realm.realm.id)
                                found_realm_names.append(
                                    token_realm.realm.name)
                        if len(found_realm_ids) > 1:
                            sys.stderr.write(
                                u"{serial!s}, {userid!s}, {resolver!s}, Can not assign token. "
                                u"Your realm configuration for the token is not distinct! "
                                u"The tokenowner could be in multiple realms! "
                                u"The token is assigned to the following realms and the resolver is also "
                                u"contained in these realm IDs: {realms!s}.\n".
                                format(serial=token.serial,
                                       userid=token.user_id,
                                       resolver=token.resolver,
                                       realms=found_realm_names))
                        elif len(found_realm_ids) == 1:
                            realm_id = found_realm_ids[0]
                        else:
                            sys.stderr.write(
                                u"{serial!s}, {userid!s}, {resolver!s}, "
                                u"Can not assign token. The resolver is not contained in any "
                                u"realms, to which the token is assigned!\n".
                                format(serial=token.serial,
                                       userid=token.user_id,
                                       resolver=token.resolver))
            # If we could not figure out a tokenowner realm, we skip the token assignment.
            if realm_id is not None:
                to = TokenOwner(token_id=token.id,
                                user_id=token.user_id,
                                resolver=token.resolver,
                                realm_id=realm_id)
                session.add(to)
        session.commit()

        # Now we drop the columns
        op.drop_column('token', 'user_id')
        op.drop_column('token', 'resolver')
        op.drop_column('token', 'resolver_type')

    except Exception as exx:
        session.rollback()
        print("Failed to migrate token assignment data!")
        print(exx)
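
# The realm-resolution branching above can be restated as a small pure function.
# This is an illustrative sketch only (the name is made up and the stderr
# reporting of the migration is omitted); it is not part of the revision.
def resolve_owner_realm_id(token_realm_ids, resolver_realm_ids):
    # No realm assignment at all: the owner realm cannot be determined.
    if not token_realm_ids:
        return None
    # Exactly one token realm: use it directly.
    if len(token_realm_ids) == 1:
        return token_realm_ids[0]
    # Several token realms, but the resolver lives in exactly one realm: use that.
    if len(resolver_realm_ids) == 1:
        return resolver_realm_ids[0]
    # Otherwise only a unique overlap between token realms and resolver realms
    # identifies the owner realm; anything else is ambiguous or inconsistent.
    common = [r for r in token_realm_ids if r in resolver_realm_ids]
    return common[0] if len(common) == 1 else None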
Example no. 13
def upgrade():
    bind = op.get_bind()
    session = orm.Session(bind=bind)

    session.add_all([
        Tentativa("Não atende o telefone"),
        Tentativa("O usuário não estava em casa"),
        Tentativa(
            "O usuário estava em casa mas não disponível para o atendimento"),
        Tentativa("Recusou o atendimento")
    ])

    session.add_all([
        Etnia("Negra"),
        Etnia("Amarela"),
        Etnia("Branca"),
        Etnia("Parda"),
        Etnia("Indígena")
    ])

    session.add_all(
        [Genero("Masculino"),
         Genero("Feminino"),
         Genero("Não Opinou")])

    session.add_all([
        DoencaCronica("Diabetes"),
        DoencaCronica("Hipertensão"),
        DoencaCronica("Alzheimer"),
        DoencaCronica("AIDS"),
        DoencaCronica("Asma"),
        DoencaCronica("Bronquite"),
        DoencaCronica("Câncer"),
        DoencaCronica("Mal de Parkinson"),
        DoencaCronica("DPOC (Doença Pulmonar Obstrutiva Crônica)"),
        DoencaCronica("Artrite ou outras doenças reumáticas"),
        DoencaCronica("Doença renal Crônica"),
        DoencaCronica("Hanseníase")
    ])

    session.add_all([
        Parentesco("Pai/Mãe"),
        Parentesco("Filho/Filha"),
        Parentesco("Enteado/Enteada"),
        Parentesco("Tio/Tia"),
        Parentesco("Sobrinho/Sobrinha"),
        Parentesco("Avô/Avó"),
        Parentesco("Colega/Amigo/Amiga"),
        Parentesco("Marido/Esposa/Namorado/Namorada"),
        Parentesco("Primo/Prima")
    ])

    session.add_all([
        BeneficioSocial("Aposentadoria para pessoa de baixa renda"),
        BeneficioSocial("Auxílio emergencial"),
        BeneficioSocial(
            "Benefícios eventuais (cesta básica emergencial, vale-Feira, auxílio funeral)"
        ),
        BeneficioSocial("Bolsa Família"),
        BeneficioSocial("BPC ( benefício de prestação continuada)"),
        BeneficioSocial("Carteira do Idoso"),
        BeneficioSocial("CNH social"),
        BeneficioSocial("Facultativo baixa renda"),
        BeneficioSocial("ID Jovem"),
        BeneficioSocial("Isenção para serviço ambulante"),
        BeneficioSocial("Minha casa, minha vida"),
        BeneficioSocial("Programa de erradicação do trabalho infantil- PITI"),
        BeneficioSocial("Passe-livre para pessoa com deficiência"),
        BeneficioSocial("Tarifa social de água"),
        BeneficioSocial("Tarifa social de energia elétrica"),
        BeneficioSocial(
            "Não se insere em nenhum programa ou não recebe benefícíos"),
        BeneficioSocial("Não sabe informar")
    ])

    session.add_all([
        MotivoSair("Ir ao supermercado ou a farmácia"),
        MotivoSair("Trabalhar"),
        MotivoSair("Ir a banco/caixas eletrônicos"),
        MotivoSair("Ir a casa de familiares e amigos"),
        MotivoSair("Trabalho voluntário"),
        MotivoSair(
            "Ir a consultas médicas/fazer exames diagnósticos/tratamentos")
    ])

    session.add_all([
        Sintoma("Febre"),
        Sintoma("Cansaço"),
        Sintoma("Tosse Seca"),
        Sintoma("Mialgia"),
        Sintoma("Fadiga"),
        Sintoma("Congestão Nasal"),
        Sintoma("Dor de Cabeça"),
        Sintoma("Conjutivite"),
        Sintoma("Dor de Garganta"),
        Sintoma("Diarréia"),
        Sintoma("Perda de Paladar ou Olfato"),
        Sintoma("Erupção Cutânea"),
        Sintoma("Descoloração dos Dedos das Mãos e dos Pés"),
        Sintoma("Não apresentou nenhum sintoma")
    ])

    session.add_all([
        Indicador("Médico"),
        Indicador("Enfermeiro"),
        Indicador("Vizinho/Familiar/Amigo/Conhecido"),
        Indicador("Dentista"),
        Indicador("Tomou por conta própria")
    ])

    session.add_all([
        OrientacaoFinal("Encaminhamento para avaliação presencial"),
        OrientacaoFinal("Acompanhamento telefônico em 24 horas"),
        OrientacaoFinal("Acompanhamento telefônico em 48 horas"),
        OrientacaoFinal("Discussão do caso com o supervisor"),
        OrientacaoFinal("Contato com o serviço")
    ])

    session.add(TempoContatoAcompanhamento(48, 16, None))

    session.add(
        AdmSaude("Dummy User", "0101010101", "01010101010", "master",
                 "123456789"))

    session.commit()
def downgrade():

    # Recreated direct connections:

    op.add_column(
        "tft_test_runs",
        sa.Column("job_trigger_id",
                  sa.INTEGER(),
                  autoincrement=False,
                  nullable=True),
    )
    op.create_foreign_key(
        "tft_test_runs_job_trigger_id_fkey",
        "tft_test_runs",
        "build_triggers",
        ["job_trigger_id"],
        ["id"],
    )
    op.drop_column("tft_test_runs", "submitted_time")
    op.add_column(
        "srpm_builds",
        sa.Column("job_trigger_id",
                  sa.INTEGER(),
                  autoincrement=False,
                  nullable=True),
    )
    op.create_foreign_key(
        "srpm_builds_job_trigger_id_fkey",
        "srpm_builds",
        "build_triggers",
        ["job_trigger_id"],
        ["id"],
    )
    op.add_column(
        "koji_builds",
        sa.Column("srpm_build_id",
                  sa.INTEGER(),
                  autoincrement=False,
                  nullable=True),
    )
    op.add_column(
        "koji_builds",
        sa.Column("job_trigger_id",
                  sa.INTEGER(),
                  autoincrement=False,
                  nullable=True),
    )
    op.create_foreign_key(
        "koji_builds_job_trigger_id_fkey",
        "koji_builds",
        "build_triggers",
        ["job_trigger_id"],
        ["id"],
    )
    op.create_foreign_key(
        "koji_builds_srpm_build_id_fkey",
        "koji_builds",
        "srpm_builds",
        ["srpm_build_id"],
        ["id"],
    )
    op.add_column(
        "copr_builds",
        sa.Column("srpm_build_id",
                  sa.INTEGER(),
                  autoincrement=False,
                  nullable=True),
    )
    op.add_column(
        "copr_builds",
        sa.Column("job_trigger_id",
                  sa.INTEGER(),
                  autoincrement=False,
                  nullable=True),
    )
    op.create_foreign_key(
        "copr_builds_srpm_build_id_fkey1",
        "copr_builds",
        "srpm_builds",
        ["srpm_build_id"],
        ["id"],
    )
    op.create_foreign_key(
        "copr_builds_job_trigger_id_fkey",
        "copr_builds",
        "build_triggers",
        ["job_trigger_id"],
        ["id"],
    )

    # Migrate data:

    bind = op.get_bind()
    session = orm.Session(bind=bind)
    for run_model in session.query(RunModel).all():
        run_model.srpm_build.job_trigger = run_model.job_trigger

        if run_model.copr_build:
            run_model.copr_build.job_trigger = run_model.job_trigger
            run_model.copr_build.srpm_build = run_model.srpm_build
            session.add(run_model.copr_build)

        if run_model.koji_build:
            run_model.koji_build.job_trigger = run_model.job_trigger
            run_model.koji_build.srpm_build = run_model.srpm_build
            session.add(run_model.koji_build)

        if run_model.test_run:
            run_model.test_run.job_trigger = run_model.job_trigger
            session.add(run_model.test_run)

    session.commit()

    op.drop_table("runs")
def upgrade():
    cleanups: List[Callable] = []
    # SCHEMA MIGRATION
    # renaming constraint 'address' → 'building_address'
    op.create_unique_constraint('building_address', 'building',
                                ['street', 'number'])
    op.drop_constraint('address', 'building', type_='unique')

    op.create_table(
        'address',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('street', sa.String(), nullable=False),
        sa.Column('number', sa.String(), nullable=False),
        sa.Column('addition', sa.String(), nullable=True),
        sa.Column('zip_code', sa.String(), nullable=False),
        sa.Column('city',
                  sa.String(),
                  nullable=False,
                  server_default=DEFAULT_CITY),
        sa.Column('state', sa.String(), nullable=True),
        sa.Column('country',
                  sa.String(),
                  nullable=False,
                  server_default=DEFAULT_COUNTRY),

        # Temporary columns
        sa.Column('tmp_building_id', sa.Integer, nullable=True),
        sa.Column('tmp_room_id', sa.Integer, nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('street', 'number', 'addition', 'zip_code', 'city',
                            'state', 'country'))
    cleanups.append(lambda: op.drop_column('address', 'tmp_building_id'))
    cleanups.append(lambda: op.drop_column('address', 'tmp_room_id'))

    bind = op.get_bind()
    session = orm.Session(bind=bind)

    # DATA MIGRATION I: add dummy address
    dummy_id = add_dummy_address(session)
    dummy_default_clause = sa.schema.DefaultClause(f"{dummy_id}")

    # DATA MIGRATION II: add building addresses
    add_building_addresses(session)

    # DATA MIGRATION III: add room addresses
    add_room_addresses(session)

    # FURTHER SCHEMA MIGRATION…
    op.add_column(
        'room',
        sa.Column('address_id',
                  sa.Integer(),
                  nullable=False,
                  server_default=dummy_default_clause))
    cleanups.append(
        lambda: op.alter_column('room', 'address_id', server_default=None))
    op.create_index(op.f('ix_room_address_id'),
                    'room', ['address_id'],
                    unique=False)
    op.create_foreign_key(None, 'room', 'address', ['address_id'], ['id'])

    # DATA MIGRATION IV: set `address_id` to building's address for uninhabitable rooms
    set_uninhabitable_room_addresses(session)

    # DATA MIGRATION V: set `address_id` to room's address for inhabitable rooms
    set_inhabitable_room_addresses(session)

    # FURTHER SCHEMA MIGRATION…
    op.add_column(
        'user',
        sa.Column('address_id',
                  sa.Integer(),
                  nullable=False,
                  server_default=dummy_default_clause))
    cleanups.append(
        lambda: op.alter_column('user', 'address_id', server_default=None))
    op.create_index(op.f('ix_user_address_id'),
                    'user', ['address_id'],
                    unique=False)
    op.create_foreign_key(None, 'user', 'address', ['address_id'], ['id'])

    # DATA MIGRATION VI: set `user.address` for users with room
    set_user_addresses(session)

    # FURTHER SCHEMA MIGRATION (cleanup)
    for action in cleanups:
        action()
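
# A pattern worth pulling out of the migration above: a NOT NULL foreign-key column
# is added to an already-populated table by giving it a temporary server default
# (here the id of a dummy address), backfilling the real values, and then dropping
# the default again. A minimal, generic sketch of the same idea; the table and
# column names are made up for illustration.
import sqlalchemy as sa
from alembic import op


def _add_not_null_column_with_backfill(dummy_id):
    # 1) add the column with a temporary server default so existing rows satisfy NOT NULL
    op.add_column(
        'some_table',
        sa.Column('address_id',
                  sa.Integer(),
                  nullable=False,
                  server_default=sa.schema.DefaultClause(str(dummy_id))))
    # 2) backfill the real values here (data migration)
    # 3) drop the temporary default so new rows must set the value explicitly
    op.alter_column('some_table', 'address_id', server_default=None)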
def upgrade():
    conn = op.get_bind()
    orm.Session(bind=conn)

    rc_tbl = op.create_table(
        'repo_changes', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('updated', sa.DateTime(), nullable=False),
        sa.Column('deleted', sa.DateTime(), nullable=True),
        sa.Column('data',
                  postgresql.JSONB(astext_type=sa.Text()),
                  nullable=False), sa.PrimaryKeyConstraint('id'))

    op.add_column('flows', sa.Column('trigger_data_id', sa.Integer()))
    op.create_foreign_key('flows_trigger_data_id_fkey', 'flows',
                          'repo_changes', ['trigger_data_id'], ['id'])

    op.add_column('runs', sa.Column('repo_data_id', sa.Integer()))
    op.create_foreign_key('runs_repo_data_id_fkey', 'runs', 'repo_changes',
                          ['repo_data_id'], ['id'])

    print('updating flows')
    res = conn.execute(
        "SELECT id, created, trigger_data FROM flows WHERE trigger_data IS NOT NULL AND trigger_data::text != 'null'"
    )
    for flow_id, created, d in res.fetchall():
        if isinstance(d, dict):
            d = [d]
        rc = {'created': created, 'updated': created, 'data': d}
        ret = conn.execute(insert(rc_tbl).values(rc).returning(rc_tbl.c.id))
        d_id = ret.fetchone()[0]
        print('flow: %d  repo_changes %d' % (flow_id, d_id))
        conn.execute("UPDATE flows SET trigger_data_id = %d WHERE id = %d" %
                     (d_id, flow_id))

    print('updating runs')
    res = conn.execute(
        "SELECT id, created, repo_data FROM runs WHERE repo_data IS NOT NULL AND repo_data::text != 'null'"
    )
    for run_id, created, d in res.fetchall():
        if isinstance(d, dict):
            new_ds = []
            for url, commits in d.items():
                new_d = {}
                new_d['repo'] = url
                new_d['trigger'] = 'git-push'

                new_commits = []
                for c in commits:
                    nc = dict(id=c['commit'],
                              author=dict(name=c['author'], email=c['email']),
                              timestamp=c['date'],
                              message=c['subject'])
                    new_commits.append(nc)

                new_d['commits'] = new_commits
                new_d['before'] = new_commits[-1]['id']
                new_d['after'] = new_commits[0]['id']

                new_ds.append(new_d)

            d = new_ds

        rc = {'created': created, 'updated': created, 'data': d}
        ret = conn.execute(insert(rc_tbl).values(rc).returning(rc_tbl.c.id))
        d_id = ret.fetchone()[0]
        print('run: %d  repo_changes %d' % (run_id, d_id))
        conn.execute("UPDATE runs SET repo_data_id = %d WHERE id = %d" %
                     (d_id, run_id))
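
# For reference, the loop above converts the legacy runs.repo_data mapping of
# repo URL -> commit list into the list-of-changes shape stored in repo_changes.data.
# Illustrative before/after values (all field contents are made up):
legacy_repo_data = {
    'https://example.org/repo.git': [
        {'commit': 'abc123', 'author': 'Jane Doe', 'email': 'jane@example.org',
         'date': '2020-01-01T00:00:00', 'subject': 'Fix build'},
    ],
}
converted_repo_data = [{
    'repo': 'https://example.org/repo.git',
    'trigger': 'git-push',
    'commits': [{'id': 'abc123',
                 'author': {'name': 'Jane Doe', 'email': 'jane@example.org'},
                 'timestamp': '2020-01-01T00:00:00',
                 'message': 'Fix build'}],
    'before': 'abc123',  # id of the last commit in the list
    'after': 'abc123',   # id of the first commit in the list
}]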
Example no. 17
def upgrade():
    op.add_column('executions',
                  sa.Column('token', sa.String(length=100), nullable=True))

    bind = op.get_bind()
    session = orm.Session(bind=bind)
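    # The config table is created directly from the model's metadata rather than
    # via op.create_table, presumably so the seed rows below can be added through
    # the Config model.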
    Config.__table__.create(bind)

    session.add_all([
        Config(name='rest_service_log_path',
               value='/var/log/cloudify/rest/cloudify-rest-service.log',
               scope=['rest'],
               schema=None,
               is_editable=False),
        Config(name='rest_service_log_level',
               value='INFO',
               scope=['rest'],
               schema={
                   'type': 'string',
                   'enum': ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
               },
               is_editable=True),
        Config(name='ldap_server',
               value=None,
               scope=['rest'],
               schema={'type': 'string'},
               is_editable=True),
        Config(name='ldap_username',
               value=None,
               scope=['rest'],
               schema={'type': 'string'},
               is_editable=True),
        Config(name='ldap_password',
               value=None,
               scope=['rest'],
               schema={'type': 'string'},
               is_editable=True),
        Config(name='ldap_domain',
               value=None,
               scope=['rest'],
               schema={'type': 'string'},
               is_editable=True),
        Config(name='ldap_is_active_directory',
               value=None,
               scope=['rest'],
               schema={'type': 'boolean'},
               is_editable=True),
        Config(name='ldap_dn_extra',
               value=None,
               scope=['rest'],
               schema=None,
               is_editable=True),
        Config(name='ldap_timeout',
               value=5.0,
               scope=['rest'],
               schema={'type': 'number'},
               is_editable=True),
        Config(name='file_server_root',
               value='/opt/manager/resources',
               scope=['rest'],
               schema=None,
               is_editable=False),
        Config(name='file_server_url',
               value='http://127.0.0.1:53333/resources',
               scope=['rest'],
               schema=None,
               is_editable=False),
        Config(name='insecure_endpoints_disabled',
               value=True,
               scope=['rest'],
               schema={'type': 'boolean'},
               is_editable=False),
        Config(name='maintenance_folder',
               value='/opt/manager/maintenance',
               scope=['rest'],
               schema=None,
               is_editable=False),
        Config(name='min_available_memory_mb',
               value=100,
               scope=['rest'],
               schema={
                   'type': 'number',
                   'minimum': 0
               },
               is_editable=True),
        Config(name='failed_logins_before_account_lock',
               value=4,
               scope=['rest'],
               schema={
                   'type': 'number',
                   'minimum': 1
               },
               is_editable=True),
        Config(name='account_lock_period',
               value=-1,
               scope=['rest'],
               schema={
                   'type': 'number',
                   'minimum': -1
               },
               is_editable=True),
        Config(name='public_ip',
               value=None,
               scope=['rest'],
               schema=None,
               is_editable=False),
        Config(name='default_page_size',
               value=1000,
               scope=['rest'],
               schema={
                   'type': 'number',
                   'minimum': 1
               },
               is_editable=True),
        Config(name='mgmtworker_max_workers',
               value=5,
               scope=['mgmtworker'],
               schema={
                   'type': 'number',
                   'minimum': 1
               },
               is_editable=True),
        Config(name='mgmtworker_min_workers',
               value=2,
               scope=['mgmtworker'],
               schema={
                   'type': 'number',
                   'minimum': 1
               },
               is_editable=True),
        Config(name='broker_port',
               value=5671,
               scope=['agent'],
               schema={
                   'type': 'number',
                   'minimum': 1,
                   'maximum': 65535
               },
               is_editable=True),
        Config(name='min_workers',
               value=2,
               scope=['agent'],
               schema={
                   'type': 'number',
                   'minimum': 1
               },
               is_editable=True),
        Config(name='max_workers',
               value=5,
               scope=['agent'],
               schema={
                   'type': 'number',
                   'minimum': 1
               },
               is_editable=True),
        Config(name='heartbeat',
               value=30,
               scope=['agent'],
               schema={
                   'type': 'number',
                   'minimum': 0
               },
               is_editable=True),
    ])
    session.commit()

    op.create_table(
        'managers',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('hostname', sa.Text(), unique=True, nullable=False),
        sa.Column('private_ip', sa.Text(), unique=True, nullable=False),
        sa.Column('public_ip', sa.Text(), unique=True, nullable=False),
        sa.Column('version', sa.Text(), nullable=False),
        sa.Column('edition', sa.Text(), nullable=False),
        sa.Column('distribution', sa.Text(), nullable=False),
        sa.Column('distro_release', sa.Text(), nullable=False),
        sa.Column('fs_sync_node_id', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('managers_pkey')))
    op.create_table(
        'certificates',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.Text(), unique=True, nullable=False),
        sa.Column('value', sa.Text(), unique=False, nullable=False),
        sa.Column('updated_at', UTCDateTime(), nullable=True),
        sa.Column('_updater_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['_updater_id'], [u'users.id'],
                                ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id', name=op.f('certificates_pkey')))
    op.create_table(
        'rabbitmq_brokers', sa.Column('name', sa.Text(), nullable=False),
        sa.Column('host', sa.Text(), nullable=False),
        sa.Column('management_host', sa.Text(), nullable=True),
        sa.Column('port', sa.Integer()),
        sa.Column('username', sa.Text(), nullable=True),
        sa.Column('password', sa.Text(), nullable=True),
        sa.Column('params', JSONString(), nullable=True),
        sa.Column('_ca_cert_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['_ca_cert_id'], [u'certificates.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('name', name=op.f('rabbitmq_brokers_pkey')))
Example no. 18
def upgrade():
    bind = op.get_bind()
    session = orm.Session(bind=bind)

    db = Redis(
        host=getenv("REDIS_SERVICE_HOST", "localhost"),
        port=int(getenv("REDIS_SERVICE_PORT", "6379")),
        db=0,
        decode_responses=True,
    )

    # tasks
    keys = db.keys("celery-task-meta-*")
    for key in keys:
        data = loads(db.get(key))
        task_id = data.get("task_id")
        status = data.get("status")
        result = data.get("result")
        traceback = data.get("traceback")
        date_done = data.get("date_done")
        if isinstance(date_done, str):
            date_done = datetime.fromisoformat(date_done)

        # our table
        TaskResultUpgradeModel.add_task_result(session=session,
                                               task_id=task_id,
                                               task_result_dict=result)
        # celery table
        add_task_to_celery_table(
            session=session,
            task_id=task_id,
            status=status,
            result=result,
            traceback=traceback,
            date_done=date_done,
        )

    # whitelist
    db = PersistentDict(hash_name="whitelist")
    for account, data in db.get_all().items():
        if not isinstance(data, dict):
            continue

        status = data.get("status")
        WhitelistUpgradeModel.add_account(session=session,
                                          account_name=account,
                                          status=status)

    # installations
    for event in RedisInstallation.db().get_all().values():
        if not isinstance(event, dict):
            continue

        event = event["event_data"]
        account_login = event.get("account_login")
        account_id = event.get("account_id")
        account_url = event.get("account_url")
        account_type = event.get("account_type")
        sender_id = event.get("sender_id")
        sender_login = event.get("sender_login")

        created_at = event.get("created_at")
        if isinstance(created_at, (int, float)):
            created_at = datetime.fromtimestamp(created_at, timezone.utc)
        elif isinstance(created_at, str):
            created_at = created_at.replace("Z", "+00:00")
            created_at = datetime.fromisoformat(created_at)

        InstallationUpgradeModel.create(
            session=session,
            account_login=account_login,
            account_id=account_id,
            account_type=account_type,
            account_url=account_url,
            sender_login=sender_login,
            sender_id=sender_id,
            created_at=created_at,
        )

    #  copr-builds
    for copr_build in RedisCoprBuild.db().get_all().values():
        if not isinstance(copr_build, dict):
            continue

        project_name = copr_build.get("project")
        owner = copr_build.get("owner")
        chroots = copr_build.get("chroots")
        build_submitted_time = (
            datetime.fromisoformat(copr_build.get("build_submitted_time"))
            if copr_build.get("build_submitted_time") else None)
        build_id = copr_build.get("build_id")

        if not build_id:
            continue

        status = copr_build.get("status")
        web_url = (
            f"https://copr.fedorainfracloud.org/coprs/{owner}/{project_name}/"
            f"build/{build_id}/")

        try:
            project_name_list = project_name.split("-")
            if project_name_list[-1] == "stg":
                pr_id = int(project_name_list[-2])
            else:
                pr_id = int(project_name_list[-1])

            job_trigger = JobTriggerUpgradeModel.get_or_create(
                type=JobTriggerModelType.pull_request,
                trigger_id=pr_id,
                session=session,
            )
        except Exception:
            continue

        try:
            copr = Client.create_from_config_file()
            build = copr.build_proxy.get(build_id)
            build_submitted_time = datetime.fromtimestamp(build.submitted_on)
            build_start_time = datetime.fromtimestamp(build.started_on)
            build_finished_time = datetime.fromtimestamp(build.ended_on)

        except CoprNoResultException:
            build_submitted_time = build_submitted_time or datetime(
                2020, 1, 1, 0, 0, 0)
            build_start_time = datetime(2020, 1, 1, 0, 10, 0)
            build_finished_time = datetime(2020, 1, 1, 0, 20, 0)

        for chroot in chroots:
            CoprBuildUpgradeModel.get_or_create(
                session=session,
                build_id=str(build_id),
                project_name=project_name,
                owner=owner,
                target=chroot,
                status=status,
                job_trigger=job_trigger,
                web_url=web_url,
                build_submitted_time=build_submitted_time,
                build_start_time=build_start_time,
                build_finished_time=build_finished_time,
            )

    session.commit()
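
# The created_at handling above accepts either an epoch timestamp or an ISO-8601
# string (a trailing "Z" is normalised to "+00:00" first). Restated as a small
# standalone helper, purely for illustration (not part of the revision):
from datetime import datetime, timezone


def _parse_created_at(value):
    if isinstance(value, (int, float)):
        return datetime.fromtimestamp(value, timezone.utc)
    if isinstance(value, str):
        return datetime.fromisoformat(value.replace("Z", "+00:00"))
    # anything else (e.g. a missing value) is left unresolved in this sketch
    return None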
Example no. 19
def upgrade():
    Base.metadata.bind = op.get_bind()
    session = orm.Session(bind=Base.metadata.bind)
    event = session.query(Event).filter_by(key='eeml2020').first()

    form = RegistrationForm(event.id)
    session.add(form)
    session.commit()

    mentorship = RegistrationSection(form.id, 'Mentorship', '', 1, None, None,
                                     None)
    session.add(mentorship)
    session.commit()

    yes_no = [{'value': 'yes', 'label': 'Yes'}, {'value': 'no', 'label': 'No'}]
    mentorship_q1 = RegistrationQuestion(
        form.id,
        mentorship.id,
        'Would you be interested in a mentorship session?',
        'Choose an option',
        1,
        'multi-choice',
        None,
        None,
        True,
        'We have a limited number of spots for 1:1 mentorship sessions (15 minutes by teleconference) with speakers from the school or other ML experts. Would you be interested in such a session?',
        options=yes_no)
    session.add(mentorship_q1)
    session.commit()

    mentorship_q2 = RegistrationQuestion(
        form.id,
        mentorship.id,
        'On what topic would you like to ask questions?',
        'Choose an option',
        2,
        'choice-with-other',
        None,
        None,
        True,
        options=[
            {
                'value': 'career-advice',
                'label': 'Career advice'
            },
            {
                'value': 'reinforcement-learning',
                'label': 'Reinforcement learning'
            },
            {
                'value': 'unsupervised-learning',
                'label': 'Unsupervised learning'
            },
            {
                'value': 'computer-vision',
                'label': 'Computer Vision'
            },
            {
                'value': 'nlp',
                'label': 'NLP'
            },
            {
                'value': 'theory-of-deep-learning',
                'label': 'Theory of Deep Learning'
            },
        ])
    mentorship_q2.depends_on_question_id = mentorship_q1.id
    mentorship_q2.hide_for_dependent_value = 'no'

    mentorship_q3 = RegistrationQuestion(
        form.id,
        mentorship.id,
        'Any other information we should have when doing the matching?',
        '',
        3,
        'long-text',
        None,
        is_required=False,
        description=
        'e.g. expert from academia or industry, preferred gender/race of the expert for you to be more comfortable in the discussion etc.'
    )
    mentorship_q3.depends_on_question_id = mentorship_q1.id
    mentorship_q3.hide_for_dependent_value = 'no'

    mentorship_q4 = RegistrationQuestion(
        form.id,
        mentorship.id,
        'Mentorship discussions are scheduled normally during lunch time (13:10-15:00 CEST time). If this time does not work for you because of the time zone, please indicate another time (in CEST), and we will do our best to accommodate that.',
        'Choose an option',
        4,
        'choice-with-other',
        None,
        None,
        is_required=True,
        options=[
            {
                'value': 'yes',
                'label': 'Yes, 13:10-15:00 CEST time works for me'
            },
        ])
    mentorship_q4.depends_on_question_id = mentorship_q1.id
    mentorship_q4.hide_for_dependent_value = 'no'

    session.add_all([mentorship_q2, mentorship_q3, mentorship_q4])
    session.commit()

    background = RegistrationSection(form.id, 'Your Background', '', 2, None,
                                     None, None)
    session.add(background)
    session.commit()

    background_q1 = RegistrationQuestion(
        form.id,
        background.id,
        'What topic are you most familiar with?',
        '',
        1,
        'multi-checkbox-with-other',
        None,
        options=[
            {
                'value': 'reinforcementLearning',
                'label': 'Reinforcement learning'
            },
            {
                'value': 'unsupervisedAndGenerative',
                'label': 'Unsupervised learning & Generative Models'
            },
            {
                'value': 'computerVision',
                'label': 'Computer Vision'
            },
            {
                'value': 'rnnNlp',
                'label': 'RNNs & NLP'
            },
            {
                'value': 'theoryDeepLearning',
                'label': 'Theory of Deep Learning'
            },
            {
                'value': 'robotics',
                'label': 'Robotics'
            },
        ])
    background_q2 = RegistrationQuestion(
        form.id,
        background.id,
        'Please indicate your level of familiarity with Python.',
        '',
        2,
        'multi-choice',
        None,
        options=[
            {
                'value': '1',
                'label': '1 - never used or used very rarely'
            },
            {
                'value': '2',
                'label': '2'
            },
            {
                'value': '3',
                'label': '3'
            },
            {
                'value': '4',
                'label': '4'
            },
            {
                'value': '5',
                'label': '5 - very familiar'
            },
        ])
    background_q3 = RegistrationQuestion(
        form.id,
        background.id,
        'Which Deep Learning library are you most familiar with?',
        '',
        3,
        'multi-checkbox-with-other',
        None,
        options=[
            {
                'value': 'pytorch',
                'label': 'Pytorch'
            },
            {
                'value': 'tensorflow1',
                'label': 'Tensorflow 1'
            },
            {
                'value': 'tensorflow2',
                'label': 'Tensorflow 2'
            },
            {
                'value': 'jax',
                'label': 'Jax'
            },
            {
                'value': 'none',
                'label': """I haven't used a DL library before"""
            },
        ])
    background_q4 = RegistrationQuestion(
        form.id,
        background.id,
        'The online school will rely on a number of platforms. Please confirm that you will be able to install and/or use the following:',
        '',
        4,
        'multi-checkbox',
        None,
        options=[
            {
                'value': 'gmail',
                'label': 'Gmail account (for practical sessions)'
            },
            {
                'value': 'chrome',
                'label': 'Google Chrome (for practical sessions)'
            },
            {
                'value': 'slack',
                'label': 'Slack'
            },
            {
                'value': 'youtube',
                'label': 'Youtube (to watch, upload content)'
            },
            {
                'value': 'none',
                'label': """I can't access any of the above"""
            },
        ])
    session.add_all(
        [background_q1, background_q2, background_q3, background_q4])
    session.commit()

    poster = RegistrationSection(
        form.id, 'Poster',
        'Only if you were selected to present a poster! If you were, please provide the following (Note: You can make changes to the project / title / authors submitted initially in your application.):',
        3, None, None, None)
    session.add(poster)
    session.commit()

    poster_q1 = RegistrationQuestion(form.id,
                                     poster.id,
                                     'Title',
                                     'Title',
                                     1,
                                     'short-text',
                                     None,
                                     is_required=False)
    poster_q2 = RegistrationQuestion(form.id,
                                     poster.id,
                                     'Authors',
                                     'Authors',
                                     2,
                                     'short-text',
                                     None,
                                     is_required=False)
    poster_q3 = RegistrationQuestion(
        form.id,
        poster.id,
        'Affiliations of authors (in the same order as authors)',
        'Affiliation(s)',
        3,
        'short-text',
        None,
        is_required=False)
    poster_q4 = RegistrationQuestion(
        form.id,
        poster.id,
        'Short abstract',
        'Enter up to 100 words',
        4,
        'long-text',
        r'^\W*(\w+(\W+|$)){0,150}$',
        validation_text='Enter less than 100 words',
        is_required=False,
        description='Max 100 words')
    poster_q5 = RegistrationQuestion(form.id,
                                     poster.id,
                                     'Teaser image',
                                     '',
                                     5,
                                     'file',
                                     None,
                                     is_required=False)
    poster_q6 = RegistrationQuestion(form.id,
                                     poster.id,
                                     'Topic',
                                     'Choose an option',
                                     6,
                                     'choice-with-other',
                                     None,
                                     is_required=False,
                                     options=[
                                         {
                                             'value': 'computeVision',
                                             'label': 'Computer Vision'
                                         },
                                         {
                                             'value': 'robotics',
                                             'label': 'Robotics'
                                         },
                                         {
                                             'value': 'reinforcementLearning',
                                             'label': 'Reinforcement learning'
                                         },
                                         {
                                             'value': 'mlMedicalData',
                                             'label': 'ML for medical data'
                                         },
                                         {
                                             'value': 'neuroscience',
                                             'label': 'Neuroscience'
                                         },
                                         {
                                             'value': 'unsupervisedLearning',
                                             'label': 'Unsupervised learning'
                                         },
                                         {
                                             'value': 'nlp',
                                             'label': 'NLP'
                                         },
                                         {
                                             'value': 'deepLearning',
                                             'label': 'Deep Learning'
                                         },
                                         {
                                             'value': 'optimization',
                                             'label': 'Optimization'
                                         },
                                         {
                                             'value': 'theory',
                                             'label': 'Theory'
                                         },
                                         {
                                             'value': 'applications',
                                             'label': 'Applications'
                                         },
                                     ])
    poster_q7 = RegistrationQuestion(form.id,
                                     poster.id,
                                     'Youtube link to video presentation',
                                     'Link',
                                     7,
                                     'short-text',
                                     None,
                                     is_required=False)
    poster_q8 = RegistrationQuestion(
        form.id,
        poster.id,
        'Please check the following',
        '',
        8,
        'multi-checkbox',
        None,
        is_required=False,
        options=[{
            'value': 'less3Minutes',
            'label': 'My video is no longer than 3 minutes.'
        }, {
            'value': 'wontAlter',
            'label': 'I will not alter the video after submission.'
        }])
    session.add_all([
        poster_q1, poster_q2, poster_q3, poster_q4, poster_q5, poster_q6,
        poster_q7, poster_q8
    ])
    session.commit()

    other = RegistrationSection(form.id, 'Other', '', 4, None, None, None)
    session.add(other)
    session.commit()

    other_q1 = RegistrationQuestion(
        form.id,
        other.id,
        'Would you be interested in sharing your CV with EEML2020 sponsors and partners for recruiting purposes?',
        '',
        1,
        'multi-choice',
        None,
        description=
        'Please check the website https://www.eeml.eu for a complete list of sponsors and partners',
        options=[{
            'value': 'yes',
            'label': 'Yes'
        }, {
            'value':
            'no',
            'label':
            'No or Does not apply (e.g. you represent a sponsor or you did not submit your CV)'
        }])
    other_q2 = RegistrationQuestion(
        form.id,
        other.id,
        'For the weekend days during the school period (July 4-5) we plan to organise relaxing/socialising activities. Please indicate below if you would be interested to participate in such sessions and which sessions or suggest ideas for different sessions.',
        '',
        2,
        'multi-checkbox-with-other',
        None,
        is_required=True,
        options=[
            {
                'value': 'notInterested',
                'label': 'Not interested'
            },
            {
                'value': 'chess',
                'label': 'Chess'
            },
            {
                'value': 'go',
                'label': 'Go'
            },
            {
                'value': 'videoGames',
                'label': 'Video games'
            },
            {
                'value': 'yoga',
                'label': 'Yoga'
            },
        ])
    session.add_all([other_q1, other_q2])
    session.commit()
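
Several of the long-text questions in these forms limit answers by word count with a repetition regex (poster_q4 above uses the \w/\W variant; other forms in this file use the \s/\S variant). A small, self-contained check of how such a pattern behaves; the 100-word cap below is only an illustration:

import re

# Same shape as the validation regexes used in these forms: optional leading
# whitespace, then at most 100 runs of non-space characters, each followed by
# whitespace or the end of the string.
word_limit = re.compile(r'^\s*(\S+(\s+|$)){0,100}$')

print(bool(word_limit.match("one two three")))        # True  (3 words)
print(bool(word_limit.match(" ".join(["w"] * 101))))  # False (101 words)
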
Example #20
    def __copy__(self):
        """Implement a safe copy.copy()."""
        session = orm.Session()
        copy = session.merge(self, load=False)
        session.expunge(copy)
        return copy
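
A minimal sketch of how this merge-and-expunge idiom is typically used, assuming SQLAlchemy 1.4+ and a hypothetical declarative model Widget (none of the names below come from the original snippet): copy.copy() invokes __copy__, which transfers the already-loaded state into a throwaway Session without emitting SQL (load=False) and then detaches the clone.

import copy

import sqlalchemy as sa
from sqlalchemy import orm

Base = orm.declarative_base()


class Widget(Base):
    __tablename__ = "widgets"
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String(50))

    def __copy__(self):
        """Same pattern as above: merge into a fresh session, then detach."""
        session = orm.Session()
        clone = session.merge(self, load=False)
        session.expunge(clone)
        return clone


engine = sa.create_engine("sqlite://")
Base.metadata.create_all(engine)

# expire_on_commit=False keeps the loaded attributes populated so that
# merge(load=False) never needs to touch the database.
with orm.Session(engine, expire_on_commit=False) as session:
    session.add(Widget(id=1, name="original"))
    session.commit()
    detached_clone = copy.copy(session.get(Widget, 1))

print(detached_clone.name)  # "original", on an object outside any session
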
Example #21
def upgrade():
    connection = op.get_bind()
    session = orm.Session(bind=connection)

    session.commit()
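
On its own the function above is a no-op; the pattern only becomes useful once the queries and updates sit between creating the ORM session on the migration connection and the final commit. A hedged illustration, using a made-up MyTable model that is not part of the original snippet:

import sqlalchemy as sa
from alembic import op
from sqlalchemy import orm

Base = orm.declarative_base()


class MyTable(Base):  # hypothetical model used only by this migration
    __tablename__ = "my_table"
    id = sa.Column(sa.Integer, primary_key=True)
    status = sa.Column(sa.String(32), nullable=True)


def upgrade():
    connection = op.get_bind()
    session = orm.Session(bind=connection)

    # Hypothetical data fix-up performed on the migration's own connection.
    for row in session.query(MyTable).filter_by(status=None):
        row.status = "unknown"

    session.commit()
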
Example #22
    def __copy__(self):
        session = orm.Session()

        copy = session.merge(self, load=False)
        session.expunge(copy)
        return copy
    department_id = sa.Column(sa.ForeignKey("department.id"))
    department = orm.relationship(
        "Department",
        # Not strictly required, but being explicit never hurts
        foreign_keys=[
            department_id,
        ],
        back_populates="devices",
    )


engine = sam.EngineCreator().create_sqlite()
Base.metadata.create_all(engine)

# Insert some seed data
with orm.Session(engine) as ses:
    ses.add_all([
        Department(id=1, name="IT"),
        Department(id=2, name="HR"),
        Employee(id=1, name="Alice", department_id=1),
        Employee(id=2, name="Bob", department_id=1),
        Employee(id=3, name="Cathy", department_id=2),
        Employee(id=4, name="David", department_id=2),
        Device(id=1, name="device A", department_id=1),
        Device(id=2, name="device B", department_id=1),
        Device(id=3, name="device C", department_id=2),
        Device(id=4, name="device D", department_id=2),
    ])
    ses.commit()

with orm.Session(engine) as ses:
Example #24
def upgrade_data():
    """Data migration adding 1 generic asset type for each user defined generic asset type,
    plus 1 generic asset type for each AssetType, MarketType and WeatherSensorType.
    """

    # Get user defined generic asset types
    generic_asset_types = context.get_x_argument()

    # Declare ORM table views
    t_asset_types = sa.Table(
        "asset_type",
        sa.MetaData(),
        sa.Column("name", sa.String(80)),
        sa.Column("display_name", sa.String(80)),
    )
    t_market_types = sa.Table(
        "market_type",
        sa.MetaData(),
        sa.Column("name", sa.String(80)),
        sa.Column("display_name", sa.String(80)),
    )
    t_weather_sensor_types = sa.Table(
        "weather_sensor_type",
        sa.MetaData(),
        sa.Column("name", sa.String(80)),
        sa.Column("display_name", sa.String(80)),
    )

    # Use SQLAlchemy's connection and transaction to go through the data
    connection = op.get_bind()
    session = orm.Session(bind=connection)

    # Select all existing ids that need migrating, while keeping names intact
    asset_type_results = connection.execute(
        sa.select([
            t_asset_types.c.name,
            t_asset_types.c.display_name,
        ])).fetchall()
    market_type_results = connection.execute(
        sa.select([
            t_market_types.c.name,
            t_market_types.c.display_name,
        ])).fetchall()
    weather_sensor_type_results = connection.execute(
        sa.select([
            t_weather_sensor_types.c.name,
            t_weather_sensor_types.c.display_name,
        ])).fetchall()

    # Prepare to build a list of new generic assets
    new_generic_asset_types = []

    # Construct generic asset type for each user defined generic asset type
    asset_type_results_dict = {k: v for k, v in asset_type_results}
    market_type_results_dict = {k: v for k, v in market_type_results}
    weather_sensor_type_results_dict = {
        k: v
        for k, v in weather_sensor_type_results
    }
    for i, generic_asset_type in enumerate(generic_asset_types):
        generic_asset_type_dict = json.loads(generic_asset_type)
        print(
            f"Constructing one generic asset type according to: {generic_asset_type_dict}"
        )
        if generic_asset_type_dict["name"] in asset_type_results_dict.keys():
            raise ValueError(
                f"User defined generic asset type named '{generic_asset_type_dict['name']}' already exists as asset type."
            )
        if generic_asset_type_dict["name"] in market_type_results_dict.keys():
            raise ValueError(
                f"User defined generic asset type named '{generic_asset_type_dict['name']}' already exists as market type."
            )
        if generic_asset_type_dict[
                "name"] in weather_sensor_type_results_dict.keys():
            raise ValueError(
                f"User defined generic asset type named '{generic_asset_type_dict['name']}' already exists as weather sensor type."
            )
        new_generic_asset_type = GenericAssetType(
            name=generic_asset_type_dict["name"],
            description=generic_asset_type_dict.get("description", None),
        )
        new_generic_asset_types.append(new_generic_asset_type)

    # Construct generic asset types for each AssetType
    if asset_type_results_dict:
        print(
            f"Constructing generic asset types for each of the following asset types: {asset_type_results_dict}"
        )
    for name, display_name in asset_type_results_dict.items():
        # Create new GenericAssets with matching names
        new_generic_asset_type = GenericAssetType(name=name,
                                                  description=display_name)
        new_generic_asset_types.append(new_generic_asset_type)

    # Construct generic asset types for each MarketType
    if market_type_results_dict:
        print(
            f"Constructing generic asset types for each of the following market types: {market_type_results_dict}"
        )
    for name, display_name in market_type_results_dict.items():
        # Create new GenericAssets with matching names
        new_generic_asset_type = GenericAssetType(name=name,
                                                  description=display_name)
        new_generic_asset_types.append(new_generic_asset_type)

    # Construct generic asset types for each WeatherSensorType
    if weather_sensor_type_results_dict:
        print(
            f"Constructing generic asset types for each of the following weather sensor types: {weather_sensor_type_results_dict}"
        )
    for name, display_name in weather_sensor_type_results_dict.items():
        # Create new GenericAssets with matching names
        new_generic_asset_type = GenericAssetType(name=name,
                                                  description=display_name)
        new_generic_asset_types.append(new_generic_asset_type)

    # Add the new generic asset types
    session.add_all(new_generic_asset_types)
    session.commit()
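
The user-defined types consumed above typically arrive through Alembic's -x command-line option; context.get_x_argument() returns those raw strings, which the migration then feeds to json.loads. A hedged sketch of that hand-off (the example value is made up):

import json

from alembic import context

# Invoked roughly as:
#   alembic -x '{"name": "chargepoint", "description": "EV charge point"}' upgrade head
# Each -x value is returned verbatim by get_x_argument().
for raw_argument in context.get_x_argument():
    parsed = json.loads(raw_argument)
    print(parsed["name"], parsed.get("description"))
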
Example #25
def main():
    try:
        config_path = str(Path("D:\\Oracle")) + "\\config.ini"
        config = configparser.ConfigParser()
        config.read(config_path, encoding="utf-8-sig")
        price_db = config.get('section1', 'price_db')
        log_db = config.get('section1', 'log_db')
        user = config.get('section1', 'user')
        pw = config.get('section1', 'pw')
        ip = config.get('section1', 'ip')
        port = config.get('section1', 'port')
        # Database connections:
        # price table
        engine = create_engine('mysql+pymysql://' + user + ':' + pw + '@' +
                               ip + ':' + port + '/' + price_db)
        con = engine.connect()
        # crawler status table
        engine2 = create_engine('mysql+pymysql://' + user + ':' + pw + '@' +
                                ip + ':' + port + '/' + log_db +
                                '?charset=utf8')
        con2 = engine2.connect()

        # Logging setup
        FORMAT = '%(asctime)s-%(levelname)s-> %(message)s'
        log_filename = "D:\\python_crawler\\log\\china_market_daily\\" + datetime.now(
        ).strftime("%Y-%m-%d_%H_%M_%S.log")
        logging.getLogger('').handlers = []
        logging.basicConfig(level=logging.DEBUG,
                            filename=log_filename,
                            format=FORMAT)
        # Simplified-to-Traditional Chinese converter
        cc = OpenCC('s2t')

        today = datetime.now().strftime('%Y-%m-%d')
        start_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        logging.info("================開始爬蟲時間:" + start_time +
                     "==================")
        url = 'http://www.51garlic.com/hq/list-139.html'
        user_agent = set_header_user_agent()
        headers = {'User-Agent': user_agent, 'Connection': 'close'}
        html = requests.get(url, headers=headers)
        time.sleep(random.uniform(1, 5))
        html.encoding = "utf-8"
        soup = bs(html.text, 'html.parser')
        create_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        P = soup.find('div', class_='td-lm-list').find_all('li')
        # Check the latest dates already in the database
        # price
        market_check_date = con.execute(
            'select max(time) from price_chinese_garlic_market').fetchone(
            )[0].strftime('%Y-%m-%d')
        # market dynamics (comments)
        dynamics_check_date = con.execute(
            'select max(time) from china_market_dynamics').fetchone(
            )[0].strftime('%Y-%m-%d')
        # Gap in days between today and the latest database date
        # price
        m_days = (datetime.today() -
                  datetime.strptime(market_check_date, "%Y-%m-%d")).days
        # market dynamics
        d_days = (datetime.today() -
                  datetime.strptime(dynamics_check_date, "%Y-%m-%d")).days
        # Dates in the gap
        the_day_list = []

        for m in range(1, m_days + 1):
            the_day = datetime.strptime(today,
                                        "%Y-%m-%d") + timedelta(days=-m + 1)
            the_day_list.append(the_day.strftime('%Y-%m-%d'))
        logging.info("the_day_list:" + str(the_day_list))
        d_the_day_list = []

        for d in range(1, d_days + 1):
            the_day = datetime.strptime(today,
                                        "%Y-%m-%d") + timedelta(days=-d + 1)
            d_the_day_list.append(the_day.strftime('%Y-%m-%d'))
        logging.info("the_day_list:" + str(d_the_day_list))
        # price
        if m_days == 1:
            # comments
            for O in P:
                # If the latest date in the database == today, add new data
                page_date = datetime.strptime(
                    O.find('span').text, "%Y-%m-%d %H:%M").strftime('%Y-%m-%d')

                # If the page entry is for today, extract today's link
                if '金乡' in O.text and '国际交易市场' not in O.text and '农产品交易大厅' not in O.text and today in O.text:
                    logging.info(cc.convert(O.text))
                    link = O.find('a').get('href')

                    # Latest record in the database

                    print(page_date)
                    if market_check_date == page_date:
                        logging.info("資料庫已有今日資料")
                        logging.info('check_date=' + str(market_check_date) +
                                     ',連結日期=' + str(O.find('span').text))
                        pass
                    else:
                        logging.info("資料庫沒有今天資料,準備新增")
                        logging.info('check_date=' + str(market_check_date) +
                                     ',連結日期=' + str(O.find('span').text))
                        data_table(link, con)

        elif m_days > 1:
            for day in the_day_list:
                # More than 2 days behind: search pages 1 to 5
                for v in range(1, 5):
                    url = 'http://www.51garlic.com/hq/list-139-' + str(
                        v) + '.html'
                    user_agent = set_header_user_agent()
                    headers = {'User-Agent': user_agent, 'Connection': 'close'}
                    html = requests.get(url, headers=headers)
                    time.sleep(5)
                    html.encoding = "utf-8"
                    soup = bs(html.text, 'html.parser')
                    create_time = time.strftime("%Y-%m-%d %H:%M:%S",
                                                time.localtime())
                    P = soup.find('div', class_='td-lm-list').find_all('li')
                    for O in P:
                        if day in O.text and '金乡' in O.text and '国际交易市场' not in O.text and '农产品交易大厅' not in O.text and '下午' not in O.text:

                            link = O.find('a').get('href')

                            data_table(link, today, start_time, con, con2,
                                       dynamics_check_date)

        else:
            logging.info("資料庫已有最新資料")

        # Garlic market comments
        if d_days == 1:
            for I in P:
                # If the page entry is for today, extract today's link

                page_date = datetime.strptime(
                    I.find('span').text, "%Y-%m-%d %H:%M").strftime('%Y-%m-%d')
                if '金乡' in I.text and '国际交易市场' not in I.text and '农产品交易大厅' not in I.text and today in I.text:
                    logging.info(cc.convert(I.text))
                    link = I.find('a').get('href')

                    if dynamics_check_date == page_date:
                        logging.info("資料庫已有今日資料")
                        logging.info('dynamics_check_date=' +
                                     str(dynamics_check_date) + ',連結日期=' +
                                     str(I.find('span').text))
                        pass

                    else:
                        logging.info("資料庫沒有今天資料,準備新增")
                        logging.info('check_date=' + str(dynamics_check_date) +
                                     ',連結日期=' + str(I.find('span').text))
                        market_dynamics_table(link, con)
                else:
                    if dynamics_check_date != page_date:
                        logging.info('check_date=' + str(dynamics_check_date) +
                                     ',連結日期=' + str(I.find('span').text))
        elif d_days > 1:
            for day in d_the_day_list:
                # More than 2 days behind: search pages 1 to 5
                for u in range(1, 5):
                    url = 'http://www.51garlic.com/hq/list-139-' + str(
                        u) + '.html'
                    user_agent = set_header_user_agent()
                    headers = {'User-Agent': user_agent, 'Connection': 'close'}
                    html = requests.get(url, headers=headers)
                    time.sleep(random.uniform(1, 5))
                    html.encoding = "utf-8"
                    soup = bs(html.text, 'html.parser')
                    create_time = time.strftime("%Y-%m-%d %H:%M:%S",
                                                time.localtime())
                    P = soup.find('div', class_='td-lm-list').find_all('li')
                    for I in P:
                        if day in I.text and '金乡' in I.text and '国际交易市场' not in I.text and '农产品交易大厅' not in I.text and '下午' not in I.text:
                            link = I.find('a').get('href')
                            market_dynamics_table(link, today, start_time, con,
                                                  con2, market_check_date)
        else:
            logging.info("資料庫已有最新資料")
        finish_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        logging.info("====================爬蟲結束,結束時間:" + finish_time +
                     "======================")
        metadata = schema.MetaData(engine2)
        automap = automap_base()
        automap.prepare(engine2, reflect=True)
        session = orm.Session(engine2)
        Table('tool_crower_status2', metadata, autoload=True)
        info = automap.classes['tool_crower_status2']
        # Multi-condition filter using and_()
        re = session.query(info).filter(
            and_(info.crower_name == 'china_market_daily', info.time == today))
        # Update the record
        re.update({info.finish_time: finish_time}, synchronize_session=False)
        # Commit and close the connections
        session.commit()
        session.close()
        con.close()
        con2.close()

    except Exception:
        logging.exception("=========Exception Logged=============")
Example #26
def upgrade():
    bind = op.get_bind()

    sa.Enum(IntermediateValueModel.TrialIntermediateValueType).create(
        bind, checkfirst=True)

    # MySQL and PostgreSQL supports DEFAULT clause like 'ALTER TABLE <tbl_name>
    # ADD COLUMN <col_name> ... DEFAULT "FINITE_OR_NAN"', but seemingly Alembic
    # does not support such a SQL statement. So first add a column with schema-level
    # default value setting, then remove it by `batch_op.alter_column()`.
    with op.batch_alter_table("trial_intermediate_values") as batch_op:
        batch_op.add_column(
            sa.Column(
                "intermediate_value_type",
                sa.Enum("FINITE",
                        "INF_POS",
                        "INF_NEG",
                        "NAN",
                        name="trialintermediatevaluetype"),
                nullable=False,
                server_default="FINITE",
            ), )
    with op.batch_alter_table("trial_intermediate_values") as batch_op:
        batch_op.alter_column(
            "intermediate_value_type",
            existing_type=sa.Enum("FINITE",
                                  "INF_POS",
                                  "INF_NEG",
                                  "NAN",
                                  name="trialintermediatevaluetype"),
            existing_nullable=False,
            server_default=None,
        )

    session = orm.Session(bind=bind)
    try:
        records = session.query(IntermediateValueModel).all()
        mapping = []
        for r in records:
            value: float
            if np.isclose(r.intermediate_value, RDB_MAX_FLOAT) or np.isposinf(
                    r.intermediate_value):
                value = float("inf")
            elif np.isclose(r.intermediate_value,
                            RDB_MIN_FLOAT) or np.isneginf(
                                r.intermediate_value):
                value = float("-inf")
            elif np.isnan(r.intermediate_value):
                value = float("nan")
            else:
                value = r.intermediate_value
            (
                stored_value,
                float_type,
            ) = IntermediateValueModel.intermediate_value_to_stored_repr(value)
            mapping.append({
                "trial_intermediate_value_id": r.trial_intermediate_value_id,
                "intermediate_value_type": float_type,
                "intermediate_value": stored_value,
            })
        session.bulk_update_mappings(IntermediateValueModel, mapping)
        session.commit()
    except SQLAlchemyError as e:
        session.rollback()
        raise e
    finally:
        session.close()
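
The comment in the example above describes the usual Alembic trick for introducing a NOT NULL column into a table that already contains rows: add the column with a temporary server default, then drop the default. A stripped-down sketch of just that pattern, using a hypothetical widgets table and status column:

import sqlalchemy as sa
from alembic import op


def upgrade():
    # Step 1: add the column with a temporary server default so the rows
    # already in the table satisfy the NOT NULL constraint.
    with op.batch_alter_table("widgets") as batch_op:
        batch_op.add_column(
            sa.Column("status", sa.String(16), nullable=False,
                      server_default="unknown"))

    # Step 2: drop the schema-level default again, so new inserts must
    # provide the value explicitly.
    with op.batch_alter_table("widgets") as batch_op:
        batch_op.alter_column(
            "status",
            existing_type=sa.String(16),
            existing_nullable=False,
            server_default=None,
        )
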
Example #27
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # pass
    # ### end Alembic commands ###
    Base.metadata.bind = op.get_bind()
    session = orm.Session(bind=Base.metadata.bind)

    # Update section title for Kambule Doctoral Dissertation award
    event = session.query(Event).filter_by(key='kambuledoctoral2020').first()
    app_form = session.query(ApplicationForm).filter_by(
        event_id=event.id).first()

    # Updating main section
    section = session.query(Section).filter_by(
        name='Thamsanqa Kambule Doctoral Dissertation Award 2020').first()
    question = session.query(Question).filter(
        Question.section_id == section.id,
        Question.headline == 'Nomination Capacity').first()
    question.validation_regex = None

    # Updating nominator section
    section = session.query(Section).filter(
        Section.application_form_id == app_form.id,
        Section.order == 2).first()
    section.name = 'Nominator Information'
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 1).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 2).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 3).first()
    question.validation_regex = None

    # Updating candidate section
    section = session.query(Section).filter_by(
        name='Doctoral Candidate Information').first()
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 1).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 2).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 3).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 4).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 5).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 6).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 7).first()
    question.validation_regex = None

    # Updating dissertation section
    section = session.query(Section).filter_by(
        name='Doctoral dissertation information').first()
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 1).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 2).first()
    question.validation_regex = r'^\s*(\S+(\s+|$)){0,800}$'
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 3).first()
    question.validation_regex = r'^\s*(\S+(\s+|$)){400,500}$'
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 4).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 5).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 6).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 7).first()
    question.validation_regex = None

    # Updating dissertation section
    section = session.query(Section).filter_by(
        name='Supporting Documentation').first()
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 1).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 2).first()
    question.validation_regex = None
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 3).first()
    question.validation_regex = None
    question.options = {"min_num_referrals": 1, "max_num_referrals": 1}
    question = session.query(Question).filter(
        Question.section_id == section.id, Question.order == 4).first()
    question.validation_regex = None
Example #28
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # pass
    # ### end Alembic commands ###

    Base.metadata.bind = op.get_bind()
    session = orm.Session(bind=Base.metadata.bind)

    maathaiimpact2020 = Event('Wangari Maathai Impact Award 2020',
        'Wangari Maathai Impact Award 2020', 
        datetime.date(2020, 8, 23), datetime.date(2020, 8, 28), 'maathai2020',
        1, '*****@*****.**', 'http://www.deeplearningindaba.com', 
        datetime.date(2020,3,1), datetime.date(2020,4,17), datetime.date(2020,4,25), 
        datetime.date(2020,5,15),datetime.date(2020,1,1), datetime.date(2020,1,1), 
        datetime.date(2020,1,1),datetime.date(2020,1,1), datetime.date(2020,1,1), 
        datetime.date(2020,1,1), EventType.AWARD)
    session.add(maathaiimpact2020)
    session.commit()

    event_id = maathaiimpact2020.id

    application_form = ApplicationForm(event_id, True, True)
    session.add(application_form)
    session.commit()

    app_form_id = application_form.id

    main_section = Section(app_form_id, 'Wangari Maathai Impact Award 2020', """
This is the official application form for the Wangari Maathai Impact Award 2020, an award to encourage and recognise work by African innovators that shows impactful application of machine learning and artificial intelligence. This award will be made at the Deep Learning Indaba in Tunis, Tunisia in August 2020.

This application will require:
- Personal details about the nominee,
- Details about the impactful work, including why it is impactful, who it impacts and why is it innovative,
- Details of 2 people other than the nominator to provide supporting letters for the nominee

For eligibility criteria for the Maathai Award, please see www.deeplearningindaba.com/maathai-2020

For any queries, please email [email protected].
    """, 1)
    session.add(main_section)
    session.commit()

    q1_nomination_capacity = Question(
        application_form_id=app_form_id,
        section_id=main_section.id,
        headline='Nominating Capacity',
        placeholder='',
        order=1,
        questionType='multi-choice',
        validation_regex=None,
        is_required=True,
        options=[
            {'label':'Self-nomination', 'value':'self'},
            {'label':'Nomination on behalf of a candidate','value':'other'}
        ]
    )
    session.add(q1_nomination_capacity)
    session.commit()

    nominator_information = Section(app_form_id, 'Nominator Information',"""
    Details of the person nominating an individual, team or organisation
    """,2)
    nominator_information.depends_on_question_id = q1_nomination_capacity.id
    nominator_information.show_for_values = ['other']
    session.add(nominator_information)
    session.commit()

    nominator_q1 = Question(
        application_form_id=app_form_id,
        section_id=nominator_information.id,
        headline='Affiliation',
        placeholder='Affiliation',
        order=1,
        questionType='short-text',
        validation_regex=None,
        is_required=True,
        description='(university, institute, company, etc)'
    )
    nominator_q2 = Question(
        application_form_id=app_form_id,
        section_id=nominator_information.id,
        headline='Department',
        placeholder='Department',
        order=2,
        questionType='short-text',
        validation_regex=None,
        is_required=True
    )
    nominator_q3 = Question(
        application_form_id=app_form_id,
        section_id=nominator_information.id,
        headline='Describe your relationship to the nominee',
        placeholder='',
        order=3,
        questionType='long-text',
        validation_regex=None,
        is_required=True
    )
    session.add_all([nominator_q1, nominator_q2, nominator_q3])
    session.commit()

    nominee_information = Section(app_form_id, 'Nominee Information',"""
    Details of the nominated individual, team or organisation to be considered for the award. For any teams/organisations, details of the principal contact should be entered below.
    """,3)
    session.add(nominee_information)
    session.commit()

    nominee_q1 = Question(
        application_form_id=app_form_id,
        section_id=nominee_information.id,
        headline='Title',
        placeholder='Title',
        order=1,
        questionType='short-text',
        validation_regex=None,
        is_required=True
    )
    nominee_q1.depends_on_question_id = q1_nomination_capacity.id
    nominee_q1.show_for_values = ['other']
    nominee_q2 = Question(
        application_form_id=app_form_id,
        section_id=nominee_information.id,
        headline='Firstname',
        placeholder='Firstname',
        order=2,
        questionType='short-text',
        validation_regex=None,
        is_required=True
    )
    nominee_q2.depends_on_question_id = q1_nomination_capacity.id
    nominee_q2.show_for_values = ['other']
    nominee_q3 = Question(
        application_form_id=app_form_id,
        section_id=nominee_information.id,
        headline='Lastname',
        placeholder='Lastname',
        order=3,
        questionType='short-text',
        validation_regex=None,
        is_required=True
    )
    nominee_q3.depends_on_question_id = q1_nomination_capacity.id
    nominee_q3.show_for_values = ['other']
    nominee_q4 = Question(
        application_form_id=app_form_id,
        section_id=nominee_information.id,
        headline='Email Address',
        placeholder='Email Address',
        order=4,
        questionType='short-text',
        validation_regex=None,
        is_required=True
    )
    nominee_q4.depends_on_question_id = q1_nomination_capacity.id
    nominee_q4.show_for_values = ['other']
    nominee_q5 = Question(
        application_form_id=app_form_id,
        section_id=nominee_information.id,
        headline='Affiliation',
        placeholder='Affiliation',
        order=5,
        questionType='short-text',
        validation_regex=None,
        is_required=True,
        description='(university, institute, company, etc)'
    )
    nominee_q6 = Question(
        application_form_id=app_form_id,
        section_id=nominee_information.id,
        headline='If a team/organisation, names of team members',
        placeholder='Names of team members',
        order=6,
        questionType='short-text',
        validation_regex=None,
        is_required=False
    )
    nominee_q7 = Question(
        application_form_id=app_form_id,
        section_id=nominee_information.id,
        headline='Country of Residence',
        placeholder='Choose an option',
        order=7,
        questionType='multi-choice',
        validation_regex=None,
        is_required=True,
        options=get_country_list(session)
    )
    nominee_q8 = Question(
        application_form_id=app_form_id,
        section_id=nominee_information.id,
        headline='Nationality',
        placeholder='Choose an option',
        order=8,
        questionType='multi-choice',
        validation_regex=None,
        is_required=True,
        options=get_country_list(session)
    )
    nominee_q9 = Question(
        application_form_id=app_form_id,
        section_id=nominee_information.id,
        headline='Website (or other online presence)',
        placeholder='Enter a URL',
        order=9,
        questionType='short-text',
        validation_regex=None,
        is_required=False
    )
    session.add_all([nominee_q1,nominee_q2,nominee_q3,nominee_q4,nominee_q5,
        nominee_q6,nominee_q7,nominee_q8,nominee_q9])
    session.commit()

    impact_info = Section(app_form_id, 'Information about impactful work','',3)
    session.add(impact_info)
    session.commit()

    impact_q1 = Question(
        application_form_id=app_form_id,
        section_id=impact_info.id,
        headline='What impactful work or project is the team/individual doing?',
        placeholder='Enter 300-500 words',
        order=1,
        questionType='long-text',
        validation_regex=r'^\s*(\S+(\s+|$)){300,500}$',
        is_required=True,
        description='Describe the work/project. In particular, describe the role of machine learning and/or artificial intelligence (300-500 words)'
    )
    impact_q2 = Question(
        application_form_id=app_form_id,
        section_id=impact_info.id,
        headline='Who does this work impact? Say how.',
        placeholder='Enter 150-200 words',
        order=2,
        questionType='long-text',
        validation_regex=r'^\s*(\S+(\s+|$)){150,200}$',
        is_required=True,
        description='Describe who is benefitting from this work (location, how many people etc). Describe how this work is positively affecting this group (150-200 words)'
    )
    impact_q3 = Question(
        application_form_id=app_form_id,
        section_id=impact_info.id,
        headline='Why is this work innovative?',
        placeholder='Enter 150-200 words',
        order=3,
        questionType='long-text',
        validation_regex=r'^\s*(\S+(\s+|$)){150,200}$',
        is_required=True,
        description='Describe the novel parts of the work, what difference it is making, or how it is moving Africa forwards (150-200 words)'
    )
    session.add_all([impact_q1,impact_q2,impact_q3])
    session.commit()

    supporting_docs = Section(app_form_id, 'Supporting Documentation', """
    If this is a self-nomination, two supporting letters are required, otherwise one supporting letter is sufficient. The supporting letters should describe the nature of the impactful work, why it is considered to be impactful, and in what way the candidate strengthens African machine learning, and any other relevant information. Letter writers can be from anyone familiar with the impactful work.

    Supporting letters should be 600 words at most, written in English, and submitted electronically in PDF by the closing date through Baobab
    """,4)
    session.add(supporting_docs)
    session.commit()

    supporting_docs_q1 = Question(
        application_form_id=app_form_id,
        section_id=supporting_docs.id,
        headline='Add the details of the 1 or 2 people who will provide supporting letters.',
        placeholder='',
        order=1,
        questionType='reference',
        validation_regex=None,
        is_required=True,
        description='Add at least two people if this is a self nomination and at least one if you are nominating someone else.',
        options={'min_num_referral': 1, 'max_num_referral': 3}
    )
    supporting_docs_q2 = Question(
        application_form_id=app_form_id,
        section_id=supporting_docs.id,
        headline='Additional comments',
        placeholder='',
        order=2,
        questionType='long-text',
        validation_regex=None,
        is_required=False,
        description='Use this space to provide any additional details which you feel are relevant to this nomination and have not been captured by this form.'
    )
    session.add_all([supporting_docs_q1, supporting_docs_q2])
    session.commit()
Example #29
def upgrade():
    # pass
    Base.metadata.bind = op.get_bind()
    session = orm.Session(bind=Base.metadata.bind)

    kambuledoctoral2020 = Event(
        'Thamsanqa Kambule Doctoral Dissertation Award 2020',
        'Thamsanqa Kambule Doctoral Dissertation Award 2020',
        datetime.date(2020, 8,
                      23), datetime.date(2020, 8,
                                         28), 'kambuledoctoral2020', 1,
        '*****@*****.**', 'http://www.deeplearningindaba.com',
        datetime.date(2020, 3, 1), datetime.date(2020, 4, 17),
        datetime.date(2020, 4, 25), datetime.date(2020, 5, 15),
        datetime.date(2020, 1, 1), datetime.date(2020, 1, 1),
        datetime.date(2020, 1, 1), datetime.date(2020, 1, 1),
        datetime.date(2020, 1, 1), datetime.date(2020, 1, 1), EventType.AWARD)

    session.add(kambuledoctoral2020)
    session.commit()

    event_id = kambuledoctoral2020.id

    application_form = ApplicationForm(event_id, True, True)
    session.add(application_form)
    session.commit()

    app_form_id = application_form.id

    main_section = Section(
        app_form_id, 'Thamsanqa Kambule Doctoral Dissertation Award 2020', """
    This is the official application form for the Thamsanqa Kambule Doctoral Dissertation Award 2020, an award to encourage and recognise excellence in research and writing by Doctoral candidates at African universities, in any area of computational and statistical science. This award will be made at the Deep Learning Indaba at Tunis, Tunisia, in August 2020. 

This application will require:
- Personal details about the nominee,
- Details about the dissertation, including a PDF of the dissertation itself, its abstract and core contributions,
- 1 supporting letter from a person other than the nominator (submitted separately through Baobab)

For eligibility criteria for the Kambule Doctoral Award, please see www.deeplearningindaba.com/awards

For any queries, please email [email protected].
    """, 1)

    session.add(main_section)
    session.commit()

    q1_nomination_capacity = Question(
        application_form_id=app_form_id,
        section_id=main_section.id,
        headline='Nomination Capacity',
        placeholder='',
        order=1,
        questionType='multi-choice',
        validation_regex=1,
        is_required=True,
        options=[{
            'label': 'Self-nomination',
            'value': 'self'
        }, {
            'label': 'Nomination on behalf of a candidate',
            'value': 'other'
        }])
    session.add(q1_nomination_capacity)
    session.commit()

    nominator_information = Section(
        app_form_id, 'Nomination Information', """
Details of the person nominating the doctoral candidate.
        """, 2)
    nominator_information.depends_on_question_id = q1_nomination_capacity.id
    nominator_information.show_for_values = ['other']
    session.add(nominator_information)
    session.commit()

    nomination_q1 = Question(
        application_form_id=app_form_id,
        section_id=nominator_information.id,
        headline='University',
        placeholder='University',
        order=1,
        questionType='short-text',
        validation_regex=1,
        is_required=True,
        description='The university that you (the nominator) are based at.')
    nomination_q2 = Question(
        application_form_id=app_form_id,
        section_id=nominator_information.id,
        headline='Department',
        placeholder='Department',
        order=2,
        questionType='short-text',
        validation_regex=1,
        is_required=True,
        description='The department that you (the nominator) are based at.')
    nomination_q3 = Question(
        application_form_id=app_form_id,
        section_id=nominator_information.id,
        headline='Describe your relationship to the doctoral candidate',
        placeholder='',
        order=3,
        questionType='long-text',
        validation_regex=1,
        is_required=True)
    session.add_all([nomination_q1, nomination_q2, nomination_q3])
    session.commit()

    candidate_information = Section(
        app_form_id, 'Doctoral Candidate Information',
        'Details of the nominated doctoral candidate to be considered for the award.',
        3)
    session.add(candidate_information)
    session.commit()

    candidate_q1 = Question(application_form_id=app_form_id,
                            section_id=candidate_information.id,
                            headline='Title',
                            placeholder='Title',
                            order=1,
                            questionType='short-text',
                            validation_regex=1,
                            is_required=True)
    candidate_q1.depends_on_question_id = q1_nomination_capacity.id
    candidate_q1.show_for_values = ['other']

    candidate_q2 = Question(application_form_id=app_form_id,
                            section_id=candidate_information.id,
                            headline='Firstname',
                            placeholder='Firstname',
                            order=2,
                            questionType='short-text',
                            validation_regex=1,
                            is_required=True)
    candidate_q2.depends_on_question_id = q1_nomination_capacity.id
    candidate_q2.show_for_values = ['other']

    candidate_q3 = Question(application_form_id=app_form_id,
                            section_id=candidate_information.id,
                            headline='Lastname',
                            placeholder='Lastname',
                            order=3,
                            questionType='short-text',
                            validation_regex=1,
                            is_required=True)
    candidate_q3.depends_on_question_id = q1_nomination_capacity.id
    candidate_q3.show_for_values = ['other']

    candidate_q4 = Question(application_form_id=app_form_id,
                            section_id=candidate_information.id,
                            headline='Email Address',
                            placeholder='Email Address',
                            order=4,
                            questionType='short-text',
                            validation_regex=1,
                            is_required=True)
    candidate_q4.depends_on_question_id = q1_nomination_capacity.id
    candidate_q4.show_for_values = ['other']

    candidate_q5 = Question(application_form_id=app_form_id,
                            section_id=candidate_information.id,
                            headline='University',
                            placeholder='University',
                            order=5,
                            questionType='short-text',
                            validation_regex=1,
                            is_required=True)
    candidate_q6 = Question(application_form_id=app_form_id,
                            section_id=candidate_information.id,
                            headline='Country of Residence',
                            placeholder='Choose an option',
                            order=6,
                            questionType='multi-choice',
                            validation_regex=1,
                            is_required=True,
                            options=get_country_list(session))
    candidate_q7 = Question(application_form_id=app_form_id,
                            section_id=candidate_information.id,
                            headline='Nationality',
                            placeholder='Choose an option',
                            order=7,
                            questionType='multi-choice',
                            validation_regex=1,
                            is_required=True,
                            options=get_country_list(session))
    session.add_all([
        candidate_q1, candidate_q2, candidate_q3, candidate_q4, candidate_q5,
        candidate_q6, candidate_q7
    ])
    session.commit()

    dissertation_information = Section(
        app_form_id, 'Doctoral dissertation information',
        'Details of the Doctoral dissertation of the nominated candidate.', 4)
    session.add(dissertation_information)
    session.commit()

    dissertation_q1 = Question(application_form_id=app_form_id,
                               section_id=dissertation_information.id,
                               headline='Title',
                               placeholder='Title',
                               order=1,
                               questionType='short-text',
                               validation_regex=1,
                               is_required=True)
    dissertation_q2 = Question(
        application_form_id=app_form_id,
        section_id=dissertation_information.id,
        headline='Abstract',
        placeholder='Enter up to 800 words',
        order=2,
        questionType='long-text',
        validation_regex=r'^\s*(\S+(\s+|$)){0,800}$',
        is_required=True,
        description='Abstract of dissertation (<= 800 words)')
    dissertation_q3 = Question(
        application_form_id=app_form_id,
        section_id=dissertation_information.id,
        headline=
        "What are the dissertation's primary contributions to its field of research?",
        placeholder='Enter 400-500 words',
        order=3,
        questionType='long-text',
        validation_regex=r'^\s*(\S+(\s+|$)){400,500}$',
        is_required=True,
        description=
        'Succinctly describe the novel contributions of this work (400-500 words)'
    )
    dissertation_q4 = Question(application_form_id=app_form_id,
                               section_id=dissertation_information.id,
                               headline='Name of Doctoral academic supervisor',
                               placeholder='Name of Supervisor',
                               order=4,
                               questionType='short-text',
                               validation_regex=1,
                               is_required=True)
    dissertation_q5 = Question(
        application_form_id=app_form_id,
        section_id=dissertation_information.id,
        headline='E-mail address of Doctoral academic supervisor',
        placeholder='Supervisor Email Address',
        order=5,
        questionType='short-text',
        validation_regex=1,
        is_required=True)
    dissertation_q6 = Question(
        application_form_id=app_form_id,
        section_id=dissertation_information.id,
        headline='Date of submission/defence of dissertation',
        placeholder='Date of submission',
        order=6,
        questionType='date',
        validation_regex=1,
        is_required=True)
    dissertation_q7 = Question(application_form_id=app_form_id,
                               section_id=dissertation_information.id,
                               headline='Name(s) of examiner/s',
                               placeholder='Name(s) of examiner/s',
                               order=7,
                               questionType='short-text',
                               validation_regex=1,
                               is_required=False)

    session.add_all([
        dissertation_q1, dissertation_q2, dissertation_q3, dissertation_q4,
        dissertation_q5, dissertation_q6, dissertation_q7
    ])
    session.commit()

    supporting_information = Section(app_form_id, 'Supporting Documentation',
                                     '', 5)
    session.add(supporting_information)
    session.commit()

    supporting_q1 = Question(
        application_form_id=app_form_id,
        section_id=supporting_information.id,
        headline='Dissertation',
        placeholder='Dissertation',
        order=1,
        questionType='file',
        validation_regex=1,
        is_required=True,
        description=
        'We require the electronic submission of the dissertation. We recommended that dissertations be written in English (the Awards Committee may require an English translation for full consideration of theses written in other languages).'
    )
    supporting_q2 = Question(
        application_form_id=app_form_id,
        section_id=supporting_information.id,
        headline="Examiners' reports",
        placeholder="Examiners' reports",
        order=2,
        questionType='file',
        validation_regex=1,
        is_required=True,
        description=
        "We require the examiners' report to be submitted. If this is not available, please contact [email protected]."
    )
    supporting_q3 = Question(
        application_form_id=app_form_id,
        section_id=supporting_information.id,
        headline='Supporting Letter',
        placeholder='',
        order=3,
        questionType='reference',
        validation_regex='{"min_num_referrals": 1, "max_num_referrals": 1}',
        is_required=True,
        description="""
        A supporting letter that describes the main theoretical, methodological, and/or applied contributions of the thesis should be submitted by an academic who is in a position to comment on the merits of the work and the candidate, e.g., Doctoral supervisor, thesis examiner, academic mentor, collaborators, etc. The letter should be written by someone other than the person who is nominating the candidate.

        Supporting letters should be 600 words at most, written in English, and submitted electronically in PDF format by the closing date, using Baobab
        """)
    supporting_q4 = Question(
        application_form_id=app_form_id,
        section_id=supporting_information.id,
        headline='Additional Comments',
        placeholder='',
        order=4,
        questionType='long-text',
        validation_regex=1,
        is_required=False,
        description=
        'Use this space to provide any additional details which you feel are relevant to this nomination and have not been captured by this form.'
    )

    session.add_all(
        [supporting_q1, supporting_q2, supporting_q3, supporting_q4])
    session.commit()
Example #30
    def __init__(self, db_path=":memory:", echo=False):
        self._engine = sqlalchemy.create_engine(
            'sqlite:///%s?check_same_thread=False' % db_path, echo=echo)
        self._session = orm.Session(bind=self._engine)

        Base.metadata.create_all(self._engine)
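
A hedged usage sketch for this constructor, assuming it belongs to a small storage-wrapper class; the surrounding class, Base, the Note model, and the add_note helper below are illustrative and not part of the original excerpt:

import sqlalchemy
from sqlalchemy import orm

Base = orm.declarative_base()


class Note(Base):  # hypothetical model
    __tablename__ = "notes"
    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    text = sqlalchemy.Column(sqlalchemy.String(200))


class Storage:
    def __init__(self, db_path=":memory:", echo=False):
        self._engine = sqlalchemy.create_engine(
            'sqlite:///%s?check_same_thread=False' % db_path, echo=echo)
        self._session = orm.Session(bind=self._engine)

        Base.metadata.create_all(self._engine)

    def add_note(self, text):
        self._session.add(Note(text=text))
        self._session.commit()


storage = Storage()                           # in-memory SQLite database
storage.add_note("hello")
print(storage._session.query(Note).count())   # 1
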