Example #1
def test_torsiondrive_initial_final_molecule(torsiondrive_fixture,
                                             fractal_compute_server):
    """ With single initial molecule in torsion proc"""

    torsion, client = torsiondrive_fixture

    r = fractal_compute_server.storage.custom_query("torsiondrive",
                                                    "initial_molecules_ids",
                                                    torsion_id=torsion["id"])

    assert r["meta"]["success"]
    assert len(r["data"]) == 9

    r = fractal_compute_server.storage.custom_query("torsiondrive",
                                                    "initial_molecules",
                                                    torsion_id=torsion["id"])
    assert r["meta"]["success"]
    assert len(r["data"]) == 9
    mol = r["data"][0]

    # Msgpack field
    assert isinstance(msgpackext_loads(mol["geometry"]), np.ndarray)  # TODO

    # Sample fields in the molecule dict
    assert all(
        x in mol.keys()
        for x in ["schema_name", "symbols", "geometry", "molecular_charge"])

    r = fractal_compute_server.storage.custom_query("torsiondrive",
                                                    "final_molecules_ids",
                                                    torsion_id=torsion["id"])

    assert r["meta"]["success"]
    assert len(r["data"]) == 9

    r = fractal_compute_server.storage.custom_query("torsiondrive",
                                                    "final_molecules",
                                                    torsion_id=torsion["id"])
    assert r["meta"]["success"]
    assert len(r["data"]) == 9
    mol = r["data"][0]
Example #2
def json_to_msgpack_table_altercolumns(table_name,
                                       update_columns,
                                       nullable_true=None):

    if nullable_true is None:
        nullable_true = set()

    connection = op.get_bind()
    table, cols = _intermediate_table(table_name, update_columns)

    column_pairs, old_names, new_names = _get_colnames(update_columns)
    num_records = connection.execute(
        f"select count(*) from {table_name}").scalar()

    old_columns = [getattr(table.c, x) for x in old_names]
    new_columns = [getattr(table.c, x) for x in new_names]

    logger.info(f"Checking converted columns...")
    # Pull chunk to migrate
    data = connection.execute(
        sa.select([
            table.c.id,
            *old_columns,
            *new_columns,
        ],
                  order_by=table.c.id.asc())).fetchall()
    # ], limit=100, order_by=func.random())).fetchall()

    col_names = ["id"] + old_names + new_names
    for values in data:
        row = {k: v for k, v in zip(col_names, values)}
        # print(row["id"])
        # print(row.keys())
        # for k, v in row.items():
        #     print(k, v)

        for name in old_names:
            comp_data = msgpackext_loads(row[name + "_"])
            # try:
            #     assert compare_recursive(comp_data, row[name])
            # except AssertionError:
            #     assert compare_recursive(comp_data.ravel(), row[name], quiet=True)

            # try:
            #     print(name, comp_data.dtype, comp_data)
            # except:
            #     print(name, comp_data[0].dtype, comp_data)
            #     pass
    # raise Exception()
    logger.info(f"Dropping old columns and renaming new.")
    # Drop old tables and swamp new ones in.
    for old_name, new_name in column_pairs:
        nullable = False
        if old_name in nullable_true:
            nullable = True

        op.drop_column(table_name, old_name)
        op.alter_column(table_name,
                        new_name,
                        new_column_name=old_name,
                        nullable=nullable)
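A hedged sketch of how a helper like this would typically be driven from an Alembic upgrade() step. The table name, the column mapping, and the MsgpackExt type below are placeholders; the exact shape expected for update_columns depends on the _get_colnames helper, which is not shown here:

def upgrade():
    # Convert the JSON column "geometry" of a hypothetical "molecule" table to
    # its already-populated msgpack twin "geometry_", verify it deserializes,
    # then drop the old column and rename the new one into place.
    json_to_msgpack_table_altercolumns(
        "molecule",                  # placeholder table name
        {"geometry": MsgpackExt},    # placeholder old-column -> new-type mapping
        nullable_true={"geometry"})  # columns that should remain nullable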
Example #3
def process_result_value(self, value, dialect):
    # Decode the stored msgpack blob back into Python objects when a row is
    # loaded; NULL columns pass through unchanged.
    if value is None:
        return value
    else:
        return msgpackext_loads(value)
Example #4
def process_result_value(self, value, dialect):
    # Variant without the None guard: decode the msgpack blob on every load.
    return msgpackext_loads(value)
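Examples #3 and #4 are the decode half of a SQLAlchemy custom column type: process_result_value runs when a row is read, and its counterpart process_bind_param runs when a value is written. A minimal sketch of the surrounding class, assuming PostgreSQL BYTEA storage and the qcelemental-style msgpack helpers (the class name MsgpackExt and the import paths are assumptions):

from sqlalchemy.types import TypeDecorator
from sqlalchemy.dialects.postgresql import BYTEA
from qcelemental.util.serialization import msgpackext_dumps, msgpackext_loads

class MsgpackExt(TypeDecorator):
    """Stores Python/numpy data as a msgpack blob in a BYTEA column."""

    impl = BYTEA

    def process_bind_param(self, value, dialect):
        # Serialize on the way into the database.
        if value is None:
            return value
        return msgpackext_dumps(value)

    def process_result_value(self, value, dialect):
        # Deserialize on the way out (the body shown in Example #3).
        if value is None:
            return value
        return msgpackext_loads(value)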