Code example #1
def test12():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        inst = TestModel12(id=1)
        inst2 = TestModel12(id=2)
        inst3 = TestModel12(id=3)

        TestModel12.table_create()
    try:
        with DatabaseContext(project=UNIT_TEST_PROJECT):
            TestModel12.create_load_job([inst, inst2])
            print(TestModel12.query().all_as_list())

        with DatabaseContext(project=UNIT_TEST_PROJECT,
                             default_dataset='unittest'):
            print(ReadOnlyTestModel12.query().all_as_list())

            try:
                ReadOnlyTestModel12.create_load_job([inst3])
                raise RuntimeError("create_load_job")
            except AttributeError:
                pass

            try:
                ReadOnlyTestModel12.create([inst3])
                raise RuntimeError("create")
            except AttributeError:
                pass

            try:
                ReadOnlyTestModel12.query().update({'column1': 2})
                raise RuntimeError("update")
            except BigQueryOrmError:
                pass

            try:
                ReadOnlyTestModel12.query().delete()
                raise RuntimeError("delete")
            except BigQueryOrmError:
                pass

            try:
                ReadOnlyTestModel12.table_create()
                raise RuntimeError("table_create")
            except AttributeError:
                pass

            try:
                ReadOnlyTestModel12.table_delete()
                raise RuntimeError("table_delete")
            except AttributeError:
                pass
    finally:
        with DatabaseContext(project=UNIT_TEST_PROJECT):
            TestModel12.table_delete()
Code example #2
    def create_from_query(cls, query, flatten_results=True):
        """
        Load instances through a query job.
        The job is asynchronous but this function will wait for the job to complete.
        See https://cloud.google.com/bigquery/docs/writing-results
        Note that this method must compile the sql query to a string.
        It does so using sqlalchemy_query.statement.compile(compile_kwargs={"literal_binds": True}).
        This will fail for certain queries and should not be used for queries which depend on untrusted input.
        See https://docs.sqlalchemy.org/en/13/faq/sqlexpressions.html for more information.
        Args:
            query (BigQueryQuery):  A query object whose results are
                to be appended to the table.
            flatten_results (Optional[bool]): If True, will flatten the query results.
                Defaults to True.
        """
        client = DatabaseContext.get_session().connection().connection._client
        table_ref = _get_table_ref(cls.__table__.name, client)

        job_config = bigquery_job.QueryJobConfig(
            destination=table_ref,
            create_disposition=bigquery_job.CreateDisposition.CREATE_NEVER,
            write_disposition=bigquery_job.WriteDisposition.WRITE_APPEND,
            flatten_results=flatten_results,
            allow_large_results=not flatten_results,
        )

        dialect = DatabaseContext.get_engine().dialect
        compiled_sql = query.sqlalchemy_query.statement.compile(
            dialect=dialect, compile_kwargs={
                'literal_binds': True,
            })
        raw_sql = str(compiled_sql)

        query_job = client.query(raw_sql, job_config=job_config)

        try:
            query_job.result()
        except Exception as e:
            raise exceptions.DatabaseError('{}\n{}\n{}'.format(
                query_job.errors,
                '{}({})'.format(type(e), e),
                query_job.error_result,
            ))

        if ((query_job.error_result and len(query_job.error_result) > 0)
                or (query_job.errors and len(query_job.errors) > 0)):
            raise exceptions.DatabaseError('{}\n{}'.format(
                query_job.errors, query_job.error_result))
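
A minimal usage sketch (SourceModel and DestinationModel are hypothetical): the destination table must already exist because the job uses CreateDisposition.CREATE_NEVER, and since the SQL is compiled with literal_binds, the query should not depend on untrusted input.

# Sketch only: SourceModel and DestinationModel are hypothetical models.
with DatabaseContext(project=UNIT_TEST_PROJECT):
    source_query = SourceModel.query_empty(
        SourceModel.intr, SourceModel.double).filter(SourceModel.intr > 0)
    # Runs the compiled query as a BigQuery query job and appends the
    # results to DestinationModel's existing table.
    DestinationModel.create_from_query(source_query, flatten_results=True)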
Code example #3
    def all_as_pandas(self):
        """
        Returns:
            (pandas.DataFrame):  The result of the query as a pandas DataFrame.
        """
        return pd.read_sql(self.sqlalchemy_query.statement,
                           DatabaseContext.get_engine())
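
For example, a filtered query can be materialized directly as a DataFrame (a sketch reusing TestModel8 and its intr column from test8 further below):

# Sketch: read query results into a pandas DataFrame.
with DatabaseContext(project=UNIT_TEST_PROJECT):
    df = TestModel8.query().filter(TestModel8.intr >= 2).all_as_pandas()
    print(df.columns)
    print(len(df))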
Code example #4
def test4():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        Test4Model1.table_create()
        Test4Model2.table_create()

        Test4Model1.create(
            [Test4Model1(id=i, c1=i + 1, c2=i * 2) for i in range(5)] +
            [Test4Model1(id=-i, c1=i + 1, c2=i * 2) for i in range(1, 3)])
        Test4Model2.create(
            [Test4Model2(id=i, c2=i + 1, c3=-i) for i in range(5)] +
            [Test4Model2(id=i + 100, c2=i + 1, c3=-i) for i in range(2)])

        results = Test4Model1.query(
            Test4Model2.id.label('2nd_id'),
            Test4Model2.c2.label('2nd_c2'),
            Test4Model2.c3.label('2nd_c3'),
        ).join(
            Test4Model2,
            Test4Model1.id == Test4Model2.id,
            full=True  # full outer join
        ).all()
        results = list(results)
        print(results)
        assert len(results) == 9

        Test4Model1.table_delete()
        Test4Model2.table_delete()
Code example #5
def test3():
    with DatabaseContext(project=UNIT_TEST_PROJECT,
                         default_dataset='unittest'):
        TestModel3.table_create()

        TestModel3.create([
            TestModel3(
                id=i,
                geo='POLYGON((0 0,1 0,1 1,0 0))',
            ) for i in range(2000)
        ])

        TestModel3.create([
            TestModel3(
                id=i,
                geo='POLYGON((0 0,1 0,1 1,0 0))',
            ) for i in range(2001, 2001 + 100)
        ], batch_size=20)

        query_results = list(TestModel3.query().all())

        assert len(query_results) == 2100

        TestModel3.table_delete()
Code example #6
def test_table_methods():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        for klass in [TestModel7_1, TestModel7_2]:
            assert not klass.table_exists()
            klass.table_create()
            assert klass.table_exists()
            klass.table_delete()
            assert not klass.table_exists()
Code example #7
    def query_raw(self, sql_statement):
        """
        Execute a SQL statement and attempt to convert the query results
        to the appropriate ORM objects (those passed to the query constructor).
        Args:
            sql_statement (str):  A SQL statement to execute.
        Returns:
            (Iterable[Any]):  An iterable of the query results.
        """
        result_proxy = DatabaseContext.get_engine().execute(sql_statement)
        return self.instances(result_proxy)
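
A usage sketch; TestModel is borrowed from test1 further below, and the `unittest.test_model` table name in the SQL string is an assumption made for illustration.

# Sketch: run hand-written SQL and map the rows onto TestModel instances.
# The table name in the SQL string is assumed, not taken from the project.
with DatabaseContext(project=UNIT_TEST_PROJECT, default_dataset='unittest'):
    results = TestModel.query().query_raw(
        'SELECT * FROM `unittest.test_model` WHERE column1 = 1')
    print(list(results))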
Code example #8
File: tables.py Project: AnthonyPerez/bigorm
    def table_get(cls):
        """
        Returns:
            (google.cloud.bigquery.table.Table):  The table this class maps to.
        Raises:
            (google.api_core.exceptions.NotFound): If the table does not exist.
        """
        client = DatabaseContext.get_session().connection().connection._client
        table_ref = _get_table_ref(cls.__table__.name, client)
        table = client.get_table(table_ref)
        return table
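
For instance, the returned Table object can be used to inspect the live schema and row count (a sketch; TestModel is borrowed from test1 below):

# Sketch: fetch the google.cloud.bigquery Table and inspect its metadata.
with DatabaseContext(project=UNIT_TEST_PROJECT):
    table = TestModel.table_get()
    print([field.name for field in table.schema])
    # num_rows comes from table metadata and may lag recent streaming inserts.
    print(table.num_rows)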
Code example #9
    def create_load_job(cls, instances):
        """
        Load instances through a load job.
        The job is asynchronous but this function will wait for the job to complete.
        https://cloud.google.com/bigquery/quotas#load_jobs
        Load jobs are limited to 1,000 per table per day.
        The maximum row size is 100MB.
        https://cloud.google.com/bigquery/docs/loading-data-cloud-storage-json

        Args:
            instances (List[BigQueryCRUDMixin]):  Instances of cls.
                These will be appended to the table; duplicates will be added.
                Table metadata is eventually consistent, which means that if
                you've recently created this table or changed the schema, this
                method may incorrectly report no errors.
        """
        if not all([type(inst) == cls for inst in instances]):
            raise BigQueryOrmError(
                'Got invalid class in {}\'s create method'.format(cls))

        instances_json_str = '\n'.join(
            [instance.serialize_as_json() for instance in instances])
        json_bytes_file = six.BytesIO(instances_json_str.encode('utf-8'))

        client = DatabaseContext.get_session().connection().connection._client
        table_ref = _get_table_ref(cls.__table__.name, client)

        job_config = bigquery_job.LoadJobConfig(
            autodetect=False,
            create_disposition=bigquery_job.CreateDisposition.CREATE_NEVER,
            ignore_unknown_values=False,
            source_format=bigquery_job.SourceFormat.NEWLINE_DELIMITED_JSON,
            write_disposition=bigquery_job.WriteDisposition.WRITE_APPEND)
        load_job = client.load_table_from_file(file_obj=json_bytes_file,
                                               destination=table_ref,
                                               job_config=job_config)

        try:
            load_job.result()
        except Exception as e:
            raise exceptions.DatabaseError('{}\n{}\n{}\n{}'.format(
                load_job.errors,
                '{}({})'.format(type(e), e),
                load_job.error_result,
                'This error may have occurred because a column'
                ' default value could not be created locally.  Only'
                ' scalar defaults or python callables are supported.',
            ))

        if ((load_job.error_result and len(load_job.error_result) > 0)
                or (load_job.errors and len(load_job.errors) > 0)):
            raise exceptions.DatabaseError('{}\n{}'.format(
                load_job.errors, load_job.error_result))
Code example #10
    def parse_from_pandas(cls, df, relabel=None, if_exists='append'):
        """
        Create instances from a pandas DataFrame.  If the table does not
        exist it will be created (see if_exists).

        Args:
            df (pandas.DataFrame):  The data frame to be converted
                to class instances.  Column names must match the names of
                the properties that represent the columns in the ORM class;
                parsing will fail otherwise (see relabel).
            relabel (Optional[Mapping[str, str]]):
                A dictionary that maps pandas column names to names
                of properties in the ORM.  This is required if
                the pandas column names differ from the property
                names representing columns in this class.
            if_exists (str):  One of {'fail', 'replace', 'append'}, default 'append'.
                How to behave if the table already exists.
                fail: Raise a ValueError.
                replace: Drop the table before inserting new values.
                append: Insert new values to the existing table.
        Returns:
            (List[BigQueryCRUDMixin]):  Returns a list
                of class instances.
        """
        if relabel is None:
            relabel = {}
        instances = []

        def method(sqlalchemy_table, conn, keys, data_iter):
            """
            Args:
                sqlalchemy_table (): Ignored
                conn (): Ignored
                keys (Tuple): The column names.
                data_iter (Iterable[Tuple]):  An iterable
                    iterating over column values.  Matches keys.
            """
            keys = tuple(relabel.get(key, key) for key in keys)
            for params in data_iter:
                instances.append(cls(**dict(zip(keys, params))))

        table_name = cls.__tablename__
        df.to_sql(
            name=table_name,
            con=DatabaseContext.get_engine(),
            if_exists=if_exists,
            index=False,
            method=method,
        )
        return instances
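
A sketch of the relabel path, where a DataFrame column name differs from the ORM property name; TestModel9 is reused from the tests below and the 'value' column name is hypothetical.

# Sketch: map the hypothetical 'value' column onto TestModel9's 'double'.
df = pd.DataFrame({'intr': [1, 2], 'value': [0.5, 1.5]})
with DatabaseContext(project=UNIT_TEST_PROJECT):
    instances = TestModel9.parse_from_pandas(
        df, relabel={'value': 'double'}, if_exists='append')
    print(instances)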
Code example #11
    def _create_streaming(cls, instances):
        # Stream the instances into the table via the BigQuery insert_rows API.
        client = DatabaseContext.get_session().connection().connection._client
        table_ref = _get_table_ref(cls.__table__.name, client)
        table = client.get_table(table_ref)

        # https://cloud.google.com/bigquery/quotas#streaming_inserts
        empty_row = {field.name: None for field in table.schema}
        seq_of_parameters = [inst.serialize_as_dict() for inst in instances]
        seq_of_parameters = [
            dict(empty_row, **params) for params in seq_of_parameters
        ]
        errors = client.insert_rows(table, seq_of_parameters)
        if len(errors) > 0:
            raise exceptions.DatabaseError(errors)
Code example #12
    def serialize_as_dict(self, excluded_keys=None):
        """
        Returns this object as a dictionary.
        Populates default values where possible and calls bind_processor
        on each non-None value
        (e.g. bind_processor will turn geometries into geojson strings).

        Args:
            excluded_keys (Iterable[str]):  A list of keys to exclude.
        Returns:
            (Dict[str, Any]):  Returns the dict representation of this object
                with values populated by their defaults if available.
        """
        if excluded_keys is None:
            excluded_keys = set()
        else:
            excluded_keys = set(excluded_keys)
        attr_names = JsonSerializableOrmMixin.get_entity_loaded_property_names_to_columns(
            self)

        json_out = {}
        for property_name, columns in attr_names.items():
            if property_name in excluded_keys:
                continue

            if len(columns) > 1:
                raise ValueError(
                    'serialize_as_json does not support composite types.')
            column = columns[0]

            key = column.key
            value = getattr(self, property_name)

            if value is None:
                if column.default is not None:
                    default_arg = column.default.arg
                    if column.default.is_callable:
                        value = default_arg(None)
                    elif column.default.is_scalar:
                        value = default_arg

            if value is not None:
                bind_processor = column.type.bind_processor(
                    dialect=DatabaseContext.get_session().bind.dialect)
                if bind_processor is not None:
                    value = bind_processor(value)

            json_out[key] = value

        return json_out
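
A sketch reusing TestModel6 from test6 below; excluded_keys drops the named properties from the returned dict.

# Sketch: serialize an instance while excluding the geometry properties.
with DatabaseContext(project=UNIT_TEST_PROJECT):
    inst = TestModel6(intr=4, boolean=True, string='str')
    as_dict = inst.serialize_as_dict(excluded_keys=['geojson', 'wkt'])
    print(as_dict)  # contains no 'geojson' or 'wkt' entries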
Code example #13
def test9():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        TestModel9.table_create()

        date = datetime.datetime.utcnow()
        point = {'type': 'Point', 'coordinates': [5, 7]}

        df = pd.DataFrame({
            'intr': [1, 2, 3],
            'created_date': [date] * 3,
            'geojson': [point] * 3,
        })
        instances = TestModel9.parse_from_pandas(df)

        assert instances == [
            TestModel9(intr=1, created_date=date, geojson=point),
            TestModel9(intr=2, created_date=date, geojson=point),
            TestModel9(intr=3, created_date=date, geojson=point),
        ]

        geojson = {
            'features': [
                {
                    'properties': {
                        'intr': 1,
                    },
                    'geometry': point,
                },
                {
                    'properties': {
                        'intr': 2,
                    },
                    'geometry': point,
                },
                {
                    'properties': {
                        'intr': 3,
                    },
                    'geometry': point,
                },
            ]
        }

        assert instances == TestModel9.parse_from_geojson(
            geojson,
            geometry_property_name='geojson',
            defaults={'created_date': date})

        TestModel9.table_delete()
Code example #14
def test_11():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        inst = TestModel11(intr=1, date=datetime.date(2019, 1, 1))
        inst2 = TestModel11(intr=1, date=None)

        inst.__repr__()  # This presents objects in serialized form.
        inst.serialize_as_json()

        inst2.__repr__()  # This presents objects in serialized form.
        inst2.serialize_as_json()

        TestModel11.table_create()
        TestModel11.create_load_job([inst, inst2])
        print(TestModel11.query().all_as_list())
        TestModel11.table_delete()
Code example #15
def test8():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        TestModel8.table_create()

        df = pd.DataFrame({
            'intr': [1, 2, 3],
            'created_date': [datetime.datetime.utcnow()] * 3
        })
        TestModel8.create_from_pandas(df)

        table_results = TestModel8.query().all_as_list()
        print(table_results)
        assert len(table_results) == 3

        TestModel8.table_delete()
Code example #16
    def query_empty(cls, *args, **kwargs):
        """
        https://docs.sqlalchemy.org/en/latest/orm/query.html#sqlalchemy.orm.query.Query

        Selects no columns by default.

        Args:
            *args (Union[Column, BigQueryModel]):
                Columns or classes matching what the sql statement is expected to return
                (e.g. what it selects).
            **kwargs (Any):  Passed to sqlalchemy.orm.query
        Returns:
            (ReadOnlyBigQueryQuery):  A query object that wraps sqlalchemy.orm.Query.
        """
        return ReadOnlyBigQueryQuery(DatabaseContext.get_session().query(
            *args, **kwargs))
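
For example, query_empty is convenient for aggregates, where only a computed expression should be selected (a sketch; TestModel is borrowed from test1 below):

# Sketch: count rows without selecting whole TestModel objects.
with DatabaseContext(project=UNIT_TEST_PROJECT):
    counts = TestModel.query_empty(
        sqlalchemy.func.count(TestModel.id)).all_as_list()
    print(counts)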
Code example #17
    def query(cls, *args, **kwargs):
        """
        https://docs.sqlalchemy.org/en/latest/orm/query.html#sqlalchemy.orm.query.Query

        Selects all columns of the class and any additional
        ORM objects requested through arguments.

        Args:
            *args (Union[Column, BigQueryModel]):
                Columns or classes matching what the sql statement is expected to return
                (e.g. what it selects).
            **kwargs (Any):  Passed to sqlalchemy.orm.query
        Returns:
            (ReadOnlyBigQueryQuery):  A query object that wraps sqlalchemy.orm.Query.
        """
        return ReadOnlyBigQueryQuery(DatabaseContext.get_session().query(
            cls, *args, **kwargs))
Code example #18
def test7():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        TestModel7_1.table_create()
        TestModel7_2.table_create()

        TestModel7_1.create([
            TestModel7_1(id=1),
            TestModel7_1(id=2),
            TestModel7_1(id=3),
        ])
        TestModel7_1.create_load_job([
            TestModel7_1(id=4),
            TestModel7_1(id=5),
            TestModel7_1(id=6),
        ])

        TestModel7_2.create([
            TestModel7_2(id=1),
            TestModel7_2(id=2),
            TestModel7_2(id=3),
        ])
        TestModel7_2.create_load_job([
            TestModel7_2(id=4),
            TestModel7_2(id=5),
            TestModel7_2(id=6),
        ])

        m71s = TestModel7_1.query().all_as_list()
        m72s = TestModel7_2.query().all_as_list()

        m71s = [m for _, m in sorted((m.id, m) for m in m71s)]
        m72s = [m for _, m in sorted((m.id, m) for m in m72s)]

        print(m71s)
        print(m72s)
        assert m71s == m72s

        joined_result = TestModel7_1.query(TestModel7_2.id.label('id2')).join(
            TestModel7_2, TestModel7_2.id == TestModel7_1.id).all_as_list()

        print(joined_result)
        assert len(joined_result) == 6

        TestModel7_1.table_delete()
        TestModel7_2.table_delete()
Code example #19
File: tables.py Project: AnthonyPerez/bigorm
def test_partition():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        TestModelCluster3.table_create()

        now = datetime.datetime.utcnow()
        one_day_old = now - datetime.timedelta(days=1)
        two_days_old = now - datetime.timedelta(days=2)
        three_days_old = now - datetime.timedelta(days=3)

        instances = [
            TestModelCluster3(intr=i,
                              string='load_str1',
                              boolean=False,
                              partition_column=now) for i in range(200)
        ]
        instances += [
            TestModelCluster3(intr=i,
                              string='load_str1',
                              boolean=True,
                              partition_column=one_day_old) for i in range(200)
        ]
        instances += [
            TestModelCluster3(intr=i,
                              string='load_str2',
                              boolean=False,
                              partition_column=two_days_old)
            for i in range(200)
        ]
        instances += [
            TestModelCluster3(intr=i,
                              string='load_str2',
                              boolean=True,
                              partition_column=three_days_old)
            for i in range(200)
        ]

        TestModelCluster3.create_load_job(instances)

        query_result = TestModelCluster3.query_empty(
            TestModelCluster3.intr).filter_by(string='load_str1',
                                              boolean=False).all_as_list()
        assert len(query_result) == 200

        TestModelCluster3.table_delete()
Code example #20
def test2():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        TestModel2.table_create()

        TestModel2.create([
            TestModel2(id=1),
            TestModel2(id=2, c2=4),
            TestModel2(id=3, c1=None, c2=3),
            TestModel2(id=4, c1=1),
        ])

        TestModel2.create([
            TestModel2(id=None),
            TestModel2(id=None, c2=5),
            TestModel2(id=None, c1=None, c2=6),
            TestModel2(id=None, c1=1),
        ])

        TestModel2.table_delete()
Code example #21
def test1():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        # Leaving this comment for illustrative purposes, but it's not what we want to do for this test.
        # Base.metadata.create_all(DatabaseContext.get_engine())
        TestModel.table_create()

        TestModel.create_load_job([
            TestModel(id=1),
            TestModel(id=2),
            TestModel(id=2),
            TestModel(id=2, column1=2),
            TestModel(id=3),
            TestModel(id=4),
            TestModel(id=4, column1=2, column2=5),
        ])

        id1 = TestModel.query().filter_by(
            id=1).one()  # Get one or raise an error
        column1_is_1 = list(TestModel.query().filter_by(
            column1=1).all())  # Get all as iterable

        print(id1)
        print(column1_is_1)

        assert id1 == TestModel(id=1, column1=1, column2=2)
        assert len(column1_is_1) == 5

        update_count = (TestModel.query().filter(TestModel.id >= 2).filter(
            TestModel.id <= 3).update({'column1': TestModel.column1 + 3}))
        print(update_count)
        assert update_count == 4

        column1_is_4 = list(TestModel.query().filter_by(column1=4).all())
        print(column1_is_4)
        assert len(column1_is_4) == 3

        delete_count = (TestModel.query().filter(
            TestModel.column1 >= 3).delete())

        print(delete_count)
        assert delete_count == 4

        TestModel.table_delete()
Code example #22
def test_10():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        TestModel10_1.table_create()
        TestModel10_2.table_create()
        TestModel10_3.table_create()

        instances_1 = [
            TestModel10_1(intr=1, double=1.0),
            TestModel10_1(intr=2, double=2.0),
        ]
        instances_2 = [
            TestModel10_2(intr=1, string='1'),
            TestModel10_2(intr=2, string='2'),
        ]

        TestModel10_1.create_load_job(instances_1)
        TestModel10_2.create_load_job(instances_2)

        join_query = TestModel10_1.query(TestModel10_2.string).join(
            TestModel10_2, TestModel10_1.intr == TestModel10_2.intr)

        TestModel10_3.create_from_query(join_query)

        instances_3 = TestModel10_3.query().all_as_list()
        instances_3 = [
            i for _, i in sorted([(i.intr, i) for i in instances_3])
        ]
        expected_instances_3 = [
            TestModel10_3(intr=1, double=1.0, string='1'),
            TestModel10_3(intr=2, double=2.0, string='2'),
        ]

        for actual, expected in zip(instances_3, expected_instances_3):
            print('Actual')
            print(actual)
            print('Expected')
            print(expected)
            assert actual == expected

        TestModel10_1.table_delete()
        TestModel10_2.table_delete()
        TestModel10_3.table_delete()
Code example #23
def test_geo():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        TestGeoModel.table_create()

        TestGeoModel.create([
            TestGeoModel(
                id=1,
                geometry1='POLYGON((0 0,1 0,1 1,0 1,0 0))',
                geometry2={
                    "type": "Point",
                    "coordinates": [5, 7]
                },
            ),
            TestGeoModel(
                id=2,
                geometry1='POLYGON((1 1,2 1,2 2,1 2,1 1))',
                geometry2={
                    "type": "Point",
                    "coordinates": [5, 7]
                },
            ),
        ])

        id1 = TestGeoModel.query().filter(
            sqlalchemy.func.ST_Contains(
                TestGeoModel.geometry1,
                sqlalchemy.func.ST_GeogFromText('POINT(0.5 0.5)'))).one()
        print(id1)
        assert id1.id == 1

        print(list(TestGeoModel.query().all()))

        assert id1.geometry2['type'] == 'Point'
        assert id1.geometry2['coordinates'] == [5, 7]

        TestGeoModel.table_delete()
Code example #24
File: tables.py Project: AnthonyPerez/bigorm
    def table_create(cls):
        """
        Creates the table corresponding to this class
        """
        engine = DatabaseContext.get_engine()
        cls.__table__.create(engine)
Code example #25
File: tables.py Project: AnthonyPerez/bigorm
    def table_delete(cls):
        """
        Deletes the table corresponding to this class
        """
        engine = DatabaseContext.get_engine()
        cls.__table__.drop(engine)
Code example #26
def test6():
    with DatabaseContext(project=UNIT_TEST_PROJECT):

        hole_comes_second = TestModel6(
            boolean=True,
            geojson={
                "type": "Polygon",
                "coordinates": [
                    [[-120, 60], [120, 60], [120, -60], [-120, -60],
                     [-120, 60]],
                    [[-60, 30], [60, 30], [60, -30], [-60, -30], [-60, 30]],
                ]
            })

        hole_comes_first = TestModel6(
            boolean=True,
            geojson={
                "type": "Polygon",
                "coordinates": [
                    [[-60, 30], [60, 30], [60, -30], [-60, -30], [-60, 30]],
                    [[-120, 60], [120, 60], [120, -60], [-120, -60],
                     [-120, 60]],
                ]
            })

        instances = [
            TestModel6(
                intr=4,
                double=1. / 3.,
                boolean=True,
                string='str',
                wkt='POLYGON((0 0,1 0,1 1,0 1,0 0))',
                geojson={
                    "type": "Point",
                    "coordinates": [5, 7]
                },
            ),
            TestModel6(
                intr=4,
                boolean=False,
            ),
            TestModel6(
                intr=3,
                boolean=False,
            ),
        ]

        print(instances)
        assert instances[0].geojson['coordinates'] == [5, 7]

        json_repr = {
            'intr_label': 4,
            'intr_def_label': 5,
            'double': 1. / 3.,
            'boolean': True,
            'string': 'str',
            'created_date': None,
            'wkt': str({
                "type": "Polygon",
                "coordinates": [[
                    [0., 0.],
                    [1., 0.],
                    [1., 1.],
                    [0., 1.],
                    [0., 0.],
                ]],
            }).replace("'", '"'),
            'geojson': str({
                "type": "Point",
                "coordinates": [5., 7.]
            }).replace("'", '"'),
        }

        as_json = instances[0].serialize_as_dict()
        print(as_json)
        as_json['created_date'] = None
        assert as_json == json_repr

        json_repr = {
            'intr_label': 4,
            'intr_def_label': 5,
            'double': None,
            'boolean': False,
            'string': None,
            'created_date': None,
            'wkt': None,
            'geojson': None,
        }

        as_json = instances[1].serialize_as_dict()
        print(as_json)
        as_json['created_date'] = None
        assert as_json == json_repr

        TestModel6.table_create()

        TestModel6.create_load_job(instances)

        query_result = TestModel6.query().all_as_list()
        print(query_result)
        assert len(query_result) == 3

        TestModel6.table_delete()
Code example #27
def test6_2():
    with DatabaseContext(project=UNIT_TEST_PROJECT):

        exterior = [[-120, 60], [120, 60], [120, -60], [-120, -60], [-120, 60]]

        interior = [[-60, 30], [60, 30], [60, -30], [-60, -30], [-60, 30]]

        hole_comes_second_list = [
            TestModel6(boolean=True,
                       geojson={
                           "type": "Polygon",
                           "coordinates": [
                               list(exterior),
                               list(interior),
                           ]
                       }),
            TestModel6(boolean=True,
                       geojson={
                           "type":
                           "Polygon",
                           "coordinates": [
                               list(reversed(exterior)),
                               list(interior),
                           ]
                       }),
            TestModel6(boolean=True,
                       geojson={
                           "type":
                           "Polygon",
                           "coordinates": [
                               list(exterior),
                               list(reversed(interior)),
                           ]
                       }),
            TestModel6(boolean=True,
                       geojson={
                           "type":
                           "Polygon",
                           "coordinates": [
                               list(reversed(exterior)),
                               list(reversed(interior)),
                           ]
                       }),
        ]

        hole_comes_first = TestModel6(boolean=True,
                                      geojson={
                                          "type": "Polygon",
                                          "coordinates": [
                                              interior,
                                              exterior,
                                          ]
                                      })

        TestModel6.table_create()

        hole_2nd_dicts = [
            m.serialize_as_dict() for m in hole_comes_second_list
        ]
        for serialized_form in hole_2nd_dicts:
            # created_date will be different for each object
            serialized_form.pop('created_date', None)
        for serialized_form in hole_2nd_dicts:
            assert serialized_form == hole_2nd_dicts[0]
            print(serialized_form)

        bad_input_fails = False
        try:
            hole_comes_first.serialize_as_dict()
        except ValueError:
            print('hole first fails')
            bad_input_fails = True

        if not bad_input_fails:
            raise RuntimeError(
                'Polygon with hole first successfully serialized when it shouldn\'t be.'
            )

        TestModel6.create_load_job(hole_comes_second_list)

        query_result = TestModel6.query().all_as_list()
        print(query_result)
        assert len(query_result) == 4

        TestModel6.table_delete()
Code example #28
def test_geojson_serialize():
    """
    test:
        TestModel9.query(...).all_as_dicts()
        TestModel9.query(...).all_as_geojson()
        TestModel9.serialize_as_geojson()

        serialize_as_geojson and parse_from_geojson should be inverses.
    """
    with DatabaseContext(project=UNIT_TEST_PROJECT):

        date = datetime.datetime.utcnow()
        date_as_str = date.strftime('%Y-%m-%d %H:%M:%S.%f')
        instances = [
            TestModel9(intr=i,
                       created_date=date_as_str,
                       geojson={
                           'type': 'Point',
                           'coordinates': [5.0, i]
                       }) for i in range(3)
        ]
        expected_geojson = {
            'type': 'FeatureCollection',
            'features': [{
                'type': 'Feature',
                'geometry': {
                    'type': 'Point',
                    'coordinates': [5.0, float(i)]
                },
                'properties': {
                    'intr': i,
                    'double': None,
                    'created_date': date_as_str
                }
            } for i in range(3)]
        }
        expected_geojson_str = json.dumps(expected_geojson, sort_keys=True)
        instances_as_dicts = [{
            'intr': instance.intr,
            'created_date': instance.created_date,
            'geojson': instance.geojson,
            'double': instance.double,
        } for instance in instances]

        # test parse_from_geojson
        parsed_from_geojson = TestModel9.parse_from_geojson(
            expected_geojson, geometry_property_name='geojson')
        assert parsed_from_geojson == instances

        # test parse_from_geojson
        parsed_from_geojson = TestModel9.parse_from_geojson(
            json.loads(expected_geojson_str), geometry_property_name='geojson')
        assert parsed_from_geojson == instances

        # test serialize_as_geojson
        serialized_as_geojson = TestModel9.serialize_as_geojson(
            instances, geometry_column='geojson', excluded_keys=None)
        assert serialized_as_geojson == expected_geojson_str

        # test parse_from_geojson and serialize_as_geojson consistency
        assert (TestModel9.parse_from_geojson(
            json.loads(
                TestModel9.serialize_as_geojson(instances,
                                                geometry_column='geojson',
                                                excluded_keys=None)),
            geometry_property_name='geojson') == instances)

        # Goes from unittest to integration test below this line.

        TestModel9.table_create()
        TestModel9.create(instances)

        all_as_dicts = TestModel9.query().order_by(
            TestModel9.intr).all_as_dicts()
        all_as_dicts = [
            dict(
                d, **{
                    'created_date':
                    d['created_date'].strftime('%Y-%m-%d %H:%M:%S.%f')
                }) for d in all_as_dicts
        ]
        assert all_as_dicts == instances_as_dicts

        all_as_geojson = TestModel9.query().order_by(
            TestModel9.intr).all_as_geojson(geometry_column='geojson')
        assert json.loads(all_as_geojson) == json.loads(expected_geojson_str)

        all_as_geojson_empty_query = TestModel9.query_empty(
            TestModel9.intr).order_by(
                TestModel9.intr).all_as_geojson(geometry_column=None)
        assert (json.loads(all_as_geojson_empty_query) == {
            'type': 'FeatureCollection',
            'features': [{
                'type': 'Feature',
                'geometry': None,
                'properties': {
                    'intr': i
                }
            } for i in range(3)]
        })

        TestModel9.table_delete()
Code example #29
def _open_context():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        DatabaseContext.get_session()
Code example #30
def test_multithread():
    with DatabaseContext(project=UNIT_TEST_PROJECT):
        pass
    thread_id = _thread.start_new_thread(_open_context, ())