from contextlib import closing
from io import StringIO
from subprocess import Popen, PIPE

import psycopg2
from nose.tools import eq_

# Project-local helpers (connect, DataSource, EntityType, Profile,
# import_csv, translate_postgresql_exception) are assumed to be importable
# from the package under test; their exact module paths are not shown here.


def test_cmd_1():
    file_path = "/tmp/test.csv"
    datasource_name = "test-src"

    with open(file_path, "wt") as csv_file:
        csv_file.write(
            "type, name, a, b, c\n"
            "Node, 001, 12, 232, high\n"
            "Node, 002, 43, 334, medium\n")

    with closing(connect()) as conn:
        with closing(conn.cursor()) as cursor:
            DataSource.from_name(cursor, datasource_name)
            EntityType.from_name(cursor, 'Node')

        conn.commit()

    cmd = [
        "import-csv",
        "--timestamp", "now",
        "--identifier", "Node={name}",
        "--datasource", datasource_name,
        file_path
    ]

    p = Popen(cmd, stdout=PIPE, stdin=PIPE, stderr=PIPE)

    stdout_data, stderr_data = p.communicate(input=None)

    eq_('', stderr_data)
    eq_('', stdout_data)

def test_translate_postgresql_exception_decorated():
    """
    The translate decorator should do the same as plain translation calls.
    """
    query = 'SELECT 1 FROM "non-existing-table"'

    with closing(connect()) as conn:
        with closing(conn.cursor()) as cursor:
            cursor.execute(query)

def test_duplicatetableerror():
    create_table_query = (
        "CREATE TABLE test("
        "id integer, "
        "name text)")

    with closing(connect()) as conn:
        with closing(conn.cursor()) as cursor:
            cursor.execute(create_table_query)
            cursor.execute(create_table_query)

def test_translate_postgresql_exception():
    """
    The translated exception should be NoSuchTable.
    """
    query = 'SELECT 1 FROM "non-existing-table"'

    with closing(connect()) as conn:
        with closing(conn.cursor()) as cursor:
            try:
                cursor.execute(query)
            except psycopg2.DatabaseError as exc:
                raise translate_postgresql_exception(exc)

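# The "decorated" test above exercises a translation decorator, but the
# decorator itself is not shown in this file. Below is a minimal sketch of
# what such a decorator could look like, built on the same
# translate_postgresql_exception call used in the plain test; the name
# `translate_exceptions` is an assumption, not the library's actual API.
from functools import wraps


def translate_exceptions(f):
    """Re-raise psycopg2 DatabaseErrors as their translated counterparts."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except psycopg2.DatabaseError as exc:
            raise translate_postgresql_exception(exc)

    return wrapper
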
def test_datatypemismatcherror():
    create_table_query = (
        "CREATE TABLE test("
        "id integer, "
        "name text)")

    insert_query = (
        'INSERT INTO test(id, name) '
        'VALUES (%s, %s)')

    with closing(connect()) as conn:
        with closing(conn.cursor()) as cursor:
            cursor.execute(create_table_query)
            cursor.execute(insert_query, ("first", "bob"))

def test_nosuchcolumnerror():
    create_table_query = (
        "CREATE TABLE test("
        "id integer, "
        "name text)")

    select_query = (
        'SELECT "non-existing-column" '
        'FROM test')

    with closing(connect()) as conn:
        with closing(conn.cursor()) as cursor:
            cursor.execute(create_table_query)
            cursor.execute(select_query)

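# The following test covers two distinct uses of timestamps: the row
# timestamp is parsed from the "ts" column ("from_column", format
# %Y%m%d_%H%M, UTC), while the "created" column is imported as plain data
# with the "timestamp" datatype.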
def test_timestamp_as_data():
    with closing(connect()) as conn:
        readfile = StringIO(
            u'ts;CIC;CCR;Drops;created\n'
            '20140511_1300;10023;0.9919;17;20111111_0000\n'
            '20140511_1300;10047;0.9963;18;20101010_0000\n')

        profile = Profile({
            "storage": {
                "type": "trend",
                "config": {
                    "granularity": 86400,
                    "datasource": "integration_test",
                    "timestamp_is_start": False
                }
            },
            "identifier": {
                "template": "Cell={CIC}",
                "regex": "(.*)"
            },
            "timestamp": {
                "type": "from_column",
                "config": {
                    "format": "%Y%m%d_%H%M",
                    "name": "ts",
                    "timezone": "UTC"
                }
            },
            "identifier_is_alias": False,
            "field_selector": {
                "type": "all"
            },
            "timestamp_is_start": True,
            "character_encoding": "utf-8",
            "dialect": {
                "type": "auto"
            },
            "fields": {
                "created": {
                    "datatype": "timestamp",
                    "string_format": {
                        "format": "%Y%m%d_%H%M"
                    }
                }
            }
        })

        import_csv(conn, profile, readfile)

def setup(self):
    self.conn = connect()
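
# setup() opens a connection, but no matching cleanup appears in this
# fragment. A teardown counterpart is sketched below as an assumption about
# the fixture's intent; it is not part of the original class.
def teardown(self):
    self.conn.close()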