diff --git a/volttrontesting/fixtures/docker_wrapper.py b/volttrontesting/fixtures/docker_wrapper.py
index 549d53d2b6..133fceab9c 100644
--- a/volttrontesting/fixtures/docker_wrapper.py
+++ b/volttrontesting/fixtures/docker_wrapper.py
@@ -13,7 +13,7 @@
 
 @contextlib.contextmanager
 def create_container(image_name: str, ports: dict = None, env: dict = None, command: (list, str) = None,
-                     volumes: (dict, list) = None, startup_time_seconds: int = 30) -> \
+                     startup_time_seconds: int = 30) -> \
         (docker.models.containers.Container, None):
     """
     Creates a container instance in a context that will clean up after itself.
@@ -54,8 +54,7 @@ def create_container(image_name: str, ports: dict = None, env: dict = None, comm
             # So all tags aren't pulled. According to docs https://docker-py.readthedocs.io/en/stable/images.html.
             full_docker_image = full_docker_image + ":latest"
             client.images.pull(full_docker_image)
 
-        container = client.containers.run(image_name, ports=ports, environment=env, auto_remove=True, detach=True,
-                                          volumes=volumes)
+        container = client.containers.run(image_name, ports=ports, environment=env, auto_remove=True, detach=True)
     except (ImageNotFound, APIError, RuntimeError) as e:
         raise RuntimeError(e)
diff --git a/volttrontesting/platform/dbutils/postgresqlfuncts_tests/init_metadata_only/init-metadata-db.sh b/volttrontesting/platform/dbutils/postgresqlfuncts_tests/init_metadata_only/init-metadata-db.sh
deleted file mode 100644
index de9e3e0bd7..0000000000
--- a/volttrontesting/platform/dbutils/postgresqlfuncts_tests/init_metadata_only/init-metadata-db.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-set -e
-
-psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
-    CREATE DATABASE test_historian;
-    \c test_historian
-
-    CREATE TABLE metadata
-        (table_id VARCHAR(512) PRIMARY KEY NOT NULL,
-         table_name VARCHAR(512) NOT NULL);
-    INSERT INTO metadata VALUES ('data_table', 'data');
-    INSERT INTO metadata VALUES ('topics_table', 'topics');
-    INSERT INTO metadata VALUES ('meta_table', 'meta');
-
-    GRANT ALL PRIVILEGES ON DATABASE test_historian TO postgres;
-EOSQL
diff --git a/volttrontesting/platform/dbutils/postgresqlfuncts_tests/init_test_db/init-test-db.sh b/volttrontesting/platform/dbutils/postgresqlfuncts_tests/init_test_db/init-test-db.sh
deleted file mode 100644
index eb7021d4ef..0000000000
--- a/volttrontesting/platform/dbutils/postgresqlfuncts_tests/init_test_db/init-test-db.sh
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/bin/bash
-set -e
-
-psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
-    CREATE DATABASE test_historian;
-    \c test_historian
-
-    CREATE TABLE data (
-        ts TIMESTAMP NOT NULL,
-        topic_id INTEGER NOT NULL,
-        value_string TEXT NOT NULL,
-        UNIQUE (topic_id, ts));
-    INSERT INTO data VALUES ('2020-06-01 12:30:59', 13, '[2,3]');
-    INSERT INTO data VALUES ('2020-06-01 06:30:59', 42, '2');
-    INSERT INTO data VALUES ('2020-06-01 12:31:59', 43, '8');
-
-    CREATE TABLE IF NOT EXISTS topics (
-        topic_id SERIAL PRIMARY KEY NOT NULL,
-        topic_name VARCHAR(512) NOT NULL,
-        UNIQUE (topic_name));
-    INSERT INTO topics (topic_name) VALUES ('football');
-    INSERT INTO topics (topic_name) VALUES ('baseball');
-    INSERT INTO topics (topic_name) VALUES ('foobar');
-    INSERT INTO topics (topic_name) VALUES ('xctljglfkjsgfklsd');
-
-    CREATE TABLE IF NOT EXISTS meta (
-        topic_id INTEGER PRIMARY KEY NOT NULL,
-        metadata TEXT NOT NULL);
-
-    CREATE TABLE metadata
-        (table_id VARCHAR(512) PRIMARY KEY NOT NULL,
-         table_name VARCHAR(512) NOT NULL);
-    INSERT INTO metadata VALUES ('data_table', 'data');
-    INSERT INTO metadata VALUES ('topics_table', 'topics');
-    INSERT INTO metadata VALUES ('meta_table', 'meta');
-
-    CREATE TABLE IF NOT EXISTS aggregate_topics (
-        agg_topic_id SERIAL PRIMARY KEY NOT NULL,
-        agg_topic_name VARCHAR(512) NOT NULL,
-        agg_type VARCHAR(512) NOT NULL,
-        agg_time_period VARCHAR(512) NOT NULL,
-        UNIQUE (agg_topic_name, agg_type, agg_time_period));
-    INSERT INTO aggregate_topics (agg_topic_name, agg_type, agg_time_period) VALUES ('some_agg_topic', 'AVG', '2019');
-
-    CREATE TABLE IF NOT EXISTS aggregate_meta (
-        agg_topic_id INTEGER PRIMARY KEY NOT NULL,
-        metadata TEXT NOT NULL);
-    INSERT INTO aggregate_meta VALUES (1, '{"configured_topics": "meaning of life"}');
-
-    CREATE TABLE avg_1776 (
-        ts timestamp NOT NULL,
-        topic_id INTEGER NOT NULL,
-        value_string TEXT NOT NULL,
-        topics_list TEXT,
-        UNIQUE(ts, topic_id));
-    CREATE INDEX IF NOT EXISTS idx_avg_1776 ON avg_1776 (ts ASC);
-
-    GRANT ALL PRIVILEGES ON DATABASE test_historian TO postgres;
-EOSQL
diff --git a/volttrontesting/platform/dbutils/postgresqlfuncts_tests/test_postgresqlfuncts.py b/volttrontesting/platform/dbutils/test_postgresqlfuncts.py
similarity index 63%
rename from volttrontesting/platform/dbutils/postgresqlfuncts_tests/test_postgresqlfuncts.py
rename to volttrontesting/platform/dbutils/test_postgresqlfuncts.py
index 6bb52922ed..56f97b6c4f 100644
--- a/volttrontesting/platform/dbutils/postgresqlfuncts_tests/test_postgresqlfuncts.py
+++ b/volttrontesting/platform/dbutils/test_postgresqlfuncts.py
@@ -1,6 +1,5 @@
 import contextlib
 import datetime
-import os
 
 from time import time
 
@@ -12,35 +11,18 @@
 from volttrontesting.fixtures.docker_wrapper import create_container
 from volttrontesting.utils.utils import get_rand_port
 
-
 # Current documentation claims that we have tested Historian on Postgres 10
 # See https://volttron.readthedocs.io/en/develop/core_services/historians/SQL-Historian.html#postgresql-and-redshift
 IMAGES = ["postgres:10"]
-ALLOW_CONNECTION_TIME = 3
+ALLOW_CONNECTION_TIME = 2
 TEST_DATABASE = "test_historian"
 ROOT_USER = "postgres"
 ROOT_PASSWORD = "password"
 ENV_POSTGRESQL = {
     "POSTGRES_USER": ROOT_USER,  # defining user not necessary but added to be explicit
     "POSTGRES_PASSWORD": ROOT_PASSWORD,
-}
-ENV_POSTGRESQL_WITH_DB = {
-    "POSTGRES_USER": ROOT_USER,  # defining user not necessary but added to be explicit
-    "POSTGRES_PASSWORD": ROOT_PASSWORD,
     "POSTGRES_DB": TEST_DATABASE,
 }
-VOLUMES_TEST_DB = {
-    "".join([os.getcwd(), "/init_test_db"]): {
-        "bind": "/docker-entrypoint-initdb.d",
-        "mode": "ro",
-    }
-}
-VOLUMES_METADATA_ONLY = volume = {
-    "".join([os.getcwd(), "/init_metadata_only"]): {
-        "bind": "/docker-entrypoint-initdb.d",
-        "mode": "ro",
-    }
-}
 DATA_TABLE = "data"
 TOPICS_TABLE = "topics"
 META_TABLE = "meta"
@@ -53,7 +35,7 @@
 def test_setup_historian_tables_should_create_tables(get_container_func, ports_config):
     get_container, image = get_container_func
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL_WITH_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
@@ -76,7 +58,7 @@ def test_record_table_definitions_should_create_meta_table(
 ):
     get_container, image = get_container_func
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL_WITH_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
@@ -104,14 +86,11 @@ def test_read_tablenames_from_db_should_return_table_names(
 ):
     get_container, image = get_container_func
     with get_container(
-        image,
-        ports=ports_config["ports"],
-        env=ENV_POSTGRESQL,
-        volumes=VOLUMES_METADATA_ONLY,
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
-        # create_meta_data_table(container)
+        create_meta_data_table(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
             expected_tables = {
@@ -132,11 +111,9 @@ def test_setup_aggregate_historian_tables_should_create_aggregate_tables(
     get_container_func, ports_config
 ):
     get_container, image = get_container_func
+
     with get_container(
-        image,
-        ports=ports_config["ports"],
-        env=ENV_POSTGRESQL,
-        volumes=VOLUMES_METADATA_ONLY,
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
@@ -149,6 +126,7 @@ def test_setup_aggregate_historian_tables_should_create_aggregate_tables(
             assert agg_topic_table not in original_tables
             assert agg_meta_table not in original_tables
 
+            create_meta_data_table(container)
             expected_agg_topic_fields = {
                 "agg_topic_id",
                 "agg_topic_name",
@@ -174,11 +152,11 @@
 @pytest.mark.parametrize(
     "topic_ids, id_name_map, expected_values",
     [
-        ([24], {24: "topic24"}, {"topic24": []}),
+        ([42], {42: "topic42"}, {"topic42": []}),
         (
-            [13],
-            {13: "topic13"},
-            {"topic13": [("2020-06-01T12:30:59.000000+00:00", [2, 3])]},
+            [43],
+            {43: "topic43"},
+            {"topic43": [("2020-06-01T12:30:59.000000+00:00", [2, 3])]},
         ),
     ],
 )
@@ -188,12 +166,18 @@ def test_query_should_return_data(
 ):
     get_container, image = get_container_func
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
    ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
+            query = f"""
+                INSERT INTO {DATA_TABLE} VALUES ('2020-06-01 12:30:59', 43, '[2,3]')
+            """
+            seed_database(container, query)
+
             actual_values = postgresqlfuncts.query(topic_ids, id_name_map)
 
             assert actual_values == expected_values
@@ -204,17 +188,20 @@ def test_insert_topic_should_return_topic_id(get_container_func, ports_config):
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
-            topic = "hockey"
+
+            topic = "football"
+            expected_topic_id = 1
 
             actual_topic_id = postgresqlfuncts.insert_topic(topic)
 
-            assert isinstance(actual_topic_id, int)
+            assert actual_topic_id == expected_topic_id
 
 
 @pytest.mark.postgresqlfuncts
@@ -222,28 +209,25 @@ def test_insert_agg_topic_should_return_agg_topic_id(get_container_func, ports_c
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
-            topic = "foobar_topic"
-            agg_type = "SUM"
-            agg_time_period = "1989"
+
+            topic = "some_agg_topic"
+            agg_type = "AVG"
+            agg_time_period = "2019"
+            expected_data = (1, "some_agg_topic", "AVG", "2019")
 
             actual_id = postgresqlfuncts.insert_agg_topic(
                 topic, agg_type, agg_time_period
             )
-            expected_data = [(actual_id, topic, agg_type, agg_time_period)]
 
             assert isinstance(actual_id, int)
-            assert (
-                get_data_from_aggregate_by_topic_id(
-                    port_on_host, AGG_TOPICS_TABLE, actual_id
-                )
-                == expected_data
-            )
+            assert get_data_in_table(port_on_host, AGG_TOPICS_TABLE)[0] == expected_data
 
 
 @pytest.mark.postgresqlfuncts
@@ -251,10 +235,11 @@ def test_insert_meta_should_return_true(get_container_func, ports_config):
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
             topic_id = "44"
@@ -264,7 +249,7 @@ def test_insert_meta_should_return_true(get_container_func, ports_config):
             res = postgresqlfuncts.insert_meta(topic_id, metadata)
 
             assert res is True
-            assert get_data_(port_on_host, "meta")[0] == expected_data
+            assert get_data_in_table(port_on_host, "meta")[0] == expected_data
 
 
 @pytest.mark.postgresqlfuncts
@@ -272,23 +257,22 @@ def test_insert_data_should_return_true(get_container_func, ports_config):
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
             ts = "2001-09-11 08:46:00"
             topic_id = "11"
             data = "1wtc"
-            expected_data = (datetime.datetime(2001, 9, 11, 8, 46), 11, '"1wtc"')
+            expected_data = [(datetime.datetime(2001, 9, 11, 8, 46), 11, '"1wtc"')]
 
             res = postgresqlfuncts.insert_data(ts, topic_id, data)
 
             assert res is True
-            assert (
-                expected_data == get_data_by_topic_id(port_on_host, "data", topic_id)[0]
-            )
+            assert get_data_in_table(port_on_host, "data") == expected_data
 
 
 @pytest.mark.postgresqlfuncts
@@ -296,26 +280,23 @@ def test_update_topic_should_return_true(get_container_func, ports_config):
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
-            topic = "basketball"
+            topic = "football"
 
             actual_id = postgresqlfuncts.insert_topic(topic)
-            expected_data = (actual_id, "soccer")
 
             assert isinstance(actual_id, int)
 
             result = postgresqlfuncts.update_topic("soccer", actual_id)
 
             assert result is True
-            assert (
-                get_data_by_topic_id(port_on_host, "topics", actual_id)[0]
-                == expected_data
-            )
+            assert (actual_id, "soccer") == get_data_in_table(port_on_host, "topics")[0]
 
 
 @pytest.mark.postgresqlfuncts
@@ -323,7 +304,7 @@ def test_get_aggregation_list_should_return_list(get_container_func, ports_confi
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
@@ -351,46 +332,62 @@ def test_get_aggregation_list_should_return_list(get_container_func, ports_confi
             assert postgresqlfuncts.get_aggregation_list() == expected_list
 
 
+@pytest.mark.postgresqlfuncts
+def test_insert_agg_topic_should_return_true(get_container_func, ports_config):
+    get_container, image = get_container_func
+
+    with get_container(
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
+    ) as container:
+        port_on_host = ports_config["port_on_host"]
+        wait_for_connection(container, port_on_host)
+        create_all_tables(container)
+
+        with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
+            topic = "some_agg_topic"
+            agg_type = "AVG"
+            agg_time_period = "2019"
+            expected_data = (1, "some_agg_topic", "AVG", "2019")
+
+            actual_id = postgresqlfuncts.insert_agg_topic(
+                topic, agg_type, agg_time_period
+            )
+
+            assert isinstance(actual_id, int)
+            assert get_data_in_table(port_on_host, AGG_TOPICS_TABLE)[0] == expected_data
+
+
 @pytest.mark.postgresqlfuncts
 def test_update_agg_topic_should_return_true(get_container_func, ports_config):
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
             topic = "cars"
             agg_type = "SUM"
             agg_time_period = "2100ZULU"
+            expected_data = (1, "cars", "SUM", "2100ZULU")
 
             actual_id = postgresqlfuncts.insert_agg_topic(
                 topic, agg_type, agg_time_period
             )
 
             assert isinstance(actual_id, int)
-            expected_data = [(actual_id, "cars", "SUM", "2100ZULU")]
-            assert (
-                get_data_from_aggregate_by_topic_id(
-                    port_on_host, AGG_TOPICS_TABLE, actual_id
-                )
-                == expected_data
-            )
+            assert get_data_in_table(port_on_host, AGG_TOPICS_TABLE)[0] == expected_data
 
             new_agg_topic_name = "boats"
-            expected_data = [(actual_id, "boats", "SUM", "2100ZULU")]
+            expected_data = (1, "boats", "SUM", "2100ZULU")
 
             result = postgresqlfuncts.update_agg_topic(actual_id, new_agg_topic_name)
 
             assert result is True
-            assert (
-                get_data_from_aggregate_by_topic_id(
-                    port_on_host, AGG_TOPICS_TABLE, actual_id
-                )
-                == expected_data
-            )
+            assert get_data_in_table(port_on_host, AGG_TOPICS_TABLE)[0] == expected_data
 
 
 @pytest.mark.postgresqlfuncts
@@ -398,26 +395,22 @@ def test_insert_agg_meta_should_return_true(get_container_func, ports_config):
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
            topic_id = 42
            # metadata must be in the following convention because aggregation methods, i.e. get_agg_topics, rely on metadata having a key called "configured_topics"
            metadata = {"configured_topics": "meaning of life"}
-            expected_data = [(42, '{"configured_topics": "meaning of life"}')]
+            expected_data = (42, '{"configured_topics": "meaning of life"}')
 
             result = postgresqlfuncts.insert_agg_meta(topic_id, metadata)
 
             assert result is True
-            assert (
-                get_data_from_aggregate_by_topic_id(
-                    port_on_host, AGG_META_TABLE, topic_id
-                )
-                == expected_data
-            )
+            assert get_data_in_table(port_on_host, AGG_META_TABLE)[0] == expected_data
 
 
 @pytest.mark.postgresqlfuncts
@@ -425,20 +418,23 @@ def test_get_topic_map_should_return_maps(get_container_func, ports_config):
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
    ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
+            query = """
+                INSERT INTO topics (topic_name)
+                VALUES ('football');
+                INSERT INTO topics (topic_name)
+                VALUES ('baseball');
+            """
+            seed_database(container, query)
             expected = (
-                {"baseball": 2, "football": 1, "foobar": 3, "xctljglfkjsgfklsd": 4},
-                {
-                    "baseball": "baseball",
-                    "football": "football",
-                    "xctljglfkjsgfklsd": "xctljglfkjsgfklsd",
-                    "foobar": "foobar",
-                },
+                {"baseball": 2, "football": 1},
+                {"baseball": "baseball", "football": "football"},
             )
 
             actual = postgresqlfuncts.get_topic_map()
@@ -451,12 +447,21 @@ def test_get_agg_topics_should_return_list(get_container_func, ports_config):
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
+            topic = "some_agg_topic"
+            agg_type = "AVG"
+            agg_time_period = "2019"
+            topic_id = postgresqlfuncts.insert_agg_topic(
+                topic, agg_type, agg_time_period
+            )
+            metadata = {"configured_topics": "meaning of life"}
+            postgresqlfuncts.insert_agg_meta(topic_id, metadata)
             expected_list = [("some_agg_topic", "AVG", "2019", "meaning of life")]
 
             actual_list = postgresqlfuncts.get_agg_topics()
@@ -469,17 +474,24 @@ def test_get_agg_topic_map_should_return_dict(get_container_func, ports_config):
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
-            expected_dict = {("some_agg_topic", "AVG", "2019"): 1}
+            query = f"""
+                INSERT INTO {AGG_TOPICS_TABLE}
+                (agg_topic_name, agg_type, agg_time_period)
+                VALUES ('topic_name', 'AVG', '2001');
+            """
+            seed_database(container, query)
+            expected = {("topic_name", "AVG", "2001"): 1}
 
-            actual_dict = postgresqlfuncts.get_agg_topic_map()
+            actual = postgresqlfuncts.get_agg_topic_map()
 
-            assert actual_dict == expected_dict
+            assert actual == expected
 
 
 @pytest.mark.postgresqlfuncts
@@ -489,18 +501,28 @@ def test_query_topics_by_pattern_should_return_matching_results(
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
-            expected_topics = {"football": 1, "foobar": 3}
+            query = f"""
+                INSERT INTO {TOPICS_TABLE} (topic_name)
+                VALUES ('football');
+                INSERT INTO {TOPICS_TABLE} (topic_name)
+                VALUES ('foobar');
+                INSERT INTO {TOPICS_TABLE} (topic_name)
+                VALUES ('xyzzzzzzzz');
+            """
+            seed_database(container, query)
+            expected = {"football": 1, "foobar": 2}
             topic_pattern = "foo"
 
-            actual_topics = postgresqlfuncts.query_topics_by_pattern(topic_pattern)
+            actual = postgresqlfuncts.query_topics_by_pattern(topic_pattern)
 
-            assert actual_topics == expected_topics
+            assert actual == expected
 
 
 @pytest.mark.postgresqlfuncts
@@ -508,10 +530,11 @@ def test_create_aggregate_store_should_succeed(get_container_func, ports_config)
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
    ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
             agg_type = "AVG"
@@ -529,37 +552,56 @@ def test_create_aggregate_store_should_succeed(get_container_func, ports_config)
 
 
 @pytest.mark.postgresqlfuncts
-def test_insert_aggregate_stmt_should_return_true(get_container_func, ports_config):
+def test_insert_aggregate_stmt_should_succeed(get_container_func, ports_config):
     get_container, image = get_container_func
 
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
+            # be aware that Postgresql will automatically fold unquoted names into lower case
+            # From : https://www.postgresql.org/docs/current/sql-syntax-lexical.html
+            # Quoting an identifier also makes it case-sensitive, whereas unquoted names are always folded to lower case.
+            # For example, the identifiers FOO, foo, and "foo" are considered the same by PostgreSQL,
+            # but "Foo" and "FOO" are different from these three and each other.
+            # (The folding of unquoted names to lower case in PostgreSQL is incompatible with the SQL standard,
+            # which says that unquoted names should be folded to upper case.
+            # Thus, foo should be equivalent to "FOO" not "foo" according to the standard.
+            # If you want to write portable applications you are advised to always quote a particular name or never quote it.)
+            query = """
+                CREATE TABLE AVG_1776 (
+                    ts timestamp NOT NULL,
+                    topic_id INTEGER NOT NULL,
+                    value_string TEXT NOT NULL,
+                    topics_list TEXT,
+                    UNIQUE(ts, topic_id));
+                CREATE INDEX IF NOT EXISTS idx_avg_1776 ON avg_1776 (ts ASC);
+            """
+            seed_database(container, query)
+
             agg_topic_id = 42
             agg_type = "avg"
             period = "1776"
             ts = "2020-06-01 12:30:59"
             data = "some_data"
             topic_ids = [12, 54, 65]
-            expected_data = [
-                (
-                    datetime.datetime(2020, 6, 1, 12, 30, 59),
-                    42,
-                    '"some_data"',
-                    "[12, 54, 65]",
-                )
-            ]
+            expected_data = (
+                datetime.datetime(2020, 6, 1, 12, 30, 59),
+                42,
+                '"some_data"',
+                "[12, 54, 65]",
+            )
 
             res = postgresqlfuncts.insert_aggregate(
                 agg_topic_id, agg_type, period, ts, data, topic_ids
             )
 
             assert res is True
-            assert get_data_(port_on_host, "avg_1776") == expected_data
+            assert get_data_in_table(port_on_host, "avg_1776")[0] == expected_data
 
 
 @pytest.mark.postgresqlfuncts
@@ -567,12 +609,21 @@ def test_collect_aggregate_stmt_should_return_rows(get_container_func, ports_con
     get_container, image = get_container_func
 
    with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
+        create_all_tables(container)
 
         with get_postgresqlfuncts(port_on_host) as postgresqlfuncts:
+            query = f"""
+                INSERT INTO {DATA_TABLE}
+                VALUES ('2020-06-01 12:30:59', 42, '2');
+                INSERT INTO {DATA_TABLE}
+                VALUES ('2020-06-01 12:31:59', 43, '8')
+            """
+            seed_database(container, query)
+
             topic_ids = [42, 43]
             agg_type = "avg"
             expected_aggregate = (5.0, 2)
@@ -588,7 +639,7 @@ def test_collect_aggregate_stmt_should_raise_value_error(
 ):
     get_container, image = get_container_func
     with get_container(
-        image, ports=ports_config["ports"], env=ENV_POSTGRESQL, volumes=VOLUMES_TEST_DB
+        image, ports=ports_config["ports"], env=ENV_POSTGRESQL
     ) as container:
         port_on_host = ports_config["port_on_host"]
         wait_for_connection(container, port_on_host)
@@ -632,6 +683,78 @@ def ports_config():
     return {"port_on_host": port_on_host, "ports": {"5432/tcp": port_on_host}}
 
 
+def create_all_tables(container):
+    create_historian_tables(container)
+    create_meta_data_table(container)
+    create_aggregate_historian_tables(container)
+
+
+def create_historian_tables(container):
+    query = f"""
+        CREATE TABLE {DATA_TABLE} (
+            ts TIMESTAMP NOT NULL,
+            topic_id INTEGER NOT NULL,
+            value_string TEXT NOT NULL,
+            UNIQUE (topic_id, ts));
+        CREATE TABLE IF NOT EXISTS {TOPICS_TABLE} (
+            topic_id SERIAL PRIMARY KEY NOT NULL,
+            topic_name VARCHAR(512) NOT NULL,
+            UNIQUE (topic_name));
+        CREATE TABLE IF NOT EXISTS {META_TABLE} (
+            topic_id INTEGER PRIMARY KEY NOT NULL,
+            metadata TEXT NOT NULL);
+    """
+
+    seed_database(container, query)
+
+    return
+
+
+def create_meta_data_table(container):
+    query = f"""
+        CREATE TABLE {METADATA_TABLE}
+            (table_id VARCHAR(512) PRIMARY KEY NOT NULL,
+             table_name VARCHAR(512) NOT NULL);
+        INSERT INTO {METADATA_TABLE} VALUES ('data_table', '{DATA_TABLE}');
+        INSERT INTO {METADATA_TABLE} VALUES ('topics_table', '{TOPICS_TABLE}');
+        INSERT INTO {METADATA_TABLE} VALUES ('meta_table', '{META_TABLE}');
+    """
+    seed_database(container, query)
+
+    return
+
+
+def create_aggregate_historian_tables(container):
+    query = f"""
+        CREATE TABLE IF NOT EXISTS {AGG_TOPICS_TABLE} (
+            agg_topic_id SERIAL PRIMARY KEY NOT NULL,
+            agg_topic_name VARCHAR(512) NOT NULL,
+            agg_type VARCHAR(512) NOT NULL,
+            agg_time_period VARCHAR(512) NOT NULL,
+            UNIQUE (agg_topic_name, agg_type, agg_time_period));
+        CREATE TABLE IF NOT EXISTS {AGG_META_TABLE} (
+            agg_topic_id INTEGER PRIMARY KEY NOT NULL,
+            metadata TEXT NOT NULL);
+    """
+
+    seed_database(container, query)
+
+    return
+
+
+def seed_database(container, query):
+    command = (
+        f'psql --username="{ROOT_USER}" --dbname="{TEST_DATABASE}" --command="{query}"'
+    )
+    r = container.exec_run(cmd=command, tty=True)
+    print(r)
+    if r[0] == 1:
+        raise RuntimeError(
+            f"SQL query did not successfully complete on the container: \n {r}"
+        )
+    return
+
+
 def get_tables(port):
     cnx, cursor = get_cnx_cursor(port)
     # unlike MYSQL, Postgresql does not have a "SHOW TABLES" shortcut
@@ -658,7 +781,7 @@ def describe_table(port, table):
     return {t[0] for t in results}
 
 
-def get_data_(port, table):
+def get_data_in_table(port, table):
     cnx, cursor = get_cnx_cursor(port)
 
     query = SQL("SELECT * " "FROM {table_name}").format(table_name=Identifier(table))
@@ -667,24 +790,6 @@
-def get_data_by_topic_id(port, table, topic_id):
-    cnx, cursor = get_cnx_cursor(port)
-    query = SQL("SELECT * " "FROM {table_name}" "WHERE topic_id = %s").format(
-        table_name=Identifier(table)
-    )
-
-    return execute_statement(cnx, cursor, query, args=[topic_id])
-
-
-def get_data_from_aggregate_by_topic_id(port, table, topic_id):
-    cnx, cursor = get_cnx_cursor(port)
-    query = SQL("SELECT * " "FROM {table_name}" "WHERE agg_topic_id = %s").format(
-        table_name=Identifier(table)
-    )
-
-    return execute_statement(cnx, cursor, query, args=[topic_id])
-
-
 def execute_statement(cnx, cursor, query, args=None):
     cursor.execute(query, vars=args)
@@ -723,7 +828,6 @@ def wait_for_connection(container, port):
     # and 3 if an error occurred in a script and the variable ON_ERROR_STOP was set.
     exit_code = response[0]
 
-    # print(response)
     if exit_code == 0:
         return
     elif exit_code == 1: