diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index bdedbd0..ed61dd8 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 2.1.6
+current_version = 2.1.7
commit = False
tag = False
message = 'Version {new_version}'
diff --git a/Makefile b/Makefile
index e850076..82dca3f 100644
--- a/Makefile
+++ b/Makefile
@@ -35,11 +35,4 @@ test_sgw_cli:
python -m pytest tests/test_5.py
test_capella:
python -m pytest tests/test_6.py
-test:
- python -m pytest \
- tests/test_1.py \
- tests/test_2.py \
- tests/test_3.py \
- tests/test_4.py \
- tests/test_5.py \
- tests/test_6.py
+test: test_sync_drv test_async_drv test_cbc_cli test_random test_sgw_cli test_capella
diff --git a/README.md b/README.md
index 9e137c8..c08e7e7 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# cb-util 2.1.6
+# cb-util 2.1.7
## Couchbase Utilities
Couchbase connection manager. Simplifies connecting to a Couchbase cluster and performing data and management operations.
diff --git a/VERSION b/VERSION
index 399088b..04b10b4 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-2.1.6
+2.1.7
diff --git a/cbcmgr/__init__.py b/cbcmgr/__init__.py
index 56782a1..d562886 100644
--- a/cbcmgr/__init__.py
+++ b/cbcmgr/__init__.py
@@ -2,7 +2,7 @@
from pkg_resources import parse_version
_ROOT = os.path.abspath(os.path.dirname(__file__))
-__version__ = "2.1.6"
+__version__ = "2.1.7"
VERSION = parse_version(__version__)
diff --git a/cbcmgr/cb_connect.py b/cbcmgr/cb_connect.py
index 2be1de0..7052f07 100644
--- a/cbcmgr/cb_connect.py
+++ b/cbcmgr/cb_connect.py
@@ -95,7 +95,7 @@ def bucket_stats(self, bucket: str):
except Exception as err:
raise BucketStatsError(f"can not get bucket {bucket} stats: {err}")
- @retry(factor=0.5)
+ @retry()
def bucket_wait(self, bucket: str, count: int = 0):
try:
bucket_stats = self.bucket_stats(bucket)
@@ -104,7 +104,7 @@ def bucket_wait(self, bucket: str, count: int = 0):
except Exception as err:
raise BucketWaitException(f"bucket_wait: error: {err}")
- @retry(factor=0.5)
+ @retry()
def scope_wait(self, bucket: str, scope: str):
bucket = self._cluster.bucket(bucket)
cm = bucket.collections()
diff --git a/cbcmgr/cb_connect_lite.py b/cbcmgr/cb_connect_lite.py
index 4218130..00ed952 100644
--- a/cbcmgr/cb_connect_lite.py
+++ b/cbcmgr/cb_connect_lite.py
@@ -130,6 +130,13 @@ def get_collection(self, bucket: Bucket, scope: Scope, name: str = "_default") -
raise CollectionNotFoundException(f"collection {name} does not exist")
return scope.collection(name)
+ @retry()
+ def collection_wait(self, bucket: Bucket, scope: Scope, name: str = "_default"):
+ if name is None:
+ raise TypeError("name can not be None")
+ if not self.is_collection(bucket, scope.name, name):
+ raise CollectionNotFoundException(f"wait timeout: collection {name} does not exist")
+
@retry()
def create_collection(self, bucket: Bucket, scope: Scope, name: str):
if name is None:
@@ -140,6 +147,7 @@ def create_collection(self, bucket: Bucket, scope: Scope, name: str):
collection_spec = CollectionSpec(name, scope_name=scope.name)
cm = bucket.collections()
cm.create_collection(collection_spec)
+ self.collection_wait(bucket, scope, name)
except CollectionAlreadyExistsException:
pass
@@ -240,8 +248,6 @@ def create_group(self, name: str = None, description: str = None, roles: List[Ro
if self.capella_project and self.capella_db:
logger.warning("Skipping group creation on Capella")
else:
- for role in roles:
- print(role.as_dict())
um = self._cluster.users()
# noinspection PyTypeChecker
group = Group(name=name, description=description, roles=roles)
@@ -287,13 +293,13 @@ def index_by_query(self, sql: str):
raise IndexInternalError(f"can not determine index for query")
@retry()
- def index_create(self, index: CBQueryIndex, timeout: int = 480):
+ def index_create(self, index: CBQueryIndex, timeout: int = 480, deferred: bool = True):
if index.is_primary:
index_options = CreatePrimaryQueryIndexOptions()
else:
index_options = CreateQueryIndexOptions()
- index_options.update(deferred=True)
+ index_options.update(deferred=deferred)
index_options.update(timeout=timedelta(seconds=timeout))
index_options.update(num_replicas=index.num_replica)
index_options.update(ignore_if_exists=True)
diff --git a/cbcmgr/cb_management.py b/cbcmgr/cb_management.py
index 8944240..360a175 100644
--- a/cbcmgr/cb_management.py
+++ b/cbcmgr/cb_management.py
@@ -168,7 +168,7 @@ def wait_for_query_ready(self):
cluster = Cluster.connect(self.cb_connect_string, self.cluster_options)
cluster.wait_until_ready(timedelta(seconds=30), WaitUntilReadyOptions(service_types=[ServiceType.Query, ServiceType.Management]))
- @retry(factor=0.5)
+ @retry()
def wait_for_index_ready(self):
value = []
query_str = r"SELECT * FROM system:indexes;"
@@ -425,7 +425,7 @@ def is_index(self, index_name: str = None):
return False
- @retry(factor=0.5, allow_list=(IndexNotReady,))
+ @retry(allow_list=(IndexNotReady,))
def index_wait(self, index_name: str = None):
record_count = self.collection_count()
try:
@@ -482,7 +482,7 @@ def index_online(self, name=None, primary=False, timeout=480):
except WatchQueryIndexTimeoutException:
raise IndexNotReady(f"Indexes not build within {timeout} seconds...")
- @retry(factor=0.5, allow_list=(IndexNotReady,))
+ @retry(allow_list=(IndexNotReady,))
def index_list(self):
return_list = {}
try:
@@ -494,7 +494,7 @@ def index_list(self):
except Exception as err:
raise IndexNotReady(f"index_list: bucket {self._bucket.name} error: {err}")
- @retry(factor=0.5, allow_list=(IndexNotReady,))
+ @retry(allow_list=(IndexNotReady,))
def delete_wait(self, index_name: str = None):
if self.is_index(index_name=index_name):
raise IndexNotReady(f"delete_wait: index still exists")
diff --git a/cbcmgr/cb_operation_s.py b/cbcmgr/cb_operation_s.py
index 197680c..ba2c39c 100644
--- a/cbcmgr/cb_operation_s.py
+++ b/cbcmgr/cb_operation_s.py
@@ -185,6 +185,7 @@ def _collection_(self, name: str = "_default"):
except CollectionNotFoundException:
if self.create:
self.create_collection(self._bucket, self._scope, name)
+ self._collection_name = name
self.reconnect()
return self._collection_(name)
self._collection_name = name
diff --git a/cbcmgr/cb_session.py b/cbcmgr/cb_session.py
index 698dce5..20fd0d4 100644
--- a/cbcmgr/cb_session.py
+++ b/cbcmgr/cb_session.py
@@ -142,7 +142,7 @@ def cb_connect_string(self):
logger.debug(f"Connect string: {connect_string}")
return connect_string
- @retry(retry_count=5)
+ @retry(retry_count=7)
def is_reachable(self):
resolver = dns.resolver.Resolver()
resolver.timeout = 5
@@ -209,7 +209,7 @@ def process_cluster_data(self):
self.cbas_memory_quota = self.cluster_info.get('cbasMemoryQuota', 0)
self.eventing_memory_quota = self.cluster_info.get('eventingMemoryQuota', 0)
- @retry(retry_count=5)
+ @retry(retry_count=7)
def check_node_connectivity(self, hostname, port):
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
@@ -226,7 +226,7 @@ def check_node_connectivity(self, hostname, port):
else:
raise NodeConnectionFailed(f"node {hostname}:{port} unreachable")
- @retry(factor=0.5)
+ @retry()
def wait_until_ready(self):
nodes = []
cluster = Cluster(self.cb_connect_string, ClusterOptions(self.auth,
diff --git a/cbcmgr/cli/cbcutil.py b/cbcmgr/cli/cbcutil.py
index a350de3..3e8aad1 100644
--- a/cbcmgr/cli/cbcutil.py
+++ b/cbcmgr/cli/cbcutil.py
@@ -61,6 +61,7 @@ def local_args(self):
opt_parser.add_argument('--docid', action='store', help="Import document ID field", default="doc_id")
opt_parser.add_argument('--tls', action='store_true', help="Enable SSL")
opt_parser.add_argument('--safe', action='store_true', help="Do not overwrite data")
+ opt_parser.add_argument('--defer', action='store_true', help="Defer index build")
opt_parser.add_argument('-e', '--external', action='store_true', help='Use external network')
opt_parser.add_argument('-f', '--file', action='store', help="File based collection schema JSON")
opt_parser.add_argument('--outfile', action='store', help="Output file", default="output.dat")
@@ -118,7 +119,7 @@ def run(self):
if self.options.replicate_command == 'source':
Replicator(self.options.filter).source()
elif self.options.replicate_command == 'target':
- Replicator().target()
+ Replicator(deferred=self.options.defer).target()
else:
if config.op_mode == OperatingMode.LOAD.value and self.options.schema:
MainLoop().schema_load()
diff --git a/cbcmgr/cli/replicate.py b/cbcmgr/cli/replicate.py
index ea15801..98405f5 100644
--- a/cbcmgr/cli/replicate.py
+++ b/cbcmgr/cli/replicate.py
@@ -47,12 +47,13 @@ def default(self, obj):
class Replicator(object):
- def __init__(self, filters=None):
+ def __init__(self, filters=None, deferred=True):
self.output = {}
if filters:
self.filters = filters
else:
self.filters = []
+ self.deferred = deferred
self.bucket_filters = []
self.scope_filters = []
self.collection_filters = []
@@ -210,7 +211,7 @@ def read_schema_from_input(self):
for index in index_list:
entry = CBQueryIndex.from_dict(index)
logger.info(f"Replicating index [{entry.keyspace_id}] {entry.name}")
- operator.index_create(entry)
+ operator.index_create(entry, deferred=self.deferred)
except Empty:
time.sleep(0.1)
continue
diff --git a/cbcmgr/cli/sgwutil.py b/cbcmgr/cli/sgwutil.py
index a542206..349909f 100755
--- a/cbcmgr/cli/sgwutil.py
+++ b/cbcmgr/cli/sgwutil.py
@@ -208,7 +208,7 @@ def resync(self, name):
logger.error(f"Resync failed for database {name}: {err}")
sys.exit(1)
- @retry(factor=0.5, retry_count=20)
+ @retry(retry_count=12)
def resync_wait(self, name):
self.api_post(f"/{name}/_online", None)
@@ -237,7 +237,7 @@ def list_all(self):
logger.error(f"Database list failed: {err}")
sys.exit(1)
- @retry(factor=0.5, retry_count=20)
+ @retry(retry_count=12)
def ready_wait(self, name):
self.api_get(f"/{name}/_config").json()
diff --git a/tests/common.py b/tests/common.py
new file mode 100644
index 0000000..ea24216
--- /dev/null
+++ b/tests/common.py
@@ -0,0 +1,342 @@
+##
+
+import docker
+from docker.errors import APIError
+from docker.models.containers import Container
+from docker import APIClient
+from typing import Union, List
+from io import BytesIO
+import io
+import os
+import tarfile
+import warnings
+import logging
+import subprocess
+
+warnings.filterwarnings("ignore")
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+logging.getLogger("docker").setLevel(logging.WARNING)
+logging.getLogger("urllib3").setLevel(logging.WARNING)
+
+image_name = "mminichino/cbdev:latest"
+document = {
+ "id": 1,
+ "data": "data",
+ "one": "one",
+ "two": "two",
+ "three": "tree"
+}
+new_document = {
+ "id": 1,
+ "data": "new",
+ "one": "one",
+ "two": "two",
+ "three": "tree"
+}
+query_result = [
+ {
+ 'data': 'data'
+ }
+]
+
+json_data = {
+ "name": "John Doe",
+ "email": "jdoe@example.com",
+ "addresses": {
+ "billing": {
+ "line1": "123 Any Street",
+ "line2": "Anywhere",
+ "country": "United States"
+ },
+ "delivery": {
+ "line1": "123 Any Street",
+ "line2": "Anywhere",
+ "country": "United States"
+ }
+ },
+ "history": {
+ "events": [
+ {
+ "event_id": "1",
+ "date": "1/1/1970",
+ "type": "contact"
+ },
+ {
+ "event_id": "2",
+ "date": "1/1/1970",
+ "type": "contact"
+ }
+ ]
+ },
+ "purchases": {
+ "complete": [
+ 339, 976, 442, 777
+ ],
+ "abandoned": [
+ 157, 42, 999
+ ]
+ }
+}
+
+xml_data = """
+
+ John Doe
+ jdoe@example.com
+
+
+ 123 Any Street
+ Anywhere
+ United States
+
+
+ 123 Any Street
+ Anywhere
+ United States
+
+
+
+
+ 1
+ 1/1/1970
+ contact
+
+
+ 2
+ 1/1/1970
+ contact
+
+
+
+ 339
+ 976
+ 442
+ 777
+ 157
+ 42
+ 999
+
+
+ """
+
+
+def make_local_dir(name: str):
+ if not os.path.exists(name):
+ path_dir = os.path.dirname(name)
+ if not os.path.exists(path_dir):
+ make_local_dir(path_dir)
+ try:
+ os.mkdir(name)
+ except OSError:
+ raise
+
+
+def cmd_exec(command: Union[str, List[str]], directory: str):
+ buffer = io.BytesIO()
+
+ p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=directory)
+
+ while True:
+ data = p.stdout.read()
+ if not data:
+ break
+ buffer.write(data)
+
+ p.communicate()
+
+ if p.returncode != 0:
+ raise ValueError("command exited with non-zero return code")
+
+ buffer.seek(0)
+ return buffer
+
+
+def cli_run(cmd: str, *args: str, input_file: str = None):
+ command_output = ""
+ run_cmd = [
+ cmd,
+ *args
+ ]
+
+ p = subprocess.Popen(run_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+
+ if input_file:
+ with open(input_file, 'rb') as input_data:
+ while True:
+ line = input_data.readline()
+ if not line:
+ break
+ p.stdin.write(line)
+ p.stdin.close()
+
+ while True:
+ line = p.stdout.readline()
+ if not line:
+ break
+ line_string = line.decode("utf-8")
+ command_output += line_string
+
+ p.wait()
+
+ return p.returncode, command_output
+
+
+def copy_to_container(container_id: Container, src: str, dst: str):
+ print(f"Copying {src} to {dst}")
+ stream = io.BytesIO()
+ with tarfile.open(fileobj=stream, mode='w|') as tar, open(src, 'rb') as file:
+ info = tar.gettarinfo(fileobj=file)
+ info.name = os.path.basename(src)
+ tar.addfile(info, file)
+
+ container_id.put_archive(dst, stream.getvalue())
+
+
+def copy_log_from_container(container_id: Container, src: str, directory: str):
+ make_local_dir(directory)
+ src_base = os.path.basename(src)
+ dst = f"{directory}/{src_base}"
+ print(f"Copying {src} to {dst}")
+ try:
+ bits, stat = container_id.get_archive(src)
+ except docker.errors.NotFound:
+ print(f"{src}: not found")
+ return
+ stream = io.BytesIO()
+ for chunk in bits:
+ stream.write(chunk)
+ stream.seek(0)
+ with tarfile.open(fileobj=stream, mode='r') as tar, open(dst, 'wb') as file:
+ f = tar.extractfile(src_base)
+ data = f.read()
+ file.write(data)
+
+
+def copy_dir_to_container(container_id: Container, src_dir: str, dst: str):
+ print(f"Copying {src_dir} to {dst}")
+ stream = io.BytesIO()
+ with tarfile.open(fileobj=stream, mode='w|') as tar:
+ name = os.path.basename(src_dir)
+ tar.add(src_dir, arcname=name, recursive=True)
+
+ container_id.put_archive(dst, stream.getvalue())
+
+
+def copy_git_to_container(container_id: Container, src: str, dst: str):
+ container_mkdir(container_id, dst)
+ file_list = []
+ print(f"Copying git HEAD to {dst}")
+ output: BytesIO = cmd_exec(["git", "ls-tree", "--full-tree", "--name-only", "-r", "HEAD"], src)
+ while True:
+ line = output.readline()
+ if not line:
+ break
+ line_string = line.decode("utf-8")
+ file_list.append(line_string.strip())
+ stream = io.BytesIO()
+ with tarfile.open(fileobj=stream, mode='w|') as tar:
+ for filename in file_list:
+ tar.add(filename, recursive=True)
+
+ container_id.put_archive(dst, stream.getvalue())
+
+
+def container_mkdir(container_id: Container, directory: str):
+ command = ["mkdir", "-p", directory]
+ exit_code, output = container_id.exec_run(command)
+ assert exit_code == 0
+
+
+def start_container(image: str, platform: str = "linux/amd64") -> Container:
+ docker_api = APIClient(base_url='unix://var/run/docker.sock')
+ client = docker.from_env()
+ client.images.prune()
+ client.containers.prune()
+ client.networks.prune()
+ client.volumes.prune()
+ docker_api.prune_builds()
+
+ print(f"Starting {image}")
+
+ try:
+ container_id = client.containers.run(image,
+ tty=True,
+ detach=True,
+ platform=platform,
+ name="pytest",
+ ports={
+ 8091: 8091,
+ 18091: 18091,
+ 8092: 8092,
+ 18092: 18092,
+ 8093: 8093,
+ 18093: 18093,
+ 8094: 8094,
+ 18094: 18094,
+ 8095: 8095,
+ 18095: 18095,
+ 8096: 8096,
+ 18096: 18096,
+ 8097: 8097,
+ 18097: 18097,
+ 11207: 11207,
+ 11210: 11210,
+ 9102: 9102,
+ 4984: 4984,
+ 4985: 4985,
+ },
+ )
+ except docker.errors.APIError as e:
+ if e.status_code == 409:
+ container_id = client.containers.get('pytest')
+ else:
+ raise
+
+ print("Container started")
+ return container_id
+
+
+def get_image_name(container_id: Container):
+ tags = container_id.image.tags
+ return tags[0].split(':')[0].replace('/', '-')
+
+
+def container_log(container_id: Container, directory: str):
+ make_local_dir(directory)
+ print(f"Copying {container_id.name} log to {directory}")
+ filename = f"{directory}/{container_id.name}.log"
+ output = container_id.attach(stdout=True, stderr=True, logs=True)
+ with open(filename, 'w') as out_file:
+ out_file.write(output.decode("utf-8"))
+ out_file.close()
+
+
+def run_in_container(container_id: Container, command: Union[str, List[str]], directory: Union[str, None] = None):
+ exit_code, output = container_id.exec_run(command, workdir=directory)
+ for line in output.split(b'\n'):
+ if len(line) > 0:
+ print(line.decode("utf-8"))
+ if exit_code == 0:
+ return True
+ else:
+ return False
+
+
+def get_container_id(name: str = "pytest"):
+ client = docker.from_env()
+ try:
+ return client.containers.get(name)
+ except docker.errors.NotFound:
+ return None
+
+
+def stop_container(container_id: Container):
+ client = docker.from_env()
+ container_id.stop()
+ container_id.remove()
+ try:
+ volume = client.volumes.get("pytest-volume")
+ volume.remove()
+ except docker.errors.NotFound:
+ pass
+ print("Container stopped")
diff --git a/tests/conftest.py b/tests/conftest.py
index a36ed9f..58e49f2 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,11 +1,14 @@
+##
+##
+
import pytest
-import docker
def pytest_addoption(parser):
parser.addoption("--host", action="store", default="127.0.0.1")
parser.addoption("--bucket", action="store", default="test")
parser.addoption("--external", action="store_true")
+ parser.addoption("--image", action="store", default="mminichino/cbdev:latest")
@pytest.fixture
@@ -18,79 +21,22 @@ def bucket(request):
return request.config.getoption("--bucket")
-def pytest_configure(config):
+@pytest.fixture
+def image(request):
+ return request.config.getoption("--image")
+
+
+def pytest_configure():
+ pass
+
+
+def pytest_sessionstart():
+ pass
+
+
+def pytest_sessionfinish():
pass
-def pytest_sessionstart(session):
- if session:
- external = session.config.getoption('--external')
- if external:
- return
- print("Starting test container")
- client = docker.from_env()
- container_id = client.containers.run('mminichino/cbdev:latest',
- detach=True,
- name="pytest",
- ports={
- 8091: 8091,
- 18091: 18091,
- 8092: 8092,
- 18092: 18092,
- 8093: 8093,
- 18093: 18093,
- 8094: 8094,
- 18094: 18094,
- 8095: 8095,
- 18095: 18095,
- 8096: 8096,
- 18096: 18096,
- 8097: 8097,
- 18097: 18097,
- 11207: 11207,
- 11210: 11210,
- 9102: 9102,
- 4984: 4984,
- 4985: 4985,
- },
- )
-
- print("Container started")
- print("Waiting for Couchbase Server to be ready")
-
- while True:
- exit_code, output = container_id.exec_run(['/bin/bash',
- '-c',
- 'test -f /demo/couchbase/.ready'])
- if exit_code == 0:
- break
-
- exit_code, output = container_id.exec_run(['cbcutil',
- 'list',
- '--host', '127.0.0.1',
- '--wait'])
-
- for line in output.split(b'\n'):
- print(line.decode("utf-8"))
- assert exit_code == 0
-
- print("Ready.")
-
-
-def pytest_sessionfinish(session, exitstatus):
- if session:
- external = session.config.getoption('--external')
- if external:
- return
- print("")
- print("Stopping container")
- client = docker.from_env()
- container_id = client.containers.get('pytest')
- container_id.stop()
- print("Removing test container")
- container_id.remove()
- print("Done.")
-
-
-def pytest_unconfigure(config):
+def pytest_unconfigure():
pass
diff --git a/tests/test_1.py b/tests/test_1.py
index 9e032a7..5e990b8 100644
--- a/tests/test_1.py
+++ b/tests/test_1.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python3
+import os
import warnings
import pytest
import json
@@ -13,264 +14,190 @@
from cbcmgr.cb_operation_s import CBOperation, Operation
from cbcmgr.cb_pathmap import CBPathMap
from cbcmgr.mt_pool import CBPool
+from tests.common import start_container, stop_container, run_in_container, document, new_document, query_result, json_data, xml_data, image_name
warnings.filterwarnings("ignore")
-document = {
- "id": 1,
- "data": "data",
- "one": "one",
- "two": "two",
- "three": "tree"
-}
-new_document = {
- "id": 1,
- "data": "new",
- "one": "one",
- "two": "two",
- "three": "tree"
-}
-query_result = [
- {
- 'data': 'data'
- }
-]
-
-json_data = {
- "name": "John Doe",
- "email": "jdoe@example.com",
- "addresses": {
- "billing": {
- "line1": "123 Any Street",
- "line2": "Anywhere",
- "country": "United States"
- },
- "delivery": {
- "line1": "123 Any Street",
- "line2": "Anywhere",
- "country": "United States"
- }
- },
- "history": {
- "events": [
- {
- "event_id": "1",
- "date": "1/1/1970",
- "type": "contact"
- },
- {
- "event_id": "2",
- "date": "1/1/1970",
- "type": "contact"
- }
- ]
- },
- "purchases": {
- "complete": [
- 339, 976, 442, 777
- ],
- "abandoned": [
- 157, 42, 999
- ]
- }
- }
-
-xml_data = """
-
- John Doe
- jdoe@example.com
-
-
- 123 Any Street
- Anywhere
- United States
-
-
- 123 Any Street
- Anywhere
- United States
-
-
-
-
- 1
- 1/1/1970
- contact
-
-
- 2
- 1/1/1970
- contact
-
-
-
- 339
- 976
- 442
- 777
- 157
- 42
- 999
-
-
-"""
-@pytest.mark.parametrize("scope, collection", [("_default", "_default"), ("test", "test")])
-@pytest.mark.parametrize("tls", [False, True])
-def test_cb_driver_1(hostname, bucket, tls, scope, collection):
- replica_count = 0
- bucket_opts = Bucket(**dict(
- name=bucket,
- num_replicas=0
- ))
-
- dbm = CBManager(hostname, "Administrator", "password", ssl=False).connect()
- dbm.create_bucket(bucket_opts)
- dbm.create_scope(scope)
- dbm.create_collection(collection)
- result = dbm.get_bucket(bucket)
- assert result is not None
-
- dbc = CBConnect(hostname, "Administrator", "password", ssl=False).connect(bucket, scope, collection)
-
- dbm.cb_create_primary_index(replica=replica_count)
- index_name = dbm.cb_create_index(fields=["data"], replica=replica_count)
- dbm.index_wait()
- dbm.index_wait(index_name)
- result = dbm.is_index()
- assert result is True
- result = dbm.is_index(index_name)
- assert result is True
- dbc.cb_upsert("test::1", document)
- dbc.bucket_wait(bucket, count=1)
- result = dbc.cb_doc_exists("test::1")
- assert result is True
-
- result = dbc.has_primary_index()
- assert result is True
- result = dbc.cb_get("test::1")
- assert result == document
- result = dbc.collection_count(expect_count=1)
- assert result == 1
- result = dbc.cb_query(field="data", empty_retry=True)
- assert result == query_result
- dbc.cb_upsert("test::2", document)
- dbc.cb_subdoc_multi_upsert(["test::1", "test::2"], "data", ["new", "new"])
- result = dbc.cb_get("test::1")
- assert result == new_document
- result = dbc.collection_count(expect_count=2)
- assert result == 2
- dbc.cb_upsert("test::3", document)
- dbc.cb_subdoc_upsert("test::3", "data", "new")
- result = dbc.cb_get("test::3")
- assert result == new_document
-
- inventory = dbm.cluster_schema_dump()
- assert type(inventory) is dict
-
- dbm.cb_drop_primary_index()
- dbm.cb_drop_index(index_name)
- dbm.delete_wait()
- dbm.delete_wait(index_name)
- dbm.drop_bucket(bucket)
-
+@pytest.fixture(scope="module", autouse=True)
+def setup(request):
+ print("Starting test container")
+ platform = f"linux/{os.uname().machine}"
+ container_id = start_container(image_name, platform)
+ command = ['/bin/bash', '-c', 'test -f /demo/couchbase/.ready']
+ while not run_in_container(container_id, command):
+ time.sleep(1)
+ command = ['cbcutil', 'list', '--host', '127.0.0.1', '--wait']
+ run_in_container(container_id, command)
-@pytest.mark.parametrize("scope, collection", [("_default", "_default"), ("test", "test")])
-@pytest.mark.parametrize("tls", [False, True])
-def test_cb_driver_2(hostname, bucket, tls, scope, collection):
- cfg = UpsertMapConfig().new()
- cfg.add('addresses.billing')
- cfg.add('addresses.delivery')
- cfg.add('history.events',
- p_type=MapUpsertType.LIST,
- id_key="event_id")
+ yield container_id
- p_map = CBPathMap(cfg, hostname, "Administrator", "password", bucket, scope, ssl=False, quota=128)
- p_map.connect()
- p_map.load_data("testdata", json_data=json.dumps(json_data, indent=2))
- CBOperation(hostname, "Administrator", "password", ssl=tls).connect(bucket).cleanup()
+ print("Stopping test container")
+ stop_container(container_id)
+@pytest.mark.parametrize("hostname", ["127.0.0.1"])
+@pytest.mark.parametrize("bucket", ["test"])
@pytest.mark.parametrize("scope, collection", [("_default", "_default"), ("test", "test")])
@pytest.mark.parametrize("tls", [False, True])
-def test_cb_driver_3(hostname, bucket, tls, scope, collection):
- cfg = UpsertMapConfig().new()
- cfg.add('root.addresses.billing')
- cfg.add('root.addresses.delivery')
- cfg.add('root.history.events',
- p_type=MapUpsertType.LIST,
- id_key="event_id")
+class TestSyncDrv1(object):
+
+ def test_1(self, hostname, bucket, tls, scope, collection):
+ replica_count = 0
+ bucket_opts = Bucket(**dict(
+ name=bucket,
+ num_replicas=0
+ ))
+
+ dbm = CBManager(hostname, "Administrator", "password", ssl=tls).connect()
+ dbm.create_bucket(bucket_opts)
+ dbm.create_scope(scope)
+ dbm.create_collection(collection)
+ result = dbm.get_bucket(bucket)
+ assert result is not None
+
+ dbc = CBConnect(hostname, "Administrator", "password", ssl=tls).connect(bucket, scope, collection)
+
+ dbm.cb_create_primary_index(replica=replica_count)
+ index_name = dbm.cb_create_index(fields=["data"], replica=replica_count)
+ dbm.index_wait()
+ dbm.index_wait(index_name)
+ result = dbm.is_index()
+ assert result is True
+ result = dbm.is_index(index_name)
+ assert result is True
+ dbc.cb_upsert("test::1", document)
+ dbc.bucket_wait(bucket, count=1)
+ result = dbc.cb_doc_exists("test::1")
+ assert result is True
+
+ result = dbc.has_primary_index()
+ assert result is True
+ result = dbc.cb_get("test::1")
+ assert result == document
+ result = dbc.collection_count(expect_count=1)
+ assert result == 1
+ result = dbc.cb_query(field="data", empty_retry=True)
+ assert result == query_result
+ dbc.cb_upsert("test::2", document)
+ dbc.cb_subdoc_multi_upsert(["test::1", "test::2"], "data", ["new", "new"])
+ result = dbc.cb_get("test::1")
+ assert result == new_document
+ result = dbc.collection_count(expect_count=2)
+ assert result == 2
+ dbc.cb_upsert("test::3", document)
+ dbc.cb_subdoc_upsert("test::3", "data", "new")
+ result = dbc.cb_get("test::3")
+ assert result == new_document
+
+ inventory = dbm.cluster_schema_dump()
+ assert type(inventory) is dict
+
+ dbm.cb_drop_primary_index()
+ dbm.cb_drop_index(index_name)
+ dbm.delete_wait()
+ dbm.delete_wait(index_name)
+ dbm.drop_bucket(bucket)
+
+ def test_2(self, hostname, bucket, tls, scope, collection):
+ keyspace = f"{bucket}.{scope}.{collection}"
+ try:
+ opm = CBOperation(hostname, "Administrator", "password", ssl=tls)
+ col_a = opm.connect(keyspace)
+ col_a.cleanup()
+ except (BucketNotFoundException, ScopeNotFoundException, CollectionNotFoundException):
+ pass
+
+ col_a = CBOperation(hostname, "Administrator", "password", ssl=tls, quota=128, create=True).connect(keyspace)
+
+ col_a.put_doc(col_a.collection, "test::1", document)
+ d = col_a.get_doc(col_a.collection, "test::1")
+ assert d == document
+
+ col_a.index_by_query(f"select data from {keyspace}")
+
+ r = col_a.run_query(col_a.cluster, f"select data from {keyspace}")
+ assert r[0]['data'] == 'data'
- p_map = CBPathMap(cfg, hostname, "Administrator", "password", bucket, scope, ssl=False, quota=128)
- p_map.connect()
- p_map.load_data("testdata", xml_data=xml_data)
- CBOperation(hostname, "Administrator", "password", ssl=tls).connect(bucket).cleanup()
-
-
-@pytest.mark.parametrize("scope, collection", [("_default", "_default"), ("test", "test")])
-@pytest.mark.parametrize("tls", [False, True])
-def test_cb_driver_4(hostname, bucket, tls, scope, collection):
- keyspace = f"{bucket}.{scope}.{collection}"
- try:
- opm = CBOperation(hostname, "Administrator", "password", ssl=tls)
- col_a = opm.connect(keyspace)
col_a.cleanup()
- except (BucketNotFoundException, ScopeNotFoundException, CollectionNotFoundException):
- pass
- col_a = CBOperation(hostname, "Administrator", "password", ssl=tls, quota=128, create=True).connect(keyspace)
+ col_t = CBOperation(hostname, "Administrator", "password", ssl=tls, quota=128, create=True).connect(keyspace)
+ a_read = col_t.get_operator(Operation.READ)
+ a_write = col_t.get_operator(Operation.WRITE)
+ a_query = col_t.get_operator(Operation.QUERY)
- col_a.put_doc(col_a.collection, "test::1", document)
- d = col_a.get_doc(col_a.collection, "test::1")
- assert d == document
+ a_write.prep("test::1", document)
+ a_write.execute()
+ a_read.prep("test::1")
+ a_read.execute()
+ assert document == a_read.result["test::1"]
- col_a.index_by_query(f"select data from {keyspace}")
+ col_t.index_by_query(f"select data from {keyspace}")
+ a_query.prep(f"select data from {keyspace}")
+ a_query.execute()
+ assert a_query.result[0]['data'] == 'data'
- r = col_a.run_query(col_a.cluster, f"select data from {keyspace}")
- assert r[0]['data'] == 'data'
-
- col_a.cleanup()
-
- col_t = CBOperation(hostname, "Administrator", "password", ssl=tls, quota=128, create=True).connect(keyspace)
- a_read = col_t.get_operator(Operation.READ)
- a_write = col_t.get_operator(Operation.WRITE)
- a_query = col_t.get_operator(Operation.QUERY)
-
- a_write.prep("test::1", document)
- a_write.execute()
- a_read.prep("test::1")
- a_read.execute()
- assert document == a_read.result["test::1"]
-
- col_t.index_by_query(f"select data from {keyspace}")
- a_query.prep(f"select data from {keyspace}")
- a_query.execute()
- assert a_query.result[0]['data'] == 'data'
-
- col_a.cleanup()
+ col_a.cleanup()
+@pytest.mark.parametrize("hostname", ["127.0.0.1"])
+@pytest.mark.parametrize("bucket", ["test"])
+@pytest.mark.parametrize("scope", ["_default", "test"])
+@pytest.mark.parametrize("tls", [False, True])
+class TestSyncDrv2(object):
+
+ def test_1(self, hostname, bucket, tls, scope):
+ cfg = UpsertMapConfig().new()
+ cfg.add('addresses.billing')
+ cfg.add('addresses.delivery')
+ cfg.add('history.events',
+ p_type=MapUpsertType.LIST,
+ id_key="event_id")
+
+ p_map = CBPathMap(cfg, hostname, "Administrator", "password", bucket, scope, ssl=tls, quota=128)
+ p_map.connect()
+ p_map.load_data("testdata", json_data=json.dumps(json_data, indent=2))
+ CBOperation(hostname, "Administrator", "password", ssl=tls).connect(bucket).cleanup()
+
+ def test_2(self, hostname, bucket, tls, scope):
+ cfg = UpsertMapConfig().new()
+ cfg.add('root.addresses.billing')
+ cfg.add('root.addresses.delivery')
+ cfg.add('root.history.events',
+ p_type=MapUpsertType.LIST,
+ id_key="event_id")
+
+ p_map = CBPathMap(cfg, hostname, "Administrator", "password", bucket, scope, ssl=False, quota=128)
+ p_map.connect()
+ p_map.load_data("testdata", xml_data=xml_data)
+ CBOperation(hostname, "Administrator", "password", ssl=tls).connect(bucket).cleanup()
+
+
+@pytest.mark.parametrize("hostname", ["127.0.0.1"])
+@pytest.mark.parametrize("bucket", ["test"])
@pytest.mark.parametrize("scope", ["_default", "test"])
@pytest.mark.parametrize("collection", ["test"])
@pytest.mark.parametrize("tls", [False, True])
-def test_cb_driver_5(hostname, bucket, tls, scope, collection):
- pool = CBPool(hostname, "Administrator", "password", ssl=tls, quota=128, create=True)
-
- for n in range(10):
- c = string.ascii_lowercase[n:n + 1]
- keyspace = f"{bucket}.{scope}.{collection}{c}"
- pool.connect(keyspace)
- for i in range(1000):
- pool.dispatch(keyspace, Operation.WRITE, f"test::{i+1}", document)
-
- pool.join()
- time.sleep(1)
- count = 0
- for n in range(10):
- c = string.ascii_lowercase[n:n + 1]
- keyspace = f"{bucket}.{scope}.{collection}{c}"
- count += CBOperation(hostname, "Administrator", "password", ssl=tls).connect(keyspace).get_count()
- assert count == 10000
-
- CBOperation(hostname, "Administrator", "password", ssl=tls).connect(bucket).cleanup()
+class TestSyncDrv3(object):
+
+ def test_1(self, hostname, bucket, tls, scope, collection):
+ pool = CBPool(hostname, "Administrator", "password", ssl=tls, quota=128, create=True)
+
+ for n in range(10):
+ c = string.ascii_lowercase[n:n + 1]
+ keyspace = f"{bucket}.{scope}.{collection}{c}"
+ pool.connect(keyspace)
+ for i in range(1000):
+ pool.dispatch(keyspace, Operation.WRITE, f"test::{i+1}", document)
+
+ pool.join()
+ time.sleep(1)
+ count = 0
+ for n in range(10):
+ c = string.ascii_lowercase[n:n + 1]
+ keyspace = f"{bucket}.{scope}.{collection}{c}"
+ count += CBOperation(hostname, "Administrator", "password", ssl=tls).connect(keyspace).get_count()
+ assert count == 10000
+
+ CBOperation(hostname, "Administrator", "password", ssl=tls).connect(bucket).cleanup()
diff --git a/tests/test_2.py b/tests/test_2.py
index b972be5..d0cd169 100644
--- a/tests/test_2.py
+++ b/tests/test_2.py
@@ -1,228 +1,154 @@
#!/usr/bin/env python3
+import os
import warnings
import pytest
import asyncio
import string
+import time
from couchbase.exceptions import (BucketNotFoundException, ScopeNotFoundException, CollectionNotFoundException)
from cbcmgr.cb_connect_lite_a import CBConnectLiteAsync
from cbcmgr.cb_operation_a import CBOperationAsync, Operation
from cbcmgr.async_pool import CBPoolAsync
+from tests.common import start_container, stop_container, run_in_container, document, image_name
warnings.filterwarnings("ignore")
-document = {
- "id": 1,
- "data": "data",
- "one": "one",
- "two": "two",
- "three": "tree"
-}
-new_document = {
- "id": 1,
- "data": "new",
- "one": "one",
- "two": "two",
- "three": "tree"
-}
-query_result = [
- {
- 'data': 'data'
- }
-]
-
-json_data = {
- "name": "John Doe",
- "email": "jdoe@example.com",
- "addresses": {
- "billing": {
- "line1": "123 Any Street",
- "line2": "Anywhere",
- "country": "United States"
- },
- "delivery": {
- "line1": "123 Any Street",
- "line2": "Anywhere",
- "country": "United States"
- }
- },
- "history": {
- "events": [
- {
- "event_id": "1",
- "date": "1/1/1970",
- "type": "contact"
- },
- {
- "event_id": "2",
- "date": "1/1/1970",
- "type": "contact"
- }
- ]
- },
- "purchases": {
- "complete": [
- 339, 976, 442, 777
- ],
- "abandoned": [
- 157, 42, 999
- ]
- }
- }
-
-xml_data = """
-
- John Doe
- jdoe@example.com
-
-
- 123 Any Street
- Anywhere
- United States
-
-
- 123 Any Street
- Anywhere
- United States
-
-
-
-
- 1
- 1/1/1970
- contact
-
-
- 2
- 1/1/1970
- contact
-
-
-
- 339
- 976
- 442
- 777
- 157
- 42
- 999
-
-
-"""
-@pytest.mark.parametrize("bucket_name", ["test"])
-@pytest.mark.parametrize("scope_name, collection_name", [("_default", "_default"), ("test", "test")])
-@pytest.mark.parametrize("tls", [False, True])
-@pytest.mark.asyncio
-async def test_async_driver_1(hostname, bucket_name, tls, scope_name, collection_name):
- replica_count = 0
- keyspace = f"{bucket_name}.{scope_name}.{collection_name}"
-
- ca = CBConnectLiteAsync(hostname, "Administrator", "password", ssl=tls)
- cluster = await ca.session_a()
-
- await ca.create_bucket(cluster, bucket_name, quota=128)
- bucket = await ca.get_bucket(cluster, bucket_name)
- await ca.create_scope(bucket, scope_name)
- scope = await ca.get_scope(bucket, scope_name)
- await ca.create_collection(bucket, scope, collection_name)
- collection = await ca.get_collection(bucket, scope, collection_name)
-
- await ca.create_primary_index(cluster, bucket, scope, collection, replica=replica_count)
- await ca.create_indexes(cluster, bucket, scope, collection, fields=["data"], replica=replica_count)
-
- await ca.put_doc(collection, "test::1", document)
- result = await ca.get_doc(collection, "test::1")
- assert result == document
+@pytest.fixture(scope="module", autouse=True)
+def setup(request):
+ print("Starting test container")
+ platform = f"linux/{os.uname().machine}"
+ container_id = start_container(image_name, platform)
+ command = ['/bin/bash', '-c', 'test -f /demo/couchbase/.ready']
+ while not run_in_container(container_id, command):
+ time.sleep(1)
+ command = ['cbcutil', 'list', '--host', '127.0.0.1', '--wait']
+ run_in_container(container_id, command)
- result = await ca.collection_count(cluster, keyspace)
- assert result == 1
+ yield container_id
- result = await ca.run_query(cluster, f"select data from {keyspace}")
- assert result[0]['data'] == 'data'
+ print("Stopping test container")
+ stop_container(container_id)
- bm = cluster.buckets()
- await bm.drop_bucket(bucket_name)
-
-@pytest.mark.parametrize("scope, collection", [("_default", "_default"), ("test", "test")])
+@pytest.mark.parametrize("hostname", ["127.0.0.1"])
+@pytest.mark.parametrize("bucket_name", ["test"])
+@pytest.mark.parametrize("scope_name, collection_name", [("_default", "_default"), ("test", "test")])
@pytest.mark.parametrize("tls", [False, True])
-@pytest.mark.asyncio
-async def test_async_driver_2(hostname, bucket, tls, scope, collection):
- keyspace = f"{bucket}.{scope}.{collection}"
- try:
- opc = CBOperationAsync(hostname, "Administrator", "password", ssl=tls)
+class TestAsyncDrv1(object):
+
+ @pytest.mark.asyncio
+ async def test_1(self, hostname, bucket_name, tls, scope_name, collection_name):
+ replica_count = 0
+ keyspace = f"{bucket_name}.{scope_name}.{collection_name}"
+
+ ca = CBConnectLiteAsync(hostname, "Administrator", "password", ssl=tls)
+ cluster = await ca.session_a()
+
+ await ca.create_bucket(cluster, bucket_name, quota=128)
+ bucket = await ca.get_bucket(cluster, bucket_name)
+ await ca.create_scope(bucket, scope_name)
+ scope = await ca.get_scope(bucket, scope_name)
+ await ca.create_collection(bucket, scope, collection_name)
+ collection = await ca.get_collection(bucket, scope, collection_name)
+
+ await ca.create_primary_index(cluster, bucket, scope, collection, replica=replica_count)
+ await ca.create_indexes(cluster, bucket, scope, collection, fields=["data"], replica=replica_count)
+
+ await ca.put_doc(collection, "test::1", document)
+ result = await ca.get_doc(collection, "test::1")
+ assert result == document
+
+ result = await ca.collection_count(cluster, keyspace)
+ assert result == 1
+
+ result = await ca.run_query(cluster, f"select data from {keyspace}")
+ assert result[0]['data'] == 'data'
+
+ bm = cluster.buckets()
+ await bm.drop_bucket(bucket_name)
+
+ @pytest.mark.asyncio
+ async def test_2(self, hostname, bucket_name, tls, scope_name, collection_name):
+ keyspace = f"{bucket_name}.{scope_name}.{collection_name}"
+ try:
+ opc = CBOperationAsync(hostname, "Administrator", "password", ssl=tls)
+ opm = await opc.init()
+ col_a = await opm.connect(keyspace)
+ col_a.cleanup()
+ except (BucketNotFoundException, ScopeNotFoundException, CollectionNotFoundException):
+ pass
+
+ opc = CBOperationAsync(hostname, "Administrator", "password", ssl=tls, quota=128, create=True)
opm = await opc.init()
col_a = await opm.connect(keyspace)
- col_a.cleanup()
- except (BucketNotFoundException, ScopeNotFoundException, CollectionNotFoundException):
- pass
- opc = CBOperationAsync(hostname, "Administrator", "password", ssl=tls, quota=128, create=True)
- opm = await opc.init()
- col_a = await opm.connect(keyspace)
+ await col_a.put_doc(col_a.collection, "test::1", document)
+ d = await col_a.get_doc(col_a.collection, "test::1")
+ assert d == document
- await col_a.put_doc(col_a.collection, "test::1", document)
- d = await col_a.get_doc(col_a.collection, "test::1")
- assert d == document
+ await col_a.index_by_query(f"select data from {keyspace}")
- await col_a.index_by_query(f"select data from {keyspace}")
+ r = await col_a.run_query(col_a.cluster, f"select data from {keyspace}")
+ assert r[0]['data'] == 'data'
- r = await col_a.run_query(col_a.cluster, f"select data from {keyspace}")
- assert r[0]['data'] == 'data'
+ await col_a.cleanup()
- await col_a.cleanup()
-
- opc = CBOperationAsync(hostname, "Administrator", "password", ssl=tls, quota=128, create=True)
- opm = await opc.init()
- col_t = await opm.connect(keyspace)
- a_read = col_t.get_operator(Operation.READ)
- a_write = col_t.get_operator(Operation.WRITE)
- a_query = col_t.get_operator(Operation.QUERY)
+ opc = CBOperationAsync(hostname, "Administrator", "password", ssl=tls, quota=128, create=True)
+ opm = await opc.init()
+ col_t = await opm.connect(keyspace)
+ a_read = col_t.get_operator(Operation.READ)
+ a_write = col_t.get_operator(Operation.WRITE)
+ a_query = col_t.get_operator(Operation.QUERY)
- a_write.prep("test::1", document)
- await a_write.execute()
- a_read.prep("test::1")
- await a_read.execute()
- assert document == a_read.result["test::1"]
+ a_write.prep("test::1", document)
+ await a_write.execute()
+ a_read.prep("test::1")
+ await a_read.execute()
+ assert document == a_read.result["test::1"]
- await col_t.index_by_query(f"select data from {keyspace}")
- a_query.prep(f"select data from {keyspace}")
- await a_query.execute()
- assert a_query.result[0]['data'] == 'data'
+ await col_t.index_by_query(f"select data from {keyspace}")
+ a_query.prep(f"select data from {keyspace}")
+ await a_query.execute()
+ assert a_query.result[0]['data'] == 'data'
- await col_a.cleanup()
+ await col_a.cleanup()
+@pytest.mark.parametrize("hostname", ["127.0.0.1"])
+@pytest.mark.parametrize("bucket", ["test"])
@pytest.mark.parametrize("scope", ["_default", "test"])
@pytest.mark.parametrize("collection", ["test"])
@pytest.mark.parametrize("tls", [False, True])
-@pytest.mark.asyncio
-async def test_async_driver_3(hostname, bucket, tls, scope, collection):
- pool = CBPoolAsync(hostname, "Administrator", "password", ssl=False, quota=128, create=True)
-
- for n in range(10):
- c = string.ascii_lowercase[n:n + 1]
- keyspace = f"{bucket}.{scope}.{collection}{c}"
- await pool.connect(keyspace)
- for i in range(1000):
- await pool.dispatch(keyspace, Operation.WRITE, f"test::{i + 1}", document)
-
- await pool.join()
- await asyncio.sleep(1)
- count = 0
- for n in range(10):
- c = string.ascii_lowercase[n:n + 1]
- keyspace = f"{bucket}.{scope}.{collection}{c}"
- opc = CBOperationAsync(hostname, "Administrator", "password", ssl=tls)
+class TestAsyncDrv2(object):
+
+ @pytest.mark.asyncio
+ async def test_1(self, hostname, bucket, tls, scope, collection):
+ pool = CBPoolAsync(hostname, "Administrator", "password", ssl=False, quota=128, create=True)
+
+ for n in range(10):
+ c = string.ascii_lowercase[n:n + 1]
+ keyspace = f"{bucket}.{scope}.{collection}{c}"
+ await pool.connect(keyspace)
+ for i in range(1000):
+ await pool.dispatch(keyspace, Operation.WRITE, f"test::{i + 1}", document)
+
+ await pool.join()
+ await asyncio.sleep(1)
+ count = 0
+ for n in range(10):
+ c = string.ascii_lowercase[n:n + 1]
+ keyspace = f"{bucket}.{scope}.{collection}{c}"
+ opc = CBOperationAsync(hostname, "Administrator", "password", ssl=tls)
+ opm = await opc.init()
+ opk = await opm.connect(keyspace)
+ count += await opk.get_count()
+ assert count == 10000
+
+ opc = CBOperationAsync(hostname, "Administrator", "password", ssl=tls, quota=128, create=True)
opm = await opc.init()
- opk = await opm.connect(keyspace)
- count += await opk.get_count()
- assert count == 10000
-
- opc = CBOperationAsync(hostname, "Administrator", "password", ssl=tls, quota=128, create=True)
- opm = await opc.init()
- keyspace = f"{bucket}.{scope}.{collection}"
- col_a = await opm.connect(keyspace)
- await col_a.cleanup()
+ keyspace = f"{bucket}.{scope}.{collection}"
+ col_a = await opm.connect(keyspace)
+ await col_a.cleanup()
diff --git a/tests/test_3.py b/tests/test_3.py
index 96a663c..6585310 100644
--- a/tests/test_3.py
+++ b/tests/test_3.py
@@ -1,304 +1,274 @@
#!/usr/bin/env python3
import os
-import subprocess
import re
+import time
+import pytest
import warnings
+from tests.common import start_container, stop_container, run_in_container, cli_run, image_name
+
warnings.filterwarnings("ignore")
current = os.path.dirname(os.path.realpath(__file__))
parent = os.path.dirname(current)
-def cli_run(cmd: str, *args: str, input_file: str = None):
- command_output = ""
- run_cmd = [
- cmd,
- *args
- ]
-
- p = subprocess.Popen(run_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-
- if input_file:
- with open(input_file, 'rb') as input_data:
- while True:
- line = input_data.readline()
- if not line:
- break
- p.stdin.write(line)
- p.stdin.close()
-
- while True:
- line = p.stdout.readline()
- if not line:
- break
- line_string = line.decode("utf-8")
- command_output += line_string
-
- p.wait()
-
- return p.returncode, command_output
-
-
-def test_cli_1(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['load', '--host', hostname, '--count', '30', '--schema', 'employee_demo', '--replica', '0', '--quota', '128']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Inserted 30")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_2(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['load', '--host', hostname, '--count', '30', '--schema', 'employee_demo', '--replica', '0', '--quota', '128', '--safe']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Inserted 0")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_3(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['clean', '--host', hostname, '--schema', 'employee_demo']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Removing bucket employees")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_4(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['load', '--host', hostname, '--count', '1000', '--schema', 'profile_demo', '--replica', '0']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Running link rule rule0")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_5(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['clean', '--host', hostname, '--schema', 'profile_demo']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Removing bucket sample_app")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_6(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['load', '--host', hostname, '--count', '1000', '--schema', 'default', '--replica', '0']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Processing rules")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_7(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['clean', '--host', hostname, '--schema', 'default']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Removing bucket cbperf")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_8(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['list', '--host', hostname, '--wait']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Cluster Host List")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_9(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['load', '--host', hostname, '--count', '100', '--file', current + '/input_template.json', '--replica', '0']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Processing rules")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_10(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['clean', '--host', hostname]
-
- result, output = cli_run(cmd, *args)
- p = re.compile(r"Removing bucket pillowfight")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_11(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['load', '--host', hostname]
-
- result, output = cli_run(cmd, *args, input_file=current + '/input_stdin.dat')
- p = re.compile(r"Collection had 0 documents - inserted 7 additional record")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_12(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['get', '--host', hostname, '-k', 'pillowfight:1']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(r'"record_id": 1')
- assert p.findall(output) is not None
- assert result == 0
-
-
-def test_cli_13(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['get', '--host', hostname, '-k', 'pillowfight:%N']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(r'"record_id": 7')
- assert p.findall(output) is not None
- assert result == 0
-
-
-def test_cli_14(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['clean', '--host', hostname]
-
- result, output = cli_run(cmd, *args)
- p = re.compile(r"Removing bucket pillowfight")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_15(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['load', '--host', hostname, '--count', '30', '--schema', 'employee_demo', '--replica', '0']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Processing rules")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_16(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['export', 'json', '--host', hostname, '-i', '-b', 'employees', '--directory', '/var/tmp']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Retrieved 30 records")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_17(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['export', 'csv', '--host', hostname, '-i', '-b', 'employees', '--directory', '/var/tmp']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Retrieved 30 records")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_18(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['clean', '--host', hostname, '--schema', 'employee_demo']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Removing bucket employees")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_19(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['load', '--host', hostname, '--count', '100', '--schema', 'adjuster_demo', '--replica', '0']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Processing rules")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_20(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['clean', '--host', hostname, '--schema', 'adjuster_demo']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Removing bucket adjuster_demo")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_21(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['load', '--host', hostname, '--schema', 'timecard_sample', '--replica', '0']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Processing rules")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_22(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['clean', '--host', hostname, '--schema', 'timecard_sample']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Removing bucket timecard_sample")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_23(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['load', '--host', hostname, '--schema', 'insurance_sample', '--replica', '0']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Processing rules")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_24(hostname):
- global parent
- cmd = parent + '/tests/test_cli.py'
- args = ['clean', '--host', hostname, '--schema', 'insurance_sample']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Removing bucket insurance_sample")
- assert p.search(output) is not None
- assert result == 0
+@pytest.fixture(scope="module", autouse=True)
+def setup(request):
+ print("Starting test container")
+ platform = f"linux/{os.uname().machine}"
+ container_id = start_container(image_name, platform)
+ command = ['/bin/bash', '-c', 'test -f /demo/couchbase/.ready']
+ while not run_in_container(container_id, command):
+ time.sleep(1)
+ command = ['cbcutil', 'list', '--host', '127.0.0.1', '--wait']
+ run_in_container(container_id, command)
+
+ yield container_id
+
+ print("Stopping test container")
+ stop_container(container_id)
+
+
+@pytest.mark.parametrize("hostname", ["127.0.0.1"])
+class TestCBCCLI(object):
+
+ def test_cli_1(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['load', '--host', hostname, '--count', '30', '--schema', 'employee_demo', '--replica', '0', '--quota', '128']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Inserted 30")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_2(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['load', '--host', hostname, '--count', '30', '--schema', 'employee_demo', '--replica', '0', '--quota', '128', '--safe']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Inserted 0")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_3(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['clean', '--host', hostname, '--schema', 'employee_demo']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Removing bucket employees")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_4(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['load', '--host', hostname, '--count', '1000', '--schema', 'profile_demo', '--replica', '0']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Running link rule rule0")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_5(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['clean', '--host', hostname, '--schema', 'profile_demo']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Removing bucket sample_app")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_6(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['load', '--host', hostname, '--count', '1000', '--schema', 'default', '--replica', '0']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Processing rules")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_7(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['clean', '--host', hostname, '--schema', 'default']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Removing bucket cbperf")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_8(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['list', '--host', hostname, '--wait']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Cluster Host List")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_9(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['load', '--host', hostname, '--count', '100', '--file', current + '/input_template.json', '--replica', '0']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Processing rules")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_10(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['clean', '--host', hostname]
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(r"Removing bucket pillowfight")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_11(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['load', '--host', hostname]
+
+ result, output = cli_run(cmd, *args, input_file=current + '/input_stdin.dat')
+ p = re.compile(r"Collection had 0 documents - inserted 7 additional record")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_12(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['get', '--host', hostname, '-k', 'pillowfight:1']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(r'"record_id": 1')
+ assert p.findall(output) is not None
+ assert result == 0
+
+ def test_cli_13(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['get', '--host', hostname, '-k', 'pillowfight:%N']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(r'"record_id": 7')
+ assert p.findall(output) is not None
+ assert result == 0
+
+ def test_cli_14(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['clean', '--host', hostname]
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(r"Removing bucket pillowfight")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_15(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['load', '--host', hostname, '--count', '30', '--schema', 'employee_demo', '--replica', '0']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Processing rules")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_16(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['export', 'json', '--host', hostname, '-i', '-b', 'employees', '--directory', '/var/tmp']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Retrieved 30 records")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_17(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['export', 'csv', '--host', hostname, '-i', '-b', 'employees', '--directory', '/var/tmp']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Retrieved 30 records")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_18(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['clean', '--host', hostname, '--schema', 'employee_demo']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Removing bucket employees")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_19(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['load', '--host', hostname, '--count', '100', '--schema', 'adjuster_demo', '--replica', '0']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Processing rules")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_20(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['clean', '--host', hostname, '--schema', 'adjuster_demo']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Removing bucket adjuster_demo")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_21(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['load', '--host', hostname, '--schema', 'timecard_sample', '--replica', '0']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Processing rules")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_22(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['clean', '--host', hostname, '--schema', 'timecard_sample']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Removing bucket timecard_sample")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_23(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['load', '--host', hostname, '--schema', 'insurance_sample', '--replica', '0']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Processing rules")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_24(self, hostname):
+ global parent
+ cmd = parent + '/tests/test_cli.py'
+ args = ['clean', '--host', hostname, '--schema', 'insurance_sample']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(f"Removing bucket insurance_sample")
+ assert p.search(output) is not None
+ assert result == 0
diff --git a/tests/test_4.py b/tests/test_4.py
index 993704d..a29e7ae 100644
--- a/tests/test_4.py
+++ b/tests/test_4.py
@@ -9,41 +9,43 @@
warnings.filterwarnings("ignore")
-def test_random_1():
- rand_init()
- g = rand_gender()
- _past_date = past_date()
- _dob_date = dob_date()
- first_name = rand_first_name(g)
- last_name = rand_last_name()
- month = month_value()
- print("Credit Card: " + credit_card())
- print("SSN : " + social_security_number())
- print("Four Digits: " + four_digits())
- print("ZIP Code : " + zip_code())
- print("Account : " + account_number())
- print("Dollar : " + dollar_amount())
- print("Sequence : " + numeric_sequence())
- print("Hash : " + hash_code())
- print("Address : " + address_line())
- print("City : " + rand_city())
- print("State : " + rand_state())
- print("First : " + first_name)
- print("Last : " + last_name)
- print("Nickname : " + nick_name(first_name, last_name))
- print("Email : " + email_address(first_name, last_name))
- print("Username : " + user_name(first_name, last_name))
- print("Phone : " + phone_number())
- print("Boolean : " + str(boolean_value()))
- print("Date : " + date_code())
- print("Year : " + year_value())
- print("Month : " + month)
- print("Day : " + day_value(month))
- print("Franchise : " + rand_franchise())
- print("Corporation: " + rand_corporation())
- print("Past Date 1: " + past_date_slash(_past_date))
- print("Past Date 2: " + past_date_hyphen(_past_date))
- print("Past Date 3: " + past_date_text(_past_date))
- print("DOB Date 1 : " + dob_slash(_dob_date))
- print("DOB Date 2 : " + dob_hyphen(_dob_date))
- print("DOB Date 3 : " + dob_text(_dob_date))
+class TestRandomizer(object):
+
+ def test_1(self):
+ rand_init()
+ g = rand_gender()
+ _past_date = past_date()
+ _dob_date = dob_date()
+ first_name = rand_first_name(g)
+ last_name = rand_last_name()
+ month = month_value()
+ print("Credit Card: " + credit_card())
+ print("SSN : " + social_security_number())
+ print("Four Digits: " + four_digits())
+ print("ZIP Code : " + zip_code())
+ print("Account : " + account_number())
+ print("Dollar : " + dollar_amount())
+ print("Sequence : " + numeric_sequence())
+ print("Hash : " + hash_code())
+ print("Address : " + address_line())
+ print("City : " + rand_city())
+ print("State : " + rand_state())
+ print("First : " + first_name)
+ print("Last : " + last_name)
+ print("Nickname : " + nick_name(first_name, last_name))
+ print("Email : " + email_address(first_name, last_name))
+ print("Username : " + user_name(first_name, last_name))
+ print("Phone : " + phone_number())
+ print("Boolean : " + str(boolean_value()))
+ print("Date : " + date_code())
+ print("Year : " + year_value())
+ print("Month : " + month)
+ print("Day : " + day_value(month))
+ print("Franchise : " + rand_franchise())
+ print("Corporation: " + rand_corporation())
+ print("Past Date 1: " + past_date_slash(_past_date))
+ print("Past Date 2: " + past_date_hyphen(_past_date))
+ print("Past Date 3: " + past_date_text(_past_date))
+ print("DOB Date 1 : " + dob_slash(_dob_date))
+ print("DOB Date 2 : " + dob_hyphen(_dob_date))
+ print("DOB Date 3 : " + dob_text(_dob_date))
diff --git a/tests/test_5.py b/tests/test_5.py
index a34f974..498d12d 100755
--- a/tests/test_5.py
+++ b/tests/test_5.py
@@ -1,279 +1,224 @@
#!/usr/bin/env python3
-import subprocess
import re
import warnings
import pytest
-import docker
+import time
+import os
from tests import get_test_file
+from tests.common import start_container, stop_container, run_in_container, cli_run, image_name
-warnings.filterwarnings("ignore")
-
-
-def cli_run(cmd: str, *args: str):
- command_output = ""
- run_cmd = [
- cmd,
- *args
- ]
-
- p = subprocess.Popen(run_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-
- while True:
- line = p.stdout.readline()
- if not line:
- break
- line_string = line.decode("utf-8")
- command_output += line_string
-
- p.communicate()
- return p.returncode, command_output
+warnings.filterwarnings("ignore")
@pytest.fixture(scope="module", autouse=True)
-def data_load(request):
- client = docker.from_env()
- container_id = client.containers.get('pytest')
- print("Creating test buckets and loading data")
- exit_code, output = container_id.exec_run(['cbcutil',
- 'load',
- '--host', '127.0.0.1',
- '--count', '30',
- '--schema', 'employee_demo',
- '--replica', '0',
- '--safe',
- '--quota', '128'])
- for line in output.split(b'\n'):
- print(line.decode("utf-8"))
- assert exit_code == 0
- exit_code, output = container_id.exec_run(['cbcutil',
- 'load',
- '--host', '127.0.0.1',
- '--schema', 'insurance_sample',
- '--replica', '0',
- '--safe',
- '--quota', '128'])
- for line in output.split(b'\n'):
- print(line.decode("utf-8"))
- assert exit_code == 0
- print("Ready.")
-
- yield exit_code
-
- print("Removing test buckets")
- exit_code, output = container_id.exec_run(['cbcutil',
- 'clean',
- '--host', '127.0.0.1',
- '--schema', 'employee_demo'])
- for line in output.split(b'\n'):
- print(line.decode("utf-8"))
- assert exit_code == 0
- exit_code, output = container_id.exec_run(['cbcutil',
- 'clean',
- '--host', '127.0.0.1',
- '--schema', 'insurance_sample'])
- for line in output.split(b'\n'):
- print(line.decode("utf-8"))
- assert exit_code == 0
- print("Test Complete")
-
-
-def test_cli_1(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['database', 'list', '-h', hostname, '-n', "testdb"]
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Database testdb does not exist.")
- assert p.search(output) is not None
- assert result == 1
-
-
-def test_cli_2(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['database', 'create', '-h', hostname, '-n', "testdb", '-b', 'employees']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Database testdb created for bucket employees")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_3(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['database', 'list', '-h', hostname, '-n', "testdb"]
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Bucket:.*employees")
- assert p.search(output) is not None
- p = re.compile(f"Name:.*testdb")
- assert p.search(output) is not None
- p = re.compile(f"Replicas:.*0")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_4(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['database', 'dump', '-h', hostname, '-n', "testdb"]
-
- result, output = cli_run(cmd, *args)
- p = re.compile(r"Key: .* Id: .* Channels: .*")
- assert p.findall(output) is not None
- assert result == 0
-
-
-def test_cli_5(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['user', 'list', '-h', hostname, '-n', "testdb", '--sguser', 'demouser']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(r"User demouser does not exist")
- assert p.search(output) is not None
- assert result == 1
-
-
-def test_cli_6(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['user', 'create', '-h', hostname, '-n', "testdb", '--sguser', "demouser", '--sgpass', "password"]
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"User demouser created for database testdb")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_7(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['user', 'list', '-h', hostname, '-n', "testdb", '--all']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"demouser.*")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_8(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['user', 'list', '-h', hostname, '-n', "testdb", '--sguser', "demouser"]
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Name:.*demouser")
- assert p.search(output) is not None
- p = re.compile(f"Admin channels")
- assert p.search(output) is not None
- p = re.compile(f"All channels")
- assert p.search(output) is not None
- p = re.compile(f"Disabled:.*False")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_9(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['user', 'map', '-h', hostname, '-d', hostname, '-F', 'store_id', '-k', 'employees', '-n', 'testdb']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(r"User store_id@1 created for database testdb")
- assert p.findall(output) is not None
- assert result == 0
-
-
-def test_cli_10(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['user', 'list', '-h', hostname, '-n', "testdb", '--sguser', "store_id@1"]
-
- result, output = cli_run(cmd, *args)
- p = re.compile(r"Name:.*store_id@1")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_11(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['database', 'sync', '-h', hostname, '-n', 'testdb', '-f', get_test_file('employee.js')]
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Sync function created for database testdb")
- assert p.findall(output) is not None
- assert result == 0
+def setup(request):
+ print("Starting test container")
+ platform = f"linux/{os.uname().machine}"
+ container_id = start_container(image_name, platform)
+ command = ['/bin/bash', '-c', 'test -f /demo/couchbase/.ready']
+ while not run_in_container(container_id, command):
+ time.sleep(1)
-def test_cli_12(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['database', 'sync', '-h', hostname, '-n', 'testdb', '-g']
+ command = ['cbcutil', 'list', '--host', '127.0.0.1', '--wait']
+ run_in_container(container_id, command)
- result, output = cli_run(cmd, *args)
- p = re.compile(r"function sync.*")
- assert p.findall(output) is not None
- assert result == 0
-
-
-def test_cli_13(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['user', 'delete', '-h', hostname, '-n', "testdb", '--sguser', "demouser"]
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"User demouser deleted from testdb")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_14(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['database', 'delete', '-h', hostname, '-n', "testdb"]
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Database testdb deleted")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_15(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['database', 'create', '-h', hostname, '-n', 'insurance', '-b', 'insurance_sample', '-k', 'insurance_sample.data']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Database insurance created")
- assert p.search(output) is not None
- assert result == 0
-
-
-def test_cli_16(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['user', 'map', '-h', hostname, '-d', hostname, '-F', 'region', '-k', 'insurance_sample', '-n', 'insurance']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(r"User region@global created for database insurance")
- assert p.findall(output) is not None
- assert result == 0
-
-
-def test_cli_17(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['database', 'sync', '-h', hostname, '-n', 'insurance', '-f', get_test_file('insurance.js')]
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Sync function created for database insurance.data.adjuster")
- assert p.findall(output) is not None
- assert result == 0
-
-
-def test_cli_18(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['auth', 'session', '-h', hostname, '-n', 'insurance', '-U', 'region@central']
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f".*cookie_name.*SyncGatewaySession")
- assert p.findall(output) is not None
- assert result == 0
-
-
-def test_cli_19(hostname, bucket):
- cmd = get_test_file('test_sgw_cli.py')
- args = ['database', 'delete', '-h', hostname, '-n', "insurance"]
-
- result, output = cli_run(cmd, *args)
- p = re.compile(f"Database insurance deleted")
- assert p.search(output) is not None
- assert result == 0
+ print("Creating test buckets and loading data")
+ command = ['cbcutil', 'load', '--host', '127.0.0.1', '--count', '30', '--schema', 'employee_demo', '--replica', '0', '--safe', '--quota', '128']
+ assert run_in_container(container_id, command) is True
+
+ command = ['cbcutil', 'load', '--host', '127.0.0.1', '--schema', 'insurance_sample', '--replica', '0', '--safe', '--quota', '128']
+ assert run_in_container(container_id, command) is True
+
+ yield container_id
+
+ print("Stopping test container")
+ stop_container(container_id)
+
+
+@pytest.mark.parametrize("hostname", ["127.0.0.1"])
+@pytest.mark.parametrize("bucket", ["test"])
+class TestSGWCLI(object):
+
+ def test_cli_1(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['database', 'list', '-h', hostname, '-n', "testdb"]
+
+ result, output = cli_run(cmd, *args)
+        p = re.compile("Database testdb does not exist.")
+ assert p.search(output) is not None
+ assert result == 1
+
+ def test_cli_2(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['database', 'create', '-h', hostname, '-n', "testdb", '-b', 'employees']
+
+ result, output = cli_run(cmd, *args)
+        p = re.compile("Database testdb created for bucket employees")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_3(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['database', 'list', '-h', hostname, '-n', "testdb"]
+
+ result, output = cli_run(cmd, *args)
+        p = re.compile("Bucket:.*employees")
+        assert p.search(output) is not None
+        p = re.compile("Name:.*testdb")
+        assert p.search(output) is not None
+        p = re.compile("Replicas:.*0")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_4(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['database', 'dump', '-h', hostname, '-n', "testdb"]
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(r"Key: .* Id: .* Channels: .*")
+        assert len(p.findall(output)) > 0
+ assert result == 0
+
+ def test_cli_5(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['user', 'list', '-h', hostname, '-n', "testdb", '--sguser', 'demouser']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(r"User demouser does not exist")
+ assert p.search(output) is not None
+ assert result == 1
+
+ def test_cli_6(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['user', 'create', '-h', hostname, '-n', "testdb", '--sguser', "demouser", '--sgpass', "password"]
+
+ result, output = cli_run(cmd, *args)
+        p = re.compile("User demouser created for database testdb")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_7(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['user', 'list', '-h', hostname, '-n', "testdb", '--all']
+
+ result, output = cli_run(cmd, *args)
+        p = re.compile("demouser.*")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_8(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['user', 'list', '-h', hostname, '-n', "testdb", '--sguser', "demouser"]
+
+ result, output = cli_run(cmd, *args)
+        p = re.compile("Name:.*demouser")
+        assert p.search(output) is not None
+        p = re.compile("Admin channels")
+        assert p.search(output) is not None
+        p = re.compile("All channels")
+        assert p.search(output) is not None
+        p = re.compile("Disabled:.*False")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_9(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['user', 'map', '-h', hostname, '-d', hostname, '-F', 'store_id', '-k', 'employees', '-n', 'testdb']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(r"User store_id@1 created for database testdb")
+        assert len(p.findall(output)) > 0
+ assert result == 0
+
+ def test_cli_10(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['user', 'list', '-h', hostname, '-n', "testdb", '--sguser', "store_id@1"]
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(r"Name:.*store_id@1")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_11(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['database', 'sync', '-h', hostname, '-n', 'testdb', '-f', get_test_file('employee.js')]
+
+ result, output = cli_run(cmd, *args)
+        p = re.compile("Sync function created for database testdb")
+        assert len(p.findall(output)) > 0
+ assert result == 0
+
+ def test_cli_12(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['database', 'sync', '-h', hostname, '-n', 'testdb', '-g']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(r"function sync.*")
+        assert len(p.findall(output)) > 0
+ assert result == 0
+
+ def test_cli_13(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['user', 'delete', '-h', hostname, '-n', "testdb", '--sguser', "demouser"]
+
+ result, output = cli_run(cmd, *args)
+        p = re.compile("User demouser deleted from testdb")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_14(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['database', 'delete', '-h', hostname, '-n', "testdb"]
+
+ result, output = cli_run(cmd, *args)
+        p = re.compile("Database testdb deleted")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_15(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['database', 'create', '-h', hostname, '-n', 'insurance', '-b', 'insurance_sample', '-k', 'insurance_sample.data']
+
+ result, output = cli_run(cmd, *args)
+        p = re.compile("Database insurance created")
+ assert p.search(output) is not None
+ assert result == 0
+
+ def test_cli_16(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['user', 'map', '-h', hostname, '-d', hostname, '-F', 'region', '-k', 'insurance_sample', '-n', 'insurance']
+
+ result, output = cli_run(cmd, *args)
+ p = re.compile(r"User region@global created for database insurance")
+        assert len(p.findall(output)) > 0
+ assert result == 0
+
+ def test_cli_17(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['database', 'sync', '-h', hostname, '-n', 'insurance', '-f', get_test_file('insurance.js')]
+
+ result, output = cli_run(cmd, *args)
+        p = re.compile("Sync function created for database insurance.data.adjuster")
+        assert len(p.findall(output)) > 0
+ assert result == 0
+
+ def test_cli_18(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['auth', 'session', '-h', hostname, '-n', 'insurance', '-U', 'region@central']
+
+ result, output = cli_run(cmd, *args)
+        p = re.compile(".*cookie_name.*SyncGatewaySession")
+        assert len(p.findall(output)) > 0
+ assert result == 0
+
+ def test_cli_19(self, hostname, bucket):
+ cmd = get_test_file('test_sgw_cli.py')
+ args = ['database', 'delete', '-h', hostname, '-n', "insurance"]
+
+ result, output = cli_run(cmd, *args)
+        p = re.compile("Database insurance deleted")
+ assert p.search(output) is not None
+ assert result == 0
diff --git a/tests/test_6.py b/tests/test_6.py
index c510ad4..041a1da 100755
--- a/tests/test_6.py
+++ b/tests/test_6.py
@@ -8,56 +8,58 @@
warnings.filterwarnings("ignore")
-def test_capella_1():
- project = Capella().get_project('pytest-project')
- project_id = project.get('id')
+class TestCapella(object):
- assert project_id is not None
+ def test_1(self):
+ project = Capella().get_project('pytest-project')
+ project_id = project.get('id')
- cluster = CapellaCluster().create("pytest-cluster", "Pytest created cluster", "aws", "us-east-2")
- cluster.add_service_group("aws", "4x16")
+ assert project_id is not None
- print("Creating cluster")
- cluster_id = Capella(project_id=project_id).create_cluster(cluster)
+ cluster = CapellaCluster().create("pytest-cluster", "Pytest created cluster", "aws", "us-east-2")
+ cluster.add_service_group("aws", "4x16")
- assert cluster_id is not None
+ print("Creating cluster")
+ cluster_id = Capella(project_id=project_id).create_cluster(cluster)
- print("Waiting for cluster creation to complete")
- result = Capella(project_id=project_id).wait_for_cluster("pytest-cluster")
+ assert cluster_id is not None
- assert result is True
+ print("Waiting for cluster creation to complete")
+ result = Capella(project_id=project_id).wait_for_cluster("pytest-cluster")
- cidr = AllowedCIDR().create()
+ assert result is True
- print("Creating allowed CIDR")
- cidr_id = Capella(project_id=project_id).allow_cidr(cluster_id, cidr)
+ cidr = AllowedCIDR().create()
- assert cidr_id is not None
+ print("Creating allowed CIDR")
+ cidr_id = Capella(project_id=project_id).allow_cidr(cluster_id, cidr)
- credentials = Credentials().create("sysdba", "Passw0rd!")
+ assert cidr_id is not None
- print("Creating database credentials")
- account_id = Capella(project_id=project_id).add_db_user(cluster_id, credentials)
+ credentials = Credentials().create("sysdba", "Passw0rd!")
- assert account_id is not None
+ print("Creating database credentials")
+ account_id = Capella(project_id=project_id).add_db_user(cluster_id, credentials)
- bucket = Bucket(**dict(
- name="employees",
- ram_quota_mb=128
- ))
+ assert account_id is not None
- print("Creating bucket")
- bucket_id = Capella(project_id=project_id).add_bucket(cluster_id, bucket)
+ bucket = Bucket(**dict(
+ name="employees",
+ ram_quota_mb=128
+ ))
- assert bucket_id is not None
- time.sleep(1)
+ print("Creating bucket")
+ bucket_id = Capella(project_id=project_id).add_bucket(cluster_id, bucket)
- print("Deleting bucket")
- Capella(project_id=project_id).delete_bucket("pytest-cluster", "employees")
+ assert bucket_id is not None
+ time.sleep(1)
- print("Deleting cluster")
- Capella(project_id=project_id).delete_cluster("pytest-cluster")
- print("Waiting for cluster deletion to complete")
- result = Capella(project_id=project_id).wait_for_cluster_delete("pytest-cluster")
+ print("Deleting bucket")
+ Capella(project_id=project_id).delete_bucket("pytest-cluster", "employees")
- assert result is True
+ print("Deleting cluster")
+ Capella(project_id=project_id).delete_cluster("pytest-cluster")
+ print("Waiting for cluster deletion to complete")
+ result = Capella(project_id=project_id).wait_for_cluster_delete("pytest-cluster")
+
+ assert result is True