Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

merge 3 test cases from legacy branch #1781

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions keywords/couchbaseserver.py
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,19 @@ def delete_buckets(self):
count += 1
time.sleep(15)

def flush_bucket(self, bucket_name):
    """Flush all data from the given Couchbase Server bucket.

    Issues the Couchbase REST ``doFlush`` call; flush must be enabled on the
    bucket for the server to accept it.

    Args:
        bucket_name: Name of the bucket to flush.

    Raises:
        HTTPError: If the flush request returns a non-2xx status.
    """
    log_info("Flushing bucket {}".format(bucket_name))

    resp = None
    try:
        resp = self._session.post("{}/pools/default/buckets/{}/controller/doFlush".format(self.url, bucket_name))
        log_r(resp)
        resp.raise_for_status()
    except HTTPError as h:
        # resp is guaranteed set here (only raise_for_status raises HTTPError).
        # Log status_code/text rather than the object repr and resp.json():
        # json() would raise a second exception on a non-JSON error body.
        log_info("resp code: {}; resp text: {}; error: {}".format(resp.status_code, resp.text, h))
        raise

def wait_for_ready_state(self):
"""
Verify all server nodes are in a "healthy" state to avoid sync_gateway startup failures
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1561,6 +1561,39 @@ def test_getDoc_withLocale(params_from_base_suite_setup, doc_id_prefix):
assert len(docs_from_cbl) == 5, "Results for locale with - did not return 5 records"


@pytest.mark.parametrize("doc_id_prefix", [
    ("doc_with_no_data_1")
])
def test_getDoc_withNoData(params_from_base_suite_setup, doc_id_prefix):
    """ @summary
    1. Save a doc whose body is an empty dictionary
    2. Fetch the doc by id
    3. Run the query below and verify exactly one row comes back:
       Query query = QueryBuilder
       .select(SelectResult.all())
       .from(DataSource.database(database))
       .where((Meta.id).equalTo(doc_id));
    """
    base_url = params_from_base_suite_setup["base_url"]
    db_client = Database(base_url)
    query_client = Query(base_url)

    # One document, empty body
    db = db_client.create("db_name")
    doc_id = "{}_{}".format(doc_id_prefix, 0)
    db_client.saveDocuments(db, {doc_id: {}})

    # Query by Meta.id and materialize the result set
    docs_from_cbl = list(query_client.query_get_doc(db, doc_id))
    assert len(docs_from_cbl) == 1, "Results for doc with empty data - did not return the record"


def test_query_arthimetic(params_from_base_suite_setup):
"""
@summary:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from keywords.MobileRestClient import MobileRestClient
from keywords.ClusterKeywords import ClusterKeywords
from keywords import couchbaseserver
from keywords.couchbaseserver import CouchbaseServer
from keywords.utils import log_info, random_string, get_embedded_asset_file_path
from CBLClient.Database import Database
from CBLClient.Replication import Replication
Expand All @@ -15,7 +16,7 @@
from CBLClient.Blob import Blob
from CBLClient.Dictionary import Dictionary

from keywords.SyncGateway import sync_gateway_config_path_for_mode
from keywords.SyncGateway import sync_gateway_config_path_for_mode, SyncGateway
from keywords import document, attachment
from libraries.testkit import cluster
from utilities.cluster_config_utils import persist_cluster_config_environment_prop, copy_to_temp_conf
Expand Down Expand Up @@ -4023,6 +4024,153 @@ def test_replication_pull_from_empty_database(params_from_base_test_setup, attac
assert len(cbl_docs) == 75, "The number of docs pull from SGW is incorrect"


def test_replication_stop_requested_on_replicator_busy(params_from_base_test_setup):
    """
    @summary: to verify no crash if stop replication requested while replicator is still busy
    1. Create a bigger chunk of docs on SGW
    2. Start a continuous pull replication
    3. Send a replicator stop request, ensure the request is sent while replicator is busy
    4. Verify the replicator status, ensure there is no crash
    """
    sg_db = "db"
    sg_url = params_from_base_test_setup["sg_url"]
    sg_admin_url = params_from_base_test_setup["sg_admin_url"]
    sg_blip_url = params_from_base_test_setup["target_url"]
    base_url = params_from_base_test_setup["base_url"]
    cluster_config = params_from_base_test_setup["cluster_config"]
    sg_config = params_from_base_test_setup["sg_config"]
    cbl_db = params_from_base_test_setup["source_db"]
    channels = ["replication-channel"]

    num_of_docs = 10000
    sg_client = MobileRestClient()

    # Modify sync-gateway config
    c = cluster.Cluster(config=cluster_config)
    c.reset(sg_config_path=sg_config)

    # 1. Add docs to SG.
    sg_client.create_user(sg_admin_url, sg_db, "autotest", password="password", channels=channels)
    cookie, session_id = sg_client.create_session(sg_admin_url, sg_db, "autotest")
    session = cookie, session_id
    sg_docs = document.create_docs(doc_id_prefix='sg_docs', number=num_of_docs, channels=channels)
    sg_docs = sg_client.add_bulk_docs(url=sg_url, db=sg_db, docs=sg_docs, auth=session)
    assert len(sg_docs) == num_of_docs

    # 2. Pull replication to CBL
    replicator = Replication(base_url)
    authenticator = Authenticator(base_url)
    replicator_authenticator = authenticator.authentication(session_id, cookie, authentication_type="session")
    repl_config = replicator.configure(source_db=cbl_db,
                                       target_url=sg_blip_url,
                                       continuous=True,
                                       channels=channels,
                                       replicator_authenticator=replicator_authenticator,
                                       replication_type="pull")
    repl = replicator.create(repl_config)
    replicator.start(repl)
    # Give the replicator a moment to enter a busy/connecting state before
    # requesting the stop.
    time.sleep(2)
    # 3. Stop replicator while it's busy or still connecting
    stopped_while_busy = replicator.getActivitylevel(repl) in ("busy", "connecting")
    if stopped_while_busy:
        replicator.stop(repl)

    # 4. Validate completed count, ensure there is no crash
    total = replicator.getTotal(repl)
    completed = replicator.getCompleted(repl)
    assert total >= completed, "total is less than completed"

    # If the replicator was already past busy/connecting at the check above it
    # was never stopped — stop it now so the continuous replication is not
    # leaked into subsequent tests.
    if not stopped_while_busy:
        replicator.stop(repl)


def _one_shot_pull_and_verify(base_url, cbl_db, sg_blip_url, channels, session_id, cookie):
    """Run a one-shot pull replication SGW -> CBL and assert it fully completes."""
    replicator = Replication(base_url)
    authenticator = Authenticator(base_url)
    replicator_authenticator = authenticator.authentication(session_id, cookie, authentication_type="session")
    repl_config = replicator.configure(source_db=cbl_db,
                                       target_url=sg_blip_url,
                                       continuous=False,
                                       channels=channels,
                                       replicator_authenticator=replicator_authenticator,
                                       replication_type="pull")
    repl = replicator.create(repl_config)
    replicator.start(repl)
    replicator.wait_until_replicator_idle(repl)
    total = replicator.getTotal(repl)
    completed = replicator.getCompleted(repl)
    assert total == completed, "total is not equal to completed"
    replicator.stop(repl)


def test_replication_flush_checkpoint_no_crash(params_from_base_test_setup):
    """
    @summary: to verify no crash if checkpoint data gets flush
    1. add docs to the db on SGW
    2. start a one-shot pull replicator so the CBL db pulls the docs from the SGW db
    3. stop sync gateway service
    4. flush the bucket on CBS which has sync gateway checkpoint data
    5. start sync gateway service
    6. start a pull replicator, to verify there is no crash
    """
    sg_db = "db"
    sg_url = params_from_base_test_setup["sg_url"]
    sg_admin_url = params_from_base_test_setup["sg_admin_url"]
    cluster_config = params_from_base_test_setup["cluster_config"]
    sg_blip_url = params_from_base_test_setup["target_url"]
    base_url = params_from_base_test_setup["base_url"]
    sg_config = params_from_base_test_setup["sg_config"]
    db = params_from_base_test_setup["db"]
    cbl_db = params_from_base_test_setup["source_db"]
    cbs_url = params_from_base_test_setup["cbs_url"]

    channels = ["*"]
    sg_client = MobileRestClient()
    num_of_docs = 10

    # Modify sync-gateway config
    c = cluster.Cluster(config=cluster_config)
    c.reset(sg_config_path=sg_config)

    # 1. Add docs to SG.
    sg_client.create_user(sg_admin_url, sg_db, "autotest", password="password", channels=channels)
    cookie, session_id = sg_client.create_session(sg_admin_url, sg_db, "autotest")
    session = cookie, session_id
    sg_docs = document.create_docs(doc_id_prefix='sg_docs', number=num_of_docs, channels=channels)
    sg_docs = sg_client.add_bulk_docs(url=sg_admin_url, db=sg_db, docs=sg_docs, auth=session)
    assert len(sg_docs) == num_of_docs

    # 2. Pull replication to CBL
    _one_shot_pull_and_verify(base_url, cbl_db, sg_blip_url, channels, session_id, cookie)

    sg_docs = sg_client.get_all_docs(url=sg_admin_url, db=sg_db, include_docs=True)["rows"]
    cbl_doc_count = db.getCount(cbl_db)
    assert len(sg_docs) == cbl_doc_count, "Expected number of docs does not exist in sync-gateway after replication"

    # 3. Stop sync gateway service
    sg = SyncGateway()
    sg.stop_sync_gateways(cluster_config=cluster_config, url=sg_url)

    # 4. Flush bucket on cbs server (wipes the sync gateway checkpoint docs)
    cbs = CouchbaseServer(cbs_url)
    cbs.flush_bucket("travel-sample")

    # 5. Start sync gateway service
    sg.start_sync_gateways(cluster_config=cluster_config, url=sg_url, config=sg_config)

    # 6. Start a pull replicator, to verify there is no crash. A fresh session
    # is required because the flush also removed the user/session docs.
    sg_client.create_user(sg_admin_url, sg_db, "autotest", password="password", channels=channels)
    cookie, session_id = sg_client.create_session(sg_admin_url, sg_db, "autotest")
    _one_shot_pull_and_verify(base_url, cbl_db, sg_blip_url, channels, session_id, cookie)


def update_and_resetCheckPoint(db, cbl_db, replicator, repl, replication_type, repl_config, num_of_updates):
# update docs in CBL
db.update_bulk_docs(cbl_db)
Expand Down
3 changes: 3 additions & 0 deletions testsuites/CBLTester/CBL_Functional_tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -504,6 +504,7 @@ def params_from_base_suite_setup(request):
"suite_db_log_files": suite_db_log_files,
"enable_encryption": enable_encryption,
"encryption_password": encryption_password,
"cbs_url": cbs_url,
"cbs_ce": cbs_ce,
"sg_ce": sg_ce,
"cbl_ce": cbl_ce
Expand Down Expand Up @@ -575,6 +576,7 @@ def params_from_base_test_setup(request, params_from_base_suite_setup):
cbl_log_decoder_build = params_from_base_suite_setup["cbl_log_decoder_build"]
encryption_password = params_from_base_suite_setup["encryption_password"]
enable_encryption = params_from_base_suite_setup["enable_encryption"]
cbs_url = params_from_base_suite_setup["cbs_url"]
use_local_testserver = request.config.getoption("--use-local-testserver")
cbl_ce = params_from_base_suite_setup["cbl_ce"]
cbs_ce = params_from_base_suite_setup["cbs_ce"]
Expand Down Expand Up @@ -672,6 +674,7 @@ def params_from_base_test_setup(request, params_from_base_suite_setup):
"enable_encryption": enable_encryption,
"encryption_password": encryption_password,
"enable_file_logging": enable_file_logging,
"cbs_url": cbs_url,
"test_cbllog": test_cbllog,
"cbs_ce": cbs_ce,
"sg_ce": sg_ce,
Expand Down