From 8bfebd95b0d489c5469a2e6a1f968add50116161 Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Fri, 2 Feb 2018 00:05:12 +0000 Subject: [PATCH 01/16] Fix self nsprintf Add logging to udf used in test --- src/main/client/admin.c | 2 +- src/main/client/info_node.c | 2 +- src/main/client/operate.c | 4 ++-- test/bin_lua.lua | 1 + 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/main/client/admin.c b/src/main/client/admin.c index f561091b3..56fec9485 100644 --- a/src/main/client/admin.c +++ b/src/main/client/admin.c @@ -830,7 +830,7 @@ PyObject * AerospikeClient_Admin_Query_Users( AerospikeClient * self, PyObject * aerospike_query_users(self->as, &err, admin_policy_p, &users, &users_size); Py_END_ALLOW_THREADS if (err.code != AEROSPIKE_OK) { - as_error_update(&err, err.code, err.message); + as_error_update(&err, err.code, NULL); goto CLEANUP; } diff --git a/src/main/client/info_node.c b/src/main/client/info_node.c index f95ae8a1f..6d098a7d4 100644 --- a/src/main/client/info_node.c +++ b/src/main/client/info_node.c @@ -174,7 +174,7 @@ static PyObject * AerospikeClient_InfoNode_Invoke( goto CLEANUP; } } else { - as_error_update(err, err->code, err->message); + as_error_update(err, err->code, NULL); goto CLEANUP; } diff --git a/src/main/client/operate.c b/src/main/client/operate.c index 6322a586a..45b95d125 100644 --- a/src/main/client/operate.c +++ b/src/main/client/operate.c @@ -863,7 +863,7 @@ static PyObject * AerospikeClient_OperateOrdered_Invoke( } if (err->code != AEROSPIKE_OK) { - as_error_update(err, err->code, err->message); + as_error_update(err, err->code, NULL); goto CLEANUP; } @@ -875,7 +875,7 @@ static PyObject * AerospikeClient_OperateOrdered_Invoke( Py_END_ALLOW_THREADS if (err->code != AEROSPIKE_OK) { - as_error_update(err, err->code, err->message); + as_error_update(err, err->code, NULL); goto CLEANUP; } diff --git a/test/bin_lua.lua b/test/bin_lua.lua index 2d33ab19d..a504cb629 100644 --- a/test/bin_lua.lua +++ b/test/bin_lua.lua @@ -1,4 +1,5 @@ function mytransform(rec, bin, offset) + info("my transform: %s", tostring(record.digest(rec))) rec['age'] = rec['age'] + offset aerospike:update(rec) end From eae44f8d4e767b98f5a4fc14c06084bc943061f0 Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Sat, 3 Feb 2018 01:03:50 +0000 Subject: [PATCH 02/16] Switch from casting to uint32_t to having individual setters for each enum value --- src/include/policy_config.h | 7 ++ src/main/policy_config.c | 196 +++++++++++++++++++++++++++++++----- 2 files changed, 180 insertions(+), 23 deletions(-) diff --git a/src/include/policy_config.h b/src/include/policy_config.h index 02bf89309..d6e04d8ec 100644 --- a/src/include/policy_config.h +++ b/src/include/policy_config.h @@ -24,6 +24,13 @@ as_status set_optional_uint32_property(uint32_t* target_ptr, PyObject* policy_di as_status set_optional_bool_property(bool* target_ptr, PyObject* py_policy, const char* name); as_status set_base_policy(as_policy_base* base, PyObject* py_policy); +as_status set_optional_key(as_policy_key* target_ptr, PyObject* py_policy, const char* name); +as_status set_optional_replica(as_policy_replica* target_ptr, PyObject* py_policy, const char* name); +as_status set_optional_commit_level(as_policy_commit_level* target_ptr, PyObject* py_policy, const char* name); +as_status set_optional_consistency_level(as_policy_consistency_level* target_ptr, PyObject* py_policy, const char* name); +as_status set_optional_gen(as_policy_gen* target_ptr, PyObject* py_policy, const char* name); +as_status 
set_optional_exists(as_policy_exists* target_ptr, PyObject* py_policy, const char* name); + as_status set_subpolicies(as_config* config, PyObject* py_policies); as_status set_read_policy(as_policy_read* read_policy, PyObject* py_policy); as_status set_write_policy(as_policy_write* write_policy, PyObject* py_policy); diff --git a/src/main/policy_config.c b/src/main/policy_config.c index 81425c206..b5995a639 100644 --- a/src/main/policy_config.c +++ b/src/main/policy_config.c @@ -15,7 +15,13 @@ ******************************************************************************/ #include "policy_config.h" - +as_status set_optional_key(as_policy_key* target_ptr, PyObject* py_policy, const char* name); +as_status set_optional_replica(as_policy_replica* target_ptr, PyObject* py_policy, const char* name); +as_status set_optional_commit_level(as_policy_commit_level* target_ptr, PyObject* py_policy, const char* name); +as_status set_optional_consistency_level(as_policy_consistency_level* target_ptr, PyObject* py_policy, const char* name); +as_status set_optional_gen(as_policy_gen* target_ptr, PyObject* py_policy, const char* name); +as_status set_optional_exists(as_policy_exists* target_ptr, PyObject* py_policy, const char* name); +as_status get_uint32_value(PyObject* py_policy_val, uint32_t* return_uint32); /* * py_policies must exist, and be a dictionary */ @@ -90,17 +96,17 @@ as_status set_read_policy(as_policy_read* read_policy, PyObject* py_policy) { return status; } - status = set_optional_uint32_property((uint32_t*)&read_policy->key, py_policy, "key"); + status = set_optional_key(&read_policy->key, py_policy, "key"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&read_policy->replica, py_policy, "replica"); + status = set_optional_replica(&read_policy->replica, py_policy, "replica"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&read_policy->consistency_level, py_policy, "consistency_level"); + status = set_optional_consistency_level(&read_policy->consistency_level, py_policy, "consistency_level"); if (status != AEROSPIKE_OK) { return status; } @@ -135,27 +141,27 @@ as_status set_write_policy(as_policy_write* write_policy, PyObject* py_policy) { return status; } - status = set_optional_uint32_property((uint32_t*)&write_policy->key, py_policy, "key"); + status = set_optional_key(&write_policy->key, py_policy, "key"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&write_policy->replica, py_policy, "replica"); + status = set_optional_replica(&write_policy->replica, py_policy, "replica"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&write_policy->commit_level, py_policy, "commit_level"); + status = set_optional_commit_level(&write_policy->commit_level, py_policy, "commit_level"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&write_policy->gen, py_policy, "gen"); + status = set_optional_gen(&write_policy->gen, py_policy, "gen"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&write_policy->exists, py_policy, "exists"); + status = set_optional_exists(&write_policy->exists, py_policy, "exists"); if (status != AEROSPIKE_OK) { return status; } @@ -190,22 +196,22 @@ as_status set_apply_policy(as_policy_apply* apply_policy, PyObject* py_policy) { return status; } - status = 
set_optional_uint32_property((uint32_t*)&apply_policy->key, py_policy, "key"); + status = set_optional_key(&apply_policy->key, py_policy, "key"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&apply_policy->replica, py_policy, "replica"); + status = set_optional_replica(&apply_policy->replica, py_policy, "replica"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&apply_policy->gen, py_policy, "gen"); + status = set_optional_gen(&apply_policy->gen, py_policy, "gen"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&apply_policy->commit_level, py_policy, "commit_level"); + status = set_optional_commit_level(&apply_policy->commit_level, py_policy, "commit_level"); if (status != AEROSPIKE_OK) { return status; } @@ -240,22 +246,22 @@ as_status set_remove_policy(as_policy_remove* remove_policy, PyObject* py_policy return status; } - status = set_optional_uint32_property((uint32_t*)&remove_policy->key, py_policy, "key"); + status = set_optional_key(&remove_policy->key, py_policy, "key"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&remove_policy->replica, py_policy, "replica"); + status = set_optional_replica(&remove_policy->replica, py_policy, "replica"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&remove_policy->commit_level, py_policy, "commit_level"); + status = set_optional_commit_level(&remove_policy->commit_level, py_policy, "commit_level"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&remove_policy->gen, py_policy, "gen"); + status = set_optional_gen(&remove_policy->gen, py_policy, "gen"); if (status != AEROSPIKE_OK) { return status; } @@ -339,27 +345,27 @@ as_status set_operate_policy(as_policy_operate* operate_policy, PyObject* py_pol return status; } - status = set_optional_uint32_property((uint32_t*)&operate_policy->key, py_policy, "key"); + status = set_optional_key(&operate_policy->key, py_policy, "key"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&operate_policy->replica, py_policy, "replica"); + status = set_optional_replica(&operate_policy->replica, py_policy, "replica"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&operate_policy->commit_level, py_policy, "commit_level"); + status = set_optional_commit_level(&operate_policy->commit_level, py_policy, "commit_level"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&operate_policy->gen, py_policy, "gen"); + status = set_optional_gen(&operate_policy->gen, py_policy, "gen"); if (status != AEROSPIKE_OK) { return status; } - status = set_optional_uint32_property((uint32_t*)&operate_policy->consistency_level, py_policy, "consistency_level"); + status = set_optional_consistency_level(&operate_policy->consistency_level, py_policy, "consistency_level"); if (status != AEROSPIKE_OK) { return status; } @@ -399,7 +405,7 @@ as_status set_batch_policy(as_policy_batch* batch_policy, PyObject* py_policy) { return status; } - status = set_optional_uint32_property((uint32_t*)&batch_policy->consistency_level, py_policy, "consistency_level"); + status = set_optional_consistency_level(&batch_policy->consistency_level, py_policy, "consistency_level"); if (status != AEROSPIKE_OK) { 
return status; } @@ -473,6 +479,30 @@ as_status set_base_policy(as_policy_base* base_policy, PyObject* py_policy) { return AEROSPIKE_OK; } +as_status get_uint32_value(PyObject* py_policy_val, uint32_t* return_uint32) { + long long int uint32_max = 0xFFFFFFFF; + + if (!py_policy_val) { + return AEROSPIKE_ERR_PARAM; + } + if (PyInt_Check(py_policy_val)) { + long int_value = PyInt_AsLong(py_policy_val); + + if (int_value == -1 && PyErr_Occurred()) { + PyErr_Clear(); + return AEROSPIKE_ERR_PARAM; + } + + if (int_value < 0 || int_value > uint32_max) { + return AEROSPIKE_ERR_PARAM; + } + + *return_uint32 = (uint32_t)int_value; + return AEROSPIKE_OK; + } + return AEROSPIKE_ERR_PARAM; +} + as_status set_optional_uint32_property(uint32_t* target_ptr, PyObject* py_policy, const char* name) { PyObject* py_policy_val = NULL; long long int uint32_max = 0xFFFFFFFF; @@ -521,3 +551,123 @@ as_status set_optional_bool_property(bool* target_ptr, PyObject* py_policy, cons } return AEROSPIKE_ERR_PARAM; } + +as_status set_optional_key(as_policy_key* target_ptr, PyObject* py_policy, const char* name) { + PyObject* py_policy_val = NULL; + if (!py_policy || !PyDict_Check(py_policy)) { + return AEROSPIKE_OK; + } + + py_policy_val = PyDict_GetItemString(py_policy, name); + if (!py_policy_val || py_policy_val == Py_None) { + return AEROSPIKE_OK; + } + + uint32_t out_uint32; + as_status status = get_uint32_value(py_policy, &out_uint32); + if (status != AEROSPIKE_OK) { + return status; + } + *target_ptr = (as_policy_key)out_uint32; + return AEROSPIKE_OK; +} + +as_status set_optional_replica(as_policy_replica* target_ptr, PyObject* py_policy, const char* name) { + PyObject* py_policy_val = NULL; + if (!py_policy || !PyDict_Check(py_policy)) { + return AEROSPIKE_OK; + } + + py_policy_val = PyDict_GetItemString(py_policy, name); + if (!py_policy_val || py_policy_val == Py_None) { + return AEROSPIKE_OK; + } + + uint32_t out_uint32; + as_status status = get_uint32_value(py_policy, &out_uint32); + if (status != AEROSPIKE_OK) { + return status; + } + *target_ptr = (as_policy_replica)out_uint32; + return AEROSPIKE_OK; +} + +as_status set_optional_commit_level(as_policy_commit_level* target_ptr, PyObject* py_policy, const char* name) { + PyObject* py_policy_val = NULL; + if (!py_policy || !PyDict_Check(py_policy)) { + return AEROSPIKE_OK; + } + + py_policy_val = PyDict_GetItemString(py_policy, name); + if (!py_policy_val || py_policy_val == Py_None) { + return AEROSPIKE_OK; + } + + uint32_t out_uint32; + as_status status = get_uint32_value(py_policy, &out_uint32); + if (status != AEROSPIKE_OK) { + return status; + } + *target_ptr = (as_policy_commit_level)out_uint32; + return AEROSPIKE_OK; +} + +as_status set_optional_consistency_level(as_policy_consistency_level* target_ptr, PyObject* py_policy, const char* name) { + PyObject* py_policy_val = NULL; + if (!py_policy || !PyDict_Check(py_policy)) { + return AEROSPIKE_OK; + } + + py_policy_val = PyDict_GetItemString(py_policy, name); + if (!py_policy_val || py_policy_val == Py_None) { + return AEROSPIKE_OK; + } + + uint32_t out_uint32; + as_status status = get_uint32_value(py_policy, &out_uint32); + if (status != AEROSPIKE_OK) { + return status; + } + *target_ptr = (as_policy_consistency_level)out_uint32; + return AEROSPIKE_OK; +} + +as_status set_optional_gen(as_policy_gen* target_ptr, PyObject* py_policy, const char* name) { + PyObject* py_policy_val = NULL; + if (!py_policy || !PyDict_Check(py_policy)) { + return AEROSPIKE_OK; + } + + py_policy_val = 
PyDict_GetItemString(py_policy, name); + if (!py_policy_val || py_policy_val == Py_None) { + return AEROSPIKE_OK; + } + + uint32_t out_uint32; + as_status status = get_uint32_value(py_policy, &out_uint32); + if (status != AEROSPIKE_OK) { + return status; + } + *target_ptr = (as_policy_gen)out_uint32; + return AEROSPIKE_OK; +} + +as_status set_optional_exists(as_policy_exists* target_ptr, PyObject* py_policy, const char* name) { + PyObject* py_policy_val = NULL; + if (!py_policy || !PyDict_Check(py_policy)) { + return AEROSPIKE_OK; + } + + py_policy_val = PyDict_GetItemString(py_policy, name); + if (!py_policy_val || py_policy_val == Py_None) { + return AEROSPIKE_OK; + } + + uint32_t out_uint32; + as_status status = get_uint32_value(py_policy, &out_uint32); + if (status != AEROSPIKE_OK) { + return status; + } + *target_ptr = (as_policy_exists)out_uint32; + return AEROSPIKE_OK; +} From 04c1b432f4b5610878d761ca259f583c4e868e67 Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Sat, 3 Feb 2018 01:29:36 +0000 Subject: [PATCH 03/16] Update version numbers --- VERSION | 2 +- src/main/aerospike.c | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/VERSION b/VERSION index cb2b00e4f..b50214693 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -3.0.1 +3.0.2 diff --git a/src/main/aerospike.c b/src/main/aerospike.c index 81ea00f31..e50613eb7 100644 --- a/src/main/aerospike.c +++ b/src/main/aerospike.c @@ -93,7 +93,7 @@ AerospikeConstants operator_constants[] = { MOD_INIT(aerospike) { - const char version[8] = "3.0.1"; + const char version[8] = "3.0.2"; // Makes things "thread-safe" PyEval_InitThreads(); int i = 0; From 7298d28ee4f26c191d922de9cdd5ff2cc0b4cdbe Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Tue, 6 Feb 2018 22:01:20 +0000 Subject: [PATCH 04/16] Fix parameter to use newer name --- test/new_tests/test_query_apply.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/new_tests/test_query_apply.py b/test/new_tests/test_query_apply.py index 5f86e0935..c98baadda 100644 --- a/test/new_tests/test_query_apply.py +++ b/test/new_tests/test_query_apply.py @@ -110,7 +110,7 @@ def test_query_apply_with_correct_policy(self): """ Invoke query_apply() with correct policy """ - policy = {'timeout': 1000} + policy = {'total_timeout': 0} query_id = self.as_connection.query_apply( "test", "demo", self.age_range_pred, "bin_lua", "mytransform", ['age', 2], policy) @@ -136,7 +136,7 @@ def test_query_apply_with_none_set(self): Invoke query_apply() with correct policy, Should casuse no changes as the """ - policy = {'timeout': 1000} + policy = {'total_timeout': 0} query_id = self.as_connection.query_apply( "test", None, self.age_range_pred, "bin_lua", "mytransform", ['age', 2], policy) From 84eb92e0a03cd6aa6fa762b9ef4c6d9270db73d6 Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Wed, 7 Feb 2018 18:03:12 +0000 Subject: [PATCH 05/16] Clean up query_apply test --- test/new_tests/test_query_apply.py | 369 ++++++++++------------------- test/query_apply.lua | 15 ++ 2 files changed, 141 insertions(+), 243 deletions(-) create mode 100644 test/query_apply.lua diff --git a/test/new_tests/test_query_apply.py b/test/new_tests/test_query_apply.py index c98baadda..38ba8f299 100644 --- a/test/new_tests/test_query_apply.py +++ b/test/new_tests/test_query_apply.py @@ -28,13 +28,31 @@ def add_indexes_to_client(client): pass +def create_records(client): + for i in range(1, 10): + key = ('test', 'demo', i) + rec = {'name': str(i), 'age': i, 'val': i} + client.put(key, rec) + + key = ('test', 
None, "no_set") + rec = {'name': 'no_set_name', 'age': 0} + client.put(key, rec) + + +def drop_records(client): + for i in range(1, 10): + key = ('test', 'demo', i) + client.remove(key) + client.remove(('test', None, "no_set")) + + def add_test_udf(client): policy = {} - client.udf_put(u"bin_lua.lua", 0, policy) + client.udf_put(u"query_apply.lua", 0, policy) def drop_test_udf(client): - client.udf_remove("bin_lua.lua") + client.udf_remove("query_apply.lua") def remove_indexes_from_client(client): @@ -48,29 +66,13 @@ class TestQueryApply(object): # required setup and teardown connection_setup_functions = (add_test_udf, add_indexes_to_client) connection_teardown_functions = (drop_test_udf, remove_indexes_from_client) - age_range_pred = p.between('age', 0, 5) # Predicate for ages between [0,5) + age_range_pred = p.between('age', 0, 4) # Predicate for ages between [0,5) + no_set_key = ('test', None, "no_set") # Key for item stored in a namespace but not in a set @pytest.fixture(autouse=True) def setup(self, request, connection_with_config_funcs): client = connection_with_config_funcs - for i in range(1, 5): - key = ('test', 'demo', i) - rec = {'name': 'name%s' % (str(i)), 'age': i} - client.put(key, rec) - - key = ('test', None, "no_set") - self.no_set_key = key - rec = {'name': 'no_set_name', 'age': 0} - client.put(key, rec) - - def teardown(): - for i in range(1, 5): - key = ('test', 'demo', i) - client.remove(key) - - client.remove(self.no_set_key) - - request.addfinalizer(teardown) + create_records(client) def test_query_apply_with_no_parameters(self): """ @@ -80,31 +82,18 @@ def test_query_apply_with_no_parameters(self): with pytest.raises(TypeError) as typeError: self.as_connection.query_apply() - def test_query_apply_with_correct_parameters(self): + def test_query_apply_with_correct_parameters_no_policy(self): """ Invoke query_apply() with correct parameters. 
It should apply the proper UDF, and """ query_id = self.as_connection.query_apply( - "test", "demo", self.age_range_pred, "bin_lua", - "mytransform", ['age', 2]) - - time.sleep(0.1) - while True: - response = self.as_connection.job_info( - query_id, aerospike.JOB_QUERY) - if response['status'] != aerospike.JOB_STATUS_INPROGRESS: - break - time.sleep(0.1) - - for i in range(1, 5): - key = ('test', 'demo', i) - _, _, bins = self.as_connection.get(key) + "test", "demo", self.age_range_pred, "query_apply", + "mark_as_applied", ['name', 2]) - assert bins['age'] == i + 2 + self._wait_for_query_complete(query_id) - _, _, bins = self.as_connection.get(self.no_set_key) - assert bins['age'] == 0 + self._correct_items_have_been_applied() def test_query_apply_with_correct_policy(self): """ @@ -112,53 +101,24 @@ def test_query_apply_with_correct_policy(self): """ policy = {'total_timeout': 0} query_id = self.as_connection.query_apply( - "test", "demo", self.age_range_pred, "bin_lua", - "mytransform", ['age', 2], policy) + "test", "demo", self.age_range_pred, "query_apply", + "mark_as_applied", ['name', 2], policy) - time.sleep(0.1) - while True: - response = self.as_connection.job_info( - query_id, aerospike.JOB_QUERY) - if response['status'] != aerospike.JOB_STATUS_INPROGRESS: - break - time.sleep(0.1) + self._wait_for_query_complete(query_id) + self._correct_items_have_been_applied() - for i in range(1, 5): - key = ('test', 'demo', i) - _, _, bins = self.as_connection.get(key) - assert bins['age'] == i + 2 - - _, _, bins = self.as_connection.get(self.no_set_key) - assert bins['age'] == 0 - - def test_query_apply_with_none_set(self): + def test_query_apply_with_set_argument_as_none(self): """ Invoke query_apply() with correct policy, Should casuse no changes as the """ policy = {'total_timeout': 0} query_id = self.as_connection.query_apply( - "test", None, self.age_range_pred, "bin_lua", - "mytransform", ['age', 2], policy) + "test", None, self.age_range_pred, "query_apply", + "mark_as_applied", ['name', 2], policy) - time.sleep(0.1) - while True: - response = self.as_connection.job_info( - query_id, aerospike.JOB_QUERY) - if response['status'] != aerospike.JOB_STATUS_INPROGRESS: - break - time.sleep(0.1) - - # Since this query passes no set, the records stored in a set should - # not be affected - for i in range(1, 5): - key = ('test', 'demo', i) - _, _, bins = self.as_connection.get(key) - assert bins['age'] == i - - # the setless record should have been changed - _, _, bins = self.as_connection.get(self.no_set_key) - assert bins['age'] == 2 + self._wait_for_query_complete(query_id) + self._items_without_set_have_been_applied() def test_query_apply_with_incorrect_policy(self): """ @@ -168,25 +128,19 @@ def test_query_apply_with_incorrect_policy(self): 'timeout': 0.5 } - with pytest.raises(e.ParamError) as err_info: + with pytest.raises(e.ParamError): self.as_connection.query_apply( - "test", "demo", self.age_range_pred, "bin_lua", - "mytransform", ['age', 2], policy) + "test", "demo", self.age_range_pred, "query_apply", + "mark_as_applied", ['name', 2], policy) - err_code = err_info.value.code - assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM - - def test_query_apply_with_incorrect_ns_set(self): + def test_query_apply_with_nonexistent_set(self): """ Invoke query_apply() with incorrect ns and set """ - with pytest.raises(e.NamespaceNotFound) as err_info: + with pytest.raises(e.NamespaceNotFound): self.as_connection.query_apply( - "test1", "demo1", self.age_range_pred, "bin_lua", - 
"mytransform", ['age', 2]) - - err_code = err_info.value.code - assert err_code == AerospikeStatus.AEROSPIKE_ERR_NAMESPACE_NOT_FOUND + "test1", "demo1", self.age_range_pred, "query_apply", + "mark_as_applied", ['name', 2]) def test_query_apply_with_incorrect_module_name(self): """ @@ -195,21 +149,11 @@ def test_query_apply_with_incorrect_module_name(self): not call a function """ query_id = self.as_connection.query_apply( - "test", "demo", self.age_range_pred, "bin_lua_incorrect", - "mytransform", ['age', 2]) + "test", "demo", self.age_range_pred, "query_apply_incorrect", + "mark_as_applied", ['name', 2]) - time.sleep(0.1) - while True: - response = self.as_connection.job_info( - query_id, aerospike.JOB_QUERY) - if response['status'] != aerospike.JOB_STATUS_INPROGRESS: - break - time.sleep(0.1) - - for i in range(1, 5): - key = ('test', 'demo', i) - _, _, bins = self.as_connection.get(key) - assert bins['age'] == i + self._wait_for_query_complete(query_id) + self._no_items_have_been_applied() def test_query_apply_with_incorrect_function_name(self): """ @@ -217,82 +161,58 @@ def test_query_apply_with_incorrect_function_name(self): does not invoke a different function """ query_id = self.as_connection.query_apply( - "test", "demo", self.age_range_pred, "bin_lua", - "mytransform_incorrect", ['age', 2]) + "test", "demo", self.age_range_pred, "query_apply", + "mytransform_incorrect", ['name', 2]) - time.sleep(0.1) - while True: - response = self.as_connection.job_info( - query_id, aerospike.JOB_QUERY) - if response['status'] != aerospike.JOB_STATUS_INPROGRESS: - break - time.sleep(0.1) - - for i in range(1, 5): - key = ('test', 'demo', i) - _, _, bins = self.as_connection.get(key) - assert bins['age'] == i + self._wait_for_query_complete(query_id) + self._no_items_have_been_applied() def test_query_apply_with_ns_set_none(self): """ Invoke query_apply() with ns and set as None """ - with pytest.raises(TypeError) as typeError: + with pytest.raises(TypeError): self.as_connection.query_apply(None, None, self.age_range_pred, - "bin_lua", "mytransform", - ['age', 2]) + "query_apply", "mark_as_applied", + ['name', 2]) - assert "query_apply() argument 1 must be str" in str(typeError.value) - - def test_query_apply_with_module_function_none(self): + def test_query_apply_with_module_argument_value_is_none(self): """ Invoke query_apply() with None module function """ - with pytest.raises(e.ParamError) as err_info: + with pytest.raises(e.ParamError): self.as_connection.query_apply( - "test", "demo", self.age_range_pred, None, None, ['age', 2]) + "test", "demo", self.age_range_pred, None, None, ['name', 2]) - err_code = err_info.value.code - assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM - - def test_query_apply_with_extra_argument(self): + def test_query_apply_with_too_many_arguments(self): """ Invoke query_apply() with extra argument """ policy = {'timeout': 1000} - with pytest.raises(TypeError) as typeError: + with pytest.raises(TypeError): self.as_connection.query_apply( - "test", "demo", self.age_range_pred, "bin_lua", - "mytransform_incorrect", ['age', 2], policy, "") - - assert "query_apply() takes at most 7 arguments (8 given)" in str( - typeError.value) + "test", "demo", self.age_range_pred, "query_apply", + "mytransform_incorrect", ['name', 2], policy, "") - def test_query_apply_with_argument_is_string(self): + def test_query_apply_with_udf_arguments_as_string(self): """ Invoke query_apply() with arguments as string """ - with pytest.raises(e.ParamError) as err_info: + with 
pytest.raises(e.ParamError): self.as_connection.query_apply("test", "demo", self.age_range_pred, - "bin_lua", "mytransform", "") + "query_apply", "mark_as_applied", "") - err_code = err_info.value.code - assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM - - def test_query_apply_with_argument_is_none(self): + def test_query_apply_with_udf_argument_as_none(self): """ Invoke query_apply() with arguments as None """ - with pytest.raises(e.ParamError) as err_info: + with pytest.raises(e.ParamError): self.as_connection.query_apply( "test", "demo", self.age_range_pred, - "bin_lua", "mytransform", None) + "query_apply", "mark_as_applied", None) - err_code = err_info.value.code - assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM - - def test_query_apply_with_extra_call_to_lua(self): + def test_query_apply_with_extra_parameter_to_lua_function(self): """ Invoke query_apply() with extra call to lua test that passing an extra argument to a udf does @@ -300,96 +220,37 @@ def test_query_apply_with_extra_call_to_lua(self): """ query_id = self.as_connection.query_apply( "test", "demo", self.age_range_pred, - "bin_lua", "mytransform", ['age', 2, 3]) + "query_apply", "mark_as_applied", ['name', 2, 3]) # time.sleep(2) - time.sleep(0.1) - while True: - response = self.as_connection.job_info( - query_id, aerospike.JOB_QUERY) - if response['status'] != aerospike.JOB_STATUS_INPROGRESS: - break - time.sleep(0.1) + self._wait_for_query_complete(query_id) + self._correct_items_have_been_applied() - for i in range(1, 5): - key = ('test', 'demo', i) - _, _, bins = self.as_connection.get(key) - assert bins['age'] == i + 2 - - def test_query_apply_with_extra_parameter_in_lua(self): + def test_query_apply_with_missing_parameter_to_function(self): """ Invoke query_apply() with a missing argument to a lua function does not cause an error """ query_id = self.as_connection.query_apply( "test", "demo", self.age_range_pred, - "bin_lua", "mytransformextra", ['age', 2]) - - # time.sleep(2) - - time.sleep(0.1) - while True: - response = self.as_connection.job_info( - query_id, aerospike.JOB_QUERY) - if response['status'] != aerospike.JOB_STATUS_INPROGRESS: - break - time.sleep(0.1) - - for i in range(1, 5): - key = ('test', 'demo', i) - _, _, bins = self.as_connection.get(key) - assert bins['age'] == i + 2 - - def test_query_apply_with_less_parameter_in_lua(self): - """ - Invoke query_apply() with less parameter in lua - this verifies that passing 3 arguments to a lua - function which expects 2, will cause an undefined int - to be treated as 0, and not crash. 
- """ - query_id = self.as_connection.query_apply( - "test", "demo", self.age_range_pred, - "bin_lua", "mytransformless", ['age', 2]) + "query_apply", "mark_as_applied_three_arg", ['name', 2]) # time.sleep(2) - time.sleep(0.1) - while True: - response = self.as_connection.job_info( - query_id, aerospike.JOB_QUERY) - if response['status'] != aerospike.JOB_STATUS_INPROGRESS: - break - time.sleep(0.1) - - for i in range(1, 5): - key = ('test', 'demo', i) - _, _, bins = self.as_connection.get(key) - if bins['age'] != i: - assert True is False - else: - assert True is True + self._wait_for_query_complete(query_id) + self._correct_items_have_been_applied() - def test_query_apply_unicode_input(self): + def test_query_apply_unicode_literal_for_strings(self): """ Invoke query_apply() with unicode udf """ query_id = self.as_connection.query_apply( - "test", "demo", self.age_range_pred, u"bin_lua", - u"mytransform", ['age', 2]) + "test", "demo", self.age_range_pred, u"query_apply", + u"mark_as_applied", ['name', 2]) - time.sleep(0.1) - while True: - response = self.as_connection.job_info( - query_id, aerospike.JOB_QUERY) - if response['status'] != aerospike.JOB_STATUS_INPROGRESS: - break - time.sleep(0.1) - - for i in range(1, 5): - key = ('test', 'demo', i) - _, _, bins = self.as_connection.get(key) - assert bins['age'] == i + 2 + self._wait_for_query_complete(query_id) + self._correct_items_have_been_applied() def test_query_apply_with_correct_parameters_without_connection(self): """ @@ -400,31 +261,12 @@ def test_query_apply_with_correct_parameters_without_connection(self): with pytest.raises(e.ClusterError) as err_info: client1.query_apply("test", "demo", self.age_range_pred, - "bin_lua", "mytransform", ['age', 2]) + "query_apply", "mark_as_applied", ['name', 2]) err_code = err_info.value.code assert err_code == AerospikeStatus.AEROSPIKE_CLUSTER_ERROR - @pytest.mark.skip(reason="This isn't a test for query_apply," + - " but for job_info") - def test_job_info_with_incorrect_module_type(self): - """ - Invoke query_apply() with incorrect module type - """ - query_id = self.as_connection.query_apply( - "test", "demo", self.age_range_pred, "bin_lua", - "mytransform", ['age', 2]) - - with pytest.raises(e.ParamError) as err_info: - time.sleep(0.1) - self.as_connection.job_info(query_id, "aggregate") - err_code = err_info.value.code - - assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM - - # @pytest.mark.xfail(reason="Passing an invalid predicate currently works " + - # " or raises a System Error") @pytest.mark.parametrize( "predicate", ( @@ -436,12 +278,53 @@ def test_job_info_with_incorrect_module_type(self): (1, 1, 'bin') # start of a valid predicate ) ) - def test_invalid_predicate(self, predicate): + def test_invalid_predicate_tuple(self, predicate): with pytest.raises(e.ParamError) as err_info: query_id = self.as_connection.query_apply( - "test", "demo", predicate, "bin_lua", - "mytransform", ['age', 2]) + "test", "demo", predicate, "query_apply", + "mark_as_applied", ['name', 2]) err_code = err_info.value.code assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM + + def _correct_items_have_been_applied(self): + for i in range(1, 5): + key = ('test', 'demo', i) + _, _, bins = self.as_connection.get(key) + assert bins['name'] == 'aerospike' + + for i in range(5, 10): + key = ('test', 'demo', i) + _, _, bins = self.as_connection.get(key) + assert bins['name'] != 'aerospike' + + _, _, bins = self.as_connection.get(self.no_set_key) + assert bins['name'] != 'aerospike' + + def 
_items_without_set_have_been_applied(self): + for i in range(1, 10): + key = ('test', 'demo', i) + _, _, bins = self.as_connection.get(key) + assert bins['name'] != 'aerospike' + + _, _, bins = self.as_connection.get(self.no_set_key) + assert bins['name'] == 'aerospike' + + def _wait_for_query_complete(self, query_id): + while True: + response = self.as_connection.job_info( + query_id, aerospike.JOB_QUERY) + if response['status'] != aerospike.JOB_STATUS_INPROGRESS: + return + time.sleep(0.1) + + def _no_items_have_been_applied(self): + + for i in range(1, 10): + key = ('test', 'demo', i) + _, _, bins = self.as_connection.get(key) + assert bins['name'] != 'aerospike' + + _, _, bins = self.as_connection.get(self.no_set_key) + assert bins['name'] != 'aerospike' diff --git a/test/query_apply.lua b/test/query_apply.lua new file mode 100644 index 000000000..c83560d56 --- /dev/null +++ b/test/query_apply.lua @@ -0,0 +1,15 @@ +function mark_as_applied(rec, bin, unused) + info("my transform: %s", tostring(record.digest(rec))) + rec[bin] = 'aerospike' + aerospike:update(rec) +end + +function mark_as_applied_three_arg(rec, bin, unused, unused1) + rec[bin] = 'aerospike' + aerospike:update(rec) +end + +function mark_as_applied_one_arg(rec, bin) + rec[bin] = 'aerospike' + aerospike:update(rec) +end From bee4eab5c687528497204417de6c0dd978677768 Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Wed, 7 Feb 2018 18:25:32 +0000 Subject: [PATCH 06/16] Clean up after query apply test --- test/new_tests/test_query_apply.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/test/new_tests/test_query_apply.py b/test/new_tests/test_query_apply.py index 38ba8f299..710d8fc29 100644 --- a/test/new_tests/test_query_apply.py +++ b/test/new_tests/test_query_apply.py @@ -42,9 +42,15 @@ def create_records(client): def drop_records(client): for i in range(1, 10): key = ('test', 'demo', i) - client.remove(key) - client.remove(('test', None, "no_set")) + try: + client.remove(key) + except e.RecordNotFound: + pass + try: + client.remove(('test', None, "no_set")) + except e.RecordNotFound: + pass def add_test_udf(client): policy = {} @@ -64,8 +70,8 @@ class TestQueryApply(object): # These functions will run once for this test class, and do all of the # required setup and teardown - connection_setup_functions = (add_test_udf, add_indexes_to_client) - connection_teardown_functions = (drop_test_udf, remove_indexes_from_client) + connection_setup_functions = (add_test_udf, add_indexes_to_client, create_records) + connection_teardown_functions = (drop_test_udf, remove_indexes_from_client, drop_records) age_range_pred = p.between('age', 0, 4) # Predicate for ages between [0,5) no_set_key = ('test', None, "no_set") # Key for item stored in a namespace but not in a set From e4e15ebc4035117fc704e62f51224dd54b7af694 Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Tue, 6 Feb 2018 00:14:20 +0000 Subject: [PATCH 07/16] Add better to logic to test config file parsing --- test/new_tests/test_base_class.py | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/test/new_tests/test_base_class.py b/test/new_tests/test_base_class.py index a9a70b3f3..beec5ee74 100644 --- a/test/new_tests/test_base_class.py +++ b/test/new_tests/test_base_class.py @@ -53,27 +53,37 @@ def get_tls_info(): config.read("config.conf") if (config.has_option('tls', 'enable') and ( - config.get('tls', 'enable') == 'True')): + config.getboolean('tls', 'enable'))): TestBaseClass.using_tls = True 
tls_dict['enable'] = True if config.has_option('tls', 'cafile'): - tls_dict['cafile'] = config.get('tls', 'cafile') + if config.get('tls', 'cafile') != '': + tls_dict['cafile'] = config.get('tls', 'cafile') if config.has_option('tls', 'capath'): - tls_dict['capath'] = config.get('tls', 'capath') + if config.get('tls', 'capath') != '': + tls_dict['capath'] = config.get('tls', 'capath') if config.has_option('tls', 'protocols'): - tls_dict['protocols'] = config.get('tls', 'protocols') + if config.get('tls', 'protocols') != '': + tls_dict['protocols'] = config.get('tls', 'protocols') if config.has_option('tls', 'cipher_suite'): - tls_dict['cipher_suite'] = config.get('tls', 'cipher_suite') + if config.get('tls', 'cipher_suite') != '': + tls_dict['cipher_suite'] = config.get('tls', 'cipher_suite') if config.has_option('tls', 'keyfile'): - tls_dict['keyfile'] = config.get('tls', 'keyfile') + if config.get('tls', 'keyfile') != '': + tls_dict['keyfile'] = config.get('tls', 'keyfile') + + if config.has_option('tls', 'cert_blacklist'): + if config.get('tls', 'cert_blacklist') != '': + tls_dict['cert_blacklist'] = config.get('tls', 'cert_blacklist') if config.has_option('tls', 'certfile'): - tls_dict['certfile'] = config.get('tls', 'certfile') + if config.get('tls', 'certfile') != '': + tls_dict['certfile'] = config.get('tls', 'certfile') if config.has_option('tls', 'crl_check'): if config.get('tls', 'crl_check') == 'True': From 4f853546dd3691cdd9437aac2731fc2f2117c829 Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Thu, 8 Feb 2018 21:38:11 +0000 Subject: [PATCH 08/16] update to c client 4.3.5 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 316fbff9e..74a04688d 100644 --- a/setup.py +++ b/setup.py @@ -72,7 +72,7 @@ def run(self): os.environ['ARCHFLAGS'] = '-arch x86_64' AEROSPIKE_C_VERSION = os.getenv('AEROSPIKE_C_VERSION') if not AEROSPIKE_C_VERSION: - AEROSPIKE_C_VERSION = '4.3.3' + AEROSPIKE_C_VERSION = '4.3.5' DOWNLOAD_C_CLIENT = os.getenv('DOWNLOAD_C_CLIENT') AEROSPIKE_C_HOME = os.getenv('AEROSPIKE_C_HOME') PREFIX = None From 87e4f06a1fbe0f5d190aa03b3f166b4a791852dd Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Thu, 8 Feb 2018 22:04:03 +0000 Subject: [PATCH 09/16] Added the in_doubt argument to the tuple passed to as_error constructor --- src/main/conversions.c | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/main/conversions.c b/src/main/conversions.c index 834bf1aea..87330553b 100644 --- a/src/main/conversions.c +++ b/src/main/conversions.c @@ -51,6 +51,7 @@ #define PY_EXCEPTION_MSG 1 #define PY_EXCEPTION_FILE 2 #define PY_EXCEPTION_LINE 3 +#define AS_PY_EXCEPTION_IN_DOUBT 4 as_status as_udf_file_to_pyobject( as_error *err, as_udf_file * entry, PyObject ** py_file ) { @@ -1552,11 +1553,15 @@ bool error_to_pyobject(const as_error * err, PyObject ** obj) PyObject * py_code = PyLong_FromLongLong(err->code); PyObject * py_message = PyString_FromString(err->message); - PyObject * py_err = PyTuple_New(4); + PyObject* py_in_doubt = err->in_doubt ? 
Py_True : Py_False; + Py_INCREF(py_in_doubt); + + PyObject * py_err = PyTuple_New(5); PyTuple_SetItem(py_err, PY_EXCEPTION_CODE, py_code); PyTuple_SetItem(py_err, PY_EXCEPTION_MSG, py_message); PyTuple_SetItem(py_err, PY_EXCEPTION_FILE, py_file); PyTuple_SetItem(py_err, PY_EXCEPTION_LINE, py_line); + PyTuple_SetItem(py_err, AS_PY_EXCEPTION_IN_DOUBT, py_in_doubt); *obj = py_err; return true; } From 7314b0ce1041c46ffb1e5dcbd315a40ab3fa8b0d Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Fri, 9 Feb 2018 17:29:05 +0000 Subject: [PATCH 10/16] Added big list example --- examples/client/client_big_list.py | 374 +++++++++++++++++++++++++++++ 1 file changed, 374 insertions(+) create mode 100644 examples/client/client_big_list.py diff --git a/examples/client/client_big_list.py b/examples/client/client_big_list.py new file mode 100644 index 000000000..c5fc2f7e6 --- /dev/null +++ b/examples/client/client_big_list.py @@ -0,0 +1,374 @@ +# -*- coding: utf-8 -*- +########################################################################## +# Copyright 2018 Aerospike, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +########################################################################## + +from __future__ import print_function +import argparse + +import aerospike +from aerospike import exception as as_exceptions +''' +This provides a rough implementation of an expandable list for Aerospike +It utilizes a metadata record to provide information about associated subrecords. +It should be considered to be roughly accurate, as distruptions and timeouts can +cause stale data and pointers to be referenced. + +The rough algorithm to add an item is: + +1. Read top level metadata record to find a subrecord into which an item should be stored. If this fails + Create the top level record. +2. Try to store the items in the appropriate subrecord. +3. If step 2 fails because the subrecord is too big, create a new subrecord with the given items, + and increment the metadata record's reference count. + +Caveats: + + 1. Due to records potentially being stored on different nodes, it is possible that the + metadata record exists, and is reachable, but certain subrecords are not reachable. + 2. In case of disruptions it is possible to have a subrecord created and not have a reference + to it stored in the top level metadata record. + 3. No ordering can be assumed, as various delays between additions of items may cause them to be + stored out of orer. + 4. Fetching all subrecords, will only fetch subrecords known about at the time of the request. Subrecords created after + the metadata record has been read will not be returned. + 5. This design does not facilitate a way to remove items from the conceptual list. + 6. This design does not allow for writing a single record larger than the write-block-size. 
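+
+A minimal usage sketch (the names below are the ones defined in this module; the
+host, port and base key are placeholders, and a reachable server is assumed):
+
+    import aerospike
+
+    client = aerospike.client({'hosts': [('127.0.0.1', 3000)]}).connect()
+    big_list = ClientSideBigList(client, 'person1_friends')
+    big_list.add_item('friend0')
+    print(big_list.get_all_entries())
+    client.close()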
+ +The top level record is roughly +{ + 'subrecord_count': # +} + +Where the # is greater than or less than or equal to the number of subrecords + +each subrecord is roughly: +{ + 'items': [item10, item2, item5, item0, item100] +} +''' + + +class ASMetadataRecordTooLarge(Exception): + pass + + +class ClientSideBigList(object): + ''' + Abstraction around an unbounded size list for Aerospike. Relies on a top level record + containing metadata about subrecords. When a subrecord fills up, a new subrecord is created + and the metadata is updated. + ''' + + def __init__(self, client, base_key, ns='test', setname='demo', + subrecourd_count_name='sr_count', subrecord_list_bin='items'): + ''' + Args: + client (Aerospike.client): a connected client to be used to talk with the database + base_key (string): The base key around which all record keys will be constructed + if base_key is 'person1', the top record will have the key (ns, set, 'person1') + and subrecords will be of the form (ns, set, 'person1-1'), (ns, set, 'person1-2')... + ns (string): The namespace into which the records will be stored. + setname (string): The set into which the records will be stored. + subrecord_count_name (string): The name of the bin in the metadata record which will store the count + of subrecords. + subrecord_list_bin (string): The name of the list bin in each of the subrecords. + ''' + self.ns = ns + self.set = setname + self.client = client + self.base_key = base_key + self.metadata_key = (self.ns, self.set, self.base_key) + + # This is the name of the bin in the top record which contains a set of subrecords + # There are at least this many subrecords. Unless the value of this is 1, in which case there may be no + # subrecords. + self.subrecourd_count_name = subrecourd_count_name + + # This is the name of the bin containing items in each of the subrecords + self.subrecord_list_bin = subrecord_list_bin + + def get_metadata_record(self): + ''' + Fetches the top level record containing metadata about this list. + Returns None if the record does not exist + + Returns: + Tuple: The record tuple: (key, meta, bins) for the metadata record if found + None: if the record does not exist. + Raises: + RecordNotFound: If the metadata record for this list does not yet exist. + AerospikeError: If the operation fails + ''' + return self.client.get(self.metadata_key) + + def add_item(self, item): + ''' + Add a given item to this conceptual group of lists. If a top level + record has not yet been created, this operation will create it. + + Args: + item: The item to be stored into the list. + Raises: + AerospikeError: + If communication with the cluster fails. + ''' + + try: + _, meta, bins = self.get_metadata_record() + generation = meta['gen'] + subrecord_count = bins[self.subrecourd_count_name] + subrecord_count = max(1, subrecord_count) # Ensure that level is at least 1 + + self._create_or_update_subrecord(item, subrecord_count, generation) + + except as_exceptions.RecordNotFound as e: + try: + # If this fails for a reason other than it already existing, error out + self._create_metadata_record() + userkey_1 = self._make_user_key(1) + # Create the first subrecord + self.client.put((self.ns, self.set, userkey_1), {self.subrecord_list_bin: []}) + # Metadata record has just been created. Add the first subrecord + self._create_or_update_subrecord(item, 1, 1) + except as_exceptions.RecordExistsError: + # If the metadata record alread exists, try to insert into the correct subrecord by recursing. 
+ add_item(self, item) + + def get_all_entries(self, extended_search=False): + ''' + Get all of the entries from all subrecords flattened and buffered into a list + + Args: + extended_search(bool): Whether to attempt to fetch records beyond the count of known subrecords + Returns: + list: A list of all of the items from all subrecords for this record + None: If the metadata record does not exist. + Raises: + AerospikeError: + If any of the fetch operations fail. + ''' + try: + _, _, bins = self.get_metadata_record() + min_count = bins[self.subrecourd_count_name] + keys = [] + for i in range(1, min_count + 1): + key = ( + self.ns, + self.set, + self._make_user_key(i) + ) + keys.append(key) + + subrecords = self.client.get_many(keys) + entries = self._get_items_from_subrecords(subrecords) + + # Try to get subrecords beyond the listed amount. + # It is possible but not guaranteed that they exist. + if extended_search: + record_number = min_count + 1 + while True: + key = ( + self.ns, + self.set, + self._make_user_key(record_number) + ) + try: + _, _, bins = self.client.get(key) + entries.append(bins) + except as_exceptions.RecordNotFound: + break + return entries + except as_exceptions.RecordNotFound: + return None + + def _make_user_key(self, record_number): + ''' + Returns a formatted string to be used as the userkey portion of a key. + + Args: + record_number (int): Integer >= 1 specifying which subrecord for which to create a key. + + Returns: + string: A formatted string of the form: 'base-#' + ''' + return "{}-{}".format(self.base_key, record_number) + + def _create_metadata_record(self): + ''' + Create the top level information about the key. + + Raises: + RecordExistsError: + If the metadata record already exists. + AerospikeError: + If the operation fails for any other reason + ''' + + # Only create the metadata record if it does not exist + policy = {'exists': aerospike.POLICY_EXISTS_CREATE} + self.client.put(self.metadata_key, {self.subrecourd_count_name: 1}) + + def _create_or_update_subrecord(self, item, subrecord_number, generation, retries_remaining=3): + ''' + Create a new subrecord for the item. + 1. Create or append an item to the given specified subrecord + 2. Update the top level metadata record to mark this subrecord's existence + + If the update causes the specified record to be too large, the operation + is retried with a new subrecord number + + Args: + item: The item to be inserted into a subrecord + subrecord_number (int): An integer >= 1 indicating which subrecord to insert into. + generation (int): The generation of the metadata record. + retries_remaining (int): Number of retries remaining for an error caused by inserting + a record which by itself is greater than the write block size. + Raises: + Subclass of AerospikeError if an operation fails for a reason + other than the update causing the record to be too large. + ''' + subrecord_userkey = self._make_user_key(subrecord_number) + subrecord_record_key = (self.ns, self.set, subrecord_userkey) + try: + self.client.list_append( + subrecord_record_key, self.subrecord_list_bin, item) + except as_exceptions.RecordTooBig as e: + if retries_remaining == 0: + raise e + # The insert overflowed the size capacity of the record, increment the top level record metadata. 
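+            # Bump the subrecord count in the metadata record (guarded by its generation),
+            # then retry against the next subrecord; retries_remaining bounds the recursion
+            # when a single item can never fit in one record.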
+ self._update_metadata_record(generation) + self._create_or_update_subrecord(item, subrecord_number + 1, generation, retries_remaining=(retries_remaining - 1)) + + def _update_metadata_record(self, generation): + ''' + Increment the metadata record's count of subrecords. This is only safe to do if the generation of the metadata matches + the expected value. Ignore if this fails. + ''' + update_policy = {'gen': aerospike.POLICY_GEN_EQ} + meta = {'gen': generation} + try: + self.client.increment(self.metadata_key, self.subrecourd_count_name, 1, meta=meta, policy=update_policy) + except as_exceptions.RecordTooBig: + raise ASMetadataRecordTooLarge + except as_exceptions.RecordGenerationError: + # This means that somebody else has updated the record count already. Don't risk updating again. + pass + + def _get_items_from_subrecords(self, subrecords): + ''' + Extract only the items from a list of subrecord tuples + given records = [ + (key, meta, {'items': [1, 2, ,3]}), + (key, meta, {'items': [4, 5, 6]}), + (key, meta, {'items': 7, 8, 9})) + ] + returns [1, 2, 3, 4, 5, 6, 7, 8, 9] + + Args: + subrecords (list<(k, m, b)>): A list of record tuples. + + Returns: + list: A flattened list of items. Containing all items from the subrecords. + ''' + entries = [] + # If a subrecord was included in the header of the top level record, but the matching subrecord + # was not found, ignore it. + for _, _, sr_bins in subrecords: + if sr_bins: + entries.extend(sr_bins[self.subrecord_list_bin]) + return entries + + def subrecord_iterator(self): + + _, _, bins = self.get_metadata_record() + count = bins[self.subrecourd_count_name] + + def sr_iter(): + ''' + Generator which fetches one subrecord at a time, and yields it to caller + ''' + for i in range(1, count + 1): + key = ( + self.ns, + self.set, + self._make_user_key(i) + ) + try: + yield self.client.get(key) + except as_exceptions.RecordNotFound: + continue + + # Instantiate the generator and return it. + return sr_iter() + + +def main(): + ''' + Simple tests demonstrating the functionality. + If the database is set up with a small enough write block-size, several subrecords + will be created. + ''' + + optparser = argparse.ArgumentParser() + + optparser.add_argument( + "--host", type=str, default="127.0.0.1", metavar="
", + help="Address of Aerospike server.") + + optparser.add_argument( + "--port", type=int, default=3000, metavar="", + help="Port of the Aerospike server.") + + optparser.add_argument( + "--namespace", type=str, default="test", metavar="", + help="Namespace to use for this example") + + optparser.add_argument( + "-s", "--set", type=str, default="demo", metavar="", + help="Set to use for this example") + + optparser.add_argument( + "-i", "--items", type=int, default=1000, metavar="", + help="Number of items to store into the big list") + + options = optparser.parse_args() + print(options) + + client = aerospike.client({'hosts': [('localhost', 3000)]}).connect() + ldt = ClientSideBigList(client, 'person1_friends') + + for i in range(options.items): + # Store a reasonably large item + ldt.add_item('friend{}'.format(i) * 100) + + print("Stored {} items".format(options.items)) + + items = ldt.get_all_entries() + _, _, bins = ldt.get_metadata_record() + print(bins) + print("Known subrecord count is: {}".format(bins['sr_count'])) + print("Fetched {} items:".format(len(items))) + + count = 0 + for sr in ldt.subrecord_iterator(): + if sr: + count = count + 1 + + print("Records yielded: {}".format(count)) + + +if __name__ == '__main__': + main() From a47c6be66795c834248231ae6b70ba9d28a75c38 Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Tue, 13 Feb 2018 01:35:36 +0000 Subject: [PATCH 11/16] Fix null return possibility from list operation, update tests for new server behavior --- src/main/client/operate_list.c | 9 +++++++++ test/new_tests/test_list_clear.py | 13 +++---------- test/new_tests/test_list_pop.py | 6 ++---- test/new_tests/test_list_pop_range.py | 6 ++---- test/new_tests/test_list_remove.py | 6 ++---- test/new_tests/test_list_remove_range.py | 5 +---- test/new_tests/test_list_trim.py | 5 +---- 7 files changed, 20 insertions(+), 30 deletions(-) diff --git a/src/main/client/operate_list.c b/src/main/client/operate_list.c index 274f3d5c0..25f255268 100644 --- a/src/main/client/operate_list.c +++ b/src/main/client/operate_list.c @@ -450,6 +450,9 @@ PyObject * AerospikeClient_ListPop(AerospikeClient * self, PyObject * args, PyOb PyObject *py_val = NULL; if (rec && rec->bins.size) { val_to_pyobject(self, &err, (as_val*) (rec->bins.entries[0].valuep), &py_val); + } else { + py_val = Py_None; + Py_INCREF(py_val); } CLEANUP: @@ -510,6 +513,9 @@ PyObject * AerospikeClient_ListPopRange(AerospikeClient * self, PyObject * args, } if (rec && rec->bins.size) { list_to_pyobject(self, &err, as_record_get_list(rec, bin), &py_list); + } else { + py_list = Py_None; + Py_INCREF(py_list); } CLEANUP: @@ -744,6 +750,9 @@ PyObject * AerospikeClient_ListGet(AerospikeClient * self, PyObject * args, PyOb PyObject *py_val = NULL; if (rec && rec->bins.size) { val_to_pyobject(self, &err, (as_val*) (rec->bins.entries[0].valuep), &py_val); + } else { + py_val = Py_None; + Py_INCREF(py_val); } CLEANUP: diff --git a/test/new_tests/test_list_clear.py b/test/new_tests/test_list_clear.py index 8455810e3..2c2acca08 100644 --- a/test/new_tests/test_list_clear.py +++ b/test/new_tests/test_list_clear.py @@ -85,17 +85,10 @@ def test_neg_list_clear_with_nonexistent_key(self): """ Invoke list_clear() with non-existent key """ - charSet = 'abcdefghijklmnopqrstuvwxyz1234567890' - minLength = 5 - maxLength = 30 - length = random.randint(minLength, maxLength) - key = ('test', 'demo', ''.join(map(lambda unused: - random.choice(charSet), - range(length))) + ".com") - try: + + key = ('test', 'demo', 
'test_neg_list_clear_with_nonexistent_key') + with pytest.raises(e.RecordNotFound): self.as_connection.list_clear(key, "contact_no") - except e.BinIncompatibleType as exception: - assert exception.code == 12 def test_neg_list_clear_with_nonexistent_bin(self): """ diff --git a/test/new_tests/test_list_pop.py b/test/new_tests/test_list_pop.py index 9fa7d1074..1a6df811f 100644 --- a/test/new_tests/test_list_pop.py +++ b/test/new_tests/test_list_pop.py @@ -103,11 +103,9 @@ def test_neg_list_pop_with_nonexistent_key(self): key = ('test', 'demo', ''.join(map(lambda unused: random.choice(charSet), range(length))) + ".com") - try: - self.as_connection.list_pop(key, "abc", 0) - except e.BinIncompatibleType as exception: - assert exception.code == 12 + with pytest.raises(e.RecordNotFound): + self.as_connection.list_pop(key, "abc", 0) def test_neg_list_pop_with_nonexistent_bin(self): """ diff --git a/test/new_tests/test_list_pop_range.py b/test/new_tests/test_list_pop_range.py index e08a2aa43..952778331 100644 --- a/test/new_tests/test_list_pop_range.py +++ b/test/new_tests/test_list_pop_range.py @@ -106,11 +106,9 @@ def test_neg_list_pop_range_with_nonexistent_key(self): key = ('test', 'demo', ''.join(map(lambda unused: random.choice(charSet), range(length))) + ".com") - try: - self.as_connection.list_pop_range(key, "abc", 0, 1) - except e.BinIncompatibleType as exception: - assert exception.code == 12 + with pytest.raises(e.RecordNotFound): + self.as_connection.list_pop_range(key, "abc", 0, 1) def test_neg_list_pop_range_with_nonexistent_bin(self): """ diff --git a/test/new_tests/test_list_remove.py b/test/new_tests/test_list_remove.py index 85aa3ec15..779eb331a 100644 --- a/test/new_tests/test_list_remove.py +++ b/test/new_tests/test_list_remove.py @@ -110,11 +110,9 @@ def test_neg_list_remove_with_nonexistent_key(self): key = ('test', 'demo', ''.join(map(lambda unused: random.choice(charSet), range(length))) + ".com") - try: - self.as_connection.list_remove(key, "contact_no", 0) - except e.BinIncompatibleType as exception: - assert exception.code == 12 + with pytest.raises(e.RecordNotFound): + self.as_connection.list_remove(key, "contact_no", 0) def test_neg_list_remove_with_nonexistent_bin(self): """ diff --git a/test/new_tests/test_list_remove_range.py b/test/new_tests/test_list_remove_range.py index 5cfbbd962..0a6ceaa8a 100644 --- a/test/new_tests/test_list_remove_range.py +++ b/test/new_tests/test_list_remove_range.py @@ -115,12 +115,9 @@ def test_neg_list_remove_range_with_nonexistent_key(self): key = ('test', 'demo', ''.join(map(lambda unused: random.choice(charSet), range(length))) + ".com") - try: + with pytest.raises(e.RecordNotFound): self.as_connection.list_remove_range(key, "abc", 0, 1) - except e.BinIncompatibleType as exception: - assert exception.code == 12 - def test_neg_list_remove_range_with_nonexistent_bin(self): """ Invoke list_remove_range() with non-existent bin diff --git a/test/new_tests/test_list_trim.py b/test/new_tests/test_list_trim.py index 58b843349..cf3a20846 100644 --- a/test/new_tests/test_list_trim.py +++ b/test/new_tests/test_list_trim.py @@ -113,12 +113,9 @@ def test_neg_list_trim_with_nonexistent_key(self): key = ('test', 'demo', ''.join(map(lambda unused: random.choice(charSet), range(length))) + ".com") - try: + with pytest.raises(e.RecordNotFound): self.as_connection.list_trim(key, "abc", 0, 1) - except e.BinIncompatibleType as exception: - assert exception.code == 12 - def test_neg_list_trim_with_nonexistent_bin(self): """ Invoke list_trim() with 
non-existent bin From a2a9e81f85467cb69f840b8379cf699097eb2be3 Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Tue, 13 Feb 2018 17:54:01 +0000 Subject: [PATCH 12/16] Add skip for differing cdt list behavior post 3.15.2 --- test/new_tests/conftest.py | 2 ++ test/new_tests/test_list_clear.py | 3 ++- test/new_tests/test_list_pop.py | 2 ++ test/new_tests/test_list_pop_range.py | 2 ++ test/new_tests/test_list_remove.py | 2 ++ test/new_tests/test_list_remove_range.py | 2 ++ test/new_tests/test_list_trim.py | 2 ++ 7 files changed, 14 insertions(+), 1 deletion(-) diff --git a/test/new_tests/conftest.py b/test/new_tests/conftest.py index fe79a2de0..c658470e0 100644 --- a/test/new_tests/conftest.py +++ b/test/new_tests/conftest.py @@ -51,12 +51,14 @@ def as_connection(request): as_client = aerospike.client(config).connect(user, password) request.cls.skip_old_server = True + request.cls.server_version = [] versioninfo = as_client.info_all('version') for keys in versioninfo: for value in versioninfo[keys]: if value is not None: versionlist = value[value.find("build") + 6:value.find("\n")].split(".") + request.cls.server_version = [int(n) for n in versionlist] if ((int(versionlist[0]) > 3) or (int(versionlist[0]) == 3 and int(versionlist[1]) >= 7)): request.cls.skip_old_server = False diff --git a/test/new_tests/test_list_clear.py b/test/new_tests/test_list_clear.py index 2c2acca08..7275c168a 100644 --- a/test/new_tests/test_list_clear.py +++ b/test/new_tests/test_list_clear.py @@ -85,7 +85,8 @@ def test_neg_list_clear_with_nonexistent_key(self): """ Invoke list_clear() with non-existent key """ - + if self.server_version < [3, 15, 2]: + pytest.skip("Change of error beginning in 3.15") key = ('test', 'demo', 'test_neg_list_clear_with_nonexistent_key') with pytest.raises(e.RecordNotFound): self.as_connection.list_clear(key, "contact_no") diff --git a/test/new_tests/test_list_pop.py b/test/new_tests/test_list_pop.py index 1a6df811f..e1549b1c4 100644 --- a/test/new_tests/test_list_pop.py +++ b/test/new_tests/test_list_pop.py @@ -96,6 +96,8 @@ def test_neg_list_pop_with_nonexistent_key(self): """ Invoke list_pop() with non-existent key """ + if self.server_version < [3, 15, 2]: + pytest.skip("Change of error beginning in 3.15") charSet = 'abcdefghijklmnopqrstuvwxyz1234567890' minLength = 5 maxLength = 30 diff --git a/test/new_tests/test_list_pop_range.py b/test/new_tests/test_list_pop_range.py index 952778331..2ae6aba02 100644 --- a/test/new_tests/test_list_pop_range.py +++ b/test/new_tests/test_list_pop_range.py @@ -99,6 +99,8 @@ def test_neg_list_pop_range_with_nonexistent_key(self): """ Invoke list_pop_range() with non-existent key """ + if self.server_version < [3, 15, 2]: + pytest.skip("Change of error beginning in 3.15") charSet = 'abcdefghijklmnopqrstuvwxyz1234567890' minLength = 5 maxLength = 30 diff --git a/test/new_tests/test_list_remove.py b/test/new_tests/test_list_remove.py index 779eb331a..7bc4a3959 100644 --- a/test/new_tests/test_list_remove.py +++ b/test/new_tests/test_list_remove.py @@ -103,6 +103,8 @@ def test_neg_list_remove_with_nonexistent_key(self): """ Invoke list_remove() with non-existent key """ + if self.server_version < [3, 15, 2]: + pytest.skip("Change of error beginning in 3.15") charSet = 'abcdefghijklmnopqrstuvwxyz1234567890' minLength = 5 maxLength = 30 diff --git a/test/new_tests/test_list_remove_range.py b/test/new_tests/test_list_remove_range.py index 0a6ceaa8a..f956732c9 100644 --- a/test/new_tests/test_list_remove_range.py +++ 
b/test/new_tests/test_list_remove_range.py
@@ -108,6 +108,8 @@ def test_neg_list_remove_range_with_nonexistent_key(self):
         """
         Invoke list_remove_range() with non-existent key
         """
+        if self.server_version < [3, 15, 2]:
+            pytest.skip("Change of error beginning in 3.15")
         charSet = 'abcdefghijklmnopqrstuvwxyz1234567890'
         minLength = 5
         maxLength = 30
diff --git a/test/new_tests/test_list_trim.py b/test/new_tests/test_list_trim.py
index cf3a20846..eb03f2964 100644
--- a/test/new_tests/test_list_trim.py
+++ b/test/new_tests/test_list_trim.py
@@ -106,6 +106,8 @@ def test_neg_list_trim_with_nonexistent_key(self):
         """
         Invoke list_trim() with non-existent key
         """
+        if self.server_version < [3, 15, 2]:
+            pytest.skip("Change of error beginning in 3.15")
         charSet = 'abcdefghijklmnopqrstuvwxyz1234567890'
         minLength = 5
         maxLength = 30

From 93dfe1f944cdb3b38596592775ae96b6fe920538 Mon Sep 17 00:00:00 2001
From: Robert Marks
Date: Wed, 14 Feb 2018 18:38:33 +0000
Subject: [PATCH 13/16] Added note about checking in doubt status from an exception

---
 doc/exception.rst | 228 +++++----------------------------------------
 1 file changed, 21 insertions(+), 207 deletions(-)

diff --git a/doc/exception.rst b/doc/exception.rst
index f2b46dec5..d0dbe2c82 100644
--- a/doc/exception.rst
+++ b/doc/exception.rst
@@ -22,10 +22,25 @@
         except ClientError as e:
             print("Error: {0} [{1}]".format(e.msg, e.code))
 
-    .. versionadded:: 1.0.44
 
+In Doubt Status
+---------------
+    The in doubt status of a caught exception can be checked by looking at the 5th element of its `args` tuple.
+
+    .. code-block:: python
+
+        key = 'test', 'demo', 1
+        record = {'some': 'thing'}
+        try:
+            client.put(key, record)
+        except AerospikeError as exc:
+            print("The in doubt status of the operation is: {}".format(exc.args[4]))
 
+.. versionadded:: 3.0.1
+
+Exception Types
+---------------
 .. py:exception:: AerospikeError
 
     The parent class of all exceptions raised by the Aerospike client, inherits
@@ -69,9 +84,9 @@
     reserved for the specified namespace.
     Subclass of :py:exc:`~aerospike.exception.ServerError`.
 
-.. py:exception:: NoXDR
+.. py:exception:: AlwaysForbidden
 
-    XDR is not available for the cluster.
+    Operation not allowed in current configuration.
     Subclass of :py:exc:`~aerospike.exception.ServerError`.
 
 .. py:exception:: UnsupportedFeature
@@ -311,167 +326,6 @@
     Subclass of :py:exc:`~aerospike.exception.UDFError`.
 
-.. py:exception:: LDTError
-
-    The parent class for Large Data Type exceptions.
-    Subclass of :py:exc:`~aerospike.exception.ServerError`.
-
-    .. py:attribute:: key
-
-        The key identifying the record.
-
-    .. py:attribute:: bin
-
-        The bin containing the LDT.
-
-.. py:exception:: LargeItemNotFound
-
-    Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTInternalError
-
-    Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTNotFound
-
-    Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTUniqueKeyError
-
-    Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTInsertError
-
-    Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTSearchError
-
-    Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTDeleteError
-
-    Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTInputParamError
-
-    Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTTypeMismatch
-
-    Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. 
py:exception:: LDTBinNameNull - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTBinNameNotString - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTBinNameTooLong - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTTooManyOpenSubrecs - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTTopRecNotFound - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTSubRecNotFound - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTBinNotFound - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTBinExistsError - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTBinDamaged - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTSubrecPoolDamaged - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTSubrecDamaged - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTSubrecOpenError - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTSubrecUpdateError - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTSubrecCreateError - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTSubrecDeleteError - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTSubrecCloseError - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTToprecUpdateError - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTToprecCreateError - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTFilterFunctionBad - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTFilterFunctionNotFound - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTKeyFunctionBad - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTKeyFunctionNotFound - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTTransFunctionBad - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTTransFunctionNotFound - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTUntransFunctionBad - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTUntransFunctionNotFound - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTUserModuleBad - - Subclass of :py:exc:`~aerospike.exception.LDTError`. - -.. py:exception:: LDTUserModuleNotFound - - Subclass of :py:exc:`~aerospike.exception.LDTError`. 
- Exception Hierarchy ------------------- @@ -486,7 +340,7 @@ Exception Hierarchy +-- ServerError (1) +-- InvalidRequest (4) +-- ServerFull (8) - +-- NoXDR (10) + +-- AlwaysForbidden (10) +-- UnsupportedFeature (16) +-- DeviceOverload (18) +-- NamespaceNotFound (20) @@ -533,45 +387,5 @@ Exception Hierarchy | +-- InvalidPrivilege (72) | +-- NotAuthenticated (80) +-- UDFError (*) - | +-- UDFNotFound (1301) - | +-- LuaFileNotFound (1302) - +-- LDTError (*) - +-- LargeItemNotFound (125) - +-- LDTInternalError (1400) - +-- LDTNotFound (1401) - +-- LDTUniqueKeyError (1402) - +-- LDTInsertError (1403) - +-- LDTSearchError (1404) - +-- LDTDeleteError (1405) - +-- LDTInputParamError (1409) - +-- LDTTypeMismatch (1410) - +-- LDTBinNameNull (1411) - +-- LDTBinNameNotString (1412) - +-- LDTBinNameTooLong (1413) - +-- LDTTooManyOpenSubrecs (1414) - +-- LDTTopRecNotFound (1415) - +-- LDTSubRecNotFound (1416) - +-- LDTBinNotFound (1417) - +-- LDTBinExistsError (1418) - +-- LDTBinDamaged (1419) - +-- LDTSubrecPoolDamaged (1420) - +-- LDTSubrecDamaged (1421) - +-- LDTSubrecOpenError (1422) - +-- LDTSubrecUpdateError (1423) - +-- LDTSubrecCreateError (1424) - +-- LDTSubrecDeleteError (1425) - +-- LDTSubrecCloseError (1426) - +-- LDTToprecUpdateError (1427) - +-- LDTToprecCreateError (1428) - +-- LDTFilterFunctionBad (1430) - +-- LDTFilterFunctionNotFound (1431) - +-- LDTKeyFunctionBad (1432) - +-- LDTKeyFunctionNotFound (1433) - +-- LDTTransFunctionBad (1434) - +-- LDTTransFunctionNotFound (1435) - +-- LDTUntransFunctionBad (1436) - +-- LDTUntransFunctionNotFound (1437) - +-- LDTUserModuleBad (1438) - +-- LDTUserModuleNotFound (1439) - - + +-- UDFNotFound (1301) + +-- LuaFileNotFound (1302) \ No newline at end of file From 57f4f25089e57bdeaa43622c607b866cdae12038 Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Wed, 14 Feb 2018 21:11:13 +0000 Subject: [PATCH 14/16] Improve server version checking --- test/new_tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/new_tests/conftest.py b/test/new_tests/conftest.py index c658470e0..aff85af6b 100644 --- a/test/new_tests/conftest.py +++ b/test/new_tests/conftest.py @@ -58,7 +58,7 @@ def as_connection(request): if value is not None: versionlist = value[value.find("build") + 6:value.find("\n")].split(".") - request.cls.server_version = [int(n) for n in versionlist] + request.cls.server_version = [int(n) for n in versionlist[:2]] if ((int(versionlist[0]) > 3) or (int(versionlist[0]) == 3 and int(versionlist[1]) >= 7)): request.cls.skip_old_server = False From ceaca3303e9d1b28f1fc18d0c0bb6b51ddcacdc7 Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Thu, 15 Feb 2018 17:24:40 +0000 Subject: [PATCH 15/16] Add functionality to force info node test to go to ipv4 addressess --- test/new_tests/test_info_node.py | 27 +++++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/test/new_tests/test_info_node.py b/test/new_tests/test_info_node.py index 7ce1faab5..23aae5e1d 100644 --- a/test/new_tests/test_info_node.py +++ b/test/new_tests/test_info_node.py @@ -2,6 +2,7 @@ import pytest import time import sys +import socket from .test_base_class import TestBaseClass from .as_status_codes import AerospikeStatus @@ -26,12 +27,18 @@ def as_unicode(string): @pytest.mark.xfail(TestBaseClass.temporary_xfail(), reason="xfail variable set") @pytest.mark.usefixtures("as_connection", "connection_config") class TestInfoNode(object): + pytest_skip = False @pytest.fixture(autouse=True) def setup(self, request, as_connection, 
connection_config): key = ('test', 'demo', 'list_key') rec = {'names': ['John', 'Marlen', 'Steve']} self.host_name = self.connection_config['hosts'][0] + try: + host_addrinfo = socket.getaddrinfo(self.host_name[0], self.host_name[1], socket.AF_INET) + self.host_name = host_addrinfo[0][4] + except socket.gaierror: + self.pytest_skip = True self.as_connection.put(key, rec) yield @@ -42,6 +49,8 @@ def test_info_node_positive(self): """ Test info with correct arguments """ + if self.pytest_skip: + pytest.xfail() response = self.as_connection.info_node( 'bins', self.host_name) @@ -52,7 +61,8 @@ def test_info_node_positive_for_namespace(self): """ Test info with 'namespaces' as the command """ - + if self.pytest_skip: + pytest.xfail() response = self.as_connection.info_node( 'namespaces', self.host_name) @@ -62,7 +72,8 @@ def test_info_node_positive_for_sets(self): """ Test info with 'sets' as the command """ - + if self.pytest_skip: + pytest.xfail() response = self.as_connection.info_node( 'sets', self.host_name) @@ -72,6 +83,8 @@ def test_info_node_positive_for_sindex_creation(self): """ Test creating an index through an info call """ + if self.pytest_skip: + pytest.xfail() try: self.as_connection.index_remove('test', 'names_test_index') time.sleep(2) @@ -92,7 +105,8 @@ def test_info_node_positive_with_correct_policy(self): """ Test info call with bins as command and a timeout policy """ - + if self.pytest_skip: + pytest.xfail() host = self.host_name policy = {'timeout': 1000} response = self.as_connection.info_node('bins', host, policy) @@ -103,7 +117,8 @@ def test_info_node_positive_with_host(self): """ Test info with correct host """ - + if self.pytest_skip: + pytest.xfail() host = self.host_name response = self.as_connection.info_node('bins', host) @@ -113,6 +128,8 @@ def test_info_node_positive_with_all_parameters(self): """ Test info with all parameters """ + if self.pytest_skip: + pytest.xfail() policy = { 'timeout': 1000 } @@ -125,6 +142,8 @@ def test_info_node_with_unicode_request_string_and_host_name(self): """ Test info with all parameters """ + if self.pytest_skip: + pytest.xfail() if TestBaseClass.tls_in_use(): host = (as_unicode(self.connection_config['hosts'][0][0]), self.connection_config['hosts'][0][1], From 5bcf004d5574416eb00a519aa7b03cbd5a851ae2 Mon Sep 17 00:00:00 2001 From: Robert Marks Date: Thu, 15 Feb 2018 17:39:41 +0000 Subject: [PATCH 16/16] force use of transformed ip in info node tests for unicode --- test/new_tests/test_info_node.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/test/new_tests/test_info_node.py b/test/new_tests/test_info_node.py index 23aae5e1d..30c3ef406 100644 --- a/test/new_tests/test_info_node.py +++ b/test/new_tests/test_info_node.py @@ -36,7 +36,10 @@ def setup(self, request, as_connection, connection_config): self.host_name = self.connection_config['hosts'][0] try: host_addrinfo = socket.getaddrinfo(self.host_name[0], self.host_name[1], socket.AF_INET) - self.host_name = host_addrinfo[0][4] + if len(self.host_name) == 3: + self.host_name = (host_addrinfo[0][4][0], host_addrinfo[0][4][0], self.host_name[2]) + else: + self.host_name = host_addrinfo[0][4] except socket.gaierror: self.pytest_skip = True self.as_connection.put(key, rec) @@ -145,12 +148,12 @@ def test_info_node_with_unicode_request_string_and_host_name(self): if self.pytest_skip: pytest.xfail() if TestBaseClass.tls_in_use(): - host = (as_unicode(self.connection_config['hosts'][0][0]), - self.connection_config['hosts'][0][1], - 
as_unicode(self.connection_config['hosts'][0][2])) + host = (as_unicode(self.host_name[0]), + self.host_name[1], + as_unicode(self.host_name[2])) else: - host = (as_unicode(self.connection_config['hosts'][0][0]), - self.connection_config['hosts'][0][1]) + host = (as_unicode(self.host_name[0]), + self.host_name[1]) policy = { 'timeout': 1000 }
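
The info_node test changes above hinge on normalizing a configured host tuple to a plain IPv4 address (via socket.getaddrinfo) before it is passed to info_node, while keeping the original port and optional TLS name. The sketch below is a minimal standalone illustration of that approach; the helper name to_ipv4_host and the ('localhost', 3000) example tuple are assumptions for illustration, not code taken from these patches.

import socket

def to_ipv4_host(host):
    # host is (hostname, port) or (hostname, port, tls_name); return the same
    # tuple shape with the hostname replaced by its first resolved IPv4 address.
    try:
        # AF_INET restricts resolution to IPv4 results only.
        addrinfo = socket.getaddrinfo(host[0], host[1], socket.AF_INET)
    except socket.gaierror:
        # No IPv4 address could be resolved; a test would skip or xfail here.
        return None
    ipv4_addr = addrinfo[0][4][0]
    if len(host) == 3:
        return (ipv4_addr, host[1], host[2])
    return (ipv4_addr, host[1])

# Illustrative usage with a hypothetical host tuple:
print(to_ipv4_host(('localhost', 3000)))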