diff --git a/VERSION b/VERSION
index cb2b00e4f..b50214693 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-3.0.1
+3.0.2
diff --git a/doc/exception.rst b/doc/exception.rst
index f2b46dec5..d0dbe2c82 100644
--- a/doc/exception.rst
+++ b/doc/exception.rst
@@ -22,10 +22,25 @@
except ClientError as e:
print("Error: {0} [{1}]".format(e.msg, e.code))
-
.. versionadded:: 1.0.44
+In Doubt Status
+---------------
+ The in doubt status of a caught exception can be checked by looking at the 5th element of its `args` tuple
+
+ .. code-block:: python
+
+ key = 'test', 'demo', 1
+ record = {'some': 'thing'}
+ try:
+ client.put(key, record)
+ except AerospikeError as exc:
+ print("The in doubt nature of the operation is: {}".format(exc.args[4]))
+.. versionadded:: 3.0.1
+
+Exception Types
+---------------
.. py:exception:: AerospikeError
The parent class of all exceptions raised by the Aerospike client, inherits
@@ -69,9 +84,9 @@
reserved for the specified namespace.
Subclass of :py:exc:`~aerospike.exception.ServerError`.
-.. py:exception:: NoXDR
+.. py:exception:: AlwaysForbidden
- XDR is not available for the cluster.
+ Operation not allowed in current configuration.
Subclass of :py:exc:`~aerospike.exception.ServerError`.
.. py:exception:: UnsupportedFeature
@@ -311,167 +326,6 @@
Subclass of :py:exc:`~aerospike.exception.UDFError`.
-.. py:exception:: LDTError
-
- The parent class for Large Data Type exceptions.
- Subclass of :py:exc:`~aerospike.exception.ServerError`.
-
- .. py:attribute:: key
-
- The key identifying the record.
-
- .. py:attribute:: bin
-
- The bin containing the LDT.
-
-.. py:exception:: LargeItemNotFound
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTInternalError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTNotFound
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTUniqueKeyError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTInsertError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTSearchError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTDeleteError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTInputParamError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTTypeMismatch
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTBinNameNull
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTBinNameNotString
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTBinNameTooLong
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTTooManyOpenSubrecs
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTTopRecNotFound
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTSubRecNotFound
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTBinNotFound
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTBinExistsError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTBinDamaged
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTSubrecPoolDamaged
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTSubrecDamaged
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTSubrecOpenError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTSubrecUpdateError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTSubrecCreateError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTSubrecDeleteError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTSubrecCloseError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTToprecUpdateError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTToprecCreateError
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTFilterFunctionBad
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTFilterFunctionNotFound
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTKeyFunctionBad
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTKeyFunctionNotFound
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTTransFunctionBad
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTTransFunctionNotFound
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTUntransFunctionBad
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTUntransFunctionNotFound
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTUserModuleBad
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
-.. py:exception:: LDTUserModuleNotFound
-
- Subclass of :py:exc:`~aerospike.exception.LDTError`.
-
Exception Hierarchy
-------------------
@@ -486,7 +340,7 @@ Exception Hierarchy
+-- ServerError (1)
+-- InvalidRequest (4)
+-- ServerFull (8)
- +-- NoXDR (10)
+ +-- AlwaysForbidden (10)
+-- UnsupportedFeature (16)
+-- DeviceOverload (18)
+-- NamespaceNotFound (20)
@@ -533,45 +387,5 @@ Exception Hierarchy
| +-- InvalidPrivilege (72)
| +-- NotAuthenticated (80)
+-- UDFError (*)
- | +-- UDFNotFound (1301)
- | +-- LuaFileNotFound (1302)
- +-- LDTError (*)
- +-- LargeItemNotFound (125)
- +-- LDTInternalError (1400)
- +-- LDTNotFound (1401)
- +-- LDTUniqueKeyError (1402)
- +-- LDTInsertError (1403)
- +-- LDTSearchError (1404)
- +-- LDTDeleteError (1405)
- +-- LDTInputParamError (1409)
- +-- LDTTypeMismatch (1410)
- +-- LDTBinNameNull (1411)
- +-- LDTBinNameNotString (1412)
- +-- LDTBinNameTooLong (1413)
- +-- LDTTooManyOpenSubrecs (1414)
- +-- LDTTopRecNotFound (1415)
- +-- LDTSubRecNotFound (1416)
- +-- LDTBinNotFound (1417)
- +-- LDTBinExistsError (1418)
- +-- LDTBinDamaged (1419)
- +-- LDTSubrecPoolDamaged (1420)
- +-- LDTSubrecDamaged (1421)
- +-- LDTSubrecOpenError (1422)
- +-- LDTSubrecUpdateError (1423)
- +-- LDTSubrecCreateError (1424)
- +-- LDTSubrecDeleteError (1425)
- +-- LDTSubrecCloseError (1426)
- +-- LDTToprecUpdateError (1427)
- +-- LDTToprecCreateError (1428)
- +-- LDTFilterFunctionBad (1430)
- +-- LDTFilterFunctionNotFound (1431)
- +-- LDTKeyFunctionBad (1432)
- +-- LDTKeyFunctionNotFound (1433)
- +-- LDTTransFunctionBad (1434)
- +-- LDTTransFunctionNotFound (1435)
- +-- LDTUntransFunctionBad (1436)
- +-- LDTUntransFunctionNotFound (1437)
- +-- LDTUserModuleBad (1438)
- +-- LDTUserModuleNotFound (1439)
-
-
+ +-- UDFNotFound (1301)
+ +-- LuaFileNotFound (1302)
\ No newline at end of file
diff --git a/examples/client/client_big_list.py b/examples/client/client_big_list.py
new file mode 100644
index 000000000..c5fc2f7e6
--- /dev/null
+++ b/examples/client/client_big_list.py
@@ -0,0 +1,374 @@
+# -*- coding: utf-8 -*-
+##########################################################################
+# Copyright 2018 Aerospike, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+from __future__ import print_function
+import argparse
+
+import aerospike
+from aerospike import exception as as_exceptions
+'''
+This provides a rough implementation of an expandable list for Aerospike
+It utilizes a metadata record to provide information about associated subrecords.
+It should be considered to be roughly accurate, as distruptions and timeouts can
+cause stale data and pointers to be referenced.
+
+The rough algorithm to add an item is:
+
+1. Read top level metadata record to find a subrecord into which an item should be stored. If this fails
+ Create the top level record.
+2. Try to store the items in the appropriate subrecord.
+3. If step 2 fails because the subrecord is too big, create a new subrecord with the given items,
+ and increment the metadata record's reference count.
+
+Caveats:
+
+ 1. Due to records potentially being stored on different nodes, it is possible that the
+ metadata record exists, and is reachable, but certain subrecords are not reachable.
+ 2. In case of disruptions it is possible to have a subrecord created and not have a reference
+ to it stored in the top level metadata record.
+ 3. No ordering can be assumed, as various delays between additions of items may cause them to be
+ stored out of order.
+ 4. Fetching all subrecords, will only fetch subrecords known about at the time of the request. Subrecords created after
+ the metadata record has been read will not be returned.
+ 5. This design does not facilitate a way to remove items from the conceptual list.
+ 6. This design does not allow for writing a single record larger than the write-block-size.
+
+The top level record is roughly
+{
+ 'subrecord_count': #
+}
+
+Where the # is greater than or less than or equal to the number of subrecords
+
+each subrecord is roughly:
+{
+ 'items': [item10, item2, item5, item0, item100]
+}
+'''
+
+
+class ASMetadataRecordTooLarge(Exception):
+ pass
+
+
+class ClientSideBigList(object):
+ '''
+ Abstraction around an unbounded size list for Aerospike. Relies on a top level record
+ containing metadata about subrecords. When a subrecord fills up, a new subrecord is created
+ and the metadata is updated.
+ '''
+
+ def __init__(self, client, base_key, ns='test', setname='demo',
+ subrecourd_count_name='sr_count', subrecord_list_bin='items'):
+ '''
+ Args:
+ client (Aerospike.client): a connected client to be used to talk with the database
+ base_key (string): The base key around which all record keys will be constructed
+ if base_key is 'person1', the top record will have the key (ns, set, 'person1')
+ and subrecords will be of the form (ns, set, 'person1-1'), (ns, set, 'person1-2')...
+ ns (string): The namespace into which the records will be stored.
+ setname (string): The set into which the records will be stored.
+ subrecourd_count_name (string): The name of the bin in the metadata record which will store the count
+ of subrecords.
+ subrecord_list_bin (string): The name of the list bin in each of the subrecords.
+ '''
+ self.ns = ns
+ self.set = setname
+ self.client = client
+ self.base_key = base_key
+ self.metadata_key = (self.ns, self.set, self.base_key)
+
+ # This is the name of the bin in the top record which contains a set of subrecords
+ # There are at least this many subrecords. Unless the value of this is 1, in which case there may be no
+ # subrecords.
+ self.subrecourd_count_name = subrecourd_count_name
+
+ # This is the name of the bin containing items in each of the subrecords
+ self.subrecord_list_bin = subrecord_list_bin
+
+ def get_metadata_record(self):
+ '''
+ Fetches the top level record containing metadata about this list.
+ Raises RecordNotFound if the record does not exist.
+
+ Returns:
+ Tuple: The record tuple: (key, meta, bins) for the metadata record if found
+
+ Raises:
+ RecordNotFound: If the metadata record for this list does not yet exist.
+ AerospikeError: If the operation fails
+ '''
+ return self.client.get(self.metadata_key)
+
+ def add_item(self, item):
+ '''
+ Add a given item to this conceptual group of lists. If a top level
+ record has not yet been created, this operation will create it.
+
+ Args:
+ item: The item to be stored into the list.
+ Raises:
+ AerospikeError:
+ If communication with the cluster fails.
+ '''
+
+ try:
+ _, meta, bins = self.get_metadata_record()
+ generation = meta['gen']
+ subrecord_count = bins[self.subrecourd_count_name]
+ subrecord_count = max(1, subrecord_count) # Ensure that the count is at least 1
+
+ self._create_or_update_subrecord(item, subrecord_count, generation)
+
+ except as_exceptions.RecordNotFound:
+ try:
+ # If this fails for a reason other than it already existing, error out
+ self._create_metadata_record()
+ userkey_1 = self._make_user_key(1)
+ # Create the first subrecord
+ self.client.put((self.ns, self.set, userkey_1), {self.subrecord_list_bin: []})
+ # Metadata record has just been created. Add the first subrecord
+ self._create_or_update_subrecord(item, 1, 1)
+ except as_exceptions.RecordExistsError:
+ # If the metadata record already exists, try to insert into the correct subrecord by recursing.
+ self.add_item(item)
+
+ def get_all_entries(self, extended_search=False):
+ '''
+ Get all of the entries from all subrecords flattened and buffered into a list
+
+ Args:
+ extended_search(bool): Whether to attempt to fetch records beyond the count of known subrecords
+ Returns:
+ list: A list of all of the items from all subrecords for this record
+ None: If the metadata record does not exist.
+ Raises:
+ AerospikeError:
+ If any of the fetch operations fail.
+ '''
+ try:
+ _, _, bins = self.get_metadata_record()
+ min_count = bins[self.subrecourd_count_name]
+ keys = []
+ for i in range(1, min_count + 1):
+ key = (
+ self.ns,
+ self.set,
+ self._make_user_key(i)
+ )
+ keys.append(key)
+
+ subrecords = self.client.get_many(keys)
+ entries = self._get_items_from_subrecords(subrecords)
+
+ # Try to get subrecords beyond the listed amount.
+ # It is possible but not guaranteed that they exist.
+ if extended_search:
+ record_number = min_count + 1
+ while True:
+ key = (self.ns, self.set, self._make_user_key(record_number))
+ try:
+ _, _, bins = self.client.get(key)
+ except as_exceptions.RecordNotFound:
+ break
+ # Flatten items to stay consistent with _get_items_from_subrecords
+ entries.extend(bins[self.subrecord_list_bin])
+ # Advance to the next subrecord; otherwise this loops forever
+ record_number += 1
+
+ return entries
+ except as_exceptions.RecordNotFound:
+ return None
+
+ def _make_user_key(self, record_number):
+ '''
+ Returns a formatted string to be used as the userkey portion of a key.
+
+ Args:
+ record_number (int): Integer >= 1 specifying which subrecord for which to create a key.
+
+ Returns:
+ string: A formatted string of the form: 'base-#'
+ '''
+ return "{}-{}".format(self.base_key, record_number)
+
+ def _create_metadata_record(self):
+ '''
+ Create the top level information about the key.
+
+ Raises:
+ RecordExistsError:
+ If the metadata record already exists.
+ AerospikeError:
+ If the operation fails for any other reason
+ '''
+
+ # Only create the metadata record if it does not exist
+ policy = {'exists': aerospike.POLICY_EXISTS_CREATE}
+ self.client.put(self.metadata_key, {self.subrecourd_count_name: 1}, policy=policy)
+
+ def _create_or_update_subrecord(self, item, subrecord_number, generation, retries_remaining=3):
+ '''
+ Create a new subrecord for the item.
+ 1. Create or append an item to the given specified subrecord
+ 2. Update the top level metadata record to mark this subrecord's existence
+
+ If the update causes the specified record to be too large, the operation
+ is retried with a new subrecord number
+
+ Args:
+ item: The item to be inserted into a subrecord
+ subrecord_number (int): An integer >= 1 indicating which subrecord to insert into.
+ generation (int): The generation of the metadata record.
+ retries_remaining (int): Number of retries remaining for an error caused by inserting
+ a record which by itself is greater than the write block size.
+ Raises:
+ Subclass of AerospikeError if an operation fails for a reason
+ other than the update causing the record to be too large.
+ '''
+ subrecord_userkey = self._make_user_key(subrecord_number)
+ subrecord_record_key = (self.ns, self.set, subrecord_userkey)
+ try:
+ self.client.list_append(
+ subrecord_record_key, self.subrecord_list_bin, item)
+ except as_exceptions.RecordTooBig as e:
+ if retries_remaining == 0:
+ raise e
+ # The insert overflowed the size capacity of the record, increment the top level record metadata.
+ self._update_metadata_record(generation)
+ self._create_or_update_subrecord(item, subrecord_number + 1, generation, retries_remaining=(retries_remaining - 1))
+
+ def _update_metadata_record(self, generation):
+ '''
+ Increment the metadata record's count of subrecords. This is only safe to do if the generation of the metadata matches
+ the expected value. Ignore if this fails.
+ '''
+ update_policy = {'gen': aerospike.POLICY_GEN_EQ}
+ meta = {'gen': generation}
+ try:
+ self.client.increment(self.metadata_key, self.subrecourd_count_name, 1, meta=meta, policy=update_policy)
+ except as_exceptions.RecordTooBig:
+ raise ASMetadataRecordTooLarge
+ except as_exceptions.RecordGenerationError:
+ # This means that somebody else has updated the record count already. Don't risk updating again.
+ pass
+
+ def _get_items_from_subrecords(self, subrecords):
+ '''
+ Extract only the items from a list of subrecord tuples
+ given records = [
+ (key, meta, {'items': [1, 2, 3]}),
+ (key, meta, {'items': [4, 5, 6]}),
+ (key, meta, {'items': [7, 8, 9]})
+ ]
+ returns [1, 2, 3, 4, 5, 6, 7, 8, 9]
+
+ Args:
+ subrecords (list<(k, m, b)>): A list of record tuples.
+
+ Returns:
+ list: A flattened list of items. Containing all items from the subrecords.
+ '''
+ entries = []
+ # If a subrecord was included in the header of the top level record, but the matching subrecord
+ # was not found, ignore it.
+ for _, _, sr_bins in subrecords:
+ if sr_bins:
+ entries.extend(sr_bins[self.subrecord_list_bin])
+ return entries
+
+ def subrecord_iterator(self):
+
+ _, _, bins = self.get_metadata_record()
+ count = bins[self.subrecourd_count_name]
+
+ def sr_iter():
+ '''
+ Generator which fetches one subrecord at a time, and yields it to caller
+ '''
+ for i in range(1, count + 1):
+ key = (
+ self.ns,
+ self.set,
+ self._make_user_key(i)
+ )
+ try:
+ yield self.client.get(key)
+ except as_exceptions.RecordNotFound:
+ continue
+
+ # Instantiate the generator and return it.
+ return sr_iter()
+
+
+def main():
+ '''
+ Simple tests demonstrating the functionality.
+ If the database is set up with a small enough write block-size, several subrecords
+ will be created.
+ '''
+
+ optparser = argparse.ArgumentParser()
+
+ optparser.add_argument(
+ "--host", type=str, default="127.0.0.1", metavar="",
+ help="Address of Aerospike server.")
+
+ optparser.add_argument(
+ "--port", type=int, default=3000, metavar="",
+ help="Port of the Aerospike server.")
+
+ optparser.add_argument(
+ "--namespace", type=str, default="test", metavar="",
+ help="Namespace to use for this example")
+
+ optparser.add_argument(
+ "-s", "--set", type=str, default="demo", metavar="",
+ help="Set to use for this example")
+
+ optparser.add_argument(
+ "-i", "--items", type=int, default=1000, metavar="",
+ help="Number of items to store into the big list")
+
+ options = optparser.parse_args()
+ print(options)
+
+ client = aerospike.client({'hosts': [(options.host, options.port)]}).connect()
+ ldt = ClientSideBigList(client, 'person1_friends')
+
+ for i in range(options.items):
+ # Store a reasonably large item
+ ldt.add_item('friend{}'.format(i) * 100)
+
+ print("Stored {} items".format(options.items))
+
+ items = ldt.get_all_entries()
+ _, _, bins = ldt.get_metadata_record()
+ print(bins)
+ print("Known subrecord count is: {}".format(bins['sr_count']))
+ print("Fetched {} items:".format(len(items)))
+
+ count = 0
+ for sr in ldt.subrecord_iterator():
+ if sr:
+ count = count + 1
+
+ print("Records yielded: {}".format(count))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/setup.py b/setup.py
index 316fbff9e..74a04688d 100644
--- a/setup.py
+++ b/setup.py
@@ -72,7 +72,7 @@ def run(self):
os.environ['ARCHFLAGS'] = '-arch x86_64'
AEROSPIKE_C_VERSION = os.getenv('AEROSPIKE_C_VERSION')
if not AEROSPIKE_C_VERSION:
- AEROSPIKE_C_VERSION = '4.3.3'
+ AEROSPIKE_C_VERSION = '4.3.5'
DOWNLOAD_C_CLIENT = os.getenv('DOWNLOAD_C_CLIENT')
AEROSPIKE_C_HOME = os.getenv('AEROSPIKE_C_HOME')
PREFIX = None
diff --git a/src/include/policy_config.h b/src/include/policy_config.h
index 02bf89309..d6e04d8ec 100644
--- a/src/include/policy_config.h
+++ b/src/include/policy_config.h
@@ -24,6 +24,13 @@ as_status set_optional_uint32_property(uint32_t* target_ptr, PyObject* policy_di
as_status set_optional_bool_property(bool* target_ptr, PyObject* py_policy, const char* name);
as_status set_base_policy(as_policy_base* base, PyObject* py_policy);
+as_status set_optional_key(as_policy_key* target_ptr, PyObject* py_policy, const char* name);
+as_status set_optional_replica(as_policy_replica* target_ptr, PyObject* py_policy, const char* name);
+as_status set_optional_commit_level(as_policy_commit_level* target_ptr, PyObject* py_policy, const char* name);
+as_status set_optional_consistency_level(as_policy_consistency_level* target_ptr, PyObject* py_policy, const char* name);
+as_status set_optional_gen(as_policy_gen* target_ptr, PyObject* py_policy, const char* name);
+as_status set_optional_exists(as_policy_exists* target_ptr, PyObject* py_policy, const char* name);
+
as_status set_subpolicies(as_config* config, PyObject* py_policies);
as_status set_read_policy(as_policy_read* read_policy, PyObject* py_policy);
as_status set_write_policy(as_policy_write* write_policy, PyObject* py_policy);
diff --git a/src/main/aerospike.c b/src/main/aerospike.c
index 81ea00f31..e50613eb7 100644
--- a/src/main/aerospike.c
+++ b/src/main/aerospike.c
@@ -93,7 +93,7 @@ AerospikeConstants operator_constants[] = {
MOD_INIT(aerospike)
{
- const char version[8] = "3.0.1";
+ const char version[8] = "3.0.2";
// Makes things "thread-safe"
PyEval_InitThreads();
int i = 0;
diff --git a/src/main/client/admin.c b/src/main/client/admin.c
index f561091b3..56fec9485 100644
--- a/src/main/client/admin.c
+++ b/src/main/client/admin.c
@@ -830,7 +830,7 @@ PyObject * AerospikeClient_Admin_Query_Users( AerospikeClient * self, PyObject *
aerospike_query_users(self->as, &err, admin_policy_p, &users, &users_size);
Py_END_ALLOW_THREADS
if (err.code != AEROSPIKE_OK) {
- as_error_update(&err, err.code, err.message);
+ as_error_update(&err, err.code, NULL);
goto CLEANUP;
}
diff --git a/src/main/client/info_node.c b/src/main/client/info_node.c
index f95ae8a1f..6d098a7d4 100644
--- a/src/main/client/info_node.c
+++ b/src/main/client/info_node.c
@@ -174,7 +174,7 @@ static PyObject * AerospikeClient_InfoNode_Invoke(
goto CLEANUP;
}
} else {
- as_error_update(err, err->code, err->message);
+ as_error_update(err, err->code, NULL);
goto CLEANUP;
}
diff --git a/src/main/client/operate.c b/src/main/client/operate.c
index 6322a586a..45b95d125 100644
--- a/src/main/client/operate.c
+++ b/src/main/client/operate.c
@@ -863,7 +863,7 @@ static PyObject * AerospikeClient_OperateOrdered_Invoke(
}
if (err->code != AEROSPIKE_OK) {
- as_error_update(err, err->code, err->message);
+ as_error_update(err, err->code, NULL);
goto CLEANUP;
}
@@ -875,7 +875,7 @@ static PyObject * AerospikeClient_OperateOrdered_Invoke(
Py_END_ALLOW_THREADS
if (err->code != AEROSPIKE_OK) {
- as_error_update(err, err->code, err->message);
+ as_error_update(err, err->code, NULL);
goto CLEANUP;
}
diff --git a/src/main/client/operate_list.c b/src/main/client/operate_list.c
index 274f3d5c0..25f255268 100644
--- a/src/main/client/operate_list.c
+++ b/src/main/client/operate_list.c
@@ -450,6 +450,9 @@ PyObject * AerospikeClient_ListPop(AerospikeClient * self, PyObject * args, PyOb
PyObject *py_val = NULL;
if (rec && rec->bins.size) {
val_to_pyobject(self, &err, (as_val*) (rec->bins.entries[0].valuep), &py_val);
+ } else {
+ py_val = Py_None;
+ Py_INCREF(py_val);
}
CLEANUP:
@@ -510,6 +513,9 @@ PyObject * AerospikeClient_ListPopRange(AerospikeClient * self, PyObject * args,
}
if (rec && rec->bins.size) {
list_to_pyobject(self, &err, as_record_get_list(rec, bin), &py_list);
+ } else {
+ py_list = Py_None;
+ Py_INCREF(py_list);
}
CLEANUP:
@@ -744,6 +750,9 @@ PyObject * AerospikeClient_ListGet(AerospikeClient * self, PyObject * args, PyOb
PyObject *py_val = NULL;
if (rec && rec->bins.size) {
val_to_pyobject(self, &err, (as_val*) (rec->bins.entries[0].valuep), &py_val);
+ } else {
+ py_val = Py_None;
+ Py_INCREF(py_val);
}
CLEANUP:
diff --git a/src/main/conversions.c b/src/main/conversions.c
index 834bf1aea..87330553b 100644
--- a/src/main/conversions.c
+++ b/src/main/conversions.c
@@ -51,6 +51,7 @@
#define PY_EXCEPTION_MSG 1
#define PY_EXCEPTION_FILE 2
#define PY_EXCEPTION_LINE 3
+#define AS_PY_EXCEPTION_IN_DOUBT 4
as_status as_udf_file_to_pyobject( as_error *err, as_udf_file * entry, PyObject ** py_file )
{
@@ -1552,11 +1553,15 @@ bool error_to_pyobject(const as_error * err, PyObject ** obj)
PyObject * py_code = PyLong_FromLongLong(err->code);
PyObject * py_message = PyString_FromString(err->message);
- PyObject * py_err = PyTuple_New(4);
+ PyObject* py_in_doubt = err->in_doubt ? Py_True : Py_False;
+ Py_INCREF(py_in_doubt);
+
+ PyObject * py_err = PyTuple_New(5);
PyTuple_SetItem(py_err, PY_EXCEPTION_CODE, py_code);
PyTuple_SetItem(py_err, PY_EXCEPTION_MSG, py_message);
PyTuple_SetItem(py_err, PY_EXCEPTION_FILE, py_file);
PyTuple_SetItem(py_err, PY_EXCEPTION_LINE, py_line);
+ PyTuple_SetItem(py_err, AS_PY_EXCEPTION_IN_DOUBT, py_in_doubt);
*obj = py_err;
return true;
}
diff --git a/src/main/policy_config.c b/src/main/policy_config.c
index 81425c206..b5995a639 100644
--- a/src/main/policy_config.c
+++ b/src/main/policy_config.c
@@ -15,7 +15,13 @@
******************************************************************************/
#include "policy_config.h"
-
+as_status set_optional_key(as_policy_key* target_ptr, PyObject* py_policy, const char* name);
+as_status set_optional_replica(as_policy_replica* target_ptr, PyObject* py_policy, const char* name);
+as_status set_optional_commit_level(as_policy_commit_level* target_ptr, PyObject* py_policy, const char* name);
+as_status set_optional_consistency_level(as_policy_consistency_level* target_ptr, PyObject* py_policy, const char* name);
+as_status set_optional_gen(as_policy_gen* target_ptr, PyObject* py_policy, const char* name);
+as_status set_optional_exists(as_policy_exists* target_ptr, PyObject* py_policy, const char* name);
+as_status get_uint32_value(PyObject* py_policy_val, uint32_t* return_uint32);
/*
* py_policies must exist, and be a dictionary
*/
@@ -90,17 +96,17 @@ as_status set_read_policy(as_policy_read* read_policy, PyObject* py_policy) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&read_policy->key, py_policy, "key");
+ status = set_optional_key(&read_policy->key, py_policy, "key");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&read_policy->replica, py_policy, "replica");
+ status = set_optional_replica(&read_policy->replica, py_policy, "replica");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&read_policy->consistency_level, py_policy, "consistency_level");
+ status = set_optional_consistency_level(&read_policy->consistency_level, py_policy, "consistency_level");
if (status != AEROSPIKE_OK) {
return status;
}
@@ -135,27 +141,27 @@ as_status set_write_policy(as_policy_write* write_policy, PyObject* py_policy) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&write_policy->key, py_policy, "key");
+ status = set_optional_key(&write_policy->key, py_policy, "key");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&write_policy->replica, py_policy, "replica");
+ status = set_optional_replica(&write_policy->replica, py_policy, "replica");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&write_policy->commit_level, py_policy, "commit_level");
+ status = set_optional_commit_level(&write_policy->commit_level, py_policy, "commit_level");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&write_policy->gen, py_policy, "gen");
+ status = set_optional_gen(&write_policy->gen, py_policy, "gen");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&write_policy->exists, py_policy, "exists");
+ status = set_optional_exists(&write_policy->exists, py_policy, "exists");
if (status != AEROSPIKE_OK) {
return status;
}
@@ -190,22 +196,22 @@ as_status set_apply_policy(as_policy_apply* apply_policy, PyObject* py_policy) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&apply_policy->key, py_policy, "key");
+ status = set_optional_key(&apply_policy->key, py_policy, "key");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&apply_policy->replica, py_policy, "replica");
+ status = set_optional_replica(&apply_policy->replica, py_policy, "replica");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&apply_policy->gen, py_policy, "gen");
+ status = set_optional_gen(&apply_policy->gen, py_policy, "gen");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&apply_policy->commit_level, py_policy, "commit_level");
+ status = set_optional_commit_level(&apply_policy->commit_level, py_policy, "commit_level");
if (status != AEROSPIKE_OK) {
return status;
}
@@ -240,22 +246,22 @@ as_status set_remove_policy(as_policy_remove* remove_policy, PyObject* py_policy
return status;
}
- status = set_optional_uint32_property((uint32_t*)&remove_policy->key, py_policy, "key");
+ status = set_optional_key(&remove_policy->key, py_policy, "key");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&remove_policy->replica, py_policy, "replica");
+ status = set_optional_replica(&remove_policy->replica, py_policy, "replica");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&remove_policy->commit_level, py_policy, "commit_level");
+ status = set_optional_commit_level(&remove_policy->commit_level, py_policy, "commit_level");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&remove_policy->gen, py_policy, "gen");
+ status = set_optional_gen(&remove_policy->gen, py_policy, "gen");
if (status != AEROSPIKE_OK) {
return status;
}
@@ -339,27 +345,27 @@ as_status set_operate_policy(as_policy_operate* operate_policy, PyObject* py_pol
return status;
}
- status = set_optional_uint32_property((uint32_t*)&operate_policy->key, py_policy, "key");
+ status = set_optional_key(&operate_policy->key, py_policy, "key");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&operate_policy->replica, py_policy, "replica");
+ status = set_optional_replica(&operate_policy->replica, py_policy, "replica");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&operate_policy->commit_level, py_policy, "commit_level");
+ status = set_optional_commit_level(&operate_policy->commit_level, py_policy, "commit_level");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&operate_policy->gen, py_policy, "gen");
+ status = set_optional_gen(&operate_policy->gen, py_policy, "gen");
if (status != AEROSPIKE_OK) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&operate_policy->consistency_level, py_policy, "consistency_level");
+ status = set_optional_consistency_level(&operate_policy->consistency_level, py_policy, "consistency_level");
if (status != AEROSPIKE_OK) {
return status;
}
@@ -399,7 +405,7 @@ as_status set_batch_policy(as_policy_batch* batch_policy, PyObject* py_policy) {
return status;
}
- status = set_optional_uint32_property((uint32_t*)&batch_policy->consistency_level, py_policy, "consistency_level");
+ status = set_optional_consistency_level(&batch_policy->consistency_level, py_policy, "consistency_level");
if (status != AEROSPIKE_OK) {
return status;
}
@@ -473,6 +479,30 @@ as_status set_base_policy(as_policy_base* base_policy, PyObject* py_policy) {
return AEROSPIKE_OK;
}
+as_status get_uint32_value(PyObject* py_policy_val, uint32_t* return_uint32) {
+ long long int uint32_max = 0xFFFFFFFF;
+
+ if (!py_policy_val) {
+ return AEROSPIKE_ERR_PARAM;
+ }
+ if (PyInt_Check(py_policy_val)) {
+ long int_value = PyInt_AsLong(py_policy_val);
+
+ if (int_value == -1 && PyErr_Occurred()) {
+ PyErr_Clear();
+ return AEROSPIKE_ERR_PARAM;
+ }
+
+ if (int_value < 0 || int_value > uint32_max) {
+ return AEROSPIKE_ERR_PARAM;
+ }
+
+ *return_uint32 = (uint32_t)int_value;
+ return AEROSPIKE_OK;
+ }
+ return AEROSPIKE_ERR_PARAM;
+}
+
as_status set_optional_uint32_property(uint32_t* target_ptr, PyObject* py_policy, const char* name) {
PyObject* py_policy_val = NULL;
long long int uint32_max = 0xFFFFFFFF;
@@ -521,3 +551,123 @@ as_status set_optional_bool_property(bool* target_ptr, PyObject* py_policy, cons
}
return AEROSPIKE_ERR_PARAM;
}
+
+as_status set_optional_key(as_policy_key* target_ptr, PyObject* py_policy, const char* name) {
+ PyObject* py_policy_val = NULL;
+ if (!py_policy || !PyDict_Check(py_policy)) {
+ return AEROSPIKE_OK;
+ }
+
+ py_policy_val = PyDict_GetItemString(py_policy, name);
+ if (!py_policy_val || py_policy_val == Py_None) {
+ return AEROSPIKE_OK;
+ }
+
+ uint32_t out_uint32;
+    as_status status = get_uint32_value(py_policy_val, &out_uint32);
+ if (status != AEROSPIKE_OK) {
+ return status;
+ }
+ *target_ptr = (as_policy_key)out_uint32;
+ return AEROSPIKE_OK;
+}
+
+as_status set_optional_replica(as_policy_replica* target_ptr, PyObject* py_policy, const char* name) {
+ PyObject* py_policy_val = NULL;
+ if (!py_policy || !PyDict_Check(py_policy)) {
+ return AEROSPIKE_OK;
+ }
+
+ py_policy_val = PyDict_GetItemString(py_policy, name);
+ if (!py_policy_val || py_policy_val == Py_None) {
+ return AEROSPIKE_OK;
+ }
+
+ uint32_t out_uint32;
+    as_status status = get_uint32_value(py_policy_val, &out_uint32);
+ if (status != AEROSPIKE_OK) {
+ return status;
+ }
+ *target_ptr = (as_policy_replica)out_uint32;
+ return AEROSPIKE_OK;
+}
+
+as_status set_optional_commit_level(as_policy_commit_level* target_ptr, PyObject* py_policy, const char* name) {
+ PyObject* py_policy_val = NULL;
+ if (!py_policy || !PyDict_Check(py_policy)) {
+ return AEROSPIKE_OK;
+ }
+
+ py_policy_val = PyDict_GetItemString(py_policy, name);
+ if (!py_policy_val || py_policy_val == Py_None) {
+ return AEROSPIKE_OK;
+ }
+
+ uint32_t out_uint32;
+    as_status status = get_uint32_value(py_policy_val, &out_uint32);
+ if (status != AEROSPIKE_OK) {
+ return status;
+ }
+ *target_ptr = (as_policy_commit_level)out_uint32;
+ return AEROSPIKE_OK;
+}
+
+as_status set_optional_consistency_level(as_policy_consistency_level* target_ptr, PyObject* py_policy, const char* name) {
+ PyObject* py_policy_val = NULL;
+ if (!py_policy || !PyDict_Check(py_policy)) {
+ return AEROSPIKE_OK;
+ }
+
+ py_policy_val = PyDict_GetItemString(py_policy, name);
+ if (!py_policy_val || py_policy_val == Py_None) {
+ return AEROSPIKE_OK;
+ }
+
+ uint32_t out_uint32;
+    as_status status = get_uint32_value(py_policy_val, &out_uint32);
+ if (status != AEROSPIKE_OK) {
+ return status;
+ }
+ *target_ptr = (as_policy_consistency_level)out_uint32;
+ return AEROSPIKE_OK;
+}
+
+as_status set_optional_gen(as_policy_gen* target_ptr, PyObject* py_policy, const char* name) {
+ PyObject* py_policy_val = NULL;
+ if (!py_policy || !PyDict_Check(py_policy)) {
+ return AEROSPIKE_OK;
+ }
+
+ py_policy_val = PyDict_GetItemString(py_policy, name);
+ if (!py_policy_val || py_policy_val == Py_None) {
+ return AEROSPIKE_OK;
+ }
+
+ uint32_t out_uint32;
+    as_status status = get_uint32_value(py_policy_val, &out_uint32);
+ if (status != AEROSPIKE_OK) {
+ return status;
+ }
+ *target_ptr = (as_policy_gen)out_uint32;
+ return AEROSPIKE_OK;
+}
+
+as_status set_optional_exists(as_policy_exists* target_ptr, PyObject* py_policy, const char* name) {
+ PyObject* py_policy_val = NULL;
+ if (!py_policy || !PyDict_Check(py_policy)) {
+ return AEROSPIKE_OK;
+ }
+
+ py_policy_val = PyDict_GetItemString(py_policy, name);
+ if (!py_policy_val || py_policy_val == Py_None) {
+ return AEROSPIKE_OK;
+ }
+
+ uint32_t out_uint32;
+    as_status status = get_uint32_value(py_policy_val, &out_uint32);
+ if (status != AEROSPIKE_OK) {
+ return status;
+ }
+ *target_ptr = (as_policy_exists)out_uint32;
+ return AEROSPIKE_OK;
+}
diff --git a/test/bin_lua.lua b/test/bin_lua.lua
index 2d33ab19d..a504cb629 100644
--- a/test/bin_lua.lua
+++ b/test/bin_lua.lua
@@ -1,4 +1,5 @@
function mytransform(rec, bin, offset)
+ info("my transform: %s", tostring(record.digest(rec)))
rec['age'] = rec['age'] + offset
aerospike:update(rec)
end
diff --git a/test/new_tests/conftest.py b/test/new_tests/conftest.py
index fe79a2de0..aff85af6b 100644
--- a/test/new_tests/conftest.py
+++ b/test/new_tests/conftest.py
@@ -51,12 +51,14 @@ def as_connection(request):
as_client = aerospike.client(config).connect(user, password)
request.cls.skip_old_server = True
+ request.cls.server_version = []
versioninfo = as_client.info_all('version')
for keys in versioninfo:
for value in versioninfo[keys]:
if value is not None:
versionlist = value[value.find("build") +
6:value.find("\n")].split(".")
+ request.cls.server_version = [int(n) for n in versionlist[:2]]
if ((int(versionlist[0]) > 3) or
(int(versionlist[0]) == 3 and int(versionlist[1]) >= 7)):
request.cls.skip_old_server = False
diff --git a/test/new_tests/test_base_class.py b/test/new_tests/test_base_class.py
index a9a70b3f3..beec5ee74 100644
--- a/test/new_tests/test_base_class.py
+++ b/test/new_tests/test_base_class.py
@@ -53,27 +53,37 @@ def get_tls_info():
config.read("config.conf")
if (config.has_option('tls', 'enable') and (
- config.get('tls', 'enable') == 'True')):
+ config.getboolean('tls', 'enable'))):
TestBaseClass.using_tls = True
tls_dict['enable'] = True
if config.has_option('tls', 'cafile'):
- tls_dict['cafile'] = config.get('tls', 'cafile')
+ if config.get('tls', 'cafile') != '':
+ tls_dict['cafile'] = config.get('tls', 'cafile')
if config.has_option('tls', 'capath'):
- tls_dict['capath'] = config.get('tls', 'capath')
+ if config.get('tls', 'capath') != '':
+ tls_dict['capath'] = config.get('tls', 'capath')
if config.has_option('tls', 'protocols'):
- tls_dict['protocols'] = config.get('tls', 'protocols')
+ if config.get('tls', 'protocols') != '':
+ tls_dict['protocols'] = config.get('tls', 'protocols')
if config.has_option('tls', 'cipher_suite'):
- tls_dict['cipher_suite'] = config.get('tls', 'cipher_suite')
+ if config.get('tls', 'cipher_suite') != '':
+ tls_dict['cipher_suite'] = config.get('tls', 'cipher_suite')
if config.has_option('tls', 'keyfile'):
- tls_dict['keyfile'] = config.get('tls', 'keyfile')
+ if config.get('tls', 'keyfile') != '':
+ tls_dict['keyfile'] = config.get('tls', 'keyfile')
+
+ if config.has_option('tls', 'cert_blacklist'):
+ if config.get('tls', 'cert_blacklist') != '':
+ tls_dict['cert_blacklist'] = config.get('tls', 'cert_blacklist')
if config.has_option('tls', 'certfile'):
- tls_dict['certfile'] = config.get('tls', 'certfile')
+ if config.get('tls', 'certfile') != '':
+ tls_dict['certfile'] = config.get('tls', 'certfile')
if config.has_option('tls', 'crl_check'):
if config.get('tls', 'crl_check') == 'True':
diff --git a/test/new_tests/test_info_node.py b/test/new_tests/test_info_node.py
index 7ce1faab5..30c3ef406 100644
--- a/test/new_tests/test_info_node.py
+++ b/test/new_tests/test_info_node.py
@@ -2,6 +2,7 @@
import pytest
import time
import sys
+import socket
from .test_base_class import TestBaseClass
from .as_status_codes import AerospikeStatus
@@ -26,12 +27,21 @@ def as_unicode(string):
@pytest.mark.xfail(TestBaseClass.temporary_xfail(), reason="xfail variable set")
@pytest.mark.usefixtures("as_connection", "connection_config")
class TestInfoNode(object):
+ pytest_skip = False
@pytest.fixture(autouse=True)
def setup(self, request, as_connection, connection_config):
key = ('test', 'demo', 'list_key')
rec = {'names': ['John', 'Marlen', 'Steve']}
self.host_name = self.connection_config['hosts'][0]
+ try:
+ host_addrinfo = socket.getaddrinfo(self.host_name[0], self.host_name[1], socket.AF_INET)
+ if len(self.host_name) == 3:
+                self.host_name = (host_addrinfo[0][4][0], host_addrinfo[0][4][1], self.host_name[2])
+ else:
+ self.host_name = host_addrinfo[0][4]
+ except socket.gaierror:
+ self.pytest_skip = True
self.as_connection.put(key, rec)
yield
@@ -42,6 +52,8 @@ def test_info_node_positive(self):
"""
Test info with correct arguments
"""
+ if self.pytest_skip:
+ pytest.xfail()
response = self.as_connection.info_node(
'bins', self.host_name)
@@ -52,7 +64,8 @@ def test_info_node_positive_for_namespace(self):
"""
Test info with 'namespaces' as the command
"""
-
+ if self.pytest_skip:
+ pytest.xfail()
response = self.as_connection.info_node(
'namespaces', self.host_name)
@@ -62,7 +75,8 @@ def test_info_node_positive_for_sets(self):
"""
Test info with 'sets' as the command
"""
-
+ if self.pytest_skip:
+ pytest.xfail()
response = self.as_connection.info_node(
'sets', self.host_name)
@@ -72,6 +86,8 @@ def test_info_node_positive_for_sindex_creation(self):
"""
Test creating an index through an info call
"""
+ if self.pytest_skip:
+ pytest.xfail()
try:
self.as_connection.index_remove('test', 'names_test_index')
time.sleep(2)
@@ -92,7 +108,8 @@ def test_info_node_positive_with_correct_policy(self):
"""
Test info call with bins as command and a timeout policy
"""
-
+ if self.pytest_skip:
+ pytest.xfail()
host = self.host_name
policy = {'timeout': 1000}
response = self.as_connection.info_node('bins', host, policy)
@@ -103,7 +120,8 @@ def test_info_node_positive_with_host(self):
"""
Test info with correct host
"""
-
+ if self.pytest_skip:
+ pytest.xfail()
host = self.host_name
response = self.as_connection.info_node('bins', host)
@@ -113,6 +131,8 @@ def test_info_node_positive_with_all_parameters(self):
"""
Test info with all parameters
"""
+ if self.pytest_skip:
+ pytest.xfail()
policy = {
'timeout': 1000
}
@@ -125,13 +145,15 @@ def test_info_node_with_unicode_request_string_and_host_name(self):
"""
Test info with all parameters
"""
+ if self.pytest_skip:
+ pytest.xfail()
if TestBaseClass.tls_in_use():
- host = (as_unicode(self.connection_config['hosts'][0][0]),
- self.connection_config['hosts'][0][1],
- as_unicode(self.connection_config['hosts'][0][2]))
+ host = (as_unicode(self.host_name[0]),
+ self.host_name[1],
+ as_unicode(self.host_name[2]))
else:
- host = (as_unicode(self.connection_config['hosts'][0][0]),
- self.connection_config['hosts'][0][1])
+ host = (as_unicode(self.host_name[0]),
+ self.host_name[1])
policy = {
'timeout': 1000
}
diff --git a/test/new_tests/test_list_clear.py b/test/new_tests/test_list_clear.py
index 8455810e3..7275c168a 100644
--- a/test/new_tests/test_list_clear.py
+++ b/test/new_tests/test_list_clear.py
@@ -85,17 +85,11 @@ def test_neg_list_clear_with_nonexistent_key(self):
"""
Invoke list_clear() with non-existent key
"""
- charSet = 'abcdefghijklmnopqrstuvwxyz1234567890'
- minLength = 5
- maxLength = 30
- length = random.randint(minLength, maxLength)
- key = ('test', 'demo', ''.join(map(lambda unused:
- random.choice(charSet),
- range(length))) + ".com")
- try:
+ if self.server_version < [3, 15, 2]:
+ pytest.skip("Change of error beginning in 3.15")
+ key = ('test', 'demo', 'test_neg_list_clear_with_nonexistent_key')
+ with pytest.raises(e.RecordNotFound):
self.as_connection.list_clear(key, "contact_no")
- except e.BinIncompatibleType as exception:
- assert exception.code == 12
def test_neg_list_clear_with_nonexistent_bin(self):
"""
diff --git a/test/new_tests/test_list_pop.py b/test/new_tests/test_list_pop.py
index 9fa7d1074..e1549b1c4 100644
--- a/test/new_tests/test_list_pop.py
+++ b/test/new_tests/test_list_pop.py
@@ -96,6 +96,8 @@ def test_neg_list_pop_with_nonexistent_key(self):
"""
Invoke list_pop() with non-existent key
"""
+ if self.server_version < [3, 15, 2]:
+ pytest.skip("Change of error beginning in 3.15")
charSet = 'abcdefghijklmnopqrstuvwxyz1234567890'
minLength = 5
maxLength = 30
@@ -103,11 +105,9 @@ def test_neg_list_pop_with_nonexistent_key(self):
key = ('test', 'demo', ''.join(map(lambda unused:
random.choice(charSet),
range(length))) + ".com")
- try:
- self.as_connection.list_pop(key, "abc", 0)
- except e.BinIncompatibleType as exception:
- assert exception.code == 12
+ with pytest.raises(e.RecordNotFound):
+ self.as_connection.list_pop(key, "abc", 0)
def test_neg_list_pop_with_nonexistent_bin(self):
"""
diff --git a/test/new_tests/test_list_pop_range.py b/test/new_tests/test_list_pop_range.py
index e08a2aa43..2ae6aba02 100644
--- a/test/new_tests/test_list_pop_range.py
+++ b/test/new_tests/test_list_pop_range.py
@@ -99,6 +99,8 @@ def test_neg_list_pop_range_with_nonexistent_key(self):
"""
Invoke list_pop_range() with non-existent key
"""
+ if self.server_version < [3, 15, 2]:
+ pytest.skip("Change of error beginning in 3.15")
charSet = 'abcdefghijklmnopqrstuvwxyz1234567890'
minLength = 5
maxLength = 30
@@ -106,11 +108,9 @@ def test_neg_list_pop_range_with_nonexistent_key(self):
key = ('test', 'demo', ''.join(map(lambda unused:
random.choice(charSet),
range(length))) + ".com")
- try:
- self.as_connection.list_pop_range(key, "abc", 0, 1)
- except e.BinIncompatibleType as exception:
- assert exception.code == 12
+ with pytest.raises(e.RecordNotFound):
+ self.as_connection.list_pop_range(key, "abc", 0, 1)
def test_neg_list_pop_range_with_nonexistent_bin(self):
"""
diff --git a/test/new_tests/test_list_remove.py b/test/new_tests/test_list_remove.py
index 85aa3ec15..7bc4a3959 100644
--- a/test/new_tests/test_list_remove.py
+++ b/test/new_tests/test_list_remove.py
@@ -103,6 +103,8 @@ def test_neg_list_remove_with_nonexistent_key(self):
"""
Invoke list_remove() with non-existent key
"""
+ if self.server_version < [3, 15, 2]:
+ pytest.skip("Change of error beginning in 3.15")
charSet = 'abcdefghijklmnopqrstuvwxyz1234567890'
minLength = 5
maxLength = 30
@@ -110,11 +112,9 @@ def test_neg_list_remove_with_nonexistent_key(self):
key = ('test', 'demo', ''.join(map(lambda unused:
random.choice(charSet),
range(length))) + ".com")
- try:
- self.as_connection.list_remove(key, "contact_no", 0)
- except e.BinIncompatibleType as exception:
- assert exception.code == 12
+ with pytest.raises(e.RecordNotFound):
+ self.as_connection.list_remove(key, "contact_no", 0)
def test_neg_list_remove_with_nonexistent_bin(self):
"""
diff --git a/test/new_tests/test_list_remove_range.py b/test/new_tests/test_list_remove_range.py
index 5cfbbd962..f956732c9 100644
--- a/test/new_tests/test_list_remove_range.py
+++ b/test/new_tests/test_list_remove_range.py
@@ -108,6 +108,8 @@ def test_neg_list_remove_range_with_nonexistent_key(self):
"""
Invoke list_remove_range() with non-existent key
"""
+ if self.server_version < [3, 15, 2]:
+ pytest.skip("Change of error beginning in 3.15")
charSet = 'abcdefghijklmnopqrstuvwxyz1234567890'
minLength = 5
maxLength = 30
@@ -115,12 +117,9 @@ def test_neg_list_remove_range_with_nonexistent_key(self):
key = ('test', 'demo', ''.join(map(lambda unused:
random.choice(charSet),
range(length))) + ".com")
- try:
+ with pytest.raises(e.RecordNotFound):
self.as_connection.list_remove_range(key, "abc", 0, 1)
- except e.BinIncompatibleType as exception:
- assert exception.code == 12
-
def test_neg_list_remove_range_with_nonexistent_bin(self):
"""
Invoke list_remove_range() with non-existent bin
diff --git a/test/new_tests/test_list_trim.py b/test/new_tests/test_list_trim.py
index 58b843349..eb03f2964 100644
--- a/test/new_tests/test_list_trim.py
+++ b/test/new_tests/test_list_trim.py
@@ -106,6 +106,8 @@ def test_neg_list_trim_with_nonexistent_key(self):
"""
Invoke list_trim() with non-existent key
"""
+ if self.server_version < [3, 15, 2]:
+ pytest.skip("Change of error beginning in 3.15")
charSet = 'abcdefghijklmnopqrstuvwxyz1234567890'
minLength = 5
maxLength = 30
@@ -113,12 +115,9 @@ def test_neg_list_trim_with_nonexistent_key(self):
key = ('test', 'demo', ''.join(map(lambda unused:
random.choice(charSet),
range(length))) + ".com")
- try:
+ with pytest.raises(e.RecordNotFound):
self.as_connection.list_trim(key, "abc", 0, 1)
- except e.BinIncompatibleType as exception:
- assert exception.code == 12
-
def test_neg_list_trim_with_nonexistent_bin(self):
"""
Invoke list_trim() with non-existent bin
diff --git a/test/new_tests/test_query_apply.py b/test/new_tests/test_query_apply.py
index 5f86e0935..710d8fc29 100644
--- a/test/new_tests/test_query_apply.py
+++ b/test/new_tests/test_query_apply.py
@@ -28,13 +28,37 @@ def add_indexes_to_client(client):
pass
+def create_records(client):
+ for i in range(1, 10):
+ key = ('test', 'demo', i)
+ rec = {'name': str(i), 'age': i, 'val': i}
+ client.put(key, rec)
+
+ key = ('test', None, "no_set")
+ rec = {'name': 'no_set_name', 'age': 0}
+ client.put(key, rec)
+
+
+def drop_records(client):
+ for i in range(1, 10):
+ key = ('test', 'demo', i)
+ try:
+ client.remove(key)
+ except e.RecordNotFound:
+ pass
+
+ try:
+ client.remove(('test', None, "no_set"))
+ except e.RecordNotFound:
+ pass
+
def add_test_udf(client):
policy = {}
- client.udf_put(u"bin_lua.lua", 0, policy)
+ client.udf_put(u"query_apply.lua", 0, policy)
def drop_test_udf(client):
- client.udf_remove("bin_lua.lua")
+ client.udf_remove("query_apply.lua")
def remove_indexes_from_client(client):
@@ -46,31 +70,15 @@ class TestQueryApply(object):
# These functions will run once for this test class, and do all of the
# required setup and teardown
- connection_setup_functions = (add_test_udf, add_indexes_to_client)
- connection_teardown_functions = (drop_test_udf, remove_indexes_from_client)
- age_range_pred = p.between('age', 0, 5) # Predicate for ages between [0,5)
+ connection_setup_functions = (add_test_udf, add_indexes_to_client, create_records)
+ connection_teardown_functions = (drop_test_udf, remove_indexes_from_client, drop_records)
+ age_range_pred = p.between('age', 0, 4) # Predicate for ages between [0,5)
+ no_set_key = ('test', None, "no_set") # Key for item stored in a namespace but not in a set
@pytest.fixture(autouse=True)
def setup(self, request, connection_with_config_funcs):
client = connection_with_config_funcs
- for i in range(1, 5):
- key = ('test', 'demo', i)
- rec = {'name': 'name%s' % (str(i)), 'age': i}
- client.put(key, rec)
-
- key = ('test', None, "no_set")
- self.no_set_key = key
- rec = {'name': 'no_set_name', 'age': 0}
- client.put(key, rec)
-
- def teardown():
- for i in range(1, 5):
- key = ('test', 'demo', i)
- client.remove(key)
-
- client.remove(self.no_set_key)
-
- request.addfinalizer(teardown)
+ create_records(client)
def test_query_apply_with_no_parameters(self):
"""
@@ -80,85 +88,43 @@ def test_query_apply_with_no_parameters(self):
with pytest.raises(TypeError) as typeError:
self.as_connection.query_apply()
- def test_query_apply_with_correct_parameters(self):
+ def test_query_apply_with_correct_parameters_no_policy(self):
"""
Invoke query_apply() with correct parameters.
It should apply the proper UDF, and
"""
query_id = self.as_connection.query_apply(
- "test", "demo", self.age_range_pred, "bin_lua",
- "mytransform", ['age', 2])
+ "test", "demo", self.age_range_pred, "query_apply",
+ "mark_as_applied", ['name', 2])
- time.sleep(0.1)
- while True:
- response = self.as_connection.job_info(
- query_id, aerospike.JOB_QUERY)
- if response['status'] != aerospike.JOB_STATUS_INPROGRESS:
- break
- time.sleep(0.1)
+ self._wait_for_query_complete(query_id)
- for i in range(1, 5):
- key = ('test', 'demo', i)
- _, _, bins = self.as_connection.get(key)
-
- assert bins['age'] == i + 2
-
- _, _, bins = self.as_connection.get(self.no_set_key)
- assert bins['age'] == 0
+ self._correct_items_have_been_applied()
def test_query_apply_with_correct_policy(self):
"""
Invoke query_apply() with correct policy
"""
- policy = {'timeout': 1000}
+ policy = {'total_timeout': 0}
query_id = self.as_connection.query_apply(
- "test", "demo", self.age_range_pred, "bin_lua",
- "mytransform", ['age', 2], policy)
+ "test", "demo", self.age_range_pred, "query_apply",
+ "mark_as_applied", ['name', 2], policy)
- time.sleep(0.1)
- while True:
- response = self.as_connection.job_info(
- query_id, aerospike.JOB_QUERY)
- if response['status'] != aerospike.JOB_STATUS_INPROGRESS:
- break
- time.sleep(0.1)
+ self._wait_for_query_complete(query_id)
+ self._correct_items_have_been_applied()
- for i in range(1, 5):
- key = ('test', 'demo', i)
- _, _, bins = self.as_connection.get(key)
- assert bins['age'] == i + 2
-
- _, _, bins = self.as_connection.get(self.no_set_key)
- assert bins['age'] == 0
-
- def test_query_apply_with_none_set(self):
+ def test_query_apply_with_set_argument_as_none(self):
"""
Invoke query_apply() with correct policy,
Should casuse no changes as the
"""
- policy = {'timeout': 1000}
+ policy = {'total_timeout': 0}
query_id = self.as_connection.query_apply(
- "test", None, self.age_range_pred, "bin_lua",
- "mytransform", ['age', 2], policy)
+ "test", None, self.age_range_pred, "query_apply",
+ "mark_as_applied", ['name', 2], policy)
- time.sleep(0.1)
- while True:
- response = self.as_connection.job_info(
- query_id, aerospike.JOB_QUERY)
- if response['status'] != aerospike.JOB_STATUS_INPROGRESS:
- break
- time.sleep(0.1)
-
- # Since this query passes no set, the records stored in a set should
- # not be affected
- for i in range(1, 5):
- key = ('test', 'demo', i)
- _, _, bins = self.as_connection.get(key)
- assert bins['age'] == i
-
- # the setless record should have been changed
- _, _, bins = self.as_connection.get(self.no_set_key)
- assert bins['age'] == 2
+ self._wait_for_query_complete(query_id)
+ self._items_without_set_have_been_applied()
def test_query_apply_with_incorrect_policy(self):
"""
@@ -168,25 +134,19 @@ def test_query_apply_with_incorrect_policy(self):
'timeout': 0.5
}
- with pytest.raises(e.ParamError) as err_info:
+ with pytest.raises(e.ParamError):
self.as_connection.query_apply(
- "test", "demo", self.age_range_pred, "bin_lua",
- "mytransform", ['age', 2], policy)
-
- err_code = err_info.value.code
- assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM
+ "test", "demo", self.age_range_pred, "query_apply",
+ "mark_as_applied", ['name', 2], policy)
- def test_query_apply_with_incorrect_ns_set(self):
+ def test_query_apply_with_nonexistent_set(self):
"""
Invoke query_apply() with incorrect ns and set
"""
- with pytest.raises(e.NamespaceNotFound) as err_info:
+ with pytest.raises(e.NamespaceNotFound):
self.as_connection.query_apply(
- "test1", "demo1", self.age_range_pred, "bin_lua",
- "mytransform", ['age', 2])
-
- err_code = err_info.value.code
- assert err_code == AerospikeStatus.AEROSPIKE_ERR_NAMESPACE_NOT_FOUND
+ "test1", "demo1", self.age_range_pred, "query_apply",
+ "mark_as_applied", ['name', 2])
def test_query_apply_with_incorrect_module_name(self):
"""
@@ -195,21 +155,11 @@ def test_query_apply_with_incorrect_module_name(self):
not call a function
"""
query_id = self.as_connection.query_apply(
- "test", "demo", self.age_range_pred, "bin_lua_incorrect",
- "mytransform", ['age', 2])
-
- time.sleep(0.1)
- while True:
- response = self.as_connection.job_info(
- query_id, aerospike.JOB_QUERY)
- if response['status'] != aerospike.JOB_STATUS_INPROGRESS:
- break
- time.sleep(0.1)
+ "test", "demo", self.age_range_pred, "query_apply_incorrect",
+ "mark_as_applied", ['name', 2])
- for i in range(1, 5):
- key = ('test', 'demo', i)
- _, _, bins = self.as_connection.get(key)
- assert bins['age'] == i
+ self._wait_for_query_complete(query_id)
+ self._no_items_have_been_applied()
def test_query_apply_with_incorrect_function_name(self):
"""
@@ -217,82 +167,58 @@ def test_query_apply_with_incorrect_function_name(self):
does not invoke a different function
"""
query_id = self.as_connection.query_apply(
- "test", "demo", self.age_range_pred, "bin_lua",
- "mytransform_incorrect", ['age', 2])
+ "test", "demo", self.age_range_pred, "query_apply",
+ "mytransform_incorrect", ['name', 2])
- time.sleep(0.1)
- while True:
- response = self.as_connection.job_info(
- query_id, aerospike.JOB_QUERY)
- if response['status'] != aerospike.JOB_STATUS_INPROGRESS:
- break
- time.sleep(0.1)
-
- for i in range(1, 5):
- key = ('test', 'demo', i)
- _, _, bins = self.as_connection.get(key)
- assert bins['age'] == i
+ self._wait_for_query_complete(query_id)
+ self._no_items_have_been_applied()
def test_query_apply_with_ns_set_none(self):
"""
Invoke query_apply() with ns and set as None
"""
- with pytest.raises(TypeError) as typeError:
+ with pytest.raises(TypeError):
self.as_connection.query_apply(None, None, self.age_range_pred,
- "bin_lua", "mytransform",
- ['age', 2])
+ "query_apply", "mark_as_applied",
+ ['name', 2])
- assert "query_apply() argument 1 must be str" in str(typeError.value)
-
- def test_query_apply_with_module_function_none(self):
+ def test_query_apply_with_module_argument_value_is_none(self):
"""
Invoke query_apply() with None module function
"""
- with pytest.raises(e.ParamError) as err_info:
+ with pytest.raises(e.ParamError):
self.as_connection.query_apply(
- "test", "demo", self.age_range_pred, None, None, ['age', 2])
-
- err_code = err_info.value.code
- assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM
+ "test", "demo", self.age_range_pred, None, None, ['name', 2])
- def test_query_apply_with_extra_argument(self):
+ def test_query_apply_with_too_many_arguments(self):
"""
Invoke query_apply() with extra argument
"""
policy = {'timeout': 1000}
- with pytest.raises(TypeError) as typeError:
+ with pytest.raises(TypeError):
self.as_connection.query_apply(
- "test", "demo", self.age_range_pred, "bin_lua",
- "mytransform_incorrect", ['age', 2], policy, "")
-
- assert "query_apply() takes at most 7 arguments (8 given)" in str(
- typeError.value)
+ "test", "demo", self.age_range_pred, "query_apply",
+ "mytransform_incorrect", ['name', 2], policy, "")
- def test_query_apply_with_argument_is_string(self):
+ def test_query_apply_with_udf_arguments_as_string(self):
"""
Invoke query_apply() with arguments as string
"""
- with pytest.raises(e.ParamError) as err_info:
+ with pytest.raises(e.ParamError):
self.as_connection.query_apply("test", "demo", self.age_range_pred,
- "bin_lua", "mytransform", "")
-
- err_code = err_info.value.code
- assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM
+ "query_apply", "mark_as_applied", "")
- def test_query_apply_with_argument_is_none(self):
+ def test_query_apply_with_udf_argument_as_none(self):
"""
Invoke query_apply() with arguments as None
"""
- with pytest.raises(e.ParamError) as err_info:
+ with pytest.raises(e.ParamError):
self.as_connection.query_apply(
"test", "demo", self.age_range_pred,
- "bin_lua", "mytransform", None)
-
- err_code = err_info.value.code
- assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM
+ "query_apply", "mark_as_applied", None)
- def test_query_apply_with_extra_call_to_lua(self):
+ def test_query_apply_with_extra_parameter_to_lua_function(self):
"""
Invoke query_apply() with extra call to lua
test that passing an extra argument to a udf does
@@ -300,96 +226,37 @@ def test_query_apply_with_extra_call_to_lua(self):
"""
query_id = self.as_connection.query_apply(
"test", "demo", self.age_range_pred,
- "bin_lua", "mytransform", ['age', 2, 3])
+ "query_apply", "mark_as_applied", ['name', 2, 3])
# time.sleep(2)
- time.sleep(0.1)
- while True:
- response = self.as_connection.job_info(
- query_id, aerospike.JOB_QUERY)
- if response['status'] != aerospike.JOB_STATUS_INPROGRESS:
- break
- time.sleep(0.1)
-
- for i in range(1, 5):
- key = ('test', 'demo', i)
- _, _, bins = self.as_connection.get(key)
- assert bins['age'] == i + 2
+ self._wait_for_query_complete(query_id)
+ self._correct_items_have_been_applied()
- def test_query_apply_with_extra_parameter_in_lua(self):
+ def test_query_apply_with_missing_parameter_to_function(self):
"""
Invoke query_apply() with a missing argument
to a lua function does not cause an error
"""
query_id = self.as_connection.query_apply(
"test", "demo", self.age_range_pred,
- "bin_lua", "mytransformextra", ['age', 2])
+ "query_apply", "mark_as_applied_three_arg", ['name', 2])
# time.sleep(2)
- time.sleep(0.1)
- while True:
- response = self.as_connection.job_info(
- query_id, aerospike.JOB_QUERY)
- if response['status'] != aerospike.JOB_STATUS_INPROGRESS:
- break
- time.sleep(0.1)
+ self._wait_for_query_complete(query_id)
+ self._correct_items_have_been_applied()
- for i in range(1, 5):
- key = ('test', 'demo', i)
- _, _, bins = self.as_connection.get(key)
- assert bins['age'] == i + 2
-
- def test_query_apply_with_less_parameter_in_lua(self):
- """
- Invoke query_apply() with less parameter in lua
- this verifies that passing 3 arguments to a lua
- function which expects 2, will cause an undefined int
- to be treated as 0, and not crash.
- """
- query_id = self.as_connection.query_apply(
- "test", "demo", self.age_range_pred,
- "bin_lua", "mytransformless", ['age', 2])
-
- # time.sleep(2)
-
- time.sleep(0.1)
- while True:
- response = self.as_connection.job_info(
- query_id, aerospike.JOB_QUERY)
- if response['status'] != aerospike.JOB_STATUS_INPROGRESS:
- break
- time.sleep(0.1)
-
- for i in range(1, 5):
- key = ('test', 'demo', i)
- _, _, bins = self.as_connection.get(key)
- if bins['age'] != i:
- assert True is False
- else:
- assert True is True
-
- def test_query_apply_unicode_input(self):
+ def test_query_apply_unicode_literal_for_strings(self):
"""
Invoke query_apply() with unicode udf
"""
query_id = self.as_connection.query_apply(
- "test", "demo", self.age_range_pred, u"bin_lua",
- u"mytransform", ['age', 2])
+ "test", "demo", self.age_range_pred, u"query_apply",
+ u"mark_as_applied", ['name', 2])
- time.sleep(0.1)
- while True:
- response = self.as_connection.job_info(
- query_id, aerospike.JOB_QUERY)
- if response['status'] != aerospike.JOB_STATUS_INPROGRESS:
- break
- time.sleep(0.1)
-
- for i in range(1, 5):
- key = ('test', 'demo', i)
- _, _, bins = self.as_connection.get(key)
- assert bins['age'] == i + 2
+ self._wait_for_query_complete(query_id)
+ self._correct_items_have_been_applied()
def test_query_apply_with_correct_parameters_without_connection(self):
"""
@@ -400,31 +267,12 @@ def test_query_apply_with_correct_parameters_without_connection(self):
with pytest.raises(e.ClusterError) as err_info:
client1.query_apply("test", "demo", self.age_range_pred,
- "bin_lua", "mytransform", ['age', 2])
+ "query_apply", "mark_as_applied", ['name', 2])
err_code = err_info.value.code
assert err_code == AerospikeStatus.AEROSPIKE_CLUSTER_ERROR
- @pytest.mark.skip(reason="This isn't a test for query_apply," +
- " but for job_info")
- def test_job_info_with_incorrect_module_type(self):
- """
- Invoke query_apply() with incorrect module type
- """
- query_id = self.as_connection.query_apply(
- "test", "demo", self.age_range_pred, "bin_lua",
- "mytransform", ['age', 2])
-
- with pytest.raises(e.ParamError) as err_info:
- time.sleep(0.1)
- self.as_connection.job_info(query_id, "aggregate")
- err_code = err_info.value.code
-
- assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM
-
- # @pytest.mark.xfail(reason="Passing an invalid predicate currently works " +
- # " or raises a System Error")
@pytest.mark.parametrize(
"predicate",
(
@@ -436,12 +284,53 @@ def test_job_info_with_incorrect_module_type(self):
(1, 1, 'bin') # start of a valid predicate
)
)
- def test_invalid_predicate(self, predicate):
+ def test_invalid_predicate_tuple(self, predicate):
with pytest.raises(e.ParamError) as err_info:
query_id = self.as_connection.query_apply(
- "test", "demo", predicate, "bin_lua",
- "mytransform", ['age', 2])
+ "test", "demo", predicate, "query_apply",
+ "mark_as_applied", ['name', 2])
err_code = err_info.value.code
assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM
+
+ def _correct_items_have_been_applied(self):
+ for i in range(1, 5):
+ key = ('test', 'demo', i)
+ _, _, bins = self.as_connection.get(key)
+ assert bins['name'] == 'aerospike'
+
+ for i in range(5, 10):
+ key = ('test', 'demo', i)
+ _, _, bins = self.as_connection.get(key)
+ assert bins['name'] != 'aerospike'
+
+ _, _, bins = self.as_connection.get(self.no_set_key)
+ assert bins['name'] != 'aerospike'
+
+ def _items_without_set_have_been_applied(self):
+ for i in range(1, 10):
+ key = ('test', 'demo', i)
+ _, _, bins = self.as_connection.get(key)
+ assert bins['name'] != 'aerospike'
+
+ _, _, bins = self.as_connection.get(self.no_set_key)
+ assert bins['name'] == 'aerospike'
+
+ def _wait_for_query_complete(self, query_id):
+ while True:
+ response = self.as_connection.job_info(
+ query_id, aerospike.JOB_QUERY)
+ if response['status'] != aerospike.JOB_STATUS_INPROGRESS:
+ return
+ time.sleep(0.1)
+
+ def _no_items_have_been_applied(self):
+
+ for i in range(1, 10):
+ key = ('test', 'demo', i)
+ _, _, bins = self.as_connection.get(key)
+ assert bins['name'] != 'aerospike'
+
+ _, _, bins = self.as_connection.get(self.no_set_key)
+ assert bins['name'] != 'aerospike'
diff --git a/test/query_apply.lua b/test/query_apply.lua
new file mode 100644
index 000000000..c83560d56
--- /dev/null
+++ b/test/query_apply.lua
@@ -0,0 +1,15 @@
+function mark_as_applied(rec, bin, unused)
+ info("mark_as_applied: %s", tostring(record.digest(rec)))
+ rec[bin] = 'aerospike'
+ aerospike:update(rec)
+end
+
+function mark_as_applied_three_arg(rec, bin, unused, unused1)
+ rec[bin] = 'aerospike'
+ aerospike:update(rec)
+end
+
+function mark_as_applied_one_arg(rec, bin)
+ rec[bin] = 'aerospike'
+ aerospike:update(rec)
+end