From 000af682451b6a0fa038069c36310a78c741cdc1 Mon Sep 17 00:00:00 2001 From: Kannan Dorairaj Date: Mon, 5 Aug 2019 19:52:04 +0530 Subject: [PATCH] Upgraded to Python3 with backward compatibility --- test/correctness/document-correctness.py | 88 +++++++-------- test/correctness/driver-correctness.py | 4 +- test/correctness/gen.py | 12 +-- test/correctness/mongo_model.py | 114 ++++++++++---------- test/correctness/preload_database.py | 8 +- test/correctness/setup_mongo.py | 22 ++-- test/correctness/smoke/test_core.py | 6 +- test/correctness/smoke/test_distinct.py | 32 +++--- test/correctness/smoke/test_numerical.py | 6 +- test/correctness/smoke/test_unique_index.py | 34 +++--- test/correctness/smoke/test_upsert.py | 6 +- test/correctness/test-automation.py | 20 ++-- test/correctness/util.py | 90 ++++++++-------- 13 files changed, 222 insertions(+), 220 deletions(-) diff --git a/test/correctness/document-correctness.py b/test/correctness/document-correctness.py index 9837234..4a647d6 100755 --- a/test/correctness/document-correctness.py +++ b/test/correctness/document-correctness.py @@ -49,7 +49,7 @@ def get_clients(str1, str2, ns): client_dict['mm'] = lambda: MongoModel("MongoDB") instance_id = str(random.random())[2:] if ns['instance_id'] == 0 else str(ns['instance_id']) - print 'Instance: ' + instance_id + print ('Instance: ' + instance_id) client1 = client_dict[str1]() client2 = client_dict[str2]() @@ -141,12 +141,12 @@ def diff_results(cA, rA, cB, rB): only_b = b - a if len(only_a) > 0 or len(only_b) > 0: - print " RESULT SET DIFFERENCES (as 'sets' so order within the returned results is not considered)" + print (" RESULT SET DIFFERENCES (as 'sets' so order within the returned results is not considered)") for x in only_a: - print " Only in", cA.__module__, ":", x + print (" Only in", cA.__module__, ":", x) for x in only_b: - print " Only in", cB.__module__, ":", x - print + print (" Only in", cB.__module__, ":", x) + print() zero_resp_queries = 0 @@ -166,7 +166,7 
@@ def check_query(query, collection1, collection2, projection=None, sort=None, lim ret1 = get_result(query, collection1, projection, sort, limit, skip, exception_msg) ret2 = get_result(query, collection2, projection, sort, limit, skip, exception_msg) if len(exception_msg) == 1: - print '\033[91m\n', exception_msg[0], '\033[0m' + print ('\033[91m\n', exception_msg[0], '\033[0m') return False global total_queries @@ -187,11 +187,11 @@ def check_query(query, collection1, collection2, projection=None, sort=None, lim assert ret1[i] == ret2[i] return True except AssertionError: - print '\nQuery results didn\'t match at index %d!' % i - print 'Query: %r' % query - print 'Projection: %r' % projection - print '\n %s' % format_result(collection1, ret1, i) - print ' %s\n' % format_result(collection2, ret2, i) + print ('\nQuery results didn\'t match at index %d!' % i) + print ('Query: %r' % query) + print ('Projection: %r' % projection) + print ('\n %s' % format_result(collection1, ret1, i)) + print (' %s\n' % format_result(collection2, ret2, i)) diff_results(collection1, ret1, collection2, ret2) @@ -201,12 +201,12 @@ def check_query(query, collection1, collection2, projection=None, sort=None, lim return False except IndexError: - print 'Query results didn\'t match!' 
- print 'Query: %r' % query - print 'Projection: %r' % projection + print ('Query results didn\'t match!') + print ('Query: %r' % query) + print ('Projection: %r' % projection) - print '\n %s' % format_result(collection1, ret1, i) - print ' %s\n' % format_result(collection2, ret2, i) + print ('\n %s' % format_result(collection1, ret1, i)) + print (' %s\n' % format_result(collection2, ret2, i)) diff_results(collection1, ret1, collection2, ret2) @@ -237,8 +237,8 @@ def test_update(collection1, collection2, verbose=False): if verbose: all = [x for x in collection1.find(dict())] for item in collection1.find(update['query']): - print '[{}] Before update doc:{}'.format(type(collection1), item) - print 'Before update collection1 size: ', len(all) + print ('[{}] Before update doc:{}'.format(type(collection1), item)) + print ('Before update collection1 size: ', len(all)) collection1.update(update['query'], update['update'], upsert=update['upsert'], multi=update['multi']) except pymongo.errors.OperationFailure as e: exceptionOne = e @@ -248,8 +248,8 @@ def test_update(collection1, collection2, verbose=False): if verbose: all = [x for x in collection2.find(dict())] for item in collection2.find(update['query']): - print '[{}]Before update doc:{}'.format(type(collection2), item) - print 'Before update collection2 size: ', len(all) + print ('[{}]Before update doc:{}'.format(type(collection2), item)) + print ('Before update collection2 size: ', len(all)) collection2.update(update['query'], update['update'], upsert=update['upsert'], multi=update['multi']) except pymongo.errors.OperationFailure as e: exceptionTwo = e @@ -265,9 +265,9 @@ def test_update(collection1, collection2, verbose=False): # TODO re-enable consistency check when failure happened return (True, True) else: - print 'Unmatched result: ' - print type(exceptionOne), ': ', str(exceptionOne) - print type(exceptionTwo), ': ', str(exceptionTwo) + print ('Unmatched result: ') + print (type(exceptionOne), ': ', 
str(exceptionOne)) + print (type(exceptionTwo), ': ', str(exceptionTwo)) ignored_exception_check(exceptionOne) ignored_exception_check(exceptionTwo) return (False, False) @@ -307,30 +307,30 @@ def _run_operation_(op1, op2): func1(*args1, **kwargs1) except pymongo.errors.OperationFailure as e: if verbose: - print "Failed func1 with " + str(e) + print ("Failed func1 with " + str(e)) exceptionOne = e except MongoModelException as e: if verbose: - print "Failed func1 with " + str(e) + print ("Failed func1 with " + str(e)) exceptionOne = e try: func2(*args2, **kwargs2) except pymongo.errors.OperationFailure as e: if verbose: - print "Failed func2 with " + str(e) + print ("Failed func2 with " + str(e)) exceptionTwo = e except MongoModelException as e: if verbose: - print "Failed func2 with " + str(e) + print ("Failed func2 with " + str(e)) exceptionTwo = e if ((exceptionOne is None and exceptionTwo is None) or (exceptionOne is not None and exceptionTwo is not None and exceptionOne.code == exceptionTwo.code)): pass else: - print 'Unmatched result: ' - print type(exceptionOne), ': ', str(exceptionOne) - print type(exceptionTwo), ': ', str(exceptionTwo) + print ('Unmatched result: ') + print (type(exceptionOne), ': ', str(exceptionOne)) + print (type(exceptionTwo), ': ', str(exceptionTwo)) okay = False ignored_exception_check(exceptionOne) ignored_exception_check(exceptionTwo) @@ -383,7 +383,7 @@ def _run_operation_(op1, op2): (collection2.insert, (docs,), {}) ) if not okay: - print "Failed when doing inserts" + print ("Failed when doing inserts") return (okay, fname, None) if not indexes_first: @@ -398,7 +398,7 @@ def _run_operation_(op1, op2): (collection2.ensure_index, (i,), {"unique": uniqueIndex}) ) if not okay: - print "Failed when adding index after insert" + print ("Failed when adding index after insert") return (okay, fname, None) ii += 1 @@ -410,7 +410,7 @@ def _run_operation_(op1, op2): okay, skip_current_iteration = test_update(collection1, collection2, verbose) 
if skip_current_iteration: if verbose: - print "Skipping current iteration due to the failure from update." + print ("Skipping current iteration due to the failure from update.") return (True, fname, None) if not okay: return (okay, fname, None) @@ -442,7 +442,7 @@ def _run_operation_(op1, op2): return (okay, fname, None) except IgnoredException as e: - print "Ignoring EXCEPTION: ", e.message + print ("Ignoring EXCEPTION: ", e.message) return True, fname, None except Exception as e: import traceback @@ -489,9 +489,9 @@ def test_forever(ns): collection1 = client1[dbName][collName] collection2 = client2[dbName][collName] - print '========================================================' - print 'PID : ' + str(os.getpid()) + ' iteration : ' + str(jj) + ' DB : ' + dbName + ' Collection: ' + collName - print '========================================================' + print ('========================================================') + print ('PID : ' + str(os.getpid()) + ' iteration : ' + str(jj) + ' DB : ' + dbName + ' Collection: ' + collName) + print ('========================================================') (okay, fname, e) = one_iteration(collection1, collection2, ns, seed) if not okay: @@ -500,11 +500,11 @@ def test_forever(ns): # print 'File for failing iteration: ', fname with open(fname, 'r') as fp: for line in fp: - print line + print (line) break # Generate a new seed and start over - seed = random.randint(0, sys.maxint) + seed = random.randint(0, sys.maxsize) gen.global_prng = random.Random(seed) # house keeping @@ -567,13 +567,13 @@ def tester_thread(c1, c2): time.sleep(1) if not t1.is_alive(): sys.stdout = oldstdout - print 'SUCCESS: Test harness found artificial bug' + print ('SUCCESS: Test harness found artificial bug') break sys.stdout = oldstdout if t1.is_alive(): - print 'FAILURE: Test harness did not find obvious artificial bug in 5 seconds' + print ('FAILURE: Test harness did not find obvious artificial bug in 5 seconds') sys.stdout = 
NullWriter() @@ -583,12 +583,12 @@ def tester_thread(c1, c2): time.sleep(1) if not t2.is_alive(): sys.stdout = oldstdout - print 'FAILURE: Test of model vs. itself did not match' + print ('FAILURE: Test of model vs. itself did not match') return sys.stdout = oldstdout - print 'SUCCESS: Model was consistent with itself' + print ('SUCCESS: Model was consistent with itself') if __name__ == '__main__': @@ -605,7 +605,7 @@ def tester_thread(c1, c2): parser_forever.add_argument('1', choices=['mongo', 'mm', 'doclayer'], help='first tester') parser_forever.add_argument('2', choices=['mongo', 'mm', 'doclayer'], help='second tester') parser_forever.add_argument( - '-s', '--seed', type=int, default=random.randint(0, sys.maxint), help='random seed to use') + '-s', '--seed', type=int, default=random.randint(0, sys.maxsize), help='random seed to use') parser_forever.add_argument('--no-updates', default=True, action='store_false', help='disable update tests') parser_forever.add_argument( '--no-sort', default=True, action='store_false', help='disable non-deterministic sort tests') diff --git a/test/correctness/driver-correctness.py b/test/correctness/driver-correctness.py index c4a37e0..8e6c605 100644 --- a/test/correctness/driver-correctness.py +++ b/test/correctness/driver-correctness.py @@ -58,11 +58,11 @@ def test_all(times=300): return tests -print yaml.safe_dump({ +print (yaml.safe_dump({ 'host': 'localhost', 'port': 8031, 'database': 'test', 'collection': 'test', 'tests': test_all() }, - default_flow_style=False) + default_flow_style=False)) diff --git a/test/correctness/gen.py b/test/correctness/gen.py index 0bd8fe2..59b3f2f 100644 --- a/test/correctness/gen.py +++ b/test/correctness/gen.py @@ -318,7 +318,7 @@ def random_elem_match_predicate(): else: r = global_prng.choice([global_prng.uniform(0, 0.4), global_prng.uniform(0.5, 0.9)]) q = random_query(r) - q = {k: q.values()[0][k] for k in q.values()[0]} + q = {k: list(q.values())[0][k] for k in list(q.values())[0]} 
e.update(q) return ('$elemMatch', e) @@ -337,10 +337,10 @@ def random_ne_predicate(): def random_not_predicate(): r = global_prng.uniform(0, 0.9) - q = random_query(r).values()[0] + q = list(random_query(r).values())[0] while type(q) is list or not generator_options.allow_general_nots and ('$not' in q or '$regex' in q): r = global_prng.uniform(0, 0.9) - q = random_query(r).values()[0] + q = list(random_query(r).values())[0] return ('$not', q) @@ -419,12 +419,12 @@ def random_update_operator_mul(): def random_update_operator_rename(): doc = {} - while len(doc.keys()) == 0: + while len(list(doc.keys())) == 0: for i in range(0, global_prng.randint(0, 3)): old_name = random_field_name() new_name = random_field_name() - if old_name != new_name and old_name not in doc.values() and new_name not in doc.keys( - ) and new_name not in doc.values(): + if old_name != new_name and old_name not in list(doc.values()) and new_name not in list(doc.keys() + ) and new_name not in list(doc.values()): doc[old_name] = new_name return {'$rename': doc} diff --git a/test/correctness/mongo_model.py b/test/correctness/mongo_model.py index 3fabb79..1969345 100644 --- a/test/correctness/mongo_model.py +++ b/test/correctness/mongo_model.py @@ -114,7 +114,7 @@ def expand(field, document, check_last_array, expand_array, add_last_array, chec ret.append(None) if debug: - print ret + print (ret) return ret @@ -124,11 +124,11 @@ def evaluate(field, query, document, options, debug=False): # Transform logical (and effectively logical) operators if field == '$and': - return False not in [evaluate(q.keys()[0], q.values()[0], document, options, debug) for q in query] + return False not in [evaluate(list(q.keys())[0], list(q.values())[0], document, options, debug) for q in query] elif field == '$or': - return True in [evaluate(q.keys()[0], q.values()[0], document, options) for q in query] + return True in [evaluate(list(q.keys())[0], list(q.values())[0], document, options) for q in query] elif field == 
'$nor': - return True not in [evaluate(q.keys()[0], q.values()[0], document, options) for q in query] + return True not in [evaluate(list(q.keys())[0], list(q.values())[0], document, options) for q in query] if type(query) == dict: if '$not' in query: @@ -230,7 +230,7 @@ def pred(value, query, options): debug=debug) if debug: - print values + print (values) if len(values) == 0: return False @@ -432,12 +432,12 @@ def collection_names(self): def getTypeCode(value): if value is None: return "20" - elif isinstance(value, (long, float, int)): + elif isinstance(value, (int, float, int)): return "30" elif isinstance(value, binary.Binary): # this needs to come before basestring because `bson.binary.Binary` is also a subtype of `basestring` return "70" - elif isinstance(value, basestring): + elif isinstance(value, str): return "40" elif isinstance(value, OrderedDict): return "51" @@ -453,7 +453,7 @@ class SortedDict(OrderedDict): def __init__(*args, **kwds): OrderedDict.__init__(*args, **kwds) def __setitem__(self, key, value, dict_setitem=dict.__setitem__): - super(SortedDict, self).__setitem__(key, value, dict_setitem=dict_setitem) + super(SortedDict, self).__setitem__(key, value) items = super(SortedDict, self).items() # group them together first tmp = OrderedDict() @@ -464,7 +464,7 @@ def __setitem__(self, key, value, dict_setitem=dict.__setitem__): tmp["70"] = [] tmp["80"] = [] tmp["100"] = [] - for kv in items: + for kv in list(items): tmp[getTypeCode(kv[0])].append(kv) # import pprint;pprint.pprint(dict(tmp)) # sort each group @@ -479,7 +479,7 @@ def __setitem__(self, key, value, dict_setitem=dict.__setitem__): # print str(sortedItems) super(SortedDict, self).clear() for k, v in sortedItems: - super(SortedDict, self).__setitem__(k, v, dict_setitem=dict_setitem) + super(SortedDict, self).__setitem__(k, v) @staticmethod def fromOrderedDict(orderedDict): @@ -521,7 +521,7 @@ def _insert(self, doc): doc['_id'] = gen.random_object_id() if doc['_id'] in self.data: raise 
MongoModelException("Duplicated value not allowed by unique index", code=11000) - tmp = self.data.values() + [doc] + tmp = list(self.data.values()) + [doc] for index in self.indexes: if not index.inError: index.validate_and_build_entry(tmp) @@ -538,7 +538,7 @@ def insert(self, input): for i in input: if '_id' in i: if not self.options.object_field_order_matters and isinstance(i['_id'], HashableOrderedDict): - i['_id'] = HashableOrderedDict(sorted(i['_id'].items(), key=lambda (key, value): key)) + i['_id'] = HashableOrderedDict(sorted(i['_id'].items(), key=lambda kv: kv[0])) if i['_id'] in all_ids: # print i['_id'] raise MongoModelException("Duplicated value not allowed by unique index", code=11000) @@ -552,7 +552,7 @@ if doc['_id'] in self.data: raise MongoModelException("Duplicated value not allowed by unique index", code=11000) buffer.append(doc) - tmp = self.data.values() + buffer + tmp = list(self.data.values()) + buffer for index in self.indexes: if not index.inError: index.validate_and_build_entry(tmp) @@ -573,11 +573,11 @@ def insert_many(self, list): def find(self, query, fields=None, batch_size=None): if len(query) == 0: - results = self.data.values() + results = list(self.data.values()) else: assert len(query) == 1 # FIXME: test weakness - k = query.keys()[0] - results = [item for item in self.data.values() if evaluate(k, query[k], item, self.options)] + k = list(query.keys())[0] + results = [item for item in list(self.data.values()) if evaluate(k, query[k], item, self.options)] if fields is None: return results @@ -639,13 +639,13 @@ def _get_index_name(keys): if i.name == kwargs["name"]: raise MongoModelException("There is an index with this name and a different key spec", code=29993) - if "unique" in kwargs.keys() and kwargs["unique"]: + if "unique" in list(kwargs.keys()) and kwargs["unique"]: newIndex = MongoUniqueIndex(deduplicatedKeys, kwargs) else: newIndex = MongoIndex(deduplicatedKeys, kwargs) 
self.indexes.append(newIndex) # insert first, since an index can be added but in error state if its constraints are violated. - newIndex.build(self.data.values()) + newIndex.build(list(self.data.values())) return newIndex.name @staticmethod @@ -670,7 +670,7 @@ def validate_update_object(update): if operator_name == '$rename': for field_name in update[operator_name]: rename_target = update[operator_name][field_name] - if not isinstance(rename_target, basestring): + if not isinstance(rename_target, str): raise MongoModelException('$rename target must be a string', code=13494) if rename_target in affected_fields: raise MongoModelException('Field name duplication not allowed with modifiers', code=10150) @@ -687,14 +687,14 @@ def process_update_operator_inc(self, key, update_expression): # print "Update Operator: $inc ", update # validation check: if all fields updated are numerical - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): if k in self.data[key]: # print alert("%s: %s" % (self.data[key][k], str(v))) - if not isinstance(self.data[key][k], (int, long, float)): + if not isinstance(self.data[key][k], (int, int, float)): # print "Filed \"", k, "\" is not numerical type!" raise MongoModelException('Cannot apply $inc to a value of non-numeric type.', code=10140) - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): # print "Inc: key: ", k, " value: ", v if k in self.data[key]: self.data[key][k] = self.data[key][k] + v @@ -705,13 +705,13 @@ def process_update_operator_mul(self, key, update_expression): # print "Update Operator: $mul ", update # validation check: if all fields updated are numerical - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): if k in self.data[key]: - if not isinstance(self.data[key][k], (int, long, float)): + if not isinstance(self.data[key][k], (int, int, float)): # print "Field \"", k, "\" is not numerical type!" 
raise MongoModelException('Cannot apply $mul to a value of non-numeric type.', code=16837) - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): # print "Mul: key: ", k, " value: ", v if k in self.data[key]: self.data[key][k] = self.data[key][k] * v @@ -720,7 +720,7 @@ def process_update_operator_mul(self, key, update_expression): def process_update_operator_rename(self, key, update_expression): # print "Update Operator: $rename ", update - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): # print "Rename: key: ", k, " value: ", v if k in self.data[key]: self.data[key][v] = self.data[key][k] @@ -729,21 +729,21 @@ def process_update_operator_rename(self, key, update_expression): def process_update_operator_set_on_insert(self, key, update_expression, new_doc=False): # print "Update Operator: $setOnInsert ", key, update_expression if new_doc: - self.data[key] = OrderedDict(self.data[key].items() + deepcopy(update_expression.items())) + self.data[key] = OrderedDict(list(self.data[key].items()) + deepcopy(list(update_expression.items()))) def process_update_operator_set(self, key, update_expression): # print "Update Operator: $set ", update - self.data[key] = OrderedDict(self.data[key].items() + deepcopy(update_expression.items())) + self.data[key] = OrderedDict(list(self.data[key].items())+ deepcopy(list(update_expression.items()))) def process_update_operator_unset(self, key, update_expression): # print "Update Operator: $unset ", update - for k in update_expression.keys(): + for k in list(update_expression.keys()): if k in self.data[key]: del self.data[key][k] def process_update_operator_min(self, key, update_expression): # print "Update Operator: $min ", update - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): # print "Inc: key: ", k, " value: ", v if k in self.data[key]: # The $min updates the value of the field to a specified value if the @@ -756,7 +756,7 @@ def 
process_update_operator_min(self, key, update_expression): def process_update_operator_max(self, key, update_expression): # print "Update Operator: $max ", update - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): # print "Inc: key: ", k, " value: ", v if k in self.data[key]: # The $max operator updates the value of the field to a specified value if the @@ -769,7 +769,7 @@ def process_update_operator_max(self, key, update_expression): def process_update_operator_current_date(self, key, update_expression): # print "Update Operator: $currentDate", update_expression - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): if v: # mongoDB use the UTC time self.data[key][k] = datetime.datetime.now(tzutc()) @@ -794,7 +794,7 @@ def append_each(dst, src): def process_update_operator_add_to_set(self, key, update_expression): # print "Update Operator: $addToSet ", update_operator, update - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): # print "Inc: key: ", k, " value: ", v if k in self.data[key]: if isinstance(self.data[key][k], list): @@ -808,7 +808,7 @@ def process_update_operator_add_to_set(self, key, update_expression): def process_update_operator_pop(self, key, update_expression): # print "Update Operator: $pop ", update_operator, update - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): # print "Inc: key: ", k, " value: ", v if k in self.data[key]: if isinstance(self.data[key][k], list): # and len(self.data[key][k]) > 0: @@ -826,7 +826,7 @@ def process_update_operator_pop(self, key, update_expression): def process_update_operator_pull_all(self, key, update_expression): # print "Update Operator: $pullAll ", update_operator, update - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): # print "pullAll: key: ", k, " value: ", v if k in self.data[key]: if isinstance(self.data[key][k], list): @@ -840,7 
+840,7 @@ def evaluate(self, query, document): acc = True if len(query) == 0: return len(document) == 0 - for field in query.keys(): + for field in list(query.keys()): if field == '_id': tmp = OrderedDict() for k,v in sorted(query[field].items(), key= lambda i: i[0]): @@ -852,7 +852,7 @@ def evaluate(self, query, document): def process_update_operator_pull(self, key, update_expression): # print "Update Operator: $pull ", update - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): # print "$pull: key: ", k, " value: ", v if k in self.data[key]: if isinstance(self.data[key][k], list): @@ -876,7 +876,7 @@ def process_update_operator_pull(self, key, update_expression): def process_update_operator_push(self, key, update_expression): # print "Update Operator: $push ", update - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): # print "Push: key: ", k, " value: ", v if k in self.data[key]: if isinstance(self.data[key][k], list): @@ -926,19 +926,19 @@ def process_update_operator_push(self, key, update_expression): def process_update_operator_bit(self, key, update_expression): # print "Update Operator: $bit ", update_operator, update # validation check: if all fields updated are numerical - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): if k in self.data[key]: - if not isinstance(self.data[key][k], (int, long)): + if not isinstance(self.data[key][k], (int, int)): # print "Filed \"", k, "\" is not numerical type!" 
raise MongoModelException('Cannot apply $bit to a value of non-integeral type.', code=10138) - for k, v in update_expression.iteritems(): + for k, v in update_expression.items(): # print "Bit: key: ", k, " value: ", v if k not in self.data[key]: self.data[key][k] = 0 - bit_operator = v.keys()[0] - bit_num = v[v.keys()[0]] - if not isinstance(bit_num, (int, long, float)): + bit_operator = list(v.keys())[0] + bit_num = v[list(v.keys())[0]] + if not isinstance(bit_num, (int, int, float)): raise MongoModelException('$bit field must be a number', code=10139) # print "op:", bit_operator, "num:", bit_num, "self.data[key][k]:", self.data[key][k] if bit_operator == "and": @@ -976,7 +976,7 @@ def deep_transform_logical_operators(self, selector=None): if not selector: return None - for k, v in selector.iteritems(): + for k, v in selector.items(): if k in operators['logical']: if isinstance(v, list): if k == '$or' and len(v) > 1: @@ -986,11 +986,11 @@ def deep_transform_logical_operators(self, selector=None): else: for i in v: if isinstance(i, dict): - for kk, vv in i.iteritems(): + for kk, vv in i.items(): result = has_operator(kk) if result: logical_child = self.deep_transform_logical_operators(i) - for kkk, vvv in logical_child.iteritems(): + for kkk, vvv in logical_child.items(): new_selector[kkk] = vvv else: new_selector[kk] = vv @@ -1092,19 +1092,19 @@ def transform_operator_query_to_upsert(self, selector): if operator == '$all': selector = self.deep_transform_logical_operators(deep_convert_compound_string_to_dict(selector)) else: - for k, v in selector.iteritems(): + for k, v in selector.items(): count = 0 if isinstance(v, dict): - for kk, vv in v.iteritems(): + for kk, vv in v.items(): count += 1 if kk == operator: selector = {} if count > 1: raise MongoModelException("bad query!") elif (depth == 2 and operator not in operators['logical']): - for k, v in selector.iteritems(): + for k, v in selector.items(): if isinstance(v, dict): - for kk, vv in v.iteritems(): + for 
kk, vv in v.items(): if kk == operator: selector = {} break @@ -1125,13 +1125,13 @@ def update(self, query, update, upsert, multi): if len(query) == 0: # Update all existing docs. And since the query is empty, do NOT do upsert. any = True - for k in self.data.keys(): + for k in list(self.data.keys()): n +=1 self.process_update_operator(k, update) if not multi: break - key = query.keys()[0] - for k, item in self.data.iteritems(): + key = list(query.keys())[0] + for k, item in self.data.items(): if evaluate(key, query[key], item, self.options): any = True n += 1 @@ -1142,7 +1142,7 @@ def update(self, query, update, upsert, multi): if any: for index in self.indexes: if not index.inError: - index.validate_and_build_entry(self.data.values()) + index.validate_and_build_entry(list(self.data.values())) except MongoModelException as e: self.data = old_data raise e @@ -1169,7 +1169,7 @@ def update(self, query, update, upsert, multi): del self.data[new_id] for index in self.indexes: if not index.inError: - index.validate_and_build_entry(self.data.values()) + index.validate_and_build_entry(list(self.data.values())) except MongoModelException as e: # print "delete new_id", new_id, "because of the exception" if new_id in self.data: @@ -1181,7 +1181,7 @@ def update(self, query, update, upsert, multi): self.insert(update) elif not any: # mongoDB raise an exception for the '$setOnInsert' update operator even if the upsert is False - if '$setOnInsert' in update.keys() and len(update['$setOnInsert']) == 0: + if '$setOnInsert' in list(update.keys()) and len(update['$setOnInsert']) == 0: raise MongoModelException( "'$setOnInsert' is empty. 
You must specify a field like so: {$mod: {: ...}}", code=26840) return {'n': n} diff --git a/test/correctness/preload_database.py b/test/correctness/preload_database.py index b37f392..dfe490f 100755 --- a/test/correctness/preload_database.py +++ b/test/correctness/preload_database.py @@ -216,14 +216,14 @@ def preload_database(ns): doc["_id"] = str(i) docs.append(doc) collection.insert(docs, safe=False) - print "Inserted " + str(i) + print ("Inserted " + str(i)) # collection.insert(docs) # print [i for i in collection.find()] - print "Inserted " + str(ns['number']) + " documents" - print "Database: test" - print "Collection: " + ('performance' + str(instance)[2:] if ns['collection'] == '' else ns['collection']) + print ("Inserted " + str(ns['number']) + " documents") + print ("Database: test") + print ("Collection: " + ('performance' + str(instance)[2:] if ns['collection'] == '' else ns['collection'])) if __name__ == "__main__": diff --git a/test/correctness/setup_mongo.py b/test/correctness/setup_mongo.py index c3fd049..2277e3f 100755 --- a/test/correctness/setup_mongo.py +++ b/test/correctness/setup_mongo.py @@ -39,12 +39,12 @@ def init_replica_set(shard_port, shard_addresses, index): repl_set = {"_id": "rs" + str(index) + ".0", "members": members} shard.admin.command('replSetInitiate', repl_set) - print 'Replica set initialized with: ' - print repl_set + print ('Replica set initialized with: ') + print (repl_set) except errors.OperationFailure as e: if 'already initialized' in str(e.message): - print 'Replica set already initialized, continuing.' + print ('Replica set already initialized, continuing.') else: raise e @@ -54,12 +54,12 @@ def init_replica_set(shard_port, shard_addresses, index): def add_shard(mongos, replSet): try: mongos.admin.command('addShard', replSet['_id'] + "/" + replSet['members'][0]['host']) - print 'Shard added.' 
+ print ('Shard added.') except errors.OperationFailure as e: if 'duplicate key' in str(e.message): - print 'Shard already added, continuing.' + print ('Shard already added, continuing.') elif 'exists in another' in str(e.message): - print 'Shard already added and enabled for DB, continuing.' + print ('Shard already added and enabled for DB, continuing.') else: raise e @@ -67,10 +67,10 @@ def add_shard(mongos, replSet): def enable_sharding_on_d_b(mongos, db_name): try: mongos.admin.command('enableSharding', db_name) - print 'Sharding enabled on DB.' + print ('Sharding enabled on DB.') except errors.OperationFailure as e: if 'already enabled' in str(e.message): - print 'Sharding already enabled on DB, continuing.' + print ('Sharding already enabled on DB, continuing.') else: raise e @@ -80,10 +80,10 @@ def enable_sharding_on_collection(mongos, db_name, collection_name): collection = mongos[db_name][collection_name] collection.ensure_index([("_id", pymongo.HASHED)]) mongos.admin.command('shardCollection', db_name + "." + collection_name, key={"_id": "hashed"}) - print 'Sharded collection.' + print ('Sharded collection.') except errors.OperationFailure as e: if 'already sharded' in str(e.message): - print 'Collection already sharded, continuing.' + print ('Collection already sharded, continuing.') else: raise e @@ -147,4 +147,4 @@ def group(lst, n): "big_documents": True }) else: - print "Incorrected or missing shard addresses - exiting.." 
+ print ("Incorrected or missing shard addresses - exiting..") diff --git a/test/correctness/smoke/test_core.py b/test/correctness/smoke/test_core.py index 67a0212..b20678b 100644 --- a/test/correctness/smoke/test_core.py +++ b/test/correctness/smoke/test_core.py @@ -31,7 +31,7 @@ def operator_queries(base_query): q_list = [base_query] - q_list.extend([{base_query.keys()[0]: {operator: base_query.values()[0]}} for operator in value_operators]) + q_list.extend([{list(base_query.keys())[0]: {operator: list(base_query.values())[0]}} for operator in value_operators]) return q_list @@ -346,7 +346,7 @@ def test_non_deterministic_query(): ] for idx, test_item in enumerate(data): - print "\n========== test", idx, "==========" + print ("\n========== test", idx, "==========") (sort, limit, skip, query, input_data, result, expected) = test_item input_data = util.generate_list_of_ordered_dict_from_json(input_data) result = util.generate_list_of_ordered_dict_from_json(result) @@ -383,7 +383,7 @@ def test_is_ambiguous_field_name_in_array(): ] for idx, test_item in enumerate(data): - print "\n========== isAmbiguousFieldNameInArray test", idx, "==========" + print ("\n========== isAmbiguousFieldNameInArray test", idx, "==========") (src_list, path, ambiguous) = test_item src_list = util.generate_list_of_ordered_dict_from_json(src_list) for obj in src_list: diff --git a/test/correctness/smoke/test_distinct.py b/test/correctness/smoke/test_distinct.py index bf64a66..f79e450 100644 --- a/test/correctness/smoke/test_distinct.py +++ b/test/correctness/smoke/test_distinct.py @@ -25,9 +25,9 @@ def _generate_unique_int(seen): - tmp = random.randint(0, sys.maxint) + tmp = random.randint(0, sys.maxsize) while tmp in seen: - tmp = random.randint(0, sys.maxint) + tmp = random.randint(0, sys.maxsize) seen.add(tmp) return tmp @@ -60,7 +60,7 @@ def transform(elm): collection.insert_one(record) actual_return = map(transform, collection.distinct(field, query)) expected_return = map(transform, 
expected_return) - assert len(actual_return) == len(expected_return) and set(actual_return) == set(expected_return), \ + assert len(list(actual_return)) == len(list(expected_return)) and set(actual_return) == set(expected_return), \ "{} failed. Expected: {}; Actual: {}".format(test_name, expected_return, actual_return) @@ -72,7 +72,7 @@ def test_values_with_arrays(fixture_collection): # {"k1": 1, "k2": [4, [1]]} # when query collection.distinct("k2"), we should get [1,2,3,4,5,[1]] number_of_records = random.randint(1, 100) - key = "test_key_{}".format(random.randint(0, sys.maxint)) + key = "test_key_{}".format(random.randint(0, sys.maxsize)) records = [] ids = set() values = set() @@ -86,7 +86,7 @@ def test_values_with_arrays(fixture_collection): array_value = [] for _ in range(0, array_size): array_value.append(_generate_unique_int(values)) - records.append({"_id": random.randint(0, sys.maxint), key: array_value}) + records.append({"_id": random.randint(0, sys.maxsize), key: array_value}) elif vType == 1: # add an array with ints and arrays as its elements array_size = random.randint(1, 5) @@ -95,13 +95,13 @@ def test_values_with_arrays(fixture_collection): if random.randint(0, 1) == 1: array_value.append(_generate_unique_int(values)) else: - tmp = random.randint(0, sys.maxint) + tmp = random.randint(0, sys.maxsize) values.add(tuple([tmp])) array_value.append([tmp]) - records.append({"_id": random.randint(0, sys.maxint), key: array_value}) + records.append({"_id": random.randint(0, sys.maxsize), key: array_value}) else: # add ints - records.append({"_id": random.randint(0, sys.maxint), key: _generate_unique_int(values)}) + records.append({"_id": random.randint(0, sys.maxsize), key: _generate_unique_int(values)}) def transform(elm): if isinstance(elm, tuple): @@ -115,21 +115,21 @@ def transform(elm): def test_values_no_duplicates_no_query(fixture_collection): number_of_records = random.randint(1, 100) - key = "test_key_{}".format(random.randint(0, sys.maxint)) + 
key = "test_key_{}".format(random.randint(0, sys.maxsize)) records = [] ids = set() values = set() for _ in range(0, number_of_records): id = _generate_unique_int(ids) value = _generate_unique_int(values) - records.append({"_id": random.randint(0, sys.maxint), key: value}) + records.append({"_id": random.randint(0, sys.maxsize), key: value}) distinct_test("[Values with no duplicates; No query]", fixture_collection, key, records, list(values), None) def test_values_no_duplicates_with_query(fixture_collection): number_of_records = random.randint(1, 100) - key = "test_key_{}".format(random.randint(0, sys.maxint)) + key = "test_key_{}".format(random.randint(0, sys.maxsize)) key2 = "test_key_1" records = [] ids = set() @@ -138,28 +138,28 @@ def test_values_no_duplicates_with_query(fixture_collection): key2_val = random.randint(0, 1) id = _generate_unique_int(ids) value = _generate_unique_int(values[key2_val]) - records.append({"_id": random.randint(0, sys.maxint), key: value, key2: key2_val}) + records.append({"_id": random.randint(0, sys.maxsize), key: value, key2: key2_val}) distinct_test("[Values with no duplicates; With query]", fixture_collection, key, records, list(values[key2_val]), {key2: key2_val}) def test_values_with_duplicates_no_query(fixture_collection): number_of_records = random.randint(1, 100) - key = "test_key_{}".format(random.randint(0, sys.maxint)) + key = "test_key_{}".format(random.randint(0, sys.maxsize)) records = [] ids = set() values = set() for _ in range(0, number_of_records): id = _generate_unique_int(ids) value = _generate_random_duplicated_int(values) - records.append({"_id": random.randint(0, sys.maxint), key: value}) + records.append({"_id": random.randint(0, sys.maxsize), key: value}) distinct_test("[Values with duplicates; No query]", fixture_collection, key, records, list(values), None) def test_values_with_duplicates_with_query(fixture_collection): number_of_records = random.randint(1, 100) - key = 
"test_key_{}".format(random.randint(0, sys.maxint)) + key = "test_key_{}".format(random.randint(0, sys.maxsize)) key2 = "test_key_1" records = [] ids = set() @@ -168,7 +168,7 @@ def test_values_with_duplicates_with_query(fixture_collection): key2_val = random.randint(0, 1) id = _generate_unique_int(ids) value = _generate_random_duplicated_int(values[key2_val]) - records.append({"_id": random.randint(0, sys.maxint), key: value, key2: key2_val}) + records.append({"_id": random.randint(0, sys.maxsize), key: value, key2: key2_val}) distinct_test("[Values with duplicates; With query]", fixture_collection, key, records, list(values[key2_val]), {key2: key2_val}) diff --git a/test/correctness/smoke/test_numerical.py b/test/correctness/smoke/test_numerical.py index c69bd17..5900498 100644 --- a/test/correctness/smoke/test_numerical.py +++ b/test/correctness/smoke/test_numerical.py @@ -23,7 +23,7 @@ def check_wr_number(collection, number): - id = random.randint(0, sys.maxint) + id = random.randint(0, sys.maxsize) # Make sure we first delete record if it exists collection.delete_many({"_id": id}) # update/insert if needed @@ -36,11 +36,11 @@ def check_wr_number(collection, number): def test_min_int(fixture_collection): - check_wr_number(fixture_collection, int(-1 - sys.maxint)) + check_wr_number(fixture_collection, int(-1 - sys.maxsize)) def test_max_int(fixture_collection): - check_wr_number(fixture_collection, int(sys.maxint)) + check_wr_number(fixture_collection, int(sys.maxsize)) def test_min_dbl(fixture_collection): diff --git a/test/correctness/smoke/test_unique_index.py b/test/correctness/smoke/test_unique_index.py index 5d5ee58..5ff6724 100644 --- a/test/correctness/smoke/test_unique_index.py +++ b/test/correctness/smoke/test_unique_index.py @@ -39,7 +39,7 @@ def _wrap(func, args, kwargs, expected_failure): def test_insert_single_field_unique_index(fixture_collection): collection = fixture_collection - random_key = "key-{}".format(random.randint(0, sys.maxint)) + 
random_key = "key-{}".format(random.randint(0, sys.maxsize)) random_key_2 = "{}-2".format(random_key) collection.create_index([(random_key, pymongo.ASCENDING)], unique=True) @@ -51,7 +51,7 @@ def test_insert_single_field_unique_index(fixture_collection): def test_insert_compound_unique_index(fixture_collection): collection = fixture_collection - random_key = "key-{}".format(random.randint(0, sys.maxint)) + random_key = "key-{}".format(random.randint(0, sys.maxsize)) random_key_2 = "{}-2".format(random_key) random_key_3 = "{}-3".format(random_key) collection.create_index([(random_key, pymongo.ASCENDING), (random_key_2, pymongo.ASCENDING)], unique=True) @@ -66,10 +66,10 @@ def test_insert_compound_unique_index(fixture_collection): def test_update_single_field_unique_index(fixture_collection): collection = fixture_collection - random_key = "key-{}".format(random.randint(0, sys.maxint)) + random_key = "key-{}".format(random.randint(0, sys.maxsize)) collection.create_index([(random_key, pymongo.ASCENDING)], unique=True) - id1 = random.randint(0, sys.maxint) - id2 = random.randint(0, sys.maxint) + id1 = random.randint(0, sys.maxsize) + id2 = random.randint(0, sys.maxsize) assert _wrap(collection.insert_one, ({random_key: 1, "_id": id1},), {}, False), "non-duplicated non-null insert failed" assert _wrap(collection.insert_one, ({random_key: 2, "_id": id2},), {}, False), "non-duplicated non-null insert failed" @@ -88,13 +88,13 @@ def test_update_single_field_unique_index(fixture_collection): def test_update_compound_unique_index(fixture_collection): collection = fixture_collection - random_key = "key-{}".format(random.randint(0, sys.maxint)) + random_key = "key-{}".format(random.randint(0, sys.maxsize)) random_key_2 = "{}-2".format(random_key) assert _wrap(collection.create_index, ([(random_key, pymongo.ASCENDING), (random_key_2, pymongo.ASCENDING)],), {'unique': True}, False), "non-duplicated non-null index creation failed" - id1 = random.randint(0, sys.maxint) - id2 = 
random.randint(0, sys.maxint) + id1 = random.randint(0, sys.maxsize) + id2 = random.randint(0, sys.maxsize) assert _wrap(collection.insert_one, ({ random_key: 1, random_key_2: 1, @@ -119,10 +119,10 @@ def test_update_compound_unique_index(fixture_collection): def test_update_single_field_unique_index_with_same_value(fixture_collection): collection = fixture_collection - random_key = "key-{}".format(random.randint(0, sys.maxint)) + random_key = "key-{}".format(random.randint(0, sys.maxsize)) collection.create_index([(random_key, pymongo.ASCENDING)], unique=True) - id1 = random.randint(0, sys.maxint) - id2 = random.randint(0, sys.maxint) + id1 = random.randint(0, sys.maxsize) + id2 = random.randint(0, sys.maxsize) assert _wrap(collection.insert_one, ({ random_key: 1, "_id": id1 @@ -145,14 +145,14 @@ def test_update_single_field_unique_index_with_same_value(fixture_collection): def test_update_compound_unique_index_with_same_value(fixture_collection): collection = fixture_collection - random_key = "key-{}".format(random.randint(0, sys.maxint)) + random_key = "key-{}".format(random.randint(0, sys.maxsize)) random_key_2 = "{}-2".format(random_key) assert _wrap(collection.create_index, ([(random_key, pymongo.ASCENDING), (random_key_2, pymongo.ASCENDING)],), {'unique': True}, False), "non-duplicated non-null index creation failed" - id1 = random.randint(0, sys.maxint) - id2 = random.randint(0, sys.maxint) + id1 = random.randint(0, sys.maxsize) + id2 = random.randint(0, sys.maxsize) assert _wrap(collection.insert_one, ({ random_key: 1, random_key_2: 1, @@ -177,7 +177,7 @@ def test_update_compound_unique_index_with_same_value(fixture_collection): def test_create_single_field_unique_index(fixture_collection): collection = fixture_collection - random_key = "key-{}".format(random.randint(0, sys.maxint)) + random_key = "key-{}".format(random.randint(0, sys.maxsize)) random_key_2 = "{}-2".format(random_key) assert _wrap(collection.insert_one, ({ @@ -194,7 +194,7 @@ def 
test_create_single_field_unique_index(fixture_collection): def test_create_compound_unique_index(fixture_collection): collection = fixture_collection - random_key = "key-{}".format(random.randint(0, sys.maxint)) + random_key = "key-{}".format(random.randint(0, sys.maxsize)) random_key_2 = "{}-2".format(random_key) assert _wrap(collection.insert_one, ({ @@ -211,7 +211,7 @@ def test_create_compound_unique_index(fixture_collection): def test_unique_index_backgroud_build_request(fixture_collection): collection = fixture_collection - random_key = "key-{}".format(random.randint(0, sys.maxint)) + random_key = "key-{}".format(random.randint(0, sys.maxsize)) try: collection.create_index([(random_key, pymongo.ASCENDING)], unique=True, background=True) assert False, "did not get the expected error" diff --git a/test/correctness/smoke/test_upsert.py b/test/correctness/smoke/test_upsert.py index ba1399d..5822514 100644 --- a/test/correctness/smoke/test_upsert.py +++ b/test/correctness/smoke/test_upsert.py @@ -156,7 +156,7 @@ def create_upsert_dotted_selector_operator_test_with_operator_in_initial_positio def create_operator_tests(operators, depth, update): return [ - func(operator, object, depth, update) for name, func in globals().iteritems() + func(operator, object, depth, update) for name, func in globals().items() if name.startswith('create_upsert_') for operator, object in operators ] @@ -404,7 +404,7 @@ def create_operator_permutation_tests(oplist, depth, repeat, update): def operators_test_with_depth(dl_collection, depth): for update in updates: - for operator_type, operators in operator_types.iteritems(): + for operator_type, operators in operator_types.items(): for test_cfg in create_operator_tests(operators, depth, update): run_and_compare(dl_collection, test_cfg) @@ -431,7 +431,7 @@ def test_operators_with_depth_4(fixture_collection): def operators_permutation_test_with_depth(dl_collection, depth): oplist = [] - for _, operators in operator_types.iteritems(): + for 
_, operators in operator_types.items(): for op in operators: oplist.append(op) diff --git a/test/correctness/test-automation.py b/test/correctness/test-automation.py index 0872843..d0046c8 100644 --- a/test/correctness/test-automation.py +++ b/test/correctness/test-automation.py @@ -78,11 +78,11 @@ def show_statistics(ns): name for name in os.listdir(run_path) if os.path.isfile(os.path.join(run_path, name)) and not name.endswith('.timeout') ]) - print "Total tests run: ", str(run_count) + print ("Total tests run: ", str(run_count)) failed_count = len( [f for f in os.listdir(run_path) if f.endswith('.failed') and os.path.isfile(os.path.join(run_path, f))]) - print "Failing tests : ", str(failed_count) + print ("Failing tests : ", str(failed_count)) # this assumes that the database name we use for testing is "test" and we are interested in doclayer used_mem = float(0.0) @@ -91,7 +91,7 @@ def show_statistics(ns): client = pymongo.MongoClient(ns['doclayer_host'], ns['doclayer_port'], max_pool_size=1) cmd_output = client.test.command("getmemoryusage") used_mem = float(cmd_output['process memory usage']) - print "Used memory by FDBDOC : ", str(used_mem) + print ("Used memory by FDBDOC : ", str(used_mem)) def non_block_readline(output): @@ -159,9 +159,9 @@ def test_auto_forever(ns): run_min = (curr_time - start_time) / 60 if 0 < num_min < run_min and is_time_to_stop == False: - print "Time to stop, time to run was set to", num_min - print "Start time:", time.asctime(time.localtime(start_time)) - print "Finish time:", time.asctime(time.localtime(curr_time)) + print ("Time to stop, time to run was set to", num_min) + print ("Start time:", time.asctime(time.localtime(start_time))) + print ("Finish time:", time.asctime(time.localtime(curr_time))) is_time_to_stop = True # if test run did not updated itself in 2 minutes, perhaps it is stuck ? 
@@ -171,7 +171,7 @@ def test_auto_forever(ns): if stop_unresponsive: fname = run_path + "journal_" + str(instances[ii]) + ".running" fname = util.rename_file(fname, ".timeout") - print "Process was stopped because of timeout, check out this file for more info : ", fname + print ("Process was stopped because of timeout, check out this file for more info : ", fname) kill_process(processes[ii]) del processes[ii] del instances[ii] @@ -187,10 +187,10 @@ def test_auto_forever(ns): sys.stdout.flush() except Exception as inst: - print inst - print "Unexpected error:", sys.exc_info()[0] + print (inst) + print ("Unexpected error:", sys.exc_info()[0]) pass - print "AUTOMATION FINISHED ITS WORK" + print ("AUTOMATION FINISHED ITS WORK") if __name__ == '__main__': diff --git a/test/correctness/util.py b/test/correctness/util.py index 34c77cc..c665976 100644 --- a/test/correctness/util.py +++ b/test/correctness/util.py @@ -30,11 +30,9 @@ from collections import OrderedDict from collections import defaultdict from datetime import datetime -from types import NoneType - import bson.timestamp from bson import ObjectId, binary - +NoneType = type(None) from gen import HashableOrderedDict import gen @@ -78,13 +76,12 @@ def dedup(seq): datetime: 9, NoneType: 10, int: 16, - bson.timestamp.Timestamp: 17, - long: 18 + bson.timestamp.Timestamp: 17 } def is_literal(d): - return True not in [k[0] == '$' for k in d.keys()] + return True not in [k[0] == '$' for k in list(d.keys())] def is_numeric_type_code(code): @@ -94,6 +91,11 @@ def is_numeric_type_code(code): def is_numeric(field): return type(field) in BSON_type_codes and is_numeric_type_code(BSON_type_codes[type(field)]) +def cmp(term1,term2): + if(term1 is None or term2 is None): + return False + else: + return (term1>term2) - (term1 len(rhs): # lhs is longer(bigger) that rhs return 1 - ret = mongo_compare_pair(kl, vl, rhs.keys()[index], rhs.values()[index]) + ret = mongo_compare_pair(kl, vl, list(rhs.keys())[index], 
list(rhs.values())[index]) if ret != 0: return ret @@ -284,7 +286,7 @@ def mongo_sort_list(array, reverse=False): d[type(v)].append(v) # print 'd0=', d - for k in d.keys(): + for k in list(d.keys()): if k == OrderedDict: d[k] = mongo_sort_list_of_ordered_dict(d[k], reverse=reverse) elif k == dict: @@ -314,7 +316,7 @@ def mongo_sort_list_by_field(array, field_name, reverse=False): new_array1 = list() new_array2 = list() for v in array: - if isinstance(v, (dict, OrderedDict)) and field_name in v.keys() and v[field_name] is not None: + if isinstance(v, (dict, OrderedDict)) and field_name in list(v.keys()) and v[field_name] is not None: new_array2.append(v) else: new_array1.append(v) @@ -331,7 +333,7 @@ def has_item(context, key): try: # print 'key:', key, context if not isinstance(key, (str, int)): - print 'key', key, 'should be either string or integer!' + print ('key', key, 'should be either string or integer!') return False if isinstance(context, (dict, OrderedDict)): @@ -345,7 +347,7 @@ def has_item(context, key): return False if not isinstance(key, str): - print 'key', key, 'is not string!' 
+ print ('key', key, 'is not string!') return False cur, _, rest = key.partition('.') @@ -366,9 +368,9 @@ def mongo_sort_list_by_fields(array, field_name_map): for v in array: if isinstance(v, (dict, OrderedDict)): match = False - for field_name in field_name_map.keys(): + for field_name in list(field_name_map.keys()): # print 'field_name:', field_name, v - if field_name in v.keys() and v[field_name] is not None: + if field_name in list(v.keys()) and v[field_name] is not None: lists_need_to_sort[field_name].append(v) match = True break @@ -385,8 +387,8 @@ def mongo_sort_list_by_fields(array, field_name_map): # print 'new_list:', new_list # print 'lists_need_to_sort:', lists_need_to_sort - for k in field_name_map.keys()[::-1]: - if k in lists_need_to_sort.keys(): + for k in list(field_name_map.keys())[::-1]: + if k in list(lists_need_to_sort.keys()): # print 'k=', k, lists_need_to_sort[k] reverse = (field_name_map[k] != 1) new_k, _, _ = k.partition('.') @@ -408,7 +410,7 @@ def mongo_sort_list_by_fields_list(array, field_name_list): return mongo_sort_list_by_fields(array, field_name_map) def sort_id_field(doc): - if '_id' in doc.keys() and isinstance(doc['_id'], dict): + if '_id' in list(doc.keys()) and isinstance(doc['_id'], dict): od = HashableOrderedDict() for k,v in sorted(doc['_id'].items(), key=lambda kv: kv[0]): od[k] = v @@ -434,7 +436,7 @@ def get_object(str_field, doc): def deep_convert_to_unordered(in_thing): if type(in_thing) in (dict, OrderedDict, HashableOrderedDict): return_dict = {} - for k, v in in_thing.iteritems(): + for k, v in in_thing.items(): return_dict[k] = deep_convert_to_unordered(v) return return_dict elif type(in_thing) is list: @@ -447,7 +449,7 @@ def deep_convert_to_unordered(in_thing): def deep_convert_to_ordered(in_thing): if type(in_thing) in (dict, OrderedDict): return_dict = OrderedDict() - for k, v in in_thing.iteritems(): + for k, v in in_thing.items(): return_dict[k] = deep_convert_to_ordered(v) return return_dict elif 
type(in_thing) is list: @@ -477,7 +479,7 @@ def has_operator(obj, depth=0): else: return False if isinstance(obj, (dict, OrderedDict)): - for k, v in obj.iteritems(): + for k, v in obj.items(): result = has_operator(k, depth + 1) or has_operator(v, depth + 1) if result: operator, depth = result @@ -507,11 +509,11 @@ def deep_convert_compound_string_to_dict(obj): obj[i] = deep_convert_compound_string_to_dict(obj[i]) if isinstance(obj, dict): mutable = OrderedDict(obj) - for k, v in obj.iteritems(): + for k, v in obj.items(): if isinstance(k, str): oo = convert_compound_string_to_dict(k, v) del mutable[k] - for kk, vv in oo.iteritems(): + for kk, vv in oo.items(): mutable[kk] = deep_convert_compound_string_to_dict(vv) obj = mutable @@ -790,24 +792,24 @@ def compare(self, other): assert j == len(result2) except: if not failed: - print '\nSorted list mismatch at index (%d, %d)!' % (i, j) + print ('\nSorted list mismatch at index (%d, %d)!' % (i, j)) - print 'Query: %r' % self.query - print 'Projection: %r' % self.projection - print 'Sort: %r' % self.sort - print 'Skip: %r' % self.skip - print 'Limit: %r' % self.limit - print '\n------------First Mismatch-----------' - print '\n %s' % format_result(self, result1, i) - print ' %s\n' % format_result(other, result2, j) + print ('Query: %r' % self.query) + print ('Projection: %r' % self.projection) + print ('Sort: %r' % self.sort) + print ('Skip: %r' % self.skip) + print ('Limit: %r' % self.limit) + print ('\n------------First Mismatch-----------') + print ('\n %s' % format_result(self, result1, i)) + print (' %s\n' % format_result(other, result2, j)) failed = True - print '\n------------Model Sort Tuple: %r-----------' % self.get_sort_key_values(result1[0]) + print ('\n------------Model Sort Tuple: %r-----------' % self.get_sort_key_values(result1[0])) for i in range(0, max(len(result1), len(result2))): - print '\n%d: %s' % (i, format_result(self, result1, i)) - print '%d: %s' % (i, format_result(other, result2, i)) + 
print ('\n%d: %s' % (i, format_result(self, result1, i))) + print ('%d: %s' % (i, format_result(other, result2, i))) return not failed @@ -894,7 +896,7 @@ def debug_predicate(f): def x(val, query, options): print('\nRunning predicate:\n val=%r,\n query=%r:' % (val, query)), result = f(val, query, options) - print result + print (result) return result return x @@ -959,7 +961,7 @@ def process_query_operator(path, array, query, val_func, options): op = '$regex' else: assert len(expression) == 1 - op = expression.keys()[0] + op = list(expression.keys())[0] if op == '$elemMatch': # This is emulating mongo's apparent behavior to require filter items to only match one filter from an $elemMatch (not all of them) @@ -1038,12 +1040,12 @@ def deep_convert_datetime_to_integer(obj): return int(obj.time / 3600) elif type(obj) is dict: new_obj = {} - for k, v in obj.iteritems(): + for k, v in obj.items(): new_obj[k] = deep_convert_datetime_to_integer(v) return new_obj elif type(obj) is OrderedDict: new_obj = OrderedDict() - for k, v in obj.iteritems(): + for k, v in obj.items(): new_obj[k] = deep_convert_datetime_to_integer(v) return new_obj elif type(obj) is list: @@ -1059,10 +1061,10 @@ def deep_convert_datetime_to_integer(obj): def trace(trace_level, *args): if TRACE_LEVEL_DEFINE.index(trace_level) <= TRACE_LEVEL_DEFINE.index(traceLevel): if trace_level == 'error': - print '\033[91m', + print ('\033[91m',) for x in args: - print x, - print '\033[0m' + print (x,) + print ('\033[0m') def generate_list_of_ordered_dict_from_json(list_json_string): @@ -1124,7 +1126,7 @@ def weaken_tests_for_doclayer(): def get_cmd_line(ns): - seed = random.randint(0, sys.maxint) + seed = random.randint(0, sys.maxsize) gen.global_prng = random.Random(seed) # Generate the random data @@ -1235,7 +1237,7 @@ def test_sort_key_fetcher(): fetcher = SortKeyFetcher(query1, doc1, ModelOptions('')) result = tuple([fetcher.get_sort_value(k, dir) for (k, dir) in sort1]) - print result + print (result) def 
test_mongo_nondeterministic_list():