Claude Sonnet 3.5 - Fill-in + Unit Test Feedback: tinydb
Pytest Summary for tinydb tests
| status    | count |
|-----------|-------|
| passed    | 30    |
| failed    | 170   |
| total     | 200   |
| collected | 200   |
Failed tests:
test_middlewares.py::test_caching_read
def test_caching_read():
    db = TinyDB(storage=CachingMiddleware(MemoryStorage))

>       assert db.all() == []

tests/test_middlewares.py:23:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tinydb/table.py:128: in all
    for doc_id, doc in self._read_table().items()]
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce27388070>

    def _read_table(self) ->Dict[str, Mapping]:
        """
        Read the table data from the underlying storage.

        Documents and doc_ids are NOT yet transformed, as we may not want to
        convert *all* documents when returning only one document for example.
        """
        data = self._storage.read()
>       return data.get(self._name, {})
E       AttributeError: 'NoneType' object has no attribute 'get'

tinydb/table.py:354: AttributeError
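All of the `'NoneType' object has no attribute 'get'` failures in this run share the same shape: the storage's `read()` returns `None` for an empty database while the generated `_read_table` immediately calls `.get()` on the result. A minimal, hedged sketch of the missing guard, using only the `_storage` and `_name` attributes visible in the traceback (not necessarily how the reference implementation spells it):

```python
from typing import Dict, Mapping


def _read_table(self) -> Dict[str, Mapping]:
    """Read the raw table data, tolerating an uninitialized storage."""
    data = self._storage.read()

    # An empty database is typically signalled by read() returning None.
    # Treat that the same as "no tables yet" instead of calling .get() on None.
    if data is None:
        return {}

    return data.get(self._name, {})
```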
test_middlewares.py::test_caching_write
storage =

def test_caching_write(storage):
    # Write contents
    storage.write(doc)
    storage.close()

    # Verify contents: Cache should be emptied and written to storage
>   assert storage.storage.memory
E   assert None
E    +  where None = .memory
E    +  where  = .storage

tests/test_middlewares.py:74: AssertionError
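The assertion checks `storage.storage.memory` after `close()`, i.e. it expects the middleware to flush its write cache into the wrapped storage when it is closed. A rough sketch of that flush-on-close behaviour; the attribute names `cache` and `_cache_modified_count` are illustrative, not taken from the generated code:

```python
class CachingMiddleware(Middleware):
    """Illustrative sketch of the caching behaviour the test relies on."""

    WRITE_CACHE_SIZE = 1000  # flush after this many buffered writes

    def __init__(self, storage_cls):
        super().__init__(storage_cls)
        self.cache = None
        self._cache_modified_count = 0

    def read(self):
        # Serve reads from the cache, filling it from the wrapped storage once.
        if self.cache is None:
            self.cache = self.storage.read()
        return self.cache

    def write(self, data):
        # Buffer writes in memory instead of hitting the wrapped storage.
        self.cache = data
        self._cache_modified_count += 1
        if self._cache_modified_count >= self.WRITE_CACHE_SIZE:
            self.flush()

    def flush(self):
        # Push buffered data down to the wrapped storage.
        if self._cache_modified_count > 0:
            self.storage.write(self.cache)
            self._cache_modified_count = 0

    def close(self):
        # test_caching_write inspects storage.storage.memory after close(),
        # so close() has to flush before closing the wrapped storage.
        self.flush()
        self.storage.close()
```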
test_middlewares.py::test_caching_json_write
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_caching_json_write0') def test_caching_json_write(tmpdir): path = str(tmpdir.join('test.db')) with TinyDB(path, storage=CachingMiddleware(JSONStorage)) as db: > db.insert({'key': 'value'}) tests/test_middlewares.py:92: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:99: in insert doc_id = self._get_next_id() tinydb/table.py:340: in _get_next_id self._next_id = max(self._read_table().keys() or [0]) + 1 _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[ValueError('I/O operation on closed file.') raised in repr()] Table object at 0x7fce278fc340> def _read_table(self) ->Dict[str, Mapping]: """ Read the table data from the underlying storage. Documents and doc_ids are NOT yet transformed, as we may not want to convert *all* documents when returning only one document for example. """ data = self._storage.read() > return data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:354: AttributeError
test_operations.py::test_delete
def test_delete(): db = TinyDB(storage=MemoryStorage) > db.insert({'char': 'a', 'int': 1}) tests/test_operations.py:8: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:99: in insert doc_id = self._get_next_id() tinydb/table.py:340: in _get_next_id self._next_id = max(self._read_table().keys() or [0]) + 1 _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce266cd5d0> def _read_table(self) ->Dict[str, Mapping]: """ Read the table data from the underlying storage. Documents and doc_ids are NOT yet transformed, as we may not want to convert *all* documents when returning only one document for example. """ data = self._storage.read() > return data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:354: AttributeError
test_operations.py::test_add_int[memory]
db =

def test_add_int(db):
>   db.update(add('int', 5), where('char') == 'a')

tests/test_operations.py:14:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tinydb/queries.py:419: in where
    return Query()[key]
tinydb/queries.py:172: in __getitem__
    return self.__getattr__(item)
tinydb/queries.py:168: in __getattr__
    query._hash = ('path', query._path) if self.is_cacheable() else None
tinydb/queries.py:168: in __getattr__
    query._hash = ('path', query._path) if self.is_cacheable() else None
tinydb/queries.py:168: in __getattr__
    query._hash = ('path', query._path) if self.is_cacheable() else None
E   RecursionError: maximum recursion depth exceeded while calling a Python object
!!! Recursion detected (same locals & position)
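Every `RecursionError` in this run bottoms out at the same statement in `Query.__getattr__`, which suggests that an attribute used inside `__getattr__` (for example `_path`, `_hash`, or `is_cacheable`) is itself being resolved through `__getattr__` on a freshly created, not-yet-initialised query. A minimal, self-contained illustration of the pattern that avoids the loop; this is a toy class, not TinyDB's actual `Query`:

```python
class PathQuery:
    """Toy stand-in for a path-building query object."""

    def __init__(self):
        # If these assignments are missing, reading self._path inside
        # __getattr__ falls back to __getattr__ again and recurses forever.
        self._path = ()
        self._hash = ('path', self._path)

    def is_cacheable(self):
        # A regular method is found by normal attribute lookup, so calling
        # it inside __getattr__ cannot re-enter __getattr__.
        return self._hash is not None

    def __getattr__(self, item):
        # Only reached for *missing* attributes, i.e. new path segments.
        query = type(self)()
        query._path = self._path + (item,)
        query._hash = ('path', query._path) if self.is_cacheable() else None
        return query


q = PathQuery().value.nested
print(q._path)   # ('value', 'nested')
```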
test_operations.py::test_add_int[json]
db =def test_add_int(db): > db.update(add('int', 5), where('char') == 'a') tests/test_operations.py:14: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_operations.py::test_add_str[memory]
db =def test_add_str(db): > db.update(add('char', 'xyz'), where('char') == 'a') tests/test_operations.py:19: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_operations.py::test_add_str[json]
db =def test_add_str(db): > db.update(add('char', 'xyz'), where('char') == 'a') tests/test_operations.py:19: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_operations.py::test_subtract[memory]
db =def test_subtract(db): > db.update(subtract('int', 5), where('char') == 'a') tests/test_operations.py:24: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_operations.py::test_subtract[json]
db =def test_subtract(db): > db.update(subtract('int', 5), where('char') == 'a') tests/test_operations.py:24: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_operations.py::test_set[memory]
db =def test_set(db): > db.update(set('char', 'xyz'), where('char') == 'a') tests/test_operations.py:29: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_operations.py::test_set[json]
db =def test_set(db): > db.update(set('char', 'xyz'), where('char') == 'a') tests/test_operations.py:29: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_operations.py::test_increment[memory]
db =def test_increment(db): > db.update(increment('int'), where('char') == 'a') tests/test_operations.py:34: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_operations.py::test_increment[json]
db =def test_increment(db): > db.update(increment('int'), where('char') == 'a') tests/test_operations.py:34: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_operations.py::test_decrement[memory]
db =def test_decrement(db): > db.update(decrement('int'), where('char') == 'a') tests/test_operations.py:39: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_operations.py::test_decrement[json]
db =def test_decrement(db): > db.update(decrement('int'), where('char') == 'a') tests/test_operations.py:39: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_no_path
def test_no_path():
    with pytest.raises(ValueError):
>       _ = Query() == 2

tests/test_queries.py:10:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tinydb/queries.py:205: in __eq__
    return self._generate_test(lambda value: value == rhs, ('==', self.
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = Query()
test = . at 0x7fce26332950>
hashval = ('==', (), None), allow_empty_path = False

    def _generate_test(self, test: Callable[[Any], bool], hashval: Tuple,
                       allow_empty_path: bool=False) ->QueryInstance:
        """
        Generate a query based on a test function that first resolves the
        query path.

        :param test: The test the query executes.
        :param hashval: The hash of the query.
        :return: A :class:`~tinydb.queries.QueryInstance` object
        """
        if not self._path and not allow_empty_path:
>           raise RuntimeError('Query has no path')
E           RuntimeError: Query has no path

tinydb/queries.py:185: RuntimeError
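Unlike the other `test_queries.py` failures, this one is not a recursion problem: the test wraps `Query() == 2` in `pytest.raises(ValueError)`, but the generated `_generate_test` raises `RuntimeError`. Keeping the rest of the method as shown in the traceback, the guard presumably just needs to raise the exception type the suite expects:

```python
if not self._path and not allow_empty_path:
    # The suite expects a ValueError for a query without a path
    # (see pytest.raises(ValueError) in test_no_path above).
    raise ValueError('Query has no path')
```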
test_queries.py::test_path_exists
def test_path_exists(): > query = Query()['value'].exists() tests/test_queries.py:14: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_path_and
def test_path_and(): > query = Query()['value'].exists() & (Query()['value'] == 5) tests/test_queries.py:32: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_callable_in_path_with_map
def test_callable_in_path_with_map(): double = lambda x: x + x > query = Query().value.map(double) == 10 tests/test_queries.py:42: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_callable_in_path_with_chain
def test_callable_in_path_with_chain(): rekey = lambda x: {'y': x['a'], 'z': x['b']} > query = Query().map(rekey).z == 10 tests/test_queries.py:49: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:411: in map query._hash = ('map', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_eq
def test_eq(): > query = Query().value == 1 tests/test_queries.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_ne
def test_ne(): > query = Query().value != 1 tests/test_queries.py:66: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_lt
def test_lt(): > query = Query().value < 1 tests/test_queries.py:79: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_le
def test_le(): > query = Query().value <= 1 tests/test_queries.py:87: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_gt
def test_gt(): > query = Query().value > 1 tests/test_queries.py:95: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_ge
def test_ge(): > query = Query().value >= 1 tests/test_queries.py:102: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_or
def test_or(): query = ( > (Query().val1 == 1) | (Query().val2 == 2) ) tests/test_queries.py:111: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_and
def test_and(): query = ( > (Query().val1 == 1) & (Query().val2 == 2) ) tests/test_queries.py:123: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_not
def test_not(): > query = ~ (Query().val1 == 1) tests/test_queries.py:134: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_has_key
def test_has_key(): > query = Query().val3.exists() tests/test_queries.py:152: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_regex
def test_regex(): > query = Query().val.matches(r'\d{2}\.') tests/test_queries.py:160: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_custom
def test_custom(): def test(value): return value == 42 > query = Query().val.test(test) tests/test_queries.py:192: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_custom_with_params
def test_custom_with_params(): def test(value, minimum, maximum): return minimum <= value <= maximum > query = Query().val.test(test, 1, 10) tests/test_queries.py:216: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_any
def test_any(): > query = Query().followers.any(Query().name == 'don') tests/test_queries.py:226: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_all
def test_all(): > query = Query().followers.all(Query().name == 'don') tests/test_queries.py:251: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_has
def test_has(): > query = Query().key1.key2.exists() tests/test_queries.py:277: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_one_of
def test_one_of(): > query = Query().key1.one_of(['value 1', 'value 2']) tests/test_queries.py:345: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_hash
def test_hash(): d = { > Query().key1 == 2: True, Query().key1.key2.key3.exists(): True, Query().key1.exists() & Query().key2.exists(): True, Query().key1.exists() | Query().key2.exists(): True, } tests/test_queries.py:353: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_orm_usage
def test_orm_usage(): data = {'name': 'John', 'age': {'year': 2000}} User = Query() > query1 = User.name == 'John' tests/test_queries.py:374: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_repr
def test_repr(): Fruit = Query() assert repr(Fruit) == "Query()" > assert repr(Fruit.type == 'peach') == "QueryImpl('==', ('type',), 'peach')" tests/test_queries.py:384: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_subclass
def test_subclass(): # Test that a new query test method in a custom subclass is properly usable class MyQueryClass(Query): def equal_double(self, rhs): return self._generate_test( lambda value: value == rhs * 2, ('equal_double', self._path, rhs) ) > query = MyQueryClass().val.equal_double('42') tests/test_queries.py:396: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_equality
def test_equality(): q = Query() > assert (q.foo == 2) != 0 tests/test_queries.py:414: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_fragment
def test_fragment(): > query = Query().fragment({'a': 4, 'b': True}) tests/test_queries.py:424: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_fragment_with_path
def test_fragment_with_path(): > query = Query().doc.fragment({'a': 4, 'b': True}) tests/test_queries.py:434: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_queries.py::test_get_item
def test_get_item(): > query = Query()['test'] == 1 tests/test_queries.py:442: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_storages.py::test_json_kwargs
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_json_kwargs0') def test_json_kwargs(tmpdir): db_file = tmpdir.join('test.db') db = TinyDB(str(db_file), sort_keys=True, indent=4, separators=(',', ': ')) # Write contents > db.insert({'b': 1}) tests/test_storages.py:36: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:99: in insert doc_id = self._get_next_id() tinydb/table.py:340: in _get_next_id self._next_id = max(self._read_table().keys() or [0]) + 1 _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce278edf00> def _read_table(self) ->Dict[str, Mapping]: """ Read the table data from the underlying storage. Documents and doc_ids are NOT yet transformed, as we may not want to convert *all* documents when returning only one document for example. """ data = self._storage.read() > return data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:354: AttributeError
test_storages.py::test_json_readwrite
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_json_readwrite0') def test_json_readwrite(tmpdir): """ Regression test for issue #1 """ path = str(tmpdir.join('test.db')) # Create TinyDB instance db = TinyDB(path, storage=JSONStorage) item = {'name': 'A very long entry'} item2 = {'name': 'A short one'} def get(s): return db.get(where('name') == s) > db.insert(item) tests/test_storages.py:67: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:99: in insert doc_id = self._get_next_id() tinydb/table.py:340: in _get_next_id self._next_id = max(self._read_table().keys() or [0]) + 1 _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce2675c070> def _read_table(self) ->Dict[str, Mapping]: """ Read the table data from the underlying storage. Documents and doc_ids are NOT yet transformed, as we may not want to convert *all* documents when returning only one document for example. """ data = self._storage.read() > return data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:354: AttributeError
test_storages.py::test_json_read
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_json_read0') def test_json_read(tmpdir): r"""Open a database only for reading""" path = str(tmpdir.join('test.db')) with pytest.raises(FileNotFoundError): db = TinyDB(path, storage=JSONStorage, access_mode='r') # Create small database db = TinyDB(path, storage=JSONStorage) > db.insert({'b': 1}) tests/test_storages.py:89: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:99: in insert doc_id = self._get_next_id() tinydb/table.py:340: in _get_next_id self._next_id = max(self._read_table().keys() or [0]) + 1 _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce268c3070> def _read_table(self) ->Dict[str, Mapping]: """ Read the table data from the underlying storage. Documents and doc_ids are NOT yet transformed, as we may not want to convert *all* documents when returning only one document for example. """ data = self._storage.read() > return data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:354: AttributeError
test_storages.py::test_in_memory_close
def test_in_memory_close(): with TinyDB(storage=MemoryStorage) as db: > db.insert({}) tests/test_storages.py:146: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:99: in insert doc_id = self._get_next_id() tinydb/table.py:340: in _get_next_id self._next_id = max(self._read_table().keys() or [0]) + 1 _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce26b96c20> def _read_table(self) ->Dict[str, Mapping]: """ Read the table data from the underlying storage. Documents and doc_ids are NOT yet transformed, as we may not want to convert *all* documents when returning only one document for example. """ data = self._storage.read() > return data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:354: AttributeError
test_storages.py::test_read_once
def test_read_once():
    count = 0

    # noinspection PyAbstractClass
    class MyStorage(Storage):
        def __init__(self):
            self.memory = None

        def read(self):
            nonlocal count
            count += 1
            return self.memory

        def write(self, data):
            self.memory = data

    with TinyDB(storage=MyStorage) as db:
        assert count == 0

>       db.table(db.default_table_name)

tests/test_storages.py:178:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =
name = 'default_table_name'

    def __getattr__(self, name):
        """
        Forward all unknown attribute calls to the underlying standard table.
        """
>       return getattr(self._table, name)
E       AttributeError: 'Table' object has no attribute 'default_table_name'

tinydb/database.py:293: AttributeError
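Here `db.default_table_name` is not found on the `TinyDB` instance itself, so the forwarding `__getattr__` shown in the traceback passes the lookup on to the underlying `Table`, which does not know the name either. One plausible fix, sketched against the forwarding code above (the `'_default'` value matches the default table name the suite checks in `test_tables_list`; treat the rest as an assumption):

```python
class TinyDB:
    # Defined at class level, so db.default_table_name is resolved by normal
    # attribute lookup and never reaches the forwarding __getattr__ below.
    default_table_name = '_default'

    def __getattr__(self, name):
        # Forward all unknown attribute calls to the underlying standard
        # table, mirroring the forwarding shown in the traceback.
        return getattr(self._table, name)
```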
test_storages.py::test_yaml
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_yaml0') def test_yaml(tmpdir): """ :type tmpdir: py._path.local.LocalPath """ try: import yaml except ImportError: return pytest.skip('PyYAML not installed') def represent_doc(dumper, data): # Represent `Document` objects as their dict's string representation # which PyYAML understands return dumper.represent_data(dict(data)) yaml.add_representer(Document, represent_doc) class YAMLStorage(Storage): def __init__(self, filename): self.filename = filename touch(filename, False) def read(self): with open(self.filename) as handle: data = yaml.safe_load(handle.read()) return data def write(self, data): with open(self.filename, 'w') as handle: yaml.dump(data, handle) def close(self): pass # Write contents path = str(tmpdir.join('test.db')) db = TinyDB(path, storage=YAMLStorage) > db.insert(doc) tests/test_storages.py:251: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:99: in insert doc_id = self._get_next_id() tinydb/table.py:340: in _get_next_id self._next_id = max(self._read_table().keys() or [0]) + 1 _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce27c880a0> def _read_table(self) ->Dict[str, Mapping]: """ Read the table data from the underlying storage. Documents and doc_ids are NOT yet transformed, as we may not want to convert *all* documents when returning only one document for example. """ data = self._storage.read() > return data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:354: AttributeError
test_storages.py::test_encoding
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_encoding0')

def test_encoding(tmpdir):
    japanese_doc = {"Test": u"こんにちは世界"}

    path = str(tmpdir.join('test.db'))
    # cp936 is used for japanese encodings
    jap_storage = JSONStorage(path, encoding="cp936")
    jap_storage.write(japanese_doc)

    try:
        exception = json.decoder.JSONDecodeError
    except AttributeError:
        exception = ValueError

>   with pytest.raises(exception):
E   Failed: DID NOT RAISE

tests/test_storages.py:275: Failed
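The test writes a Japanese document through `JSONStorage(path, encoding="cp936")` and then expects a decode/JSON error; `DID NOT RAISE` hints that the `encoding` argument may never reach the file handle (or that decode errors are being swallowed). A heavily hedged sketch of forwarding the encoding to `open()`; the constructor signature here is an assumption, not taken from the generated code:

```python
class JSONStorage(Storage):
    """Sketch of encoding handling only; all other behaviour is omitted."""

    def __init__(self, path, encoding=None, access_mode='r+', **kwargs):
        super().__init__()
        self.kwargs = kwargs
        # Pass the caller's encoding straight through to open(); silently
        # ignoring it would make the cp936 round-trip in test_encoding
        # appear to succeed.
        self._handle = open(path, mode=access_mode, encoding=encoding)
```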
test_tables.py::test_next_id[memory]
db =

def test_next_id(db):
    db.truncate()

>   assert db._get_next_id() == 1
E   assert 4 == 1
E    +  where 4 = _get_next_id()
E    +  where _get_next_id = ._get_next_id

tests/test_tables.py:11: AssertionError
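`test_next_id` and `test_truncate_table` fail the same way: after `db.truncate()` the next document ID is still 4, so the cached ID counter apparently survives the truncate. A sketch of resetting it, assuming the counter lives in `self._next_id` as the `_get_next_id` frames elsewhere in this report suggest; `_clear_table_data` is a hypothetical stand-in for whatever actually clears the stored documents:

```python
def truncate(self) -> None:
    # However the table data itself is cleared, the cached ID counter has to
    # be dropped too, otherwise _get_next_id keeps counting from old inserts.
    self._clear_table_data()   # hypothetical helper, not from the generated code
    self._next_id = None


def _get_next_id(self) -> int:
    if self._next_id is None:
        # Recompute the counter from whatever is currently stored.
        table = self._read_table()
        self._next_id = max((int(doc_id) for doc_id in table), default=0) + 1

    next_id = self._next_id
    self._next_id = next_id + 1
    return next_id
```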
test_tables.py::test_next_id[json]
db =def test_next_id(db): db.truncate() > assert db._get_next_id() == 1 E assert 4 == 1 E + where 4 = _get_next_id() E + where _get_next_id = ._get_next_id tests/test_tables.py:11: AssertionError
test_tables.py::test_tables_list[memory]
db =

def test_tables_list(db):
    db.table('table1').insert({'a': 1})
    db.table('table2').insert({'a': 1})

>   assert db.tables() == {'_default', 'table1', 'table2'}
E   AssertionError: assert ['_default', ...e1', 'table2'] == {'_default', ...e1', 'table2'}
E
E   Full diff:
E   - {
E   + [
E     '_default',
E     'table1',
E     'table2',
E   - }
E   + ]

tests/test_tables.py:20: AssertionError
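`db.tables()` returns a list here while the test compares against a set. A one-line sketch of the expected return type; the storage attribute name is illustrative and assumed, not taken from the generated code:

```python
def tables(self) -> set:
    # The suite compares the result with a set literal, so return the table
    # names as a set rather than a list.
    return set(self._storage.read() or {})
```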
test_tables.py::test_tables_list[json]
db =def test_tables_list(db): db.table('table1').insert({'a': 1}) db.table('table2').insert({'a': 1}) > assert db.tables() == {'_default', 'table1', 'table2'} E AssertionError: assert ['_default', ...e1', 'table2'] == {'_default', ...e1', 'table2'} E E Full diff: E - { E + [ E '_default', E 'table1', E 'table2', E - } E + ] tests/test_tables.py:20: AssertionError
test_tables.py::test_one_table[memory]
db =def test_one_table(db): table1 = db.table('table1') table1.insert_multiple({'int': 1, 'char': c} for c in 'abc') > assert table1.get(where('int') == 1)['char'] == 'a' tests/test_tables.py:28: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_one_table[json]
db =def test_one_table(db): table1 = db.table('table1') table1.insert_multiple({'int': 1, 'char': c} for c in 'abc') > assert table1.get(where('int') == 1)['char'] == 'a' tests/test_tables.py:28: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_multiple_tables[memory]
db =def test_multiple_tables(db): table1 = db.table('table1') table2 = db.table('table2') table3 = db.table('table3') table1.insert({'int': 1, 'char': 'a'}) table2.insert({'int': 1, 'char': 'b'}) table3.insert({'int': 1, 'char': 'c'}) > assert table1.count(where('char') == 'a') == 1 tests/test_tables.py:41: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_multiple_tables[json]
db =def test_multiple_tables(db): table1 = db.table('table1') table2 = db.table('table2') table3 = db.table('table3') table1.insert({'int': 1, 'char': 'a'}) table2.insert({'int': 1, 'char': 'b'}) table3.insert({'int': 1, 'char': 'c'}) > assert table1.count(where('char') == 'a') == 1 tests/test_tables.py:41: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_query_cache[memory]
db =def test_query_cache(db): > query1 = where('int') == 1 tests/test_tables.py:60: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_query_cache[json]
db =def test_query_cache(db): > query1 = where('int') == 1 tests/test_tables.py:60: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_query_cache_with_mutable_callable[memory]
db =def test_query_cache_with_mutable_callable(db): table = db.table('table') table.insert({'val': 5}) mutable = 5 increase = lambda x: x + mutable > assert where('val').is_cacheable() tests/test_tables.py:84: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_query_cache_with_mutable_callable[json]
db =def test_query_cache_with_mutable_callable(db): table = db.table('table') table.insert({'val': 5}) mutable = 5 increase = lambda x: x + mutable > assert where('val').is_cacheable() tests/test_tables.py:84: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_zero_cache_size[memory]
db =def test_zero_cache_size(db): table = db.table('table3', cache_size=0) > query = where('int') == 1 tests/test_tables.py:100: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_zero_cache_size[json]
db =def test_zero_cache_size(db): table = db.table('table3', cache_size=0) > query = where('int') == 1 tests/test_tables.py:100: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_query_cache_size[memory]
db =def test_query_cache_size(db): table = db.table('table3', cache_size=1) > query = where('int') == 1 tests/test_tables.py:112: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_query_cache_size[json]
db =def test_query_cache_size(db): table = db.table('table3', cache_size=1) > query = where('int') == 1 tests/test_tables.py:112: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_lru_cache[memory]
db =def test_lru_cache(db): # Test integration into TinyDB table = db.table('table3', cache_size=2) > query = where('int') == 1 tests/test_tables.py:125: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_lru_cache[json]
db =def test_lru_cache(db): # Test integration into TinyDB table = db.table('table3', cache_size=2) > query = where('int') == 1 tests/test_tables.py:125: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tables.py::test_truncate_table[memory]
db =def test_truncate_table(db): db.truncate() > assert db._get_next_id() == 1 E assert 4 == 1 E + where 4 = _get_next_id() E + where _get_next_id = ._get_next_id tests/test_tables.py:171: AssertionError
test_tables.py::test_truncate_table[json]
db =def test_truncate_table(db): db.truncate() > assert db._get_next_id() == 1 E assert 4 == 1 E + where 4 = _get_next_id() E + where _get_next_id = ._get_next_id tests/test_tables.py:171: AssertionError
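`test_truncate_table` expects the document-ID counter to restart at 1 once the table is truncated, but `_get_next_id()` still reports 4, i.e. the cached counter survives the truncation (the `test_insert_ids`/`test_insert_with_doc_id` failures further down show the same symptom after `drop_tables()`). A sketch of the reset idea, assuming a lazily derived `_next_id`; `TinyTable` is a stand-in, not TinyDB's `Table`:

```python
# Stand-in table: the ID counter is forgotten on truncate, so the next
# insert after truncate() gets doc_id 1 again.
class TinyTable:
    def __init__(self):
        self._docs = {}        # doc_id -> document
        self._next_id = None   # lazily derived from the stored keys

    def _get_next_id(self):
        if self._next_id is None:
            self._next_id = max(self._docs.keys(), default=0) + 1
        next_id = self._next_id
        self._next_id += 1
        return next_id

    def insert(self, doc):
        doc_id = self._get_next_id()
        self._docs[doc_id] = dict(doc)
        return doc_id

    def truncate(self):
        self._docs.clear()
        self._next_id = None   # discard the cached counter as well


table = TinyTable()
assert [table.insert({'x': i}) for i in range(3)] == [1, 2, 3]
table.truncate()
assert table.insert({'x': 0}) == 1
```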
test_tinydb.py::test_drop_tables[memory]
db =def test_drop_tables(db: TinyDB): db.drop_tables() db.insert({}) db.drop_tables() > assert len(db) == 0 E TypeError: object of type 'TinyDB' has no len() tests/test_tinydb.py:18: TypeError
test_tinydb.py::test_drop_tables[json]
db =def test_drop_tables(db: TinyDB): db.drop_tables() db.insert({}) db.drop_tables() > assert len(db) == 0 E TypeError: object of type 'TinyDB' has no len() tests/test_tinydb.py:18: TypeError
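`test_drop_tables` fails on `len(db)` with `TypeError: object of type 'TinyDB' has no len()`, and the same error recurs in `test_remove_ids`, `test_update`, `test_update_multiple`, `test_upsert`, `test_upsert_by_id` and `test_empty_db_len` below. `len()` only works when the class defines `__len__`; a plausible direction, sketched with assumed internals, is to forward the length of the default table:

```python
class DatabaseWithLen:
    # Names below are assumptions mirroring how the tests use the API.
    default_table_name = '_default'

    def __init__(self):
        self._tables = {}

    def table(self, name):
        return self._tables.setdefault(name, {})

    def __len__(self):
        # len(db) reports the number of documents in the default table.
        return len(self.table(self.default_table_name))


db = DatabaseWithLen()
assert len(db) == 0
db.table('_default')[1] = {'int': 1}
assert len(db) == 1
```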
test_tinydb.py::test_insert[memory]
db =def test_insert(db: TinyDB): db.drop_tables() db.insert({'int': 1, 'char': 'a'}) > assert db.count(where('int') == 1) == 1 tests/test_tinydb.py:34: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_insert[json]
db =def test_insert(db: TinyDB): db.drop_tables() db.insert({'int': 1, 'char': 'a'}) > assert db.count(where('int') == 1) == 1 tests/test_tinydb.py:34: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_insert_ids[memory]
db =def test_insert_ids(db: TinyDB): db.drop_tables() > assert db.insert({'int': 1, 'char': 'a'}) == 1 E AssertionError: assert 4 == 1 E + where 4 = insert({'char': 'a', 'int': 1}) E + where insert = .insert tests/test_tinydb.py:48: AssertionError
test_tinydb.py::test_insert_ids[json]
db =def test_insert_ids(db: TinyDB): db.drop_tables() > assert db.insert({'int': 1, 'char': 'a'}) == 1 E AssertionError: assert 4 == 1 E + where 4 = insert({'char': 'a', 'int': 1}) E + where insert = .insert tests/test_tinydb.py:48: AssertionError
test_tinydb.py::test_insert_with_doc_id[memory]
db =def test_insert_with_doc_id(db: TinyDB): db.drop_tables() > assert db.insert({'int': 1, 'char': 'a'}) == 1 E AssertionError: assert 4 == 1 E + where 4 = insert({'char': 'a', 'int': 1}) E + where insert = .insert tests/test_tinydb.py:54: AssertionError
test_tinydb.py::test_insert_with_doc_id[json]
db =def test_insert_with_doc_id(db: TinyDB): db.drop_tables() > assert db.insert({'int': 1, 'char': 'a'}) == 1 E AssertionError: assert 4 == 1 E + where 4 = insert({'char': 'a', 'int': 1}) E + where insert = .insert tests/test_tinydb.py:54: AssertionError
test_tinydb.py::test_insert_with_duplicate_doc_id[memory]
db =def test_insert_with_duplicate_doc_id(db: TinyDB): db.drop_tables() > assert db.insert({'int': 1, 'char': 'a'}) == 1 E AssertionError: assert 4 == 1 E + where 4 = insert({'char': 'a', 'int': 1}) E + where insert = .insert tests/test_tinydb.py:62: AssertionError
test_tinydb.py::test_insert_with_duplicate_doc_id[json]
db =def test_insert_with_duplicate_doc_id(db: TinyDB): db.drop_tables() > assert db.insert({'int': 1, 'char': 'a'}) == 1 E AssertionError: assert 4 == 1 E + where 4 = insert({'char': 'a', 'int': 1}) E + where insert = .insert tests/test_tinydb.py:62: AssertionError
test_tinydb.py::test_insert_multiple[memory]
db =def test_insert_multiple(db: TinyDB): db.drop_tables() > assert not db.contains(where('int') == 1) tests/test_tinydb.py:70: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_insert_multiple[json]
db =def test_insert_multiple(db: TinyDB): db.drop_tables() > assert not db.contains(where('int') == 1) tests/test_tinydb.py:70: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_insert_multiple_with_ids[memory]
db =def test_insert_multiple_with_ids(db: TinyDB): db.drop_tables() # Insert multiple from list > assert db.insert_multiple([{'int': 1, 'char': 'a'}, {'int': 1, 'char': 'b'}, {'int': 1, 'char': 'c'}]) == [1, 2, 3] E AssertionError: assert [4, 5, 6] == [1, 2, 3] E E At index 0 diff: 4 != 1 E E Full diff: E [ E - 1, E ? ^... E E ...Full output truncated (11 lines hidden), use '-vv' to show tests/test_tinydb.py:106: AssertionError
test_tinydb.py::test_insert_multiple_with_ids[json]
db =def test_insert_multiple_with_ids(db: TinyDB): db.drop_tables() # Insert multiple from list > assert db.insert_multiple([{'int': 1, 'char': 'a'}, {'int': 1, 'char': 'b'}, {'int': 1, 'char': 'c'}]) == [1, 2, 3] E AssertionError: assert [4, 5, 6] == [1, 2, 3] E E At index 0 diff: 4 != 1 E E Full diff: E [ E - 1, E ? ^... E E ...Full output truncated (11 lines hidden), use '-vv' to show tests/test_tinydb.py:106: AssertionError
test_tinydb.py::test_insert_multiple_with_doc_ids[memory]
db =def test_insert_multiple_with_doc_ids(db: TinyDB): db.drop_tables() > assert db.insert_multiple([ Document({'int': 1, 'char': 'a'}, 12), Document({'int': 1, 'char': 'b'}, 77) ]) == [12, 77] E AssertionError: assert [4, 5] == [12, 77] E E At index 0 diff: 4 != 12 E E Full diff: E [ E - 12, E ? ^^... E E ...Full output truncated (7 lines hidden), use '-vv' to show tests/test_tinydb.py:114: AssertionError
test_tinydb.py::test_insert_multiple_with_doc_ids[json]
db =def test_insert_multiple_with_doc_ids(db: TinyDB): db.drop_tables() > assert db.insert_multiple([ Document({'int': 1, 'char': 'a'}, 12), Document({'int': 1, 'char': 'b'}, 77) ]) == [12, 77] E AssertionError: assert [4, 5] == [12, 77] E E At index 0 diff: 4 != 12 E E Full diff: E [ E - 12, E ? ^^... E E ...Full output truncated (7 lines hidden), use '-vv' to show tests/test_tinydb.py:114: AssertionError
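`test_insert_multiple_with_doc_ids` hands in `Document` objects that carry explicit IDs (12 and 77) and expects those IDs back, but the table keeps assigning 4 and 5 from its own counter. A sketch of the expected behaviour, honouring a `doc_id` attribute when present; both `Document` and `insert_multiple` below are simplified stand-ins, not TinyDB's implementations:

```python
# Simplified stand-ins: a Document that carries an explicit doc_id, and an
# insert helper that honours it instead of the auto-increment counter.
class Document(dict):
    def __init__(self, value, doc_id):
        super().__init__(value)
        self.doc_id = doc_id


def insert_multiple(table, docs, next_id=1):
    """Insert docs into `table` (a dict), returning the assigned doc_ids."""
    ids = []
    for doc in docs:
        if hasattr(doc, 'doc_id'):   # explicit ID wins
            doc_id = doc.doc_id
        else:                        # otherwise fall back to auto-increment
            doc_id = next_id
            next_id += 1
        table[doc_id] = dict(doc)
        ids.append(doc_id)
    return ids


table = {}
assert insert_multiple(table, [Document({'int': 1, 'char': 'a'}, 12),
                               Document({'int': 1, 'char': 'b'}, 77)]) == [12, 77]
```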
test_tinydb.py::test_insert_invalid_type_raises_error[memory]
db =def test_insert_invalid_type_raises_error(db: TinyDB): > with pytest.raises(ValueError, match='Document is not a Mapping'): E Failed: DID NOT RAISE tests/test_tinydb.py:126: Failed
test_tinydb.py::test_insert_invalid_type_raises_error[json]
db =def test_insert_invalid_type_raises_error(db: TinyDB): with pytest.raises(ValueError, match='Document is not a Mapping'): # object() as an example of a non-mapping-type > db.insert(object()) # type: ignore tests/test_tinydb.py:128: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:100: in insert self._update_table(lambda table: table.update({doc_id: document})) tinydb/table.py:373: in _update_table self._storage.write(data) tinydb/storages.py:108: in write json.dump(data, self._handle, **self.kwargs) /usr/lib/python3.10/json/__init__.py:179: in dump for chunk in iterable: /usr/lib/python3.10/json/encoder.py:431: in _iterencode yield from _iterencode_dict(o, _current_indent_level) /usr/lib/python3.10/json/encoder.py:405: in _iterencode_dict yield from chunks /usr/lib/python3.10/json/encoder.py:405: in _iterencode_dict yield from chunks /usr/lib/python3.10/json/encoder.py:438: in _iterencode o = _default(o) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = o =
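`test_insert_invalid_type_raises_error` expects `ValueError('Document is not a Mapping')` as soon as a non-mapping is inserted; with memory storage nothing is raised at all, and with JSON storage the bad object only blows up later inside `json.dump`. The guard the test describes is a plain isinstance check up front, for example:

```python
from collections.abc import Mapping

def validate_document(document):
    # Sketch of the guard the test expects: reject non-mapping documents
    # before they ever reach the storage layer.
    if not isinstance(document, Mapping):
        raise ValueError('Document is not a Mapping')

validate_document({'int': 1})      # fine
try:
    validate_document(object())    # not a mapping
except ValueError as exc:
    assert str(exc) == 'Document is not a Mapping'
```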
test_tinydb.py::test_insert_valid_mapping_type[memory]
db =def test_insert_valid_mapping_type(db: TinyDB): class CustomDocument(Mapping): def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def __iter__(self): return iter(self.data) def __len__(self): return len(self.data) db.drop_tables() db.insert(CustomDocument({'int': 1, 'char': 'a'})) > assert db.count(where('int') == 1) == 1 tests/test_tinydb.py:147: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_insert_valid_mapping_type[json]
db =def test_insert_valid_mapping_type(db: TinyDB): class CustomDocument(Mapping): def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def __iter__(self): return iter(self.data) def __len__(self): return len(self.data) db.drop_tables() > db.insert(CustomDocument({'int': 1, 'char': 'a'})) tests/test_tinydb.py:146: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:100: in insert self._update_table(lambda table: table.update({doc_id: document})) tinydb/table.py:373: in _update_table self._storage.write(data) tinydb/storages.py:108: in write json.dump(data, self._handle, **self.kwargs) /usr/lib/python3.10/json/__init__.py:179: in dump for chunk in iterable: /usr/lib/python3.10/json/encoder.py:431: in _iterencode yield from _iterencode_dict(o, _current_indent_level) /usr/lib/python3.10/json/encoder.py:405: in _iterencode_dict yield from chunks /usr/lib/python3.10/json/encoder.py:405: in _iterencode_dict yield from chunks /usr/lib/python3.10/json/encoder.py:438: in _iterencode o = _default(o) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = o = .CustomDocument object at 0x7fce26877040> def default(self, o): """Implement this method in a subclass such that it returns a serializable object for ``o``, or calls the base implementation (to raise a ``TypeError``). For example, to support arbitrary iterators, you could implement default like this:: def default(self, o): try: iterable = iter(o) except TypeError: pass else: return list(iterable) # Let the base class default method raise the TypeError return JSONEncoder.default(self, o) """ > raise TypeError(f'Object of type {o.__class__.__name__} ' f'is not JSON serializable') E TypeError: Object of type CustomDocument is not JSON serializable /usr/lib/python3.10/json/encoder.py:179: TypeError
test_tinydb.py::test_custom_mapping_type_with_json
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_custom_mapping_type_with_0') def test_custom_mapping_type_with_json(tmpdir): class CustomDocument(Mapping): def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def __iter__(self): return iter(self.data) def __len__(self): return len(self.data) # Insert db = TinyDB(str(tmpdir.join('test.db'))) db.drop_tables() > db.insert(CustomDocument({'int': 1, 'char': 'a'})) tests/test_tinydb.py:167: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:100: in insert self._update_table(lambda table: table.update({doc_id: document})) tinydb/table.py:373: in _update_table self._storage.write(data) tinydb/storages.py:108: in write json.dump(data, self._handle, **self.kwargs) /usr/lib/python3.10/json/__init__.py:179: in dump for chunk in iterable: /usr/lib/python3.10/json/encoder.py:431: in _iterencode yield from _iterencode_dict(o, _current_indent_level) /usr/lib/python3.10/json/encoder.py:405: in _iterencode_dict yield from chunks /usr/lib/python3.10/json/encoder.py:405: in _iterencode_dict yield from chunks /usr/lib/python3.10/json/encoder.py:438: in _iterencode o = _default(o) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self =o = .CustomDocument object at 0x7fce26262860> def default(self, o): """Implement this method in a subclass such that it returns a serializable object for ``o``, or calls the base implementation (to raise a ``TypeError``). For example, to support arbitrary iterators, you could implement default like this:: def default(self, o): try: iterable = iter(o) except TypeError: pass else: return list(iterable) # Let the base class default method raise the TypeError return JSONEncoder.default(self, o) """ > raise TypeError(f'Object of type {o.__class__.__name__} ' f'is not JSON serializable') E TypeError: Object of type CustomDocument is not JSON serializable /usr/lib/python3.10/json/encoder.py:179: TypeError
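The `CustomDocument` failures are the mirror image of the previous one: a perfectly valid `Mapping` subclass is accepted but then handed directly to `json.dump`, which only understands plain dicts. One plausible remedy (an assumption, not necessarily the upstream fix) is to copy any mapping into a `dict` before it reaches the storage layer:

```python
import json
from collections.abc import Mapping

class CustomDocument(Mapping):
    def __init__(self, data):
        self.data = data
    def __getitem__(self, key):
        return self.data[key]
    def __iter__(self):
        return iter(self.data)
    def __len__(self):
        return len(self.data)

doc = CustomDocument({'int': 1, 'char': 'a'})
# json.dumps(doc) would raise TypeError; converting to a plain dict first
# (the assumed remedy) makes the document serializable.
serialized = json.dumps(dict(doc))
assert json.loads(serialized) == {'int': 1, 'char': 'a'}
```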
test_tinydb.py::test_remove[memory]
db =def test_remove(db: TinyDB): > db.remove(where('char') == 'b') tests/test_tinydb.py:187: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_remove[json]
db =def test_remove(db: TinyDB): > db.remove(where('char') == 'b') tests/test_tinydb.py:187: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_remove_all_fails[memory]
db =def test_remove_all_fails(db: TinyDB): > with pytest.raises(RuntimeError): E Failed: DID NOT RAISE tests/test_tinydb.py:194: Failed
test_tinydb.py::test_remove_all_fails[json]
db =def test_remove_all_fails(db: TinyDB): > with pytest.raises(RuntimeError): E Failed: DID NOT RAISE tests/test_tinydb.py:194: Failed
test_tinydb.py::test_remove_multiple[memory]
db =def test_remove_multiple(db: TinyDB): > db.remove(where('int') == 1) tests/test_tinydb.py:199: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_remove_multiple[json]
db =def test_remove_multiple(db: TinyDB): > db.remove(where('int') == 1) tests/test_tinydb.py:199: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_remove_ids[memory]
db =def test_remove_ids(db: TinyDB): db.remove(doc_ids=[1, 2]) > assert len(db) == 1 E TypeError: object of type 'TinyDB' has no len() tests/test_tinydb.py:207: TypeError
test_tinydb.py::test_remove_ids[json]
db =def test_remove_ids(db: TinyDB): db.remove(doc_ids=[1, 2]) > assert len(db) == 1 E TypeError: object of type 'TinyDB' has no len() tests/test_tinydb.py:207: TypeError
test_tinydb.py::test_remove_returns_ids[memory]
db =def test_remove_returns_ids(db: TinyDB): > assert db.remove(where('char') == 'b') == [2] tests/test_tinydb.py:211: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_remove_returns_ids[json]
db =def test_remove_returns_ids(db: TinyDB): > assert db.remove(where('char') == 'b') == [2] tests/test_tinydb.py:211: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
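The remove-related failures above combine two expectations: `remove()` called with neither a condition nor doc_ids must raise `RuntimeError` (`test_remove_all_fails`), and a successful removal must return the affected IDs (`test_remove_returns_ids`). A compact sketch of both, with `table` as a plain doc_id-to-document dict (an illustration, not TinyDB's code):

```python
def remove(table, cond=None, doc_ids=None):
    """Remove documents from `table` (a dict of doc_id -> document)."""
    if cond is None and doc_ids is None:
        # Refuse to wipe the table implicitly; truncation should be explicit.
        raise RuntimeError('Use truncate() to remove all documents')
    if doc_ids is not None:
        removed = [doc_id for doc_id in doc_ids if doc_id in table]
    else:
        removed = [doc_id for doc_id, doc in table.items() if cond(doc)]
    for doc_id in removed:
        del table[doc_id]
    return removed


table = {1: {'char': 'a'}, 2: {'char': 'b'}, 3: {'char': 'c'}}
assert remove(table, cond=lambda doc: doc['char'] == 'b') == [2]
assert remove(table, doc_ids=[1]) == [1]
try:
    remove(table)
except RuntimeError:
    pass
else:
    raise AssertionError('remove() without arguments should refuse to run')
```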
test_tinydb.py::test_update[memory]
db =def test_update(db: TinyDB): > assert len(db) == 3 E TypeError: object of type 'TinyDB' has no len() tests/test_tinydb.py:215: TypeError
test_tinydb.py::test_update[json]
db =def test_update(db: TinyDB): > assert len(db) == 3 E TypeError: object of type 'TinyDB' has no len() tests/test_tinydb.py:215: TypeError
test_tinydb.py::test_update_all[memory]
db =def test_update_all(db: TinyDB): > assert db.count(where('int') == 1) == 3 tests/test_tinydb.py:224: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_update_all[json]
db =def test_update_all(db: TinyDB): > assert db.count(where('int') == 1) == 3 tests/test_tinydb.py:224: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_update_returns_ids[memory]
db =def test_update_returns_ids(db: TinyDB): db.drop_tables() > assert db.insert({'int': 1, 'char': 'a'}) == 1 E AssertionError: assert 4 == 1 E + where 4 = insert({'char': 'a', 'int': 1}) E + where insert = .insert tests/test_tinydb.py:233: AssertionError
test_tinydb.py::test_update_returns_ids[json]
db =def test_update_returns_ids(db: TinyDB): db.drop_tables() > assert db.insert({'int': 1, 'char': 'a'}) == 1 E AssertionError: assert 4 == 1 E + where 4 = insert({'char': 'a', 'int': 1}) E + where insert = .insert tests/test_tinydb.py:233: AssertionError
test_tinydb.py::test_update_transform[memory]
db =def test_update_transform(db: TinyDB): def increment(field): def transform(el): el[field] += 1 return transform def delete(field): def transform(el): del el[field] return transform > assert db.count(where('int') == 1) == 3 tests/test_tinydb.py:252: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_update_transform[json]
db =def test_update_transform(db: TinyDB): def increment(field): def transform(el): el[field] += 1 return transform def delete(field): def transform(el): del el[field] return transform > assert db.count(where('int') == 1) == 3 tests/test_tinydb.py:252: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_update_ids[memory]
db =def test_update_ids(db: TinyDB): db.update({'int': 2}, doc_ids=[1, 2]) > assert db.count(where('int') == 2) == 2 tests/test_tinydb.py:265: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_update_ids[json]
db =def test_update_ids(db: TinyDB): db.update({'int': 2}, doc_ids=[1, 2]) > assert db.count(where('int') == 2) == 2 tests/test_tinydb.py:265: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_update_multiple[memory]
db =def test_update_multiple(db: TinyDB): > assert len(db) == 3 E TypeError: object of type 'TinyDB' has no len() tests/test_tinydb.py:269: TypeError
test_tinydb.py::test_update_multiple[json]
db =def test_update_multiple(db: TinyDB): > assert len(db) == 3 E TypeError: object of type 'TinyDB' has no len() tests/test_tinydb.py:269: TypeError
test_tinydb.py::test_update_multiple_operation[memory]
db =def test_update_multiple_operation(db: TinyDB): def increment(field): def transform(el): el[field] += 1 return transform > assert db.count(where('int') == 1) == 3 tests/test_tinydb.py:288: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_update_multiple_operation[json]
db =def test_update_multiple_operation(db: TinyDB): def increment(field): def transform(el): el[field] += 1 return transform > assert db.count(where('int') == 1) == 3 tests/test_tinydb.py:288: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_upsert[memory]
db =def test_upsert(db: TinyDB): > assert len(db) == 3 E TypeError: object of type 'TinyDB' has no len() tests/test_tinydb.py:299: TypeError
test_tinydb.py::test_upsert[json]
db =def test_upsert(db: TinyDB): > assert len(db) == 3 E TypeError: object of type 'TinyDB' has no len() tests/test_tinydb.py:299: TypeError
test_tinydb.py::test_upsert_by_id[memory]
db =def test_upsert_by_id(db: TinyDB): > assert len(db) == 3 E TypeError: object of type 'TinyDB' has no len() tests/test_tinydb.py:311: TypeError
test_tinydb.py::test_upsert_by_id[json]
db =def test_upsert_by_id(db: TinyDB): > assert len(db) == 3 E TypeError: object of type 'TinyDB' has no len() tests/test_tinydb.py:311: TypeError
test_tinydb.py::test_search[memory]
db =def test_search(db: TinyDB): > assert not db._query_cache tests/test_tinydb.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = def __len__(self) ->int: > return self.length E AttributeError: 'LRUCache' object has no attribute 'length' tinydb/utils.py:48: AttributeError
test_tinydb.py::test_search[json]
db =def test_search(db: TinyDB): > assert not db._query_cache tests/test_tinydb.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = def __len__(self) ->int: > return self.length E AttributeError: 'LRUCache' object has no attribute 'length' tinydb/utils.py:48: AttributeError
test_tinydb.py::test_search_path[memory]
db =def test_search_path(db: TinyDB): > assert not db._query_cache tests/test_tinydb.py:348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = def __len__(self) ->int: > return self.length E AttributeError: 'LRUCache' object has no attribute 'length' tinydb/utils.py:48: AttributeError
test_tinydb.py::test_search_path[json]
db =def test_search_path(db: TinyDB): > assert not db._query_cache tests/test_tinydb.py:348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = def __len__(self) ->int: > return self.length E AttributeError: 'LRUCache' object has no attribute 'length' tinydb/utils.py:48: AttributeError
test_tinydb.py::test_search_no_results_cache[memory]
db =def test_search_no_results_cache(db: TinyDB): > assert len(db.search(where('missing').exists())) == 0 tests/test_tinydb.py:357: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_search_no_results_cache[json]
db =def test_search_no_results_cache(db: TinyDB): > assert len(db.search(where('missing').exists())) == 0 tests/test_tinydb.py:357: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_get[memory]
db =def test_get(db: TinyDB): > item = db.get(where('char') == 'b') tests/test_tinydb.py:362: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_get[json]
db =def test_get(db: TinyDB): > item = db.get(where('char') == 'b') tests/test_tinydb.py:362: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_get_ids[json]
db =def test_get_ids(db: TinyDB): el = db.all()[0] > assert db.get(doc_id=el.doc_id) == el E AssertionError: assert None == {'int': 1, 'char': 'a'} E + where None = get(doc_id=1) E + where get = .get E + and 1 = {'int': 1, 'char': 'a'}.doc_id tests/test_tinydb.py:370: AssertionError
test_tinydb.py::test_get_multiple_ids[json]
db =def test_get_multiple_ids(db: TinyDB): el = db.all() > assert db.get(doc_ids=[x.doc_id for x in el]) == el E AssertionError: assert [] == [{'int': 1, '... 'char': 'c'}] E E Right contains 3 more items, first extra item: {'int': 1, 'char': 'a'} E E Full diff: E + [] E - [ E - {... E E ...Full output truncated (12 lines hidden), use '-vv' to show tests/test_tinydb.py:376: AssertionError
test_tinydb.py::test_get_invalid[memory]
db =def test_get_invalid(db: TinyDB): > with pytest.raises(RuntimeError): E Failed: DID NOT RAISE tests/test_tinydb.py:380: Failed
test_tinydb.py::test_get_invalid[json]
db =def test_get_invalid(db: TinyDB): > with pytest.raises(RuntimeError): E Failed: DID NOT RAISE tests/test_tinydb.py:380: Failed
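`test_get_invalid` (and `test_contains_invalid` further down) expect a `RuntimeError` when neither a query nor a document ID is supplied, while `test_get_ids`/`test_get_multiple_ids` above show the `doc_id` lookup path returning nothing at all. A sketch of the argument validation plus a working ID lookup, using a plain dict as the table (illustrative, not TinyDB's code):

```python
def get(table, cond=None, doc_id=None):
    """Look up a single document in `table` (a dict of doc_id -> document)."""
    if doc_id is not None:
        return table.get(doc_id)
    if cond is not None:
        for document in table.values():
            if cond(document):
                return document
        return None
    # Neither a condition nor an ID was supplied -- treat it as a usage error.
    raise RuntimeError('You have to pass either cond or doc_id')


table = {1: {'int': 1, 'char': 'a'}, 2: {'int': 1, 'char': 'b'}}
assert get(table, doc_id=2) == {'int': 1, 'char': 'b'}
assert get(table, cond=lambda doc: doc['char'] == 'a') == {'int': 1, 'char': 'a'}
try:
    get(table)
except RuntimeError:
    pass
```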
test_tinydb.py::test_count[memory]
db =def test_count(db: TinyDB): > assert db.count(where('int') == 1) == 3 tests/test_tinydb.py:385: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_count[json]
db =def test_count(db: TinyDB): > assert db.count(where('int') == 1) == 3 tests/test_tinydb.py:385: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_contains[memory]
db =def test_contains(db: TinyDB): > assert db.contains(where('int') == 1) tests/test_tinydb.py:390: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_contains[json]
db =def test_contains(db: TinyDB): > assert db.contains(where('int') == 1) tests/test_tinydb.py:390: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_contains_ids[json]
db =def test_contains_ids(db: TinyDB): > assert db.contains(doc_id=1) E assert False E + where False = contains(doc_id=1) E + where contains = .contains tests/test_tinydb.py:395: AssertionError
test_tinydb.py::test_contains_invalid[memory]
db =def test_contains_invalid(db: TinyDB): > with pytest.raises(RuntimeError): E Failed: DID NOT RAISE tests/test_tinydb.py:401: Failed
test_tinydb.py::test_contains_invalid[json]
db =def test_contains_invalid(db: TinyDB): > with pytest.raises(RuntimeError): E Failed: DID NOT RAISE tests/test_tinydb.py:401: Failed
test_tinydb.py::test_get_idempotent[memory]
db =def test_get_idempotent(db: TinyDB): > u = db.get(where('int') == 1) tests/test_tinydb.py:406: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_get_idempotent[json]
db =def test_get_idempotent(db: TinyDB): > u = db.get(where('int') == 1) tests/test_tinydb.py:406: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/queries.py:419: in where return Query()[key] tinydb/queries.py:172: in __getitem__ return self.__getattr__(item) tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None tinydb/queries.py:168: in __getattr__ query._hash = ('path', query._path) if self.is_cacheable() else None E RecursionError: maximum recursion depth exceeded while calling a Python object !!! Recursion detected (same locals & position)
test_tinydb.py::test_multiple_dbs
def test_multiple_dbs(): """ Regression test for issue #3 """ db1 = TinyDB(storage=MemoryStorage) db2 = TinyDB(storage=MemoryStorage) > db1.insert({'int': 1, 'char': 'a'}) tests/test_tinydb.py:418: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:99: in insert doc_id = self._get_next_id() tinydb/table.py:340: in _get_next_id self._next_id = max(self._read_table().keys() or [0]) + 1 _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce26478280> def _read_table(self) ->Dict[str, Mapping]: """ Read the table data from the underlying storage. Documents and doc_ids are NOT yet transformed, as we may not want to convert *all* documents when returning only one document for example. """ data = self._storage.read() > return data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:354: AttributeError
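The most frequent traceback in this run is `AttributeError: 'NoneType' object has no attribute 'get'` inside `_read_table`/`_update_table`: the storage's `read()` returns `None` for a database that has never been written, and the table code calls `.get()` on it unguarded. A defensive sketch of the read path (the method names follow the traceback, but the surrounding classes are illustrative):

```python
class EmptyStorage:
    """Mimics a storage whose read() yields None before the first write."""
    def read(self):
        return None


class Table:
    def __init__(self, storage, name='_default'):
        self._storage = storage
        self._name = name

    def _read_table(self):
        data = self._storage.read()
        if data is None:
            # Nothing has been written yet: behave like an empty table
            # instead of calling .get() on None.
            return {}
        return data.get(self._name, {})


assert Table(EmptyStorage())._read_table() == {}
```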
test_tinydb.py::test_storage_closed_once
def test_storage_closed_once(): class Storage: def __init__(self): self.closed = False def read(self): return {} def write(self, data): pass def close(self): assert not self.closed self.closed = True > with TinyDB(storage=Storage) as db: tests/test_tinydb.py:443: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/database.py:287: in __exit__ self.close() tinydb/database.py:281: in close self._storage.close() _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self =.Storage object at 0x7fce262db0d0> def close(self): > assert not self.closed E assert not True E + where True = .Storage object at 0x7fce262db0d0>.closed tests/test_tinydb.py:440: AssertionError
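`test_storage_closed_once` shows `close()` reaching the storage twice: once when the `with` block exits and once more afterwards. A common remedy is a one-shot guard on the database object, sketched here with assumed attribute names:

```python
class Database:
    """Illustrative sketch: only the first close() is forwarded to storage."""
    def __init__(self, storage):
        self._storage = storage
        self._opened = True

    def close(self):
        if self._opened:
            self._opened = False
            self._storage.close()

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.close()


class OneShotStorage:
    def __init__(self):
        self.closed = False
    def close(self):
        assert not self.closed
        self.closed = True


db = Database(OneShotStorage())
with db:
    pass
db.close()   # the second close is a no-op, so the assertion above holds
```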
test_tinydb.py::test_unique_ids
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_unique_ids0') def test_unique_ids(tmpdir): """ :type tmpdir: py._path.local.LocalPath """ path = str(tmpdir.join('db.json')) # Verify ids are unique when reopening the DB and inserting with TinyDB(path) as _db: > _db.insert({'x': 1}) tests/test_tinydb.py:459: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:99: in insert doc_id = self._get_next_id() tinydb/table.py:340: in _get_next_id self._next_id = max(self._read_table().keys() or [0]) + 1 _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[ValueError('I/O operation on closed file.') raised in repr()] Table object at 0x7fce27329450> def _read_table(self) ->Dict[str, Mapping]: """ Read the table data from the underlying storage. Documents and doc_ids are NOT yet transformed, as we may not want to convert *all* documents when returning only one document for example. """ data = self._storage.read() > return data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:354: AttributeError
test_tinydb.py::test_lastid_after_open
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_lastid_after_open0') def test_lastid_after_open(tmpdir): """ Regression test for issue #34 :type tmpdir: py._path.local.LocalPath """ NUM = 100 path = str(tmpdir.join('db.json')) with TinyDB(path) as _db: > _db.insert_multiple({'i': i} for i in range(NUM)) tests/test_tinydb.py:492: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:117: in insert_multiple self._update_table(updater) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[ValueError('I/O operation on closed file.') raised in repr()] Table object at 0x7fce26243310> updater =.updater at 0x7fce261d01f0> def _update_table(self, updater: Callable[[Dict[int, Mapping]], None]): """ Perform a table update operation. The storage interface used by TinyDB only allows to read/write the complete database data, but not modifying only portions of it. Thus, to only update portions of the table data, we first perform a read operation, perform the update on the table data and then write the updated data back to the storage. As a further optimization, we don't convert the documents into the document class, as the table data will *not* be returned to the user. """ data = self._storage.read() > table = data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:370: AttributeError
test_tinydb.py::test_doc_ids_json
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_doc_ids_json0') def test_doc_ids_json(tmpdir): """ Regression test for issue #45 """ path = str(tmpdir.join('db.json')) with TinyDB(path) as _db: _db.drop_tables() assert _db.insert({'int': 1, 'char': 'a'}) == 1 assert _db.insert({'int': 1, 'char': 'a'}) == 2 _db.drop_tables() > assert _db.insert_multiple([{'int': 1, 'char': 'a'}, {'int': 1, 'char': 'b'}, {'int': 1, 'char': 'c'}]) == [1, 2, 3] E AssertionError: assert [3, 4, 5] == [1, 2, 3] E E At index 0 diff: 3 != 1 E E Full diff: E [ E - 1, E - 2,... E E ...Full output truncated (4 lines hidden), use '-vv' to show tests/test_tinydb.py:511: AssertionError
test_tinydb.py::test_insert_string
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_insert_string0') def test_insert_string(tmpdir): path = str(tmpdir.join('db.json')) with TinyDB(path) as _db: data = [{'int': 1}, {'int': 2}] > _db.insert_multiple(data) tests/test_tinydb.py:535: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:117: in insert_multiple self._update_table(updater) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[ValueError('I/O operation on closed file.') raised in repr()] Table object at 0x7fce27484460> updater =.updater at 0x7fce261d1900> def _update_table(self, updater: Callable[[Dict[int, Mapping]], None]): """ Perform a table update operation. The storage interface used by TinyDB only allows to read/write the complete database data, but not modifying only portions of it. Thus, to only update portions of the table data, we first perform a read operation, perform the update on the table data and then write the updated data back to the storage. As a further optimization, we don't convert the documents into the document class, as the table data will *not* be returned to the user. """ data = self._storage.read() > table = data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:370: AttributeError
test_tinydb.py::test_insert_invalid_dict
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_insert_invalid_dict0') def test_insert_invalid_dict(tmpdir): path = str(tmpdir.join('db.json')) with TinyDB(path) as _db: data = [{'int': 1}, {'int': 2}] > _db.insert_multiple(data) tests/test_tinydb.py:553: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:117: in insert_multiple self._update_table(updater) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[ValueError('I/O operation on closed file.') raised in repr()] Table object at 0x7fce278ec1c0> updater =.updater at 0x7fce261d20e0> def _update_table(self, updater: Callable[[Dict[int, Mapping]], None]): """ Perform a table update operation. The storage interface used by TinyDB only allows to read/write the complete database data, but not modifying only portions of it. Thus, to only update portions of the table data, we first perform a read operation, perform the update on the table data and then write the updated data back to the storage. As a further optimization, we don't convert the documents into the document class, as the table data will *not* be returned to the user. """ data = self._storage.read() > table = data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:370: AttributeError
test_tinydb.py::test_gc
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_gc0') def test_gc(tmpdir): # See https://github.com/msiemens/tinydb/issues/92 path = str(tmpdir.join('db.json')) db = TinyDB(path) table = db.table('foo') > table.insert({'something': 'else'}) tests/test_tinydb.py:568: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:99: in insert doc_id = self._get_next_id() tinydb/table.py:340: in _get_next_id self._next_id = max(self._read_table().keys() or [0]) + 1 _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce26602bf0> def _read_table(self) ->Dict[str, Mapping]: """ Read the table data from the underlying storage. Documents and doc_ids are NOT yet transformed, as we may not want to convert *all* documents when returning only one document for example. """ data = self._storage.read() > return data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:354: AttributeError
test_tinydb.py::test_drop_table
def test_drop_table(): db = TinyDB(storage=MemoryStorage) > default_table_name = db.table(db.default_table_name).name tests/test_tinydb.py:577: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self =name = 'default_table_name' def __getattr__(self, name): """ Forward all unknown attribute calls to the underlying standard table. """ > return getattr(self._table, name) E AttributeError: 'Table' object has no attribute 'default_table_name' tinydb/database.py:293: AttributeError
test_tinydb.py::test_query_cache
def test_query_cache(): db = TinyDB(storage=MemoryStorage) > db.insert_multiple([ {'name': 'foo', 'value': 42}, {'name': 'bar', 'value': -1337} ]) tests/test_tinydb.py:614: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:117: in insert_multiple self._update_table(updater) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce26240c10> updater =.updater at 0x7fce261d03a0> def _update_table(self, updater: Callable[[Dict[int, Mapping]], None]): """ Perform a table update operation. The storage interface used by TinyDB only allows to read/write the complete database data, but not modifying only portions of it. Thus, to only update portions of the table data, we first perform a read operation, perform the update on the table data and then write the updated data back to the storage. As a further optimization, we don't convert the documents into the document class, as the table data will *not* be returned to the user. """ data = self._storage.read() > table = data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:370: AttributeError
test_tinydb.py::test_tinydb_is_iterable[memory]
db =def test_tinydb_is_iterable(db: TinyDB): > assert [r for r in db] == db.all() E TypeError: 'TinyDB' object is not iterable tests/test_tinydb.py:634: TypeError
test_tinydb.py::test_tinydb_is_iterable[json]
db =def test_tinydb_is_iterable(db: TinyDB): > assert [r for r in db] == db.all() E TypeError: 'TinyDB' object is not iterable tests/test_tinydb.py:634: TypeError
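Two more gaps in the top-level API surface here: `test_drop_table` reads a missing `db.default_table_name` attribute (the lookup then falls through to the Table via `__getattr__` and fails there), and `test_tinydb_is_iterable` expects iterating the database to yield the default table's documents, so that `[r for r in db] == db.all()`. A combined sketch with assumed internals:

```python
class DatabaseWithIteration:
    # Exposed so callers can do db.table(db.default_table_name); internals
    # below are assumptions for illustration only.
    default_table_name = '_default'

    def __init__(self):
        self._tables = {'_default': {1: {'int': 1}, 2: {'int': 2}}}

    def table(self, name):
        return self._tables.setdefault(name, {})

    def all(self):
        return list(self.table(self.default_table_name).values())

    def __iter__(self):
        # Iterating the database iterates the documents of the default table.
        return iter(self.all())


db = DatabaseWithIteration()
assert [r for r in db] == db.all()
assert db.table(db.default_table_name) is db._tables['_default']
```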
test_tinydb.py::test_repr
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_repr0') def test_repr(tmpdir): path = str(tmpdir.join('db.json')) db = TinyDB(path) > db.insert({'a': 1}) tests/test_tinydb.py:641: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:99: in insert doc_id = self._get_next_id() tinydb/table.py:340: in _get_next_id self._next_id = max(self._read_table().keys() or [0]) + 1 _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce261bb8b0> def _read_table(self) ->Dict[str, Mapping]: """ Read the table data from the underlying storage. Documents and doc_ids are NOT yet transformed, as we may not want to convert *all* documents when returning only one document for example. """ data = self._storage.read() > return data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:354: AttributeError
test_tinydb.py::test_delete
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_delete0') def test_delete(tmpdir): path = str(tmpdir.join('db.json')) db = TinyDB(path, ensure_ascii=False) q = Query() > db.insert({'network': {'id': '114', 'name': 'ok', 'rpc': 'dac', 'ticker': 'mkay'}}) tests/test_tinydb.py:657: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tinydb/table.py:99: in insert doc_id = self._get_next_id() tinydb/table.py:340: in _get_next_id self._next_id = max(self._read_table().keys() or [0]) + 1 _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce266a4e20> def _read_table(self) ->Dict[str, Mapping]: """ Read the table data from the underlying storage. Documents and doc_ids are NOT yet transformed, as we may not want to convert *all* documents when returning only one document for example. """ data = self._storage.read() > return data.get(self._name, {}) E AttributeError: 'NoneType' object has no attribute 'get' tinydb/table.py:354: AttributeError
test_tinydb.py::test_insert_multiple_with_single_dict[memory]
db =

def test_insert_multiple_with_single_dict(db: TinyDB):
>   with pytest.raises(ValueError):
E   Failed: DID NOT RAISE

tests/test_tinydb.py:668: Failed
test_tinydb.py::test_insert_multiple_with_single_dict[json]
db =

def test_insert_multiple_with_single_dict(db: TinyDB):
>   with pytest.raises(ValueError):
E   Failed: DID NOT RAISE

tests/test_tinydb.py:668: Failed
test_tinydb.py::test_empty_db_len
def test_empty_db_len():
    db = TinyDB(storage=MemoryStorage)
>   assert len(db) == 0
E   TypeError: object of type 'TinyDB' has no len()

tests/test_tinydb.py:683: TypeError
test_tinydb.py::test_insert_on_existing_db
tmpdir = local('/tmp/pytest-of-root/pytest-0/test_insert_on_existing_db0')

def test_insert_on_existing_db(tmpdir):
    path = str(tmpdir.join('db.json'))
    db = TinyDB(path, ensure_ascii=False)
>   db.insert({'foo': 'bar'})

tests/test_tinydb.py:690:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tinydb/table.py:99: in insert
    doc_id = self._get_next_id()
tinydb/table.py:340: in _get_next_id
    self._next_id = max(self._read_table().keys() or [0]) + 1
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce265500a0>

    def _read_table(self) ->Dict[str, Mapping]:
        """
        Read the table data from the underlying storage.

        Documents and doc_ids are NOT yet transformed, as we may not want to
        convert *all* documents when returning only one document for example.
        """
        data = self._storage.read()
>       return data.get(self._name, {})
E       AttributeError: 'NoneType' object has no attribute 'get'

tinydb/table.py:354: AttributeError
test_tinydb.py::test_lambda_query
def test_lambda_query():
    db = TinyDB(storage=MemoryStorage)
>   db.insert({'foo': 'bar'})

tests/test_tinydb.py:711:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tinydb/table.py:99: in insert
    doc_id = self._get_next_id()
tinydb/table.py:340: in _get_next_id
    self._next_id = max(self._read_table().keys() or [0]) + 1
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <[AttributeError("'NoneType' object has no attribute 'get'") raised in repr()] Table object at 0x7fce27b6e740>

    def _read_table(self) ->Dict[str, Mapping]:
        """
        Read the table data from the underlying storage.

        Documents and doc_ids are NOT yet transformed, as we may not want to
        convert *all* documents when returning only one document for example.
        """
        data = self._storage.read()
>       return data.get(self._name, {})
E       AttributeError: 'NoneType' object has no attribute 'get'

tinydb/table.py:354: AttributeError
test_utils.py::test_lru_cache
def test_lru_cache():
    cache = LRUCache(capacity=3)
>   cache["a"] = 1

tests/test_utils.py:8:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =, key = 'a', value = 1

    def __setitem__(self, key: K, value: V) ->None:
>       self.set(key, value)
E       AttributeError: 'LRUCache' object has no attribute 'set'. Did you mean: 'get'?

tinydb/utils.py:54: AttributeError
test_utils.py::test_lru_cache_set_multiple
def test_lru_cache_set_multiple():
    cache = LRUCache(capacity=3)
>   cache["a"] = 1

tests/test_utils.py:24:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =, key = 'a', value = 1

    def __setitem__(self, key: K, value: V) ->None:
>       self.set(key, value)
E       AttributeError: 'LRUCache' object has no attribute 'set'. Did you mean: 'get'?

tinydb/utils.py:54: AttributeError
test_utils.py::test_lru_cache_get
def test_lru_cache_get():
    cache = LRUCache(capacity=3)
>   cache["a"] = 1

tests/test_utils.py:34:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =, key = 'a', value = 1

    def __setitem__(self, key: K, value: V) ->None:
>       self.set(key, value)
E       AttributeError: 'LRUCache' object has no attribute 'set'. Did you mean: 'get'?

tinydb/utils.py:54: AttributeError
test_utils.py::test_lru_cache_delete
def test_lru_cache_delete():
    cache = LRUCache(capacity=3)
>   cache["a"] = 1

tests/test_utils.py:45:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =, key = 'a', value = 1

    def __setitem__(self, key: K, value: V) ->None:
>       self.set(key, value)
E       AttributeError: 'LRUCache' object has no attribute 'set'. Did you mean: 'get'?

tinydb/utils.py:54: AttributeError
test_utils.py::test_lru_cache_clear
def test_lru_cache_clear():
    cache = LRUCache(capacity=3)
>   cache["a"] = 1

tests/test_utils.py:59:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =, key = 'a', value = 1

    def __setitem__(self, key: K, value: V) ->None:
>       self.set(key, value)
E       AttributeError: 'LRUCache' object has no attribute 'set'. Did you mean: 'get'?

tinydb/utils.py:54: AttributeError
test_utils.py::test_lru_cache_unlimited
def test_lru_cache_unlimited():
    cache = LRUCache()
    for i in range(100):
>       cache[i] = i

tests/test_utils.py:69:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =, key = 0, value = 0

    def __setitem__(self, key: K, value: V) ->None:
>       self.set(key, value)
E       AttributeError: 'LRUCache' object has no attribute 'set'. Did you mean: 'get'?

tinydb/utils.py:54: AttributeError
test_utils.py::test_lru_cache_unlimited_explicit
def test_lru_cache_unlimited_explicit():
    cache = LRUCache(capacity=None)
    for i in range(100):
>       cache[i] = i

tests/test_utils.py:77:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self =, key = 0, value = 0

    def __setitem__(self, key: K, value: V) ->None:
>       self.set(key, value)
E       AttributeError: 'LRUCache' object has no attribute 'set'. Did you mean: 'get'?

tinydb/utils.py:54: AttributeError
test_utils.py::test_freeze
def test_freeze():
    frozen = freeze([0, 1, 2, {'a': [1, 2, 3]}, {1, 2}])
>   assert isinstance(frozen, tuple)
E   assert False
E    +  where False = isinstance(None, tuple)

tests/test_utils.py:93: AssertionError
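Nearly all of the AttributeError failures above share one root cause: for an empty database, storage.read() returns None (MemoryStorage starts with memory = None, and JSONStorage returns None for an empty file), and the patched table code calls .get() on that result without a guard. A minimal sketch of the failure mode and of the missing guard (illustrative only, not part of the submitted patch):

    # The Storage contract: read() may return None to signal an empty database.
    storage_contents = None            # what MemoryStorage.read() yields before any write

    # What the patched table code does (raises AttributeError):
    #   table = storage_contents.get('_default', {})

    # The missing guard, treating "empty storage" as an empty dict:
    data = storage_contents or {}
    table = data.get('_default', {})   # now safe; yields {}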
Patch diff
diff --git a/tests/test_operations.py b/tests/test_operations.py
index 3a6f814..d4e1c51 100644
--- a/tests/test_operations.py
+++ b/tests/test_operations.py
@@ -1,8 +1,11 @@
-from tinydb import where
+from tinydb import TinyDB, where
from tinydb.operations import delete, increment, decrement, add, subtract, set
+from tinydb.storages import MemoryStorage
-def test_delete(db):
+def test_delete():
+ db = TinyDB(storage=MemoryStorage)
+ db.insert({'char': 'a', 'int': 1})
db.update(delete('int'), where('char') == 'a')
assert 'int' not in db.get(where('char') == 'a')
diff --git a/tinydb/__init__.py b/tinydb/__init__.py
index 0416e2e..e01c4f0 100644
--- a/tinydb/__init__.py
+++ b/tinydb/__init__.py
@@ -27,6 +27,7 @@ Usage example:
from .queries import Query, where
from .storages import Storage, JSONStorage
from .database import TinyDB
+from .table import Table
from .version import __version__
__all__ = ('TinyDB', 'Storage', 'JSONStorage', 'Query', 'where')
diff --git a/tinydb/database.py b/tinydb/database.py
index a4ce0e1..30bc84a 100644
--- a/tinydb/database.py
+++ b/tinydb/database.py
@@ -1,15 +1,13 @@
"""
This module contains the main component of TinyDB: the database.
"""
-from typing import Dict, Iterator, Set, Type
+from typing import Dict, Iterator, Set
from . import JSONStorage
from .storages import Storage
from .table import Table, Document
-from .utils import with_typehint
-TableBase: Type[Table] = with_typehint(Table)
-class TinyDB(TableBase):
+class TinyDB:
"""
The main class of TinyDB.
@@ -75,6 +73,7 @@ class TinyDB(TableBase):
self._storage: Storage = storage(*args, **kwargs)
self._opened = True
self._tables: Dict[str, Table] = {}
+ self._table = self.table(self.default_table_name)
def __repr__(self):
args = ['tables={}'.format(list(self.tables())), 'tables_count={}'.
@@ -99,7 +98,11 @@ class TinyDB(TableBase):
:param name: The name of the table.
:param kwargs: Keyword arguments to pass to the table class constructor
"""
- pass
+ if name not in self._tables:
+ self._tables[name] = self.table_class(
+ self._storage, name, **kwargs
+ )
+ return self._tables[name]
def tables(self) ->Set[str]:
"""
@@ -107,13 +110,14 @@ class TinyDB(TableBase):
:returns: a set of table names
"""
- pass
+ return set(self._storage.read().keys())
def drop_tables(self) ->None:
"""
Drop all tables from the database. **CANNOT BE REVERSED!**
"""
- pass
+ self._storage.write({})
+ self._tables.clear()
def drop_table(self, name: str) ->None:
"""
@@ -121,7 +125,12 @@ class TinyDB(TableBase):
:param name: The name of the table to drop.
"""
- pass
+ data = self._storage.read()
+ if name in data:
+ del data[name]
+ self._storage.write(data)
+ if name in self._tables:
+ del self._tables[name]
@property
def storage(self) ->Storage:
@@ -131,7 +140,7 @@ class TinyDB(TableBase):
:return: This instance's storage
:rtype: Storage
"""
- pass
+ return self._storage
def close(self) ->None:
"""
@@ -148,7 +157,8 @@ class TinyDB(TableBase):
Upon leaving this context, the ``close`` method will be called.
"""
- pass
+ self._storage.close()
+ self._opened = False
def __enter__(self):
"""
@@ -190,3 +200,101 @@ class TinyDB(TableBase):
Return an iterator for the default table's documents.
"""
return iter(self.table(self.default_table_name))
+from .table import Table
+from .storages import Storage, JSONStorage
+from typing import Mapping, Dict, List, Union, Callable, Optional, Type, Any
+
+class TinyDB:
+ """
+ The main class of TinyDB.
+
+ The ``TinyDB`` class is responsible for creating the storage class instance
+ that will store this database's documents, managing the database
+ tables as well as providing access to the default table.
+
+ For table management, a simple ``dict`` is used that stores the table class
+ instances accessible using their table name.
+ """
+
+ def __init__(self, *args, **kwargs) -> None:
+ """
+ Create a new instance of TinyDB.
+
+ All arguments and keyword arguments will be passed to the underlying
+ storage class (default: :class:`~tinydb.storages.JSONStorage`).
+
+ :param storage: The class of the storage to use. Will be initialized
+ with ``args`` and ``kwargs``.
+ """
+ storage = kwargs.pop('storage', JSONStorage)
+
+ # Prepare the storage
+ self._storage = storage(*args, **kwargs)
+
+ self._opened = True
+
+ # Prepare the default table
+ self._table_cache: Dict[str, Table] = {}
+ self._table = self.table('_default')
+
+ def table(self, name: str, **kwargs) -> Table:
+ """
+ Get access to a specific table.
+
+ Creates a new table, if it doesn't exist.
+ """
+ if name not in self._table_cache:
+ self._table_cache[name] = Table(self._storage, name, **kwargs)
+
+ return self._table_cache[name]
+
+ def tables(self) -> List[str]:
+ """
+ Get the names of all tables in the database.
+ """
+ return list(self._storage.read().keys())
+
+ def drop_table(self, name: str) -> None:
+ """
+ Drop a table from the database.
+ """
+ if name in self._table_cache:
+ del self._table_cache[name]
+
+ data = self._storage.read()
+ if name in data:
+ del data[name]
+ self._storage.write(data)
+
+ def drop_tables(self) -> None:
+ """
+ Drop all tables from the database.
+ """
+ self._table_cache.clear()
+ self._storage.write({})
+
+ def close(self) -> None:
+ """
+ Close the database.
+ """
+ self._opened = False
+ self._storage.close()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+ def __getattr__(self, name):
+ """
+ Forward all unknown attribute calls to the underlying standard table.
+ """
+ return getattr(self._table, name)
+
+ @property
+ def storage(self) -> Storage:
+ """
+ Get the storage instance.
+ """
+ return self._storage
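Note that this hunk appends a second TinyDB class to database.py, shadowing the first definition above it. Its __getattr__ forwarding cannot satisfy len(db) or iter(db), because Python looks up special methods on the class rather than the instance; that is exactly what test_empty_db_len and test_tinydb_is_iterable report. A sketch of the explicit dunders those tests appear to expect (assuming the Table class provides __len__ and __iter__, as in upstream TinyDB):

    def __len__(self) -> int:
        # Delegate to the default table; __getattr__ is never consulted for len()
        return len(self._table)

    def __iter__(self):
        # Delegate iteration to the default table as well
        return iter(self._table)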
diff --git a/tinydb/middlewares.py b/tinydb/middlewares.py
index 50c2af2..5a47a37 100644
--- a/tinydb/middlewares.py
+++ b/tinydb/middlewares.py
@@ -84,8 +84,29 @@ class CachingMiddleware(Middleware):
self.cache = None
self._cache_modified_count = 0
+ def read(self):
+ """
+ Read data from cache if available, otherwise read from storage.
+ """
+ if self.cache is None:
+ self.cache = self.storage.read()
+ return self.cache
+
+ def write(self, data):
+ """
+ Write data to cache and increment the modified count.
+ Flush to storage if the modified count reaches WRITE_CACHE_SIZE.
+ """
+ self.cache = data
+ self._cache_modified_count += 1
+
+ if self._cache_modified_count >= self.WRITE_CACHE_SIZE:
+ self.flush()
+
def flush(self):
"""
Flush all unwritten data to disk.
"""
- pass
+ if self.cache is not None:
+ self.storage.write(self.cache)
+ self._cache_modified_count = 0
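test_caching_write closes the middleware and then expects the wrapped MemoryStorage to contain the cached document, so flushing has to happen on close as well, not only once WRITE_CACHE_SIZE is reached. A sketch of the close() method that test appears to expect (assumption: the Middleware base class does not flush on its own):

    def close(self):
        # Push any cached, unwritten data down to the real storage first,
        # then close the underlying storage handle.
        self.flush()
        self.storage.close()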
diff --git a/tinydb/operations.py b/tinydb/operations.py
index fdfa678..4c82060 100644
--- a/tinydb/operations.py
+++ b/tinydb/operations.py
@@ -13,39 +13,56 @@ def delete(field):
"""
Delete a given field from the document.
"""
- pass
+ def transform(doc):
+ if field in doc:
+ del doc[field]
+ return transform
def add(field, n):
"""
Add ``n`` to a given field in the document.
"""
- pass
+ def transform(doc):
+ if field in doc:
+ doc[field] += n
+ return transform
def subtract(field, n):
"""
Subtract ``n`` to a given field in the document.
"""
- pass
+ def transform(doc):
+ if field in doc:
+ doc[field] -= n
+ return transform
def set(field, val):
"""
Set a given field to ``val``.
"""
- pass
+ def transform(doc):
+ doc[field] = val
+ return transform
def increment(field):
"""
Increment a given field in the document by 1.
"""
- pass
+ def transform(doc):
+ if field in doc:
+ doc[field] += 1
+ return transform
def decrement(field):
"""
Decrement a given field in the document by 1.
"""
- pass
+ def transform(doc):
+ if field in doc:
+ doc[field] -= 1
+ return transform
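For reference, these transforms are applied through Table.update, as in tests/test_operations.py. A usage sketch (with the patch as submitted it still fails, because insert() trips over the storage.read() None issue described above):

    from tinydb import TinyDB, where
    from tinydb.operations import increment, delete
    from tinydb.storages import MemoryStorage

    db = TinyDB(storage=MemoryStorage)
    db.insert({'char': 'a', 'int': 1})
    db.update(increment('int'), where('char') == 'a')   # 'int': 1 -> 2
    db.update(delete('int'), where('char') == 'a')      # removes the 'int' field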
diff --git a/tinydb/queries.py b/tinydb/queries.py
index 0ad5c7e..35a89c4 100644
--- a/tinydb/queries.py
+++ b/tinydb/queries.py
@@ -181,7 +181,18 @@ class Query(QueryInstance):
:param hashval: The hash of the query.
:return: A :class:`~tinydb.queries.QueryInstance` object
"""
- pass
+ if not self._path and not allow_empty_path:
+ raise RuntimeError('Query has no path')
+
+ def runner(value):
+ try:
+ for part in self._path:
+ value = value[part]
+ return test(value)
+ except (KeyError, TypeError):
+ return False
+
+ return QueryInstance(runner, hashval)
def __eq__(self, rhs: Any):
"""
@@ -255,7 +266,7 @@ class Query(QueryInstance):
>>> Query().f1.exists()
"""
- pass
+ return self._generate_test(lambda _: True, ('exists', self._path))
def matches(self, regex: str, flags: int=0) ->QueryInstance:
"""
@@ -266,7 +277,10 @@ class Query(QueryInstance):
:param regex: The regular expression to use for matching
:param flags: regex flags to pass to ``re.match``
"""
- pass
+ return self._generate_test(
+ lambda value: re.match(regex, value, flags) is not None,
+ ('matches', self._path, regex, flags)
+ )
def search(self, regex: str, flags: int=0) ->QueryInstance:
"""
@@ -278,7 +292,10 @@ class Query(QueryInstance):
:param regex: The regular expression to use for matching
:param flags: regex flags to pass to ``re.match``
"""
- pass
+ return self._generate_test(
+ lambda value: re.search(regex, value, flags) is not None,
+ ('search', self._path, regex, flags)
+ )
def test(self, func: Callable[[Mapping], bool], *args) ->QueryInstance:
"""
@@ -300,7 +317,10 @@ class Query(QueryInstance):
argument
:param args: Additional arguments to pass to the test function
"""
- pass
+ return self._generate_test(
+ lambda value: func(value, *args),
+ ('test', self._path, func, args)
+ )
def any(self, cond: Union[QueryInstance, List[Any]]) ->QueryInstance:
"""
@@ -324,7 +344,14 @@ class Query(QueryInstance):
a list of which at least one document has to be contained
in the tested document.
"""
- pass
+ if isinstance(cond, QueryInstance):
+ def test(value):
+ return any(cond(item) for item in value)
+ else:
+ def test(value):
+ return any(item in cond for item in value)
+
+ return self._generate_test(test, ('any', self._path, freeze(cond)))
def all(self, cond: Union['QueryInstance', List[Any]]) ->QueryInstance:
"""
@@ -346,7 +373,14 @@ class Query(QueryInstance):
:param cond: Either a query that all documents have to match or a list
which has to be contained in the tested document.
"""
- pass
+ if isinstance(cond, QueryInstance):
+ def test(value):
+ return all(cond(item) for item in value)
+ else:
+ def test(value):
+ return all(item in value for item in cond)
+
+ return self._generate_test(test, ('all', self._path, freeze(cond)))
def one_of(self, items: List[Any]) ->QueryInstance:
"""
@@ -356,7 +390,8 @@ class Query(QueryInstance):
:param items: The list of items to check with
"""
- pass
+ return self._generate_test(lambda value: value in items,
+ ('one_of', self._path, freeze(items)))
def noop(self) ->QueryInstance:
"""
@@ -364,18 +399,21 @@ class Query(QueryInstance):
Useful for having a base value when composing queries dynamically.
"""
- pass
+ return self._generate_test(lambda _: True, ('noop',), allow_empty_path=True)
def map(self, fn: Callable[[Any], Any]) ->'Query':
"""
Add a function to the query path. Similar to __getattr__ but for
arbitrary functions.
"""
- pass
+ query = type(self)()
+ query._path = self._path + (fn,)
+ query._hash = ('map', query._path) if self.is_cacheable() else None
+ return query
def where(key: str) ->Query:
"""
A shorthand for ``Query()[key]``
"""
- pass
+ return Query()[key]
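A short usage sketch of the query API these hunks fill in (assuming the comparison operators such as == already delegate to _generate_test, as in upstream TinyDB):

    from tinydb import Query, where

    User = Query()
    User.name == 'John'                  # equality test on a document field
    User.name.matches(r'Jo.*')           # regex anchored at the start of the value
    User.groups.any(['admin', 'staff'])  # membership test against a list field
    where('age').one_of([20, 25, 30])    # where() shorthand for Query()['age']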
diff --git a/tinydb/storages.py b/tinydb/storages.py
index 0ddc223..16f317b 100644
--- a/tinydb/storages.py
+++ b/tinydb/storages.py
@@ -18,7 +18,12 @@ def touch(path: str, create_dirs: bool):
:param path: The file to create.
:param create_dirs: Whether to create all missing parent directories.
"""
- pass
+ if create_dirs:
+ os.makedirs(os.path.dirname(path), exist_ok=True)
+
+ if not os.path.exists(path):
+ with open(path, 'a'):
+ os.utime(path, None)
class Storage(ABC):
@@ -38,7 +43,7 @@ class Storage(ABC):
Return ``None`` here to indicate that the storage is empty.
"""
- pass
+ raise NotImplementedError("This method needs to be implemented by a subclass")
@abstractmethod
def write(self, data: Dict[str, Dict[str, Any]]) ->None:
@@ -49,7 +54,7 @@ class Storage(ABC):
:param data: The current state of the database.
"""
- pass
+ raise NotImplementedError("This method needs to be implemented by a subclass")
def close(self) ->None:
"""
@@ -88,6 +93,23 @@ class JSONStorage(Storage):
if any([(character in self._mode) for character in ('+', 'w', 'a')]):
touch(path, create_dirs=create_dirs)
self._handle = open(path, mode=self._mode, encoding=encoding)
+ self.path = path
+ self.encoding = encoding
+
+ def read(self) ->Optional[Dict[str, Dict[str, Any]]]:
+ self._handle.seek(0)
+ try:
+ return json.load(self._handle)
+ except ValueError:
+ return None
+
+ def write(self, data: Dict[str, Dict[str, Any]]) ->None:
+ self._handle.seek(0)
+ json.dump(data, self._handle, **self.kwargs)
+ self._handle.truncate()
+
+ def close(self) ->None:
+ self._handle.close()
class MemoryStorage(Storage):
@@ -101,3 +123,12 @@ class MemoryStorage(Storage):
"""
super().__init__()
self.memory = None
+
+ def read(self) ->Optional[Dict[str, Dict[str, Any]]]:
+ return self.memory
+
+ def write(self, data: Dict[str, Dict[str, Any]]) ->None:
+ self.memory = data
+
+ def close(self) ->None:
+ pass
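The MemoryStorage implementation above honours the Storage contract literally: an empty storage reads back as None, not as {}. A small round-trip sketch of that behaviour, which is precisely what the table layer then fails to handle:

    from tinydb.storages import MemoryStorage

    storage = MemoryStorage()
    assert storage.read() is None              # empty until the first write
    storage.write({'_default': {}})
    assert storage.read() == {'_default': {}}  # later reads return the written data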
diff --git a/tinydb/table.py b/tinydb/table.py
index 48eea63..9ed928b 100644
--- a/tinydb/table.py
+++ b/tinydb/table.py
@@ -4,9 +4,9 @@ data in TinyDB.
"""
from typing import Callable, Dict, Iterable, Iterator, List, Mapping, Optional, Union, cast, Tuple
from .queries import QueryLike
-from .storages import Storage
+from .storages import Storage, MemoryStorage
from .utils import LRUCache
-__all__ = 'Document', 'Table'
+__all__ = 'Document', 'Table', 'TableBase'
class Document(dict):
@@ -64,15 +64,10 @@ class Table:
query_cache_class = LRUCache
default_query_cache_capacity = 10
- def __init__(self, storage: Storage, name: str, cache_size: int=
- default_query_cache_capacity):
- """
- Create a table instance.
- """
+ def __init__(self, storage: Storage, name: str, cache_size: int = default_query_cache_capacity):
self._storage = storage
self._name = name
- self._query_cache: LRUCache[QueryLike, List[Document]
- ] = self.query_cache_class(capacity=cache_size)
+ self._query_cache: LRUCache[QueryLike, List[Document]] = self.query_cache_class(capacity=cache_size)
self._next_id = None
def __repr__(self):
@@ -85,14 +80,14 @@ class Table:
"""
Get the table name.
"""
- pass
+ return self._name
@property
def storage(self) ->Storage:
"""
Get the table storage instance.
"""
- pass
+ return self._storage
def insert(self, document: Mapping) ->int:
"""
@@ -101,7 +96,10 @@ class Table:
:param document: the document to insert
:returns: the inserted document's ID
"""
- pass
+ doc_id = self._get_next_id()
+ self._update_table(lambda table: table.update({doc_id: document}))
+ self.clear_cache()
+ return doc_id
def insert_multiple(self, documents: Iterable[Mapping]) ->List[int]:
"""
@@ -110,7 +108,15 @@ class Table:
:param documents: an Iterable of documents to insert
:returns: a list containing the inserted documents' IDs
"""
- pass
+ doc_ids = []
+ def updater(table):
+ for document in documents:
+ doc_id = self._get_next_id()
+ table[doc_id] = document
+ doc_ids.append(doc_id)
+ self._update_table(updater)
+ self.clear_cache()
+ return doc_ids
def all(self) ->List[Document]:
"""
@@ -118,7 +124,8 @@ class Table:
:returns: a list with all documents.
"""
- pass
+ return [self.document_class(doc, self.document_id_class(doc_id))
+ for doc_id, doc in self._read_table().items()]
def search(self, cond: QueryLike) ->List[Document]:
"""
@@ -127,7 +134,14 @@ class Table:
:param cond: the condition to check against
:returns: list of matching documents
"""
- pass
+ if cond in self._query_cache:
+ return self._query_cache[cond]
+
+ docs = [self.document_class(doc, self.document_id_class(doc_id))
+ for doc_id, doc in self._read_table().items()
+ if cond(doc)]
+ self._query_cache[cond] = docs
+ return docs
def get(self, cond: Optional[QueryLike]=None, doc_id: Optional[int]=
None, doc_ids: Optional[List]=None) ->Optional[Union[Document, List
@@ -145,7 +159,23 @@ class Table:
:returns: the document(s) or ``None``
"""
- pass
+ if doc_id is not None:
+ table = self._read_table()
+ if doc_id in table:
+ return self.document_class(table[doc_id], self.document_id_class(doc_id))
+ return None
+
+ if doc_ids is not None:
+ table = self._read_table()
+ return [self.document_class(table[id], self.document_id_class(id))
+ for id in doc_ids if id in table]
+
+ if cond is not None:
+ docs = self.search(cond)
+ if docs:
+ return docs[0]
+
+ return None
def contains(self, cond: Optional[QueryLike]=None, doc_id: Optional[int
]=None) ->bool:
@@ -158,7 +188,10 @@ class Table:
:param cond: the condition use
:param doc_id: the document ID to look for
"""
- pass
+ if doc_id is not None:
+ return doc_id in self._read_table()
+
+ return bool(self.get(cond))
def update(self, fields: Union[Mapping, Callable[[Mapping], None]],
cond: Optional[QueryLike]=None, doc_ids: Optional[Iterable[int]]=None
@@ -172,7 +205,20 @@ class Table:
:param doc_ids: a list of document IDs
:returns: a list containing the updated document's ID
"""
- pass
+ updated_ids = []
+
+ def updater(table):
+ for doc_id, doc in table.items():
+ if (doc_ids is None or doc_id in doc_ids) and (cond is None or cond(doc)):
+ if callable(fields):
+ fields(doc)
+ else:
+ doc.update(fields)
+ updated_ids.append(doc_id)
+
+ self._update_table(updater)
+ self.clear_cache()
+ return updated_ids
def update_multiple(self, updates: Iterable[Tuple[Union[Mapping,
Callable[[Mapping], None]], QueryLike]]) ->List[int]:
@@ -181,7 +227,21 @@ class Table:
:returns: a list containing the updated document's ID
"""
- pass
+ updated_ids = []
+
+ def updater(table):
+ for fields, cond in updates:
+ for doc_id, doc in table.items():
+ if cond(doc):
+ if callable(fields):
+ fields(doc)
+ else:
+ doc.update(fields)
+ updated_ids.append(doc_id)
+
+ self._update_table(updater)
+ self.clear_cache()
+ return updated_ids
def upsert(self, document: Mapping, cond: Optional[QueryLike]=None) ->List[
int]:
@@ -197,7 +257,18 @@ class Table:
Document with a doc_id
:returns: a list containing the updated documents' IDs
"""
- pass
+ if isinstance(document, Document):
+ doc_id = document.doc_id
+ document = dict(document)
+ del document['doc_id']
+ if cond is None:
+ cond = lambda doc: doc.doc_id == doc_id
+
+ updated = self.update(document, cond)
+ if updated:
+ return updated
+ else:
+ return [self.insert(document)]
def remove(self, cond: Optional[QueryLike]=None, doc_ids: Optional[
Iterable[int]]=None) ->List[int]:
@@ -208,13 +279,29 @@ class Table:
:param doc_ids: a list of document IDs
:returns: a list containing the removed documents' ID
"""
- pass
+ removed = []
+
+ def updater(table):
+ nonlocal removed
+ if doc_ids is not None:
+ removed = [doc_id for doc_id in doc_ids if doc_id in table]
+ for doc_id in removed:
+ del table[doc_id]
+ else:
+ removed = [doc_id for doc_id, doc in table.items() if cond is None or cond(doc)]
+ for doc_id in removed:
+ del table[doc_id]
+
+ self._update_table(updater)
+ self.clear_cache()
+ return removed
def truncate(self) ->None:
"""
Truncate the table by removing all documents.
"""
- pass
+ self._update_table(lambda table: table.clear())
+ self.clear_cache()
def count(self, cond: QueryLike) ->int:
"""
@@ -222,7 +309,7 @@ class Table:
:param cond: the condition use
"""
- pass
+ return len(self.search(cond))
def clear_cache(self) ->None:
"""
@@ -249,7 +336,11 @@ class Table:
"""
Return the ID for a newly inserted document.
"""
- pass
+ if self._next_id is None:
+ self._next_id = max(self._read_table().keys() or [0]) + 1
+ else:
+ self._next_id += 1
+ return self._next_id
def _read_table(self) ->Dict[str, Mapping]:
"""
@@ -259,7 +350,8 @@ class Table:
we may not want to convert *all* documents when returning
only one document for example.
"""
- pass
+ data = self._storage.read()
+ return data.get(self._name, {})
def _update_table(self, updater: Callable[[Dict[int, Mapping]], None]):
"""
@@ -274,4 +366,8 @@ class Table:
As a further optimization, we don't convert the documents into the
document class, as the table data will *not* be returned to the user.
"""
- pass
+ data = self._storage.read()
+ table = data.get(self._name, {})
+ updater(table)
+ data[self._name] = table
+ self._storage.write(data)
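Both _read_table and _update_table call .get() on whatever the storage returns, which is None for a fresh database; that single missing guard accounts for the bulk of the 170 failures. A sketch of the guarded versions (assumption: an empty storage is treated as an empty dict, as upstream TinyDB does). The DID-NOT-RAISE failures of test_insert_multiple_with_single_dict additionally suggest that insert/insert_multiple are expected to reject non-mapping inputs with a ValueError, which the hunks above never check.

    def _read_table(self) -> Dict[str, Mapping]:
        data = self._storage.read() or {}   # empty storage -> empty database
        return data.get(self._name, {})

    def _update_table(self, updater: Callable[[Dict[int, Mapping]], None]):
        data = self._storage.read() or {}   # same guard on the write path
        table = data.get(self._name, {})
        updater(table)
        data[self._name] = table
        self._storage.write(data)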
diff --git a/tinydb/utils.py b/tinydb/utils.py
index 0721622..a9c20dd 100644
--- a/tinydb/utils.py
+++ b/tinydb/utils.py
@@ -66,6 +66,9 @@ class LRUCache(abc.MutableMapping, Generic[K, V]):
return iter(self.cache)
+def _immutable(*args, **kwargs):
+ raise TypeError("object is immutable")
+
class FrozenDict(dict):
"""
An immutable dictionary.
@@ -82,6 +85,7 @@ class FrozenDict(dict):
clear = _immutable
setdefault = _immutable
popitem = _immutable
+ update = _immutable
def freeze(obj):
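The utils.py hunk only hardens FrozenDict; it leaves LRUCache.set and freeze unimplemented, which is what every test_utils failure points at (cache["a"] = 1 ends in "'LRUCache' object has no attribute 'set'", and freeze(...) returns None). A sketch of the two missing pieces, assuming the OrderedDict-backed cache of upstream TinyDB (self.cache, self.capacity):

    # LRUCache method:
    def set(self, key, value):
        # Insert or refresh an entry and evict the least recently used one
        # once the configured capacity is exceeded.
        if key in self.cache:
            self.cache[key] = value
            self.cache.move_to_end(key, last=True)
        else:
            self.cache[key] = value
            if self.capacity is not None and len(self.cache) > self.capacity:
                self.cache.popitem(last=False)

    # Module-level helper:
    def freeze(obj):
        # Recursively convert mutable containers into immutable ones so that
        # freeze([...]) returns a tuple, as test_freeze checks.
        if isinstance(obj, dict):
            return FrozenDict((k, freeze(v)) for k, v in obj.items())
        if isinstance(obj, list):
            return tuple(freeze(el) for el in obj)
        if isinstance(obj, set):
            return frozenset(obj)
        return obj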