8.5.0 #545

Merged · 10 commits · May 9, 2025

Changes from all commits
103 changes: 51 additions & 52 deletions .github/workflows/main.yaml
@@ -1,61 +1,60 @@
name: Unit Tests
name: CI

on:
push:
branches: [ "master", "dev" ]
pull_request:
branches: [ "master", "dev" ]
push: { branches: [master, dev] }
pull_request: { branches: [master, dev] }

jobs:
build:
env:
DEFAULT_PYTHON: 3.12
runs-on: ubuntu-latest
env:
DEFAULT_PYTHON: '3.12'
strategy:
matrix:
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
architecture: ["x64"]
python-version: ['3.9','3.10','3.11','3.12','3.13']
architecture: ['x64']

steps:
- uses: actions/checkout@v2
- name: Setup Python ${{ matrix.python-version }} on ${{ matrix.architecture }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
architecture: ${{ matrix.architecture }}
- name: Cache pip
env:
PYO3_USE_ABI3_FORWARD_COMPATIBILITY: "1"
uses: actions/cache@v4
with:
restore-keys: |
${{ runner.os }}-
- name: Upgrade setuptools
if: matrix.python-version >= 3.12
run: |
# workaround for 3.12, SEE: https://github.com/pypa/setuptools/issues/3661#issuecomment-1813845177
pip install --upgrade setuptools
- name: Lint with flake8
if: matrix.python-version == ${{ env.DEFAULT_PYTHON }}
run: |
# stop the build if there are Python syntax errors or undefined names
nox -e flake8 -- deepdiff --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
nox -e flake8 -- deepdiff --count --exit-zero --max-complexity=26 --max-line-length=250 --statistics
- name: Test with pytest and get the coverage
if: matrix.python-version == ${{ env.DEFAULT_PYTHON }}
run: |
nox -e pytest -s -- --benchmark-disable --cov-report=xml --cov=deepdiff tests/ --runslow
- name: Test with pytest and no coverage report
if: matrix.python-version != ${{ env.DEFAULT_PYTHON }}
run: |
nox -e pytest -s -- --benchmark-disable tests/
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4
if: matrix.python-version == ${{ env.DEFAULT_PYTHON }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
file: ./coverage.xml
token: ${{ secrets.CODECOV_TOKEN }}
env_vars: OS,PYTHON
fail_ci_if_error: true
- uses: actions/checkout@v3

- name: Setup Python
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
architecture: ${{ matrix.architecture }}
cache: pip
cache-dependency-path: pyproject.toml

- name: Install nox
run: pip install nox==2025.5.1
- name: Upgrade setuptools & wheel (for all venvs)
run: pip install --upgrade setuptools wheel

- name: Lint with flake8
if: ${{ matrix.python-version == '3.12' }}
run: |
nox -s flake8 -- deepdiff --count --select=E9,F63,F7,F82 --show-source --statistics
nox -s flake8 -- deepdiff --count --exit-zero --max-complexity=26 --max-line-length=250 --statistics
- name: Test with pytest (no coverage)
if: ${{ matrix.python-version != '3.12' }}
run: |
nox -s pytest-${{ matrix.python-version }} -- --benchmark-disable tests/
- name: Test with pytest (+ coverage)
if: ${{ matrix.python-version == '3.12' }}
run: |
nox -s pytest-${{ matrix.python-version }} -- \
--benchmark-disable \
--cov-report=xml \
--cov=deepdiff \
tests/ --runslow
- name: Upload coverage
if: ${{ matrix.python-version == '3.12' }}
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
file: coverage.xml
env_vars: OS,PYTHON
fail_ci_if_error: true
9 changes: 4 additions & 5 deletions deepdiff/diff.py
@@ -952,11 +952,10 @@ def _diff_by_forming_pairs_and_comparing_one_by_one(
self._report_result('iterable_item_moved', change_level, local_tree=local_tree)

if self.iterable_compare_func:
# Intentionally setting j as the first child relationship param in cases of a moved item.
# If the item was moved using an iterable_compare_func then we want to make sure that the index
# is relative to t2.
reference_param1 = j
reference_param2 = i
# Mark additional context denoting that we have moved an item.
# This will allow for correctly setting paths relative to t2 when using an iterable_compare_func
level.additional["moved"] = True

else:
continue
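
The hunk above replaces the earlier reference_param swap with a level.additional["moved"] flag, so that paths for changes inside a moved item can later be reported relative to t2. Below is a minimal sketch of the behavior this enables; the compare function and sample data are illustrative, not taken from this PR.

    from deepdiff import DeepDiff, Delta

    def compare_by_id(x, y, level=None):
        # Treat two dicts as the "same" element when their ids match, so a
        # reorder shows up as iterable_item_moved rather than remove + add.
        return x.get("id") == y.get("id")

    t1 = [{"id": 1, "val": 1}, {"id": 2, "val": 2}]
    t2 = [{"id": 2, "val": 2}, {"id": 1, "val": 10}]

    diff = DeepDiff(t1, t2, iterable_compare_func=compare_by_id, verbose_level=2)
    # The values_changed entry for the moved element is keyed so that applying
    # the delta to t1 reproduces t2, which is what the "moved" flag enables.
    assert t1 + Delta(diff) == t2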

13 changes: 10 additions & 3 deletions deepdiff/helper.py
@@ -58,6 +58,7 @@ def __repr__(self):
np_complex128 = np_type # pragma: no cover.
np_cdouble = np_type # pragma: no cover.
np_complexfloating = np_type # pragma: no cover.
np_datetime64 = np_type # pragma: no cover.
else:
np_array_factory = np.array
np_ndarray = np.ndarray
@@ -80,6 +81,7 @@ def __repr__(self):
np_complex128 = np.complex128
np_cdouble = np.cdouble # np.complex_ is an alias for np.cdouble and is being removed by NumPy 2.0
np_complexfloating = np.complexfloating
np_datetime64 = np.datetime64

numpy_numbers = (
np_int8, np_int16, np_int32, np_int64, np_uint8,
@@ -93,6 +95,7 @@ def __repr__(self):

numpy_dtypes = set(numpy_numbers)
numpy_dtypes.add(np_bool_) # type: ignore
numpy_dtypes.add(np_datetime64) # type: ignore

numpy_dtype_str_to_type = {
item.__name__: item for item in numpy_dtypes
@@ -184,10 +187,10 @@ def get_semvar_as_integer(version):
bytes_type = bytes
only_complex_number = (complex,) + numpy_complex_numbers
only_numbers = (int, float, complex, Decimal) + numpy_numbers
datetimes = (datetime.datetime, datetime.date, datetime.timedelta, datetime.time)
datetimes = (datetime.datetime, datetime.date, datetime.timedelta, datetime.time, np_datetime64)
ipranges = (ipaddress.IPv4Interface, ipaddress.IPv6Interface, ipaddress.IPv4Network, ipaddress.IPv6Network)
uuids = (uuid.UUID, )
times = (datetime.datetime, datetime.time)
times = (datetime.datetime, datetime.time,np_datetime64)
numbers: Tuple = only_numbers + datetimes
booleans = (bool, np_bool_)

@@ -733,13 +736,17 @@ def detailed__dict__(obj, ignore_private_variables=True, ignore_keys=frozenset()
ignore_private_variables and key.startswith('__') and not key.startswith(private_var_prefix)
):
del result[key]
if isinstance(obj, PydanticBaseModel):
getter = lambda x, y: getattr(type(x), y)
else:
getter = getattr
for key in dir(obj):
if key not in result and key not in ignore_keys and (
not ignore_private_variables or (
ignore_private_variables and not key.startswith('__') and not key.startswith(private_var_prefix)
)
):
value = getattr(obj, key)
value = getter(obj, key)
if not callable(value):
result[key] = value
return result
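
With np_datetime64 added to the datetimes, times and numpy_dtypes groups, NumPy datetimes are hashed and diffed like native datetime objects. A minimal sketch, mirroring the new tests further down (the values are illustrative):

    import numpy as np
    from deepdiff import DeepDiff

    d1 = np.datetime64("2023-07-05T10:11:12")
    d2 = np.datetime64("2024-07-05T10:11:12")

    # Equal instants produce no diff.
    assert DeepDiff(d1, np.datetime64("2023-07-05T10:11:12")) == {}
    # Different instants are reported as an ordinary values_changed at root.
    print(DeepDiff(d1, d2))
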
27 changes: 20 additions & 7 deletions deepdiff/model.py
@@ -221,10 +221,11 @@ def _from_tree_value_changed(self, tree):

def _from_tree_iterable_item_moved(self, tree):
if 'iterable_item_moved' in tree and self.verbose_level > 1:

for change in tree['iterable_item_moved']:
the_changed = {'new_path': change.path(use_t2=True), 'value': change.t2}
the_changed = {'new_path': change.path(use_t2=True, reporting_move=True), 'value': change.t2}
self['iterable_item_moved'][change.path(
force=FORCE_DEFAULT)] = the_changed
force=FORCE_DEFAULT, use_t2=False, reporting_move=True)] = the_changed

def _from_tree_unprocessed(self, tree):
if 'unprocessed' in tree:
@@ -428,11 +429,11 @@ def _from_tree_iterable_item_moved(self, tree):
if 'iterable_item_moved' in tree:
for change in tree['iterable_item_moved']:
if (
change.up.path(force=FORCE_DEFAULT) not in self["_iterable_opcodes"]
change.up.path(force=FORCE_DEFAULT, reporting_move=True) not in self["_iterable_opcodes"]
):
the_changed = {'new_path': change.path(use_t2=True), 'value': change.t2}
the_changed = {'new_path': change.path(use_t2=True, reporting_move=True), 'value': change.t2}
self['iterable_item_moved'][change.path(
force=FORCE_DEFAULT)] = the_changed
force=FORCE_DEFAULT, reporting_move=True)] = the_changed


class DiffLevel:
@@ -673,7 +674,7 @@ def get_root_key(self, use_t2=False):
return next_rel.param
return notpresent

def path(self, root="root", force=None, get_parent_too=False, use_t2=False, output_format='str'):
def path(self, root="root", force=None, get_parent_too=False, use_t2=False, output_format='str', reporting_move=False):
"""
A python syntax string describing how to descend to this level, assuming the top level object is called root.
Returns None if the path is not representable as a string.
@@ -699,6 +700,9 @@ def path(self, root="root", force=None, get_parent_too=False, use_t2=False, outp
:param output_format: The format of the output. The options are 'str' which is the default and produces a
string representation of the path or 'list' to produce a list of keys and attributes
that produce the path.
:param reporting_move: This should be set to true if and only if we are reporting on iterable_item_moved.
All other cases should leave this set to False.
"""
# TODO: We could optimize this by building on top of self.up's path if it is cached there
cache_key = "{}{}{}{}".format(force, get_parent_too, use_t2, output_format)
@@ -720,7 +724,16 @@ def path(self, root="root", force=None, get_parent_too=False, use_t2=False, outp
# traverse all levels of this relationship
while level and level is not self:
# get this level's relationship object
if use_t2:
if level.additional.get("moved") and not reporting_move:
# To ensure we can properly replay items such as values_changed in items that may have moved, we
# need to make sure that all paths are reported relative to t2 if a level has reported a move.
# If we are reporting a move, the path is already correct and does not need to be swapped.
# Additional context of "moved" is only ever set if using iterable_compare_func and a move has taken place.
level_use_t2 = not use_t2
else:
level_use_t2 = use_t2

if level_use_t2:
next_rel = level.t2_child_rel or level.t1_child_rel
else:
next_rel = level.t1_child_rel or level.t2_child_rel # next relationship object to get a formatted param from
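
The new reporting_move parameter is meant to be set only by the iterable_item_moved reporters: when a level carries the "moved" flag and reporting_move is False, path() flips use_t2 so that replayed paths stay relative to t2. The effect can be observed directly in the tree view; the compare function and data below are illustrative, not from this PR.

    from deepdiff import DeepDiff

    def compare_by_id(x, y, level=None):
        return x.get("id") == y.get("id")

    t1 = [{"id": 1, "val": 1}, {"id": 2, "val": 2}]
    t2 = [{"id": 2, "val": 2}, {"id": 1, "val": 10}]

    tree = DeepDiff(t1, t2, iterable_compare_func=compare_by_id,
                    verbose_level=2, view='tree')
    for level in tree.get('iterable_item_moved', []):
        # reporting_move=True keeps the original t1-relative key;
        # use_t2=True gives the destination path in t2.
        print(level.path(reporting_move=True), '->',
              level.path(use_t2=True, reporting_move=True))
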
6 changes: 4 additions & 2 deletions deepdiff/operator.py
@@ -1,8 +1,10 @@
import re
from typing import Any, Optional, List
from typing import Any, Optional, List, TYPE_CHECKING
from abc import ABCMeta, abstractmethod
from deepdiff.helper import convert_item_or_items_into_compiled_regexes_else_none

if TYPE_CHECKING:
from deepdiff import DeepDiff

(Codecov / codecov/patch: added line deepdiff/operator.py#L7 was not covered by tests)


class BaseOperatorPlus(metaclass=ABCMeta):
@@ -16,7 +18,7 @@
pass

@abstractmethod
def give_up_diffing(self, level, diff_instance: float) -> bool:
def give_up_diffing(self, level, diff_instance: "DeepDiff") -> bool:
"""
Given a level which includes t1 and t2 in the tree view, and the "distance" between l1 and l2.
do we consider t1 and t2 to be equal or not. The distance is a number between zero to one and is calculated by DeepDiff to measure how similar objects are.
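
The annotation fix makes clear that give_up_diffing receives the running DeepDiff instance, not a float. The custom operator below is an illustrative sketch using the corrected signature; it is not part of this PR, and assumes the three abstract methods of BaseOperatorPlus shown in the docs.

    from typing import Any
    from deepdiff import DeepDiff
    from deepdiff.operator import BaseOperatorPlus

    class CaseInsensitiveStrings(BaseOperatorPlus):
        def match(self, level) -> bool:
            # Apply this operator only when both sides are strings.
            return isinstance(level.t1, str) and isinstance(level.t2, str)

        def give_up_diffing(self, level, diff_instance: "DeepDiff") -> bool:
            # Returning True tells the DeepDiff instance to stop diffing this branch.
            return level.t1.lower() == level.t2.lower()

        def normalize_value_for_hashing(self, parent: Any, obj: Any) -> Any:
            return obj.lower() if isinstance(obj, str) else obj

    print(DeepDiff({"name": "Alice"}, {"name": "ALICE"},
                   custom_operators=[CaseInsensitiveStrings()]))  # expected: {}
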
2 changes: 1 addition & 1 deletion deepdiff/serialization.py
@@ -615,7 +615,7 @@ def _serialize_tuple(value):
}

if PydanticBaseModel is not pydantic_base_model_type:
JSON_CONVERTOR[PydanticBaseModel] = lambda x: x.dict()
JSON_CONVERTOR[PydanticBaseModel] = lambda x: x.model_dump()


def json_convertor_default(default_mapping=None):
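
Switching the converter from dict() to model_dump() tracks the Pydantic v2 API. A short hedged sketch, assuming Pydantic v2 is installed (the model and data are illustrative):

    from pydantic import BaseModel
    from deepdiff import DeepDiff

    class User(BaseModel):
        name: str
        age: int

    t1 = [User(name="a", age=1)]
    t2 = [User(name="a", age=1), User(name="b", age=2)]

    diff = DeepDiff(t1, t2, verbose_level=2)
    # The added User instance inside the result is serialized via model_dump().
    print(diff.to_json())
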
2 changes: 1 addition & 1 deletion docs/custom.rst
@@ -202,7 +202,7 @@ Base Operator Plus
pass
@abstractmethod
def give_up_diffing(self, level, diff_instance: float) -> bool:
def give_up_diffing(self, level, diff_instance: "DeepDiff") -> bool:
"""
Given a level which includes t1 and t2 in the tree view, and the "distance" between l1 and l2.
do we consider t1 and t2 to be equal or not. The distance is a number between zero to one and is calculated by DeepDiff to measure how similar objects are.
6 changes: 5 additions & 1 deletion pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "flit_core.buildapi"
name = "deepdiff"
version = "8.4.2"
dependencies = [
"orderly-set>=5.3.0,<6",
"orderly-set>=5.4.1,<6",
]
requires-python = ">=3.9"
authors = [
@@ -54,6 +54,7 @@ dev = [
"tomli-w~=1.2.0",
"pandas~=2.2.0",
"polars~=1.21.0",
"nox==2025.5.1",
]
docs = [
# We use the html style that is not supported in Sphinx 7 anymore.
@@ -73,6 +74,9 @@ test = [
"pytest-cov~=6.0.0",
"python-dotenv~=1.0.0",
]
optimize = [
"orjson",
]

[project.scripts]
deep = "deepdiff.commands:cli"
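
The new optimize extra makes orjson installable via pip install "deepdiff[optimize]". A hedged sketch, assuming deepdiff.serialization.json_dumps uses orjson when it is importable and falls back to the standard library json otherwise:

    from deepdiff.serialization import json_dumps

    # With the "optimize" extra installed, orjson is importable and serialization
    # can take the faster path; without it, stdlib json is used.
    print(json_dumps({"a": 1, "b": [1, 2, 3]}))
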
260 changes: 253 additions & 7 deletions tests/test_delta.py
@@ -1880,21 +1880,74 @@ def test_compare_func1(self, compare_func_t1, compare_func_t2, compare_func_resu
assert compare_func_t2 == recreated_t2

def test_compare_func_with_duplicates_removed(self):
t1 = [{'id': 1, 'val': 1}, {'id': 2, 'val': 2}, {'id': 1, 'val': 3}, {'id': 3, 'val': 3}]
t2 = [{'id': 3, 'val': 3}, {'id': 2, 'val': 2}, {'id': 1, 'val': 3}]
t1 = [
{
'id': 1,
'val': 1,
"nested": [
{"id": 1, "val": 1},
{"id": 2, "val": 2},
]
},
{
'id': 2,
'val': 2
},
{
'id': 1,
'val': 3
},
{
'id': 3,
'val': 3
}
]
t2 = [
{
'id': 3,
'val': 3
},
{
'id': 2,
'val': 2
},
{
'id': 1,
'val': 3,
"nested":[
{
"id": 2,
"val": 3
},
]
}
]
ddiff = DeepDiff(t1, t2, iterable_compare_func=self.compare_func, verbose_level=2)
expected = {
"iterable_item_removed": {
"root[2]": {
"id": 1,
"val": 3
},

"root[2]['nested'][0]": {
"id": 1,
"val": 1
}
},
"iterable_item_moved": {
"root[0]": {
"new_path": "root[2]",
"value": {
"id": 1,
"val": 3,
"nested": [{"id": 2, "val": 3}, ]
},
},
"root[0]['nested'][1]": {
"new_path": "root[2]['nested'][0]",
"value": {
"id": 2,
"val": 3
}
},
@@ -1907,13 +1960,19 @@ def test_compare_func_with_duplicates_removed(self):
}
},
'values_changed': {
"root[2]['nested'][0]['val']": {
'new_path': "root[0]['nested'][1]['val']",
'new_value': 3,
'old_value': 2
},
"root[2]['val']": {
'new_value': 3,
'old_value': 1,
'new_path': "root[0]['val']"
}
},
}

assert expected == ddiff
delta = Delta(ddiff)
recreated_t2 = t1 + delta
@@ -1922,10 +1981,14 @@ def test_compare_func_with_duplicates_removed(self):
flat_result = delta.to_flat_rows()
flat_expected = [
{'path': [2, 'val'], 'value': 3, 'action': 'values_changed', 'type': int, 'new_path': [0, 'val']},
{'path': [2, 'nested', 0, 'val'], 'value': 3, 'action': 'values_changed', 'type': int, 'new_path': [0, 'nested', 1, 'val']},
{'path': [2, 'nested', 0], 'value': {'id': 1, 'val': 1}, 'action': 'iterable_item_removed', 'type': dict},
{'path': [2], 'value': {'id': 1, 'val': 3}, 'action': 'iterable_item_removed', 'type': dict},
{'path': [0], 'value': {'id': 1, 'val': 3}, 'action': 'iterable_item_removed', 'type': dict},
{'path': [0], 'value': {'id': 1, 'val': 3, 'nested': [{'id': 2, 'val': 3}]}, 'action': 'iterable_item_removed', 'type': dict},
{'path': [0, 'nested', 1], 'value': {'id': 2, 'val': 3}, 'action': 'iterable_item_removed', 'type': dict},
{'path': [3], 'value': {'id': 3, 'val': 3}, 'action': 'iterable_item_removed', 'type': dict},
{'path': [0], 'action': 'iterable_item_moved', 'value': {'id': 1, 'val': 3}, 'new_path': [2], 'type': dict},
{'path': [0], 'action': 'iterable_item_moved', 'value': {'id': 1, 'val': 3, 'nested': [{'id': 2, 'val': 3}]}, 'new_path': [2], 'type': dict},
{'path': [0, 'nested', 1], 'value': {'id': 2, 'val': 3}, 'action': 'iterable_item_moved', 'type': dict, 'new_path': [2, 'nested', 0]},
{'path': [3], 'action': 'iterable_item_moved', 'value': {'id': 3, 'val': 3}, 'new_path': [0], 'type': dict},
]
flat_expected = [FlatDeltaRow(**i) for i in flat_expected]
@@ -1942,18 +2005,35 @@ def test_compare_func_with_duplicates_removed(self):
},
'root[0]': {
'id': 1,
'val': 3
'val': 3,
'nested': [{'id': 2, 'val': 3}]
},
'root[3]': {
'id': 3,
'val': 3
},
"root[2]['nested'][0]": {
"id": 1,
"val": 1
},
"root[0]['nested'][1]": {
"id": 2,
"val": 3
}
},
'iterable_item_moved': {
'root[0]': {
'new_path': 'root[2]',
'value': {
'id': 1,
'val': 3,
'nested': [{'id': 2, 'val': 3}]
}
},
"root[0]['nested'][1]": {
'new_path': "root[2]['nested'][0]",
'value': {
'id': 2,
'val': 3
}
},
@@ -1968,8 +2048,12 @@ def test_compare_func_with_duplicates_removed(self):
'values_changed': {
"root[2]['val']": {
'new_value': 3,
'new_path': "root[0]['val']"
}
'new_path': "root[0]['val']",
},
"root[2]['nested'][0]['val']": {
'new_path': "root[0]['nested'][1]['val']",
'new_value': 3,
},
}
}
assert expected_delta_dict == delta_again.diff
@@ -2104,6 +2188,168 @@ def test_compare_func_nested_changes(self):
recreated_t2 = t1 + delta
assert t2 == recreated_t2

def test_compare_func_deep_nested_changes(self):

t1 = {
"Locations": [
{
"id": "c4fa7b12-f365-42a9-9544-3efc11963558",
"Items": [
{
"id": "2399528f-2556-4e2c-bf9b-c8ea17bc323f"
},
{
"id": "2399528f-2556-4e2c-bf9b-c8ea17bc323f1",
},
{
"id": "2399528f-2556-4e2c-bf9b-c8ea17bc323f2"
},
{
"id": "2399528f-2556-4e2c-bf9b-c8ea17bc323f3"
}
]
},
{
"id": "d9095676-bc41-4cbf-9fd2-7148bb26bcc4",
"Items": [
{
"id": "26b78305-df71-40c0-8e98-dcd40b7f716d"
},
{
"id": "3235125d-0110-4d0e-847a-24912cf73feb"
},
{
"id": "7699552a-add9-4338-aeb9-662bec14c175"
},
{
"id": "015e74f0-2c2a-45c0-a172-21758d14bf3a"
}
]
},
{
"id": "41b38757-8984-47fd-890d-8c4ed18c3c47",
"Items": [
{
"id": "494e839e-37b1-4cac-b1dc-a44f3e6e7ada"
},
{
"id": "60547ca6-3ef0-4b67-8826-2c7b76e67011"
},
{
"id": "cee762a0-fbd8-48bb-ba92-be32cf3cf250"
},
{
"id": "7a0da2b7-c1e6-45b4-8810-fec7b4b6186d"
}
]
},
{
"id": "c0be071a-5457-497d-9a78-ff7cb561d4d3",
"Items": [
{
"id": "e54dcdff-ec99-4941-92eb-c12bb3cbeb91"
}
]
},
{
"id": "dfe4b37b-8df3-4dc6-8686-0588937fbe10",
"Items": [
{
"id": "27a574ae-08db-47f9-a9dc-18df59287f4d"
},
{
"id": "23edf031-8c4e-43d6-b5bf-4d5ee9008a36",
"Containers": [
{"id": "1", "val": 1},
{"id": "2", "val": 2},
{"id": "3", "val": 3},
]
},
{
"id": "e1e54643-23ee-496d-b7d2-de67c4bb7d68"
},
{
"id": "2f910da3-8cd0-4cf5-81c9-23668fc9477f"
},
{
"id": "5e36d258-2a82-49ee-b4fc-db0a8c28b404"
},
{
"id": "4bf2ce8d-05ed-4718-a529-8c9e4704e38f"
},
]
},
]
}

t2 = {
"Locations": [
{
"id": "41b38757-8984-47fd-890d-8c4ed18c3c47",
"Items": [
{
"id": "60547ca6-3ef0-4b67-8826-2c7b76e67011"
},
{
"id": "cee762a0-fbd8-48bb-ba92-be32cf3cf250"
},
{
"id": "7a0da2b7-c1e6-45b4-8810-fec7b4b6186d"
}
]
},
{
"id": "c0be071a-5457-497d-9a78-ff7cb561d4d3",
"Items": [
{
"id": "e54dcdff-ec99-4941-92eb-c12bb3cbeb91"
}
]
},
{
"id": "dfe4b37b-8df3-4dc6-8686-0588937fbe10",
"Items": [
{
"id": "27a574ae-08db-47f9-a9dc-18df59287f4d"
},
{
"id": "27a574ae-08db-47f9-a9dc-88df59287f4d"
},
{
"id": "23edf031-8c4e-43d6-b5bf-4d5ee9008a36",
"Containers": [
{"id": "1", "val": 1},
{"id": "3", "val": 3},
{"id": "2", "val": 2},
]
},
{
"id": "e1e54643-23ee-496d-b7d2-de67c4bb7d68"
},
{
"id": "2f910da3-8cd0-4cf5-81c9-23668fc9477f"
},
{
"id": "5e36d258-2a82-49ee-b4fc-db0a8c28b404"
},
{
"id": "4bf2ce8d-05ed-4718-a529-8c9e4704e38f"
},
]
},
]
}

ddiff = DeepDiff(t1, t2, iterable_compare_func=self.compare_func, verbose_level=2)

delta2 = Delta(ddiff)
expected_move_1 = {'new_path': "root['Locations'][2]['Items'][2]['Containers'][2]", 'value': {'id': '2', 'val': 2}}
expected_move_2 = {'new_path': "root['Locations'][2]['Items'][2]['Containers'][1]", 'value': {'id': '3', 'val': 3}}
assert ddiff["iterable_item_moved"]["root['Locations'][4]['Items'][1]['Containers'][1]"] == expected_move_1
assert ddiff["iterable_item_moved"]["root['Locations'][4]['Items'][1]['Containers'][2]"] == expected_move_2
recreated_t2 = t1 + delta2
assert t2 == recreated_t2

def test_delta_force1(self):
t1 = {
'x': {
19 changes: 19 additions & 0 deletions tests/test_diff_numpy.py
@@ -143,6 +143,25 @@
}
},
},
'numpy_datetime_equal': {
't1': np.datetime64('2023-07-05T10:11:12'),
't2': np.datetime64('2023-07-05T10:11:12'),
'deepdiff_kwargs': {},
'expected_result': {},
},
'numpy_datetime_unequal': {
't1': np.datetime64('2023-07-05T10:11:12'),
't2': np.datetime64('2024-07-05T10:11:12'),
'deepdiff_kwargs': {},
'expected_result': {
'values_changed': {
'root': {
'new_value': np.datetime64('2024-07-05T10:11:12'),
'old_value': np.datetime64('2023-07-05T10:11:12'),
}
},
},
},
}


13 changes: 13 additions & 0 deletions tests/test_hash.py
@@ -196,6 +196,19 @@ def test_numpy_bool(self):
a_hash = DeepHash(a)[a]
assert not( a_hash is unprocessed)

def test_numpy_datetime64(self):
now_dt = datetime.datetime.now()
now = np.datetime64(now_dt)
later = np.datetime64(now_dt + datetime.timedelta(seconds=10))
a = b = now
a_hash = DeepHash(a)
b_hash = DeepHash(b)
assert a_hash[a] == b_hash[b]

later_hash = DeepHash(later)
assert a_hash[a] != later_hash[later]


class TestDeepHashPrep:
"""DeepHashPrep Tests covering object serialization."""