diff --git a/requirements-dev.in b/requirements-dev.in index 4fc63746ac..dfcfe84656 100644 --- a/requirements-dev.in +++ b/requirements-dev.in @@ -5,7 +5,6 @@ pip-tools pytest pytest-cov pytest-django -pytest-mock pytest-playwright pytest-randomly tox @@ -15,5 +14,6 @@ tox # See https://github.com/microsoft/playwright-python/issues/2190 git+https://github.com/microsoft/playwright-python.git@d9cdfbb1e178b6770625e9f857139aff77516af0#egg=playwright -# coverage 7.6.2 dropped support for Python 3.8, so pinning it for now. +# These dependencies dropped support for Python 3.8, so pinning them for now. coverage[toml]==7.6.1 +pytest-randomly==3.15.0 diff --git a/requirements-dev.txt b/requirements-dev.txt index 6afa553d8c..188b81c0f8 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -154,7 +154,7 @@ olefile==0.47 # opf-fido opf-fido @ git+https://github.com/artefactual-labs/fido.git@564ceb8018a8650fe931cf20e6780ee008e60fca # via -r requirements.txt -orjson==3.10.9 +orjson==3.10.10 # via -r requirements.txt packaging==24.1 # via @@ -213,7 +213,6 @@ pytest==8.3.3 # pytest-base-url # pytest-cov # pytest-django - # pytest-mock # pytest-playwright # pytest-randomly pytest-base-url==2.1.0 @@ -222,8 +221,6 @@ pytest-cov==5.0.0 # via -r requirements-dev.in pytest-django==4.9.0 # via -r requirements-dev.in -pytest-mock==3.14.0 - # via -r requirements-dev.in pytest-playwright==0.5.2 # via -r requirements-dev.in pytest-randomly==3.15.0 @@ -286,7 +283,7 @@ tomli==2.0.2 # pyproject-api # pytest # tox -tox==4.23.1 +tox==4.23.2 # via -r requirements-dev.in typing-extensions==4.12.2 # via @@ -302,7 +299,7 @@ urllib3==2.2.3 # amclient # elasticsearch # requests -virtualenv==20.27.0 +virtualenv==20.27.1 # via tox wheel==0.44.0 # via pip-tools @@ -317,13 +314,13 @@ zope-event==5.0 # via # -r requirements.txt # gevent -zope-interface==7.1.0 +zope-interface==7.1.1 # via # -r requirements.txt # gevent # The following packages are considered to be unsafe in a requirements file: 
-pip==24.2 +pip==24.3.1 # via pip-tools setuptools==75.2.0 # via diff --git a/requirements.in b/requirements.in index 5eb41fdb69..204bc96c0a 100644 --- a/requirements.in +++ b/requirements.in @@ -26,7 +26,6 @@ prometheus_client python-dateutil requests unidecode -whitenoise # Required by LDAP authentication django-auth-ldap @@ -42,3 +41,4 @@ mozilla-django-oidc django-auth-ldap==5.0.0 gevent==24.2.1 jsonschema-specifications==2023.12.1 +whitenoise==6.7.0 diff --git a/requirements.txt b/requirements.txt index f2ab995bd8..d7eb7776ec 100644 --- a/requirements.txt +++ b/requirements.txt @@ -100,7 +100,7 @@ olefile==0.47 # via opf-fido opf-fido @ git+https://github.com/artefactual-labs/fido.git@564ceb8018a8650fe931cf20e6780ee008e60fca # via -r requirements.in -orjson==3.10.9 +orjson==3.10.10 # via -r requirements.in packaging==24.1 # via gunicorn @@ -170,7 +170,7 @@ zipp==3.20.2 # importlib-resources zope-event==5.0 # via gevent -zope-interface==7.1.0 +zope-interface==7.1.1 # via gevent # The following packages are considered to be unsafe in a requirements file: diff --git a/tests/MCPClient/test_antivirus.py b/tests/MCPClient/test_antivirus.py index b1d48ee1fd..23ea0647a7 100644 --- a/tests/MCPClient/test_antivirus.py +++ b/tests/MCPClient/test_antivirus.py @@ -2,6 +2,7 @@ from collections import OrderedDict from collections import namedtuple +from unittest import mock import archivematica_clamscan import pytest @@ -79,30 +80,39 @@ def version_attrs(self): def setup_test_scan_file_mocks( - mocker, + get_scanner, + file_already_scanned_mock, + file_objects_get, file_already_scanned=False, file_size=1024, scanner_should_except=False, scanner_passed=False, ): + file_already_scanned_mock.return_value = file_already_scanned + file_objects_get.return_value = FileMock(size=file_size) deps = namedtuple("deps", ["file_already_scanned", "file_get", "scanner"])( - file_already_scanned=mocker.patch( - "archivematica_clamscan.file_already_scanned", - 
return_value=file_already_scanned, - ), - file_get=mocker.patch( - "main.models.File.objects.get", return_value=FileMock(size=file_size) - ), + file_already_scanned=file_already_scanned_mock, + file_get=file_objects_get, scanner=ScannerMock(should_except=scanner_should_except, passed=scanner_passed), ) - mocker.patch("archivematica_clamscan.get_scanner", return_value=deps.scanner) + get_scanner.return_value = deps.scanner return deps -def test_scan_file_already_scanned(mocker): - deps = setup_test_scan_file_mocks(mocker, file_already_scanned=True) +@mock.patch("archivematica_clamscan.file_already_scanned") +@mock.patch("main.models.File.objects.get") +@mock.patch("archivematica_clamscan.get_scanner") +def test_scan_file_already_scanned( + get_scanner, file_objects_get, file_already_scanned_mock +): + deps = setup_test_scan_file_mocks( + get_scanner, + file_already_scanned_mock, + file_objects_get, + file_already_scanned=True, + ) exit_code = archivematica_clamscan.scan_file([], **dict(args)) @@ -160,8 +170,21 @@ def test_scan_file_already_scanned(mocker): ), ], ) -def test_scan_file(mocker, setup_kwargs, exit_code, queue_event_params, settings): - setup_test_scan_file_mocks(mocker, **setup_kwargs) +@mock.patch("archivematica_clamscan.file_already_scanned") +@mock.patch("main.models.File.objects.get") +@mock.patch("archivematica_clamscan.get_scanner") +def test_scan_file( + get_scanner, + file_objects_get, + file_already_scanned_mock, + setup_kwargs, + exit_code, + queue_event_params, + settings, +): + setup_test_scan_file_mocks( + get_scanner, file_already_scanned_mock, file_objects_get, **setup_kwargs + ) # Here the user configurable thresholds for maimum file size, and maximum # scan size are being tested. 
The scan size is offset so as to enable the diff --git a/tests/MCPClient/test_antivirus_clamdscan.py b/tests/MCPClient/test_antivirus_clamdscan.py index f9f1a2b2b1..5bf353a8ca 100644 --- a/tests/MCPClient/test_antivirus_clamdscan.py +++ b/tests/MCPClient/test_antivirus_clamdscan.py @@ -2,6 +2,7 @@ import errno from collections import namedtuple +from unittest import mock import archivematica_clamscan from clamd import BufferTooLongError @@ -20,32 +21,30 @@ def setup_clamdscanner( return archivematica_clamscan.ClamdScanner() -def test_clamdscanner_version_props(mocker, settings): +def test_clamdscanner_version_props(settings): scanner = setup_clamdscanner(settings) - mocker.patch.object( + with mock.patch.object( scanner, "version_attrs", return_value=("ClamAV 0.99.2", "23992/Fri Oct 27 05:04:12 2017"), - ) + ): + assert scanner.program() == "ClamAV (clamd)" + assert scanner.version() == "ClamAV 0.99.2" + assert scanner.virus_definitions() == "23992/Fri Oct 27 05:04:12 2017" - assert scanner.program() == "ClamAV (clamd)" - assert scanner.version() == "ClamAV 0.99.2" - assert scanner.virus_definitions() == "23992/Fri Oct 27 05:04:12 2017" - -def test_clamdscanner_version_attrs(mocker, settings): +def test_clamdscanner_version_attrs(settings): scanner = setup_clamdscanner(settings, addr="/var/run/clamav/clamd.ctl") - version = mocker.patch.object( + with mock.patch.object( scanner.client, "version", return_value="ClamAV 0.99.2/23992/Fri Oct 27 05:04:12 2017", - ) - - assert scanner.version_attrs() == ( - "ClamAV 0.99.2", - "23992/Fri Oct 27 05:04:12 2017", - ) - version.assert_called_once() + ) as version: + assert scanner.version_attrs() == ( + "ClamAV 0.99.2", + "23992/Fri Oct 27 05:04:12 2017", + ) + version.assert_called_once() def test_clamdscanner_get_client(settings): @@ -59,24 +58,22 @@ def test_clamdscanner_get_client(settings): assert scanner.client.timeout == 15.5 -def test_clamdscanner_scan(mocker, settings): +def test_clamdscanner_scan(settings): OKAY_RET 
= ("OK", None) ERROR_RET = ("ERROR", "Permission denied") FOUND_RET = ("FOUND", "Eicar-Test-Signature") - def patch(scanner, ret=OKAY_RET, excepts=False): + def patch(pass_by_stream, pass_by_reference, scanner, ret=OKAY_RET, excepts=False): """Patch the scanner function and enable testing of exceptions raised by clamdscanner that we want to control. excepts can take an argument of True to pass a generic exception. excepts can also take an exception as an argument for better granularity. """ + pass_by_stream.return_value = {"stream": ret} + pass_by_reference.return_value = {"/file": ret} deps = namedtuple("deps", ["pass_by_stream", "pass_by_reference"])( - pass_by_stream=mocker.patch.object( - scanner, "pass_by_stream", return_value={"stream": ret} - ), - pass_by_reference=mocker.patch.object( - scanner, "pass_by_reference", return_value={"/file": ret} - ), + pass_by_stream=pass_by_stream, + pass_by_reference=pass_by_reference, ) if excepts is not False: e = excepts @@ -87,72 +84,142 @@ def patch(scanner, ret=OKAY_RET, excepts=False): return deps scanner = setup_clamdscanner(settings, stream=False) - deps = patch(scanner, ret=OKAY_RET) - passed, state, details = scanner.scan("/file") - assert passed is True - assert state == "OK" - assert details is None - deps.pass_by_stream.assert_not_called() - deps.pass_by_reference.assert_called_once() + + with mock.patch.object( + scanner, "pass_by_stream" + ) as pass_by_stream, mock.patch.object( + scanner, "pass_by_reference" + ) as pass_by_reference: + deps = patch(pass_by_stream, pass_by_reference, scanner, ret=OKAY_RET) + passed, state, details = scanner.scan("/file") + assert passed is True + assert state == "OK" + assert details is None + deps.pass_by_stream.assert_not_called() + deps.pass_by_reference.assert_called_once() scanner = setup_clamdscanner(settings, stream=True) - deps = patch(scanner, ret=OKAY_RET) - passed, state, details = scanner.scan("/file") - assert passed is True - assert state == "OK" - assert 
details is None - deps.pass_by_stream.assert_called_once() - deps.pass_by_reference.assert_not_called() - - patch(scanner, ret=ERROR_RET) - passed, state, details = scanner.scan("/file") - assert passed is False - assert state == "ERROR" - assert details == "Permission denied" - - patch(scanner, ret=FOUND_RET) - passed, state, details = scanner.scan("/file") - assert passed is False - assert state == "FOUND" - assert details == "Eicar-Test-Signature" - - # Testing a generic Exception returned by the clamdscan micorservice. - patch(scanner, ret=OKAY_RET, excepts=True) - passed, state, details = scanner.scan("/file") - assert passed is False - assert state is None - assert details is None - - # Testing a generic IOError that is not a broken pipe error that we're - # expecting to be able to manage from clamdscan. - patch(scanner, ret=OKAY_RET, excepts=OSError("Testing a generic IO Error")) - passed, state, details = scanner.scan("/file") - assert passed is False - assert state is None - assert details is None - - # Broken pipe is a known error from the clamd library. - brokenpipe_error = OSError("Testing a broken pipe error") - brokenpipe_error.errno = errno.EPIPE - patch(scanner, ret=OKAY_RET, excepts=brokenpipe_error) - passed, state, details = scanner.scan("/file") - assert passed is None - assert state is None - assert details is None - - # The INSTREAM size limit error is known to us; test it here. - instream_error = BufferTooLongError("INSTREAM size limit exceeded. ERROR.") - patch(scanner, ret=OKAY_RET, excepts=instream_error) - passed, state, details = scanner.scan("/file") - assert passed is None - assert state is None - assert details is None - - # The clamd library can return a further error code here, and we we test it - # to make sure that if it does, it is managed. 
- connection_error = ConnectionError("Error while reading from socket.") - patch(scanner, ret=OKAY_RET, excepts=connection_error) - passed, state, details = scanner.scan("/file") - assert passed is None - assert state is None - assert details is None + with mock.patch.object( + scanner, "pass_by_stream" + ) as pass_by_stream, mock.patch.object( + scanner, "pass_by_reference" + ) as pass_by_reference: + deps = patch(pass_by_stream, pass_by_reference, scanner, ret=OKAY_RET) + passed, state, details = scanner.scan("/file") + assert passed is True + assert state == "OK" + assert details is None + deps.pass_by_stream.assert_called_once() + deps.pass_by_reference.assert_not_called() + + with mock.patch.object( + scanner, "pass_by_stream" + ) as pass_by_stream, mock.patch.object( + scanner, "pass_by_reference" + ) as pass_by_reference: + patch(pass_by_stream, pass_by_reference, scanner, ret=ERROR_RET) + passed, state, details = scanner.scan("/file") + assert passed is False + assert state == "ERROR" + assert details == "Permission denied" + + with mock.patch.object( + scanner, "pass_by_stream" + ) as pass_by_stream, mock.patch.object( + scanner, "pass_by_reference" + ) as pass_by_reference: + patch(pass_by_stream, pass_by_reference, scanner, ret=FOUND_RET) + passed, state, details = scanner.scan("/file") + assert passed is False + assert state == "FOUND" + assert details == "Eicar-Test-Signature" + + with mock.patch.object( + scanner, "pass_by_stream" + ) as pass_by_stream, mock.patch.object( + scanner, "pass_by_reference" + ) as pass_by_reference: + # Testing a generic Exception returned by the clamdscan microservice. 
+ patch(pass_by_stream, pass_by_reference, scanner, ret=OKAY_RET, excepts=True) + passed, state, details = scanner.scan("/file") + assert passed is False + assert state is None + assert details is None + + with mock.patch.object( + scanner, "pass_by_stream" + ) as pass_by_stream, mock.patch.object( + scanner, "pass_by_reference" + ) as pass_by_reference: + # Testing a generic IOError that is not a broken pipe error that we're + # expecting to be able to manage from clamdscan. + patch( + pass_by_stream, + pass_by_reference, + scanner, + ret=OKAY_RET, + excepts=OSError("Testing a generic IO Error"), + ) + passed, state, details = scanner.scan("/file") + assert passed is False + assert state is None + assert details is None + + with mock.patch.object( + scanner, "pass_by_stream" + ) as pass_by_stream, mock.patch.object( + scanner, "pass_by_reference" + ) as pass_by_reference: + # Broken pipe is a known error from the clamd library. + brokenpipe_error = OSError("Testing a broken pipe error") + brokenpipe_error.errno = errno.EPIPE + patch( + pass_by_stream, + pass_by_reference, + scanner, + ret=OKAY_RET, + excepts=brokenpipe_error, + ) + passed, state, details = scanner.scan("/file") + assert passed is None + assert state is None + assert details is None + + with mock.patch.object( + scanner, "pass_by_stream" + ) as pass_by_stream, mock.patch.object( + scanner, "pass_by_reference" + ) as pass_by_reference: + # The INSTREAM size limit error is known to us; test it here. + instream_error = BufferTooLongError("INSTREAM size limit exceeded. 
ERROR.") + patch( + pass_by_stream, + pass_by_reference, + scanner, + ret=OKAY_RET, + excepts=instream_error, + ) + passed, state, details = scanner.scan("/file") + assert passed is None + assert state is None + assert details is None + + with mock.patch.object( + scanner, "pass_by_stream" + ) as pass_by_stream, mock.patch.object( + scanner, "pass_by_reference" + ) as pass_by_reference: + # The clamd library can return a further error code here, and we test it + # to make sure that if it does, it is managed. + connection_error = ConnectionError("Error while reading from socket.") + patch( + pass_by_stream, + pass_by_reference, + scanner, + ret=OKAY_RET, + excepts=connection_error, + ) + passed, state, details = scanner.scan("/file") + assert passed is None + assert state is None + assert details is None diff --git a/tests/MCPClient/test_antivirus_clamscan.py b/tests/MCPClient/test_antivirus_clamscan.py index f783bbe904..ac4d71ea42 100644 --- a/tests/MCPClient/test_antivirus_clamscan.py +++ b/tests/MCPClient/test_antivirus_clamscan.py @@ -1,6 +1,7 @@ """Tests for the archivematica_clamscan.py client script.""" import subprocess +from unittest import mock import archivematica_clamscan import pytest @@ -26,53 +27,52 @@ def setup_clamscanner(): return archivematica_clamscan.ClamScanner() -def test_clamscanner_version_props(mocker): +def test_clamscanner_version_props(): scanner = setup_clamscanner() - mocker.patch.object( + with mock.patch.object( scanner, "version_attrs", return_value=("ClamAV 0.99.2", "23992/Fri Oct 27 05:04:12 2017"), - ) + ): + assert scanner.program() == "ClamAV (clamscan)" + assert scanner.version() == "ClamAV 0.99.2" + assert scanner.virus_definitions() == "23992/Fri Oct 27 05:04:12 2017" - assert scanner.program() == "ClamAV (clamscan)" - assert scanner.version() == "ClamAV 0.99.2" - assert scanner.virus_definitions() == "23992/Fri Oct 27 05:04:12 2017" - -def 
test_clamscanner_version_attrs(settings): scanner = setup_clamscanner() - mock = mocker.patch.object( + with mock.patch.object( scanner, "_call", return_value="ClamAV 0.99.2/23992/Fri Oct 27 05:04:12 2017" - ) - - assert scanner.version_attrs() == ( - "ClamAV 0.99.2", - "23992/Fri Oct 27 05:04:12 2017", - ) - mock.assert_called_once_with("-V") + ) as mock_call: + assert scanner.version_attrs() == ( + "ClamAV 0.99.2", + "23992/Fri Oct 27 05:04:12 2017", + ) + mock_call.assert_called_once_with("-V") -def test_clamscanner_scan(mocker, settings): +def test_clamscanner_scan(settings): scanner = setup_clamscanner() - mock = mocker.patch.object(scanner, "_call", return_value="Output of clamscan") - - # User configured thresholds need to be sent through to clamscanner and - # executed as part of the call to it. - settings.CLAMAV_CLIENT_MAX_FILE_SIZE = 20 - settings.CLAMAV_CLIENT_MAX_SCAN_SIZE = 20 - - max_file_size = "--max-filesize=%dM" % settings.CLAMAV_CLIENT_MAX_FILE_SIZE - max_scan_size = "--max-scansize=%dM" % settings.CLAMAV_CLIENT_MAX_SCAN_SIZE - - assert scanner.scan("/file") == (True, "OK", None) - mock.assert_called_once_with(max_file_size, max_scan_size, "/file") - - mock.side_effect = subprocess.CalledProcessError( - 1, "clamscan", "Output of clamscan" - ) - assert scanner.scan("/file") == (False, "FOUND", None) - - mock.side_effect = subprocess.CalledProcessError( - 2, "clamscan", "Output of clamscan" - ) - assert scanner.scan("/file") == (False, "ERROR", None) + with mock.patch.object( + scanner, "_call", return_value="Output of clamscan" + ) as mock_call: + # User configured thresholds need to be sent through to clamscanner and + # executed as part of the call to it. 
+ settings.CLAMAV_CLIENT_MAX_FILE_SIZE = 20 + settings.CLAMAV_CLIENT_MAX_SCAN_SIZE = 20 + + max_file_size = "--max-filesize=%dM" % settings.CLAMAV_CLIENT_MAX_FILE_SIZE + max_scan_size = "--max-scansize=%dM" % settings.CLAMAV_CLIENT_MAX_SCAN_SIZE + + assert scanner.scan("/file") == (True, "OK", None) + mock_call.assert_called_once_with(max_file_size, max_scan_size, "/file") + + mock_call.side_effect = subprocess.CalledProcessError( + 1, "clamscan", "Output of clamscan" + ) + assert scanner.scan("/file") == (False, "FOUND", None) + + mock_call.side_effect = subprocess.CalledProcessError( + 2, "clamscan", "Output of clamscan" + ) + assert scanner.scan("/file") == (False, "ERROR", None) diff --git a/tests/MCPClient/test_archivematicaCreateMETSMetadataCSV.py b/tests/MCPClient/test_archivematicaCreateMETSMetadataCSV.py index 228e9511f0..81d80a8347 100755 --- a/tests/MCPClient/test_archivematicaCreateMETSMetadataCSV.py +++ b/tests/MCPClient/test_archivematicaCreateMETSMetadataCSV.py @@ -3,6 +3,8 @@ Tests for CSV metadata management on the METS creation process """ +from unittest import mock + from archivematicaCreateMETSMetadataCSV import parseMetadataCSV content_when_value_has_empty_strings = """ @@ -19,9 +21,9 @@ """.strip() -def test_parseMetadataCSV_with_null_values(mocker, tmp_path): +def test_parseMetadataCSV_with_null_values(tmp_path): """Applying testcases in parseMetadataCSV function""" - job = mocker.Mock() + job = mock.Mock() d = tmp_path / "sub" d.mkdir() p = d / "metadata.csv" @@ -31,9 +33,9 @@ def test_parseMetadataCSV_with_null_values(mocker, tmp_path): assert (result["objects/bar"]["dc.type"]) == ["Photograph", "Still Image"] -def test_parseMetadataCSV_with_column_has_no_values(mocker, tmp_path): +def test_parseMetadataCSV_with_column_has_no_values(tmp_path): """Applying testcases in parseMetadataCSV function""" - job = mocker.Mock() + job = mock.Mock() d = tmp_path / "sub" d.mkdir() p = d / "metadata.csv" diff --git 
a/tests/MCPClient/test_archivematicaCreateMETSMetadataXML.py b/tests/MCPClient/test_archivematicaCreateMETSMetadataXML.py index 8e774fbf1a..b0a4799865 100644 --- a/tests/MCPClient/test_archivematicaCreateMETSMetadataXML.py +++ b/tests/MCPClient/test_archivematicaCreateMETSMetadataXML.py @@ -6,6 +6,7 @@ """ from pathlib import Path +from unittest import mock from uuid import uuid4 import metsrw @@ -84,19 +85,19 @@ def _make_metadata_file(rel_path): @pytest.fixture -def make_mock_fsentry(mocker): +def make_mock_fsentry(): def _make_mock_fsentry(**kwargs): mock_fsentry = metsrw.FSEntry(**kwargs) - mock_fsentry.add_dmdsec = mocker.Mock() - mock_fsentry.delete_dmdsec = mocker.Mock() - mock_fsentry.add_premis_event = mocker.Mock() + mock_fsentry.add_dmdsec = mock.Mock() + mock_fsentry.delete_dmdsec = mock.Mock() + mock_fsentry.add_premis_event = mock.Mock() return mock_fsentry return _make_mock_fsentry @pytest.fixture -def make_mock_mets(mocker, make_mock_fsentry): +def make_mock_mets(make_mock_fsentry): def _make_mock_mets(metadata_file_uuids=None): if metadata_file_uuids is None: metadata_file_uuids = [] @@ -108,7 +109,7 @@ def _make_mock_mets(metadata_file_uuids=None): files = [sip, objects, directory, file_txt] for uuid in metadata_file_uuids: files.append(make_mock_fsentry(file_uuid=uuid, use="metadata")) - mock_mets = mocker.Mock() + mock_mets = mock.Mock() mock_mets.all_files.return_value = files mock_mets.get_file.side_effect = lambda **kwargs: next( ( @@ -124,45 +125,49 @@ def _make_mock_mets(metadata_file_uuids=None): @pytest.fixture -def insert_into_events_mock(mocker): - return mocker.patch( +def insert_into_events_mock(): + with mock.patch( "archivematicaCreateMETSMetadataXML.insertIntoEvents", return_value="fake_event", - ) + ) as result: + yield result @pytest.fixture -def create_event_mock(mocker): - return mocker.patch( +def create_event_mock(): + with mock.patch( "archivematicaCreateMETSMetadataXML.createmets2.createEvent", return_value="fake_element", 
- ) + ) as result: + yield result @pytest.fixture -def requests_get(mocker): +def requests_get(): # Return a mock response good enough to be parsed as an XML schema. - return mocker.patch( + with mock.patch( "requests.get", - return_value=mocker.Mock(text=IMPORTED_SCHEMA), - ) + return_value=mock.Mock(text=IMPORTED_SCHEMA), + ) as result: + yield result @pytest.fixture -def requests_get_error(mocker): +def requests_get_error(): # Simulate an error retrieving an imported schema. - return mocker.patch( + with mock.patch( "requests.get", side_effect=requests.RequestException("error"), - ) + ) as result: + yield result @pytest.fixture -def etree_parse(mocker): +def etree_parse(): # Mocked etree.parse used in the resolver tests that returns None before the first # XMLSchema call, which triggers an etree.XMLSchemaParseError exception # and forces reparsing the validation schema with a custom etree.Resolver. - class mock: + class mock_parse: def __init__(self, *args, **kwargs): self.call_count = 0 @@ -174,10 +179,11 @@ def __call__(self, *args, **kwargs): return return parse(*args, **kwargs) - mocker.patch( + with mock.patch( "archivematicaCreateMETSMetadataXML.etree.parse", - mock(), - ) + mock_parse(), + ): + yield @pytest.fixture @@ -268,7 +274,6 @@ def test_validation( make_schema_file, sip, sip_directory_path, - mocker, insert_into_events_mock, create_event_mock, xml_file, @@ -296,7 +301,7 @@ def test_validation( if should_pass: # Use ANY to avoid comparision with etree.Element, but confirm element tag. 
objects_fsentry.add_dmdsec.assert_called_once_with( - mocker.ANY, "OTHER", othermdtype="mdtype", status="original" + mock.ANY, "OTHER", othermdtype="mdtype", status="original" ) assert objects_fsentry.add_dmdsec.call_args[0][0].tag == "foo" else: @@ -323,7 +328,7 @@ def test_validation( @pytest.mark.django_db def test_skipped_validation( - settings, make_metadata_file, make_mock_mets, sip, sip_directory_path, mocker + settings, make_metadata_file, make_mock_mets, sip, sip_directory_path ): settings.METADATA_XML_VALIDATION_ENABLED = True xml_validation = {"foo": None} @@ -345,7 +350,7 @@ def test_skipped_validation( assert not errors # Use ANY to avoid comparision with etree.Element, but confirm element tag. objects_fsentry.add_dmdsec.assert_called_once_with( - mocker.ANY, "OTHER", othermdtype="mdtype", status="original" + mock.ANY, "OTHER", othermdtype="mdtype", status="original" ) assert objects_fsentry.add_dmdsec.call_args[0][0].tag == "foo" metadata_fsentry.add_premis_event.assert_not_called() @@ -552,7 +557,6 @@ def test_reingest( make_mock_mets, sip, sip_directory_path, - mocker, ): settings.METADATA_XML_VALIDATION_ENABLED = True xml_validation = {"foo": str(make_schema_file("xsd"))} @@ -571,7 +575,7 @@ def test_reingest( assert not errors # Use ANY to avoid comparision with etree.Element, but confirm element tag. 
objects_fsentry.add_dmdsec.assert_called_once_with( - mocker.ANY, "OTHER", othermdtype="mdtype", status="update" + mock.ANY, "OTHER", othermdtype="mdtype", status="update" ) assert objects_fsentry.add_dmdsec.call_args[0][0].tag == "foo" metadata_fsentry.add_premis_event.assert_called() diff --git a/tests/MCPClient/test_assign_file_uuids.py b/tests/MCPClient/test_assign_file_uuids.py index 1304251999..6f8000ee8f 100644 --- a/tests/MCPClient/test_assign_file_uuids.py +++ b/tests/MCPClient/test_assign_file_uuids.py @@ -1,4 +1,5 @@ import uuid +from unittest import mock import pytest from assign_file_uuids import call @@ -23,8 +24,8 @@ def sip_directory_path(sip_directory_path): @pytest.mark.django_db -def test_call_creates_transfer_files_and_events(mocker, sip_directory_path, transfer): - job = mocker.MagicMock( +def test_call_creates_transfer_files_and_events(sip_directory_path, transfer): + job = mock.MagicMock( spec=Job, args=[ "assign_file_uuids.py", @@ -68,8 +69,8 @@ def test_call_creates_transfer_files_and_events(mocker, sip_directory_path, tran ], ids=["no_transfer_nor_sip_provided", "transfer_and_sip_provided"], ) -def test_call_validates_job_arguments(mocker, job_arguments): - job = mocker.MagicMock( +def test_call_validates_job_arguments(job_arguments): + job = mock.MagicMock( spec=Job, args=job_arguments, ) @@ -84,19 +85,27 @@ def test_call_validates_job_arguments(mocker, job_arguments): @pytest.mark.django_db -def test_call_updates_transfer_file_on_reingest(mocker, sip_directory_path, transfer): +@mock.patch("metsrw.METSDocument.fromfile") +@mock.patch("assign_file_uuids.find_mets_file") +@mock.patch("assign_file_uuids.get_file_info_from_mets") +def test_call_updates_transfer_file_on_reingest( + get_file_info_from_mets, + find_mets_file, + fromfile, + sip_directory_path, + transfer, +): # Fake METS parsing and return static file information. 
- mocker.patch("metsrw.METSDocument.fromfile") - mocker.patch("assign_file_uuids.find_mets_file") + file_info = { "uuid": str(uuid.uuid4()), "filegrpuse": "original", "original_path": str(sip_directory_path / "contents" / "file-in.txt"), "current_path": str(sip_directory_path / "reingest" / "file-in.txt"), } - mocker.patch("assign_file_uuids.get_file_info_from_mets", return_value=file_info) + get_file_info_from_mets.return_value = file_info - job = mocker.MagicMock( + job = mock.MagicMock( spec=Job, args=[ "assign_file_uuids.py", diff --git a/tests/MCPClient/test_assign_uuids_to_directories.py b/tests/MCPClient/test_assign_uuids_to_directories.py index 9a4ddc915f..f28b3e3451 100755 --- a/tests/MCPClient/test_assign_uuids_to_directories.py +++ b/tests/MCPClient/test_assign_uuids_to_directories.py @@ -1,3 +1,5 @@ +from unittest import mock + import pytest from assign_uuids_to_directories import main from client.job import Job @@ -21,8 +23,8 @@ def transfer_directory_path(transfer_directory_path): @pytest.mark.django_db -def test_main(mocker, transfer, transfer_directory_path): - job = mocker.Mock(spec=Job) +def test_main(transfer, transfer_directory_path): + job = mock.Mock(spec=Job) include_dirs = True # Verify there are no directories. 
diff --git a/tests/MCPClient/test_check_for_access_directory.py b/tests/MCPClient/test_check_for_access_directory.py index 462718f74a..abcb291ece 100644 --- a/tests/MCPClient/test_check_for_access_directory.py +++ b/tests/MCPClient/test_check_for_access_directory.py @@ -1,3 +1,5 @@ +from unittest import mock + import check_for_access_directory import pytest from client.job import Job @@ -7,8 +9,8 @@ @pytest.mark.django_db -def test_main(mocker, tmp_path, sip, sip_directory_path): - job = mocker.Mock(spec=Job) +def test_main(tmp_path, sip, sip_directory_path): + job = mock.Mock(spec=Job) date = timezone.now() access_directory = tmp_path / "access" diff --git a/tests/MCPClient/test_create_aic_mets.py b/tests/MCPClient/test_create_aic_mets.py index 4d208d12c5..b0c8ec0062 100644 --- a/tests/MCPClient/test_create_aic_mets.py +++ b/tests/MCPClient/test_create_aic_mets.py @@ -1,8 +1,10 @@ import os import uuid +from unittest import mock import create_aic_mets import namespaces as ns +import pytest from lxml import etree THIS_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -13,18 +15,25 @@ def extract_file_mock(aip_uuid, mets_in_aip, mets_path): f.write("") -def test_create_aic_mets(db, mocker, tmp_path): - mocker.patch("fileOperations.addFileToSIP") - mocker.patch("databaseFunctions.insertIntoDerivations") - mocker.patch("storageService.extract_file", side_effect=extract_file_mock) - mocker.patch( - "create_mets_v2.getDublinCore", - return_value=etree.Element( - ns.dctermsBNS + "dublincore", nsmap={"dcterms": ns.dctermsNS, "dc": ns.dcNS} - ), - ) - mocker.patch("create_mets_v2.SIPMetadataAppliesToType") - +@pytest.mark.django_db +@mock.patch("fileOperations.addFileToSIP") +@mock.patch("databaseFunctions.insertIntoDerivations") +@mock.patch("storageService.extract_file", side_effect=extract_file_mock) +@mock.patch( + "create_mets_v2.getDublinCore", + return_value=etree.Element( + ns.dctermsBNS + "dublincore", nsmap={"dcterms": ns.dctermsNS, "dc": ns.dcNS} + ), +) 
+@mock.patch("create_mets_v2.SIPMetadataAppliesToType") +def test_create_aic_mets( + SIPMetadataAppliesToType, + getDublinCore, + extract_file, + insertIntoDerivations, + addFileToSIP, + tmp_path, +): # The client script expects an AIC directory that contains files whose # filenames are AIP UUIDs and their contents are the AIP names, so we create # a couple of those files here @@ -49,7 +58,7 @@ def test_create_aic_mets(db, mocker, tmp_path): # The AIC METS is created in the metadata directory of the AIC (aic_dir / "metadata").mkdir() - job = mocker.Mock() + job = mock.Mock() create_aic_mets.create_aic_mets(aic_uuid, str(aic_dir), job) # The AIC METS has a new UUID suffixed name, so we search for it instead of diff --git a/tests/MCPClient/test_create_mets_v2.py b/tests/MCPClient/test_create_mets_v2.py index f45702e35b..1fd62bc338 100644 --- a/tests/MCPClient/test_create_mets_v2.py +++ b/tests/MCPClient/test_create_mets_v2.py @@ -1,5 +1,6 @@ import uuid from contextlib import ExitStack as does_not_raise +from unittest import mock import pytest from create_mets_v2 import createDMDIDsFromCSVMetadata @@ -12,11 +13,9 @@ from namespaces import NSMAP -def test_createDMDIDsFromCSVMetadata_finds_non_ascii_paths(mocker): - dmd_secs_creator_mock = mocker.patch( - "create_mets_v2.createDmdSecsFromCSVParsedMetadata", return_value=[] - ) - state_mock = mocker.Mock( +@mock.patch("create_mets_v2.createDmdSecsFromCSVParsedMetadata", return_value=[]) +def test_createDMDIDsFromCSVMetadata_finds_non_ascii_paths(dmd_secs_creator_mock): + state_mock = mock.Mock( **{ "CSV_METADATA": { "montréal": "montreal metadata", @@ -31,9 +30,9 @@ def test_createDMDIDsFromCSVMetadata_finds_non_ascii_paths(mocker): dmd_secs_creator_mock.assert_has_calls( [ - mocker.call(None, "montreal metadata", state_mock), - mocker.call(None, {}, state_mock), - mocker.call(None, "dvorak metadata", state_mock), + mock.call(None, "montreal metadata", state_mock), + mock.call(None, {}, state_mock), + mock.call(None, 
"dvorak metadata", state_mock), ] ) @@ -263,8 +262,9 @@ def test_aip_mets_normative_directory_structure( (None, [], does_not_raise()), ], ) +@mock.patch("create_mets_v2.archivematicaCreateMETSMetadataXML.process_xml_metadata") def test_xml_validation_fail_on_error( - mocker, + process_xml_metadata, settings, mcp_job, sip_directory_path, @@ -274,16 +274,13 @@ def test_xml_validation_fail_on_error( errors, expectation, ): - mock_mets = mocker.Mock( + mock_mets = mock.Mock( **{ "serialize.return_value": etree.Element("tag"), "get_subsections_counts.return_value": {}, } ) - mocker.patch( - "create_mets_v2.archivematicaCreateMETSMetadataXML.process_xml_metadata", - return_value=(mock_mets, errors), - ) + process_xml_metadata.return_value = (mock_mets, errors) if fail_on_error is not None: settings.XML_VALIDATION_FAIL_ON_ERROR = fail_on_error with expectation: diff --git a/tests/MCPClient/test_email_fail_report.py b/tests/MCPClient/test_email_fail_report.py index 5e93145da0..2377be3f96 100644 --- a/tests/MCPClient/test_email_fail_report.py +++ b/tests/MCPClient/test_email_fail_report.py @@ -1,4 +1,5 @@ from smtplib import SMTPException +from unittest import mock import email_fail_report import pytest @@ -46,18 +47,6 @@ def test_send_email_err(monkeypatch): email_fail_report.send_email("Foobar", ["to@domain.tld"], "...") -@pytest.fixture -def args(mocker): - args_mock = mocker.Mock( - unit_type="Transfer", unit_name="My transfer", unit_uuid="uuid", stdout=False - ) - mocker.patch( - "argparse.ArgumentParser", - return_value=mocker.Mock(**{"parse_args.return_value": args_mock}), - ) - return args_mock - - @pytest.mark.parametrize( "test_case", [ @@ -89,24 +78,35 @@ def args(mocker): ids=["no-users-exist", "users-exist", "send-email-fails"], ) @pytest.mark.django_db -def test_report_is_always_stored(mocker, args, test_case): - mocker.patch( - "email_fail_report.get_emails_from_dashboard_users", - return_value=test_case["users"], - ) - 
mocker.patch("email_fail_report.get_content_for", return_value="my report") - send_email_mock = mocker.patch( - "email_fail_report.send_email", side_effect=test_case["send_email_result"] +@mock.patch("argparse.ArgumentParser") +@mock.patch("email_fail_report.get_emails_from_dashboard_users") +@mock.patch("email_fail_report.get_content_for", return_value="my report") +@mock.patch("email_fail_report.send_email") +@mock.patch("email_fail_report.store_report") +def test_report_is_always_stored( + store_report_mock, + send_email_mock, + get_content_for, + get_emails_from_dashboard_users, + argument_parser, + test_case, +): + args_mock = mock.Mock( + unit_type="Transfer", unit_name="My transfer", unit_uuid="uuid", stdout=False ) - store_report_mock = mocker.patch("email_fail_report.store_report") - job = mocker.MagicMock() + argument_parser.return_value = mock.Mock(**{"parse_args.return_value": args_mock}) + get_emails_from_dashboard_users.return_value = test_case["users"] + + send_email_mock.side_effect = test_case["send_email_result"] + + job = mock.MagicMock() email_fail_report.call([job]) store_report_mock.assert_called_once_with( - "my report", args.unit_type, args.unit_name, args.unit_uuid + "my report", args_mock.unit_type, args_mock.unit_name, args_mock.unit_uuid ) send_email_mock.assert_has_calls( - [mocker.call(*a) for a in test_case["send_email_calls"]] + [mock.call(*a) for a in test_case["send_email_calls"]] ) job.set_status.assert_has_calls( - [mocker.call(*a) for a in test_case["set_status_calls"]] + [mock.call(*a) for a in test_case["set_status_calls"]] ) diff --git a/tests/MCPClient/test_load_premis_events_from_xml.py b/tests/MCPClient/test_load_premis_events_from_xml.py index 6e62debf58..0bebb7d6d5 100644 --- a/tests/MCPClient/test_load_premis_events_from_xml.py +++ b/tests/MCPClient/test_load_premis_events_from_xml.py @@ -2,6 +2,7 @@ import pathlib import sys import uuid +from unittest import mock import load_premis_events_from_xml import pytest @@ 
-102,9 +103,9 @@ def simple_xml_path(tmp_path): return file_path -def test_get_premis_schema_with_nonexistent_path(mocker): - mocker.patch("os.path.isfile", return_value=False) - printfn = mocker.Mock() +@mock.patch("os.path.isfile", return_value=False) +def test_get_premis_schema_with_nonexistent_path(is_file): + printfn = mock.Mock() result = load_premis_events_from_xml.get_premis_schema("mock/xsd/path", printfn) printfn.assert_called_once_with( "The PREMIS XML schema asset mock/xsd/path is unavailable", file=sys.stderr @@ -112,8 +113,8 @@ def test_get_premis_schema_with_nonexistent_path(mocker): assert result is None -def test_get_premis_schema_with_invalid_schema(mocker, invalid_xsd_path): - printfn = mocker.Mock() +def test_get_premis_schema_with_invalid_schema(invalid_xsd_path): + printfn = mock.Mock() result = load_premis_events_from_xml.get_premis_schema( invalid_xsd_path.as_posix(), printfn ) @@ -125,14 +126,14 @@ def test_get_premis_schema_with_invalid_schema(mocker, invalid_xsd_path): assert result is None -def test_get_premis_schema_with_valid_schema(mocker, xsd_path): +def test_get_premis_schema_with_valid_schema(xsd_path): result = load_premis_events_from_xml.get_premis_schema(xsd_path) assert result is not None -def test_get_validated_etree_with_nonexistent_path(mocker): - mocker.patch("os.path.isfile", return_value=False) - printfn = mocker.Mock() +@mock.patch("os.path.isfile", return_value=False) +def test_get_validated_etree_with_nonexistent_path(is_file): + printfn = mock.Mock() result = load_premis_events_from_xml.get_validated_etree( "mock/xml/path", None, printfn ) @@ -140,8 +141,8 @@ def test_get_validated_etree_with_nonexistent_path(mocker): assert result is None -def test_get_validated_etree_with_invalid_xml(mocker, invalid_xml_path, xsd_path): - printfn = mocker.Mock() +def test_get_validated_etree_with_invalid_xml(invalid_xml_path, xsd_path): + printfn = mock.Mock() schema = etree.XMLSchema(etree.parse(xsd_path)) result = 
load_premis_events_from_xml.get_validated_etree( invalid_xml_path["path"].as_posix(), schema, printfn @@ -156,7 +157,7 @@ def test_get_validated_etree_with_invalid_xml(mocker, invalid_xml_path, xsd_path assert result is None -def test_get_validated_etree_with_valid_xml(mocker, simple_xml_path, xsd_path): +def test_get_validated_etree_with_valid_xml(simple_xml_path, xsd_path): schema = etree.XMLSchema(etree.parse(xsd_path)) result = load_premis_events_from_xml.get_validated_etree( simple_xml_path.as_posix(), schema @@ -194,14 +195,14 @@ def test_parse_datetime_with_empty_string(): assert load_premis_events_from_xml.parse_datetime("") is None -def test_get_elements(mocker): - root = mocker.Mock(**{"get.return_value": "3.0"}) - element = mocker.Mock(**{"get.return_value": None}) - tree = mocker.Mock( +def test_get_elements(): + root = mock.Mock(**{"get.return_value": "3.0"}) + element = mock.Mock(**{"get.return_value": None}) + tree = mock.Mock( **{"getroot.return_value": root, "findall.return_value": [element]} ) selector = "premis:object" - element_factory = mocker.Mock(return_value={"identifier": "id"}) + element_factory = mock.Mock(return_value={"identifier": "id"}) result = load_premis_events_from_xml.get_elements(tree, selector, element_factory) element.get.assert_called_once_with("version") element.set.assert_called_once_with("version", "3.0") @@ -209,10 +210,10 @@ def test_get_elements(mocker): assert list(result.values()) == [{"identifier": "id"}] -def test_get_premis_element_children_identifiers(mocker): - identifier1 = mocker.Mock(type="t", value="1") - identifier2 = mocker.Mock(type="t", value="2") - premis_element = mocker.Mock(**{"findall.return_value": [identifier1, identifier2]}) +def test_get_premis_element_children_identifiers(): + identifier1 = mock.Mock(type="t", value="1") + identifier2 = mock.Mock(type="t", value="2") + premis_element = mock.Mock(**{"findall.return_value": [identifier1, identifier2]}) result = 
load_premis_events_from_xml.get_premis_element_children_identifiers( premis_element, "premis:object" ) @@ -227,15 +228,19 @@ def test_get_premis_element_children_identifiers(mocker): ], ids=["original_name_as_string", "original_name_as_empty_string"], ) -def test_file_element_factory(mocker, params): - premis_element = mocker.Mock( - identifier_type="f", identifier_value="1", original_name=params["original_name"] - ) - mocker.patch("metsrw.plugins.premisrw.premis_to_data") - mocker.patch("load_premis_events_from_xml.PREMISFile", return_value=premis_element) - mocker.patch( - "load_premis_events_from_xml.get_premis_element_children_identifiers", - return_value=set(), +@mock.patch("metsrw.plugins.premisrw.premis_to_data") +@mock.patch("load_premis_events_from_xml.PREMISFile") +@mock.patch( + "load_premis_events_from_xml.get_premis_element_children_identifiers", + return_value=set(), +) +def test_file_element_factory( + get_premis_element_children_identifiers, premis_file, premis_to_data, params +): + premis_file.return_value = mock.Mock( + identifier_type="f", + identifier_value="1", + original_name=params["original_name"], ) result = load_premis_events_from_xml.file_element_factory(None) assert result == { @@ -245,18 +250,21 @@ def test_file_element_factory(mocker, params): } -def test_agent_element_factory(mocker): - premis_element = mocker.Mock( +@mock.patch("metsrw.plugins.premisrw.premis_to_data") +@mock.patch("metsrw.plugins.premisrw.PREMISAgent") +@mock.patch( + "load_premis_events_from_xml.get_premis_element_children_identifiers", + return_value=set(), +) +def test_agent_element_factory( + get_premis_element_children_identifiers, premis_agent, premis_to_data +): + premis_element = mock.Mock( identifier_type="a", identifier_value="1", type="agenttype" ) # a "name" attribute can't be set on a mock at creation time premis_element.name = "name" - mocker.patch("metsrw.plugins.premisrw.premis_to_data") - mocker.patch("metsrw.plugins.premisrw.PREMISAgent", 
return_value=premis_element) - mocker.patch( - "load_premis_events_from_xml.get_premis_element_children_identifiers", - return_value=set(), - ) + premis_agent.return_value = premis_element result = load_premis_events_from_xml.agent_element_factory(None) assert result == { "identifier": ("a", "1"), @@ -277,14 +285,22 @@ def test_agent_element_factory(mocker): ], ids=["detail_note_as_None", "detail_note_as_string"], ) -def test_event_element_factory(mocker, params): - event_detail = mocker.Mock(event_detail="event detail") - event_outcome_part1 = mocker.Mock(event_outcome="joined") - event_outcome_part2 = mocker.Mock(event_outcome="string") - event_outcome_detail = mocker.Mock( +@mock.patch("metsrw.plugins.premisrw.premis_to_data") +@mock.patch("metsrw.plugins.premisrw.PREMISEvent") +@mock.patch( + "load_premis_events_from_xml.get_premis_element_children_identifiers", + return_value=set(), +) +def test_event_element_factory( + get_premis_element_children_identifiers, premis_event, premis_to_data, params +): + event_detail = mock.Mock(event_detail="event detail") + event_outcome_part1 = mock.Mock(event_outcome="joined") + event_outcome_part2 = mock.Mock(event_outcome="string") + event_outcome_detail = mock.Mock( event_outcome_detail_note=params["event_outcome_detail_note"] ) - premis_element = mocker.Mock( + premis_element = mock.Mock( identifier_type="e", identifier_value="1", type="ingestion", @@ -297,12 +313,7 @@ def test_event_element_factory(mocker, params): ] }, ) - mocker.patch("metsrw.plugins.premisrw.premis_to_data") - mocker.patch("metsrw.plugins.premisrw.PREMISEvent", return_value=premis_element) - mocker.patch( - "load_premis_events_from_xml.get_premis_element_children_identifiers", - return_value=set(), - ) + premis_event.return_value = premis_element result = load_premis_events_from_xml.event_element_factory(None) event_datetime = result.pop("event_datetime") if params["event_outcome_detail_note"] is not None: @@ -331,20 +342,24 @@ def 
test_event_element_factory(mocker, params): ) -def test_event_element_factory_prints_datetime_error(mocker): - printfn = mocker.Mock() - premis_element = mocker.Mock( +@mock.patch("metsrw.plugins.premisrw.premis_to_data") +@mock.patch( + "metsrw.plugins.premisrw.PREMISEvent", + return_value=mock.Mock( identifier_type="e", identifier_value="1", date_time="foobar", **{"findall.return_value": []}, - ) - mocker.patch("metsrw.plugins.premisrw.premis_to_data") - mocker.patch("metsrw.plugins.premisrw.PREMISEvent", return_value=premis_element) - mocker.patch( - "load_premis_events_from_xml.get_premis_element_children_identifiers", - return_value=set(), - ) + ), +) +@mock.patch( + "load_premis_events_from_xml.get_premis_element_children_identifiers", + return_value=set(), +) +def test_event_element_factory_prints_datetime_error( + get_premis_element_children_identifiers, premis_event, premis_to_data +): + printfn = mock.Mock() load_premis_events_from_xml.event_element_factory(None, printfn) printfn.assert_called_once_with( "The premis:event element with premis:eventIdentifierType='e' and premis:eventIdentifierValue='1' contains a premis:eventDateTime value that could not be parsed: foobar", @@ -407,21 +422,21 @@ def test_event_element_factory_with_no_event_outcome_detail(): assert result["identifier"] == ("NRI Repository Event ID", "NRI-016") -def test_get_or_create_agents(mocker): - mock_agent_model = mocker.Mock( - **{"objects.get_or_create.return_value": (None, None)} - ) - mocker.patch("load_premis_events_from_xml.Agent", mock_agent_model) - agents = [{"identifier": ("type", "value"), "name": "agent1", "type": "agenttype"}] - load_premis_events_from_xml.get_or_create_agents(agents) - mock_agent_model.objects.get_or_create.assert_called_once_with( - **{ - "identifiertype": "type", - "identifiervalue": "value", - "name": "agent1", - "agenttype": "agenttype", - } - ) +def test_get_or_create_agents(): + mock_agent_model = mock.Mock(**{"objects.get_or_create.return_value": 
(None, None)}) + with mock.patch("load_premis_events_from_xml.Agent", mock_agent_model): + agents = [ + {"identifier": ("type", "value"), "name": "agent1", "type": "agenttype"} + ] + load_premis_events_from_xml.get_or_create_agents(agents) + mock_agent_model.objects.get_or_create.assert_called_once_with( + **{ + "identifiertype": "type", + "identifiervalue": "value", + "name": "agent1", + "agenttype": "agenttype", + } + ) def test_premis_file_class(): @@ -430,16 +445,16 @@ def test_premis_file_class(): assert getattr(premis_element, "original_name", default) is not default -def test_get_invalid_file_identifiers(mocker): - printfn = mocker.Mock() +def test_get_invalid_file_identifiers(): + printfn = mock.Mock() prefix = load_premis_events_from_xml.TRANSFER_ORIGINAL_LOCATION_PREFIX valid_filenames = ["".join([prefix, "object1"]), "".join([prefix, "object2"])] def mock_filter(originallocation): return_value = originallocation.decode() in valid_filenames - return mocker.Mock(**{"exists.return_value": return_value}) + return mock.Mock(**{"exists.return_value": return_value}) - file_queryset = mocker.Mock(**{"filter.side_effect": mock_filter}) + file_queryset = mock.Mock(**{"filter.side_effect": mock_filter}) files = { ("f", "valid"): {"original_name": "object1"}, ("f", "no-original-name"): {"original_name": ""}, @@ -450,11 +465,11 @@ def mock_filter(originallocation): ) assert result == {("f", "no-original-name"), ("f", "nonexistent-name")} calls = [ - mocker.call( + mock.call( "The premis:object element with premis:objectIdentifierType='f' and premis:objectIdentifierValue='no-original-name' does not contain a premis:originalName element", file=sys.stderr, ), - mocker.call( + mock.call( "The premis:object element with premis:objectIdentifierType='f' and premis:objectIdentifierValue='nonexistent-name' contains a premis:originalName value that does not match any filename in this transfer: 'object3'", file=sys.stderr, ), @@ -462,8 +477,8 @@ def 
mock_filter(originallocation): printfn.assert_has_calls(calls, any_order=True) -def test_print_unrelated_files(mocker): - printfn = mocker.Mock() +def test_print_unrelated_files(): + printfn = mock.Mock() files = { ("f", "1"): {"identifier": ("f", "1")}, ("f", "2"): {"identifier": ("f", "2")}, @@ -481,8 +496,8 @@ def test_print_unrelated_files(mocker): ) -def test_print_unrelated_agents(mocker): - printfn = mocker.Mock() +def test_print_unrelated_agents(): + printfn = mock.Mock() agents = { ("a", "1"): {"identifier": ("a", "1")}, ("a", "2"): {"identifier": ("a", "2")}, @@ -500,8 +515,8 @@ def test_print_unrelated_agents(mocker): ) -def test_print_unrelated_events(mocker): - printfn = mocker.Mock() +def test_print_unrelated_events(): + printfn = mock.Mock() events = { ("e", "1"): {"files": {"f1"}, "agents": set()}, ("e", "2"): {"files": {"f1"}, "agents": {"a1"}}, @@ -510,19 +525,19 @@ def test_print_unrelated_events(mocker): } load_premis_events_from_xml.print_unrelated_events(events, printfn) calls = [ - mocker.call( + mock.call( "The premis:event element with premis:eventIdentifierType='e' and premis:eventIdentifierValue='1' is not related to any premis:agent element", file=sys.stderr, ), - mocker.call( + mock.call( "The premis:event element with premis:eventIdentifierType='e' and premis:eventIdentifierValue='3' is not related to any premis:object element", file=sys.stderr, ), - mocker.call( + mock.call( "The premis:event element with premis:eventIdentifierType='e' and premis:eventIdentifierValue='4' is not related to any premis:object element", file=sys.stderr, ), - mocker.call( + mock.call( "The premis:event element with premis:eventIdentifierType='e' and premis:eventIdentifierValue='4' is not related to any premis:agent element", file=sys.stderr, ), @@ -530,8 +545,8 @@ def test_print_unrelated_events(mocker): printfn.assert_has_calls(calls, any_order=True) -def test_print_files_related_to_nonexistent_events(mocker): - printfn = mocker.Mock() +def 
test_print_files_related_to_nonexistent_events(): + printfn = mock.Mock() files = { ("f", "1"): {"events": set()}, ("f", "2"): {"events": {("e", "1"), ("e", "2"), ("e", "3")}}, @@ -548,8 +563,8 @@ def test_print_files_related_to_nonexistent_events(mocker): ) -def test_print_agents_related_to_nonexistent_events(mocker): - printfn = mocker.Mock() +def test_print_agents_related_to_nonexistent_events(): + printfn = mock.Mock() agents = { ("a", "1"): {"events": set()}, ("a", "2"): {"events": {("e", "1"), ("e", "2"), ("e", "3")}}, @@ -566,8 +581,8 @@ def test_print_agents_related_to_nonexistent_events(mocker): ) -def test_print_events_related_to_nonexistent_files(mocker): - printfn = mocker.Mock() +def test_print_events_related_to_nonexistent_files(): + printfn = mock.Mock() files = {("f", "1"): {}, ("f", "2"): {}} events = { ("e", "1"): {"files": set()}, @@ -584,8 +599,8 @@ def test_print_events_related_to_nonexistent_files(mocker): ) -def test_print_events_related_to_nonexistent_agents(mocker): - printfn = mocker.Mock() +def test_print_events_related_to_nonexistent_agents(): + printfn = mock.Mock() agents = {("a", "1"): {}, ("a", "2"): {}} events = { ("e", "1"): {"agents": set()}, @@ -640,8 +655,8 @@ def test_relate_agents_to_events(): } -def test_get_event_agents(mocker): - printfn = mocker.Mock() +def test_get_event_agents(): + printfn = mock.Mock() event = {"agents": {"a2", "a3"}} agents = {"a1": {"identifier": "a1"}, "a2": {"identifier": "a2"}} agent_identifiers = set(agents) @@ -666,8 +681,8 @@ def test_get_event_agents(mocker): ], ids=["no_file_identifiers_to_ignore", "with_file_identifiers_to_ignore"], ) -def test_get_event_files(mocker, params): - printfn = mocker.Mock() +def test_get_event_files(params): + printfn = mock.Mock() event = {"files": {"f1", "f2", "f3"}} files = {"f1": {"identifier": "f1"}, "f2": {"identifier": "f2"}} file_identifiers = set(files) @@ -689,12 +704,11 @@ def test_get_event_files(mocker, params): }, ], ) -def 
test_ensure_event_id_is_uuid(mocker, params): +@mock.patch("uuid.uuid4") +def test_ensure_event_id_is_uuid(uuid4, params): if params["message_logged"]: - mocker.patch( - "uuid.uuid4", return_value=uuid.UUID("f4eea76b-1921-4152-b1b4-a93dbbfeaaef") - ) - printfn = mocker.Mock() + uuid4.return_value = uuid.UUID("f4eea76b-1921-4152-b1b4-a93dbbfeaaef") + printfn = mock.Mock() result = load_premis_events_from_xml.ensure_event_id_is_uuid( params["event_id"], printfn ) @@ -714,10 +728,11 @@ def existent_event_id(transfer_file): @pytest.mark.django_db -def test_ensure_event_id_is_uuid_with_existent_event(mocker, existent_event_id): +@mock.patch("uuid.uuid4") +def test_ensure_event_id_is_uuid_with_existent_event(uuid4, existent_event_id): expected_uuid = uuid.uuid4() - mocker.patch("uuid.uuid4", return_value=expected_uuid) - printfn = mocker.Mock() + uuid4.return_value = expected_uuid + printfn = mock.Mock() result = load_premis_events_from_xml.ensure_event_id_is_uuid( existent_event_id, printfn ) @@ -728,8 +743,8 @@ def test_ensure_event_id_is_uuid_with_existent_event(mocker, existent_event_id): ) -def test_get_valid_events(mocker): - printfn = mocker.Mock() +def test_get_valid_events(): + printfn = mock.Mock() files = {"f1": {"identifier": "f1"}, "f2": {"identifier": "f2"}} agents = {"a1": {"identifier": "a1"}, "a2": {"identifier": "a2"}} events = { @@ -754,15 +769,15 @@ def test_get_valid_events(mocker): ] calls = [ - mocker.call( + mock.call( "The premis:event element with premis:eventIdentifierType='e' and premis:eventIdentifierValue='2' is not related to any filename in the transfer files", file=sys.stderr, ), - mocker.call( + mock.call( "The premis:event element with premis:eventIdentifierType='e' and premis:eventIdentifierValue='3' is not related to any agents", file=sys.stderr, ), - mocker.call( + mock.call( "The premis:event element with premis:eventIdentifierType='e' and premis:eventIdentifierValue='4' is not related to any agents", file=sys.stderr, ), @@ -771,7 
+786,7 @@ def test_get_valid_events(mocker): @pytest.mark.django_db -def test_save_events(mocker, transfer, transfer_file): +def test_save_events(transfer, transfer_file): # check there are no events initially assert not Event.objects.count() @@ -812,7 +827,7 @@ def test_save_events(mocker, transfer, transfer_file): ] file_queryset = File.objects.filter(transfer=transfer) - printfn = mocker.Mock() + printfn = mock.Mock() # save the event load_premis_events_from_xml.save_events(valid_events, file_queryset, printfn) diff --git a/tests/MCPClient/test_parse_mets_to_db.py b/tests/MCPClient/test_parse_mets_to_db.py index bc8611e2ed..2e6ed4e538 100644 --- a/tests/MCPClient/test_parse_mets_to_db.py +++ b/tests/MCPClient/test_parse_mets_to_db.py @@ -666,9 +666,9 @@ def test_insert_file_info(self): @pytest.mark.django_db -def test_main_sets_aip_reingest_type(mocker, sip): - mocker.patch("parse_mets_to_db.os") - mocker.patch("parse_mets_to_db.etree") +@mock.patch("parse_mets_to_db.os") +@mock.patch("parse_mets_to_db.etree") +def test_main_sets_aip_reingest_type(etree, os, sip): job = None assert not models.SIP.objects.filter(uuid=sip.uuid, sip_type="AIP-REIN").exists() @@ -678,9 +678,9 @@ def test_main_sets_aip_reingest_type(mocker, sip): @pytest.mark.django_db -def test_main_unsets_partial_reingest_flag(mocker, sip): - mocker.patch("parse_mets_to_db.os") - mocker.patch("parse_mets_to_db.etree") +@mock.patch("parse_mets_to_db.os") +@mock.patch("parse_mets_to_db.etree") +def test_main_unsets_partial_reingest_flag(etree, os, sip): job = None sip.set_partial_reingest() assert sip.is_partial_reingest() diff --git a/tests/MCPClient/test_pid_components.py b/tests/MCPClient/test_pid_components.py index 6db9a49a44..fe248f6114 100644 --- a/tests/MCPClient/test_pid_components.py +++ b/tests/MCPClient/test_pid_components.py @@ -8,6 +8,7 @@ import os from itertools import chain +from unittest import mock import bind_pid import bind_pids @@ -218,8 +219,9 @@ def directories(transfer, sip): 
@pytest.fixture -def pid_web_service(mocker): - mocker.patch("requests.post", return_value=mocker.Mock(status_code=200)) +def pid_web_service(): + with mock.patch("requests.post", return_value=mock.Mock(status_code=200)): + yield @pytest.mark.django_db @@ -242,8 +244,18 @@ def test_bind_pids_no_config( @pytest.mark.django_db +@mock.patch("bind_pids._get_unique_acc_no") +@mock.patch("bind_pids._validate_handle_server_config") def test_bind_pids( - sip, settings, transfer, files, directories, mocker, mcp_job, pid_web_service + _validate_handle_server_config, + _get_unique_acc_no, + sip, + settings, + transfer, + files, + directories, + mcp_job, + pid_web_service, ): """Test the bind_pids function end-to-end and ensure that the result is that which is anticipated. @@ -253,8 +265,8 @@ def test_bind_pids( """ # We might want to return a unique accession number, but we can also # test here using the package UUID, the function's fallback position. - mocker.patch.object(bind_pids, "_get_unique_acc_no", return_value=str(sip.uuid)) - mocker.patch.object(bind_pids, "_validate_handle_server_config", return_value=None) + _get_unique_acc_no.return_value = str(sip.uuid) + _validate_handle_server_config.return_value = None # Primary entry-point for the bind_pids microservice job.
bind_pids.main(mcp_job, str(sip.uuid), "") @@ -394,8 +406,26 @@ def test_bind_pid_no_settings( @pytest.mark.django_db +@mock.patch.object( + DeclarePIDs, + "_retrieve_identifiers_path", + return_value=os.path.join( + THIS_DIR, "fixtures", "pid_declaration", "identifiers.json" + ), +) +@mock.patch("bind_pids._get_unique_acc_no") +@mock.patch("bind_pids._validate_handle_server_config", return_value=None) def test_pid_declaration( - sip, settings, transfer, files, directories, mocker, mcp_job, pid_web_service + _validate_handle_server_config, + _get_unique_acc_no, + _retrieve_identifiers_path, + sip, + settings, + transfer, + files, + directories, + mcp_job, + pid_web_service, ): """Test that the overall functionality of the PID declaration functions work as expected. @@ -405,13 +435,6 @@ def test_pid_declaration( all_identifier_types = ( TRADITIONAL_IDENTIFIERS + BOUND_IDENTIFIER_TYPES + DECLARED_IDENTIFIER_TYPES ) - mocker.patch.object( - DeclarePIDs, - "_retrieve_identifiers_path", - return_value=os.path.join( - THIS_DIR, "fixtures", "pid_declaration", "identifiers.json" - ), - ) DeclarePIDs(mcp_job).pid_declaration(unit_uuid=sip.uuid, sip_directory="") # Declare PIDs allows us to assign PIDs to very specific objects in a # transfer. @@ -434,8 +457,7 @@ def test_pid_declaration( assert example_uri in value, "Example URI type not preserved" if key == PID_ULID: assert len(example_ulid) == len(value) - mocker.patch.object(bind_pids, "_get_unique_acc_no", return_value=str(sip.uuid)) - mocker.patch.object(bind_pids, "_validate_handle_server_config", return_value=None) + _get_unique_acc_no.return_value = str(sip.uuid) # Primary entry-point for the bind_pids microservice job. 
bind_pids.main(mcp_job, str(sip.uuid), "") for mdl in chain((sip_mdl,), dir_mdl): @@ -482,7 +504,7 @@ def test_pid_declaration( @pytest.mark.django_db def test_pid_declaration_exceptions( - sip, settings, transfer, files, directories, mocker, mcp_job + sip, settings, transfer, files, directories, mcp_job ): """Ensure that the PID declaration feature exits when the JSOn cannot be loaded. @@ -495,17 +517,16 @@ def test_pid_declaration_exceptions( assert ( "No identifiers.json file found" in mcp_job.get_stderr().strip() ), "Expecting no identifiers.json file, but got something else" - # Test behavior when identifiers.json is badly formatted. - bad_identifiers_loc = os.path.join( - THIS_DIR, "fixtures", "pid_declaration", "bad_identifiers.json" - ) - mocker.patch.object( - DeclarePIDs, "_retrieve_identifiers_path", return_value=bad_identifiers_loc - ) - try: - DeclarePIDs(mcp_job).pid_declaration(unit_uuid="", sip_directory="") - except DeclarePIDsException as err: - json_error = "Expecting value: line 15 column 1 (char 336)" - assert json_error in str( - err - ), "Error message something other than anticipated for invalid JSON" + with mock.patch( + "pid_declaration.DeclarePIDs._retrieve_identifiers_path", + return_value=os.path.join( + THIS_DIR, "fixtures", "pid_declaration", "bad_identifiers.json" + ), + ): + try: + DeclarePIDs(mcp_job).pid_declaration(unit_uuid="", sip_directory="") + except DeclarePIDsException as err: + json_error = "Expecting value: line 15 column 1 (char 336)" + assert json_error in str( + err + ), "Error message something other than anticipated for invalid JSON" diff --git a/tests/MCPClient/test_upload_archivesspace.py b/tests/MCPClient/test_upload_archivesspace.py index 2b47ce90aa..b8acf38ae5 100644 --- a/tests/MCPClient/test_upload_archivesspace.py +++ b/tests/MCPClient/test_upload_archivesspace.py @@ -1,6 +1,7 @@ """Tests for the upload_archivesspace.py client script.""" import uuid +from unittest import mock import pytest import 
upload_archivesspace @@ -24,8 +25,8 @@ def test_get_files_from_dip_finds_files(tmpdir): assert sorted(result) == [str(object1), str(object2)] -def test_get_files_from_dip_with_empty_dip_location(tmpdir, mocker): - logger = mocker.patch("upload_archivesspace.logger") +@mock.patch("upload_archivesspace.logger") +def test_get_files_from_dip_with_empty_dip_location(logger, tmpdir): dip = tmpdir.mkdir("mydip") with pytest.raises(ValueError): upload_archivesspace.get_files_from_dip(str(dip)) @@ -33,27 +34,25 @@ def test_get_files_from_dip_with_empty_dip_location(tmpdir, mocker): logger.error.assert_called_once_with(f"no files in {str(dip)}/objects") -def test_get_pairs(mocker): +@mock.patch( + "upload_archivesspace.ArchivesSpaceDIPObjectResourcePairing.objects.filter", + return_value=[ + mock.Mock(fileuuid="1", resourceid="myresource"), + mock.Mock(fileuuid="2", resourceid="myresource"), + ], +) +def test_get_pairs(filter_mock): dip_uuid = "somedipuuid" - filter_mock = mocker.patch( - "upload_archivesspace.ArchivesSpaceDIPObjectResourcePairing.objects.filter", - return_value=[ - mocker.Mock(fileuuid="1", resourceid="myresource"), - mocker.Mock(fileuuid="2", resourceid="myresource"), - ], - ) result = upload_archivesspace.get_pairs(dip_uuid) assert result == {"1": "myresource", "2": "myresource"} filter_mock.assert_called_once_with(dipuuid=dip_uuid) -def test_delete_pairs(mocker): +@mock.patch("upload_archivesspace.ArchivesSpaceDIPObjectResourcePairing.objects.filter") +def test_delete_pairs(filter_mock): dip_uuid = "somedipuuid" - queryset_mock = mocker.Mock() - filter_mock = mocker.patch( - "upload_archivesspace.ArchivesSpaceDIPObjectResourcePairing.objects.filter", - return_value=queryset_mock, - ) + queryset_mock = mock.Mock() + filter_mock.return_value = queryset_mock upload_archivesspace.delete_pairs(dip_uuid) filter_mock.assert_called_once_with(dipuuid=dip_uuid) queryset_mock.delete.assert_called_once() @@ -64,14 +63,15 @@ def test_delete_pairs(mocker): 
["http://some/uri/", "http://some/uri"], ids=["uri_with_trailing_slash", "uri_with_no_trailing_slash"], ) -def test_upload_to_archivesspace_adds_trailing_slash_to_uri(db, mocker, uri): +@mock.patch("upload_archivesspace.mets_file") +@mock.patch("upload_archivesspace.get_pairs") +def test_upload_to_archivesspace_adds_trailing_slash_to_uri( + get_pairs, mets_file, db, uri +): file_uuid = str(uuid.uuid4()) dip_uuid = str(uuid.uuid4()) - client_mock = mocker.Mock() - mocker.patch("upload_archivesspace.mets_file") - mocker.patch( - "upload_archivesspace.get_pairs", return_value={file_uuid: "myresource"} - ) + client_mock = mock.Mock() + get_pairs.return_value = {file_uuid: "myresource"} files = [f"file/{file_uuid}-path"] success = upload_archivesspace.upload_to_archivesspace( files, client_mock, "", "", "", "", uri, dip_uuid, "", "", "", "", "" ) @@ -117,10 +117,12 @@ def test_upload_to_archivesspace_adds_trailing_slash_to_uri(db, mocker, uri): ], ids=["with_restrictions", "with_access_conditions", "with_use_conditions"], ) -def test_upload_to_archivesspace_gets_mets_if_needed(mocker, params): - mocker.patch("upload_archivesspace.get_pairs") - logger = mocker.patch("upload_archivesspace.logger") - mets_file_mock = mocker.patch("upload_archivesspace.mets_file") +@mock.patch("upload_archivesspace.get_pairs") +@mock.patch("upload_archivesspace.logger") +@mock.patch("upload_archivesspace.mets_file") +def test_upload_to_archivesspace_gets_mets_if_needed( + mets_file_mock, logger, get_pairs, params +): upload_archivesspace.upload_to_archivesspace( [], "",
/dip/location/path/METS.dipuuid.xml"), ] ) -def test_upload_to_archivesspace_logs_files_with_no_pairs(db, mocker): +@mock.patch("upload_archivesspace.get_pairs") +@mock.patch("upload_archivesspace.logger") +@mock.patch("upload_archivesspace.mets_file") +def test_upload_to_archivesspace_logs_files_with_no_pairs( + mets_file, logger, get_pairs, db +): file1_uuid = uuid.uuid4() file2_uuid = uuid.uuid4() file3_uuid = uuid.uuid4() dip_uuid = uuid.uuid4() - mocker.patch( - "upload_archivesspace.get_pairs", - return_value={str(file1_uuid): "myresource", str(file3_uuid): "myresource"}, - ) - logger = mocker.patch("upload_archivesspace.logger") - mocker.patch("upload_archivesspace.mets_file") - client_mock = mocker.Mock() + get_pairs.return_value = { + str(file1_uuid): "myresource", + str(file3_uuid): "myresource", + } + client_mock = mock.Mock() files = [ f"/path/to/{file1_uuid}-image.jpg", f"/path/to/{file2_uuid}-video.avi", @@ -171,27 +176,25 @@ def test_upload_to_archivesspace_logs_files_with_no_pairs(db, mocker): assert not success -def test_upload_to_archivesspace_when_upload_fails(db, mocker): +@mock.patch("upload_archivesspace.get_pairs") +@mock.patch("upload_archivesspace.logger") +@mock.patch("upload_archivesspace.mets_file") +def test_upload_to_archivesspace_when_upload_fails(mets_file, logger, get_pairs, db): file1_uuid = uuid.uuid4() file2_uuid = uuid.uuid4() file3_uuid = uuid.uuid4() dip_uuid = uuid.uuid4() - mocker.patch( - "upload_archivesspace.get_pairs", - return_value={ - str(file1_uuid): "myresource", - str(file2_uuid): "myresource", - str(file3_uuid): "myresource", - }, - ) - logger = mocker.patch("upload_archivesspace.logger") - mocker.patch("upload_archivesspace.mets_file") + get_pairs.return_value = { + str(file1_uuid): "myresource", + str(file2_uuid): "myresource", + str(file3_uuid): "myresource", + } def fail_video_upload(*args, **kwargs): if kwargs.get("uri").endswith("video.avi"): raise upload_archivesspace.ArchivesSpaceError("error with 
ArchivesSpace") - client_mock = mocker.Mock(**{"add_digital_object.side_effect": fail_video_upload}) + client_mock = mock.Mock(**{"add_digital_object.side_effect": fail_video_upload}) files = [ f"/path/to/{str(file1_uuid)}-image.jpg", f"/path/to/{str(file2_uuid)}-video.avi", @@ -208,10 +211,11 @@ def fail_video_upload(*args, **kwargs): assert not success -def test_call(db, mocker): - parser_mock = mocker.Mock( +@mock.patch( + "upload_archivesspace.get_parser", + return_value=mock.Mock( **{ - "parse_args.return_value": mocker.Mock( + "parse_args.return_value": mock.Mock( **{ "base_url": "some_base_url", "user": "some_user", @@ -231,20 +235,22 @@ def test_call(db, mocker): } ) } - ) - mocker.patch("upload_archivesspace.get_parser", return_value=parser_mock) - client_mock = mocker.Mock() - client_factory_mock = mocker.patch( - "upload_archivesspace.ArchivesSpaceClient", return_value=client_mock - ) - get_files_from_dip_mock = mocker.patch( - "upload_archivesspace.get_files_from_dip", return_value=[] - ) - upload_to_archivesspace = mocker.patch( - "upload_archivesspace.upload_to_archivesspace" - ) - job = mocker.Mock(args=[]) - job.JobContext = mocker.MagicMock() + ), +) +@mock.patch("upload_archivesspace.ArchivesSpaceClient") +@mock.patch("upload_archivesspace.get_files_from_dip", return_value=[]) +@mock.patch("upload_archivesspace.upload_to_archivesspace") +def test_call( + upload_to_archivesspace, + get_files_from_dip_mock, + client_factory_mock, + get_parser, + db, +): + client_mock = mock.Mock() + client_factory_mock.return_value = client_mock + job = mock.Mock(args=[]) + job.JobContext = mock.MagicMock() upload_archivesspace.call([job]) client_factory_mock.assert_called_once_with( host="some_base_url", user="some_user", passwd="some_passwd" @@ -276,28 +282,35 @@ def test_call(db, mocker): ], ids=["no_files_found", "unknown_error_raised"], ) -def test_call_when_files_from_dip_cant_be_retrieved(db, mocker, params): - mocker.patch("upload_archivesspace.get_parser") 
- mocker.patch("upload_archivesspace.ArchivesSpaceClient") - mocker.patch( - "upload_archivesspace.get_files_from_dip", side_effect=params["exception"] - ) - upload_to_archivesspace = mocker.patch( - "upload_archivesspace.upload_to_archivesspace" - ) - job = mocker.Mock(args=[]) - job.JobContext = mocker.MagicMock() +@mock.patch("upload_archivesspace.get_parser") +@mock.patch("upload_archivesspace.ArchivesSpaceClient") +@mock.patch("upload_archivesspace.get_files_from_dip") +@mock.patch("upload_archivesspace.upload_to_archivesspace") +def test_call_when_files_from_dip_cant_be_retrieved( + upload_to_archivesspace, + get_files_from_dip, + client_factory, + get_parser, + db, + params, +): + get_files_from_dip.side_effect = params["exception"] + + job = mock.Mock(args=[]) + job.JobContext = mock.MagicMock() upload_archivesspace.call([job]) job.set_status.assert_called_once_with(params["expected_job_status"]) - assert not upload_to_archivesspace.called + upload_to_archivesspace.assert_not_called() -def test_call_when_not_all_files_can_be_paired(db, mocker): - mocker.patch("upload_archivesspace.get_parser") - mocker.patch("upload_archivesspace.ArchivesSpaceClient") - mocker.patch("upload_archivesspace.get_files_from_dip") - mocker.patch("upload_archivesspace.upload_to_archivesspace", return_value=False) - job = mocker.Mock(args=[]) - job.JobContext = mocker.MagicMock() +@mock.patch("upload_archivesspace.get_parser") +@mock.patch("upload_archivesspace.ArchivesSpaceClient") +@mock.patch("upload_archivesspace.get_files_from_dip") +@mock.patch("upload_archivesspace.upload_to_archivesspace", return_value=False) +def test_call_when_not_all_files_can_be_paired( + upload_to_archivesspace, get_files_from_dip, client_factory, get_parser, db +): + job = mock.Mock(args=[]) + job.JobContext = mock.MagicMock() upload_archivesspace.call([job]) job.set_status.assert_called_once_with(2) diff --git a/tests/MCPClient/test_upload_qubit.py b/tests/MCPClient/test_upload_qubit.py index 
03eab28954..696f046a14 100644 --- a/tests/MCPClient/test_upload_qubit.py +++ b/tests/MCPClient/test_upload_qubit.py @@ -1,4 +1,5 @@ import os +from unittest import mock import pytest import upload_qubit @@ -28,15 +29,12 @@ def access(db, sip): ) -def test_start_synchronously(db, mocker, mcp_job, sip, sip_job, access): - mocker.patch( - "requests.request", - return_value=mocker.Mock( - status_code=200, headers={"Location": "http://example.com"} - ), - ) - - opts = mocker.Mock( +@mock.patch( + "requests.request", + return_value=mock.Mock(status_code=200, headers={"Location": "http://example.com"}), +) +def test_start_synchronously(request, db, mcp_job, sip, sip_job, access): + opts = mock.Mock( uuid=sip.uuid, rsync_target=False, rsync_command=None, @@ -57,15 +55,12 @@ def test_start_synchronously(db, mocker, mcp_job, sip, sip_job, access): assert access.target == "atom-description-id" -def test_first_run(db, mocker, mcp_job, sip_job, sip, sip_file): - mocker.patch( - "requests.request", - return_value=mocker.Mock( - status_code=200, headers={"Location": "http://example.com"} - ), - ) - - opts = mocker.Mock( +@mock.patch( + "requests.request", + return_value=mock.Mock(status_code=200, headers={"Location": "http://example.com"}), +) +def test_first_run(request, db, mcp_job, sip_job, sip, sip_file): + opts = mock.Mock( uuid=sip.uuid, rsync_target=False, rsync_command=None, diff --git a/tests/MCPClient/test_verify_checksum.py b/tests/MCPClient/test_verify_checksum.py index dfbfa93134..94cf4dde88 100644 --- a/tests/MCPClient/test_verify_checksum.py +++ b/tests/MCPClient/test_verify_checksum.py @@ -23,6 +23,7 @@ """ import subprocess +from unittest import mock import pytest from client.job import Job @@ -84,7 +85,7 @@ def test_valid_initialisation(self, fixture): "Expecting NoHashCommandAvailable for a filename we shouldn't be able to handle" ) - def test_provenance_string(self, mocker): + def test_provenance_string(self): """Test to ensure that the string output to the 
PREMIS event for this microservice Job is consistent with what we're expecting. Provenance string includes the command called, plus the utility's version string. @@ -95,21 +96,23 @@ def test_provenance_string(self, mocker): "md5sum (GNU coreutils) 8.28", "Copyright (C) 2017 Free Software Foundation, Inc.", ] - mock = mocker.patch.object(hashsum, "_call", return_value=version_string) - assert ( - hashsum.version() == "md5sum (GNU coreutils) 8.28" - ), "Hashsum version retrieved is incorrect" - mock.assert_called_once_with("--version") - mocker.patch.object( - hashsum, - "command_called", - (hashsum.COMMAND,) + ("-c", "--strict", hash_file), - ) - expected_provenance = 'program="md5sum -c --strict metadata/checksum.md5"; version="md5sum (GNU coreutils) 8.28"' - provenance_output = hashsum.get_command_detail() - assert ( - provenance_output == expected_provenance - ), f"Provenance output is incorrect: {provenance_output}" + with mock.patch.object( + hashsum, "_call", return_value=version_string + ) as mock_call: + assert ( + hashsum.version() == "md5sum (GNU coreutils) 8.28" + ), "Hashsum version retrieved is incorrect" + mock_call.assert_called_once_with("--version") + expected_provenance = 'program="md5sum -c --strict metadata/checksum.md5"; version="md5sum (GNU coreutils) 8.28"' + with mock.patch.object( + hashsum, + "command_called", + (hashsum.COMMAND,) + ("-c", "--strict", hash_file), + ): + provenance_output = hashsum.get_command_detail() + assert ( + provenance_output == expected_provenance + ), f"Provenance output is incorrect: {provenance_output}" def test_provenance_string_no_command(self): """When nothing has happened, e.g. 
the checksums haven't been validated @@ -123,7 +126,7 @@ def test_provenance_string_no_command(self): except PREMISFailure: pass - def test_compare_hashes_failed(self, mocker): + def test_compare_hashes_failed(self): """Ensure we get consistent output when the checksum comparison fails.""" hash_file = "metadata/checksum.sha256" job = Job("stub", "stub", ["", ""]) @@ -143,21 +146,24 @@ def test_compare_hashes_failed(self, mocker): "sha256: objects/nested/ファイル3.bin: FAILED\n" "sha256: objects/readonly.file: FAILED open or read" ) - mock = mocker.patch.object(hashsum, "_call", return_value=output_string) - mocker.patch.object(hashsum, "count_and_compare_lines", return_value=True) - mock.side_effect = subprocess.CalledProcessError( - returncode=1, cmd=toolname, output=output_string - ) - ret = hashsum.compare_hashes("") - mock.assert_called_once_with( - "-c", "--strict", hash_file, transfer_dir=objects_dir - ) - assert ret == 1, self.assert_return_value.format(ret) - assert ( - job.get_stderr().strip() == exception_string - ), self.assert_exception_string + with mock.patch.object( + hashsum, "_call", return_value=output_string + ) as mock_call, mock.patch.object( + hashsum, "count_and_compare_lines", return_value=True + ): + mock_call.side_effect = subprocess.CalledProcessError( + returncode=1, cmd=toolname, output=output_string + ) + ret = hashsum.compare_hashes("") + mock_call.assert_called_once_with( + "-c", "--strict", hash_file, transfer_dir=objects_dir + ) + assert ret == 1, self.assert_return_value.format(ret) + assert ( + job.get_stderr().strip() == exception_string + ), self.assert_exception_string - def test_compare_hashes_with_bad_files(self, mocker): + def test_compare_hashes_with_bad_files(self): """Ensure that the formatting of errors is consistent if improperly formatted files are provided to hashsum. """ @@ -180,49 +186,57 @@ def test_compare_hashes_with_bad_files(self, mocker): "sha1: comparison exited with status: 1. 
Please check the formatting of the checksums or integrity of the files.\n" "sha1: sha1sum: WARNING: 1 line is improperly formatted" ) - mock = mocker.patch.object(hashsum, "_call", return_value=no_proper_output) - mocker.patch.object(hashsum, "count_and_compare_lines", return_value=True) - mock.side_effect = subprocess.CalledProcessError( - returncode=1, cmd=toolname, output=no_proper_output - ) - ret = hashsum.compare_hashes("") - mock.assert_called_once_with( - "-c", "--strict", hash_file, transfer_dir=objects_dir - ) - assert ( - job.get_stderr().strip() == except_string_no_proper_out - ), self.assert_exception_string - assert ret == 1, self.assert_return_value.format(ret) - # Flush job.error as it isn't flushed automatically. - job.error = "" - mock = mocker.patch.object(hashsum, "_call", return_value=improper_formatting) - mock.side_effect = subprocess.CalledProcessError( - returncode=1, cmd="sha1sum", output=improper_formatting - ) - ret = hashsum.compare_hashes("") - assert ( - job.get_stderr().strip() == except_string_improper_format - ), self.assert_exception_string - mock.assert_called_once_with( - "-c", "--strict", hash_file, transfer_dir=objects_dir - ) - assert ret == 1, self.assert_return_value.format(ret) + with mock.patch.object( + hashsum, "_call", return_value=no_proper_output + ) as mock_call, mock.patch.object( + hashsum, "count_and_compare_lines", return_value=True + ): + mock_call.side_effect = subprocess.CalledProcessError( + returncode=1, cmd=toolname, output=no_proper_output + ) + ret = hashsum.compare_hashes("") + mock_call.assert_called_once_with( + "-c", "--strict", hash_file, transfer_dir=objects_dir + ) + assert ( + job.get_stderr().strip() == except_string_no_proper_out + ), self.assert_exception_string + assert ret == 1, self.assert_return_value.format(ret) + # Flush job.error as it isn't flushed automatically. 
+ job.error = "" + with mock.patch.object( + hashsum, "_call", return_value=improper_formatting + ) as mock_call: + mock_call.side_effect = subprocess.CalledProcessError( + returncode=1, cmd="sha1sum", output=improper_formatting + ) + ret = hashsum.compare_hashes("") + assert ( + job.get_stderr().strip() == except_string_improper_format + ), self.assert_exception_string + mock_call.assert_called_once_with( + "-c", "--strict", hash_file, transfer_dir=objects_dir + ) + assert ret == 1, self.assert_return_value.format(ret) - def test_line_comparison_fail(self, mocker): + def test_line_comparison_fail(self): """If the checksum line and object comparison function fails then we want to return early and _call shouldn't be called. """ hash_file = "metadata/checksum.sha1" hashsum = self.setup_hashsum(hash_file, Job("stub", "stub", ["", ""])) toolname = "sha1sum" - mock = mocker.patch.object(hashsum, "_call", return_value=None) - mocker.patch.object(hashsum, "count_and_compare_lines", return_value=False) - mock.side_effect = subprocess.CalledProcessError( - returncode=1, cmd=toolname, output=None - ) - ret = hashsum.compare_hashes("") - mock.assert_not_called() - assert ret == 1, self.assert_return_value.format(ret) + with mock.patch.object( + hashsum, "_call", return_value=None + ) as mock_call, mock.patch.object( + hashsum, "count_and_compare_lines", return_value=False + ): + mock_call.side_effect = subprocess.CalledProcessError( + returncode=1, cmd=toolname, output=None + ) + ret = hashsum.compare_hashes("") + mock_call.assert_not_called() + assert ret == 1, self.assert_return_value.format(ret) @pytest.mark.parametrize( "fixture", diff --git a/tests/MCPServer/test_gearman.py b/tests/MCPServer/test_gearman.py index ca8fab26f8..92abcbf11f 100644 --- a/tests/MCPServer/test_gearman.py +++ b/tests/MCPServer/test_gearman.py @@ -1,5 +1,6 @@ import math import uuid +from unittest import mock import gearman import pytest @@ -18,8 +19,8 @@ def run(self, *args, **kwargs): 
@pytest.fixture -def simple_job(request, mocker): - return MockJob(mocker.Mock(), mocker.Mock(), mocker.Mock(), name="test_job_name") +def simple_job(request): + return MockJob(mock.Mock(), mock.Mock(), mock.Mock(), name="test_job_name") @pytest.fixture @@ -57,12 +58,10 @@ def format_gearman_response(task_results): return response -def test_gearman_task_submission(simple_job, simple_task, mocker): - # Mock to avoid db writes - mocker.patch("server.tasks.backends.gearman_backend.Task.bulk_log") - mocker.patch.object(GearmanTaskBackend, "TASK_BATCH_SIZE", 1) - mock_client = mocker.patch("server.tasks.backends.gearman_backend.MCPGearmanClient") - +@mock.patch("server.tasks.backends.gearman_backend.MCPGearmanClient") +@mock.patch("server.tasks.GearmanTaskBackend.TASK_BATCH_SIZE", 1) +@mock.patch("server.tasks.backends.gearman_backend.Task.bulk_log") +def test_gearman_task_submission(bulk_log, mock_client, simple_job, simple_task): backend = GearmanTaskBackend() backend.submit_task(simple_job, simple_task) @@ -81,14 +80,12 @@ def test_gearman_task_submission(simple_job, simple_task, mocker): assert submit_job_kwargs["max_retries"] == GearmanTaskBackend.MAX_RETRIES -def test_gearman_task_result_success(simple_job, simple_task, mocker): - # Mock to avoid db writes - mocker.patch("server.tasks.backends.gearman_backend.Task.bulk_log") - - mock_client = mocker.patch("server.tasks.backends.gearman_backend.MCPGearmanClient") +@mock.patch("server.tasks.backends.gearman_backend.MCPGearmanClient") +@mock.patch("server.tasks.backends.gearman_backend.Task.bulk_log") +def test_gearman_task_result_success(bulk_log, mock_client, simple_job, simple_task): backend = GearmanTaskBackend() - mock_gearman_job = mocker.Mock() + mock_gearman_job = mock.Mock() job_request = gearman.job.GearmanJobRequest( mock_gearman_job, background=True, max_attempts=0 ) @@ -130,14 +127,12 @@ def mock_jobs_completed(*args): assert task_result.done is True -def test_gearman_task_result_error(simple_job, 
simple_task, mocker): - # Mock to avoid db writes - mocker.patch("server.tasks.backends.gearman_backend.Task.bulk_log") - - mock_client = mocker.patch("server.tasks.backends.gearman_backend.MCPGearmanClient") +@mock.patch("server.tasks.backends.gearman_backend.MCPGearmanClient") +@mock.patch("server.tasks.backends.gearman_backend.Task.bulk_log") +def test_gearman_task_result_error(bulk_log, mock_client, simple_job, simple_task): backend = GearmanTaskBackend() - mock_gearman_job = mocker.Mock() + mock_gearman_job = mock.Mock() job_request = gearman.job.GearmanJobRequest( mock_gearman_job, background=True, max_attempts=0 ) @@ -169,14 +164,12 @@ def mock_jobs_completed(*args): @pytest.mark.parametrize( "reverse_result_order", (False, True), ids=["regular", "reversed"] ) +@mock.patch("server.tasks.backends.gearman_backend.MCPGearmanClient") +@mock.patch.object(GearmanTaskBackend, "TASK_BATCH_SIZE", 2) +@mock.patch("server.tasks.backends.gearman_backend.Task.bulk_log") def test_gearman_multiple_batches( - simple_job, simple_task, mocker, reverse_result_order + bulk_log, mock_client, simple_job, simple_task, reverse_result_order ): - # Mock to avoid db writes - mocker.patch("server.tasks.backends.gearman_backend.Task.bulk_log") - mocker.patch.object(GearmanTaskBackend, "TASK_BATCH_SIZE", 2) - mock_client = mocker.patch("server.tasks.backends.gearman_backend.MCPGearmanClient") - tasks = [] for i in range(5): task = Task( @@ -192,7 +185,7 @@ def test_gearman_multiple_batches( job_requests = [] for _ in range(3): - mock_gearman_job = mocker.Mock() + mock_gearman_job = mock.Mock() job_request = gearman.job.GearmanJobRequest( mock_gearman_job, background=True, max_attempts=0 ) diff --git a/tests/MCPServer/test_integration.py b/tests/MCPServer/test_integration.py index b5ea3e5b5c..468bba4739 100644 --- a/tests/MCPServer/test_integration.py +++ b/tests/MCPServer/test_integration.py @@ -3,6 +3,7 @@ import threading import uuid from io import StringIO +from unittest import mock 
import pytest from django.utils import timezone @@ -93,8 +94,13 @@ def dummy_file_replacements(request): @pytest.mark.django_db(transaction=True) +@mock.patch("server.jobs.decisions.load_processing_xml") +@mock.patch("server.jobs.decisions.load_preconfigured_choice") +@mock.patch("server.jobs.client.get_task_backend") def test_workflow_integration( - mocker, + mock_get_task_backend, + mock_load_preconfigured_choice, + mock_load_processing_xml, settings, tmp_path, workflow, @@ -106,162 +112,161 @@ def test_workflow_integration( echo_backend = EchoBackend() settings.SHARED_DIRECTORY = str(tmp_path) settings.PROCESSING_DIRECTORY = str(tmp_path / "processing") - mocker.patch.dict( + mock_get_task_backend.return_value = echo_backend + + with mock.patch.dict( "server.packages.BASE_REPLACEMENTS", {r"%processingDirectory%": settings.PROCESSING_DIRECTORY}, - ) - - mock_get_task_backend = mocker.patch( - "server.jobs.client.get_task_backend", return_value=echo_backend - ) - mock_load_preconfigured_choice = mocker.patch( - "server.jobs.decisions.load_preconfigured_choice" - ) - mock_load_processing_xml = mocker.patch("server.jobs.decisions.load_processing_xml") - mocker.patch.object(transfer, "files", return_value=dummy_file_replacements) - - # Schedule the first job - first_workflow_chain = workflow.get_chains()["3816f689-65a8-4ad0-ac27-74292a70b093"] - first_job_chain = JobChain(transfer, first_workflow_chain, workflow) - job = next(first_job_chain) - package_queue.schedule_job(job) - - assert package_queue.job_queue.qsize() == 1 - assert len(package_queue.active_packages) == 1 - assert transfer.uuid in package_queue.active_packages - - # Process the first job (DirectoryClientScriptJob) - future = package_queue.process_one_job(timeout=1.0) - concurrent.futures.wait([future], timeout=1.0) - - mock_get_task_backend.assert_called_once() - task = echo_backend.tasks[job.uuid][0] - - assert isinstance(job, DirectoryClientScriptJob) - assert job.exit_code == 0 - assert 
task.arguments == f'"{settings.PROCESSING_DIRECTORY}" "{transfer.uuid}"' - - # Next job in chain should be queued - assert package_queue.job_queue.qsize() == 1 - job = future.result() - - # Process the second job (FilesClientScriptJob) - future = package_queue.process_one_job(timeout=1.0) - concurrent.futures.wait([future], timeout=1.0) - - tasks = echo_backend.tasks[job.uuid] - - assert isinstance(job, FilesClientScriptJob) - assert job.exit_code == 0 - assert len(tasks) == len(dummy_file_replacements) - for task, replacement in zip(tasks, dummy_file_replacements): - assert task.arguments == '"{}"'.format(replacement[r"%fileUUID%"]) - - # Next job in chain should be queued - assert package_queue.job_queue.qsize() == 1 - job = future.result() - - # Process the third job (OutputClientScriptJob) - future = package_queue.process_one_job(timeout=1.0) - concurrent.futures.wait([future], timeout=1.0) - - assert isinstance(job, OutputClientScriptJob) - assert job.exit_code == 0 - assert job.job_chain.generated_choices == { - "default": {"description": "Default Location", "uri": DEFAULT_STORAGE_LOCATION} - } + ), mock.patch.object(transfer, "files", return_value=dummy_file_replacements): + # Schedule the first job + first_workflow_chain = workflow.get_chains()[ + "3816f689-65a8-4ad0-ac27-74292a70b093" + ] + first_job_chain = JobChain(transfer, first_workflow_chain, workflow) + job = next(first_job_chain) + package_queue.schedule_job(job) + + assert package_queue.job_queue.qsize() == 1 + assert len(package_queue.active_packages) == 1 + assert transfer.uuid in package_queue.active_packages + + # Process the first job (DirectoryClientScriptJob) + future = package_queue.process_one_job(timeout=1.0) + concurrent.futures.wait([future], timeout=1.0) + + mock_get_task_backend.assert_called_once() + task = echo_backend.tasks[job.uuid][0] + + assert isinstance(job, DirectoryClientScriptJob) + assert job.exit_code == 0 + assert task.arguments == f'"{settings.PROCESSING_DIRECTORY}" 
"{transfer.uuid}"' + + # Next job in chain should be queued + assert package_queue.job_queue.qsize() == 1 + job = future.result() + + # Process the second job (FilesClientScriptJob) + future = package_queue.process_one_job(timeout=1.0) + concurrent.futures.wait([future], timeout=1.0) + + tasks = echo_backend.tasks[job.uuid] + + assert isinstance(job, FilesClientScriptJob) + assert job.exit_code == 0 + assert len(tasks) == len(dummy_file_replacements) + for task, replacement in zip(tasks, dummy_file_replacements): + assert task.arguments == '"{}"'.format(replacement[r"%fileUUID%"]) + + # Next job in chain should be queued + assert package_queue.job_queue.qsize() == 1 + job = future.result() + + # Process the third job (OutputClientScriptJob) + future = package_queue.process_one_job(timeout=1.0) + concurrent.futures.wait([future], timeout=1.0) + + assert isinstance(job, OutputClientScriptJob) + assert job.exit_code == 0 + assert job.job_chain.generated_choices == { + "default": { + "description": "Default Location", + "uri": DEFAULT_STORAGE_LOCATION, + } + } - # Next job in chain should be queued - assert package_queue.job_queue.qsize() == 1 - job = future.result() + # Next job in chain should be queued + assert package_queue.job_queue.qsize() == 1 + job = future.result() - # Setup preconfigured choice for next job - mock_load_preconfigured_choice.return_value = DEFAULT_STORAGE_LOCATION + # Setup preconfigured choice for next job + mock_load_preconfigured_choice.return_value = DEFAULT_STORAGE_LOCATION - # Process the fourth job (OutputDecisionJob) - future = package_queue.process_one_job(timeout=1.0) - concurrent.futures.wait([future], timeout=1.0) + # Process the fourth job (OutputDecisionJob) + future = package_queue.process_one_job(timeout=1.0) + concurrent.futures.wait([future], timeout=1.0) - assert isinstance(job, OutputDecisionJob) - assert job.exit_code == 0 - assert job.job_chain.context[r"%AIPsStore%"] == DEFAULT_STORAGE_LOCATION + assert isinstance(job, 
OutputDecisionJob) + assert job.exit_code == 0 + assert job.job_chain.context[r"%AIPsStore%"] == DEFAULT_STORAGE_LOCATION - # Next job in chain should be queued - assert package_queue.job_queue.qsize() == 1 - job = future.result() + # Next job in chain should be queued + assert package_queue.job_queue.qsize() == 1 + job = future.result() - # Setup preconfigured choice for next job - mock_load_preconfigured_choice.return_value = "7b814362-c679-43c4-a2e2-1ba59957cd18" + # Setup preconfigured choice for next job + mock_load_preconfigured_choice.return_value = ( + "7b814362-c679-43c4-a2e2-1ba59957cd18" + ) - # Process the fifth job (NextChainDecisionJob) - future = package_queue.process_one_job(timeout=1.0) - concurrent.futures.wait([future], timeout=1.0) + # Process the fifth job (NextChainDecisionJob) + future = package_queue.process_one_job(timeout=1.0) + concurrent.futures.wait([future], timeout=1.0) - assert isinstance(job, NextChainDecisionJob) - assert job.exit_code == 0 + assert isinstance(job, NextChainDecisionJob) + assert job.exit_code == 0 - # Next job in chain should be queued - assert package_queue.job_queue.qsize() == 1 - job = future.result() + # Next job in chain should be queued + assert package_queue.job_queue.qsize() == 1 + job = future.result() - # We should be on chain 2 now - assert job.job_chain is not first_job_chain - assert job.job_chain.chain.id == "7b814362-c679-43c4-a2e2-1ba59957cd18" + # We should be on chain 2 now + assert job.job_chain is not first_job_chain + assert job.job_chain.chain.id == "7b814362-c679-43c4-a2e2-1ba59957cd18" - # Setup preconfigured choice for next job - mock_load_processing_xml.return_value = TEST_PROCESSING_CONFIG + # Setup preconfigured choice for next job + mock_load_processing_xml.return_value = TEST_PROCESSING_CONFIG - # Process the sixth job (UpdateContextDecisionJob) - future = package_queue.process_one_job(timeout=1.0) - concurrent.futures.wait([future], timeout=1.0) + # Process the sixth job 
(UpdateContextDecisionJob) + future = package_queue.process_one_job(timeout=1.0) + concurrent.futures.wait([future], timeout=1.0) - assert isinstance(job, UpdateContextDecisionJob) - assert job.exit_code == 0 - assert job.job_chain.context[r"%TestValue%"] == "7" + assert isinstance(job, UpdateContextDecisionJob) + assert job.exit_code == 0 + assert job.job_chain.context[r"%TestValue%"] == "7" - # Next job in chain should be queued - assert package_queue.job_queue.qsize() == 1 - job = future.result() + # Next job in chain should be queued + assert package_queue.job_queue.qsize() == 1 + job = future.result() - # Process the seventh job (SetUnitVarLinkJob) - future = package_queue.process_one_job(timeout=1.0) - concurrent.futures.wait([future], timeout=1.0) + # Process the seventh job (SetUnitVarLinkJob) + future = package_queue.process_one_job(timeout=1.0) + concurrent.futures.wait([future], timeout=1.0) - assert isinstance(job, SetUnitVarLinkJob) - assert job.exit_code == 0 + assert isinstance(job, SetUnitVarLinkJob) + assert job.exit_code == 0 - unit_var = models.UnitVariable.objects.get( - unittype=transfer.UNIT_VARIABLE_TYPE, - unituuid=transfer.uuid, - variable="test_unit_variable", - variablevalue="", - microservicechainlink="f8e4c1ee-3e43-4caa-a664-f6b6bd8f156e", - ) - assert unit_var is not None + unit_var = models.UnitVariable.objects.get( + unittype=transfer.UNIT_VARIABLE_TYPE, + unituuid=transfer.uuid, + variable="test_unit_variable", + variablevalue="", + microservicechainlink="f8e4c1ee-3e43-4caa-a664-f6b6bd8f156e", + ) + assert unit_var is not None - # Next job in chain should be queued - assert package_queue.job_queue.qsize() == 1 - job = future.result() + # Next job in chain should be queued + assert package_queue.job_queue.qsize() == 1 + job = future.result() - # Process the eighth job (GetUnitVarLinkJob) - future = package_queue.process_one_job(timeout=1.0) - concurrent.futures.wait([future], timeout=1.0) + # Process the eighth job 
(GetUnitVarLinkJob) + future = package_queue.process_one_job(timeout=1.0) + concurrent.futures.wait([future], timeout=1.0) - assert isinstance(job, GetUnitVarLinkJob) - assert job.exit_code == 0 + assert isinstance(job, GetUnitVarLinkJob) + assert job.exit_code == 0 - # Out job chain should have been redirected to the final link - assert job.job_chain.current_link.id == "f8e4c1ee-3e43-4caa-a664-f6b6bd8f156e" + # Out job chain should have been redirected to the final link + assert job.job_chain.current_link.id == "f8e4c1ee-3e43-4caa-a664-f6b6bd8f156e" - # Next job in chain should be queued - assert package_queue.job_queue.qsize() == 1 - job = future.result() + # Next job in chain should be queued + assert package_queue.job_queue.qsize() == 1 + job = future.result() - # Process the last job (DirectoryClientScriptJob) - future = package_queue.process_one_job(timeout=1.0) - concurrent.futures.wait([future], timeout=1.0) + # Process the last job (DirectoryClientScriptJob) + future = package_queue.process_one_job(timeout=1.0) + concurrent.futures.wait([future], timeout=1.0) - assert job.exit_code == 0 + assert job.exit_code == 0 - # Workflow is over; we're done - assert package_queue.job_queue.qsize() == 0 + # Workflow is over; we're done + assert package_queue.job_queue.qsize() == 0 diff --git a/tests/MCPServer/test_mcp.py b/tests/MCPServer/test_mcp.py index a7ce22a791..5c07d8affc 100644 --- a/tests/MCPServer/test_mcp.py +++ b/tests/MCPServer/test_mcp.py @@ -1,5 +1,6 @@ import threading import uuid +from unittest import mock import pytest from server.mcp import main @@ -7,7 +8,14 @@ @pytest.mark.django_db(transaction=True) -def test_watched_dir_handler_creates_transfer_if_it_does_not_exist(mocker, tmpdir): +@mock.patch("server.packages.models.Transfer.objects.create") +@mock.patch("server.packages.uuid4") +@mock.patch( + "server.mcp.JobChain", mock.MagicMock(return_value=iter(["some_chain_link"])) +) +def test_watched_dir_handler_creates_transfer_if_it_does_not_exist( + 
uuid4, create_mock, tmpdir +): """Test that a models.Transfer object exists for an unknown path. This for example simulates the case when a user drops a directory @@ -20,22 +28,17 @@ def test_watched_dir_handler_creates_transfer_if_it_does_not_exist(mocker, tmpdi """ # We're not interested in the package queue or the link chaining logics here, # so we mock very limited implementations for those. - job_chain_mock = mocker.MagicMock() - job_chain_mock.return_value = iter(["some_chain_link"]) - mocker.patch("server.mcp.JobChain", job_chain_mock) - package_queue = mocker.Mock() - watched_dir = mocker.MagicMock(unit_type="Transfer") + package_queue = mock.Mock() + watched_dir = mock.MagicMock(unit_type="Transfer") # Mock a known UUID for the new transfer. transfer_uuid = uuid.uuid4() - mocker.patch("server.packages.uuid4", return_value=transfer_uuid) + uuid4.return_value = transfer_uuid # Mock the Django manager for the Transfer model. This is mocked from the # `server.packages` module since its path from the Dashboard is not available. - transfer_mock = mocker.Mock(uuid=transfer_uuid) - create_mock = mocker.patch( - "server.packages.models.Transfer.objects.create", return_value=transfer_mock - ) + transfer_mock = mock.Mock(uuid=transfer_uuid) + create_mock.return_value = transfer_mock # The new/unknown path for creating the transfer. 
path = tmpdir.mkdir("some_transfer") @@ -48,26 +51,26 @@ def test_watched_dir_handler_creates_transfer_if_it_does_not_exist(mocker, tmpdi @pytest.mark.django_db(transaction=True) -def test_watched_dir_handler_creates_transfer_for_file(mocker, tmpdir): +@mock.patch("server.packages.models.Transfer.objects.create") +@mock.patch("server.packages.uuid4") +@mock.patch( + "server.mcp.JobChain", mock.MagicMock(return_value=iter(["some_chain_link"])) +) +def test_watched_dir_handler_creates_transfer_for_file(uuid4, create_mock, tmpdir): """Test that a models.Transfer object exists for a file path.""" # We're not interested in the package queue or the link chaining logics here, # so we mock very limited implementations for those. - job_chain_mock = mocker.MagicMock() - job_chain_mock.return_value = iter(["some_chain_link"]) - mocker.patch("server.mcp.JobChain", job_chain_mock) - package_queue = mocker.Mock() - watched_dir = mocker.MagicMock(unit_type="Transfer") + package_queue = mock.Mock() + watched_dir = mock.MagicMock(unit_type="Transfer") # Mock a known UUID for the new transfer. transfer_uuid = uuid.uuid4() - mocker.patch("server.packages.uuid4", return_value=transfer_uuid) + uuid4.return_value = transfer_uuid # Mock the Django manager for the Transfer model. This is mocked from the # `server.packages` module since its path from the Dashboard is not available. - transfer_mock = mocker.Mock(uuid=transfer_uuid) - create_mock = mocker.patch( - "server.packages.models.Transfer.objects.create", return_value=transfer_mock - ) + transfer_mock = mock.Mock(uuid=transfer_uuid) + create_mock.return_value = transfer_mock # The new/unknown path of a file for creating the transfer. 
path = tmpdir.join("file.txt") @@ -80,7 +83,21 @@ def test_watched_dir_handler_creates_transfer_for_file(mocker, tmpdir): create_mock.assert_called_once_with(uuid=transfer_uuid, currentlocation=str(path)) -def test_mcp_main(mocker, settings): +@mock.patch("server.mcp.metrics") +@mock.patch("server.mcp.Task") +@mock.patch("server.mcp.Job") +@mock.patch("server.mcp.Package") +@mock.patch("server.mcp.shared_dirs") +@mock.patch("server.mcp.load_workflow") +def test_mcp_main( + mock_load_workflow, + mock_shared_dirs, + mock_package, + mock_job, + mock_task, + mock_metrics, + settings, +): """Test spin up with immediate shutdown. This test has limited utility because everything is mocked, but it should @@ -91,13 +108,6 @@ def test_mcp_main(mocker, settings): settings.WORKER_THREADS = 1 settings.PROMETHEUS_ENABLED = True - mock_load_workflow = mocker.patch("server.mcp.load_workflow") - mock_shared_dirs = mocker.patch("server.mcp.shared_dirs") - mock_package = mocker.patch("server.mcp.Package") - mock_job = mocker.patch("server.mcp.Job") - mock_task = mocker.patch("server.mcp.Task") - mock_metrics = mocker.patch("server.mcp.metrics") - shutdown_event = threading.Event() shutdown_event.set() diff --git a/tests/MCPServer/test_package.py b/tests/MCPServer/test_package.py index fad43d7e62..919ed8b92a 100644 --- a/tests/MCPServer/test_package.py +++ b/tests/MCPServer/test_package.py @@ -1,6 +1,7 @@ import uuid from concurrent.futures import ThreadPoolExecutor from pathlib import Path +from unittest import mock import pytest from django.core.exceptions import ValidationError @@ -396,10 +397,10 @@ def test_package_statuses(tmp_path, package_class, model_class): @pytest.mark.django_db(transaction=True) -def test_create_package(mocker, tmp_path, admin_user, settings): - package_queue = mocker.Mock(spec=PackageQueue) - executor = mocker.Mock(spec=ThreadPoolExecutor) - workflow = mocker.Mock(spec=Workflow) +def test_create_package(tmp_path, admin_user, settings): + package_queue = 
mock.Mock(spec=PackageQueue) + executor = mock.Mock(spec=ThreadPoolExecutor) + workflow = mock.Mock(spec=Workflow) d = tmp_path / "sub" d.mkdir() diff --git a/tests/MCPServer/test_processing_config.py b/tests/MCPServer/test_processing_config.py index 83e355614f..80d39f9b67 100644 --- a/tests/MCPServer/test_processing_config.py +++ b/tests/MCPServer/test_processing_config.py @@ -1,5 +1,6 @@ import os import pathlib +from unittest import mock import pytest from server.processing_config import ChainChoicesField @@ -23,15 +24,30 @@ def _workflow(): return load(fp) -def test_get_processing_fields(mocker, _workflow): - mocker.patch("storageService.get_location", return_value=[]) - +@mock.patch("storageService.get_location", return_value=[]) +def test_get_processing_fields(_workflow): fields = get_processing_fields(_workflow) assert len(fields) == len(processing_fields) -def test_storage_location_field(mocker, _workflow): +@mock.patch( + "server.processing_config.processing_fields", + new=[ + StorageLocationField( + link_id="b320ce81-9982-408a-9502-097d0daa48fa", + name="store_aip_location", + purpose="AS", + ), + StorageLocationField( + link_id="cd844b6e-ab3c-4bc6-b34f-7103f88715de", + name="store_dip_location", + purpose="DS", + ), + ], +) +@mock.patch("storageService.get_location") +def test_storage_location_field(get_location, _workflow): def mocked_get_location(purpose): return [ { @@ -41,23 +57,7 @@ def mocked_get_location(purpose): } ] - mocker.patch("storageService.get_location", side_effect=mocked_get_location) - - mocker.patch( - "server.processing_config.processing_fields", - new=[ - StorageLocationField( - link_id="b320ce81-9982-408a-9502-097d0daa48fa", - name="store_aip_location", - purpose="AS", - ), - StorageLocationField( - link_id="cd844b6e-ab3c-4bc6-b34f-7103f88715de", - name="store_dip_location", - purpose="DS", - ), - ], - ) + get_location.side_effect = mocked_get_location assert get_processing_fields(_workflow) == [ { @@ -121,20 +121,19 @@ def 
mocked_get_location(purpose): ] -def test_replace_dict_field(mocker, _workflow): - mocker.patch( - "server.processing_config.processing_fields", - new=[ - ReplaceDictField( - link_id="f09847c2-ee51-429a-9478-a860477f6b8d", - name="select_format_id_tool_transfer", - ), - ReplaceDictField( - link_id="f19926dd-8fb5-4c79-8ade-c83f61f55b40", name="delete_packages" - ), - ], - ) - +@mock.patch( + "server.processing_config.processing_fields", + new=[ + ReplaceDictField( + link_id="f09847c2-ee51-429a-9478-a860477f6b8d", + name="select_format_id_tool_transfer", + ), + ReplaceDictField( + link_id="f19926dd-8fb5-4c79-8ade-c83f61f55b40", name="delete_packages" + ), + ], +) +def test_replace_dict_field(_workflow): assert get_processing_fields(_workflow) == [ { "choices": [ @@ -197,22 +196,21 @@ def test_replace_dict_field(mocker, _workflow): ] -def test_chain_choices_field(mocker, _workflow): - mocker.patch( - "server.processing_config.processing_fields", - new=[ - ChainChoicesField( - link_id="eeb23509-57e2-4529-8857-9d62525db048", name="reminder" - ), - ChainChoicesField( - link_id="cb8e5706-e73f-472f-ad9b-d1236af8095f", - name="normalize", - ignored_choices=["Reject SIP"], - find_duplicates="Normalize", - ), - ], - ) - +@mock.patch( + "server.processing_config.processing_fields", + new=[ + ChainChoicesField( + link_id="eeb23509-57e2-4529-8857-9d62525db048", name="reminder" + ), + ChainChoicesField( + link_id="cb8e5706-e73f-472f-ad9b-d1236af8095f", + name="normalize", + ignored_choices=["Reject SIP"], + find_duplicates="Normalize", + ), + ], +) +def test_chain_choices_field(_workflow): assert get_processing_fields(_workflow) == [ { "choices": [ @@ -318,23 +316,22 @@ def test_chain_choices_field(mocker, _workflow): ] -def test_shared_choices_field(mocker, _workflow): - mocker.patch( - "server.processing_config.processing_fields", - new=[ - SharedChainChoicesField( - link_id="856d2d65-cd25-49fa-8da9-cabb78292894", - name="virus_scanning", - related_links=[ - 
"1dad74a2-95df-4825-bbba-dca8b91d2371", - "7e81f94e-6441-4430-a12d-76df09181b66", - "390d6507-5029-4dae-bcd4-ce7178c9b560", - "97a5ddc0-d4e0-43ac-a571-9722405a0a9b", - ], - ) - ], - ) - +@mock.patch( + "server.processing_config.processing_fields", + new=[ + SharedChainChoicesField( + link_id="856d2d65-cd25-49fa-8da9-cabb78292894", + name="virus_scanning", + related_links=[ + "1dad74a2-95df-4825-bbba-dca8b91d2371", + "7e81f94e-6441-4430-a12d-76df09181b66", + "390d6507-5029-4dae-bcd4-ce7178c9b560", + "97a5ddc0-d4e0-43ac-a571-9722405a0a9b", + ], + ) + ], +) +def test_shared_choices_field(_workflow): assert get_processing_fields(_workflow) == [ { "id": "856d2d65-cd25-49fa-8da9-cabb78292894", @@ -412,14 +409,14 @@ def test_processing_configuration_file_exists_with_None(): assert not processing_configuration_file_exists(None) -def test_processing_configuration_file_exists_with_existent_file(mocker): - mocker.patch("os.path.isfile", return_value=True) +@mock.patch("os.path.isfile", return_value=True) +def test_processing_configuration_file_exists_with_existent_file(isfile): assert processing_configuration_file_exists("defaultProcessingMCP.xml") -def test_processing_configuration_file_exists_with_nonexistent_file(mocker): - mocker.patch("os.path.isfile", return_value=False) - logger = mocker.patch("server.processing_config.logger") +@mock.patch("server.processing_config.logger") +@mock.patch("os.path.isfile", return_value=False) +def test_processing_configuration_file_exists_with_nonexistent_file(isfile, logger): assert not processing_configuration_file_exists("bogus.xml") logger.debug.assert_called_once_with( "Processing configuration file for %s does not exist", "bogus.xml" diff --git a/tests/MCPServer/test_queues.py b/tests/MCPServer/test_queues.py index debce6b4b5..6f67e84779 100644 --- a/tests/MCPServer/test_queues.py +++ b/tests/MCPServer/test_queues.py @@ -3,6 +3,7 @@ import threading import time import uuid +from unittest import mock import pytest from server.jobs 
import DecisionJob @@ -120,8 +121,8 @@ def dip(request, tmp_path): dip_2 = dip -def test_schedule_job(package_queue, transfer, workflow_link, mocker): - test_job = MockJob(mocker.Mock(), workflow_link, transfer) +def test_schedule_job(package_queue, transfer, workflow_link): + test_job = MockJob(mock.Mock(), workflow_link, transfer) package_queue.schedule_job(test_job) @@ -140,9 +141,9 @@ def test_schedule_job(package_queue, transfer, workflow_link, mocker): assert package_queue.dip_queue.qsize() == 0 -def test_active_transfer_limit(package_queue, transfer, sip, workflow_link, mocker): - test_job1 = MockJob(mocker.Mock(), workflow_link, transfer) - test_job2 = MockJob(mocker.Mock(), workflow_link, sip) +def test_active_transfer_limit(package_queue, transfer, sip, workflow_link): + test_job1 = MockJob(mock.Mock(), workflow_link, transfer) + test_job2 = MockJob(mock.Mock(), workflow_link, sip) package_queue.schedule_job(test_job1) @@ -176,11 +177,9 @@ def test_activate_and_deactivate_package(package_queue, transfer): assert transfer.uuid not in package_queue.active_packages -def test_queue_next_job_raises_full( - package_queue, transfer, sip, workflow_link, mocker -): - test_job1 = MockJob(mocker.Mock(), workflow_link, transfer) - test_job2 = MockJob(mocker.Mock(), workflow_link, sip) +def test_queue_next_job_raises_full(package_queue, transfer, sip, workflow_link): + test_job1 = MockJob(mock.Mock(), workflow_link, transfer) + test_job2 = MockJob(mock.Mock(), workflow_link, sip) package_queue.schedule_job(test_job1) package_queue.schedule_job(test_job2) @@ -191,8 +190,8 @@ def test_queue_next_job_raises_full( package_queue.queue_next_job() -def test_await_job_decision(package_queue, transfer, workflow_link, mocker): - test_job = MockDecisionJob(mocker.Mock(), workflow_link, transfer) +def test_await_job_decision(package_queue, transfer, workflow_link): + test_job = MockDecisionJob(mock.Mock(), workflow_link, transfer) package_queue.await_decision(test_job) assert 
package_queue.job_queue.qsize() == 0 @@ -203,10 +202,10 @@ def test_await_job_decision(package_queue, transfer, workflow_link, mocker): def test_decision_job_moved_to_awaiting_decision( - package_queue, transfer, sip, workflow_link, mocker + package_queue, transfer, sip, workflow_link ): - test_job1 = MockDecisionJob(mocker.Mock(), workflow_link, transfer) - test_job2 = MockJob(mocker.Mock(), workflow_link, sip) + test_job1 = MockDecisionJob(mock.Mock(), workflow_link, transfer) + test_job2 = MockJob(mock.Mock(), workflow_link, sip) package_queue.schedule_job(test_job1) @@ -229,12 +228,12 @@ def test_decision_job_moved_to_awaiting_decision( def test_all_scheduled_decisions_are_processed( - package_queue_regular, dip_1, dip_2, workflow_link, mocker + package_queue_regular, dip_1, dip_2, workflow_link ): package_queue = package_queue_regular - test_job1 = MockDecisionJob(mocker.Mock(), workflow_link, dip_1) - test_job2 = MockDecisionJob(mocker.Mock(), workflow_link, dip_2) + test_job1 = MockDecisionJob(mock.Mock(), workflow_link, dip_1) + test_job2 = MockDecisionJob(mock.Mock(), workflow_link, dip_2) # Schedule two jobs simultaneously. # We want to confirm that both are eventually processed. @@ -271,7 +270,7 @@ def test_all_scheduled_decisions_are_processed( @pytest.mark.django_db(transaction=True) def test_all_scheduled_jobs_are_processed( - package_queue_regular, dip_1, dip_2, workflow_link, mocker + package_queue_regular, dip_1, dip_2, workflow_link ): package_queue = package_queue_regular @@ -279,8 +278,8 @@ def test_all_scheduled_jobs_are_processed( # It causes the queue manager to hit the database. workflow_link._src["end"] = True - test_job1 = MockJob(mocker.Mock(), workflow_link, dip_1) - test_job2 = MockJob(mocker.Mock(), workflow_link, dip_2) + test_job1 = MockJob(mock.Mock(), workflow_link, dip_1) + test_job2 = MockJob(mock.Mock(), workflow_link, dip_2) # Schedule two jobs simultaneously. # We want to confirm that both are eventually processed. 
diff --git a/tests/MCPServer/test_rpc_server.py b/tests/MCPServer/test_rpc_server.py index 333611940b..bd586f437b 100644 --- a/tests/MCPServer/test_rpc_server.py +++ b/tests/MCPServer/test_rpc_server.py @@ -1,6 +1,7 @@ import pathlib import threading import uuid +from unittest import mock import pytest from django.utils import timezone @@ -14,7 +15,7 @@ @pytest.mark.django_db -def test_approve_partial_reingest_handler(mocker): +def test_approve_partial_reingest_handler(): sip = models.SIP.objects.create(uuid=str(uuid.uuid4())) models.Job.objects.create( sipuuid=sip.pk, @@ -22,7 +23,7 @@ def test_approve_partial_reingest_handler(mocker): createdtime=timezone.now(), currentstep=models.Job.STATUS_AWAITING_DECISION, ) - package_queue = mocker.MagicMock() + package_queue = mock.MagicMock() with open(ASSETS_DIR / "workflow.json") as fp: wf = workflow.load(fp) shutdown_event = threading.Event() diff --git a/tests/MCPServer/test_translation.py b/tests/MCPServer/test_translation.py index 0ff10e46fd..565cd4d9ff 100644 --- a/tests/MCPServer/test_translation.py +++ b/tests/MCPServer/test_translation.py @@ -1,9 +1,11 @@ +from unittest import mock + from server.translation import UNKNOWN_TRANSLATION_LABEL from server.translation import TranslationLabel -def test_translation_label(mocker): - mocker.patch("server.translation.FALLBACK_LANG", "en") +@mock.patch("server.translation.FALLBACK_LANG", "en") +def test_translation_label(): tr = TranslationLabel({"en": "cat", "es": "gato"}) assert str(tr) == "cat" assert tr["es"] == "gato" @@ -11,12 +13,12 @@ def test_translation_label(mocker): assert tr.get_label(lang="es") == "gato" assert tr.get_label(lang="is", fallback_label="köttur") == "köttur" assert tr.get_label(lang="??") == "cat" - mocker.patch("server.translation.FALLBACK_LANG", "xx") - assert tr.get_label(lang="yy") == UNKNOWN_TRANSLATION_LABEL + with mock.patch("server.translation.FALLBACK_LANG", "xx"): + assert tr.get_label(lang="yy") == UNKNOWN_TRANSLATION_LABEL -def 
test_translation_label_with_prepared_codes(mocker): - mocker.patch("server.translation.FALLBACK_LANG", "en") +@mock.patch("server.translation.FALLBACK_LANG", "en") +def test_translation_label_with_prepared_codes(): tr = TranslationLabel({"en": "dog", "pt_BR": "cão"}) assert tr.get_label(lang="en") == "dog" assert tr.get_label(lang="pt-br") == "cão" diff --git a/tests/MCPServer/test_workflow.py b/tests/MCPServer/test_workflow.py index 23b3021875..d8b318854b 100644 --- a/tests/MCPServer/test_workflow.py +++ b/tests/MCPServer/test_workflow.py @@ -1,6 +1,7 @@ import os import pathlib from io import StringIO +from unittest import mock import pytest from django.utils.translation import gettext_lazy @@ -13,15 +14,15 @@ FIXTURES_DIR = pathlib.Path(__file__).parent / "fixtures" -def test_invert_job_statuses(mocker): - mocker.patch( - "server.jobs.Job.STATUSES", - ( - (1, gettext_lazy("Uno")), - (2, gettext_lazy("Dos")), - (3, gettext_lazy("Tres")), - ), - ) +@mock.patch( + "server.jobs.Job.STATUSES", + ( + (1, gettext_lazy("Uno")), + (2, gettext_lazy("Dos")), + (3, gettext_lazy("Tres")), + ), +) +def test_invert_job_statuses(): ret = workflow._invert_job_statuses() assert ret == {"Uno": 1, "Dos": 2, "Tres": 3} @@ -98,7 +99,7 @@ def test_load_valid_document(path): assert first_link.get_label("foobar") is None -def test_link_browse_methods(mocker): +def test_link_browse_methods(): with open(os.path.join(ASSETS_DIR, "workflow.json")) as fp: wf = workflow.load(fp) ln = wf.get_link("1ba589db-88d1-48cf-bb1a-a5f9d2b17378") @@ -113,7 +114,7 @@ def test_get_schema(): assert schema["$id"] == "https://www.archivematica.org/labs/workflow/schema/v1.json" -def test_get_schema_not_found(mocker): - mocker.patch("server.workflow._LATEST_SCHEMA", "non-existen-schema") +@mock.patch("server.workflow._LATEST_SCHEMA", "non-existen-schema") +def test_get_schema_not_found(): with pytest.raises(IOError): workflow._get_schema() diff --git a/tests/archivematicaCommon/test_storage_service.py 
b/tests/archivematicaCommon/test_storage_service.py index 96df376017..44eba5e4c5 100644 --- a/tests/archivematicaCommon/test_storage_service.py +++ b/tests/archivematicaCommon/test_storage_service.py @@ -4,6 +4,7 @@ """ import json +from unittest import mock import pytest from requests import Response @@ -32,18 +33,19 @@ def mock_response(status_code, content_type, content): (0, "", {}), ], ) -def test_location_desc_from_slug(status_code, content_type, expected_result, mocker): +@mock.patch("storageService._storage_service_url") +@mock.patch("requests.Session.get") +def test_location_desc_from_slug( + get, _storage_service_url, status_code, content_type, expected_result +): """Test location description from slug Rudimentary test to ensure that we're returning something that implements .get() for any potential return from this function. And that we get something sensible for unexpected status codes. """ - mocker.patch("storageService._storage_service_url") - mocker.patch( - "requests.Session.get", - return_value=mock_response(status_code, content_type, expected_result), - ) + + get.return_value = mock_response(status_code, content_type, expected_result) res = location_description_from_slug("mock_uri") assert res == expected_result, f"Unexpected result for status test: {status_code}" @@ -74,7 +76,10 @@ def test_location_desc_from_slug(status_code, content_type, expected_result, moc ("/api/v2/location/fd46760b-567f-4c17-a2f4-a05e79074932/", {}, ""), ], ) -def test_retrieve_storage_location(slug, return_value, expected_result, mocker): +@mock.patch("storageService.location_description_from_slug") +def test_retrieve_storage_location( + location_description_from_slug, slug, return_value, expected_result +): """Test retrieve storage location Ensure that we're able to retrieve the resource description for the @@ -84,8 +89,6 @@ def test_retrieve_storage_location(slug, return_value, expected_result, mocker): specified a description, we should be able to fall-back on 
something else. Likewise if we receive something unexpected. """ - mocker.patch( - "storageService.location_description_from_slug", return_value=return_value - ) + location_description_from_slug.return_value = return_value res = retrieve_storage_location_description(slug) assert res == expected_result diff --git a/tests/dashboard/components/archival_storage/test_archival_storage.py b/tests/dashboard/components/archival_storage/test_archival_storage.py index 456d3c9145..3d4004499d 100644 --- a/tests/dashboard/components/archival_storage/test_archival_storage.py +++ b/tests/dashboard/components/archival_storage/test_archival_storage.py @@ -111,31 +111,34 @@ def get_streaming_response(streaming_content): return response_text -def test_get_mets_unknown_mets(mocker, amsetup, admin_client): - mocker.patch("elasticSearchFunctions.get_client") - mocker.patch("elasticSearchFunctions.get_aip_data", side_effect=IndexError()) +@mock.patch("elasticSearchFunctions.get_client") +@mock.patch("elasticSearchFunctions.get_aip_data", side_effect=IndexError()) +def test_get_mets_unknown_mets(get_aip_data, get_client, amsetup, admin_client): response = admin_client.get( "/archival-storage/download/aip/11111111-1111-1111-1111-111111111111/mets_download/" ) assert isinstance(response, HttpResponseNotFound) -def test_get_mets_known_mets(mocker, amsetup, admin_client, mets_hdr): +@mock.patch("elasticSearchFunctions.get_client") +@mock.patch("elasticSearchFunctions.get_aip_data") +@mock.patch("components.helpers.stream_mets_from_storage_service") +def test_get_mets_known_mets( + stream_mets_from_storage_service, + get_aip_data, + get_client, + amsetup, + admin_client, + mets_hdr, +): sip_uuid = "22222222-2222-2222-2222-222222222222" - mocker.patch("elasticSearchFunctions.get_client") - mocker.patch( - "elasticSearchFunctions.get_aip_data", - return_value={"_source": {"name": f"transfer-{sip_uuid}"}}, - ) + get_aip_data.return_value = {"_source": {"name": f"transfer-{sip_uuid}"}} 
mock_response = StreamingHttpResponse(mets_hdr) mock_content_type = "application/xml" mock_content_disposition = f"attachment; filename=METS.{sip_uuid}.xml;" mock_response[CONTENT_TYPE] = mock_content_type mock_response[CONTENT_DISPOSITION] = mock_content_disposition - mocker.patch( - "components.helpers.stream_mets_from_storage_service", - return_value=mock_response, - ) + stream_mets_from_storage_service.return_value = mock_response response = admin_client.get( f"/archival-storage/download/aip/{sip_uuid}/mets_download/" ) @@ -145,20 +148,25 @@ def test_get_mets_known_mets(mocker, amsetup, admin_client, mets_hdr): assert response.get(CONTENT_DISPOSITION) == mock_content_disposition -def test_get_pointer_unknown_pointer(mocker, amsetup, admin_client): +@mock.patch("elasticSearchFunctions.get_client") +@mock.patch("storageService.pointer_file_url") +@mock.patch("components.helpers.stream_file_from_storage_service") +def test_get_pointer_unknown_pointer( + stream_file_from_storage_service, + pointer_file_url, + get_client, + amsetup, + admin_client, +): sip_uuid = "33333333-3333-3333-3333-333333333331" pointer_url = ( f"http://archivematica-storage-service:8000/api/v2/file/{sip_uuid}/pointer_file" ) - mocker.patch("elasticSearchFunctions.get_client") - mocker.patch("storageService.pointer_file_url", return_value=pointer_url) + pointer_file_url.return_value = pointer_url mock_status_code = 404 mock_error_message = {"status": "False", "message": "an error status"} mock_response = helpers.json_response(mock_error_message, mock_status_code) - mocker.patch( - "components.helpers.stream_file_from_storage_service", - return_value=mock_response, - ) + stream_file_from_storage_service.return_value = mock_response response = admin_client.get( f"/archival-storage/download/aip/{sip_uuid}/pointer_file/" ) @@ -167,22 +175,23 @@ def test_get_pointer_unknown_pointer(mocker, amsetup, admin_client): assert json.loads(response.content) == mock_error_message -def 
test_get_pointer_known_pointer(mocker, amsetup, admin_client, mets_hdr): +@mock.patch("storageService.pointer_file_url") +@mock.patch("components.helpers.stream_file_from_storage_service") +def test_get_pointer_known_pointer( + stream_file_from_storage_service, pointer_file_url, amsetup, admin_client, mets_hdr +): sip_uuid = "44444444-4444-4444-4444-444444444444" pointer_url = ( f"http://archivematica-storage-service:8000/api/v2/file/{sip_uuid}/pointer_file" ) pointer_file = f"pointer.{sip_uuid}.xml" content_disposition = f'attachment; filename="{pointer_file}"' - mocker.patch("storageService.pointer_file_url", return_value=pointer_url) + pointer_file_url.return_value = pointer_url mock_content_type = "application/xml" mock_response = StreamingHttpResponse(mets_hdr) mock_response[CONTENT_TYPE] = mock_content_type mock_response[CONTENT_DISPOSITION] = content_disposition - mocker.patch( - "components.helpers.stream_file_from_storage_service", - return_value=mock_response, - ) + stream_file_from_storage_service.return_value = mock_response response = admin_client.get( f"/archival-storage/download/aip/{sip_uuid}/pointer_file/" ) @@ -205,15 +214,11 @@ def test_search_rejects_unsupported_file_mime(amsetup, admin_client): assert response.content == b"Please use ?mimeType=text/csv" -def test_search_as_csv(mocker, amsetup, admin_client, tmp_path): - """Test search as CSV - - Test the new route via the Archival Storage tab to be able to - download the Elasticsearch AIP index as a CSV. Here we make sure - that various headers are set as well as testing whether or not the - data is returned correctly. 
- """ - mock_augmented_result = [ +@mock.patch("elasticSearchFunctions.get_client") +@mock.patch("elasticsearch.Elasticsearch.search") +@mock.patch( + "components.archival_storage.views.search_augment_aip_results", + return_value=[ { "status": "Stored", "encrypted": False, @@ -246,13 +251,18 @@ def test_search_as_csv(mocker, amsetup, admin_client, tmp_path): "size": "152.1\xa0KB", "type": "AIP", }, - ] - mocker.patch("elasticSearchFunctions.get_client") - mocker.patch("elasticsearch.Elasticsearch.search") - mocker.patch( - "components.archival_storage.views.search_augment_aip_results", - return_value=mock_augmented_result, - ) + ], +) +def test_search_as_csv( + search_augment_aip_results, search, get_client, amsetup, admin_client, tmp_path +): + """Test search as CSV + + Test the new route via the Archival Storage tab to be able to + download the Elasticsearch AIP index as a CSV. Here we make sure + that various headers are set as well as testing whether or not the + data is returned correctly. 
+ """ REQUEST_PARAMS = { "requestFile": True, "mimeType": "text/csv", @@ -282,7 +292,17 @@ def test_search_as_csv(mocker, amsetup, admin_client, tmp_path): ) -def test_search_as_csv_invalid_route(mocker, amsetup, admin_client, tmp_path): +@mock.patch("elasticSearchFunctions.get_client", return_value=Elasticsearch()) +@mock.patch("elasticSearchFunctions.Elasticsearch.search") +@mock.patch("components.archival_storage.views.search_augment_aip_results") +def test_search_as_csv_invalid_route( + search_augment_aip_results, + search, + get_client, + amsetup, + admin_client, + tmp_path, +): """Test search as CSV invalid rute Given the ability to download the Elasticsearch AIP index table as a @@ -293,20 +313,13 @@ def test_search_as_csv_invalid_route(mocker, amsetup, admin_client, tmp_path): """ MOCK_TOTAL = 10 AUG_RESULTS = {"mock": "response"} - mocker.patch("elasticSearchFunctions.get_client", return_value=Elasticsearch()) - mocker.patch( - "elasticSearchFunctions.Elasticsearch.search", - return_value={ - "hits": {"total": MOCK_TOTAL}, - "aggregations": { - "aip_uuids": {"buckets": [{"key": "mocked", "doc_count": "mocked"}]} - }, + search.return_value = { + "hits": {"total": MOCK_TOTAL}, + "aggregations": { + "aip_uuids": {"buckets": [{"key": "mocked", "doc_count": "mocked"}]} }, - ) - mocker.patch( - "components.archival_storage.views.search_augment_aip_results", - return_value=[AUG_RESULTS], - ) + } + search_augment_aip_results.return_value = [AUG_RESULTS] REQUEST_PARAMS = {"requestFile": False} response = admin_client.get( "{}?{}".format( @@ -367,9 +380,14 @@ def test_load_datatable_state_404(self): @mock.patch("components.helpers.processing_config_path") +@mock.patch("elasticSearchFunctions.get_client") +@mock.patch("elasticSearchFunctions.get_aip_data") +@mock.patch("components.archival_storage.forms.get_atom_client") def test_view_aip_metadata_only_dip_upload_with_missing_description_slug( + get_atom_client, + get_aip_data, + get_client, processing_config_path, - 
mocker, amsetup, admin_client, tmpdir, @@ -378,25 +396,18 @@ def test_view_aip_metadata_only_dip_upload_with_missing_description_slug( processing_config_path.return_value = str(processing_configurations_dir) sip_uuid = uuid.uuid4() file_path = tmpdir.mkdir("file") - mocker.patch("elasticSearchFunctions.get_client") - mocker.patch( - "elasticSearchFunctions.get_aip_data", - return_value={ - "_source": { - "name": f"transfer-{sip_uuid}", - "filePath": str(file_path), - } - }, - ) - mocker.patch( - "components.archival_storage.forms.get_atom_client", - return_value=mocker.Mock( - **{ - "find_parent_id_for_component.side_effect": CommunicationError( - 404, mocker.Mock(url="http://example.com") - ) - } - ), + get_aip_data.return_value = { + "_source": { + "name": f"transfer-{sip_uuid}", + "filePath": str(file_path), + } + } + get_atom_client.return_value = mock.Mock( + **{ + "find_parent_id_for_component.side_effect": CommunicationError( + 404, mock.Mock(url="http://example.com") + ) + } ) response = admin_client.post( @@ -422,8 +433,13 @@ def test_create_aic_fails_if_query_is_not_passed(amsetup, admin_client): assert "Unable to create AIC: No AIPs selected" in response.content.decode() +@mock.patch("elasticSearchFunctions.get_client") +@mock.patch("databaseFunctions.createSIP") +@mock.patch( + "uuid.uuid4", return_value=uuid.UUID("1e23e6e2-02d7-4b2d-a648-caffa3b489f3") +) def test_create_aic_creates_temporary_files( - mocker, admin_client, settings, tmp_path, amsetup + uuid4, creat_sip, get_client, admin_client, settings, tmp_path, amsetup ): aipfiles_search_results = { "aggregations": { @@ -486,14 +502,9 @@ def test_create_aic_creates_temporary_files( "timed_out": False, "took": 4, } - mocker.patch( - "elasticSearchFunctions.get_client", - return_value=mocker.Mock( - **{"search.side_effect": [aipfiles_search_results, aip_search_results]} - ), + get_client.return_value = mock.Mock( + **{"search.side_effect": [aipfiles_search_results, aip_search_results]} ) - 
mocker.patch("databaseFunctions.createSIP") - mocker.patch("uuid.uuid4", return_value="1e23e6e2-02d7-4b2d-a648-caffa3b489f3") d = tmp_path / "test-aic" d.mkdir() (d / "tmp").mkdir() diff --git a/tests/dashboard/components/mcp/test_views.py b/tests/dashboard/components/mcp/test_views.py index 097c42f657..f17af51905 100644 --- a/tests/dashboard/components/mcp/test_views.py +++ b/tests/dashboard/components/mcp/test_views.py @@ -1,3 +1,5 @@ +from unittest import mock + from components.mcp import views from django.urls import reverse from externals import xmltodict @@ -36,21 +38,19 @@ """ -def test_list(mocker, rf, admin_user): +@mock.patch("contrib.mcp.client.GearmanClient") +@mock.patch( + "contrib.mcp.client.gearman.JOB_COMPLETE", +) +def test_list(job_complete, gearman_client, rf, admin_user): # Make the Gearman interactions return known values. - job_complete = mocker.patch( - "contrib.mcp.client.gearman.JOB_COMPLETE", - ) - mocker.patch( - "contrib.mcp.client.GearmanClient", - return_value=mocker.Mock( - **{ - "submit_job.return_value": mocker.Mock( - state=job_complete, - result=MCPSERVER_JOBS_AWAITING_APPROVAL_RESULT, - ) - } - ), + gearman_client.return_value = mock.Mock( + **{ + "submit_job.return_value": mock.Mock( + state=job_complete, + result=MCPSERVER_JOBS_AWAITING_APPROVAL_RESULT, + ) + } ) # Call the view we are testing. 
diff --git a/tests/dashboard/components/unit/test_views.py b/tests/dashboard/components/unit/test_views.py index 221d697d63..f9c879aa07 100644 --- a/tests/dashboard/components/unit/test_views.py +++ b/tests/dashboard/components/unit/test_views.py @@ -1,4 +1,5 @@ import uuid +from unittest import mock import pytest from components import helpers @@ -59,8 +60,8 @@ def test_it_conflicts_on_active_packages(self, install, admin_client, transfer): assert resp.status_code == 409 assert resp.json() == {"removed": False} - def test_it_handles_unknown_errors(self, install, admin_client, transfer, mocker): - mocker.patch("main.models.Transfer.objects.done", side_effect=Exception()) + @mock.patch("main.models.Transfer.objects.done", side_effect=Exception()) + def test_it_handles_unknown_errors(self, done, install, admin_client, transfer): url = reverse( "unit:mark_hidden", kwargs={"unit_type": "transfer", "unit_uuid": transfer.pk}, @@ -95,10 +96,10 @@ def test_it_rejects_non_delete_verbs(self, install, admin_client, transfer): assert resp.status_code == 405 - def test_it_handles_unknown_errors(self, install, admin_client, transfer, mocker): - mocker.patch( - "components.helpers.completed_units_efficient", side_effect=Exception() - ) + @mock.patch("components.helpers.completed_units_efficient", side_effect=Exception()) + def test_it_handles_unknown_errors( + self, completed_units_efficient, install, admin_client, transfer + ): url = reverse("unit:mark_all_hidden", kwargs={"unit_type": "transfer"}) resp = admin_client.delete(url) diff --git a/tests/dashboard/main/test_command_purge_transient_processing_data.py b/tests/dashboard/main/test_command_purge_transient_processing_data.py index 1eabb5a7dd..4c713f1e73 100644 --- a/tests/dashboard/main/test_command_purge_transient_processing_data.py +++ b/tests/dashboard/main/test_command_purge_transient_processing_data.py @@ -1,4 +1,5 @@ import uuid +from unittest import mock import elasticSearchFunctions as es import pytest @@ -85,37 
+86,45 @@ def test_purge_command_removes_all_packages( @pytest.mark.django_db -def test_purge_command_removes_search_documents(search_enabled, old_transfer, mocker): - mocker.patch( - "main.management.commands.purge_transient_processing_data.es.create_indexes_if_needed" - ) - mock_remove_backlog_transfer = mocker.patch( - "main.management.commands.purge_transient_processing_data.es.remove_backlog_transfer" - ) - mock_remove_backlog_transfer_files = mocker.patch( - "main.management.commands.purge_transient_processing_data.es.remove_backlog_transfer_files" - ) - +@mock.patch( + "main.management.commands.purge_transient_processing_data.es.create_indexes_if_needed" +) +@mock.patch( + "main.management.commands.purge_transient_processing_data.es.remove_backlog_transfer" +) +@mock.patch( + "main.management.commands.purge_transient_processing_data.es.remove_backlog_transfer_files" +) +def test_purge_command_removes_search_documents( + mock_remove_backlog_transfer_files, + mock_remove_backlog_transfer, + search_enabled, + old_transfer, +): call_command("purge_transient_processing_data") - mock_remove_backlog_transfer.assert_called_once_with(mocker.ANY, old_transfer.pk) + mock_remove_backlog_transfer.assert_called_once_with(mock.ANY, old_transfer.pk) mock_remove_backlog_transfer_files.assert_called_once_with( - mocker.ANY, old_transfer.pk + mock.ANY, old_transfer.pk ) +@mock.patch( + "main.management.commands.purge_transient_processing_data.es.create_indexes_if_needed" +) +@mock.patch( + "main.management.commands.purge_transient_processing_data.es.remove_backlog_transfer" +) +@mock.patch( + "main.management.commands.purge_transient_processing_data.es.remove_backlog_transfer_files" +) @pytest.mark.django_db -def test_purge_command_keeps_search_documents(search_enabled, old_transfer, mocker): - mocker.patch( - "main.management.commands.purge_transient_processing_data.es.create_indexes_if_needed" - ) - mock_remove_backlog_transfer = mocker.patch( - 
"main.management.commands.purge_transient_processing_data.es.remove_backlog_transfer" - ) - mock_remove_backlog_transfer_files = mocker.patch( - "main.management.commands.purge_transient_processing_data.es.remove_backlog_transfer_files" - ) - +def test_purge_command_keeps_search_documents( + mock_remove_backlog_transfer_files, + mock_remove_backlog_transfer, + search_enabled, + old_transfer, +): call_command("purge_transient_processing_data", "--keep-searches") mock_remove_backlog_transfer.assert_not_called() diff --git a/tests/dashboard/test_api.py b/tests/dashboard/test_api.py index 93f1538678..7b52c47a66 100644 --- a/tests/dashboard/test_api.py +++ b/tests/dashboard/test_api.py @@ -1,6 +1,7 @@ import datetime import json import uuid +from unittest import mock import archivematicaFunctions import pytest @@ -849,16 +850,12 @@ def test_get_unit_status_multiple( @pytest.mark.django_db -def test_copy_metadata_files_api(mocker): - # Mock authentication helper - mocker.patch("components.api.views.authenticate_request", return_value=None) - - # Mock helper that actually copies files from the transfer source locations - mocker.patch( - "components.filesystem_ajax.views._copy_from_transfer_sources", - return_value=(None, ""), - ) - +@mock.patch("components.api.views.authenticate_request", return_value=None) +@mock.patch( + "components.filesystem_ajax.views._copy_from_transfer_sources", + return_value=(None, ""), +) +def test_copy_metadata_files_api(_copy_from_transfer_sources, authenticate_request): # Create a SIP sip_uuid = str(uuid.uuid4()) SIP.objects.create( @@ -867,7 +864,7 @@ def test_copy_metadata_files_api(mocker): ) # Call the endpoint with a mocked request - request = mocker.Mock( + request = mock.Mock( **{ "POST.get.return_value": sip_uuid, "POST.getlist.return_value": [ @@ -887,11 +884,11 @@ def test_copy_metadata_files_api(mocker): } -def test_start_transfer_api_decodes_paths(mocker, admin_client): - start_transfer_view = mocker.patch( - 
"components.filesystem_ajax.views.start_transfer", - return_value={}, - ) +@mock.patch( + "components.filesystem_ajax.views.start_transfer", + return_value={}, +) +def test_start_transfer_api_decodes_paths(start_transfer_view, admin_client): helpers.set_setting("dashboard_uuid", "test-uuid") admin_client.post( reverse("api:start_transfer"), @@ -909,20 +906,18 @@ def test_start_transfer_api_decodes_paths(mocker, admin_client): ) -def test_reingest_approve(mocker, admin_client): - job_complete = mocker.patch( - "contrib.mcp.client.gearman.JOB_COMPLETE", - ) - mocker.patch( - "contrib.mcp.client.GearmanClient", - return_value=mocker.Mock( - **{ - "submit_job.return_value": mocker.Mock( - state=job_complete, - result=None, - ) - } - ), +@mock.patch( + "contrib.mcp.client.gearman.JOB_COMPLETE", +) +@mock.patch("contrib.mcp.client.GearmanClient") +def test_reingest_approve(gearman_client, job_complete, admin_client): + gearman_client.return_value = mock.Mock( + **{ + "submit_job.return_value": mock.Mock( + state=job_complete, + result=None, + ) + } ) helpers.set_setting("dashboard_uuid", "test-uuid") @@ -1075,12 +1070,12 @@ def test_unapproved_transfers(admin_client): "mcpclient_error", ], ) -def test_approve_transfer_failures(post_data, expected_error, admin_client, mocker): +@mock.patch("contrib.mcp.client.GearmanClient", side_effect=Exception()) +def test_approve_transfer_failures( + gearman_client, post_data, expected_error, admin_client +): helpers.set_setting("dashboard_uuid", "test-uuid") - # Simulate an unhandled error when calling Gearman. 
- mocker.patch("contrib.mcp.client.GearmanClient", side_effect=Exception()) - response = admin_client.post(reverse("api:approve_transfer"), post_data) assert response.status_code == 500 @@ -1088,25 +1083,22 @@ def test_approve_transfer_failures(post_data, expected_error, admin_client, mock assert payload == {"error": True, "message": expected_error} -def test_approve_transfer(admin_client, mocker): +@mock.patch("contrib.mcp.client.gearman.JOB_COMPLETE") +@mock.patch("contrib.mcp.client.GearmanClient") +def test_approve_transfer(gearman_client, job_complete, admin_client): helpers.set_setting("dashboard_uuid", "test-uuid") # Simulate a dashboard <-> Gearman <-> MCPServer interaction. # The MCPServer approveTransferByPath RPC method returns a UUID. transfer_uuid = uuid.uuid4() - job_complete = mocker.patch( - "contrib.mcp.client.gearman.JOB_COMPLETE", - ) - mocker.patch( - "contrib.mcp.client.GearmanClient", - return_value=mocker.Mock( - **{ - "submit_job.return_value": mocker.Mock( - state=job_complete, - result=transfer_uuid, - ) - } - ), + + gearman_client.return_value = mock.Mock( + **{ + "submit_job.return_value": mock.Mock( + state=job_complete, + result=transfer_uuid, + ) + } ) response = admin_client.post( @@ -1238,12 +1230,11 @@ def test_reingest_deletes_existing_models_related_to_sip( @pytest.mark.django_db -def test_reingest_full(sip_path, settings, admin_client, mocker): +def test_reingest_full(sip_path, settings, admin_client): helpers.set_setting("dashboard_uuid", "test-uuid") # Fake UUID generation from the endpoint for a new Transfer. transfer_uuid = uuid.uuid4() - mocker.patch("uuid.uuid4", return_value=transfer_uuid) # Set the SHARED_DIRECTORY setting based on the sip_path fixture. shared_directory = sip_path.parent.parent @@ -1252,10 +1243,11 @@ def test_reingest_full(sip_path, settings, admin_client, mocker): # There are no existing Transfers initially. 
assert Transfer.objects.count() == 0 - response = admin_client.post( - reverse("api:transfer_reingest", kwargs={"target": "transfer"}), - {"name": sip_path.name, "uuid": sip_path.name[-36:]}, - ) + with mock.patch("uuid.uuid4", return_value=transfer_uuid): + response = admin_client.post( + reverse("api:transfer_reingest", kwargs={"target": "transfer"}), + {"name": sip_path.name, "uuid": sip_path.name[-36:]}, + ) assert response.status_code == 200 # Verify the Transfer in the payload contains the fake UUID. @@ -1288,13 +1280,12 @@ def test_reingest_full(sip_path, settings, admin_client, mocker): @pytest.mark.django_db def test_reingest_full_fails_if_target_directory_already_exists( - sip_path, settings, admin_client, mocker + sip_path, settings, admin_client ): helpers.set_setting("dashboard_uuid", "test-uuid") # Fake UUID generation from the endpoint for a new Transfer. transfer_uuid = uuid.uuid4() - mocker.patch("uuid.uuid4", return_value=transfer_uuid) # Set the SHARED_DIRECTORY setting based on the sip_path fixture. 
shared_directory = sip_path.parent.parent @@ -1306,10 +1297,11 @@ def test_reingest_full_fails_if_target_directory_already_exists( ) (active_transfers_path / f"mytransfer-{transfer_uuid}").mkdir() - response = admin_client.post( - reverse("api:transfer_reingest", kwargs={"target": "transfer"}), - {"name": sip_path.name, "uuid": sip_path.name[-36:]}, - ) + with mock.patch("uuid.uuid4", return_value=transfer_uuid): + response = admin_client.post( + reverse("api:transfer_reingest", kwargs={"target": "transfer"}), + {"name": sip_path.name, "uuid": sip_path.name[-36:]}, + ) assert response.status_code == 400 payload = json.loads(response.content.decode("utf8")) @@ -1350,7 +1342,8 @@ def test_reingest_partial(sip_path, settings, admin_client): @pytest.mark.django_db -def test_fetch_levels_of_description_from_atom(admin_client, mocker): +@mock.patch("requests.get") +def test_fetch_levels_of_description_from_atom(get, admin_client): helpers.set_setting("dashboard_uuid", "test-uuid") # Set up the AtoM settings used on the Administration tab. @@ -1367,18 +1360,15 @@ def test_fetch_levels_of_description_from_atom(admin_client, mocker): # Simulate interaction with AtoM. lods = ["Series", "Subseries", "File"] - mocker.patch( - "requests.get", - side_effect=[ - mocker.Mock( - **{ - "status_code": 200, - "json.return_value": [{"name": lod} for lod in lods], - }, - spec=requests.Response, - ) - ], - ) + get.side_effect = [ + mock.Mock( + **{ + "status_code": 200, + "json.return_value": [{"name": lod} for lod in lods], + }, + spec=requests.Response, + ) + ] # Add existing LODs before calling the endpoint. 
LevelOfDescription.objects.create(name="One existing", sortorder=1) @@ -1400,9 +1390,10 @@ def test_fetch_levels_of_description_from_atom(admin_client, mocker): @pytest.mark.django_db -def test_fetch_levels_of_description_from_atom_communication_failure( - admin_client, mocker -): +@mock.patch( + "requests.get", side_effect=[mock.Mock(status_code=503, spec=requests.Response)] +) +def test_fetch_levels_of_description_from_atom_communication_failure(get, admin_client): helpers.set_setting("dashboard_uuid", "test-uuid") # Set up the AtoM settings used on the Administration tab. @@ -1417,12 +1408,6 @@ def test_fetch_levels_of_description_from_atom_communication_failure( ), ) - # Simulate failing interaction with AtoM. - mocker.patch( - "requests.get", - side_effect=[mocker.Mock(status_code=503, spec=requests.Response)], - ) - response = admin_client.get(reverse("api:fetch_atom_lods")) assert response.status_code == 500 diff --git a/tests/dashboard/test_filesystem_ajax.py b/tests/dashboard/test_filesystem_ajax.py index 2f482d9ce1..e0eac07193 100644 --- a/tests/dashboard/test_filesystem_ajax.py +++ b/tests/dashboard/test_filesystem_ajax.py @@ -498,15 +498,16 @@ def test_copy_from_arrange_to_completed_handles_name_clashes(self): ], ids=["regular_sip", "partial_reingest"], ) +@mock.patch( + "components.filesystem_ajax.views._copy_from_transfer_sources", + return_value=(None, ""), +) def test_copy_metadata_files( - mocker, rf, set_partial_reingest_flag, expected_metadata_dir + _copy_from_transfer_sources_mock, + rf, + set_partial_reingest_flag, + expected_metadata_dir, ): - # Mock helper that actually copies files from the transfer source locations - _copy_from_transfer_sources_mock = mocker.patch( - "components.filesystem_ajax.views._copy_from_transfer_sources", - return_value=(None, ""), - ) - # Create a SIP sip_uuid = str(uuid.uuid4()) sip = models.SIP.objects.create( @@ -555,7 +556,24 @@ def test_copy_metadata_files( (False, False), # Download ], ) -def 
test_download_by_uuid(mocker, local_path_exists, preview): +@mock.patch("components.helpers.stream_file_from_storage_service") +@mock.patch("components.helpers.send_file") +@mock.patch("storageService.extract_file_url") +@mock.patch("os.path.exists") +@mock.patch("storageService.get_first_location") +@mock.patch("elasticSearchFunctions.get_client") +@mock.patch("elasticSearchFunctions.get_transfer_file_info") +def test_download_by_uuid( + mock_get_file_info, + get_client, + mock_get_location, + mock_exists, + mock_extract_file_url, + mock_send_file, + mock_stream_file_from_ss, + local_path_exists, + preview, +): """Test that transfer file downloads work as expected.""" TEST_UUID = "a29e7e86-eca9-43b6-b059-6f23a9802dc8" TEST_SS_URL = "http://test-url" @@ -563,27 +581,17 @@ def test_download_by_uuid(mocker, local_path_exists, preview): TEST_RELPATH = f"transfer-{TEST_UUID}/data/objects/bird.mp3" TEST_ABSPATH = os.path.join(TEST_BACKLOG_LOCATION_PATH, "originals", TEST_RELPATH) - mock_get_file_info = mocker.patch("elasticSearchFunctions.get_transfer_file_info") mock_get_file_info.return_value = { "sipuuid": str(uuid.uuid4()), "relative_path": TEST_RELPATH, } - mocker.patch("elasticSearchFunctions.get_client") - mock_get_location = mocker.patch("storageService.get_first_location") mock_get_location.return_value = {"path": TEST_BACKLOG_LOCATION_PATH} - mock_exists = mocker.patch("os.path.exists") mock_exists.return_value = local_path_exists - mock_extract_file_url = mocker.patch("storageService.extract_file_url") mock_extract_file_url.return_value = TEST_SS_URL - mock_send_file = mocker.patch("components.helpers.send_file") - mock_stream_file_from_ss = mocker.patch( - "components.helpers.stream_file_from_storage_service" - ) - factory = RequestFactory() request = factory.get(f"/filesystem/{TEST_UUID}/download/") @@ -621,39 +629,41 @@ def test_contents_sorting(db, tmp_path, admin_client): @pytest.mark.django_db -def test_copy_within_arrange(mocker, admin_client): - 
mocker.patch( - "storageService.get_first_location", - return_value={"uuid": "355d110f-b641-4b6b-b1c0-8426e63951e5"}, - ) - mocker.patch( - "storageService.get_file_metadata", - side_effect=[ - [ - { - "fileuuid": "0b603cee-1f8a-4842-996a-e02a0307ccf7", - "sipuuid": "99c87143-6f74-4398-84e0-14a8ca4bd05a", - } - ], - [ - { - "fileuuid": "03a33ef5-8714-46cc-aefe-7283186341ca", - "sipuuid": "99c87143-6f74-4398-84e0-14a8ca4bd05a", - } - ], +@mock.patch( + "storageService.get_first_location", + return_value={"uuid": "355d110f-b641-4b6b-b1c0-8426e63951e5"}, +) +@mock.patch( + "storageService.get_file_metadata", + side_effect=[ + [ + { + "fileuuid": "0b603cee-1f8a-4842-996a-e02a0307ccf7", + "sipuuid": "99c87143-6f74-4398-84e0-14a8ca4bd05a", + } ], - ) - mocker.patch( - "storageService.browse_location", - return_value={ - "directories": [], - "entries": ["file1.txt", "file2.txt"], - "properties": { - "file1.txt": {"size": 8}, - "file2.txt": {"size": 6}, - }, + [ + { + "fileuuid": "03a33ef5-8714-46cc-aefe-7283186341ca", + "sipuuid": "99c87143-6f74-4398-84e0-14a8ca4bd05a", + } + ], + ], +) +@mock.patch( + "storageService.browse_location", + return_value={ + "directories": [], + "entries": ["file1.txt", "file2.txt"], + "properties": { + "file1.txt": {"size": 8}, + "file2.txt": {"size": 6}, }, - ) + }, +) +def test_copy_within_arrange( + browse_location, get_file_metadata, get_first_location, admin_client +): helpers.set_setting("dashboard_uuid", "test-uuid") # Expected attributes for the new SIPArrange instances. 
diff --git a/tests/dashboard/test_helpers.py b/tests/dashboard/test_helpers.py index 2633404dcc..85916dc7dd 100644 --- a/tests/dashboard/test_helpers.py +++ b/tests/dashboard/test_helpers.py @@ -1,8 +1,8 @@ import json +from unittest import mock import pytest import requests -from amclient import AMClient from components import helpers CONTENT_TYPE = "content-type" @@ -47,28 +47,19 @@ def get_streaming_response(streaming_content): return response_text -def mock_amclient_details(mocker, return_value=None, side_effect=None): - mocker.patch( - "components.helpers.get_setting", return_value="http://storage-service-url/" - ) - if side_effect: - mocker.patch.object(AMClient, "extract_file", side_effect=side_effect) - # A requests.Response() object can evaluate to None, e.g. where response.status_code = 500 - elif return_value is not None: - mocker.patch.object(AMClient, "extract_file", return_value=return_value) - - def mets_stream(tmpdir, mets_hdr): mets_stream = tmpdir.join("mets_file") mets_stream.write(mets_hdr) return mets_stream.open() -def test_stream_mets_from_disconnected_storage(mocker): +@mock.patch( + "components.helpers.get_setting", return_value="http://storage-service-url/" +) +@mock.patch("amclient.AMClient.extract_file") +def test_stream_mets_from_disconnected_storage(extract_file, get_setting): custom_error_message = "Error connecting to the storage service" - mock_amclient_details( - mocker, side_effect=requests.exceptions.ConnectionError(custom_error_message) - ) + extract_file.side_effect = requests.exceptions.ConnectionError(custom_error_message) response = helpers.stream_mets_from_storage_service( transfer_name="mets_transfer", sip_uuid="11111111-1111-1111-1111-111111111111" ) @@ -79,12 +70,16 @@ def test_stream_mets_from_disconnected_storage(mocker): assert response.status_code == RESPONSE_503 -def test_stream_mets_from_storage_no_file(mocker, tmp_path): +@mock.patch( + "components.helpers.get_setting", return_value="http://storage-service-url/" 
+) +@mock.patch("amclient.AMClient.extract_file") +def test_stream_mets_from_storage_no_file(extract_file, get_setting, tmp_path): custom_error_message = "Rsync failed with status 23: sending incremental file list" mock_response = requests.Response() mock_response._content = custom_error_message mock_response.status_code = 500 - mock_amclient_details(mocker, return_value=mock_response) + extract_file.return_value = mock_response response = helpers.stream_mets_from_storage_service( transfer_name="mets_transfer", sip_uuid="22222222-2222-2222-2222-222222222222" ) @@ -95,14 +90,18 @@ def test_stream_mets_from_storage_no_file(mocker, tmp_path): assert response.status_code == RESPONSE_500 -def test_stream_mets_from_storage_success(mocker, mets_hdr, tmpdir): +@mock.patch( + "components.helpers.get_setting", return_value="http://storage-service-url/" +) +@mock.patch("amclient.AMClient.extract_file") +def test_stream_mets_from_storage_success(extract_file, get_setting, mets_hdr, tmpdir): sip_uuid = "33333333-3333-3333-3333-333333333333" mets_file = f"METS.{sip_uuid}.xml" mock_response = requests.Response() mock_response.headers = {CONTENT_DISPOSITION: f"attachment; filename={mets_file};"} mock_response.status_code = RESPONSE_200 mock_response.raw = mets_stream(tmpdir, mets_hdr) - mock_amclient_details(mocker, return_value=mock_response) + extract_file.return_value = mock_response response = helpers.stream_mets_from_storage_service( transfer_name="mets_transfer", sip_uuid=sip_uuid ) @@ -112,12 +111,13 @@ def test_stream_mets_from_storage_success(mocker, mets_hdr, tmpdir): assert response_text == mets_hdr -def test_stream_pointer_from_storage_unsuccessful(mocker): +@mock.patch( + "requests.get", + return_value=mock.Mock(status_code=RESPONSE_503, spec=requests.Response), +) +def test_stream_pointer_from_storage_unsuccessful(get): pointer_url = "http://archivematica-storage-service:8000/api/v2/file/44444444-4444-4444-4444-444444444444/pointer_file" custom_error_message = 
"Unable to retrieve AIP pointer file from Storage Service" - mock_response = requests.Response() - mock_response.status_code = RESPONSE_503 - mocker.patch("requests.get", return_value=mock_response) response = helpers.stream_file_from_storage_service( pointer_url, error_message=custom_error_message ) @@ -128,7 +128,8 @@ def test_stream_pointer_from_storage_unsuccessful(mocker): assert response.status_code == RESPONSE_400 -def test_stream_pointer_from_storage_successful(mocker, tmpdir, mets_hdr): +@mock.patch("requests.get") +def test_stream_pointer_from_storage_successful(get, tmpdir, mets_hdr): sip_uuid = "55555555-5555-5555-5555-555555555555" mock_response = requests.Response() pointer_url, pointer_file, content_disposition = setup_ptr_info(sip_uuid) @@ -138,7 +139,7 @@ def test_stream_pointer_from_storage_successful(mocker, tmpdir, mets_hdr): } mock_response.status_code = RESPONSE_200 mock_response.raw = mets_stream(tmpdir, mets_hdr) - mocker.patch("requests.get", return_value=mock_response) + get.return_value = mock_response response = helpers.stream_file_from_storage_service(pointer_url) assert response.status_code == RESPONSE_200 assert response.get(CONTENT_TYPE) == CONTENT_XML @@ -149,7 +150,7 @@ def test_stream_pointer_from_storage_successful(mocker, tmpdir, mets_hdr): mock_response = requests.Response() mock_response.status_code = RESPONSE_200 mock_response.raw = mets_stream(tmpdir, mets_hdr) - mocker.patch("requests.get", return_value=mock_response) + get.return_value = mock_response # Make the request again, but ask to view in the browser, i.e. inline. 
response = helpers.stream_file_from_storage_service(pointer_url, preview_file=True) assert response.get(CONTENT_DISPOSITION) == "inline" @@ -157,14 +158,15 @@ def test_stream_pointer_from_storage_successful(mocker, tmpdir, mets_hdr): assert response_text == mets_hdr -def test_stream_pointer_from_storage_no_content_type(mocker, tmpdir, mets_hdr): +@mock.patch("requests.get") +def test_stream_pointer_from_storage_no_content_type(get, tmpdir, mets_hdr): sip_uuid = "66666666-6666-6666-6666-666666666666" mock_response = requests.Response() pointer_url, pointer_file, content_disposition = setup_ptr_info(sip_uuid) mock_response.headers = {CONTENT_DISPOSITION: content_disposition} mock_response.status_code = RESPONSE_200 mock_response.raw = mets_stream(tmpdir, mets_hdr) - mocker.patch.object(requests, "get", return_value=mock_response) + get.return_value = mock_response response = helpers.stream_file_from_storage_service(pointer_url) assert response.status_code == RESPONSE_200 # The content type will be set to text/plain according to the function. diff --git a/tests/dashboard/test_models.py b/tests/dashboard/test_models.py index 8068967b22..97501071a0 100644 --- a/tests/dashboard/test_models.py +++ b/tests/dashboard/test_models.py @@ -94,12 +94,10 @@ def test_sip_arrange_create_many(db): assert models.SIPArrange.objects.count() == 2 -def test_sip_arrange_create_many_with_integrity_error(mocker): - mocker.patch( - "main.models.SIPArrange.objects.bulk_create", side_effect=IntegrityError() - ) - arrange1_mock = mocker.Mock() - arrange2_mock = mocker.Mock() +@mock.patch("main.models.SIPArrange.objects.bulk_create", side_effect=IntegrityError()) +def test_sip_arrange_create_many_with_integrity_error(bulk_create): + arrange1_mock = mock.Mock() + arrange2_mock = mock.Mock() models.SIPArrange.create_many([arrange1_mock, arrange2_mock]) # If bulk creation fails each SIPArrange is saved individually arrange1_mock.save.assert_called_once()