diff --git a/src/protagonist/API.Tests/Integration/PolicyTests.cs b/src/protagonist/API.Tests/Integration/PolicyTests.cs
index ca2f6047f..bb6f5a8f8 100644
--- a/src/protagonist/API.Tests/Integration/PolicyTests.cs
+++ b/src/protagonist/API.Tests/Integration/PolicyTests.cs
@@ -111,7 +111,7 @@ public async Task Get_StoragePolicies_200()
         response.StatusCode.Should().Be(HttpStatusCode.OK);
 
         var model = await response.ReadAsHydraResponseAsync>();
-        model.Members.Should().HaveCount(2);
+        model.Members.Should().HaveCount(3);
     }
 
     [Fact]
diff --git a/src/protagonist/DLCS.Model/Storage/AssetStorageMetric.cs b/src/protagonist/DLCS.Model/Storage/AssetStorageMetric.cs
index ebdf53667..44678d764 100644
--- a/src/protagonist/DLCS.Model/Storage/AssetStorageMetric.cs
+++ b/src/protagonist/DLCS.Model/Storage/AssetStorageMetric.cs
@@ -18,6 +18,6 @@ public bool CanStoreAsset(int assetCount = 1) =>
     ///
     /// Check if there is allowance to store asset based on size
     ///
-    public bool CanStoreAssetSize(long proposedNewSize) =>
-        CustomerStorage.TotalSizeOfStoredImages + proposedNewSize <= Policy.MaximumTotalSizeOfStoredImages;
+    public bool CanStoreAssetSize(long proposedNewSize, long oldFileSize) =>
+        (CustomerStorage.TotalSizeOfStoredImages - oldFileSize) + proposedNewSize <= Policy.MaximumTotalSizeOfStoredImages;
 }
\ No newline at end of file
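Reviewer note on the `CanStoreAssetSize` change above: the bytes an asset already occupies are now released from the customer's running total before the proposed new size is compared with the policy ceiling, so reingesting a larger version of an existing asset is no longer double-counted. A minimal xunit-style sketch of the intended behaviour — the object-initialiser construction of `AssetStorageMetric`, `CustomerStorage` and `StoragePolicy` is an assumption for illustration, not code from this PR:

```csharp
// Illustrative only - mirrors the new CanStoreAssetSize(proposedNewSize, oldFileSize) check.
// Property names come from the diff; the construction shape here is assumed.
[Fact]
public void CanStoreAssetSize_AllowsLargerReingest_WhenOldSizeReleased()
{
    var metric = new AssetStorageMetric
    {
        CustomerStorage = new CustomerStorage { TotalSizeOfStoredImages = 950 },
        Policy = new StoragePolicy { MaximumTotalSizeOfStoredImages = 1000 }
    };

    // Brand new asset of 980 bytes: 950 + 980 > 1000, so it is rejected.
    metric.CanStoreAssetSize(980, 0).Should().BeFalse();

    // Reingest of an asset that already accounts for 950 of those bytes:
    // (950 - 950) + 980 <= 1000, so the larger replacement is allowed.
    metric.CanStoreAssetSize(980, 950).Should().BeTrue();
}
```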
diff --git a/src/protagonist/Engine.Tests/Ingest/File/FileChannelWorkerTests.cs b/src/protagonist/Engine.Tests/Ingest/File/FileChannelWorkerTests.cs
index 5ef69863a..a79c94907 100644
--- a/src/protagonist/Engine.Tests/Ingest/File/FileChannelWorkerTests.cs
+++ b/src/protagonist/Engine.Tests/Ingest/File/FileChannelWorkerTests.cs
@@ -40,7 +40,7 @@ public async Task Ingest_NoOp_IfOptimisedStrategy()
         // Assert
         result.Should().Be(IngestResultStatus.Success);
         A.CallTo(() =>
-                assetToS3.CopyOriginToStorage(A._, A._, A._, cos, A._))
+                assetToS3.CopyOriginToStorage(A._, A._, A._, cos, A._))
             .MustNotHaveHappened();
     }
 
@@ -56,7 +56,7 @@ public async Task Ingest_CopiesFileToStorage_SetsImageStorage_AndStoredObject()
             .Returns(destination);
 
         A.CallTo(() =>
-                assetToS3.CopyOriginToStorage(destination, context.Asset, true, cos, A._))
+                assetToS3.CopyOriginToStorage(destination, context, true, cos, A._))
             .Returns(new AssetFromOrigin(context.AssetId, 1234L, "anywhere", "application/docx"));
 
         // Act
@@ -81,7 +81,7 @@ public async Task Ingest_CopiesFileToStorage_IncrementsImageStorage_AndStoredObj
             .Returns(destination);
 
         A.CallTo(() =>
-                assetToS3.CopyOriginToStorage(destination, context.Asset, true, cos, A._))
+                assetToS3.CopyOriginToStorage(destination, context, true, cos, A._))
             .Returns(new AssetFromOrigin(context.AssetId, 1234L, "anywhere", "application/docx"));
 
         // Act
@@ -92,7 +92,7 @@ public async Task Ingest_CopiesFileToStorage_IncrementsImageStorage_AndStoredObj
         context.StoredObjects.Should().ContainKey(destination).WhoseValue.Should().Be(1234L);
         result.Should().Be(IngestResultStatus.Success);
     }
-    
+
     [Fact]
     public async Task Ingest_ReturnsErrorIfCopyExceedStorageLimit()
     {
@@ -107,7 +107,7 @@ public async Task Ingest_ReturnsErrorIfCopyExceedStorageLimit()
         var assetFromOrigin = new AssetFromOrigin(context.AssetId, 1234L, "anywhere", "application/docx");
         assetFromOrigin.FileTooLarge();
         A.CallTo(() =>
-                assetToS3.CopyOriginToStorage(destination, context.Asset, true, cos, A._))
+                assetToS3.CopyOriginToStorage(destination, context, true, cos, A._))
             .Returns(assetFromOrigin);
 
         // Act
@@ -135,7 +135,7 @@ public async Task Ingest_CopiesFileToStorage_PassesVerifySizeFalse_IfCustomerExc
         // Assert
         A.CallTo(() =>
-                assetToS3.CopyOriginToStorage(destination, context.Asset, false, cos, A._))
+                assetToS3.CopyOriginToStorage(destination, context, false, cos, A._))
             .MustHaveHappened();
         result.Should().Be(IngestResultStatus.Success);
     }
diff --git a/src/protagonist/Engine.Tests/Ingest/IngestExecutorTests.cs b/src/protagonist/Engine.Tests/Ingest/IngestExecutorTests.cs
index ffd964cca..81063b4cc 100644
--- a/src/protagonist/Engine.Tests/Ingest/IngestExecutorTests.cs
+++ b/src/protagonist/Engine.Tests/Ingest/IngestExecutorTests.cs
@@ -13,12 +13,14 @@ public class IngestExecutorTests
     private readonly IEngineAssetRepository repo;
     private readonly IngestExecutor sut;
     private readonly CustomerOriginStrategy customerOriginStrategy = new();
+    private readonly IAssetIngestorSizeCheck assetSizeCheck;
 
     public IngestExecutorTests()
     {
         workerBuilder = A.Fake();
         repo = A.Fake();
-        sut = new IngestExecutor(workerBuilder, repo, new NullLogger());
+        assetSizeCheck = A.Fake();
+        sut = new IngestExecutor(workerBuilder, repo, assetSizeCheck, new NullLogger());
     }
 
     [Fact]
@@ -35,6 +37,26 @@ public async Task IngestAsset_HandlesNoWorkers()
             repo.UpdateIngestedAsset(asset, A._, A._, true, A._))
             .MustHaveHappened();
     }
+
+    [Theory]
+    [InlineData(true)]
+    [InlineData(false)]
+    public async Task IngestAsset_Handles_CanHaveAssetSizeCheck(bool sizeCheck)
+    {
+        var asset = new Asset();
+        A.CallTo(() => workerBuilder.GetWorkers(asset))
+            .Returns(new[] { new FakeWorker(IngestResultStatus.Success) });
+        A.CallTo(() => assetSizeCheck.CustomerHasNoStorageCheck(A._))
+            .Returns(sizeCheck);
+
+        // Act
+        await sut.IngestAsset(asset, customerOriginStrategy);
+
+        // Assert
+        A.CallTo(() =>
+            repo.UpdateIngestedAsset(asset, A._, A._, true, A._))
+            .MustHaveHappened();
+    }
 
     [Theory]
     [InlineData(IngestResultStatus.Success, IngestResultStatus.Success, IngestResultStatus.Success)]
@@ -59,7 +81,7 @@ public async Task IngestAsset_Success_ReturnsCorrectStatus(IngestResultStatus fi
 
         // Act
         var result = await sut.IngestAsset(asset, customerOriginStrategy);
-        
+
         // Assert
         result.Status.Should().Be(overall);
     }
diff --git a/src/protagonist/Engine.Tests/Ingest/Persistence/AssetToDiskTests.cs b/src/protagonist/Engine.Tests/Ingest/Persistence/AssetToDiskTests.cs
index 39fddc12f..89db1ffd8 100644
--- a/src/protagonist/Engine.Tests/Ingest/Persistence/AssetToDiskTests.cs
+++ b/src/protagonist/Engine.Tests/Ingest/Persistence/AssetToDiskTests.cs
@@ -5,6 +5,7 @@
 using DLCS.Repository.Strategy;
 using DLCS.Repository.Strategy.DependencyInjection;
 using DLCS.Repository.Strategy.Utils;
+using Engine.Ingest;
 using Engine.Ingest.Persistence;
 using FakeItEasy;
 using Microsoft.Extensions.Logging.Abstractions;
@@ -40,7 +41,7 @@ public AssetToDiskTests()
     public void CopyAssetFromOrigin_Throws_IfDestinationFolderNullOrEmpty(string destinationFolder)
     {
         // Act
-        Func action = () => sut.CopyAssetToLocalDisk(new Asset(), destinationFolder, true, new CustomerOriginStrategy());
+        Func action = () => sut.CopyAssetToLocalDisk(new IngestionContext(new Asset()), destinationFolder, true, new CustomerOriginStrategy());
 
         // Assert
         action.Should()
@@ -54,13 +55,14 @@ public void CopyAssetFromOrigin_Throws_IfOriginReturnsNull()
     {
         // Arrange
         const string origin = "http://test-origin";
         var asset = new Asset { Id = AssetId.FromString("/2/1/godzilla") };
+        var context = new IngestionContext(asset);
         var cos = new CustomerOriginStrategy { Strategy = OriginStrategyType.S3Ambient };
 
         A.CallTo(() => customerOriginStrategy.LoadAssetFromOrigin(asset.Id, origin, cos, A._))
             .Returns(null);
 
         // Act
-        Func action = () => sut.CopyAssetToLocalDisk(asset, "./here", true, cos);
+        Func action = () => sut.CopyAssetToLocalDisk(context, "./here", true, cos);
 
         // Assert
         action.Should().ThrowAsync();
     }
 
@@ -73,12 +75,13 @@ public void CopyAssetFromOrigin_Throws_IfOriginReturnsEmptyStream()
     {
         // Arrange
         const string origin = "http://test-origin";
         var asset = new Asset { Id = AssetId.FromString("/2/1/godzilla"), Origin = origin};
         var cos = new CustomerOriginStrategy { Strategy = OriginStrategyType.S3Ambient };
+        var context = new IngestionContext(asset);
 
         A.CallTo(() => customerOriginStrategy.LoadAssetFromOrigin(asset.Id, origin, cos, A._))
             .Returns(new OriginResponse(Stream.Null));
 
         // Act
-        Func action = () => sut.CopyAssetToLocalDisk(asset, "./here", true, cos);
+        Func action = () => sut.CopyAssetToLocalDisk(context, "./here", true, cos);
 
         // Assert
         action.Should().ThrowAsync();
     }
 
@@ -93,6 +96,7 @@ public async Task CopyAssetFromOrigin_SavesFileToDisk_IfNoContentLength()
         AssetId assetId = AssetId.FromString("2/1/godzilla");
         var asset = new Asset(assetId) { Origin = origin };
         var cos = new CustomerOriginStrategy { Strategy = OriginStrategyType.S3Ambient };
+        var context = new IngestionContext(asset);
 
         var responseStream = "{\"foo\":\"bar\"}".ToMemoryStream();
         var originResponse = new OriginResponse(responseStream).WithContentType("application/json");
@@ -106,7 +110,7 @@ public async Task CopyAssetFromOrigin_SavesFileToDisk_IfNoContentLength()
         var expectedOutput = Path.Join(".", "2", "1", "godzilla", "godzilla.file");
 
         // Act
-        var response = await sut.CopyAssetToLocalDisk(asset, destination, false, cos);
+        var response = await sut.CopyAssetToLocalDisk(context, destination, false, cos);
 
         // Assert
         A.CallTo(() => fileSaver.SaveResponseToDisk(A.That.Matches(a => a == assetId),
@@ -126,6 +130,7 @@ public async Task CopyAssetFromOrigin_SavesFileToDisk_IfContentLength()
         const string origin = "http://test-origin";
         var assetId = AssetId.FromString("2/1/godzilla1");
         var asset = new Asset(assetId) { Origin = origin };
+        var context = new IngestionContext(asset);
         var cos = new CustomerOriginStrategy
         {
             Strategy = OriginStrategyType.S3Ambient
         };
@@ -143,7 +148,7 @@ public async Task CopyAssetFromOrigin_SavesFileToDisk_IfContentLength()
         var expectedOutput = Path.Join(".", "2", "1", "godzilla1", "godzilla1.file");
 
         // Act
-        var response = await sut.CopyAssetToLocalDisk(asset, destination, false, cos);
+        var response = await sut.CopyAssetToLocalDisk(context, destination, false, cos);
 
         // Assert
         A.CallTo(() => fileSaver.SaveResponseToDisk(A.That.Matches(a => a == assetId),
@@ -166,6 +171,7 @@ public async Task CopyAssetFromOrigin_SetsExtension_BasedOnFileType(string conte
         const string origin = "http://test-origin";
         var asset = new Asset { Id = AssetId.FromString("/2/1/godzilla.jp2"), Customer = 2, Space = 1, Origin = origin };
         var cos = new CustomerOriginStrategy { Strategy = OriginStrategyType.S3Ambient };
+        var context = new IngestionContext(asset);
 
         var responseStream = "{\"foo\":\"bar\"}".ToMemoryStream();
         var originResponse = new OriginResponse(responseStream)
@@ -176,7 +182,7 @@ public async Task CopyAssetFromOrigin_SetsExtension_BasedOnFileType(string conte
             .Returns(originResponse);
 
         // Act
-        var response = await sut.CopyAssetToLocalDisk(asset, destination, false, cos);
+        var response = await sut.CopyAssetToLocalDisk(context, destination, false, cos);
 
         // Assert
         response.ContentType.Should().Be(contentType);
@@ -193,6 +199,7 @@ public async Task CopyAssetFromOrigin_SetsContentType_IfUnknownOrBinary_AssetIdI
"http://test-origin"; var asset = new Asset { Id = AssetId.FromString("/2/1/godzilla.jp2"), Customer = 2, Space = 1, Origin = origin }; var cos = new CustomerOriginStrategy { Strategy = OriginStrategyType.S3Ambient }; + var context = new IngestionContext(asset); var responseStream = "{\"foo\":\"bar\"}".ToMemoryStream(); var originResponse = new OriginResponse(responseStream) @@ -203,7 +210,7 @@ public async Task CopyAssetFromOrigin_SetsContentType_IfUnknownOrBinary_AssetIdI .Returns(originResponse); // Act - var response = await sut.CopyAssetToLocalDisk(asset, destination, false, cos); + var response = await sut.CopyAssetToLocalDisk(context, destination, false, cos); // Assert response.ContentType.Should().Be("image/jp2"); @@ -219,6 +226,7 @@ public async Task CopyAssetFromOrigin_VerifiesFileSize(bool isValid) const string origin = "http://test-origin"; var asset = new Asset { Id = AssetId.FromString("/2/1/godzilla"), Customer = 2, Space = 1, Origin = origin }; var cos = new CustomerOriginStrategy { Strategy = OriginStrategyType.S3Ambient }; + var context = new IngestionContext(asset); var responseStream = "{\"foo\":\"bar\"}".ToMemoryStream(); var originResponse = new OriginResponse(responseStream).WithContentType("application/json"); @@ -234,7 +242,7 @@ public async Task CopyAssetFromOrigin_VerifiesFileSize(bool isValid) }); // Act - var response = await sut.CopyAssetToLocalDisk(asset, destination, true, cos); + var response = await sut.CopyAssetToLocalDisk(context, destination, true, cos); // Assert response.FileExceedsAllowance.Should().Be(!isValid); diff --git a/src/protagonist/Engine.Tests/Ingest/Persistence/AssetToS3Tests.cs b/src/protagonist/Engine.Tests/Ingest/Persistence/AssetToS3Tests.cs index c0c6af46c..ecc949a3f 100644 --- a/src/protagonist/Engine.Tests/Ingest/Persistence/AssetToS3Tests.cs +++ b/src/protagonist/Engine.Tests/Ingest/Persistence/AssetToS3Tests.cs @@ -5,6 +5,7 @@ using DLCS.Model.Assets; using DLCS.Model.Customers; using DLCS.Model.Storage; +using Engine.Ingest; using Engine.Ingest.Persistence; using Engine.Settings; using Engine.Tests.Integration; @@ -54,6 +55,7 @@ public async Task CopyAsset_CopiesDirectS3ToS3_IfS3AmbientAndFullBucketAccess() { Strategy = OriginStrategyType.S3Ambient, Optimised = true }; + var context = new IngestionContext(asset); A.CallTo(() => bucketWriter.CopyLargeObject(A._, A._, A>>._, false, A._, A._)) @@ -62,7 +64,7 @@ public async Task CopyAsset_CopiesDirectS3ToS3_IfS3AmbientAndFullBucketAccess() var ct = new CancellationToken(); // Act - await sut.CopyOriginToStorage(destination, asset, true, originStrategy, ct); + await sut.CopyOriginToStorage(destination, context, true, originStrategy, ct); // Assert A.CallTo(() => bucketWriter.CopyLargeObject( @@ -90,6 +92,7 @@ public async Task CopyAsset_ReturnsExpected_AfterDirectS3ToS3(bool optimised) { Strategy = OriginStrategyType.S3Ambient, Optimised = optimised }; + var context = new IngestionContext(asset); A.CallTo(() => bucketWriter.CopyLargeObject(A._, A._, A>>._, false, A._, A._)) @@ -100,7 +103,7 @@ public async Task CopyAsset_ReturnsExpected_AfterDirectS3ToS3(bool optimised) var ct = new CancellationToken(); // Act - var actual = await sut.CopyOriginToStorage(destination, asset, true, originStrategy, ct); + var actual = await sut.CopyOriginToStorage(destination, context, true, originStrategy, ct); // Assert actual.Should().BeEquivalentTo(expected); @@ -122,6 +125,7 @@ public async Task CopyAsset_ReturnsFileTooLarge_IfDirectS3ToS3CopyReturnsFileToo { Strategy = 
             Strategy = OriginStrategyType.S3Ambient, Optimised = true
         };
+        var context = new IngestionContext(asset);
 
         A.CallTo(() => bucketWriter.CopyLargeObject(A._, A._, A>>._, false, A._, A._))
 
         var ct = new CancellationToken();
 
         // Act
-        var actual = await sut.CopyOriginToStorage(destination, asset, true, originStrategy, ct);
+        var actual = await sut.CopyOriginToStorage(destination, context, true, originStrategy, ct);
 
         // Assert
         actual.Should().BeEquivalentTo(expected);
@@ -156,6 +160,7 @@ public void CopyAsset_Throws_IfDirectCopyNotSuccess(LargeObjectStatus status)
         {
             Strategy = OriginStrategyType.S3Ambient
         };
+        var context = new IngestionContext(asset);
 
         A.CallTo(() => bucketWriter.CopyLargeObject(A._, A._, A>>._, false, A._, A._))
 
         var ct = new CancellationToken();
 
         // Act
-        Func action = () => sut.CopyOriginToStorage(destination, asset, true, originStrategy, ct);
+        Func action = () => sut.CopyOriginToStorage(destination, context, true, originStrategy, ct);
 
         // Assert
         action.Should().ThrowAsync();
@@ -187,20 +192,21 @@ public async Task CopyAsset_CopiesToDisk_IfNotS3Ambient(OriginStrategyType strat
         {
             Strategy = strategy
         };
+        var context = new IngestionContext(asset);
         var ct = new CancellationToken();
         var assetFromOrigin = new AssetFromOrigin();
         assetFromOrigin.FileTooLarge();
         A.CallTo(() =>
-                assetToDisk.CopyAssetToLocalDisk(asset, A._, true, originStrategy, A._))
+                assetToDisk.CopyAssetToLocalDisk(context, A._, true, originStrategy, A._))
             .Returns(assetFromOrigin);
 
         // Act
-        await sut.CopyOriginToStorage(destination, asset, true, originStrategy, ct);
+        await sut.CopyOriginToStorage(destination, context, true, originStrategy, ct);
 
         // Assert
         A.CallTo(() =>
-                assetToDisk.CopyAssetToLocalDisk(asset, A._, true, originStrategy, A._))
+                assetToDisk.CopyAssetToLocalDisk(context, A._, true, originStrategy, A._))
             .MustHaveHappened();
     }
 
@@ -217,16 +223,17 @@ public async Task CopyAsset_DoesNotWriteToBucket_IfCopiedDiskTooLarge_IfNotS3Amb
         {
             Strategy = OriginStrategyType.Default
         };
+        var context = new IngestionContext(asset);
         var ct = new CancellationToken();
         var assetOnDisk = new AssetFromOrigin(asset.Id, 1234, "1", "video/mpeg");
         assetOnDisk.FileTooLarge();
         A.CallTo(() =>
-                assetToDisk.CopyAssetToLocalDisk(asset, A._, true, originStrategy, A._))
+                assetToDisk.CopyAssetToLocalDisk(context, A._, true, originStrategy, A._))
             .Returns(assetOnDisk);
 
         // Act
-        var response = await sut.CopyOriginToStorage(destination, asset, true, originStrategy, ct);
+        var response = await sut.CopyOriginToStorage(destination, context, true, originStrategy, ct);
 
         // Assert
         A.CallTo(() => bucketWriter.WriteFileToBucket(A._, A._, A._, ct))
@@ -247,18 +254,19 @@ public async Task CopyAsset_CopiesFromDiskToBucket_IfNotS3Ambient()
         {
             Strategy = OriginStrategyType.Default
         };
+        var context = new IngestionContext(asset);
         var ct = new CancellationToken();
         var assetOnDisk = new AssetFromOrigin(asset.Id, 1234, "1", "video/mpeg");
         A.CallTo(() =>
-                assetToDisk.CopyAssetToLocalDisk(asset, A._, true, originStrategy, A._))
+                assetToDisk.CopyAssetToLocalDisk(context, A._, true, originStrategy, A._))
             .Returns(assetOnDisk);
 
         A.CallTo(() => bucketWriter.WriteFileToBucket(A._, A._, A._, ct))
             .Returns(true);
 
         // Act
-        await sut.CopyOriginToStorage(destination, asset, true, originStrategy, ct);
+        await sut.CopyOriginToStorage(destination, context, true, originStrategy, ct);
 
         // Assert
         A.CallTo(() => bucketWriter.WriteFileToBucket(
@@ -286,20 +294,21 @@ public async Task CopyAsset_ReturnsExpected_AfterIndirectS3ToS3()
         {
             Strategy = OriginStrategyType.Default
         };
+        var context = new IngestionContext(asset);
         var expected = new AssetFromOrigin(asset.Id, assetSize, "s3://fantasy/test-key", mediaType);
         var ct = new CancellationToken();
         var assetOnDisk = new AssetFromOrigin(asset.Id, assetSize, "/on/disk", mediaType);
         A.CallTo(() =>
-                assetToDisk.CopyAssetToLocalDisk(asset, A._, true, originStrategy, A._))
+                assetToDisk.CopyAssetToLocalDisk(context, A._, true, originStrategy, A._))
             .Returns(assetOnDisk);
 
         A.CallTo(() => bucketWriter.WriteFileToBucket(A._, A._, A._, ct))
             .Returns(true);
 
         // Act
-        var actual = await sut.CopyOriginToStorage(destination, asset, true, originStrategy, ct);
+        var actual = await sut.CopyOriginToStorage(destination, context, true, originStrategy, ct);
 
         // Assert
         actual.Should().BeEquivalentTo(expected);
@@ -318,15 +327,16 @@ public void CopyAsset_ThrowsIfUploadToS3Fails_IfNotS3Ambient()
         {
             Strategy = OriginStrategyType.S3Ambient
         };
+        var context = new IngestionContext(asset);
         var ct = new CancellationToken();
         var assetOnDisk = new AssetFromOrigin(asset.Id, 1234, "/on/disk", "video/mpeg");
         A.CallTo(() =>
-                assetToDisk.CopyAssetToLocalDisk(asset, A._, true, originStrategy, A._))
+                assetToDisk.CopyAssetToLocalDisk(context, A._, true, originStrategy, A._))
             .Returns(assetOnDisk);
 
         // Act
-        Func action = () => sut.CopyOriginToStorage(destination, asset, true, originStrategy, ct);
+        Func action = () => sut.CopyOriginToStorage(destination, context, true, originStrategy, ct);
 
         // Assert
         action.Should().ThrowAsync();
diff --git a/src/protagonist/Engine.Tests/Ingest/Workers/ImageIngesterWorkerTests.cs b/src/protagonist/Engine.Tests/Ingest/Workers/ImageIngesterWorkerTests.cs
index 9c8fb2881..37809704a 100644
--- a/src/protagonist/Engine.Tests/Ingest/Workers/ImageIngesterWorkerTests.cs
+++ b/src/protagonist/Engine.Tests/Ingest/Workers/ImageIngesterWorkerTests.cs
@@ -48,7 +48,7 @@ public async Task Ingest_ReturnsFailed_IfCopyAssetError()
         // Arrange
         var asset = new Asset(AssetId.FromString("2/1/shallow"));
         A.CallTo(() =>
-                assetToDisk.CopyAssetToLocalDisk(A._, A._, true, A._,
+                assetToDisk.CopyAssetToLocalDisk(A._, A._, true, A._,
                 A._))
             .ThrowsAsync(new ArgumentNullException());
 
@@ -67,7 +67,7 @@ public async Task Ingest_SetsVerifySizeFlag_DependingOnCustomerOverride(int cust
         // Arrange
         var asset = new Asset(AssetId.FromString($"{customerId}/1/shallow"));
         var assetFromOrigin = new AssetFromOrigin(asset.Id, 13, "/target/location", "application/json");
-        A.CallTo(() => assetToDisk.CopyAssetToLocalDisk(A._, A._, A._, A._,
+        A.CallTo(() => assetToDisk.CopyAssetToLocalDisk(A._, A._, A._, A._,
                 A._))
             .Returns(assetFromOrigin);
 
         // Act
@@ -76,7 +76,7 @@ public async Task Ingest_SetsVerifySizeFlag_DependingOnCustomerOverride(int cust
         // Assert
         A.CallTo(() =>
-                assetToDisk.CopyAssetToLocalDisk(A._, A._, !noStoragePolicyCheck, A._,
+                assetToDisk.CopyAssetToLocalDisk(A._, A._, !noStoragePolicyCheck, A._,
                 A._))
             .MustHaveHappened();
     }
@@ -89,7 +89,7 @@ public async Task Ingest_ReturnsStorageLimitExceeded_IfFileSizeTooLarge()
         var assetFromOrigin = new AssetFromOrigin(asset.Id, 13, "/target/location", "application/json");
         assetFromOrigin.FileTooLarge();
         A.CallTo(() =>
-                assetToDisk.CopyAssetToLocalDisk(A._, A._, true, A._,
+                assetToDisk.CopyAssetToLocalDisk(A._, A._, true, A._,
                 A._))
             .Returns(assetFromOrigin);
 
@@ -99,7 +99,7 @@ public async Task Ingest_ReturnsStorageLimitExceeded_IfFileSizeTooLarge()
         // Assert
         result.Should().Be(IngestResultStatus.StorageLimitExceeded);
     }
-    
+
     [Theory]
     [InlineData(true, IngestResultStatus.Success)]
     [InlineData(false, IngestResultStatus.Failed)]
@@ -110,7 +110,7 @@ public async Task Ingest_ReturnsCorrectResult_DependingOnIngestAndCompletion(boo
         var asset = new Asset(AssetId.FromString("/2/1/remurdered"));
 
         A.CallTo(() =>
-                assetToDisk.CopyAssetToLocalDisk(A._, A._, true, A._,
+                assetToDisk.CopyAssetToLocalDisk(A._, A._, true, A._,
                 A._))
             .Returns(new AssetFromOrigin(asset.Id, 13, "target", "application/json"));
 
diff --git a/src/protagonist/Engine.Tests/Ingest/Workers/TimebasedIngesterWorkerTests.cs b/src/protagonist/Engine.Tests/Ingest/Workers/TimebasedIngesterWorkerTests.cs
index 92b4e4ba9..fe432cf7b 100644
--- a/src/protagonist/Engine.Tests/Ingest/Workers/TimebasedIngesterWorkerTests.cs
+++ b/src/protagonist/Engine.Tests/Ingest/Workers/TimebasedIngesterWorkerTests.cs
@@ -42,7 +42,7 @@ public async Task Ingest_ReturnsFailed_IfCopyAssetError()
         // Arrange
         var asset = new Asset(AssetId.FromString("2/1/shallow"));
         A.CallTo(() =>
-                assetToS3.CopyOriginToStorage(A._, A._, true, A._,
+                assetToS3.CopyOriginToStorage(A._, A._, true, A._,
                 A._))
             .ThrowsAsync(new Exception());
 
@@ -61,7 +61,7 @@ public async Task Ingest_SetsVerifySizeFlagCorrectly(int customerId, bool noStor
         // Arrange
         var asset = new Asset(AssetId.FromString($"{customerId}/1/shallow"));
         var assetFromOrigin = new AssetFromOrigin(asset.Id, 13, "/target/location", "application/json");
-        A.CallTo(() => assetToS3.CopyOriginToStorage(A._, A._, A._,
+        A.CallTo(() => assetToS3.CopyOriginToStorage(A._, A._, A._,
             A._, A._)).Returns(assetFromOrigin);
 
         // Act
@@ -69,7 +69,7 @@ public async Task Ingest_SetsVerifySizeFlagCorrectly(int customerId, bool noStor
         // Assert
         A.CallTo(() =>
-                assetToS3.CopyOriginToStorage(A._, A._, !noStoragePolicyCheck,
+                assetToS3.CopyOriginToStorage(A._, A._, !noStoragePolicyCheck,
                 A._, A._))
             .MustHaveHappened();
     }
@@ -82,7 +82,7 @@ public async Task Ingest_ReturnsStorageLimitExceeded_IfFileSizeTooLarge()
         var assetFromOrigin = new AssetFromOrigin(asset.Id, 13, "/target/location", "application/json");
         assetFromOrigin.FileTooLarge();
         A.CallTo(() =>
-                assetToS3.CopyOriginToStorage(A._, A._, A._, A._,
+                assetToS3.CopyOriginToStorage(A._, A._, A._, A._,
                 A._))
             .Returns(assetFromOrigin);
 
@@ -100,7 +100,7 @@ public async Task Ingest_SetsSizeValue_InMetadata_IfOriginLocationInIngestionCon
         var asset = new Asset(AssetId.FromString("2/1/remurdered"));
 
         A.CallTo(() =>
-                assetToS3.CopyOriginToStorage(A._, A._, A._, A._,
+                assetToS3.CopyOriginToStorage(A._, A._, A._, A._,
                 A._))
             .Returns(new AssetFromOrigin(asset.Id, 13, "target", "application/json"));
 
@@ -132,7 +132,7 @@ public async Task Ingest_ReturnsQueuedForProcessing_IfMediaTranscodeSuccess()
         var asset = new Asset(AssetId.FromString("2/1/remurdered"));
 
         A.CallTo(() =>
-                assetToS3.CopyOriginToStorage(A._, A._, A._, A._,
+                assetToS3.CopyOriginToStorage(A._, A._, A._, A._,
                 A._))
             .Returns(new AssetFromOrigin(asset.Id, 13, "target", "application/json"));
 
diff --git a/src/protagonist/Engine.Tests/Integration/ImageIngestTests.cs b/src/protagonist/Engine.Tests/Integration/ImageIngestTests.cs
index 606b2779f..3797dd7f8 100644
--- a/src/protagonist/Engine.Tests/Integration/ImageIngestTests.cs
+++ b/src/protagonist/Engine.Tests/Integration/ImageIngestTests.cs
@@ -122,6 +122,54 @@ public async Task IngestAsset_Success_HttpOrigin_AllOpen()
         storage.Size.Should().BeGreaterThan(0);
     }
 
+    [Fact]
+    public async Task IngestAsset_Success_OnLargerReingest()
+    {
+        // Arrange
+        // Create a new customer to have control over CustomerStorage and make sure it's isolated
+        const int customerId = -10;
+        var assetId = AssetId.FromString($"{customerId}/2/{nameof(IngestAsset_Success_OnLargerReingest)}");
+
+        // Note - API will have set this up before handing off
+        var origin = $"{apiStub.Address}/image";
+
+        var entity = await dbContext.Images.AddTestAsset(assetId, ingesting: true, origin: origin,
+            imageOptimisationPolicy: "fast-higher", mediaType: "image/tiff", width: 0, height: 0, duration: 0,
+            deliveryChannels: imageDeliveryChannels);
+        var asset = entity.Entity;
+        await dbContext.Customers.AddTestCustomer(customerId);
+        await dbContext.Spaces.AddTestSpace(customerId, 2);
+        await dbContext.ImageStorages.AddTestImageStorage(id: assetId, space: 2, customer: customerId, size: 950);
+        await dbContext.CustomerStorages.AddTestCustomerStorage(customer: customerId, sizeOfStored: 950,
+            storagePolicy: "medium");
+        await dbContext.SaveChangesAsync();
+        var message = new IngestAssetRequest(asset, DateTime.UtcNow);
+
+        // Act
+        var jsonContent =
+            new StringContent(JsonSerializer.Serialize(message, settings), Encoding.UTF8, "application/json");
+        var result = await httpClient.PostAsync("asset-ingest", jsonContent);
+
+        // Assert
+        result.StatusCode.Should().Be(HttpStatusCode.OK);
+
+        // Database records updated
+        var updatedAsset = await dbContext.Images.SingleAsync(a => a.Id == assetId);
+        updatedAsset.Width.Should().Be(500);
+        updatedAsset.Height.Should().Be(1000);
+        updatedAsset.Ingesting.Should().BeFalse();
+        updatedAsset.Finished.Should().BeCloseTo(DateTime.UtcNow, TimeSpan.FromMinutes(1));
+        updatedAsset.MediaType.Should().Be("image/tiff");
+        updatedAsset.Error.Should().BeEmpty();
+
+        var location = await dbContext.ImageLocations.SingleAsync(a => a.Id == assetId);
+        location.Nas.Should().BeEmpty();
+        location.S3.Should().Be($"s3://us-east-1/{LocalStackFixture.StorageBucketName}/{assetId}");
+
+        var storage = await dbContext.ImageStorages.SingleAsync(a => a.Id == assetId);
+        storage.Size.Should().NotBe(950);
+    }
+
     [Fact]
     public async Task IngestAsset_Success_HttpOrigin_InitialOrigin_AllOpen()
     {
@@ -269,6 +317,54 @@ await dbContext.CustomerStorages.AddTestCustomerStorage(customer: customerId, si
         storage.Should().BeNull();
     }
 
+    [Fact]
+    public async Task IngestAsset_Error_ExceedAllowanceOnReingest()
+    {
+        // Arrange
+        // Create a new customer to have control over CustomerStorage and make sure it's isolated
+        const int customerId = -10;
+        var assetId = AssetId.FromString($"{customerId}/3/{nameof(IngestAsset_Error_ExceedAllowanceOnReingest)}");
+
+        // Note - API will have set this up before handing off
+        var origin = $"{apiStub.Address}/image";
+
+        var entity = await dbContext.Images.AddTestAsset(assetId, ingesting: true, origin: origin, customer: customerId,
+            width: 0, height: 0, duration: 0, mediaType: "image/tiff", deliveryChannels: imageDeliveryChannels);
+        var asset = entity.Entity;
+        await dbContext.Customers.AddTestCustomer(customerId);
+        await dbContext.Spaces.AddTestSpace(customerId, 3);
+        await dbContext.ImageStorages.AddTestImageStorage(id: assetId, space: 2, customer: customerId, size: 500);
+        await dbContext.CustomerStorages.AddTestCustomerStorage(customer: customerId, sizeOfStored: 950,
+            storagePolicy: "medium");
+        await dbContext.SaveChangesAsync();
+        var message = new IngestAssetRequest(asset, DateTime.UtcNow);
+
+        // Act
+        var jsonContent =
+            new StringContent(JsonSerializer.Serialize(message, settings), Encoding.UTF8, "application/json");
+        var result = await httpClient.PostAsync("asset-ingest", jsonContent);
+
+        // Assert
+        result.StatusCode.Should().Be(HttpStatusCode.InsufficientStorage);
+
+        // No S3 assets created
+        BucketWriter.ShouldNotHaveKey(assetId.ToString());
+
+        // Database records updated
+        var updatedAsset = await dbContext.Images.SingleAsync(a => a.Id == assetId);
+        updatedAsset.Width.Should().Be(0);
+        updatedAsset.Height.Should().Be(0);
+        updatedAsset.Ingesting.Should().BeFalse();
+        updatedAsset.Finished.Should().BeCloseTo(DateTime.UtcNow, TimeSpan.FromMinutes(1));
+        updatedAsset.Error.Should().Be("StoragePolicy size limit exceeded");
+
+        var location = await dbContext.ImageLocations.SingleOrDefaultAsync(a => a.Id == assetId);
+        location.Should().BeNull();
+
+        var storage = await dbContext.ImageStorages.SingleOrDefaultAsync(a => a.Id == assetId);
+        storage!.Size.Should().Be(500);
+    }
+
     [Fact]
     public async Task IngestAsset_Error_HttpOrigin()
     {
diff --git a/src/protagonist/Engine/Data/EngineAssetRepository.cs b/src/protagonist/Engine/Data/EngineAssetRepository.cs
index d3e8a7fc6..357c1e106 100644
--- a/src/protagonist/Engine/Data/EngineAssetRepository.cs
+++ b/src/protagonist/Engine/Data/EngineAssetRepository.cs
@@ -85,6 +85,16 @@ public async Task UpdateIngestedAsset(Asset asset, ImageLocation? imageLoc
     public ValueTask GetAsset(AssetId assetId, CancellationToken cancellationToken = default) =>
         dlcsContext.Images.FindAsync(new object[] { assetId }, cancellationToken);
 
+    public async Task GetImageSize(AssetId assetId, CancellationToken cancellationToken = default)
+    {
+        var imageSize = await dlcsContext.ImageStorages.AsNoTracking()
+            .Where(i => i.Id == assetId)
+            .Select(i => i.Size)
+            .FirstOrDefaultAsync(cancellationToken: cancellationToken);
+
+        return imageSize;
+    }
+
     private async Task NonBatchedSave(CancellationToken cancellationToken)
     {
         var updatedRows = await dlcsContext.SaveChangesAsync(cancellationToken);
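A note on `GetImageSize` above: it reads the size currently recorded in `ImageStorages` for the asset (as an untracked query), which is what later feeds the `oldFileSize` argument of the storage check. The generic return type was lost in the rendering of this diff; the sketch below assumes it is a nullable `long`, as the interface doc comment ("or null if not found") and the `?? 0` fallback in `IngestionContext` suggest. Variable names are illustrative only:

```csharp
// Hypothetical usage sketch, not code from this PR.
long? previouslyStored = await repository.GetImageSize(assetId, cancellationToken);

// An asset that has never been ingested has no ImageStorages row, so the
// pre-ingestion size falls back to 0 and the storage check behaves as before.
long oldFileSize = previouslyStored ?? 0;
```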
diff --git a/src/protagonist/Engine/Data/IEngineAssetRepository.cs b/src/protagonist/Engine/Data/IEngineAssetRepository.cs
index 1ef14e0f0..9083aaf36 100644
--- a/src/protagonist/Engine/Data/IEngineAssetRepository.cs
+++ b/src/protagonist/Engine/Data/IEngineAssetRepository.cs
@@ -24,4 +24,12 @@ Task UpdateIngestedAsset(Asset asset, ImageLocation? imageLocation, ImageS
     /// Get Asset with specified Id
     ///
     ValueTask GetAsset(AssetId assetId, CancellationToken cancellationToken = default);
+
+    ///
+    /// Retrieves the size of an image from the database, or null if the image is not found
+    ///
+    /// The asset id of the image to check
+    /// Current cancellation token
+    /// The size of the image, or null if not found
+    Task GetImageSize(AssetId assetId, CancellationToken cancellationToken = default);
 }
\ No newline at end of file
diff --git a/src/protagonist/Engine/Ingest/File/FileChannelWorker.cs b/src/protagonist/Engine/Ingest/File/FileChannelWorker.cs
index 1ce4cbd32..ec7cd8f16 100644
--- a/src/protagonist/Engine/Ingest/File/FileChannelWorker.cs
+++ b/src/protagonist/Engine/Ingest/File/FileChannelWorker.cs
@@ -43,7 +43,7 @@ public async Task Ingest(IngestionContext ingestionContext,
         var targetStorageLocation = storageKeyGenerator.GetStoredOriginalLocation(ingestionContext.AssetId);
 
         var assetInBucket = await assetToS3.CopyOriginToStorage(targetStorageLocation,
-            asset,
+            ingestionContext,
             !assetIngestorSizeCheck.CustomerHasNoStorageCheck(asset.Customer),
             customerOriginStrategy, cancellationToken);
 
diff --git a/src/protagonist/Engine/Ingest/Image/ImageIngesterWorker.cs b/src/protagonist/Engine/Ingest/Image/ImageIngesterWorker.cs
index 7c486e5bc..fab765aef 100644
--- a/src/protagonist/Engine/Ingest/Image/ImageIngesterWorker.cs
+++ b/src/protagonist/Engine/Ingest/Image/ImageIngesterWorker.cs
@@ -50,7 +50,7 @@ public async Task Ingest(IngestionContext ingestionContext,
     {
         var stopwatch = Stopwatch.StartNew();
         var assetOnDisk = await assetToDisk.CopyAssetToLocalDisk(
-            asset,
+            ingestionContext,
             sourceTemplate,
             !assetIngestorSizeCheck.CustomerHasNoStorageCheck(asset.Customer),
             customerOriginStrategy,
diff --git a/src/protagonist/Engine/Ingest/IngestExecutor.cs b/src/protagonist/Engine/Ingest/IngestExecutor.cs
index 47261647c..1579329c7 100644
--- a/src/protagonist/Engine/Ingest/IngestExecutor.cs
+++ b/src/protagonist/Engine/Ingest/IngestExecutor.cs
@@ -1,5 +1,6 @@
 using DLCS.Model.Assets;
 using DLCS.Model.Customers;
+using DLCS.Model.Storage;
 using Engine.Data;
 
 namespace Engine.Ingest;
@@ -12,13 +13,17 @@ public class IngestExecutor
     private readonly IWorkerBuilder workerBuilder;
     private readonly IEngineAssetRepository assetRepository;
     private readonly ILogger logger;
+    private readonly IAssetIngestorSizeCheck assetIngestorSizeCheck;
 
-    public IngestExecutor(IWorkerBuilder workerBuilder, IEngineAssetRepository assetRepository,
+    public IngestExecutor(IWorkerBuilder workerBuilder,
+        IEngineAssetRepository assetRepository,
+        IAssetIngestorSizeCheck assetIngestorSizeCheck,
         ILogger logger)
     {
         this.workerBuilder = workerBuilder;
         this.assetRepository = assetRepository;
         this.logger = logger;
+        this.assetIngestorSizeCheck = assetIngestorSizeCheck;
     }
 
     public async Task IngestAsset(Asset asset, CustomerOriginStrategy customerOriginStrategy,
@@ -28,6 +33,12 @@ public async Task IngestAsset(Asset asset, CustomerOriginStrategy
 
         var context = new IngestionContext(asset);
 
+        if (!assetIngestorSizeCheck.CustomerHasNoStorageCheck(asset.Customer))
+        {
+            var preIngestionAssetSize = await assetRepository.GetImageSize(asset.Id, cancellationToken);
+            context.WithPreIngestionAssetSize(preIngestionAssetSize);
+        }
+
         var postProcessors = new List(workers.Count);
 
         var overallStatus = IngestResultStatus.Unknown;
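Putting the `IngestExecutor` change above together with the rest of the PR: the pre-ingestion size is looked up once per ingest (skipped for customers exempt from storage checks), parked on the `IngestionContext`, and later handed to the size verification the asset movers perform. A condensed sketch of that flow, using names from this diff but omitting the surrounding worker loop:

```csharp
// Condensed flow sketch - simplified from the diff, not the full implementation.
var context = new IngestionContext(asset);

if (!assetIngestorSizeCheck.CustomerHasNoStorageCheck(asset.Customer))
{
    // For a reingest this is the size the asset already occupies; for a new asset it is empty/0.
    var preIngestionAssetSize = await assetRepository.GetImageSize(asset.Id, cancellationToken);
    context.WithPreIngestionAssetSize(preIngestionAssetSize);
}

// Later, AssetToDisk/AssetToS3 verify the fetched file via AssetMoverBase:
//   VerifyFileSize(assetId, newSize, context.PreIngestionAssetSize)
// which calls storageMetrics.CanStoreAssetSize(newSize, oldFileSize), i.e.
//   (TotalSizeOfStoredImages - oldFileSize) + newSize <= MaximumTotalSizeOfStoredImages
```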
diff --git a/src/protagonist/Engine/Ingest/IngestionContext.cs b/src/protagonist/Engine/Ingest/IngestionContext.cs
index e253e4e7c..535f55fbb 100644
--- a/src/protagonist/Engine/Ingest/IngestionContext.cs
+++ b/src/protagonist/Engine/Ingest/IngestionContext.cs
@@ -22,6 +22,8 @@ public class IngestionContext
     public ImageStorage? ImageStorage { get; private set; }
 
+    public long PreIngestionAssetSize { get; private set; }
+
     ///
     /// Any objects, and their size, uploaded to DLCS storage
     ///
@@ -44,6 +46,18 @@ public IngestionContext WithLocation(ImageLocation imageLocation)
         ImageLocation = imageLocation.ThrowIfNull(nameof(imageLocation));
         return this;
     }
+
+    ///
+    /// Updates the pre-ingestion asset size. This is used for calculating storage of reingested assets
+    ///
+    /// The size of the asset
+    /// The ingestion context
+    public IngestionContext WithPreIngestionAssetSize(long? assetSize = null)
+    {
+        PreIngestionAssetSize = assetSize ?? 0;
+
+        return this;
+    }
 
     public IngestionContext WithStorage(long? assetSize = null, long? thumbnailSize = null)
     {
diff --git a/src/protagonist/Engine/Ingest/Persistence/AssetMoverBase.cs b/src/protagonist/Engine/Ingest/Persistence/AssetMoverBase.cs
index dbd478947..063a70bbe 100644
--- a/src/protagonist/Engine/Ingest/Persistence/AssetMoverBase.cs
+++ b/src/protagonist/Engine/Ingest/Persistence/AssetMoverBase.cs
@@ -15,10 +15,10 @@ protected AssetMoverBase(IStorageRepository storageRepository)
         StorageRepository = storageRepository;
     }
 
-    protected async Task VerifyFileSize(AssetId assetId, long size)
+    protected async Task VerifyFileSize(AssetId assetId, long size, long oldFileSize)
     {
         var storageMetrics = await StorageRepository.GetStorageMetrics(assetId.Customer, CancellationToken.None);
-        var customerHasEnoughSize = storageMetrics.CanStoreAssetSize(size);
+        var customerHasEnoughSize = storageMetrics.CanStoreAssetSize(size, oldFileSize);
         return customerHasEnoughSize;
     }
 }
\ No newline at end of file
diff --git a/src/protagonist/Engine/Ingest/Persistence/AssetToDisk.cs b/src/protagonist/Engine/Ingest/Persistence/AssetToDisk.cs
index 16d38b570..9eb7a03b7 100644
--- a/src/protagonist/Engine/Ingest/Persistence/AssetToDisk.cs
+++ b/src/protagonist/Engine/Ingest/Persistence/AssetToDisk.cs
@@ -15,13 +15,13 @@ public interface IAssetToDisk
     ///
     /// Copy asset from Origin to local disk.
     ///
-    /// to be copied.
+    /// Ingestion context containing the to be copied.
     /// String representing destinations folder to copy to.
     /// if True, size is validated that it does not exceed allowed size.
     /// to use to fetch item.
     /// Current cancellation token
     /// containing new location, size etc
-    Task CopyAssetToLocalDisk(Asset asset, string destinationTemplate, bool verifySize,
+    Task CopyAssetToLocalDisk(IngestionContext context, string destinationTemplate, bool verifySize,
         CustomerOriginStrategy customerOriginStrategy, CancellationToken cancellationToken = default);
 }
 
@@ -49,37 +49,37 @@ public AssetToDisk(
     ///
     /// Copy asset from Origin to local disk.
     ///
-    /// to be copied.
+    /// Ingestion context containing the to be copied.
     /// String representing destinations folder to copy to.
     /// if True, size is validated that it does not exceed allowed size.
     /// to use to fetch item.
     /// Current cancellation token
     /// containing new location, size etc
-    public async Task CopyAssetToLocalDisk(Asset asset, string destinationTemplate, bool verifySize,
+    public async Task CopyAssetToLocalDisk(IngestionContext context, string destinationTemplate, bool verifySize,
         CustomerOriginStrategy customerOriginStrategy, CancellationToken cancellationToken = default)
     {
         destinationTemplate.ThrowIfNullOrWhiteSpace(nameof(destinationTemplate));
 
         var originResponse =
-            await originFetcher.LoadAssetFromLocation(asset.Id, asset.GetIngestOrigin(),
+            await originFetcher.LoadAssetFromLocation(context.Asset.Id, context.Asset.GetIngestOrigin(),
                 customerOriginStrategy, cancellationToken);
 
         if (originResponse == null || originResponse.Stream.IsNull())
         {
-            logger.LogWarning("Unable to fetch asset {AssetId} from {Origin}, using {OriginStrategy}", asset.Id,
-                asset.Origin, customerOriginStrategy.Strategy);
+            logger.LogWarning("Unable to fetch asset {AssetId} from {Origin}, using {OriginStrategy}", context.Asset.Id,
+                context.Asset.Origin, customerOriginStrategy.Strategy);
             throw new ApplicationException(
-                $"Unable to get asset '{asset.Id}' from origin '{asset.Origin}' using {customerOriginStrategy.Strategy}");
+                $"Unable to get asset '{context.Asset.Id}' from origin '{context.Asset.Origin}' using {customerOriginStrategy.Strategy}");
         }
 
         cancellationToken.ThrowIfCancellationRequested();
 
-        var assetFromOrigin = await CopyAssetToDisk(asset, destinationTemplate, originResponse, cancellationToken);
+        var assetFromOrigin = await CopyAssetToDisk(context.Asset, destinationTemplate, originResponse, cancellationToken);
         assetFromOrigin.CustomerOriginStrategy = customerOriginStrategy;
 
         if (verifySize)
         {
-            await VerifyFileSize(asset, assetFromOrigin);
+            await VerifyFileSize(context, assetFromOrigin);
         }
 
         return assetFromOrigin;
@@ -148,9 +148,9 @@ private string GetFileExtension(OriginResponse originResponse)
         return extension;
     }
 
-    private async Task VerifyFileSize(Asset asset, AssetFromOrigin assetFromOrigin)
+    private async Task VerifyFileSize(IngestionContext context, AssetFromOrigin assetFromOrigin)
     {
-        var customerHasEnoughSize = await VerifyFileSize(asset.Id, assetFromOrigin.AssetSize);
+        var customerHasEnoughSize = await VerifyFileSize(context.Asset.Id, assetFromOrigin.AssetSize, context.PreIngestionAssetSize);
 
         if (!customerHasEnoughSize)
         {
diff --git a/src/protagonist/Engine/Ingest/Persistence/AssetToS3.cs b/src/protagonist/Engine/Ingest/Persistence/AssetToS3.cs
index 143d2929b..581f37f9d 100644
--- a/src/protagonist/Engine/Ingest/Persistence/AssetToS3.cs
+++ b/src/protagonist/Engine/Ingest/Persistence/AssetToS3.cs
@@ -19,12 +19,12 @@ public interface IAssetToS3
     /// Configuration determines if this is a direct S3-S3 copy, or S3-disk-S3.
     ///
     /// where file is to copied to
-    /// to be copied
+    /// Ingestion context containing the to be copied
    /// if True, size is validated that it does not exceed allowed size.
     /// to use to fetch item.
     ///
     /// containing new location, size etc
-    Task CopyOriginToStorage(ObjectInBucket destination, Asset asset, bool verifySize,
+    Task CopyOriginToStorage(ObjectInBucket destination, IngestionContext context, bool verifySize,
         CustomerOriginStrategy customerOriginStrategy, CancellationToken cancellationToken = default);
 }
 
@@ -54,65 +54,65 @@ public AssetToS3(
         this.fileSystem = fileSystem;
     }
 
-    public async Task CopyOriginToStorage(ObjectInBucket destination, Asset asset, bool verifySize,
+    public async Task CopyOriginToStorage(ObjectInBucket destination, IngestionContext context, bool verifySize,
         CustomerOriginStrategy customerOriginStrategy, CancellationToken cancellationToken = default)
     {
         var stopwatch = Stopwatch.StartNew();
-        var copyResult = await DoCopy(destination, asset, verifySize, customerOriginStrategy, cancellationToken);
+        var copyResult = await DoCopy(destination, context, verifySize, customerOriginStrategy, cancellationToken);
         stopwatch.Stop();
 
         logger.LogDebug("Copied asset {AssetId} in {Elapsed}ms using {OriginStrategy}",
-            asset.Id, stopwatch.ElapsedMilliseconds, customerOriginStrategy.Strategy);
+            context.Asset.Id, stopwatch.ElapsedMilliseconds, customerOriginStrategy.Strategy);
 
         return copyResult;
     }
 
-    private async Task DoCopy(ObjectInBucket destination, Asset asset, bool verifySize,
+    private async Task DoCopy(ObjectInBucket destination, IngestionContext context, bool verifySize,
         CustomerOriginStrategy customerOriginStrategy, CancellationToken cancellationToken)
     {
         if (ShouldCopyBucketToBucket(customerOriginStrategy))
         {
             // We have direct bucket access so can copy directly using SDK
-            return await CopyBucketToBucket(asset, destination, verifySize, cancellationToken);
+            return await CopyBucketToBucket(context, destination, verifySize, cancellationToken);
         }
 
         // We don't have direct bucket access; or it's a non-S3 origin so copy S3->Disk->S3
-        return await IndirectCopyBucketToBucket(asset, destination, verifySize, customerOriginStrategy,
+        return await IndirectCopyBucketToBucket(context, destination, verifySize, customerOriginStrategy,
             cancellationToken);
     }
 
     private bool ShouldCopyBucketToBucket(CustomerOriginStrategy customerOriginStrategy)
         => customerOriginStrategy is { Strategy: OriginStrategyType.S3Ambient };
 
-    private async Task CopyBucketToBucket(Asset asset, ObjectInBucket destination, bool verifySize,
+    private async Task CopyBucketToBucket(IngestionContext context, ObjectInBucket destination, bool verifySize,
         CancellationToken cancellationToken)
     {
-        var assetId = asset.Id;
-        var source = RegionalisedObjectInBucket.Parse(asset.GetIngestOrigin());
+        var assetId = context.Asset.Id;
+        var source = RegionalisedObjectInBucket.Parse(context.Asset.GetIngestOrigin());
 
         if (source == null)
         {
             // TODO - better error type
-            logger.LogError("Unable to parse ingest origin {Origin} to ObjectInBucket", asset.GetIngestOrigin());
+            logger.LogError("Unable to parse ingest origin {Origin} to ObjectInBucket", context.Asset.GetIngestOrigin());
             throw new InvalidOperationException(
-                $"Unable to parse ingest origin {asset.GetIngestOrigin()} to ObjectInBucket");
+                $"Unable to parse ingest origin {context.Asset.GetIngestOrigin()} to ObjectInBucket");
         }
 
-        logger.LogDebug("Copying asset '{AssetId}' directly from bucket to bucket. {Source} - {Dest}", asset.Id,
+        logger.LogDebug("Copying asset '{AssetId}' directly from bucket to bucket. {Source} - {Dest}", context.Asset.Id,
             source.GetS3Uri(), destination.GetS3Uri());
 
         // copy S3-S3
-        Func>? sizeVerifier = verifySize ? assetSize => VerifyFileSize(assetId, assetSize) : null;
+        Func>? sizeVerifier = verifySize ? assetSize => VerifyFileSize(assetId, assetSize, context.PreIngestionAssetSize) : null;
         var copyResult = await bucketWriter.CopyLargeObject(source, destination,
             verifySize: sizeVerifier, token: cancellationToken);
 
         if (copyResult.Result is not LargeObjectStatus.Success and not LargeObjectStatus.FileTooLarge)
         {
             throw new ApplicationException(
-                $"Failed to copy timebased asset {asset.Id} directly from '{asset.GetIngestOrigin()}' to {destination.GetS3Uri()}. Result: {copyResult.Result}");
+                $"Failed to copy timebased asset {context.Asset.Id} directly from '{context.Asset.GetIngestOrigin()}' to {destination.GetS3Uri()}. Result: {copyResult.Result}");
         }
 
         var assetFromOrigin = new AssetFromOrigin(assetId, copyResult.Size ?? 0, destination.GetS3Uri().ToString(),
-            asset.MediaType);
+            context.Asset.MediaType);
 
         if (copyResult.Result == LargeObjectStatus.FileTooLarge)
         {
@@ -122,18 +122,19 @@ private async Task CopyBucketToBucket(Asset asset, ObjectInBuck
         return assetFromOrigin;
     }
 
-    private async Task IndirectCopyBucketToBucket(Asset asset, ObjectInBucket destination,
+    private async Task IndirectCopyBucketToBucket(IngestionContext context, ObjectInBucket destination,
         bool verifySize, CustomerOriginStrategy customerOriginStrategy, CancellationToken cancellationToken)
     {
-        logger.LogDebug("Copying asset '{AssetId}' indirectly from bucket to bucket. {Source} - {Dest}", asset.Id,
-            asset.GetIngestOrigin(), destination.GetS3Uri());
-        var assetId = asset.Id;
+        logger.LogDebug("Copying asset '{AssetId}' indirectly from bucket to bucket. {Source} - {Dest}",
+            context.Asset.Id,
+            context.Asset.GetIngestOrigin(), destination.GetS3Uri());
+        var assetId = context.Asset.Id;
         string? downloadedFile = null;
 
         try
         {
             var diskDestination = GetDestination(assetId);
-            var assetOnDisk = await assetToDisk.CopyAssetToLocalDisk(asset, diskDestination, verifySize,
+            var assetOnDisk = await assetToDisk.CopyAssetToLocalDisk(context, diskDestination, verifySize,
                 customerOriginStrategy, cancellationToken);
 
             if (assetOnDisk.FileExceedsAllowance)
@@ -141,7 +142,7 @@ private async Task IndirectCopyBucketToBucket(Asset asset, Obje
                 return assetOnDisk;
             }
 
-            logger.LogDebug("Copied asset '{AssetId}' to disk, copying to bucket..", asset.Id);
+            logger.LogDebug("Copied asset '{AssetId}' to disk, copying to bucket..", context.Asset.Id);
             var success = await bucketWriter.WriteFileToBucket(destination, assetOnDisk.Location,
                 assetOnDisk.ContentType, cancellationToken);
             downloadedFile = assetOnDisk.Location;
@@ -149,7 +150,7 @@ private async Task IndirectCopyBucketToBucket(Asset asset, Obje
             if (!success)
             {
                 throw new ApplicationException(
-                    $"Failed to copy timebased asset {assetId} indirectly from '{asset.GetIngestOrigin()}' to {destination}");
+                    $"Failed to copy timebased asset {assetId} indirectly from '{context.Asset.GetIngestOrigin()}' to {destination}");
             }
 
             return new AssetFromOrigin(assetId, assetOnDisk.AssetSize, destination.GetS3Uri().ToString(),
diff --git a/src/protagonist/Engine/Ingest/Timebased/TimebasedIngesterWorker.cs b/src/protagonist/Engine/Ingest/Timebased/TimebasedIngesterWorker.cs
index d7c471b4a..236dc35bf 100644
--- a/src/protagonist/Engine/Ingest/Timebased/TimebasedIngesterWorker.cs
+++ b/src/protagonist/Engine/Ingest/Timebased/TimebasedIngesterWorker.cs
@@ -41,7 +41,7 @@ public async Task Ingest(IngestionContext ingestionContext,
         var targetStorageLocation = storageKeyGenerator.GetTimebasedInputLocation(asset.Id);
 
         var assetInBucket = await assetToS3.CopyOriginToStorage(
             targetStorageLocation,
-            asset,
+            ingestionContext,
             !assetIngestorSizeCheck.CustomerHasNoStorageCheck(asset.Customer),
             customerOriginStrategy, cancellationToken);
 
         ingestionContext.WithAssetFromOrigin(assetInBucket);
diff --git a/src/protagonist/Test.Helpers/Integration/DlcsDatabaseFixture.cs b/src/protagonist/Test.Helpers/Integration/DlcsDatabaseFixture.cs
index 774d58d0c..45cdb4750 100644
--- a/src/protagonist/Test.Helpers/Integration/DlcsDatabaseFixture.cs
+++ b/src/protagonist/Test.Helpers/Integration/DlcsDatabaseFixture.cs
@@ -48,7 +48,7 @@ public void CleanUp()
     {
         DbContext.Database.ExecuteSqlRaw("DELETE FROM \"Spaces\" WHERE \"Customer\" != 99 AND \"Id\" != 1");
         DbContext.Database.ExecuteSqlRaw("DELETE FROM \"Customers\" WHERE \"Id\" != 99");
-        DbContext.Database.ExecuteSqlRaw("DELETE FROM \"StoragePolicies\" WHERE \"Id\" not in ('default', 'small')");
+        DbContext.Database.ExecuteSqlRaw("DELETE FROM \"StoragePolicies\" WHERE \"Id\" not in ('default', 'small', 'medium')");
         DbContext.Database.ExecuteSqlRaw("DELETE FROM \"ThumbnailPolicies\" WHERE \"Id\" != 'default'");
         DbContext.Database.ExecuteSqlRaw(
             "DELETE FROM \"ImageOptimisationPolicies\" WHERE \"Id\" not in ('fast-higher', 'video-max', 'audio-max', 'cust-default')");
@@ -90,7 +90,14 @@ await DbContext.StoragePolicies.AddRangeAsync(new StoragePolicy
             Id = "small",
             MaximumNumberOfStoredImages = 10,
             MaximumTotalSizeOfStoredImages = 100
+        },
+        new StoragePolicy
+        {
+            Id = "medium",
+            MaximumNumberOfStoredImages = 100,
+            MaximumTotalSizeOfStoredImages = 1000
         });
+
         await DbContext.EntityCounters.AddRangeAsync(new EntityCounter
         {
             Type = KnownEntityCounters.CustomerSpaces,