Skip to content

Commit

Permalink
#98 Included the AI predictions in the webhook requests.
Browse files Browse the repository at this point in the history
  • Loading branch information
djdd87 committed Feb 6, 2022
1 parent 86d5162 commit 89f54c5
Show file tree
Hide file tree
Showing 14 changed files with 135 additions and 307 deletions.
10 changes: 6 additions & 4 deletions SynoAI/AIs/DeepStack/DeepStackAI.cs
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,11 @@ public async override Task<IEnumerable<AIPrediction>> Process(ILogger logger, Ca

decimal minConfidence = camera.Threshold / 100m;

MultipartFormDataContent multipartContent = new MultipartFormDataContent();
multipartContent.Add(new StreamContent(new MemoryStream(image)), "image", "image");
multipartContent.Add(new StringContent(minConfidence.ToString()), "min_confidence"); // From face detection example - using JSON with MinConfidence didn't always work
MultipartFormDataContent multipartContent = new()
{
{ new StreamContent(new MemoryStream(image)), "image", "image" },
{ new StringContent(minConfidence.ToString()), "min_confidence" } // From face detection example - using JSON with MinConfidence didn't always work
};

logger.LogDebug($"{camera.Name}: DeepStackAI: POSTing image with minimum confidence of {minConfidence} ({camera.Threshold}%) to {string.Join("/", Config.AIUrl, Config.AIPath)}.");

Expand Down Expand Up @@ -69,7 +71,7 @@ public async override Task<IEnumerable<AIPrediction>> Process(ILogger logger, Ca
/// <returns>A <see cref="Uri"/> for the combined base and resource.</returns>
protected Uri GetUri(string basePath, string resourcePath)
{
    // Build the absolute base first so relative resource paths resolve
    // correctly regardless of a trailing slash on the configured URL.
    Uri baseUri = new(basePath);
    return new Uri(baseUri, resourcePath);
}

Expand Down
81 changes: 24 additions & 57 deletions SynoAI/Controllers/CameraController.cs
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,8 @@ public class CameraController : ControllerBase
private readonly ISynologyService _synologyService;
private readonly ILogger<CameraController> _logger;

private static ConcurrentDictionary<string, bool> _runningCameraChecks = new ConcurrentDictionary<string, bool>(StringComparer.OrdinalIgnoreCase);
private static ConcurrentDictionary<string, DateTime> _delayedCameraChecks = new ConcurrentDictionary<string, DateTime>(StringComparer.OrdinalIgnoreCase);
private static ConcurrentDictionary<string, bool> _runningCameraChecks = new(StringComparer.OrdinalIgnoreCase);
private static ConcurrentDictionary<string, DateTime> _delayedCameraChecks = new(StringComparer.OrdinalIgnoreCase);

public CameraController(IAIService aiService, ISynologyService synologyService, ILogger<CameraController> logger, IHubContext<SynoAIHub> hubContext)
{
Expand Down Expand Up @@ -134,7 +134,7 @@ public async void Get(string id)
int maxSizeX = camera.GetMaxSizeX();
int maxSizeY = camera.GetMaxSizeY();

List<AIPrediction> validPredictions = new List<AIPrediction>();
List<AIPrediction> validPredictions = new();
foreach (AIPrediction prediction in predictions)
{
// Check if the prediction label is in the list of types the camera is looking for
Expand Down Expand Up @@ -178,14 +178,17 @@ public async void Get(string id)

if (validPredictions.Count() > 0)
{
// Generate text for notifications
IEnumerable<string> labels = GetLabels(validPredictions);

// Process and save the snapshot
ProcessedImage processedImage = SnapshotManager.DressImage(camera, snapshot, predictions, validPredictions, _logger);

// Send Notifications
await SendNotifications(camera, processedImage, labels);
Notification notification = new()
{
ProcessedImage = processedImage,
ValidPredictions = validPredictions
};

await SendNotifications(camera, notification);

// Inform any connected web clients about this new snapshot, for the "realtime" option in the web UI
await _hubContext.Clients.All.SendAsync("ReceiveSnapshot", camera.Name, processedImage.FileName);
Expand Down Expand Up @@ -239,10 +242,10 @@ private bool ShouldIncludePrediction(string id, Camera camera, Stopwatch overall
// Check if the prediction falls within the exclusion zones
if (camera.Exclusions != null && camera.Exclusions.Count() > 0)
{
Rectangle boundary = new Rectangle(prediction.MinX, prediction.MinY, prediction.SizeX, prediction.SizeY);
Rectangle boundary = new(prediction.MinX, prediction.MinY, prediction.SizeX, prediction.SizeY);
foreach (Zone exclusion in camera.Exclusions)
{
Rectangle exclusionZoneBoundary = new Rectangle(exclusion.Start.X, exclusion.Start.Y, exclusion.End.X - exclusion.Start.X, exclusion.End.Y - exclusion.Start.Y);
Rectangle exclusionZoneBoundary = new(exclusion.Start.X, exclusion.Start.Y, exclusion.End.X - exclusion.Start.X, exclusion.End.Y - exclusion.Start.Y);
bool exclude = exclusion.Mode == OverlapMode.Contains ? exclusionZoneBoundary.Contains(boundary) : exclusionZoneBoundary.IntersectsWith(boundary);
if (exclude)
{
Expand All @@ -256,43 +259,6 @@ private bool ShouldIncludePrediction(string id, Camera camera, Stopwatch overall
return true;
}

/// <summary>
/// Gets the labels from the predictions to use in the notifications.
/// </summary>
/// <param name="validPredictions">The predictions to process.</param>
/// <returns>A list of labels.</returns>
/// <summary>
/// Gets the labels from the predictions to use in the notifications.
/// </summary>
/// <param name="validPredictions">The predictions to process.</param>
/// <returns>A list of labels.</returns>
private IEnumerable<string> GetLabels(IEnumerable<AIPrediction> validPredictions)
{
    // Standard labelling: just the capitalised label of each valid prediction.
    if (!Config.AlternativeLabelling || Config.DrawMode != DrawMode.Matches)
    {
        return validPredictions.Select(x => x.Label.FirstCharToUpper()).ToList();
    }

    // Alternative labelling: include the rounded confidence, and prefix a
    // correlating number only when more than one object was detected.
    List<string> labels = new List<string>();
    int total = validPredictions.Count();

    int index = 1;
    foreach (AIPrediction prediction in validPredictions)
    {
        decimal confidence = Math.Round(prediction.Confidence, 0, MidpointRounding.AwayFromZero);
        string label = prediction.Label.FirstCharToUpper();

        labels.Add(total == 1
            ? $"{label} {confidence}%"
            : $"{index}. {label} {confidence}%");
        index++;
    }

    return labels;
}

/// <summary>
/// Adds a delay for the specified camera.
/// </summary>
Expand Down Expand Up @@ -332,7 +298,7 @@ private void CleanupOldImages()
{
_cleanupOldImagesRunning = true;

DirectoryInfo directory = new DirectoryInfo(Constants.DIRECTORY_CAPTURES);
DirectoryInfo directory = new(Constants.DIRECTORY_CAPTURES);
IEnumerable<FileInfo> files = directory.GetFiles("*", new EnumerationOptions() { RecurseSubdirectories = true });
foreach (FileInfo file in files)
{
Expand All @@ -350,7 +316,7 @@ private void CleanupOldImages()
}
}
private bool _cleanupOldImagesRunning;
private object _cleanUpOldImagesLock = new object();
private object _cleanUpOldImagesLock = new();

/// <summary>
/// Handles any required preprocessing of the captured image.
Expand Down Expand Up @@ -399,8 +365,8 @@ private SKBitmap Rotate(SKBitmap bitmap, double angle)
int rotatedWidth = (int)(cosine * originalWidth + sine * originalHeight);
int rotatedHeight = (int)(cosine * originalHeight + sine * originalWidth);

SKBitmap rotatedBitmap = new SKBitmap(rotatedWidth, rotatedHeight);
using (SKCanvas canvas = new SKCanvas(rotatedBitmap))
SKBitmap rotatedBitmap = new(rotatedWidth, rotatedHeight);
using (SKCanvas canvas = new(rotatedBitmap))
{
canvas.Clear();
canvas.Translate(rotatedWidth / 2, rotatedHeight / 2);
Expand All @@ -417,22 +383,23 @@ private SKBitmap Rotate(SKBitmap bitmap, double angle)
/// Sends notifications, if there is any configured
/// </summary>
/// <param name="camera">The camera responsible for this snapshot.</param>
/// <param name="processedImage">The path information for the snapshot.</param>
/// <param name="labels">The text metadata for each existing valid object.</param>
private async Task SendNotifications(Camera camera, ProcessedImage processedImage, IEnumerable<string> labels)
/// <param name="notification">The notification data to process.</param>
private async Task SendNotifications(Camera camera, Notification notification)
{
Stopwatch stopwatch = Stopwatch.StartNew();

IEnumerable<string> labels = notification.ValidPredictions.Select(x => x.Label).Distinct().ToList();

IEnumerable<INotifier> notifiers = Config.Notifiers
.Where(x =>
(x.Cameras == null || x.Cameras.Count() == 0 || x.Cameras.Any(c => c.Equals(camera.Name, StringComparison.OrdinalIgnoreCase))) &&
(x.Types == null || x.Types.Count() == 0 || x.Types.Any(t => labels.Contains(t, StringComparer.OrdinalIgnoreCase)))
(x.Cameras == null || !x.Cameras.Any() || x.Cameras.Any(c => c.Equals(camera.Name, StringComparison.OrdinalIgnoreCase))) &&
(x.Types == null || !x.Types.Any() || x.Types.Any(t => labels.Contains(t, StringComparer.OrdinalIgnoreCase)))
).ToList();

List<Task> tasks = new List<Task>();
List<Task> tasks = new();
foreach (INotifier notifier in notifiers)
{
tasks.Add(notifier.SendAsync(camera, processedImage, labels, _logger));
tasks.Add(notifier.SendAsync(camera, notification, _logger));
}

await Task.WhenAll(tasks);
Expand Down
67 changes: 67 additions & 0 deletions SynoAI/Models/Notification.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
using SynoAI.Extensions;
using System;
using System.Collections.Generic;
using System.Linq;

namespace SynoAI.Models
{
public class Notification
{
    /// <summary>
    /// Object for fetching the processed image.
    /// </summary>
    public ProcessedImage ProcessedImage { get; set; }

    /// <summary>
    /// The list of valid predictions.
    /// </summary>
    public IEnumerable<AIPrediction> ValidPredictions { get; set; }

    /// <summary>
    /// The list of types that were found, formatted according to the
    /// labelling configuration.
    /// </summary>
    public IEnumerable<string> FoundTypes => GetLabels();

    /// <summary>
    /// Gets the labels from the predictions to use in the notifications.
    /// </summary>
    /// <returns>A list of labels.</returns>
    private IEnumerable<string> GetLabels()
    {
        if (Config.AlternativeLabelling && Config.DrawMode == DrawMode.Matches)
        {
            // Materialise once: ValidPredictions may be a deferred sequence,
            // and we enumerate it more than once below.
            List<AIPrediction> predictions = ValidPredictions.ToList();

            List<string> labels = new();
            if (predictions.Count == 1)
            {
                // If there is only a single object, then don't add a correlating number and instead just
                // write out the label.
                decimal confidence = Math.Round(predictions[0].Confidence, 0, MidpointRounding.AwayFromZero);
                labels.Add($"{predictions[0].Label.FirstCharToUpper()} {confidence}%");
            }
            else
            {
                // Since there is more than one object detected, include correlating number
                int counter = 1;
                foreach (AIPrediction prediction in predictions)
                {
                    decimal confidence = Math.Round(prediction.Confidence, 0, MidpointRounding.AwayFromZero);
                    labels.Add($"{counter}. {prediction.Label.FirstCharToUpper()} {confidence}%");
                    counter++;
                }
            }

            return labels;
        }
        else
        {
            return ValidPredictions.Select(x => x.Label.FirstCharToUpper()).ToList();
        }
    }
}
}
7 changes: 3 additions & 4 deletions SynoAI/Notifiers/Email/Email.cs
Original file line number Diff line number Diff line change
Expand Up @@ -54,16 +54,15 @@ public class Email : NotifierBase
/// Sends a message and an image using the Pushbullet API.
/// </summary>
/// <param name="camera">The camera that triggered the notification.</param>
/// <param name="processedImage">Object for fetching the processed image.</param>
/// <param name="foundTypes">The list of types that were found.</param>
/// <param name="notification">The notification data to process.</param>
/// <param name="logger">A logger.</param>
public override async Task SendAsync(Camera camera, ProcessedImage processedImage, IEnumerable<string> foundTypes, ILogger logger)
public override async Task SendAsync(Camera camera, Notification notification, ILogger logger)
{
using (logger.BeginScope($"Email '{Destination}'"))
{
// Assign camera name to variable for logger placeholder
string cameraName = camera.Name;
string filePath = processedImage.FilePath;
string filePath = notification.ProcessedImage.FilePath;

try
{
Expand Down
2 changes: 1 addition & 1 deletion SynoAI/Notifiers/INotifier.cs
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,6 @@ public interface INotifier
/// <summary>
/// Handles the send of the notification.
/// </summary>
Task SendAsync(Camera camera, ProcessedImage processedImage, IEnumerable<string> foundTypes, ILogger logger);
Task SendAsync(Camera camera, Notification notification, ILogger logger);
}
}
2 changes: 1 addition & 1 deletion SynoAI/Notifiers/NotifierBase.cs
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ public abstract class NotifierBase : INotifier
public IEnumerable<string> Cameras { get; set; }
public IEnumerable<string> Types { get; set; }

public abstract Task SendAsync(Camera camera, ProcessedImage processedImage, IEnumerable<string> foundTypes, ILogger logger);
public abstract Task SendAsync(Camera camera, Notification notification, ILogger logger);

protected string GetMessage(Camera camera, IEnumerable<string> foundTypes, string errorMessage = null)
{
Expand Down
3 changes: 0 additions & 3 deletions SynoAI/Notifiers/NotifierFactory.cs
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,6 @@ public static INotifier Create(NotifierType type, ILogger logger, IConfiguration
case NotifierType.Webhook:
factory = new WebhookFactory();
break;
case NotifierType.WebhookLegacy:
factory = new WebhookFactoryLegacy();
break;
default:
throw new NotImplementedException(type.ToString());
}
Expand Down
6 changes: 1 addition & 5 deletions SynoAI/Notifiers/NotifierType.cs
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,6 @@ public enum NotifierType
/// <summary>
/// Calls a webhook with the image attached.
/// </summary>
Webhook,
/// <summary>
/// Legacy implementation of Webhook
/// </summary>
WebhookLegacy
Webhook
}
}
8 changes: 4 additions & 4 deletions SynoAI/Notifiers/Pushbullet/Pushbullet.cs
Original file line number Diff line number Diff line change
Expand Up @@ -32,12 +32,12 @@ public class Pushbullet : NotifierBase
/// Sends a message and an image using the Pushbullet API.
/// </summary>
/// <param name="camera">The camera that triggered the notification.</param>
/// <param name="processedImage">Object for fetching the processed image.</param>
/// <param name="foundTypes">The list of types that were found.</param>
/// <param name="logger">A logger.</param>
public override async Task SendAsync(Camera camera, ProcessedImage processedImage, IEnumerable<string> foundTypes, ILogger logger)
public override async Task SendAsync(Camera camera, Notification notification, ILogger logger)
{
// Pushbullet file uploads are a two part process. First we need to request to upload a file
ProcessedImage processedImage = notification.ProcessedImage;

string fileName = processedImage.FileName;
string requestJson = JsonConvert.SerializeObject(new PushbulletUploadRequest()
{
Expand Down Expand Up @@ -82,7 +82,7 @@ public override async Task SendAsync(Camera camera, ProcessedImage processedImag
{
Type = uploadSuccess ? "file" : "note",
Title = $"{camera.Name}: Movement Detected",
Body = GetMessage(camera, foundTypes, errorMessage: uploadError),
Body = GetMessage(camera, notification.FoundTypes, errorMessage: uploadError),
FileName = uploadSuccess ? uploadRequestResult.FileName : null,
FileUrl = uploadSuccess ? uploadRequestResult.FileUrl : null,
FileType = uploadSuccess ? uploadRequestResult.FileType : null
Expand Down
Loading

0 comments on commit 89f54c5

Please sign in to comment.