Skip to content

Commit 1bd6a90

Browse files
committed
#62 Added support for Pushover
1 parent 095e532 commit 1bd6a90

17 files changed

+360
-128
lines changed

.gitignore

+1
Original file line numberDiff line numberDiff line change
@@ -366,6 +366,7 @@ FodyWeavers.xsd
366366

367367
# Custom
368368
appsettings.Development.json
369+
appsettings.*.json
369370
PrivateNotes.txt
370371
Captures
371372
docker-compose.debug.yml

README.md

+20
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ I made this application mostly for myself in order to improve upon Christopher A
2626
* [Telegram](#telegram)
2727
* [Email](#email)
2828
* [HomeAssistant](#homeassistant)
29+
* [Pushover](#pushover)
2930
* [Caveats](#caveats)
3031
* [Configuration](#configuration)
3132
* [1) Configure Deepstack](#1-configure-deepstack)
@@ -277,6 +278,25 @@ Automations can be created using this webhook by checking for changes for the ca
277278

278279
Multiple webhooks can be set up, each pointed at a different HomeAssistant Push camera. Additionally, you can create an automation that is triggered on a Webhook call. Then just use the SynoAI webhook notification to call that webhook. Note that it's wasteful to send an image when triggering the non-Push webhooks on HomeAssistant, so ensure that SendImage is set to false.
279280

281+
### Pushover
282+
The [Pushover](https://www.pushover.net/) notification will send an image and a message containing a list of detected object types. An API key and user key will need to be obtained from your Pushover account. An array of devices can be specified to limit the devices that receive the notifications, or the `Device` field can be left blank.
283+
284+
```json
285+
{
286+
"Type": "Pushover",
287+
"ApiKey": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
288+
"UserKey": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
289+
"Device": [
290+
"iphone"
291+
]
292+
}
293+
```
294+
* ApiKey [required]: The API key for the Pushover service
295+
* UserKey [required]: The User key for the Pushover service
296+
* Device [optional]: An array of device names to send the notifications to. If left blank, the notifications will be sent to all devices
297+
* Sound [optional]: The [sound](https://pushover.net/api#sounds) to override the user's default sound choice
298+
* Priority [optional]: The [priority](https://pushover.net/api#priority) with which to send the message
299+
280300
## Caveats
281301
* SynoAI still relies on Surveillance Station triggering the motion alerts
282302
* Looking for an object, such as a car on a driveway, will continually trigger alerts if that object is in view of the camera when Surveillance Station detects movement, e.g. a tree blowing in the wind.

SynoAI.Tests/SynoAI.Tests.csproj

+16
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
<Project Sdk="Microsoft.NET.Sdk">
2+
3+
<PropertyGroup>
4+
<TargetFramework>net5.0</TargetFramework>
5+
6+
<IsPackable>false</IsPackable>
7+
</PropertyGroup>
8+
9+
<ItemGroup>
10+
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.9.4" />
11+
<PackageReference Include="NUnit" Version="3.13.1" />
12+
<PackageReference Include="NUnit3TestAdapter" Version="3.17.0" />
13+
<PackageReference Include="coverlet.collector" Version="3.0.2" />
14+
</ItemGroup>
15+
16+
</Project>

SynoAI.Tests/UnitTest1.cs

+18
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
using NUnit.Framework;
2+
3+
namespace SynoAI.Tests
4+
{
5+
public class Tests
6+
{
7+
[SetUp]
8+
public void Setup()
9+
{
10+
}
11+
12+
[Test]
13+
public void Test1()
14+
{
15+
Assert.Pass();
16+
}
17+
}
18+
}

SynoAI.sln

+23-20
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
11

22
Microsoft Visual Studio Solution File, Format Version 12.00
3-
# Visual Studio 15
4-
VisualStudioVersion = 15.0.26124.0
3+
# Visual Studio Version 17
4+
VisualStudioVersion = 17.0.32014.148
55
MinimumVisualStudioVersion = 15.0.26124.0
6-
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SynoAI", "SynoAI\SynoAI.csproj", "{D55517BF-4185-4B3D-956F-9CCE6425D88B}"
6+
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SynoAI", "SynoAI\SynoAI.csproj", "{D55517BF-4185-4B3D-956F-9CCE6425D88B}"
7+
EndProject
8+
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SynoAI.Tests", "SynoAI.Tests\SynoAI.Tests.csproj", "{6F168456-88C9-4B70-813B-AB93C13CD702}"
79
EndProject
8-
#Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SynoAI.Tests", "SynoAI.Tests\SynoAI.Tests.csproj", "{C3A70D73-D1DD-46E4-882E-34CA833BE3EF}"
9-
#EndProject
1010
Global
1111
GlobalSection(SolutionConfigurationPlatforms) = preSolution
1212
Debug|Any CPU = Debug|Any CPU
@@ -16,9 +16,6 @@ Global
1616
Release|x64 = Release|x64
1717
Release|x86 = Release|x86
1818
EndGlobalSection
19-
GlobalSection(SolutionProperties) = preSolution
20-
HideSolutionNode = FALSE
21-
EndGlobalSection
2219
GlobalSection(ProjectConfigurationPlatforms) = postSolution
2320
{D55517BF-4185-4B3D-956F-9CCE6425D88B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
2421
{D55517BF-4185-4B3D-956F-9CCE6425D88B}.Debug|Any CPU.Build.0 = Debug|Any CPU
@@ -32,17 +29,23 @@ Global
3229
{D55517BF-4185-4B3D-956F-9CCE6425D88B}.Release|x64.Build.0 = Release|Any CPU
3330
{D55517BF-4185-4B3D-956F-9CCE6425D88B}.Release|x86.ActiveCfg = Release|Any CPU
3431
{D55517BF-4185-4B3D-956F-9CCE6425D88B}.Release|x86.Build.0 = Release|Any CPU
35-
{C3A70D73-D1DD-46E4-882E-34CA833BE3EF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
36-
{C3A70D73-D1DD-46E4-882E-34CA833BE3EF}.Debug|Any CPU.Build.0 = Debug|Any CPU
37-
{C3A70D73-D1DD-46E4-882E-34CA833BE3EF}.Debug|x64.ActiveCfg = Debug|Any CPU
38-
{C3A70D73-D1DD-46E4-882E-34CA833BE3EF}.Debug|x64.Build.0 = Debug|Any CPU
39-
{C3A70D73-D1DD-46E4-882E-34CA833BE3EF}.Debug|x86.ActiveCfg = Debug|Any CPU
40-
{C3A70D73-D1DD-46E4-882E-34CA833BE3EF}.Debug|x86.Build.0 = Debug|Any CPU
41-
{C3A70D73-D1DD-46E4-882E-34CA833BE3EF}.Release|Any CPU.ActiveCfg = Release|Any CPU
42-
{C3A70D73-D1DD-46E4-882E-34CA833BE3EF}.Release|Any CPU.Build.0 = Release|Any CPU
43-
{C3A70D73-D1DD-46E4-882E-34CA833BE3EF}.Release|x64.ActiveCfg = Release|Any CPU
44-
{C3A70D73-D1DD-46E4-882E-34CA833BE3EF}.Release|x64.Build.0 = Release|Any CPU
45-
{C3A70D73-D1DD-46E4-882E-34CA833BE3EF}.Release|x86.ActiveCfg = Release|Any CPU
46-
{C3A70D73-D1DD-46E4-882E-34CA833BE3EF}.Release|x86.Build.0 = Release|Any CPU
32+
{6F168456-88C9-4B70-813B-AB93C13CD702}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
33+
{6F168456-88C9-4B70-813B-AB93C13CD702}.Debug|Any CPU.Build.0 = Debug|Any CPU
34+
{6F168456-88C9-4B70-813B-AB93C13CD702}.Debug|x64.ActiveCfg = Debug|Any CPU
35+
{6F168456-88C9-4B70-813B-AB93C13CD702}.Debug|x64.Build.0 = Debug|Any CPU
36+
{6F168456-88C9-4B70-813B-AB93C13CD702}.Debug|x86.ActiveCfg = Debug|Any CPU
37+
{6F168456-88C9-4B70-813B-AB93C13CD702}.Debug|x86.Build.0 = Debug|Any CPU
38+
{6F168456-88C9-4B70-813B-AB93C13CD702}.Release|Any CPU.ActiveCfg = Release|Any CPU
39+
{6F168456-88C9-4B70-813B-AB93C13CD702}.Release|Any CPU.Build.0 = Release|Any CPU
40+
{6F168456-88C9-4B70-813B-AB93C13CD702}.Release|x64.ActiveCfg = Release|Any CPU
41+
{6F168456-88C9-4B70-813B-AB93C13CD702}.Release|x64.Build.0 = Release|Any CPU
42+
{6F168456-88C9-4B70-813B-AB93C13CD702}.Release|x86.ActiveCfg = Release|Any CPU
43+
{6F168456-88C9-4B70-813B-AB93C13CD702}.Release|x86.Build.0 = Release|Any CPU
44+
EndGlobalSection
45+
GlobalSection(SolutionProperties) = preSolution
46+
HideSolutionNode = FALSE
47+
EndGlobalSection
48+
GlobalSection(ExtensibilityGlobals) = postSolution
49+
SolutionGuid = {B2C914D5-92C3-4299-9B25-318E116B0F72}
4750
EndGlobalSection
4851
EndGlobal

SynoAI/AIs/DeepStack/DeepStackAI.cs

+44-34
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
using Microsoft.Extensions.Logging;
22
using Newtonsoft.Json;
3+
using SynoAI.App;
34
using SynoAI.Models;
45
using System;
56
using System.Collections.Generic;
@@ -15,52 +16,61 @@ public class DeepStackAI : AI
1516
{
1617
public async override Task<IEnumerable<AIPrediction>> Process(ILogger logger, Camera camera, byte[] image)
1718
{
18-
using (HttpClient client = new HttpClient())
19-
{
20-
Stopwatch stopwatch = Stopwatch.StartNew();
19+
Stopwatch stopwatch = Stopwatch.StartNew();
2120

22-
decimal minConfidence = camera.Threshold / 100m;
21+
decimal minConfidence = camera.Threshold / 100m;
2322

24-
MultipartFormDataContent multipartContent = new MultipartFormDataContent();
25-
multipartContent.Add(new StreamContent(new MemoryStream(image)), "image", "image");
26-
multipartContent.Add(new StringContent(minConfidence.ToString()), "min_confidence"); // From face detection example - using JSON with MinConfidence didn't always work
23+
MultipartFormDataContent multipartContent = new MultipartFormDataContent();
24+
multipartContent.Add(new StreamContent(new MemoryStream(image)), "image", "image");
25+
multipartContent.Add(new StringContent(minConfidence.ToString()), "min_confidence"); // From face detection example - using JSON with MinConfidence didn't always work
2726

28-
client.BaseAddress = new Uri(Config.AIUrl);
27+
logger.LogDebug($"{camera.Name}: DeepStackAI: POSTing image with minimum confidence of {minConfidence} ({camera.Threshold}%) to {string.Join("/", Config.AIUrl, Config.AIPath)}.");
2928

30-
logger.LogDebug($"{camera.Name}: DeepStackAI: POSTing image with minimum confidence of {minConfidence} ({camera.Threshold}%) to {string.Join("/", Config.AIUrl, Config.AIPath)}.");
29+
Uri uri = GetUri(Config.AIUrl, Config.AIPath);
30+
HttpResponseMessage response = await Shared.HttpClient.PostAsync(uri, multipartContent);
3131

32-
HttpResponseMessage response = await client.PostAsync(Config.AIPath, multipartContent);
33-
if (response.IsSuccessStatusCode)
32+
if (response.IsSuccessStatusCode)
33+
{
34+
DeepStackResponse deepStackResponse = await GetResponse(logger, camera, response);
35+
if (deepStackResponse.Success)
3436
{
35-
DeepStackResponse deepStackResponse = await GetResponse(logger, camera, response);
36-
if (deepStackResponse.Success)
37+
IEnumerable<AIPrediction> predictions = deepStackResponse.Predictions.Where(x => x.Confidence >= minConfidence).Select(x => new AIPrediction()
3738
{
38-
IEnumerable<AIPrediction> predictions = deepStackResponse.Predictions.Where(x=> x.Confidence >= minConfidence).Select(x => new AIPrediction()
39-
{
40-
Confidence = x.Confidence * 100,
41-
Label = x.Label,
42-
MaxX = x.MaxX,
43-
MaxY = x.MaxY,
44-
MinX = x.MinX,
45-
MinY = x.MinY
46-
}).ToList();
47-
48-
stopwatch.Stop();
49-
logger.LogInformation($"{camera.Name}: DeepStackAI: Processed successfully ({stopwatch.ElapsedMilliseconds}ms).");
50-
return predictions;
51-
}
52-
else
53-
{
54-
logger.LogWarning($"{camera.Name}: DeepStackAI: Failed with unknown error.");
55-
}
39+
Confidence = x.Confidence * 100,
40+
Label = x.Label,
41+
MaxX = x.MaxX,
42+
MaxY = x.MaxY,
43+
MinX = x.MinX,
44+
MinY = x.MinY
45+
}).ToList();
46+
47+
stopwatch.Stop();
48+
logger.LogInformation($"{camera.Name}: DeepStackAI: Processed successfully ({stopwatch.ElapsedMilliseconds}ms).");
49+
return predictions;
5650
}
5751
else
5852
{
59-
logger.LogWarning($"{camera.Name}: DeepStackAI: Failed to call API with HTTP status code '{response.StatusCode}'.");
53+
logger.LogWarning($"{camera.Name}: DeepStackAI: Failed with unknown error.");
6054
}
61-
62-
return null;
6355
}
56+
else
57+
{
58+
logger.LogWarning($"{camera.Name}: DeepStackAI: Failed to call API with HTTP status code '{response.StatusCode}'.");
59+
}
60+
61+
return null;
62+
}
63+
64+
/// <summary>
65+
/// Builds a <see cref="Uri"/> from the provided base and resource.
66+
/// </summary>
67+
/// <param name="basePath"></param>
68+
/// <param name="resourcePath"></param>
69+
/// <returns>A <see cref="Uri"/> for the combined base and resource.</returns>
70+
protected Uri GetUri(string basePath, string resourcePath)
71+
{
72+
Uri baseUri = new Uri(basePath);
73+
return new Uri(baseUri, resourcePath);
6474
}
6575

6676
/// <summary>

SynoAI/App/HttpClientWrapper.cs

+8
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
using System.Net.Http;
2+
3+
namespace SynoAI.App
4+
{
5+
public class HttpClientWrapper : HttpClient, IHttpClient
6+
{
7+
}
8+
}

SynoAI/App/IHttpClient.cs

+12
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
using System;
2+
using System.Net.Http;
3+
using System.Threading.Tasks;
4+
5+
namespace SynoAI.App
6+
{
7+
public interface IHttpClient
8+
{
9+
Task<HttpResponseMessage> PostAsync(string requestUri, HttpContent content);
10+
Task<HttpResponseMessage> PostAsync(Uri requestUri, HttpContent content);
11+
}
12+
}

SynoAI/App/Shared.cs

+9
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
using System.Net.Http;
2+
3+
namespace SynoAI.App
4+
{
5+
public static class Shared
6+
{
7+
public static IHttpClient HttpClient = new HttpClientWrapper();
8+
}
9+
}

SynoAI/Config.cs

+2-2
Original file line numberDiff line numberDiff line change
@@ -180,11 +180,11 @@ public static void Generate(ILogger logger, IConfiguration configuration)
180180

181181
StrokeWidth = configuration.GetValue<int>("StrokeWidth", 2);
182182

183-
BoxColor = configuration.GetValue<string>("BoxColor", SKColors.Red.ToString());
183+
BoxColor = configuration.GetValue<string>("BoxColor", SKColors.Green.ToString());
184184
ExclusionBoxColor = configuration.GetValue<string>("ExclusionBoxColor", SKColors.Green.ToString());
185185

186186
Font = configuration.GetValue<string>("Font", "Tahoma");
187-
FontColor = configuration.GetValue<string>("FontColor", SKColors.Red.ToString());
187+
FontColor = configuration.GetValue<string>("FontColor", SKColors.Green.ToString());
188188
FontSize = configuration.GetValue<int>("FontSize", 12);
189189

190190
TextOffsetX = configuration.GetValue<int>("TextOffsetX", 4);

SynoAI/Notifiers/NotifierBase.cs

-1
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,6 @@ protected string GetMessage(Camera camera, IEnumerable<string> foundTypes, strin
5454
return result;
5555
}
5656

57-
5857
/// <summary>
5958
/// Fetches the response content and parses it as the specified type.
6059
/// </summary>

0 commit comments

Comments
 (0)