Add talkgroup activity alerting #17

Merged
amigan merged 11 commits from alerting into trunk 2024-10-31 00:20:48 -04:00
7 changed files with 79 additions and 41 deletions
Showing only changes of commit 922da11c8c

.gitignore
View file

@@ -1,4 +1,5 @@
 config.yaml
+config.test.yaml
 mydb.sql
 client/calls/
 !client/calls/.gitkeep
@@ -6,3 +7,4 @@ client/calls/
 /calls
 Session.vim
 *.log
+*.dlv

View file

@@ -85,9 +85,11 @@ type Clock interface {
 	Now() time.Time
 }
 
-// defaultClock is used in case no clock is provided to the constructor.
+// DefaultClock is used in case no clock is provided to the constructor.
 type defaultClock struct{}
 
+var DefaultClock Clock = &defaultClock{}
+
 func (c *defaultClock) Now() time.Time {
 	return time.Now()
 }
@@ -137,7 +139,7 @@ func NewTimeSeries(os ...Option) (*TimeSeries, error) {
 		o(&opts)
 	}
 	if opts.clock == nil {
-		opts.clock = &defaultClock{}
+		opts.clock = DefaultClock
 	}
 	if opts.granularities == nil {
 		opts.granularities = defaultGranularities
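
For review context, a minimal sketch of what the exported DefaultClock enables: anything with a Now() time.Time method satisfies Clock, so a test (or the alerting sink's backfill later in this diff) can pin or shift the clock the package falls back to. The fakeClock type and test below are hypothetical, not part of this commit, and assume NewTimeSeries with no options uses the defaults shown above.

package timeseries

import (
	"testing"
	"time"
)

// fakeClock is a hypothetical Clock implementation pinned to a single instant.
type fakeClock struct{ t time.Time }

func (c *fakeClock) Now() time.Time { return c.t }

func TestPinnedDefaultClock(t *testing.T) {
	// Swap the package-level clock and restore it afterwards; any series
	// constructed without an explicit clock option now sees a frozen "now".
	old := DefaultClock
	DefaultClock = &fakeClock{t: time.Date(2024, 10, 30, 0, 0, 0, 0, time.UTC)}
	defer func() { DefaultClock = old }()

	ts, err := NewTimeSeries()
	if err != nil {
		t.Fatal(err)
	}
	_ = ts
}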

View file

@@ -3,8 +3,6 @@ package timeseries
 import (
 	"testing"
 	"time"
-
-	"github.com/benbjohnson/clock"
 )
 
 // TODO: do table based testing

View file

@@ -3,6 +3,8 @@ package trending
 import (
 	"math"
 	"time"
+
+	timeseries "dynatron.me/x/stillbox/internal/timeseries"
 )
 
 type item[K comparable] struct {
@@ -66,11 +68,13 @@ func (i *item[K]) score() score[K] {
 		Expectation: expectation,
 		Maximum: i.max,
 		KLScore: klScore,
+		Count: count,
+		RecentCount: recentCount,
 	}
 }
 
 func (i *item[K]) computeCounts() (float64, float64) {
-	now := time.Now()
+	now := timeseries.DefaultClock.Now()
 	totalCount, _ := i.eventSeries.Range(now.Add(-i.options.storageDuration), now)
 	count, _ := i.eventSeries.Range(now.Add(-i.options.recentDuration), now)
 	return count, totalCount
@@ -86,11 +90,11 @@ func (i *item[K]) decayMax() {
 
 func (i *item[K]) updateMax(score float64) {
 	i.max = score
-	i.maxTime = time.Now()
+	i.maxTime = timeseries.DefaultClock.Now()
 }
 
 func (i *item[K]) computeExponentialDecayMultiplier() float64 {
-	return math.Pow(0.5, float64(time.Now().Unix()-i.maxTime.Unix())/i.options.halfLife.Seconds())
+	return math.Pow(0.5, float64(timeseries.DefaultClock.Now().Unix()-i.maxTime.Unix())/i.options.halfLife.Seconds())
}
 
 func computeKullbackLeibler(probability float64, expectation float64) float64 {
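
A quick worked example of the decay factor computeExponentialDecayMultiplier produces, 0.5^(elapsed/halfLife): with the 30-minute HalfLife the alerting sink configures later in this diff and a peak that is an hour old, the stored maximum is scaled to a quarter. The snippet below is illustrative only; the duration values are assumptions chosen for the example.

package main

import (
	"fmt"
	"math"
	"time"
)

func main() {
	halfLife := 30 * time.Minute // HalfLife used by the alerting sink below
	elapsed := time.Hour         // hypothetical time since this talkgroup's score last peaked

	// Same shape as computeExponentialDecayMultiplier: 0.5 raised to elapsed/halfLife.
	mult := math.Pow(0.5, elapsed.Seconds()/halfLife.Seconds())
	fmt.Printf("decay multiplier after %s: %.2f\n", elapsed, mult) // 0.25 after two half-lives
}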

View file

@@ -7,6 +7,8 @@ type score[K comparable] struct {
 	Expectation float64
 	Maximum float64
 	KLScore float64
+	Count float64
+	RecentCount float64
 }
 
 type Scores[K comparable] []score[K]

View file

@@ -25,7 +25,7 @@ var defaultHalfLife = 2 * time.Hour
 var defaultRecentDuration = 5 * time.Minute
 var defaultStorageDuration = 7 * 24 * time.Hour
 var defaultMaxResults = 100
-var defaultBaseCount = 3
+var defaultBaseCount = 1
 var defaultScoreThreshold = 0.01
 var defaultCountThreshold = 3.0

View file

@@ -16,9 +16,11 @@ import (
 )
 
 const (
-	StorageLookbackDays = 2
-	HalfLife = time.Hour
-	RecentDuration = time.Hour
+	StorageLookbackDays = 4
+	HalfLife = 30 * time.Minute
+	RecentDuration = 12 * time.Hour
+	ScoreThreshold = -1
+	CountThreshold = 1
 )
 
 type AlertSink struct {
@@ -26,6 +28,14 @@ type AlertSink struct {
 	scorer trending.Scorer[cl.Talkgroup]
 }
 
+type myClock struct {
+	offset time.Duration
+}
+
+func (c *myClock) Now() time.Time {
+	return time.Now().Add(c.offset)
+}
+
 func NewSink(ctx context.Context) *AlertSink {
 	as := &AlertSink{
 		scorer: trending.NewScorer[cl.Talkgroup](
@@ -33,6 +43,8 @@ func NewSink(ctx context.Context) *AlertSink {
 			trending.WithStorageDuration[cl.Talkgroup](StorageLookbackDays*24*time.Hour),
 			trending.WithRecentDuration[cl.Talkgroup](RecentDuration),
 			trending.WithHalfLife[cl.Talkgroup](HalfLife),
+			trending.WithScoreThreshold[cl.Talkgroup](ScoreThreshold),
+			trending.WithCountThreshold[cl.Talkgroup](CountThreshold),
 		),
 	}
 
@@ -41,41 +53,67 @@ func NewSink(ctx context.Context) *AlertSink {
 	return as
 }
 
+func newTimeSeries(id cl.Talkgroup) trending.TimeSeries {
+	ts, _ := timeseries.NewTimeSeries(timeseries.WithGranularities(
+		[]timeseries.Granularity{
+			{Granularity: time.Second, Count: 60},
+			{Granularity: time.Minute, Count: 60},
+			{Granularity: time.Hour, Count: 24},
+			{Granularity: time.Hour * 24, Count: StorageLookbackDays},
+		},
+	))
+
+	return ts
+}
+
 func (as *AlertSink) startBackfill(ctx context.Context) {
-	since := time.Now().Add(StorageLookbackDays * -24 * time.Hour)
+	now := time.Now()
+	cl := &myClock{-18 * time.Hour}
+	timeseries.DefaultClock = cl
+	since := now.Add(StorageLookbackDays * -24 * 3 * time.Hour)
 	log.Debug().Time("since", since).Msg("starting stats backfill")
 	count, err := as.backfill(ctx, since)
 	if err != nil {
 		log.Error().Err(err).Msg("backfill failed")
 		return
 	}
-	log.Debug().Int("count", count).Int("len", as.scorer.Score().Len()).Msg("backfill finished")
+	log.Debug().Int("count", count).Str("in", time.Now().Sub(now).String()).Int("len", as.scorer.Score().Len()).Msg("backfill finished")
 
-	as.printScores()
+	for {
+		fmt.Printf("offs: %s\n", cl.offset.String())
+		as.printScores(ctx)
+		cl.offset += time.Minute * 5
+		if cl.offset == time.Minute*5 {
+			break
+		}
+	}
 }
 
-type score[K comparable] struct {
-	ID K
-	Score float64
-	Probability float64
-	Expectation float64
-	Maximum float64
-	KLScore float64
-}
-
-func (as *AlertSink) printScores() {
+func (as *AlertSink) printScores(ctx context.Context) {
+	db := database.FromCtx(ctx)
+	as.Lock()
+	defer as.Unlock()
 	scores := as.scorer.Score()
 	fmt.Printf("score len is %d\n", scores.Len())
+	//const scoreMult = 1000000000
+	const scoreMult = 1
 	for _, s := range scores {
-		fmt.Printf("%d:%d score %f prob %f exp %f max %f kl %f", s.ID.System, s.ID.Talkgroup, s.Score,
-			s.Probability, s.Expectation, s.Maximum, s.KLScore)
+		if s.ID.Talkgroup != 1616 && s.ID.Talkgroup != 1617 {
+			continue
+		}
+		tg, _ := db.GetTalkgroup(ctx, int(s.ID.System), int(s.ID.Talkgroup))
+		tgn := ""
+		if tg.Name != nil {
+			tgn = *tg.Name
+		}
+		fmt.Printf("%s\t\t\t%d:%d c %f\trc %f\tscore %f\tprob %f\texp %f\tmax %f\tkl %f\n", tgn, s.ID.System, s.ID.Talkgroup,
+			s.Count, s.RecentCount, s.Score*scoreMult, s.Probability, s.Expectation, s.Maximum, s.KLScore)
 	}
 }
 
 func (as *AlertSink) backfill(ctx context.Context, since time.Time) (count int, err error) {
 	db := database.FromCtx(ctx)
-	const backfillStatsQuery = `SELECT system, talkgroup, call_date FROM calls WHERE call_date > $1`
-	rows, err := db.Query(ctx, backfillStatsQuery, since)
+	const backfillStatsQuery = `SELECT system, talkgroup, call_date FROM calls WHERE call_date > $1 AND call_date < $2`
+	rows, err := db.Query(ctx, backfillStatsQuery, since, timeseries.DefaultClock.Now())
 	if err != nil {
 		return count, err
 	}
@@ -101,22 +139,14 @@ func (as *AlertSink) backfill(ctx context.Context, since time.Time) (count int,
 	return count, nil
 }
 
-func newTimeSeries(id cl.Talkgroup) trending.TimeSeries {
-	ts, _ := timeseries.NewTimeSeries(timeseries.WithGranularities(
-		[]timeseries.Granularity{
-			{Granularity: time.Second, Count: 60},
-			{Granularity: time.Minute, Count: 10},
-			{Granularity: time.Hour, Count: 24},
-			{Granularity: time.Hour * 24, Count: StorageLookbackDays},
-		},
-	))
-
-	return ts
-}
-
 func (as *AlertSink) SinkType() string {
 	return "alerting"
 }
 
-func (ns *AlertSink) Call(ctx context.Context, call *cl.Call) error {
+func (as *AlertSink) Call(ctx context.Context, call *cl.Call) error {
+	as.Lock()
+	defer as.Unlock()
+	as.scorer.AddEvent(call.TalkgroupTuple(), call.DateTime)
 	return nil
 }
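
The startBackfill changes above drive a debugging replay: timeseries.DefaultClock is swapped for a myClock rewound 18 hours, then stepped forward in 5-minute increments, printing scores at each simulated instant. Below is a self-contained sketch of that pattern; the offsetClock name is hypothetical, while the -18h start and 5-minute step mirror the values in the commit.

package main

import (
	"fmt"
	"time"
)

// offsetClock mirrors the myClock pattern above: "now" shifted by a movable offset.
type offsetClock struct{ offset time.Duration }

func (c *offsetClock) Now() time.Time { return time.Now().Add(c.offset) }

func main() {
	cl := &offsetClock{offset: -18 * time.Hour}

	// Walk the simulated clock forward in 5-minute steps until it reaches real time,
	// scoring (here, just printing) the state at each step, as the backfill loop does.
	for ; cl.offset <= 0; cl.offset += 5 * time.Minute {
		fmt.Println(cl.Now().Format(time.RFC3339))
	}
}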