Fix un-deletion for activitypub / mastodon, improve queue handling

Jan-Lukas Else 2022-03-31 14:55:36 +02:00
parent c38c5e8ed9
commit 5f969f8ba8
17 changed files with 102 additions and 61 deletions

View File

@@ -300,7 +300,7 @@ func (a *goBlog) apPost(p *post) {
 	a.apSendToAllFollowers(p.Blog, map[string]any{
 		"@context":  []string{asContext},
 		"actor":     a.apIri(a.cfg.Blogs[p.Blog]),
-		"id":        a.fullPostURL(p),
+		"id":        a.activityPubId(p),
 		"published": n.Published,
 		"type":      "Create",
 		"object":    n,
@@ -311,7 +311,7 @@ func (a *goBlog) apUpdate(p *post) {
 	a.apSendToAllFollowers(p.Blog, map[string]any{
 		"@context":  []string{asContext},
 		"actor":     a.apIri(a.cfg.Blogs[p.Blog]),
-		"id":        a.fullPostURL(p),
+		"id":        a.activityPubId(p),
 		"published": time.Now().Format("2006-01-02T15:04:05-07:00"),
 		"type":      "Update",
 		"object":    a.toASNote(p),
@@ -323,22 +323,22 @@ func (a *goBlog) apDelete(p *post) {
 		"@context": []string{asContext},
 		"actor":    a.apIri(a.cfg.Blogs[p.Blog]),
 		"type":     "Delete",
-		"object":   a.fullPostURL(p),
+		"object":   a.activityPubId(p),
 	})
 }
 
 func (a *goBlog) apUndelete(p *post) {
-	a.apSendToAllFollowers(p.Blog, map[string]any{
-		"@context": []string{asContext},
-		"actor":    a.apIri(a.cfg.Blogs[p.Blog]),
-		"type":     "Undo",
-		"object": map[string]any{
-			"@context": []string{asContext},
-			"actor":    a.apIri(a.cfg.Blogs[p.Blog]),
-			"type":     "Delete",
-			"object":   a.fullPostURL(p),
-		},
-	})
+	// The optimal way to do this would be to send a "Undo Delete" activity,
+	// but that doesn't work with Mastodon yet.
+	// see:
+	// https://socialhub.activitypub.rocks/t/soft-deletes-and-restoring-deleted-posts/2318
+	// https://github.com/mastodon/mastodon/issues/17553
+
+	// Update "activityPubVersion" parameter to current timestamp in nanoseconds
+	p.Parameters[activityPubVersionParam] = []string{fmt.Sprintf("%d", utcNowNanos())}
+	a.db.replacePostParam(p.Path, activityPubVersionParam, p.Parameters[activityPubVersionParam])
+	// Post as new post
+	a.apPost(p)
 }
 
 func (a *goBlog) apAccept(blogName string, blog *configBlog, follow map[string]any) {
@@ -373,7 +373,7 @@ func (a *goBlog) apAccept(blogName string, blog *configBlog, follow map[string]a
 		"object":   follow,
 	}
 	_, accept["id"] = a.apNewID(blog)
-	_ = a.db.apQueueSendSigned(a.apIri(blog), inbox, accept)
+	_ = a.apQueueSendSigned(a.apIri(blog), inbox, accept)
 }
 
 func (a *goBlog) apSendToAllFollowers(blog string, activity any) {
@@ -382,13 +382,13 @@ func (a *goBlog) apSendToAllFollowers(blog string, activity any) {
 		log.Println("Failed to retrieve inboxes:", err.Error())
 		return
 	}
-	a.db.apSendTo(a.apIri(a.cfg.Blogs[blog]), activity, inboxes)
+	a.apSendTo(a.apIri(a.cfg.Blogs[blog]), activity, inboxes)
 }
 
-func (db *database) apSendTo(blogIri string, activity any, inboxes []string) {
+func (a *goBlog) apSendTo(blogIri string, activity any, inboxes []string) {
 	for _, i := range inboxes {
 		go func(inbox string) {
-			_ = db.apQueueSendSigned(blogIri, inbox, activity)
+			_ = a.apQueueSendSigned(blogIri, inbox, activity)
 		}(i)
 	}
 }

View File

@@ -47,7 +47,7 @@ func (a *goBlog) initAPSendQueue() {
 	})
 }
 
-func (db *database) apQueueSendSigned(blogIri, to string, activity any) error {
+func (a *goBlog) apQueueSendSigned(blogIri, to string, activity any) error {
 	body, err := json.Marshal(activity)
 	if err != nil {
 		return err
@@ -61,7 +61,7 @@ func (db *database) apQueueSendSigned(blogIri, to string, activity any) error {
 	}).encode(buf); err != nil {
 		return err
 	}
-	return db.enqueue("ap", buf.Bytes(), time.Now())
+	return a.enqueue("ap", buf.Bytes(), time.Now())
 }
 
 func (r *apRequest) encode(w io.Writer) error {

View File

@@ -101,7 +101,7 @@ func (a *goBlog) toASNote(p *post) *asNote {
 		Context:      []string{asContext},
 		To:           []string{"https://www.w3.org/ns/activitystreams#Public"},
 		MediaType:    contenttype.HTML,
-		ID:           a.fullPostURL(p),
+		ID:           a.activityPubId(p),
 		URL:          a.fullPostURL(p),
 		AttributedTo: a.apIri(a.cfg.Blogs[p.Blog]),
 	}
@@ -152,6 +152,16 @@ func (a *goBlog) toASNote(p *post) *asNote {
 	return as
 }
 
+const activityPubVersionParam = "activitypubversion"
+
+func (a *goBlog) activityPubId(p *post) string {
+	fu := a.fullPostURL(p)
+	if version := p.firstParameter(activityPubVersionParam); version != "" {
+		return fu + "?activitypubversion=" + version
+	}
+	return fu
+}
+
 func (a *goBlog) serveActivityStreams(blog string, w http.ResponseWriter, r *http.Request) {
 	b := a.cfg.Blogs[blog]
 	publicKeyDer, err := x509.MarshalPKIXPublicKey(&(a.apPrivateKey.PublicKey))
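
With apUndelete now re-posting instead of sending an Undo activity, the new activityPubId is what makes the restored post look new to remote servers: once the "activitypubversion" parameter holds the restore timestamp, the object ID changes while URL keeps pointing at the canonical post. A minimal standalone sketch of that idea (the buildVersionedId helper and the example URL are illustrative, not goBlog code):

package main

import (
	"fmt"
	"time"
)

// buildVersionedId mirrors the idea behind activityPubId: append the
// "activitypubversion" parameter so a restored post gets a fresh object ID.
func buildVersionedId(postURL, version string) string {
	if version == "" {
		return postURL
	}
	return postURL + "?activitypubversion=" + version
}

func main() {
	version := fmt.Sprintf("%d", time.Now().UTC().UnixNano())
	// Prints something like:
	// https://example.org/posts/hello?activitypubversion=1648731336000000000
	fmt.Println(buildVersionedId("https://example.org/posts/hello", version))
}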

app.go
View File

@@ -73,6 +73,8 @@ type goBlog struct {
 	regexRedirects []*regexRedirect
 	// Sessions
 	loginSessions, captchaSessions *dbSessionStore
+	// Queue triggers
+	queueTriggers []chan struct{}
 	// Shutdown
 	shutdown shutdowner.Shutdowner
 	// Template strings

View File

@@ -3,6 +3,7 @@ package main
 import (
 	"bytes"
 	"context"
+	"io"
 	"log"
 	"net/http"
 	"sort"
@@ -66,7 +67,7 @@ func (a *goBlog) serveBlogrollExport(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 	w.Header().Set(contentType, contenttype.XMLUTF8)
-	_, _ = opmlBuf.WriteTo(w)
+	_, _ = io.Copy(w, opmlBuf)
 }
 
 func (a *goBlog) getBlogrollOutlines(blog string) ([]*opml.Outline, error) {
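
Several handlers in this commit switch from buf.WriteTo(w) to io.Copy(w, buf). The two are behaviorally equivalent here, because io.Copy delegates to src.WriteTo(dst) whenever the source implements io.WriterTo (which *bytes.Buffer does); the change mainly makes the call sites read uniformly. A small standalone illustration (example buffer content only):

package main

import (
	"bytes"
	"io"
	"os"
)

func main() {
	buf := bytes.NewBufferString("<opml version=\"2.0\"></opml>\n")
	// Equivalent to _, _ = buf.WriteTo(os.Stdout): io.Copy detects that
	// *bytes.Buffer implements io.WriterTo and calls WriteTo internally.
	_, _ = io.Copy(os.Stdout, buf)
}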

View File

@@ -5,7 +5,6 @@ import (
 	"errors"
 	"log"
 	"math"
-	"strings"
 
 	"github.com/tkrajina/gpxgo/gpx"
 	"golang.org/x/text/language"
@@ -102,7 +101,7 @@ func trackParseGPX(gpxString string) (result *trackParseResult, err error) {
 		points []*trackPoint
 	}
 
-	result.gpxData, err = gpx.Parse(strings.NewReader(gpxString))
+	result.gpxData, err = gpx.ParseString(gpxString)
 	if err != nil {
 		return nil, err
 	}

go.mod
View File

@@ -50,7 +50,8 @@ require (
 	github.com/spf13/viper v1.10.1
 	github.com/stretchr/testify v1.7.1
 	github.com/tdewolff/minify/v2 v2.10.0
-	github.com/tkrajina/gpxgo v1.2.1
+	// master
+	github.com/tkrajina/gpxgo v1.2.2-0.20220217201249-321f19554eec
 	github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80
 	github.com/vcraescu/go-paginator v1.0.1-0.20201114172518-2cfc59fe05c2
 	github.com/yuin/goldmark v1.4.11

go.sum
View File

@@ -438,8 +438,8 @@ github.com/tdewolff/parse/v2 v2.5.27/go.mod h1:WzaJpRSbwq++EIQHYIRTpbYKNA3gn9it1
 github.com/tdewolff/test v1.0.6 h1:76mzYJQ83Op284kMT+63iCNCI7NEERsIN8dLM+RiKr4=
 github.com/tdewolff/test v1.0.6/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE=
 github.com/thoas/go-funk v0.9.1 h1:O549iLZqPpTUQ10ykd26sZhzD+rmR5pWhuElrhbC20M=
-github.com/tkrajina/gpxgo v1.2.1 h1:MJJtT4Re5btDGg89brFDrUP3EWz+cBmyo8pQwV0ZOak=
-github.com/tkrajina/gpxgo v1.2.1/go.mod h1:795sjVRFo5wWyN6oOZp0RYienGGBJjpAlgOz2nCngA0=
+github.com/tkrajina/gpxgo v1.2.2-0.20220217201249-321f19554eec h1:o5aL1yX+/xzvK4QfZe/iDcCoRrFoOQ/Dn43jV/thKBM=
+github.com/tkrajina/gpxgo v1.2.2-0.20220217201249-321f19554eec/go.mod h1:795sjVRFo5wWyN6oOZp0RYienGGBJjpAlgOz2nCngA0=
 github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 h1:nrZ3ySNYwJbSpD6ce9duiP+QkD3JuLCcWkdaehUS/3Y=
 github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80/go.mod h1:iFyPdL66DjUD96XmzVL3ZntbzcflLnznH0fr99w5VqE=
 github.com/u-root/uio v0.0.0-20210528114334-82958018845c h1:BFvcl34IGnw8yvJi8hlqLFo9EshRInwWBs2M5fGWzQA=

View File

@@ -81,7 +81,7 @@ func (a *goBlog) serveMicropubQuery(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 	w.Header().Set(contentType, contenttype.JSONUTF8)
-	_, _ = buf.WriteTo(w)
+	_, _ = io.Copy(w, buf)
 }
 
 func (a *goBlog) serveMicropubPost(w http.ResponseWriter, r *http.Request) {

View File

@@ -2,6 +2,7 @@ package main
 
 import (
 	"encoding/json"
+	"io"
 	"net/http"
 
 	"go.goblog.app/app/pkgs/bufferpool"
@@ -25,7 +26,7 @@ func (a *goBlog) serveNodeInfoDiscover(w http.ResponseWriter, r *http.Request) {
 	}
 	w.Header().Set(contentType, contenttype.JSONUTF8)
 	mw := a.min.Writer(contenttype.JSON, w)
-	_, _ = buf.WriteTo(mw)
+	_, _ = io.Copy(mw, buf)
 	_ = mw.Close()
 }
 
@@ -60,6 +61,6 @@ func (a *goBlog) serveNodeInfo(w http.ResponseWriter, r *http.Request) {
 	}
 	w.Header().Set(contentType, contenttype.JSONUTF8)
 	mw := a.min.Writer(contenttype.JSON, w)
-	_, _ = buf.WriteTo(mw)
+	_, _ = io.Copy(mw, buf)
 	_ = mw.Close()
 }

View File

@@ -74,6 +74,6 @@ func MergeMP3(out io.Writer, in ...io.Reader) error {
 	}
 
 	// Copy the temporary output to the output
-	_, err := tmpOut.WriteTo(out)
+	_, err := io.Copy(out, tmpOut)
 	return err
 }

View File

@@ -18,21 +18,28 @@ type queueItem struct {
 	id       int
 }
 
-func (db *database) enqueue(name string, content []byte, schedule time.Time) error {
+func (a *goBlog) enqueue(name string, content []byte, schedule time.Time) error {
 	if len(content) == 0 {
 		return errors.New("empty content")
 	}
-	_, err := db.exec(
+	_, err := a.db.exec(
 		"insert into queue (name, content, schedule) values (@name, @content, @schedule)",
 		sql.Named("name", name),
 		sql.Named("content", content),
 		sql.Named("schedule", schedule.UTC().Format(time.RFC3339Nano)),
 	)
-	return err
+	if err != nil {
+		return err
+	}
+	// Trigger all queue listeners
+	for _, trigger := range a.queueTriggers {
+		trigger <- struct{}{}
+	}
+	return nil
 }
 
-func (db *database) reschedule(qi *queueItem, dur time.Duration) error {
-	_, err := db.exec(
+func (a *goBlog) reschedule(qi *queueItem, dur time.Duration) error {
+	_, err := a.db.exec(
 		"update queue set schedule = @schedule, content = @content where id = @id",
 		sql.Named("schedule", qi.schedule.Add(dur).UTC().Format(time.RFC3339Nano)),
 		sql.Named("content", qi.content),
@@ -41,13 +48,13 @@ func (db *database) reschedule(qi *queueItem, dur time.Duration) error {
 	return err
 }
 
-func (db *database) dequeue(qi *queueItem) error {
-	_, err := db.exec("delete from queue where id = @id", sql.Named("id", qi.id))
+func (a *goBlog) dequeue(qi *queueItem) error {
+	_, err := a.db.exec("delete from queue where id = @id", sql.Named("id", qi.id))
 	return err
 }
 
-func (db *database) peekQueue(ctx context.Context, name string) (*queueItem, error) {
-	row, err := db.queryRowContext(
+func (a *goBlog) peekQueue(ctx context.Context, name string) (*queueItem, error) {
+	row, err := a.db.queryRowContext(
 		ctx,
 		"select id, name, content, schedule from queue where schedule <= @schedule and name = @name order by schedule asc limit 1",
 		sql.Named("name", name),
@@ -75,6 +82,10 @@ func (db *database) peekQueue(ctx context.Context, name string) (*queueItem, err
 type queueProcessFunc func(qi *queueItem, dequeue func(), reschedule func(time.Duration))
 
 func (a *goBlog) listenOnQueue(queueName string, wait time.Duration, process queueProcessFunc) {
+	// Queue trigger
+	trigger := make(chan struct{})
+	a.queueTriggers = append(a.queueTriggers, trigger)
+	// Start goroutine to listen on queue
 	go func() {
 		done := false
 		var wg sync.WaitGroup
@@ -88,9 +99,9 @@ func (a *goBlog) listenOnQueue(queueName string, wait time.Duration, process que
 			log.Println("Stopped queue:", queueName)
 		})
 		for !done {
-			qi, err := a.db.peekQueue(ctx, queueName)
+			qi, err := a.peekQueue(ctx, queueName)
 			if err != nil {
-				log.Println("queue error:", err.Error())
+				log.Println("queue peek error:", err.Error())
 				continue
 			}
 			if qi == nil {
@@ -98,19 +109,22 @@ func (a *goBlog) listenOnQueue(queueName string, wait time.Duration, process que
 				select {
 				case <-time.After(wait):
 					continue
+				case <-trigger:
+					continue
 				case <-ctx.Done():
+					done = true
 					continue
 				}
 			}
 			process(
 				qi,
 				func() {
-					if err := a.db.dequeue(qi); err != nil {
+					if err := a.dequeue(qi); err != nil {
 						log.Println("queue dequeue error:", err.Error())
 					}
 				},
 				func(dur time.Duration) {
-					if err := a.db.reschedule(qi, dur); err != nil {
+					if err := a.reschedule(qi, dur); err != nil {
 						log.Println("queue reschedule error:", err.Error())
 					}
 				},
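
The new trigger channels let enqueue wake every queue listener immediately instead of leaving fresh items to wait for the next polling interval; the listener drains its trigger in the same select that also waits on the polling timer and context cancellation. A reduced, self-contained sketch of this wake-up pattern (worker names and durations are illustrative, not goBlog's API):

package main

import (
	"fmt"
	"time"
)

// worker illustrates the listen loop: it checks the queue, then blocks until
// the polling interval elapses, a trigger arrives (something was enqueued),
// or shutdown is requested.
func worker(trigger <-chan struct{}, wait time.Duration, stop <-chan struct{}) {
	for {
		fmt.Println("checking queue") // stands in for peekQueue + process
		select {
		case <-time.After(wait): // regular polling interval
		case <-trigger: // enqueue happened, check again right away
		case <-stop: // shutdown
			return
		}
	}
}

func main() {
	trigger := make(chan struct{})
	stop := make(chan struct{})
	go worker(trigger, 10*time.Second, stop)

	time.Sleep(100 * time.Millisecond)
	trigger <- struct{}{} // wakes the worker without waiting the full 10s
	time.Sleep(100 * time.Millisecond)
	close(stop)
	time.Sleep(100 * time.Millisecond)
}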

View File

@@ -20,47 +20,46 @@ func Test_queue(t *testing.T) {
 	}
 	_ = app.initDatabase(false)
 	defer app.db.close()
-	db := app.db
 
 	time1 := time.Now()
 
-	err := db.enqueue("test", []byte(""), time.Now())
+	err := app.enqueue("test", []byte(""), time.Now())
 	require.Error(t, err)
 
-	err = db.enqueue("test", []byte("1"), time1)
+	err = app.enqueue("test", []byte("1"), time1)
 	require.NoError(t, err)
 
-	err = db.enqueue("test", []byte("2"), time.Now())
+	err = app.enqueue("test", []byte("2"), time.Now())
 	require.NoError(t, err)
 
-	qi, err := db.peekQueue(context.Background(), "abc")
+	qi, err := app.peekQueue(context.Background(), "abc")
 	require.NoError(t, err)
 	require.Nil(t, qi)
 
-	qi, err = db.peekQueue(context.Background(), "test")
+	qi, err = app.peekQueue(context.Background(), "test")
 	require.NoError(t, err)
 	require.NotNil(t, qi)
 	require.Equal(t, []byte("1"), qi.content)
 	require.Equal(t, time1.UTC(), qi.schedule.UTC())
 
-	err = db.reschedule(qi, 1*time.Second)
+	err = app.reschedule(qi, 1*time.Second)
 	require.NoError(t, err)
 
-	qi, err = db.peekQueue(context.Background(), "test")
+	qi, err = app.peekQueue(context.Background(), "test")
 	require.NoError(t, err)
 	require.NotNil(t, qi)
 	require.Equal(t, []byte("2"), qi.content)
 
-	err = db.dequeue(qi)
+	err = app.dequeue(qi)
 	require.NoError(t, err)
 
-	qi, err = db.peekQueue(context.Background(), "test")
+	qi, err = app.peekQueue(context.Background(), "test")
 	require.NoError(t, err)
 	require.Nil(t, qi)
 
 	time.Sleep(1 * time.Second)
 
-	qi, err = db.peekQueue(context.Background(), "test")
+	qi, err = app.peekQueue(context.Background(), "test")
 	require.NoError(t, err)
 	require.NotNil(t, qi)
 	require.Equal(t, []byte("1"), qi.content)
@@ -77,20 +76,19 @@ func Benchmark_queue(b *testing.B) {
 	}
 	_ = app.initDatabase(false)
 	defer app.db.close()
-	db := app.db
 
-	err := db.enqueue("test", []byte("1"), time.Now())
+	err := app.enqueue("test", []byte("1"), time.Now())
 	require.NoError(b, err)
 
 	b.Run("Peek with item", func(b *testing.B) {
 		for i := 0; i < b.N; i++ {
-			_, _ = db.peekQueue(context.Background(), "test")
+			_, _ = app.peekQueue(context.Background(), "test")
 		}
 	})
 
 	b.Run("Peek without item", func(b *testing.B) {
 		for i := 0; i < b.N; i++ {
-			_, _ = db.peekQueue(context.Background(), "abc")
+			_, _ = app.peekQueue(context.Background(), "abc")
 		}
 	})
 }

View File

@@ -193,6 +193,10 @@ func utcNowString() string {
 	return time.Now().UTC().Format(time.RFC3339)
 }
 
+func utcNowNanos() int64 {
+	return time.Now().UTC().UnixNano()
+}
+
 type stringPair struct {
 	First, Second string
 }

View File

@@ -144,3 +144,13 @@ func Test_lowerUnescaptedPath(t *testing.T) {
 	assert.Equal(t, "/de/posts/fahrradanhänger", lowerUnescapedPath("/de/posts/fahrradanh%C3%84nger"))
 	assert.Equal(t, "/de/posts/fahrradanhänger", lowerUnescapedPath("/de/posts/fahrradanhÄnger"))
 }
+
+func Fuzz_lowerUnescaptedPath(f *testing.F) {
+	f.Add("/de/posts/fahrradanh%C3%84nger")
+	f.Fuzz(func(t *testing.T, str string) {
+		out := lowerUnescapedPath(str)
+		if out == "" {
+			t.Error("Empty output")
+		}
+	})
+}
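
The new fuzz target uses Go 1.18's built-in fuzzing, with the seed corpus entry taken from the existing unit test. It runs as a normal test by default and can be fuzzed explicitly, for example like this (the package path is assumed):

go test -run=^$ -fuzz=Fuzz_lowerUnescaptedPath -fuzztime=30s .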

View File

@@ -44,7 +44,7 @@ func (a *goBlog) queueMention(m *mention) error {
 	if err := gob.NewEncoder(buf).Encode(m); err != nil {
 		return err
 	}
-	return a.db.enqueue("wm", buf.Bytes(), time.Now())
+	return a.enqueue("wm", buf.Bytes(), time.Now())
 }
 
 func (a *goBlog) verifyMention(m *mention) error {

View File

@@ -76,10 +76,11 @@ func Test_webmentions(t *testing.T) {
 	mentions = app.db.getWebmentionsByAddress("https://example.com/t%C3%A4st")
 	assert.Len(t, mentions, 1)
 
-	app.db.deleteWebmention(&mention{
+	err = app.db.deleteWebmention(&mention{
 		Source: "https://example.net/test",
 		Target: "https://example.com/T%C3%84ST",
 	})
+	assert.NoError(t, err)
 
 	mentions = app.db.getWebmentionsByAddress("https://example.com/täst")
 	assert.Len(t, mentions, 0)