Skip to content
This repository has been archived by the owner on Dec 17, 2023. It is now read-only.

Commit

Permalink
fix: better error handling, code refactoring, more tests and adjustments
Browse files Browse the repository at this point in the history
  • Loading branch information
piraces committed Feb 9, 2023
1 parent 99d5f0f commit 395c006
Show file tree
Hide file tree
Showing 13 changed files with 278 additions and 58 deletions.
2 changes: 1 addition & 1 deletion .env.sample
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ SECRET=test
DB_DIR="/db/rsslay.sqlite"
DEFAULT_PROFILE_PICTURE_URL="https://i.imgur.com/MaceU96.png"
SECRET="CHANGE_ME"
VERSION=0.3.5
VERSION=0.3.6
REPLAY_TO_RELAYS=false
RELAYS_TO_PUBLISH_TO=""
DEFAULT_WAIT_TIME_BETWEEN_BATCHES=60000
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ ENV PORT="8080"
ENV DB_DIR="/db/rsslay.sqlite"
ENV DEFAULT_PROFILE_PICTURE_URL="https://i.imgur.com/MaceU96.png"
ENV SECRET="CHANGE_ME"
ENV VERSION=0.3.5
ENV VERSION=0.3.6
ENV REPLAY_TO_RELAYS=false
ENV RELAYS_TO_PUBLISH_TO=""
ENV DEFAULT_WAIT_TIME_BETWEEN_BATCHES=60000
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile.fly
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ ENV PORT="8080"
ENV DB_DIR="/db/rsslay.sqlite"
ENV DEFAULT_PROFILE_PICTURE_URL="https://i.imgur.com/MaceU96.png"
ENV SECRET="CHANGE_ME"
ENV VERSION=0.3.5
ENV VERSION=0.3.6
ENV REPLAY_TO_RELAYS=false
ENV RELAYS_TO_PUBLISH_TO=""
ENV DEFAULT_WAIT_TIME_BETWEEN_BATCHES=60000
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile.railwayapp
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ ENV PORT=$PORT
ENV DB_DIR=$DB_DIR
ENV DEFAULT_PROFILE_PICTURE_URL=$DEFAULT_PROFILE_PICTURE_URL
ENV SECRET=$SECRET
ENV VERSION=0.3.5
ENV VERSION=0.3.6
ENV REPLAY_TO_RELAYS=false
ENV RELAYS_TO_PUBLISH_TO=""
ENV DEFAULT_WAIT_TIME_BETWEEN_BATCHES=60000
Expand Down
61 changes: 21 additions & 40 deletions cmd/rsslay/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import (
_ "github.com/mattn/go-sqlite3"
"github.com/nbd-wtf/go-nostr"
"github.com/piraces/rsslay/internal/handlers"
"github.com/piraces/rsslay/pkg/events"
"github.com/piraces/rsslay/pkg/feed"
"github.com/piraces/rsslay/pkg/replayer"
"github.com/piraces/rsslay/scripts"
Expand All @@ -22,7 +23,6 @@ import (
"net/http"
"os"
"path"
"strings"
"sync"
"time"
)
Expand Down Expand Up @@ -122,25 +122,12 @@ func (r *Relay) UpdateListeningFilters() {
filters := relayer.GetListeningFilters()
log.Printf("checking for updates; %d filters active", len(filters))

var events []replayer.EventWithPrivateKey
var parsedEvents []replayer.EventWithPrivateKey
for _, filter := range filters {
if filter.Kinds == nil || slices.Contains(filter.Kinds, nostr.KindTextNote) {
for _, pubkey := range filter.Authors {
pubkey = strings.TrimSpace(pubkey)
row := r.db.QueryRow("SELECT privatekey, url FROM feeds WHERE publickey=$1", pubkey)

var entity feed.Entity
err := row.Scan(&entity.PrivateKey, &entity.URL)
if err != nil && err == sql.ErrNoRows {
continue
} else if err != nil {
log.Fatalf("failed when trying to retrieve row with pubkey '%s': %v", pubkey, err)
}

parsedFeed, err := feed.ParseFeed(entity.URL)
if err != nil {
log.Printf("failed to parse feed at url %q: %v", entity.URL, err)
feed.DeleteInvalidFeed(entity.URL, r.db)
parsedFeed, entity := events.GetParsedFeedForPubKey(pubkey, r.db)
if parsedFeed == nil {
continue
}

Expand All @@ -155,13 +142,13 @@ func (r *Relay) UpdateListeningFilters() {
_ = evt.Sign(entity.PrivateKey)
r.updates <- evt
r.lastEmitted.Store(entity.URL, last.(uint32))
events = append(events, replayer.EventWithPrivateKey{Event: evt, PrivateKey: entity.PrivateKey})
parsedEvents = append(parsedEvents, replayer.EventWithPrivateKey{Event: evt, PrivateKey: entity.PrivateKey})
}
}
}
}
}
r.AttemptReplayEvents(events)
r.AttemptReplayEvents(parsedEvents)
}
}

Expand Down Expand Up @@ -202,34 +189,22 @@ func (b store) DeleteEvent(_, _ string) error {
}

func (b store) QueryEvents(filter *nostr.Filter) ([]nostr.Event, error) {
var events []nostr.Event
var parsedEvents []nostr.Event
var eventsToReplay []replayer.EventWithPrivateKey

if filter.IDs != nil || len(filter.Tags) > 0 {
return events, nil
return parsedEvents, nil
}

for _, pubkey := range filter.Authors {
pubkey = strings.TrimSpace(pubkey)
row := relayInstance.db.QueryRow("SELECT privatekey, url FROM feeds WHERE publickey=$1", pubkey)
parsedFeed, entity := events.GetParsedFeedForPubKey(pubkey, relayInstance.db)

var entity feed.Entity
err := row.Scan(&entity.PrivateKey, &entity.URL)
if err != nil && err == sql.ErrNoRows {
continue
} else if err != nil {
log.Fatalf("failed when trying to retrieve row with pubkey '%s': %v", pubkey, err)
}

parsedFeed, err := feed.ParseFeed(entity.URL)
if err != nil {
log.Printf("failed to parse feed at url %q: %v", entity.URL, err)
feed.DeleteInvalidFeed(entity.URL, relayInstance.db)
if parsedFeed == nil {
continue
}

if filter.Kinds == nil || slices.Contains(filter.Kinds, nostr.KindSetMetadata) {
evt := feed.FeedToSetMetadata(pubkey, parsedFeed, entity.URL, relayInstance.EnableAutoNIP05Registration, relayInstance.DefaultProfilePictureUrl)
evt := feed.EntryFeedToSetMetadata(pubkey, parsedFeed, entity.URL, relayInstance.EnableAutoNIP05Registration, relayInstance.DefaultProfilePictureUrl)

if filter.Since != nil && evt.CreatedAt.Before(*filter.Since) {
continue
Expand All @@ -239,7 +214,7 @@ func (b store) QueryEvents(filter *nostr.Filter) ([]nostr.Event, error) {
}

_ = evt.Sign(entity.PrivateKey)
events = append(events, evt)
parsedEvents = append(parsedEvents, evt)
eventsToReplay = append(eventsToReplay, replayer.EventWithPrivateKey{Event: evt, PrivateKey: entity.PrivateKey})
}

Expand Down Expand Up @@ -267,7 +242,7 @@ func (b store) QueryEvents(filter *nostr.Filter) ([]nostr.Event, error) {
last = uint32(evt.CreatedAt.Unix())
}

events = append(events, evt)
parsedEvents = append(parsedEvents, evt)
eventsToReplay = append(eventsToReplay, replayer.EventWithPrivateKey{Event: evt, PrivateKey: entity.PrivateKey})
}

Expand All @@ -277,7 +252,7 @@ func (b store) QueryEvents(filter *nostr.Filter) ([]nostr.Event, error) {

relayInstance.AttemptReplayEvents(eventsToReplay)

return events, nil
return parsedEvents, nil
}

func (r *Relay) InjectEvents() chan nostr.Event {
Expand All @@ -286,7 +261,13 @@ func (r *Relay) InjectEvents() chan nostr.Event {

func main() {
CreateHealthCheck()
defer relayInstance.db.Close()
defer func(db *sql.DB) {
err := db.Close()
if err != nil {
log.Fatalf("failed to close the database connection: %v", err)
}
}(relayInstance.db)

if err := relayer.Start(relayInstance); err != nil {
log.Fatalf("server terminated: %v", err)
}
Expand Down
2 changes: 1 addition & 1 deletion fly.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ app = "rsslay"
internal_port = 8080
protocol = "tcp"
[services.concurrency]
hard_limit = 45
hard_limit = 50
soft_limit = 40
type = "connections"

Expand Down
22 changes: 16 additions & 6 deletions internal/handlers/handlers.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import (
"github.com/nbd-wtf/go-nostr/nip05"
"github.com/nbd-wtf/go-nostr/nip19"
"github.com/piraces/rsslay/pkg/feed"
"github.com/piraces/rsslay/pkg/helpers"
"github.com/piraces/rsslay/web/assets"
"github.com/piraces/rsslay/web/templates"
"html/template"
Expand Down Expand Up @@ -56,12 +57,12 @@ func HandleWebpage(w http.ResponseWriter, r *http.Request, db *sql.DB) {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
defer rows.Close()

for rows.Next() {
var entry Entry
if err := rows.Scan(&entry.PubKey, &entry.Url); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
log.Printf("failed to scan row iterating feeds: %v", err)
continue
}

entry.NPubKey, _ = nip19.EncodePublicKey(entry.PubKey)
Expand Down Expand Up @@ -106,12 +107,12 @@ func HandleSearch(w http.ResponseWriter, r *http.Request, db *sql.DB) {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
defer rows.Close()

for rows.Next() {
var entry Entry
if err := rows.Scan(&entry.PubKey, &entry.Url); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
log.Printf("failed to scan row iterating feeds searching: %v", err)
continue
}

entry.NPubKey, _ = nip19.EncodePublicKey(entry.PubKey)
Expand Down Expand Up @@ -241,6 +242,15 @@ func createFeedEntry(r *http.Request, db *sql.DB, secret *string) *Entry {
entry := Entry{
Error: false,
}

if !helpers.IsValidHttpUrl(urlParam) {
log.Printf("retrieved invalid url from database %q, deleting...", urlParam)
entry.ErrorCode = http.StatusBadRequest
entry.Error = true
entry.ErrorMessage = "Invalid URL provided (must be in absolute format and with https or https scheme)..."
return &entry
}

feedUrl := feed.GetFeedURL(urlParam)
if feedUrl == "" {
entry.ErrorCode = http.StatusBadRequest
Expand Down
39 changes: 39 additions & 0 deletions pkg/events/events.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
package events

import (
	"database/sql"
	"errors"
	"log"
	"strings"

	"github.com/mmcdole/gofeed"
	"github.com/piraces/rsslay/pkg/feed"
	"github.com/piraces/rsslay/pkg/helpers"
)

// GetParsedFeedForPubKey looks up the feed registered for the given public
// key and returns its parsed contents together with the stored feed entity.
// It returns a nil feed (with a partially populated entity) when the public
// key is unknown, the query fails, the stored URL is invalid, or the feed
// cannot be parsed; invalid or unparseable feeds are deleted from the
// database as a side effect.
func GetParsedFeedForPubKey(pubKey string, db *sql.DB) (*gofeed.Feed, feed.Entity) {
	pubKey = strings.TrimSpace(pubKey)
	row := db.QueryRow("SELECT privatekey, url FROM feeds WHERE publickey=$1", pubKey)

	var entity feed.Entity
	err := row.Scan(&entity.PrivateKey, &entity.URL)
	if errors.Is(err, sql.ErrNoRows) {
		// No feed registered for this public key; not an error worth logging.
		return nil, entity
	}
	if err != nil {
		log.Printf("failed when trying to retrieve row with pubkey '%s': %v", pubKey, err)
		return nil, entity
	}

	if !helpers.IsValidHttpUrl(entity.URL) {
		log.Printf("retrieved invalid url from database %q, deleting...", entity.URL)
		feed.DeleteInvalidFeed(entity.URL, db)
		return nil, entity
	}

	parsedFeed, err := feed.ParseFeed(entity.URL)
	if err != nil {
		log.Printf("failed to parse feed at url %q: %v", entity.URL, err)
		feed.DeleteInvalidFeed(entity.URL, db)
		return nil, entity
	}

	return parsedFeed, entity
}
114 changes: 114 additions & 0 deletions pkg/events/events_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
package events

import (
"errors"
"github.com/DATA-DOG/go-sqlmock"
"github.com/piraces/rsslay/pkg/feed"
"github.com/stretchr/testify/assert"
"testing"
)

// samplePubKey and samplePrivateKey form a fixed hex key pair used as test fixtures.
const samplePubKey = "73e247ee8c4ff09a50525bed7b0869c371864c0bf2b4d6a2639acaed07613958"
const samplePrivateKey = "4d0888c07093941c9db16fcffb96fdf8af49a6839e865ea6110c7ab7cbd2d3d3"
// sampleValidDirectFeedUrl points directly at an RSS feed; sampleValidUrl is a
// well-formed URL that the tests use as a non-parseable-feed case.
const sampleValidDirectFeedUrl = "https://mastodon.social/@Gargron.rss"
const sampleValidUrl = "https://mastodon.social/"

// TestGetParsedFeedForPubKey covers the happy path plus every early-return
// branch of GetParsedFeedForPubKey: no row for the public key, a query error,
// an invalid URL stored in the database, and a valid URL that is not a
// parseable feed.
func TestGetParsedFeedForPubKey(t *testing.T) {
	testCases := []struct {
		name                string
		pubKey              string
		expectedReturnUrl   string
		expectedSqlError    bool
		expectedSqlRow      bool
		expectedInvalidUrl  bool
		expectedInvalidFeed bool
	}{
		{
			name:              "valid direct feed url returns parsed feed",
			pubKey:            samplePubKey,
			expectedReturnUrl: sampleValidDirectFeedUrl,
			expectedSqlRow:    true,
		},
		{
			name:   "no row for pubkey returns nil feed",
			pubKey: samplePubKey,
		},
		{
			name:             "query error returns nil feed",
			pubKey:           samplePubKey,
			expectedSqlError: true,
		},
		{
			name:               "invalid url stored in db returns nil feed",
			pubKey:             samplePubKey,
			expectedReturnUrl:  "invalid",
			expectedSqlRow:     true,
			expectedInvalidUrl: true,
		},
		{
			name:                "valid url that is not a feed returns nil feed",
			pubKey:              samplePubKey,
			expectedReturnUrl:   sampleValidUrl,
			expectedSqlRow:      true,
			expectedInvalidFeed: true,
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			db, mock, err := sqlmock.New()
			if err != nil {
				t.Fatalf("an error '%s' was not expected when opening a stub database connection", err)
			}
			defer func() {
				_ = db.Close()
				// Verify every expectation (query and close) was actually met;
				// without this check unmet expectations pass silently.
				if err := mock.ExpectationsWereMet(); err != nil {
					t.Errorf("unfulfilled sqlmock expectations: %v", err)
				}
			}()

			rows := sqlmock.NewRows([]string{"privatekey", "url"})
			if tc.expectedSqlRow {
				rows.AddRow(samplePrivateKey, tc.expectedReturnUrl)
			}

			if tc.expectedSqlError {
				mock.ExpectQuery("SELECT privatekey, url FROM feeds").WillReturnError(errors.New("error"))
			} else {
				mock.ExpectQuery("SELECT privatekey, url FROM feeds").WillReturnRows(rows)
			}
			mock.ExpectClose()

			parsedFeed, entity := GetParsedFeedForPubKey(tc.pubKey, db)

			switch {
			case tc.expectedSqlError || !tc.expectedSqlRow:
				// Lookup failed or no feed registered: nothing is populated.
				assert.Nil(t, parsedFeed)
				assert.Empty(t, entity)
			case tc.expectedInvalidUrl || tc.expectedInvalidFeed:
				// Row found but the feed is unusable: entity comes back, feed is nil.
				assert.Nil(t, parsedFeed)
				assert.Equal(t, feed.Entity{
					PrivateKey: samplePrivateKey,
					URL:        tc.expectedReturnUrl,
				}, entity)
			default:
				assert.NotNil(t, parsedFeed)
				assert.Equal(t, feed.Entity{
					PrivateKey: samplePrivateKey,
					URL:        tc.expectedReturnUrl,
				}, entity)
			}
		})
	}
}
Loading

0 comments on commit 395c006

Please sign in to comment.