aboutsummaryrefslogtreecommitdiff
path: root/internal/source
diff options
context:
space:
mode:
author2026-03-14 00:44:19 +0300
committer2026-03-14 00:44:19 +0300
commite5d6f4c02b757c83244ba5e04fead08623a27299 (patch)
tree5b5babb9887cafa3dbc165928dc2b0fd65265bda /internal/source
downloadpose-e5d6f4c02b757c83244ba5e04fead08623a27299.tar.gz
pose-e5d6f4c02b757c83244ba5e04fead08623a27299.tar.bz2
pose-e5d6f4c02b757c83244ba5e04fead08623a27299.tar.xz
pose-e5d6f4c02b757c83244ba5e04fead08623a27299.zip
начальный коммит
Diffstat (limited to '')
-rw-r--r--internal/source/feed.go95
1 files changed, 95 insertions, 0 deletions
diff --git a/internal/source/feed.go b/internal/source/feed.go
new file mode 100644
index 0000000..31f8535
--- /dev/null
+++ b/internal/source/feed.go
@@ -0,0 +1,95 @@
+package source
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "log/slog"
+ "time"
+
+ "github.com/mmcdole/gofeed"
+ cm "go.neonxp.ru/conf/model"
+ "go.neonxp.ru/pose/internal/model"
+)
+
var (
	// ErrNoFeedAddress is returned by NewFeed when the configuration
	// group has no (or an empty) "url" entry.
	ErrNoFeedAddress = errors.New("no feed address")
)
+
// Feed is a source that periodically scrapes a single RSS/Atom feed
// and converts its entries into model.Item values.
type Feed struct {
	logger         *slog.Logger   // used to report scrape failures
	feed           string         // feed URL taken from the "url" config key
	scrapeInterval time.Duration  // delay between scrapes ("scrape_interval", default 10m)
	feedParser     *gofeed.Parser // shared parser instance for all scrapes
}
+
+func NewFeed(cfg cm.Group, logger *slog.Logger) (*Feed, error) {
+ feedSource := cfg.Get("url").String()
+ if feedSource == "" {
+ return nil, ErrNoFeedAddress
+ }
+ durStr := cfg.Get("scrape_interval").String()
+ if durStr == "" {
+ durStr = "10m"
+ }
+ dur, err := time.ParseDuration(durStr)
+ if err != nil {
+ return nil, fmt.Errorf("failed parse duration at source: %w", err)
+ }
+ return &Feed{
+ feed: feedSource,
+ scrapeInterval: dur,
+ feedParser: gofeed.NewParser(),
+ }, nil
+}
+
+func (a *Feed) Retrive(ctx context.Context) <-chan model.Item {
+ out := make(chan model.Item)
+ ticker := time.NewTicker(a.scrapeInterval)
+ go func() {
+ <-ctx.Done()
+ ticker.Stop()
+ }()
+ go func() {
+ defer close(out)
+ for _, item := range a.scrapeFeed(ctx) {
+ out <- item
+ }
+ for range ticker.C {
+ for _, item := range a.scrapeFeed(ctx) {
+ out <- item
+ }
+ }
+ }()
+
+ return out
+}
+
+func (a *Feed) scrapeFeed(ctx context.Context) []model.Item {
+ feed, err := a.feedParser.ParseURLWithContext(a.feed, ctx)
+ if err != nil {
+ a.logger.Error("failed parse feed", slog.Any("err", err))
+ return nil
+ }
+ result := make([]model.Item, 0, len(feed.Items))
+ for _, it := range feed.Items {
+ date := time.Now()
+ if it.PublishedParsed != nil {
+ date = *it.PublishedParsed
+ }
+ image := ""
+ if it.Image != nil {
+ image = it.Image.URL
+ }
+ result = append(result, model.Item{
+ ID: it.GUID,
+ Date: date,
+ Title: it.Title,
+ Summary: it.Description,
+ Link: it.Link,
+ Img: image,
+ })
+ }
+
+ return result
+}