// Package source provides data sources that emit model.Item values.
package source

import (
	"context"
	"errors"
	"fmt"
	"log/slog"
	"time"

	"github.com/mmcdole/gofeed"

	cm "go.neonxp.ru/conf/model"
	"go.neonxp.ru/pose/internal/model"
)

// ErrNoFeedAddress is returned by NewFeed when the configuration group
// contains no "url" value.
var ErrNoFeedAddress = errors.New("no feed address")

// Feed is a source that periodically scrapes an RSS/Atom feed and converts
// its entries into model.Item values.
type Feed struct {
	logger         *slog.Logger
	feed           string
	scrapeInterval time.Duration
	feedParser     *gofeed.Parser
}

// NewFeed builds a Feed from the configuration group cfg.
//
// Required config: "url" — the feed address (ErrNoFeedAddress otherwise).
// Optional config: "scrape_interval" — a time.ParseDuration string,
// defaulting to "10m" when empty.
func NewFeed(cfg cm.Group, logger *slog.Logger) (*Feed, error) {
	feedSource := cfg.Get("url").String()
	if feedSource == "" {
		return nil, ErrNoFeedAddress
	}

	durStr := cfg.Get("scrape_interval").String()
	if durStr == "" {
		durStr = "10m" // default scrape interval
	}
	dur, err := time.ParseDuration(durStr)
	if err != nil {
		return nil, fmt.Errorf("failed parse duration at source: %w", err)
	}

	return &Feed{
		// BUG FIX: logger was previously never assigned, so scrapeFeed
		// panicked on a nil *slog.Logger the first time a scrape failed.
		logger:         logger,
		feed:           feedSource,
		scrapeInterval: dur,
		feedParser:     gofeed.NewParser(),
	}, nil
}

// Retrive scrapes the feed once immediately and then on every tick of the
// configured interval, streaming the resulting items on the returned channel.
// The channel is closed when ctx is cancelled.
//
// NOTE: the name is a typo of "Retrieve" but is kept for backward
// compatibility with existing callers.
func (a *Feed) Retrive(ctx context.Context) <-chan model.Item {
	out := make(chan model.Item)
	ticker := time.NewTicker(a.scrapeInterval)

	go func() {
		defer close(out)
		defer ticker.Stop()

		// send forwards items to out, aborting promptly on cancellation.
		// BUG FIX: sends were previously unguarded, so a consumer that
		// stopped reading leaked this goroutine forever.
		send := func(items []model.Item) bool {
			for _, item := range items {
				select {
				case out <- item:
				case <-ctx.Done():
					return false
				}
			}
			return true
		}

		// Initial scrape before waiting for the first tick.
		if !send(a.scrapeFeed(ctx)) {
			return
		}

		// BUG FIX: the original looped with `for range ticker.C`, which
		// blocks forever after Stop (Stop does not close the channel),
		// leaking the goroutine on context cancellation. Select on
		// ctx.Done() so shutdown is observed.
		for {
			select {
			case <-ticker.C:
				if !send(a.scrapeFeed(ctx)) {
					return
				}
			case <-ctx.Done():
				return
			}
		}
	}()

	return out
}

// scrapeFeed fetches and parses the feed once, converting every entry to a
// model.Item. On a fetch/parse failure it logs the error and returns nil;
// the caller treats that as an empty batch.
func (a *Feed) scrapeFeed(ctx context.Context) []model.Item {
	feed, err := a.feedParser.ParseURLWithContext(a.feed, ctx)
	if err != nil {
		a.logger.Error("failed parse feed", slog.Any("err", err))
		return nil
	}

	result := make([]model.Item, 0, len(feed.Items))
	for _, it := range feed.Items {
		// Fall back to "now" when the feed entry carries no parseable date.
		date := time.Now()
		if it.PublishedParsed != nil {
			date = *it.PublishedParsed
		}
		image := ""
		if it.Image != nil {
			image = it.Image.URL
		}
		result = append(result, model.Item{
			ID:      it.GUID,
			Date:    date,
			Title:   it.Title,
			Summary: it.Description,
			Link:    it.Link,
			Img:     image,
		})
	}
	return result
}