package app

import (
	"io"

	"go.balki.me/tss/log"
	"go.balki.me/tss/proxy"
)
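
// Run loads the configuration at configPath, restores the scheduler's
// last-success state, and processes every configured feed.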
func Run(configPath string) {
	cfg, err := ParseConfig(configPath)
	if err != nil {
		log.Panic("failed to parse config", "path", configPath, "err", err)
	}

	scheduler, err := NewScheduler(cfg.LastSuccessPath)
	if err != nil {
		log.Panic("failed to create scheduler", "path", cfg.LastSuccessPath, "err", err)
	}
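	// Persist the scheduler's last-success info when Run exits.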
	defer func() {
		if err := scheduler.Save(); err != nil {
			log.Panic("failed to save last success info", "path", cfg.LastSuccessPath, "err", err)
		}
	}()

	// tgram := NewTelegramSender(cfg.TelegramProxy)

	for _, feed := range cfg.Feeds {
		log.Info("processing feed", "feed", feed.Name)
		ProcessFeed(feed, scheduler, cfg.DbDir)
	}
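
	// Earlier inline version of the feed-processing loop, left commented out.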
	/*
		for _, feed := range cfg.Feeds {
			log.Println("Processing feed", feed.Name)
			data, err := Download(feed.Url, feed.Proxy)
			if err != nil {
				log.Fatal(err)
			}
		}

		for _, feed := range c.Feeds {
			log.Println("Processing feed", feed.Name)

			links, err := parseFeed(data)
			if err != nil {
				log.Fatal(err)
			}

			for _, link := range links {
				if alreadySent(link) {
					continue
				}
				err := sendTelegram(link, feed.Channel, feed.Rhash)
				if err != nil {
					log.Fatal(err)
				}
			}
		}

		fmt.Println(configPath)
	*/
}
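
// ProcessFeed downloads a feed when its cron schedule is due, parses the
// result, and filters the entries against the feed's database.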
func ProcessFeed(feed FeedCfg, scheduler *Scheduler, dbDir string) {
	sd, err := scheduler.ShouldDownload(feed.Name, feed.Cron)
	if err != nil {
		log.Error("shouldDownload failed", "feed", feed.Name, "err", err)
		return
	}

	if !sd {
		log.Info("skipping feed due to schedule", "feed", feed.Name)
		return
	}

	db, err := NewDB(dbDir, feed.Name)
	if err != nil {
		log.Error("failed to get db", "feed", feed.Name, "db_dir", dbDir, "error", err)
		return
	}

	data, err := Download(feed.Url, feed.Proxy)
	if err != nil {
		log.Error("download failed", "feed", feed.Name, "url", feed.Url, "proxy", feed.Proxy, "error", err)
		return
	}

	entries, err := ParseFeed(data)
	if err != nil {
		log.Error("feed parsing failed", "feed", feed.Name, "data", data, "error", err)
		return
	}
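
	// The filtered entries are discarded for now; only the error is checked.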
	_, err = db.Filter(entries)
	if err != nil {
		log.Error("failed to filter entries", "feed", feed.Name, "error", err)
	}
}
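
// Download fetches url with an HTTP client configured for proxyUrl and
// returns the raw response body.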
func Download(url string, proxyUrl string) ([]byte, error) {
	client, err := proxy.GetClient(proxyUrl)
	if err != nil {
		return nil, err
	}
	res, err := client.Get(url)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	return io.ReadAll(res.Body)
}