Lines of src/core.rs, as of check-in be0b8602d1, that are changed by the
sequence of edits leading to check-in d8c1d259a2:
1: use crate::{
2: Arc,
3: command,
4: Mutex,
5: sql::Db,
6: tg_bot::{
7: Callback,
8: MyMessage,
9: Tg,
10: },
11: };
12:
13: use std::{
14: borrow::Cow,
15: collections::{
16: BTreeMap,
17: HashSet,
18: },
19: time::Duration,
20: };
21:
22: use async_compat::Compat;
23: use chrono::{
24: DateTime,
25: Local,
26: };
27: use lazy_static::lazy_static;
28: use regex::Regex;
use reqwest::header::{
    IF_MODIFIED_SINCE,
    LAST_MODIFIED,
};
30: use smol::Timer;
31: use stacked_errors::{
32: Result,
33: StackableErr,
34: anyhow,
35: bail,
36: };
37: use tgbot::{
38: handler::UpdateHandler,
39: types::{
40: CallbackQuery,
41: ChatPeerId,
42: Command,
43: Update,
44: UpdateType,
45: UserPeerId,
46: },
47: };
48: use ttl_cache::TtlCache;
49:
// Matches characters that must be escaped before sending text to Telegram.
// NOTE(review): the set resembles Telegram's MarkdownV2 special characters
// minus `=` — confirm against the Bot API spec before relying on it.
lazy_static!{
pub static ref RE_SPECIAL: Regex = Regex::new(r"([\-_*\[\]()~`>#+|{}\.!])").unwrap();
}
53:
/// Drop-guard that makes sure only one `Token` exists per source id at a time.
pub struct Token {
    // Shared registry of ids that currently hold a token.
    running: Arc<Mutex<HashSet<i32>>>,
    // Id claimed by this token; removed from `running` when dropped.
    my_id: i32,
}
59:
60: impl Token {
61: /// Attempts to acquire a per-id token by inserting `my_id` into the shared `running` set.
62: ///
63: /// If the id was not already present, the function inserts it and returns `Some(Token)`.
64: /// When the returned `Token` is dropped, the id will be removed from the `running` set,
65: /// allowing subsequent acquisitions for the same id.
66: ///
67: /// # Parameters
68: ///
69: /// - `running`: Shared set tracking active ids.
70: /// - `my_id`: Identifier to acquire a token for.
71: ///
72: /// # Returns
73: ///
74: /// `Ok(Token)` if the id was successfully acquired, `Error` if a token for the id is already active.
75: async fn new (running: &Arc<Mutex<HashSet<i32>>>, my_id: i32) -> Result<Token> {
76: let running = running.clone();
77: let mut set = running.lock_arc().await;
78: if set.contains(&my_id) {
79: bail!("Token already taken");
80: } else {
81: set.insert(my_id);
82: Ok(Token {
83: running,
84: my_id,
85: })
86: }
87: }
88: }
89:
impl Drop for Token {
    /// Releases this token's claim on the shared running-set when the token is dropped.
    ///
    /// The token's identifier is removed from the shared `running` set so that future
    /// operations for the same id may proceed. The removal is a tiny async
    /// critical section driven to completion on the current thread.
    ///
    /// TODO: is using block_on inside block_on safe? Currently tested and working fine.
    fn drop (&mut self) {
        smol::block_on(async {
            let mut set = self.running.lock_arc().await;
            set.remove(&self.my_id);
        })
    }
}
104:
/// Maps a source id to its channel name, ordered by id.
pub type FeedList = BTreeMap<i32, String>;
// Per-owner TTL cache: owner chat id -> shared, lockable feed list.
type UserCache = TtlCache<i64, Arc<Mutex<FeedList>>>;
107:
/// Central application state shared by handlers and background tasks.
#[derive(Clone)]
pub struct Core {
    /// Telegram client wrapper.
    pub tg: Tg,
    /// Database handle.
    pub db: Db,
    /// TTL cache of per-owner feed lists.
    pub feeds: Arc<Mutex<UserCache>>,
    // Source ids with a check currently in flight; guarded via `Token`.
    running: Arc<Mutex<HashSet<i32>>>,
    // Shared HTTP client used to fetch feeds (optionally proxied).
    http_client: reqwest::Client,
}
116:
/// A single feed entry; only `uri` is currently used when posting.
pub struct Post {
    uri: String,
    // Retained for future use; underscore prefix silences dead-code warnings.
    _title: String,
    _authors: String,
    _summary: String,
}
123:
124: impl Core {
125: /// Create a Core instance from configuration and start its background autofetch loop.
126: ///
127: /// The provided `settings` must include:
128: /// - `owner` (integer): default chat id to use as the owner/destination,
129: /// - `api_key` (string): Telegram bot API key,
130: /// - `api_gateway` (string): Telegram API gateway host,
131: /// - `pg` (string): PostgreSQL connection string,
132: /// - optional `proxy` (string): proxy URL for the HTTP client.
133: ///
134: /// On success returns an initialized `Core` with Telegram and HTTP clients, database connection,
135: /// an empty running set for per-id tokens, and a spawned background task that periodically runs
136: /// `autofetch`. If any required setting is missing or initialization fails, an error is returned.
137: pub async fn new(settings: config::Config) -> Result<Core> {
138: let mut client = reqwest::Client::builder();
139: if let Ok(proxy) = settings.get_string("proxy") {
140: let proxy = reqwest::Proxy::all(proxy).stack()?;
141: client = client.proxy(proxy);
142: }
143:
144: let core = Core {
145: tg: Tg::new(&settings).await.stack()?,
146: db: Db::new(&settings.get_string("pg").stack()?)?,
147: feeds: Arc::new(Mutex::new(TtlCache::new(10000))),
148: running: Arc::new(Mutex::new(HashSet::new())),
149: http_client: client.build().stack()?,
150: };
151:
152: let clone = core.clone();
153: smol::spawn(Compat::new(async move {
154: loop {
155: let delay = match &clone.autofetch().await {
156: Err(err) => {
157: if let Err(err) = clone.tg.send(MyMessage::html(format!("🛑 {err}"))).await {
158: eprintln!("Autofetch error: {err:?}");
159: };
160: std::time::Duration::from_secs(60)
161: },
162: Ok(time) => *time,
163: };
164: Timer::after(delay).await;
165: }
166: })).detach();
167: Ok(core)
168: }
169:
170: /// Fetches the feed for a source, sends any newly discovered posts to the appropriate chat, and records them in the database.
171: ///
172: /// This acquires a per-source guard to prevent concurrent checks for the same `id`. If a check is already running for
173: /// the given `id`, the function returns an error. If `last_scrape` is provided, it is sent as the `If-Modified-Since`
174: /// header to the feed request. The function parses RSS or Atom feeds, sends unseen post URLs to either the source's
175: /// channel (when `real` is true) or the source owner (when `real` is false), and persists posted entries so they are
176: /// not reposted later.
177: ///
178: /// Parameters:
179: /// - `id`: Identifier of the source to check.
180: /// - `real`: When `true`, send posts to the source's channel; when `false`, send to the source owner.
181: /// - `last_scrape`: Optional timestamp used to set the `If-Modified-Since` header for the HTTP request.
182: ///
183: /// # Returns
184: ///
185: /// `Posted: N` where `N` is the number of posts processed and sent.
186: pub async fn check (&self, id: i32, real: bool, last_scrape: Option<DateTime<Local>>) -> Result<String> {
187: let mut posted: i32 = 0;
188: let mut conn = self.db.begin().await.stack()?;
189:
190: let _token = Token::new(&self.running, id).await.stack()?;
191: let source = conn.get_source(id, self.tg.owner).await.stack()?;
192: conn.set_scrape(id).await.stack()?;
193: let destination = ChatPeerId::from(match real {
194: true => source.channel_id,
195: false => source.owner,
196: });
197: let mut this_fetch: Option<DateTime<chrono::FixedOffset>> = None;
198: let mut posts: BTreeMap<DateTime<chrono::FixedOffset>, Post> = BTreeMap::new();
199:
200: let mut builder = self.http_client.get(&source.url);
201: if let Some(last_scrape) = last_scrape {
202: builder = builder.header(LAST_MODIFIED, last_scrape.to_rfc2822());
203: };
204: let response = builder.send().await.stack()?;
205: #[cfg(debug_assertions)]
206: {
207: use reqwest::header::{
208: CACHE_CONTROL,
209: EXPIRES,
210: };
211: let headers = response.headers();
212: let expires = headers.get(EXPIRES);
213: let cache = headers.get(CACHE_CONTROL);
214: if expires.is_some() || cache.is_some() {
215: println!("{} {} {:?} {:?} {:?}", Local::now().to_rfc2822(), &source.url, last_scrape, expires, cache);
216: }
217: }
218: let status = response.status();
219: let content = response.bytes().await.stack()?;
220: match rss::Channel::read_from(&content[..]) {
221: Ok(feed) => {
222: for item in feed.items() {
223: if let Some(link) = item.link() {
224: let date = match item.pub_date() {
225: Some(feed_date) => DateTime::parse_from_rfc2822(feed_date),
226: None => DateTime::parse_from_rfc3339(match item.dublin_core_ext() {
227: Some(ext) => {
228: let dates = ext.dates();
229: if dates.is_empty() {
230: bail!("Feed item has Dublin Core extension but no dates.")
231: } else {
232: &dates[0]
233: }
234: },
235: None => bail!("Feed item misses posting date."),
236: }),
237: }.stack()?;
238: posts.insert(date, Post{
239: uri: link.to_string(),
240: _title: item.title().unwrap_or("").to_string(),
241: _authors: item.author().unwrap_or("").to_string(),
242: _summary: item.content().unwrap_or("").to_string(),
243: });
244: }
245: };
246: },
247: Err(err) => match err {
248: rss::Error::InvalidStartTag => {
249: match atom_syndication::Feed::read_from(&content[..]) {
250: Ok(feed) => {
251: for item in feed.entries() {
252: let date = item.published()
253: .stack_err("Feed item missing publishing date.")?;
254: let uri = {
255: let links = item.links();
256: if links.is_empty() {
257: bail!("Feed item missing post links.");
258: } else {
259: links[0].href().to_string()
260: }
261: };
262: let _authors = item.authors().iter().map(|x| format!("{} <{:?}>", x.name(), x.email())).collect::<Vec<String>>().join(", ");
263: let _summary = if let Some(sum) = item.summary() { sum.value.clone() } else { String::new() };
264: posts.insert(*date, Post{
265: uri,
266: _title: item.title().to_string(),
267: _authors,
268: _summary,
269: });
270: };
271: },
272: Err(err) => {
273: bail!("Unsupported or mangled content:\n{:?}\n{err}\n{status:#?}\n", &source.url)
274: },
275: }
276: },
277: rss::Error::Eof => (),
278: _ => bail!("Unsupported or mangled content:\n{:?}\n{err}\n{status:#?}\n", &source.url)
279: }
280: };
281: for (date, post) in posts.iter() {
282: let post_url: Cow<str> = match source.url_re {
283: Some(ref x) => sedregex::ReplaceCommand::new(x).stack()?.execute(&post.uri),
284: None => post.uri.clone().into(),
285: };
286: if ! conn.exists(&post_url, id).await.stack()? {
287: if this_fetch.is_none() || *date > this_fetch.unwrap() {
288: this_fetch = Some(*date);
289: };
290: self.tg.send(MyMessage::html_to(match &source.iv_hash {
291: Some(hash) => format!("<a href=\"https://t.me/iv?url={post_url}&rhash={hash}\"> </a>{post_url}"),
292: None => format!("{post_url}"),
293: }, destination)).await.stack()?;
294: conn.add_post(id, date, &post_url).await.stack()?;
295: posted += 1;
296: };
297: };
298: posts.clear();
299: Ok(format!("Posted: {posted}"))
300: }
301:
302: /// Determine the delay until the next scheduled fetch and spawn background checks for any overdue sources.
303: ///
304: /// This scans the database queue, spawns background tasks to run checks for sources whose `next_fetch`
305: /// is in the past (each task uses a Core clone with the appropriate owner), and computes the shortest
306: /// duration until the next `next_fetch`.
307: async fn autofetch(&self) -> Result<std::time::Duration> {
308: let mut delay = chrono::Duration::minutes(1);
309: let now = chrono::Local::now();
310: let queue = {
311: let mut conn = self.db.begin().await.stack()?;
312: conn.get_queue().await.stack()?
313: };
314: for row in queue {
315: if let Some(next_fetch) = row.next_fetch {
316: if next_fetch < now {
317: if let (Some(owner), Some(source_id), last_scrape) = (row.owner, row.source_id, row.last_scrape) {
318: let clone = Core {
319: tg: self.tg.with_owner(owner),
320: ..self.clone()
321: };
322: let source = {
323: let mut conn = self.db.begin().await.stack()?;
324: match conn.get_one(owner, source_id).await {
325: Ok(Some(source)) => source.to_string(),
326: Ok(None) => "Source not found in database?".to_string(),
327: Err(err) => format!("Failed to fetch source data:\n{err}"),
328: }
329: };
330: smol::spawn(Compat::new(async move {
331: if let Err(err) = clone.check(source_id, true, Some(last_scrape)).await
332: && let Err(err) = clone.tg.send(MyMessage::html(format!("🛑 {source}\n<pre>{}</pre>", &err.to_string()))).await
333: {
334: eprintln!("Check error: {err}");
335: };
336: })).detach();
337: }
338: } else if next_fetch - now < delay {
339: delay = next_fetch - now;
340: }
341: }
342: };
343: delay.to_std().stack()
344: }
345:
346: /// Displays full list of managed channels for specified user
347: pub async fn list (&self, owner: UserPeerId) -> Result<String> {
348: let mut reply: Vec<String> = vec![];
349: reply.push("Channels:".into());
350: let mut conn = self.db.begin().await.stack()?;
351: for row in conn.get_list(owner).await.stack()? {
352: reply.push(row.to_string());
353: };
354: Ok(reply.join("\n\n"))
355: }
356:
357: /// Returns current cached list of feed for requested user, or loads data from database
358: pub async fn get_feeds (&self, owner: i64) -> Result<Arc<Mutex<FeedList>>> {
359: let mut feeds = self.feeds.lock_arc().await;
360: Ok(match feeds.get(&owner) {
361: None => {
362: let mut conn = self.db.begin().await.stack()?;
363: let feed_list = conn.get_feeds(owner).await.stack()?;
364: let mut map = BTreeMap::new();
365: for feed in feed_list {
366: map.insert(feed.source_id, feed.channel);
367: };
368: let res = Arc::new(Mutex::new(map));
369: feeds.insert(owner, res.clone(), Duration::from_secs(60 * 60 * 3));
370: res
371: },
372: Some(res) => res.clone(),
373: })
374: }
375:
376: /// Adds feed to cached list
377: pub async fn add_feed (&self, owner: i64, source_id: i32, channel: String) -> Result<()> {
378: let mut inserted = true;
379: {
380: let mut feeds = self.feeds.lock_arc().await;
381: if let Some(feed) = feeds.get_mut(&owner) {
382: let mut feed = feed.lock_arc().await;
383: feed.insert(source_id, channel);
384: } else {
385: inserted = false;
386: }
387: }
388: // in case insert failed - we miss the entry we needed to expand, reload everything from
389: // database
390: if !inserted {
391: self.get_feeds(owner).await.stack()?;
392: }
393: Ok(())
394: }
395:
396: /// Removes feed from cached list
397: pub async fn rm_feed (&self, owner: i64, source_id: &i32) -> Result<()> {
398: let mut dropped = false;
399: {
400: let mut feeds = self.feeds.lock_arc().await;
401: if let Some(feed) = feeds.get_mut(&owner) {
402: let mut feed = feed.lock_arc().await;
403: feed.remove(source_id);
404: dropped = true;
405: }
406: }
407: // in case we failed to found feed we need to remove - just reload everything from database
408: if !dropped {
409: self.get_feeds(owner).await.stack()?;
410: }
411: Ok(())
412: }
413:
    /// Handles a callback query whose payload is a TOML-encoded `Callback`.
    ///
    /// NOTE(review): the payload is parsed (invalid TOML is rejected with an
    /// error) but the parsed value and `query` are otherwise unused — the
    /// actual dispatch is not implemented yet, so reaching `todo!()` panics.
    pub async fn cb (&self, query: &CallbackQuery, cb: &str) -> Result<()> {
        let cb: Callback = toml::from_str(cb).stack()?;
        todo!();
        Ok(())
    }
419: }
420:
impl UpdateHandler for Core {
    /// Dispatches an incoming Telegram update to a matching command handler and reports handler errors to the originating chat.
    ///
    /// This method inspects the update; if it contains a message that can be parsed as a bot command,
    /// it executes the corresponding command handler. If the handler returns an error, the error text
    /// is sent back to the message's chat as HTML (wrapped in `<pre>` tags). Unknown commands produce
    /// an error which is also reported to the chat.
    async fn handle (&self, update: Update) -> () {
        match update.update_type {
            UpdateType::Message(msg) => {
                if let Ok(cmd) = Command::try_from(*msg) {
                    let msg = cmd.get_message();
                    let words = cmd.get_args();
                    let command = cmd.get_name();
                    // Route by command name; handlers live in the `command` module.
                    let res = match command {
                        "/check" | "/clean" | "/enable" | "/delete" | "/disable" => command::command(self, command, msg, words).await,
                        "/start" => command::start(self, msg).await,
                        "/list" => command::list(self, msg).await,
                        "/test" => command::test(self, msg).await,
                        "/add" | "/update" => command::update(self, command, msg, words).await,
                        any => Err(anyhow!("Unknown command: {any}")),
                    };
                    // Report a handler failure back to the chat; if that send
                    // also fails, fall back to local debug output.
                    if let Err(err) = res
                        && let Err(err2) = self.tg.send(MyMessage::html_to(
                            format!("#error<pre>{err}</pre>"),
                            msg.chat.get_id(),
                        )).await
                    {
                        dbg!(err2);
                    }
                } else {
                    // not a command
                }
            },
            UpdateType::CallbackQuery(query) => {
                // Answer callback queries via `cb`; surface errors through the
                // callback answer, logging only if even that fails.
                if let Some(ref cb) = query.data
                    && let Err(err) = self.cb(&query, cb).await
                    && let Err(err) = self.tg.answer_cb(query.id, err.to_string()).await
                {
                    println!("{err:?}");
                }
            },
            _ => {
                println!("Unhandled UpdateKind:\n{update:?}")
            },
        }
    }
}