Lines of
src/core.rs
from check-in 3fd8c40aa8
that are changed by the sequence of edits moving toward
check-in be0b8602d1:
use crate::{
    Arc,
    command,
    Mutex,
    sql::Db,
    tg_bot::{
        Callback,
        MyMessage,
        Tg,
    },
};

use std::{
    borrow::Cow,
    collections::{
        BTreeMap,
        HashMap,
        HashSet,
    },
    time::Duration,
};

use async_compat::Compat;
use chrono::{
    DateTime,
    Local,
};
use lazy_static::lazy_static;
use regex::Regex;
use reqwest::header::{
    IF_MODIFIED_SINCE,
    LAST_MODIFIED,
};
use smol::Timer;
use stacked_errors::{
    Result,
    StackableErr,
    anyhow,
    bail,
};
use tgbot::{
    handler::UpdateHandler,
    types::{
        CallbackQuery,
        ChatPeerId,
        Command,
        Update,
        UpdateType,
        UserPeerId,
    },
};
use ttl_cache::TtlCache;
50:
lazy_static!{
    // Characters that Telegram's MarkdownV2 syntax requires to be escaped;
    // the capture group lets callers replace each match with `\$1`.
    pub static ref RE_SPECIAL: Regex = Regex::new(r"([\-_*\[\]()~`>#+|{}\.!])").unwrap();
}
54:
/// Guard guaranteeing that at most one `Token` exists per id at any time.
/// Acquired via [`Token::new`]; the id is released again when the token
/// is dropped (see the `Drop` impl below).
pub struct Token {
    // Shared set of ids that currently hold a token.
    running: Arc<Mutex<HashSet<i32>>>,
    // The id this particular token has claimed.
    my_id: i32,
}
60:
61: impl Token {
62: /// Attempts to acquire a per-id token by inserting `my_id` into the shared `running` set.
63: ///
64: /// If the id was not already present, the function inserts it and returns `Some(Token)`.
65: /// When the returned `Token` is dropped, the id will be removed from the `running` set,
66: /// allowing subsequent acquisitions for the same id.
67: ///
68: /// # Parameters
69: ///
70: /// - `running`: Shared set tracking active ids.
71: /// - `my_id`: Identifier to acquire a token for.
72: ///
73: /// # Returns
74: ///
75: /// `Ok(Token)` if the id was successfully acquired, `Error` if a token for the id is already active.
76: async fn new (running: &Arc<Mutex<HashSet<i32>>>, my_id: i32) -> Result<Token> {
77: let running = running.clone();
78: let mut set = running.lock_arc().await;
79: if set.contains(&my_id) {
80: bail!("Token already taken");
81: } else {
82: set.insert(my_id);
83: Ok(Token {
84: running,
85: my_id,
86: })
87: }
88: }
89: }
90:
impl Drop for Token {
    /// Releases this token's claim on the shared running-set when the token is dropped.
    ///
    /// The token's identifier is removed from the shared `running` set so that future
    /// operations for the same id may proceed.
    ///
    /// NOTE(review): this nests `block_on` inside whatever executor is dropping the
    /// token — the TODO below says it is tested and working, but it deserves a second
    /// look if the runtime ever changes.
    ///
    /// TODO: is using block_on inside block_on safe? Currently tested and working fine.
    fn drop (&mut self) {
        smol::block_on(async {
            let mut set = self.running.lock_arc().await;
            set.remove(&self.my_id);
        })
    }
}
105:
/// Maps a source id to the name of the channel it posts to.
pub type FeedList = HashMap<i32, String>;
/// Per-user cache of feed lists; entries expire after a TTL (see `get_feeds`).
type UserCache = TtlCache<i64, Arc<Mutex<FeedList>>>;
108:
/// Central application state: Telegram client, database handle, the per-user
/// feed cache, and the set of source ids with a check currently in flight.
/// Cloning is cheap — all shared state is behind `Arc`s or clonable handles.
#[derive(Clone)]
pub struct Core {
    pub tg: Tg,                        // Telegram bot client
    pub db: Db,                        // PostgreSQL access layer
    pub feeds: Arc<Mutex<UserCache>>,  // cached feed lists, keyed by user id
    running: Arc<Mutex<HashSet<i32>>>, // source ids with an active `check`
    http_client: reqwest::Client,      // shared HTTP client (optionally proxied)
}
117:
/// A single entry extracted from an RSS or Atom feed.
/// Only `uri` is used when posting; the remaining fields are parsed but
/// currently unused (hence the leading underscores).
pub struct Post {
    uri: String,      // link to the post
    _title: String,   // entry title (unused)
    _authors: String, // joined author list (unused)
    _summary: String, // entry summary/content (unused)
}
124:
125: impl Core {
126: /// Create a Core instance from configuration and start its background autofetch loop.
127: ///
128: /// The provided `settings` must include:
129: /// - `owner` (integer): default chat id to use as the owner/destination,
130: /// - `api_key` (string): Telegram bot API key,
131: /// - `api_gateway` (string): Telegram API gateway host,
132: /// - `pg` (string): PostgreSQL connection string,
133: /// - optional `proxy` (string): proxy URL for the HTTP client.
134: ///
135: /// On success returns an initialized `Core` with Telegram and HTTP clients, database connection,
136: /// an empty running set for per-id tokens, and a spawned background task that periodically runs
137: /// `autofetch`. If any required setting is missing or initialization fails, an error is returned.
138: pub async fn new(settings: config::Config) -> Result<Core> {
139: let mut client = reqwest::Client::builder();
140: if let Ok(proxy) = settings.get_string("proxy") {
141: let proxy = reqwest::Proxy::all(proxy).stack()?;
142: client = client.proxy(proxy);
143: }
144:
145: let core = Core {
146: tg: Tg::new(&settings).await.stack()?,
147: db: Db::new(&settings.get_string("pg").stack()?)?,
148: feeds: Arc::new(Mutex::new(TtlCache::new(10000))),
149: running: Arc::new(Mutex::new(HashSet::new())),
150: http_client: client.build().stack()?,
151: };
152:
153: let clone = core.clone();
154: smol::spawn(Compat::new(async move {
155: loop {
156: let delay = match &clone.autofetch().await {
157: Err(err) => {
158: if let Err(err) = clone.tg.send(MyMessage::html(format!("🛑 {err}"))).await {
159: eprintln!("Autofetch error: {err:?}");
160: };
161: std::time::Duration::from_secs(60)
162: },
163: Ok(time) => *time,
164: };
165: Timer::after(delay).await;
166: }
167: })).detach();
168: Ok(core)
169: }
170:
171: /// Fetches the feed for a source, sends any newly discovered posts to the appropriate chat, and records them in the database.
172: ///
173: /// This acquires a per-source guard to prevent concurrent checks for the same `id`. If a check is already running for
174: /// the given `id`, the function returns an error. If `last_scrape` is provided, it is sent as the `If-Modified-Since`
175: /// header to the feed request. The function parses RSS or Atom feeds, sends unseen post URLs to either the source's
176: /// channel (when `real` is true) or the source owner (when `real` is false), and persists posted entries so they are
177: /// not reposted later.
178: ///
179: /// Parameters:
180: /// - `id`: Identifier of the source to check.
181: /// - `real`: When `true`, send posts to the source's channel; when `false`, send to the source owner.
182: /// - `last_scrape`: Optional timestamp used to set the `If-Modified-Since` header for the HTTP request.
183: ///
184: /// # Returns
185: ///
186: /// `Posted: N` where `N` is the number of posts processed and sent.
187: pub async fn check (&self, id: i32, real: bool, last_scrape: Option<DateTime<Local>>) -> Result<String> {
188: let mut posted: i32 = 0;
189: let mut conn = self.db.begin().await.stack()?;
190:
191: let _token = Token::new(&self.running, id).await.stack()?;
192: let source = conn.get_source(id, self.tg.owner).await.stack()?;
193: conn.set_scrape(id).await.stack()?;
194: let destination = ChatPeerId::from(match real {
195: true => source.channel_id,
196: false => source.owner,
197: });
198: let mut this_fetch: Option<DateTime<chrono::FixedOffset>> = None;
199: let mut posts: BTreeMap<DateTime<chrono::FixedOffset>, Post> = BTreeMap::new();
200:
201: let mut builder = self.http_client.get(&source.url);
202: if let Some(last_scrape) = last_scrape {
203: builder = builder.header(LAST_MODIFIED, last_scrape.to_rfc2822());
204: };
205: let response = builder.send().await.stack()?;
206: #[cfg(debug_assertions)]
207: {
208: use reqwest::header::{
209: CACHE_CONTROL,
210: EXPIRES,
211: };
212: let headers = response.headers();
213: let expires = headers.get(EXPIRES);
214: let cache = headers.get(CACHE_CONTROL);
215: if expires.is_some() || cache.is_some() {
216: println!("{} {} {:?} {:?} {:?}", Local::now().to_rfc2822(), &source.url, last_scrape, expires, cache);
217: }
218: }
219: let status = response.status();
220: let content = response.bytes().await.stack()?;
221: match rss::Channel::read_from(&content[..]) {
222: Ok(feed) => {
223: for item in feed.items() {
224: if let Some(link) = item.link() {
225: let date = match item.pub_date() {
226: Some(feed_date) => DateTime::parse_from_rfc2822(feed_date),
227: None => DateTime::parse_from_rfc3339(match item.dublin_core_ext() {
228: Some(ext) => {
229: let dates = ext.dates();
230: if dates.is_empty() {
231: bail!("Feed item has Dublin Core extension but no dates.")
232: } else {
233: &dates[0]
234: }
235: },
236: None => bail!("Feed item misses posting date."),
237: }),
238: }.stack()?;
239: posts.insert(date, Post{
240: uri: link.to_string(),
241: _title: item.title().unwrap_or("").to_string(),
242: _authors: item.author().unwrap_or("").to_string(),
243: _summary: item.content().unwrap_or("").to_string(),
244: });
245: }
246: };
247: },
248: Err(err) => match err {
249: rss::Error::InvalidStartTag => {
250: match atom_syndication::Feed::read_from(&content[..]) {
251: Ok(feed) => {
252: for item in feed.entries() {
253: let date = item.published()
254: .stack_err("Feed item missing publishing date.")?;
255: let uri = {
256: let links = item.links();
257: if links.is_empty() {
258: bail!("Feed item missing post links.");
259: } else {
260: links[0].href().to_string()
261: }
262: };
263: let _authors = item.authors().iter().map(|x| format!("{} <{:?}>", x.name(), x.email())).collect::<Vec<String>>().join(", ");
264: let _summary = if let Some(sum) = item.summary() { sum.value.clone() } else { String::new() };
265: posts.insert(*date, Post{
266: uri,
267: _title: item.title().to_string(),
268: _authors,
269: _summary,
270: });
271: };
272: },
273: Err(err) => {
274: bail!("Unsupported or mangled content:\n{:?}\n{err}\n{status:#?}\n", &source.url)
275: },
276: }
277: },
278: rss::Error::Eof => (),
279: _ => bail!("Unsupported or mangled content:\n{:?}\n{err}\n{status:#?}\n", &source.url)
280: }
281: };
282: for (date, post) in posts.iter() {
283: let post_url: Cow<str> = match source.url_re {
284: Some(ref x) => sedregex::ReplaceCommand::new(x).stack()?.execute(&post.uri),
285: None => post.uri.clone().into(),
286: };
287: if ! conn.exists(&post_url, id).await.stack()? {
288: if this_fetch.is_none() || *date > this_fetch.unwrap() {
289: this_fetch = Some(*date);
290: };
291: self.tg.send(MyMessage::html_to(match &source.iv_hash {
292: Some(hash) => format!("<a href=\"https://t.me/iv?url={post_url}&rhash={hash}\"> </a>{post_url}"),
293: None => format!("{post_url}"),
294: }, destination)).await.stack()?;
295: conn.add_post(id, date, &post_url).await.stack()?;
296: posted += 1;
297: };
298: };
299: posts.clear();
300: Ok(format!("Posted: {posted}"))
301: }
302:
    /// Determine the delay until the next scheduled fetch and spawn background checks for any overdue sources.
    ///
    /// This scans the database queue, spawns background tasks to run checks for sources whose `next_fetch`
    /// is in the past (each task uses a Core clone with the appropriate owner), and computes the shortest
    /// duration until the next `next_fetch`.
    async fn autofetch(&self) -> Result<std::time::Duration> {
        // Default wake-up interval when nothing is due sooner.
        let mut delay = chrono::Duration::minutes(1);
        let now = chrono::Local::now();
        let queue = {
            // Scoped so the connection is released before spawning tasks.
            let mut conn = self.db.begin().await.stack()?;
            conn.get_queue().await.stack()?
        };
        for row in queue {
            if let Some(next_fetch) = row.next_fetch {
                if next_fetch < now {
                    // Overdue: run the check in the background on behalf of the source owner.
                    if let (Some(owner), Some(source_id), last_scrape) = (row.owner, row.source_id, row.last_scrape) {
                        let clone = Core {
                            tg: self.tg.with_owner(owner),
                            ..self.clone()
                        };
                        // Pre-render a human-readable source description for error reports.
                        let source = {
                            let mut conn = self.db.begin().await.stack()?;
                            match conn.get_one(owner, source_id).await {
                                Ok(Some(source)) => source.to_string(),
                                Ok(None) => "Source not found in database?".to_string(),
                                Err(err) => format!("Failed to fetch source data:\n{err}"),
                            }
                        };
                        smol::spawn(Compat::new(async move {
                            // NOTE(review): the second `err` shadows the check error, so when
                            // both the check and the report fail only the send error reaches
                            // stderr — confirm this is intended.
                            if let Err(err) = clone.check(source_id, true, Some(last_scrape)).await
                                && let Err(err) = clone.tg.send(MyMessage::html(format!("🛑 {source}\n<pre>{}</pre>", &err.to_string()))).await
                            {
                                eprintln!("Check error: {err}");
                            };
                        })).detach();
                    }
                } else if next_fetch - now < delay {
                    // Due in the future: shrink the delay to wake up exactly then.
                    delay = next_fetch - now;
                }
            }
        };
        delay.to_std().stack()
    }
346:
347: /// Displays full list of managed channels for specified user
348: pub async fn list (&self, owner: UserPeerId) -> Result<String> {
349: let mut reply: Vec<String> = vec![];
350: reply.push("Channels:".into());
351: let mut conn = self.db.begin().await.stack()?;
352: for row in conn.get_list(owner).await.stack()? {
353: reply.push(row.to_string());
354: };
355: Ok(reply.join("\n\n"))
356: }
357:
358: /// Returns current cached list of feed for requested user, or loads data from database
359: pub async fn get_feeds (&self, owner: i64) -> Result<Arc<Mutex<FeedList>>> {
360: let mut feeds = self.feeds.lock_arc().await;
361: Ok(match feeds.get(&owner) {
362: None => {
363: let mut conn = self.db.begin().await.stack()?;
364: let feed_list = conn.get_feeds(owner).await.stack()?;
3fd8c40aa8 2026-03-30 365: let mut map = HashMap::new();
366: for feed in feed_list {
367: map.insert(feed.source_id, feed.channel);
368: };
369: let res = Arc::new(Mutex::new(map));
370: feeds.insert(owner, res.clone(), Duration::from_secs(60 * 60 * 3));
371: res
372: },
373: Some(res) => res.clone(),
374: })
375: }
376:
377: /// Adds feed to cached list
378: pub async fn add_feed (&self, owner: i64, source_id: i32, channel: String) -> Result<()> {
379: let mut inserted = true;
380: {
381: let mut feeds = self.feeds.lock_arc().await;
382: if let Some(feed) = feeds.get_mut(&owner) {
383: let mut feed = feed.lock_arc().await;
384: feed.insert(source_id, channel);
385: } else {
386: inserted = false;
387: }
388: }
389: // in case insert failed - we miss the entry we needed to expand, reload everything from
390: // database
391: if !inserted {
392: self.get_feeds(owner).await.stack()?;
393: }
394: Ok(())
395: }
396:
397: /// Removes feed from cached list
398: pub async fn rm_feed (&self, owner: i64, source_id: &i32) -> Result<()> {
399: let mut dropped = false;
400: {
401: let mut feeds = self.feeds.lock_arc().await;
402: if let Some(feed) = feeds.get_mut(&owner) {
403: let mut feed = feed.lock_arc().await;
404: feed.remove(source_id);
405: dropped = true;
406: }
407: }
408: // in case we failed to found feed we need to remove - just reload everything from database
409: if !dropped {
410: self.get_feeds(owner).await.stack()?;
411: }
412: Ok(())
413: }
414:
    /// Handles a callback query from an inline keyboard.
    ///
    /// Parses the callback payload `cb` as TOML into a `Callback`.
    /// Dispatch is not implemented yet: after a successful parse this
    /// panics via `todo!()`, so the trailing `Ok(())` is unreachable.
    pub async fn cb (&self, query: &CallbackQuery, cb: &str) -> Result<()> {
        let cb: Callback = toml::from_str(cb).stack()?;
        todo!();
        Ok(())
    }
420: }
421:
impl UpdateHandler for Core {
    /// Dispatches an incoming Telegram update to a matching command handler and reports handler errors to the originating chat.
    ///
    /// This method inspects the update; if it contains a message that can be parsed as a bot command,
    /// it executes the corresponding command handler. If the handler returns an error, the error text
    /// is sent back to the message's chat using HTML formatting. Unknown commands produce an error
    /// which is also reported to the chat. Callback queries are delegated to `Core::cb`, with failures
    /// surfaced to the user via `answer_cb`.
    async fn handle (&self, update: Update) -> () {
        match update.update_type {
            UpdateType::Message(msg) => {
                if let Ok(cmd) = Command::try_from(*msg) {
                    let msg = cmd.get_message();
                    let words = cmd.get_args();
                    let command = cmd.get_name();
                    // Route to the handler matching the command verb.
                    let res = match command {
                        "/check" | "/clean" | "/enable" | "/delete" | "/disable" => command::command(self, command, msg, words).await,
                        "/start" => command::start(self, msg).await,
                        "/list" => command::list(self, msg).await,
                        "/test" => command::test(self, msg).await,
                        "/add" | "/update" => command::update(self, command, msg, words).await,
                        any => Err(anyhow!("Unknown command: {any}")),
                    };
                    // Report handler failure back to the originating chat; if even
                    // that fails, dump the secondary error for debugging.
                    if let Err(err) = res
                        && let Err(err2) = self.tg.send(MyMessage::html_to(
                            format!("#error<pre>{err}</pre>"),
                            msg.chat.get_id(),
                        )).await
                    {
                        dbg!(err2);
                    }
                } else {
                    // not a command
                }
            },
            UpdateType::CallbackQuery(query) => {
                // Callback payloads are parsed/handled by `Core::cb`; its error
                // text is shown to the user as a callback answer.
                if let Some(ref cb) = query.data
                    && let Err(err) = self.cb(&query, cb).await
                {
                    if let Err(err) = self.tg.answer_cb(query.id, err.to_string()).await {
                        println!("{err:?}");
                    }
                }
            },
            _ => {
                println!("Unhandled UpdateKind:\n{update:?}")
            },
        }
    }
}