Lines of src/core.rs from check-in f988dfd28f that are changed by the sequence of edits moving toward check-in 26339860ce:
1: use anyhow::{anyhow, bail, Context, Result};
2: use chrono::DateTime;
3: use sqlx::{
4: postgres::PgPoolOptions,
5: Row,
6: };
7: use std::{
8: borrow::Cow,
9: collections::{
10: BTreeMap,
11: HashSet,
12: },
13: sync::{Arc, Mutex},
14: };
15:
16: #[derive(Clone)]
17: pub struct Core {
18: //owner: i64,
19: //api_key: String,
20: owner_chat: telegram_bot::UserId,
21: pub tg: telegram_bot::Api,
22: pub my: telegram_bot::User,
23: pool: sqlx::Pool<sqlx::Postgres>,
24: sources: Arc<Mutex<HashSet<Arc<i32>>>>,
25: }
26:
27: impl Core {
28: pub async fn new(settings: config::Config) -> Result<Arc<Core>> {
29: let owner = settings.get_int("owner")?;
30: let api_key = settings.get_str("api_key")?;
31: let tg = telegram_bot::Api::new(&api_key);
32: let core = Arc::new(Core {
33: //owner,
34: //api_key: api_key.clone(),
35: my: tg.send(telegram_bot::GetMe).await?,
36: tg,
37: owner_chat: telegram_bot::UserId::new(owner),
38: pool: PgPoolOptions::new()
39: .max_connections(5)
40: .connect_timeout(std::time::Duration::new(300, 0))
41: .idle_timeout(std::time::Duration::new(60, 0))
42: .connect_lazy(&settings.get_str("pg")?)?,
43: sources: Arc::new(Mutex::new(HashSet::new())),
44: });
45: let clone = core.clone();
46: tokio::spawn(async move {
f988dfd28f 2022-02-13 47: if let Err(err) = &clone.autofetch().await {
f988dfd28f 2022-02-13 48: if let Err(err) = clone.send(&format!("🛑 {:?}", err), None, None) {
f988dfd28f 2022-02-13 49: eprintln!("Autofetch error: {}", err);
f988dfd28f 2022-02-13 50: };
51: }
52: });
53: Ok(core)
54: }
55:
56: pub fn stream(&self) -> telegram_bot::UpdatesStream {
57: self.tg.stream()
58: }
59:
f988dfd28f 2022-02-13 60: pub fn send<'a, S>(&self, msg: S, target: Option<telegram_bot::UserId>, parse_mode: Option<telegram_bot::types::ParseMode>) -> Result<()>
61: where S: Into<Cow<'a, str>> {
62: let msg = msg.into();
63:
f988dfd28f 2022-02-13 64: let parse_mode = match parse_mode {
f988dfd28f 2022-02-13 65: Some(mode) => mode,
f988dfd28f 2022-02-13 66: None => telegram_bot::types::ParseMode::Html,
f988dfd28f 2022-02-13 67: };
f988dfd28f 2022-02-13 68: self.tg.spawn(telegram_bot::SendMessage::new(match target {
f988dfd28f 2022-02-13 69: Some(user) => user,
f988dfd28f 2022-02-13 70: None => self.owner_chat,
f988dfd28f 2022-02-13 71: }, msg).parse_mode(parse_mode));
72: Ok(())
73: }
74:
75: pub async fn check<S>(&self, id: &i32, owner: S, real: bool) -> Result<Cow<'_, str>>
76: where S: Into<i64> {
77: let owner = owner.into();
78:
79: let mut posted: i32 = 0;
80: let id = {
81: let mut set = self.sources.lock().unwrap();
82: match set.get(id) {
83: Some(id) => id.clone(),
84: None => {
85: let id = Arc::new(*id);
86: set.insert(id.clone());
87: id.clone()
88: },
89: }
90: };
91: let count = Arc::strong_count(&id);
92: if count == 2 {
93: let mut conn = self.pool.acquire().await
94: .with_context(|| format!("Query queue fetch conn:\n{:?}", &self.pool))?;
95: let row = sqlx::query("select source_id, channel_id, url, iv_hash, owner, url_re from rsstg_source where source_id = $1 and owner = $2")
96: .bind(*id)
97: .bind(owner)
98: .fetch_one(&mut conn).await
99: .with_context(|| format!("Query source:\n{:?}", &self.pool))?;
100: drop(conn);
101: let channel_id: i64 = row.try_get("channel_id")?;
102: let url: &str = row.try_get("url")?;
103: let iv_hash: Option<&str> = row.try_get("iv_hash")?;
104: let url_re = match row.try_get("url_re")? {
105: Some(x) => Some(sedregex::ReplaceCommand::new(x)?),
106: None => None,
107: };
108: let destination = match real {
109: true => telegram_bot::UserId::new(channel_id),
110: false => telegram_bot::UserId::new(row.try_get("owner")?),
111: };
112: let mut this_fetch: Option<DateTime<chrono::FixedOffset>> = None;
113: let mut posts: BTreeMap<DateTime<chrono::FixedOffset>, String> = BTreeMap::new();
114: let response = reqwest::get(url).await?;
115: let status = response.status();
116: let content = response.bytes().await?;
117: match rss::Channel::read_from(&content[..]) {
118: Ok(feed) => {
119: for item in feed.items() {
120: if let Some(link) = item.link() {
121: let date = match item.pub_date() {
122: Some(feed_date) => DateTime::parse_from_rfc2822(feed_date),
123: None => DateTime::parse_from_rfc3339(&item.dublin_core_ext().unwrap().dates()[0]),
124: }?;
125: let url = link;
f988dfd28f 2022-02-13 126: posts.insert(date, url.into());
127: }
128: };
129: },
130: Err(err) => match err {
131: rss::Error::InvalidStartTag => {
132: let feed = atom_syndication::Feed::read_from(&content[..])
133: .with_context(|| format!("Problem opening feed url:\n{}\n{}", &url, status))?;
134: for item in feed.entries() {
135: let date = item.published().unwrap();
136: let url = item.links()[0].href();
f988dfd28f 2022-02-13 137: posts.insert(*date, url.into());
138: };
139: },
140: rss::Error::Eof => (),
141: _ => bail!("Unsupported or mangled content:\n{:?}\n{:#?}\n{:#?}\n", &url, err, status)
142: }
143: };
144: for (date, url) in posts.iter() {
145: let mut conn = self.pool.acquire().await
146: .with_context(|| format!("Check post fetch conn:\n{:?}", &self.pool))?;
147: let post_url: Cow<str> = match url_re {
148: Some(ref x) => x.execute(url),
149: None => url.into(),
150: };
151: let row = sqlx::query("select exists(select true from rsstg_post where url = $1 and source_id = $2) as exists;")
152: .bind(&*post_url)
153: .bind(*id)
154: .fetch_one(&mut conn).await
155: .with_context(|| format!("Check post:\n{:?}", &conn))?;
156: let exists: bool = row.try_get("exists")?;
157: if ! exists {
158: if this_fetch == None || *date > this_fetch.unwrap() {
159: this_fetch = Some(*date);
160: };
161: self.tg.send( match iv_hash {
162: Some(hash) => telegram_bot::SendMessage::new(destination, format!("<a href=\"https://t.me/iv?url={}&rhash={}\"> </a>{0}", &post_url, hash)),
163: None => telegram_bot::SendMessage::new(destination, format!("{}", post_url)),
164: }.parse_mode(telegram_bot::types::ParseMode::Html)).await
165: .context("Can't post message:")?;
166: sqlx::query("insert into rsstg_post (source_id, posted, url) values ($1, $2, $3);")
167: .bind(*id)
168: .bind(date)
169: .bind(&*post_url)
170: .execute(&mut conn).await
171: .with_context(|| format!("Record post:\n{:?}", &conn))?;
172: drop(conn);
173: tokio::time::sleep(std::time::Duration::new(4, 0)).await;
174: };
175: posted += 1;
176: };
177: posts.clear();
178: };
179: let mut conn = self.pool.acquire().await
180: .with_context(|| format!("Update scrape fetch conn:\n{:?}", &self.pool))?;
181: sqlx::query("update rsstg_source set last_scrape = now() where source_id = $1;")
182: .bind(*id)
183: .execute(&mut conn).await
184: .with_context(|| format!("Update scrape:\n{:?}", &conn))?;
185: Ok(format!("Posted: {}", &posted).into())
186: }
187:
188: pub async fn delete<S>(&self, source_id: &i32, owner: S) -> Result<Cow<'_, str>>
189: where S: Into<i64> {
190: let owner = owner.into();
191:
192: let mut conn = self.pool.acquire().await
193: .with_context(|| format!("Delete fetch conn:\n{:?}", &self.pool))?;
194: match sqlx::query("delete from rsstg_source where source_id = $1 and owner = $2;")
195: .bind(source_id)
196: .bind(owner)
197: .execute(&mut conn).await
198: .with_context(|| format!("Delete source rule:\n{:?}", &self.pool))?
199: .rows_affected() {
200: 0 => { Ok("No data found.".into()) },
201: x => { Ok(format!("{} sources removed.", x).into()) },
202: }
203: }
204:
205: pub async fn clean<S>(&self, source_id: &i32, owner: S) -> Result<Cow<'_, str>>
206: where S: Into<i64> {
207: let owner = owner.into();
208:
209: let mut conn = self.pool.acquire().await
210: .with_context(|| format!("Clean fetch conn:\n{:?}", &self.pool))?;
211: match sqlx::query("delete from rsstg_post p using rsstg_source s where p.source_id = $1 and owner = $2 and p.source_id = s.source_id;")
212: .bind(source_id)
213: .bind(owner)
214: .execute(&mut conn).await
215: .with_context(|| format!("Clean seen posts:\n{:?}", &self.pool))?
216: .rows_affected() {
217: 0 => { Ok("No data found.".into()) },
218: x => { Ok(format!("{} posts purged.", x).into()) },
219: }
220: }
221:
222: pub async fn enable<S>(&self, source_id: &i32, owner: S) -> Result<&str>
223: where S: Into<i64> {
224: let owner = owner.into();
225:
226: let mut conn = self.pool.acquire().await
227: .with_context(|| format!("Enable fetch conn:\n{:?}", &self.pool))?;
228: match sqlx::query("update rsstg_source set enabled = true where source_id = $1 and owner = $2")
229: .bind(source_id)
230: .bind(owner)
231: .execute(&mut conn).await
232: .with_context(|| format!("Enable source:\n{:?}", &self.pool))?
233: .rows_affected() {
234: 1 => { Ok("Source enabled.") },
235: 0 => { Ok("Source not found.") },
236: _ => { Err(anyhow!("Database error.")) },
237: }
238: }
239:
240: pub async fn disable<S>(&self, source_id: &i32, owner: S) -> Result<&str>
241: where S: Into<i64> {
242: let owner = owner.into();
243:
244: let mut conn = self.pool.acquire().await
245: .with_context(|| format!("Disable fetch conn:\n{:?}", &self.pool))?;
246: match sqlx::query("update rsstg_source set enabled = false where source_id = $1 and owner = $2")
247: .bind(source_id)
248: .bind(owner)
249: .execute(&mut conn).await
250: .with_context(|| format!("Disable source:\n{:?}", &self.pool))?
251: .rows_affected() {
252: 1 => { Ok("Source disabled.") },
253: 0 => { Ok("Source not found.") },
254: _ => { Err(anyhow!("Database error.")) },
255: }
256: }
257:
f988dfd28f 2022-02-13 258: pub async fn update(&self, update: Option<i32>, channel: &str, channel_id: i64, url: &str, iv_hash: Option<&str>, url_re: Option<&str>, owner: i64) -> Result<&str> {
f988dfd28f 2022-02-13 259: //where S: Into<i64> {
f988dfd28f 2022-02-13 260: //let owner = owner.into();
261:
262: let mut conn = self.pool.acquire().await
263: .with_context(|| format!("Update fetch conn:\n{:?}", &self.pool))?;
264:
265: match match update {
266: Some(id) => {
267: sqlx::query("update rsstg_source set channel_id = $2, url = $3, iv_hash = $4, owner = $5, channel = $6, url_re = $7 where source_id = $1").bind(id)
268: },
269: None => {
270: sqlx::query("insert into rsstg_source (channel_id, url, iv_hash, owner, channel, url_re) values ($1, $2, $3, $4, $5, $6)")
271: },
272: }
273: .bind(channel_id)
274: .bind(url)
275: .bind(iv_hash)
276: .bind(owner)
277: .bind(channel)
278: .bind(url_re)
279: .execute(&mut conn).await {
280: Ok(_) => Ok(match update {
281: Some(_) => "Channel updated.",
282: None => "Channel added.",
283: }),
284: Err(sqlx::Error::Database(err)) => {
285: match err.downcast::<sqlx::postgres::PgDatabaseError>().routine() {
286: Some("_bt_check_unique") => {
287: Ok("Duplicate key.")
288: },
289: Some(_) => {
290: Ok("Database error.")
291: },
292: None => {
293: Ok("No database error extracted.")
294: },
295: }
296: },
297: Err(err) => {
298: bail!("Sorry, unknown error:\n{:#?}\n", err);
299: },
300: }
301: }
302:
f988dfd28f 2022-02-13 303: async fn autofetch(&self) -> Result<()> {
f988dfd28f 2022-02-13 304: let mut delay = chrono::Duration::minutes(1);
f988dfd28f 2022-02-13 305: let mut now;
f988dfd28f 2022-02-13 306: loop {
f988dfd28f 2022-02-13 307: let mut conn = self.pool.acquire().await
f988dfd28f 2022-02-13 308: .with_context(|| format!("Autofetch fetch conn:\n{:?}", &self.pool))?;
f988dfd28f 2022-02-13 309: now = chrono::Local::now();
f988dfd28f 2022-02-13 310: let mut queue = sqlx::query("select source_id, next_fetch, owner from rsstg_order natural left join rsstg_source where next_fetch < now() + interval '1 minute';")
f988dfd28f 2022-02-13 311: .fetch_all(&mut conn).await?;
f988dfd28f 2022-02-13 312: for row in queue.iter() {
f988dfd28f 2022-02-13 313: let source_id: i32 = row.try_get("source_id")?;
f988dfd28f 2022-02-13 314: let owner: i64 = row.try_get("owner")?;
f988dfd28f 2022-02-13 315: let next_fetch: DateTime<chrono::Local> = row.try_get("next_fetch")?;
f988dfd28f 2022-02-13 316: if next_fetch < now {
f988dfd28f 2022-02-13 317: let clone = Core {
f988dfd28f 2022-02-13 318: owner_chat: telegram_bot::UserId::new(owner),
f988dfd28f 2022-02-13 319: ..self.clone()
f988dfd28f 2022-02-13 320: };
f988dfd28f 2022-02-13 321: tokio::spawn(async move {
f988dfd28f 2022-02-13 322: if let Err(err) = clone.check(&source_id, owner, true).await {
f988dfd28f 2022-02-13 323: if let Err(err) = clone.send(&format!("🛑 {:?}", err), None, None) {
f988dfd28f 2022-02-13 324: eprintln!("Check error: {}", err);
f988dfd28f 2022-02-13 325: };
f988dfd28f 2022-02-13 326: };
f988dfd28f 2022-02-13 327: });
f988dfd28f 2022-02-13 328: } else if next_fetch - now < delay {
f988dfd28f 2022-02-13 329: delay = next_fetch - now;
f988dfd28f 2022-02-13 330: }
f988dfd28f 2022-02-13 331: };
f988dfd28f 2022-02-13 332: queue.clear();
f988dfd28f 2022-02-13 333: tokio::time::sleep(delay.to_std()?).await;
f988dfd28f 2022-02-13 334: delay = chrono::Duration::minutes(1);
f988dfd28f 2022-02-13 335: }
336: }
337:
338: pub async fn list<S>(&self, owner: S) -> Result<String>
339: where S: Into<i64> {
340: let owner = owner.into();
341:
342: let mut reply: Vec<Cow<str>> = vec![];
343: let mut conn = self.pool.acquire().await
344: .with_context(|| format!("List fetch conn:\n{:?}", &self.pool))?;
345: reply.push("Channels:".into());
346: let rows = sqlx::query("select source_id, channel, enabled, url, iv_hash, url_re from rsstg_source where owner = $1 order by source_id")
347: .bind(owner)
348: .fetch_all(&mut conn).await?;
349: for row in rows.iter() {
350: let source_id: i32 = row.try_get("source_id")?;
351: let username: &str = row.try_get("channel")?;
352: let enabled: bool = row.try_get("enabled")?;
353: let url: &str = row.try_get("url")?;
354: let iv_hash: Option<&str> = row.try_get("iv_hash")?;
355: let url_re: Option<&str> = row.try_get("url_re")?;
356: reply.push(format!("\n\\#️⃣ {} \\*️⃣ `{}` {}\n🔗 `{}`", source_id, username,
357: match enabled {
358: true => "🟢 enabled",
359: false => "❌ disabled",
360: }, url).into());
361: if let Some(hash) = iv_hash {
362: reply.push(format!("IV: `{}`", hash).into());
363: }
364: if let Some(re) = url_re {
365: reply.push(format!("RE: `{}`", re).into());
366: }
367: };
368: Ok(reply.join("\n"))
369: }
370: }