1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
|
use crate::{
command,
sql::Db,
};
use std::{
borrow::Cow,
collections::{
BTreeMap,
HashSet,
},
};
use async_std::{
task,
sync::{
Arc,
Mutex
},
};
use chrono::{
DateTime,
Local,
};
use lazy_static::lazy_static;
use regex::Regex;
use reqwest::header::{
CACHE_CONTROL,
EXPIRES,
LAST_MODIFIED
};
use tgbot::{
api::Client,
handler::UpdateHandler,
types::{
Bot,
ChatPeerId,
Command,
|
>
|
<
<
<
<
<
<
>
>
>
>
|
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
|
use crate::{
command,
sql::Db,
};
use std::{
borrow::Cow,
collections::{
BTreeMap,
HashSet,
},
sync::Arc,
};
use async_compat::Compat;
use chrono::{
DateTime,
Local,
};
use lazy_static::lazy_static;
use regex::Regex;
use reqwest::header::{
CACHE_CONTROL,
EXPIRES,
LAST_MODIFIED
};
use smol::{
Timer,
lock::Mutex,
};
use tgbot::{
api::Client,
handler::UpdateHandler,
types::{
Bot,
ChatPeerId,
Command,
|
| ︙ | | | ︙ | |
67
68
69
70
71
72
73
74
75
76
77
78
79
80
|
// max_delay: u16,
pub tg: Client,
pub me: Bot,
pub db: Db,
sources: Arc<Mutex<HashSet<Arc<i32>>>>,
http_client: reqwest::Client,
}
impl Core {
pub async fn new(settings: config::Config) -> Result<Core> {
let owner_chat = ChatPeerId::from(settings.get_int("owner").stack()?);
let api_key = settings.get_string("api_key").stack()?;
let tg = Client::new(&api_key).stack()?
.with_host(settings.get_string("api_gateway").stack()?);
|
>
>
>
>
>
>
>
|
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
|
// max_delay: u16,
pub tg: Client,
pub me: Bot,
pub db: Db,
sources: Arc<Mutex<HashSet<Arc<i32>>>>,
http_client: reqwest::Client,
}
/// A single feed entry scraped from an RSS or Atom source.
///
/// Instances are built while parsing a feed and collected into a
/// `BTreeMap<DateTime<_>, Post>` keyed by publication date, so entries
/// are processed in chronological order.
#[derive(Debug, Clone)]
pub struct Post {
    /// Link to the post; the source's `url_re` rewrite (when configured)
    /// is applied to this value before the duplicate check.
    uri: String,
    /// Entry title; empty string when the feed omits it.
    title: String,
    /// Comma-separated author list; empty string when the feed omits it.
    authors: String,
    /// Entry content/summary; empty string when the feed omits it.
    summary: String,
}
impl Core {
pub async fn new(settings: config::Config) -> Result<Core> {
let owner_chat = ChatPeerId::from(settings.get_int("owner").stack()?);
let api_key = settings.get_string("api_key").stack()?;
let tg = Client::new(&api_key).stack()?
.with_host(settings.get_string("api_gateway").stack()?);
|
| ︙ | | | ︙ | |
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
|
owner_chat,
db: Db::new(&settings.get_string("pg").stack()?)?,
sources: Arc::new(Mutex::new(HashSet::new())),
http_client,
// max_delay: 60,
};
let clone = core.clone();
task::spawn(async move {
loop {
let delay = match &clone.autofetch().await {
Err(err) => {
if let Err(err) = clone.send(format!("š {err}"), None, None).await {
eprintln!("Autofetch error: {err:?}");
};
std::time::Duration::from_secs(60)
},
Ok(time) => *time,
};
task::sleep(delay).await;
}
});
Ok(core)
}
pub async fn send <S>(&self, msg: S, target: Option<ChatPeerId>, mode: Option<ParseMode>) -> Result<Message>
where S: Into<String> {
let msg = msg.into();
|
|
|
|
|
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
|
owner_chat,
db: Db::new(&settings.get_string("pg").stack()?)?,
sources: Arc::new(Mutex::new(HashSet::new())),
http_client,
// max_delay: 60,
};
let clone = core.clone();
smol::spawn(Compat::new(async move {
loop {
let delay = match &clone.autofetch().await {
Err(err) => {
if let Err(err) = clone.send(format!("š {err}"), None, None).await {
eprintln!("Autofetch error: {err:?}");
};
std::time::Duration::from_secs(60)
},
Ok(time) => *time,
};
Timer::after(delay).await;
}
})).detach();
Ok(core)
}
pub async fn send <S>(&self, msg: S, target: Option<ChatPeerId>, mode: Option<ParseMode>) -> Result<Message>
where S: Into<String> {
let msg = msg.into();
|
| ︙ | | | ︙ | |
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
|
let source = conn.get_source(*id, self.owner_chat).await.stack()?;
conn.set_scrape(*id).await.stack()?;
let destination = ChatPeerId::from(match real {
true => source.channel_id,
false => source.owner,
});
let mut this_fetch: Option<DateTime<chrono::FixedOffset>> = None;
let mut posts: BTreeMap<DateTime<chrono::FixedOffset>, String> = BTreeMap::new();
let mut builder = self.http_client.get(&source.url);
if let Some(last_scrape) = last_scrape {
builder = builder.header(LAST_MODIFIED, last_scrape.to_rfc2822());
};
let response = builder.send().await.stack()?;
{
|
|
|
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
|
let source = conn.get_source(*id, self.owner_chat).await.stack()?;
conn.set_scrape(*id).await.stack()?;
let destination = ChatPeerId::from(match real {
true => source.channel_id,
false => source.owner,
});
let mut this_fetch: Option<DateTime<chrono::FixedOffset>> = None;
let mut posts: BTreeMap<DateTime<chrono::FixedOffset>, Post> = BTreeMap::new();
let mut builder = self.http_client.get(&source.url);
if let Some(last_scrape) = last_scrape {
builder = builder.header(LAST_MODIFIED, last_scrape.to_rfc2822());
};
let response = builder.send().await.stack()?;
{
|
| ︙ | | | ︙ | |
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
|
Ok(feed) => {
for item in feed.items() {
if let Some(link) = item.link() {
let date = match item.pub_date() {
Some(feed_date) => DateTime::parse_from_rfc2822(feed_date),
None => DateTime::parse_from_rfc3339(&item.dublin_core_ext().unwrap().dates()[0]),
}.stack()?;
let url = link;
posts.insert(date, url.to_string());
}
};
},
Err(err) => match err {
rss::Error::InvalidStartTag => {
match atom_syndication::Feed::read_from(&content[..]) {
Ok(feed) => {
for item in feed.entries() {
let date = item.published().unwrap();
let url = item.links()[0].href();
posts.insert(*date, url.to_string());
};
},
Err(err) => {
bail!("Unsupported or mangled content:\n{:?}\n{err}\n{status:#?}\n", &source.url)
},
}
},
rss::Error::Eof => (),
_ => bail!("Unsupported or mangled content:\n{:?}\n{err}\n{status:#?}\n", &source.url)
}
};
for (date, url) in posts.iter() {
let post_url: Cow<str> = match source.url_re {
Some(ref x) => sedregex::ReplaceCommand::new(x).stack()?.execute(url),
None => url.into(),
};
if let Some(exists) = conn.exists(&post_url, *id).await.stack()? {
if ! exists {
if this_fetch.is_none() || *date > this_fetch.unwrap() {
this_fetch = Some(*date);
};
self.send( match &source.iv_hash {
|
|
>
>
>
|
>
>
>
>
>
|
>
>
>
|
>
>
>
>
>
|
|
|
|
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
|
Ok(feed) => {
for item in feed.items() {
if let Some(link) = item.link() {
let date = match item.pub_date() {
Some(feed_date) => DateTime::parse_from_rfc2822(feed_date),
None => DateTime::parse_from_rfc3339(&item.dublin_core_ext().unwrap().dates()[0]),
}.stack()?;
let uri = link.to_string();
let title = item.title().unwrap_or("").to_string();
let authors = item.author().unwrap_or("").to_string();
let summary = item.content().unwrap_or("").to_string();
posts.insert(date, Post{
uri,
title,
authors,
summary,
});
}
};
},
Err(err) => match err {
rss::Error::InvalidStartTag => {
match atom_syndication::Feed::read_from(&content[..]) {
Ok(feed) => {
for item in feed.entries() {
let date = item.published().unwrap();
let uri = item.links()[0].href().to_string();
let title = item.title().to_string();
let authors = item.authors().iter().map(|x| format!("{} <{:?}>", x.name(), x.email())).collect::<Vec<String>>().join(", ");
let summary = if let Some(sum) = item.summary() { sum.value.clone() } else { String::new() };
posts.insert(*date, Post{
uri,
title,
authors,
summary,
});
};
},
Err(err) => {
bail!("Unsupported or mangled content:\n{:?}\n{err}\n{status:#?}\n", &source.url)
},
}
},
rss::Error::Eof => (),
_ => bail!("Unsupported or mangled content:\n{:?}\n{err}\n{status:#?}\n", &source.url)
}
};
for (date, post) in posts.iter() {
let post_url: Cow<str> = match source.url_re {
Some(ref x) => sedregex::ReplaceCommand::new(x).stack()?.execute(&post.uri),
None => post.uri.clone().into(),
};
if let Some(exists) = conn.exists(&post_url, *id).await.stack()? {
if ! exists {
if this_fetch.is_none() || *date > this_fetch.unwrap() {
this_fetch = Some(*date);
};
self.send( match &source.iv_hash {
|
| ︙ | | | ︙ | |
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
|
let mut conn = self.db.begin().await.stack()?;
match conn.get_one(owner, source_id).await {
Ok(Some(source)) => source.to_string(),
Ok(None) => "Source not found in database.stack()?".to_string(),
Err(err) => format!("Failed to fetch source data:\n{err}"),
}
};
task::spawn(async move {
if let Err(err) = clone.check(source_id, true, Some(last_scrape)).await {
if let Err(err) = clone.send(&format!("{source}\n\nš {}", encode(&err.to_string())), None, Some(ParseMode::MarkdownV2)).await {
eprintln!("Check error: {err}");
// clone.disable(&source_id, owner).await.unwrap();
};
};
});
}
} else if next_fetch - now < delay {
delay = next_fetch - now;
}
}
};
delay.to_std().stack()
|
|
|
|
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
|
let mut conn = self.db.begin().await.stack()?;
match conn.get_one(owner, source_id).await {
Ok(Some(source)) => source.to_string(),
Ok(None) => "Source not found in database.stack()?".to_string(),
Err(err) => format!("Failed to fetch source data:\n{err}"),
}
};
smol::spawn(Compat::new(async move {
if let Err(err) = clone.check(source_id, true, Some(last_scrape)).await {
if let Err(err) = clone.send(&format!("{source}\n\nš {}", encode(&err.to_string())), None, Some(ParseMode::MarkdownV2)).await {
eprintln!("Check error: {err}");
// clone.disable(&source_id, owner).await.unwrap();
};
};
})).detach();
}
} else if next_fetch - now < delay {
delay = next_fetch - now;
}
}
};
delay.to_std().stack()
|
| ︙ | | | ︙ | |