100
101
102
103
104
105
106
107
108
109
110
111
112
113
|
}
impl Drop for Token {
    /// Releases this token's claim on the shared running-set.
    ///
    /// Dropping the token removes `my_id` from the shared `running` set,
    /// unblocking future operations keyed by the same id. The async lock
    /// is driven to completion here via `smol::block_on`.
    fn drop(&mut self) {
        smol::block_on(async {
            self.running.lock_arc().await.remove(&self.my_id);
        })
    }
}
|
>
>
|
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
|
}
impl Drop for Token {
    /// Releases this token's claim on the shared running-set.
    ///
    /// On drop, the token's identifier is removed from the shared `running`
    /// set so that future operations for the same id may proceed.
    ///
    /// NOTE(review): this calls `smol::block_on` from `drop`, which can end
    /// up nested inside another `block_on`. Reported as tested and working,
    /// but confirm nested `block_on` is safe for the executor in use.
    fn drop(&mut self) {
        smol::block_on(async {
            let mut running = self.running.lock_arc().await;
            running.remove(&self.my_id);
        })
    }
}
|
226
227
228
229
230
231
232
233
234
235
236
237
238
239
|
// Posts collected keyed by publication timestamp; BTreeMap keeps them ordered.
let mut posts: BTreeMap<DateTime<chrono::FixedOffset>, Post> = BTreeMap::new();
let mut builder = self.http_client.get(&source.url);
// Attach the last scrape time to the outgoing request.
// NOTE(review): `Last-Modified` is normally a *response* header; the
// conditional-request header is `If-Modified-Since` — confirm intent.
if let Some(last_scrape) = last_scrape {
builder = builder.header(LAST_MODIFIED, last_scrape.to_rfc2822());
};
let response = builder.send().await.stack()?;
// Log any caching hints (Expires / Cache-Control) the server sent back.
{
let headers = response.headers();
let expires = headers.get(EXPIRES);
let cache = headers.get(CACHE_CONTROL);
if expires.is_some() || cache.is_some() {
println!("{} {} {:?} {:?} {:?}", Local::now().to_rfc2822(), &source.url, last_scrape, expires, cache);
}
|
>
|
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
|
// Posts collected keyed by publication timestamp; BTreeMap keeps them ordered.
let mut posts: BTreeMap<DateTime<chrono::FixedOffset>, Post> = BTreeMap::new();
let mut builder = self.http_client.get(&source.url);
// Attach the last scrape time to the outgoing request.
// NOTE(review): `Last-Modified` is normally a *response* header; the
// conditional-request header is `If-Modified-Since` — confirm intent.
if let Some(last_scrape) = last_scrape {
builder = builder.header(LAST_MODIFIED, last_scrape.to_rfc2822());
};
let response = builder.send().await.stack()?;
// Debug builds only: log any caching hints (Expires / Cache-Control)
// the server sent back.
#[cfg(debug_assertions)]
{
let headers = response.headers();
let expires = headers.get(EXPIRES);
let cache = headers.get(CACHE_CONTROL);
if expires.is_some() || cache.is_some() {
println!("{} {} {:?} {:?} {:?}", Local::now().to_rfc2822(), &source.url, last_scrape, expires, cache);
}
|
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
|
};
},
// RSS parsing failed — when the root tag wasn't RSS, retry the same
// bytes as an Atom feed.
Err(err) => match err {
rss::Error::InvalidStartTag => {
match atom_syndication::Feed::read_from(&content[..]) {
Ok(feed) => {
for item in feed.entries() {
// NOTE(review): panics if a feed entry has no published date.
let date = item.published().unwrap();
// NOTE(review): indexing panics if the entry has no links.
let uri = item.links()[0].href().to_string();
let title = item.title().to_string();
// NOTE(review): `{:?}` prints the email's Debug representation
// (wrapper syntax included) — confirm that's the intended output.
let authors = item.authors().iter().map(|x| format!("{} <{:?}>", x.name(), x.email())).collect::<Vec<String>>().join(", ");
// Missing summaries become the empty string rather than an error.
let summary = if let Some(sum) = item.summary() { sum.value.clone() } else { String::new() };
posts.insert(*date, Post{
uri,
title,
authors,
|
>
>
>
>
>
>
|
>
>
|
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
|
};
},
// RSS parsing failed — when the root tag wasn't RSS, retry the same
// bytes as an Atom feed.
Err(err) => match err {
rss::Error::InvalidStartTag => {
match atom_syndication::Feed::read_from(&content[..]) {
Ok(feed) => {
for item in feed.entries() {
// Entries without a publish date surface as an error, not a panic.
let date = item.published()
.stack_err("Feed item missing publishing date.")?;
// First link is taken as the post URI; link-less entries bail
// with an error instead of panicking on the index.
let uri = {
let links = item.links();
if links.is_empty() {
bail!("Feed item missing post links.");
} else {
links[0].href().to_string()
}
};
let title = item.title().to_string();
// NOTE(review): `{:?}` prints the email's Debug representation
// (wrapper syntax included) — confirm that's the intended output.
let authors = item.authors().iter().map(|x| format!("{} <{:?}>", x.name(), x.email())).collect::<Vec<String>>().join(", ");
// Missing summaries become the empty string rather than an error.
let summary = if let Some(sum) = item.summary() { sum.value.clone() } else { String::new() };
posts.insert(*date, Post{
uri,
title,
authors,
|