From: Simon Tatham
Date: Thu, 4 Jan 2024 10:42:36 +0000 (+0000)
Subject: Fix extending user-list feeds.
X-Git-Url: https://www.chiark.greenend.org.uk/ucgi/~ian/git?a=commitdiff_plain;h=1ce1b877fd137ee53a1fba16ead1ca52fb3ad660;p=mastodonochrome.git

Fix extending user-list feeds.

In lists of followers, favers, boosters etc, the ids you have to pass
in the 'max_id' or 'since_id' URL query parameters are not the same
as the account ids you get back. That's not surprising, now I think
about it, since the chronological order in which users did things has
nothing to do with any intrinsic ordering on the users themselves.

For status feeds, they seemed to match, so I got away without having
to pay attention to the Link: headers in the HTTP responses. But now
I realise I do have to worry about that after all.

Happily, there's a handy Rust crate that saves me having to do the
work of parsing that complicated header type!
---

diff --git a/Cargo.toml b/Cargo.toml
index 84977f0..2bbdf2e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -11,6 +11,7 @@ crossterm = "0.27.0"
 html2text = { version = "0.10.1", features = ["css"] }
 itertools = "0.12.0"
 lazy_static = "1.4.0"
+parse_link_header = { version = "0.3.3", features = ["url"] }
 ratatui = "0.25.0"
 regex = "1.10.2"
 reqwest = { version = "0.11.23", features = ["blocking"] }
diff --git a/src/client.rs b/src/client.rs
index 46ce4a5..5c4d5b0 100644
--- a/src/client.rs
+++ b/src/client.rs
@@ -51,6 +51,9 @@ pub struct StreamUpdate {
 pub struct Feed {
     pub ids: VecDeque<String>, // ids, whether of statuses, accounts or what
     pub origin: isize,
+
+    extend_past: Option<HashMap<String, String>>,
+    extend_future: Option<HashMap<String, String>>,
 }
 
 pub struct Client {
@@ -180,6 +183,9 @@ impl ReqParam for String {
 impl ReqParam for &String {
     fn param_value(self) -> String { self.clone() }
 }
+impl ReqParam for i32 {
+    fn param_value(self) -> String { self.to_string() }
+}
 impl ReqParam for bool {
     fn param_value(self) -> String {
         match self {
@@ -640,17 +646,29 @@ impl Client {
         };
 
         let req = match ext {
-            FeedExtend::Initial => req,
+            FeedExtend::Initial => req.param("limit", 32),
             FeedExtend::Past => if let Some(ref feed) = self.feeds.get(&id) {
-                match feed.ids.front() {
-                    None => req,
-                    Some(id) => req.param("max_id", id),
+                match feed.extend_past {
+                    None => return Ok(false),
+                    Some(ref params) => {
+                        let mut req = req;
+                        for (key, value) in params {
+                            req = req.param(key, value);
+                        }
+                        req
+                    }
                 }
             } else { req },
             FeedExtend::Future => if let Some(ref feed) = self.feeds.get(&id) {
-                match feed.ids.back() {
-                    None => req,
-                    Some(id) => req.param("min_id", id),
+                match feed.extend_future {
+                    None => return Ok(false),
+                    Some(ref params) => {
+                        let mut req = req;
+                        for (key, value) in params {
+                            req = req.param(key, value);
+                        }
+                        req
+                    }
                 }
             } else { req },
         };
@@ -661,6 +679,13 @@ impl Client {
             return Err(ClientError::UrlError(
                 url.clone(), rspstatus.to_string()));
         }
+
+        // Keep the Link: headers after we consume the response, for
+        // use later once we've constructed a Feed
+        let link_headers: Vec<_> = rsp.headers()
+            .get_all(reqwest::header::LINK)
+            .iter().cloned().collect();
+
         let body = rsp.text()?;
 
         // Decode the JSON response as a different kind of type
@@ -736,6 +761,8 @@ impl Client {
                 self.feeds.insert(id.clone(), Feed {
                     ids,
                     origin: 0,
+                    extend_past: None,
+                    extend_future: None,
                 });
             }
             FeedExtend::Future => {
@@ -753,6 +780,31 @@ impl Client {
             }
         }
 
+        let feed = self.feeds.get_mut(id).unwrap();
+        for linkhdr in link_headers {
+            let linkhdr_str = match linkhdr.to_str() {
+                Ok(s) => Ok(s),
+                Err(e) => Err(ClientError::UrlError(
+                    url.clone(), e.to_string())),
+            }?;
+            let links = match parse_link_header::parse(linkhdr_str) {
+                Ok(links) => Ok(links),
+                Err(e) => Err(ClientError::UrlError(
+                    url.clone(), e.to_string())),
+            }?;
+            for (rel, link) in links {
+                match rel.as_deref() {
+                    // Oh, you think time flows _that_ way?
+                    // Confusingly, the Mastodon protocol considers
+                    // "next" to be heading into the past and "prev"
+                    // the future.
+                    Some("next") => feed.extend_past = Some(link.queries),
+                    Some("prev") => feed.extend_future = Some(link.queries),
+                    _ => (),
+                }
+            }
+        }
+
         Ok(any_new)
     }
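
For illustration, here is a minimal, self-contained sketch (not part of
the patch above) of what the parse_link_header crate produces for a
Mastodon-style Link: header. The hostname, endpoint and ids in the header
value are invented; only parse() and the Link.queries field are taken from
the code in the patch.

    fn main() {
        // An invented Link: header of the shape a followers/favourited_by/
        // reblogged_by response might carry: rel="next" pages further into
        // the past, rel="prev" towards the future.
        let header = "<https://example.social/api/v1/accounts/1/followers?max_id=123>; rel=\"next\", \
                      <https://example.social/api/v1/accounts/1/followers?since_id=456>; rel=\"prev\"";

        let links = parse_link_header::parse(header).expect("well-formed Link header");
        for (rel, link) in links {
            // link.queries holds the query parameters of each alternative
            // URL, ready to be replayed verbatim on the next request, e.g.
            // {"max_id": "123"} for rel="next" and {"since_id": "456"} for
            // rel="prev".
            println!("{:?}: {:?}", rel, link.queries);
        }
    }

The point of keeping the whole query map, rather than just an id, is that
the client never has to guess which parameter name the server wants for a
given feed type; it simply replays whatever the server handed back.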