diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 58adfa90c..a9826addb 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -95,6 +95,7 @@ jobs:
             {
               echo "leak:dyld4::RuntimeState"
               echo "leak:fetchInitializingClassList"
+              echo "leak:std::sys::pal::unix::stack_overflow::imp::init"
             } > suppressions.txt
             export LSAN_OPTIONS="suppressions=$(pwd)/suppressions.txt"
           fi
diff --git a/data-url/src/lib.rs b/data-url/src/lib.rs
index d7ceeb111..00689e3d4 100644
--- a/data-url/src/lib.rs
+++ b/data-url/src/lib.rs
@@ -124,7 +124,7 @@ impl<'a> DataUrl<'a> {
 /// The URL’s fragment identifier (after `#`)
 pub struct FragmentIdentifier<'a>(&'a str);
 
-impl<'a> FragmentIdentifier<'a> {
+impl FragmentIdentifier<'_> {
     /// Like in a parsed URL
     pub fn to_percent_encoded(&self) -> String {
         let mut string = String::new();
@@ -165,10 +165,10 @@ fn pretend_parse_data_url(input: &str) -> Option<&str> {
         let mut iter = bytes
             .by_ref()
             .filter(|&byte| !matches!(byte, b'\t' | b'\n' | b'\r'));
-        require!(iter.next()?.to_ascii_lowercase() == b'd');
-        require!(iter.next()?.to_ascii_lowercase() == b'a');
-        require!(iter.next()?.to_ascii_lowercase() == b't');
-        require!(iter.next()?.to_ascii_lowercase() == b'a');
+        require!(iter.next()?.eq_ignore_ascii_case(&b'd'));
+        require!(iter.next()?.eq_ignore_ascii_case(&b'a'));
+        require!(iter.next()?.eq_ignore_ascii_case(&b't'));
+        require!(iter.next()?.eq_ignore_ascii_case(&b'a'));
         require!(iter.next()? == b':');
     }
     let bytes_consumed = left_trimmed.len() - bytes.len();
@@ -256,10 +256,10 @@ fn remove_base64_suffix(s: &str) -> Option<&str> {
         require!(iter.next()? == b'4');
         require!(iter.next()? == b'6');
-        require!(iter.next()?.to_ascii_lowercase() == b'e');
-        require!(iter.next()?.to_ascii_lowercase() == b's');
-        require!(iter.next()?.to_ascii_lowercase() == b'a');
-        require!(iter.next()?.to_ascii_lowercase() == b'b');
+        require!(iter.next()?.eq_ignore_ascii_case(&b'e'));
+        require!(iter.next()?.eq_ignore_ascii_case(&b's'));
+        require!(iter.next()?.eq_ignore_ascii_case(&b'a'));
+        require!(iter.next()?.eq_ignore_ascii_case(&b'b'));
         require!(iter.skip_while(|&byte| byte == b' ').next()? == b';');
     }
     Some(&s[..bytes.len()])
diff --git a/form_urlencoded/src/lib.rs b/form_urlencoded/src/lib.rs
index 1d68579b7..1d9582249 100644
--- a/form_urlencoded/src/lib.rs
+++ b/form_urlencoded/src/lib.rs
@@ -104,7 +104,7 @@ pub struct ParseIntoOwned<'a> {
     inner: Parse<'a>,
 }
 
-impl<'a> Iterator for ParseIntoOwned<'a> {
+impl Iterator for ParseIntoOwned<'_> {
     type Item = (String, String);
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -195,7 +195,7 @@ impl Target for String {
     type Finished = Self;
 }
 
-impl<'a> Target for &'a mut String {
+impl Target for &mut String {
     fn as_mut_string(&mut self) -> &mut String {
         self
     }
diff --git a/idna/src/punycode.rs b/idna/src/punycode.rs
index 842d81940..7194c32c6 100644
--- a/idna/src/punycode.rs
+++ b/idna/src/punycode.rs
@@ -277,7 +277,7 @@ where
     phantom: PhantomData,
 }
 
-impl<'a, T: PunycodeCodeUnit + Copy, C: PunycodeCaller> Iterator for Decode<'a, T, C> {
+impl<T: PunycodeCodeUnit + Copy, C: PunycodeCaller> Iterator for Decode<'_, T, C> {
     type Item = char;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -309,7 +309,7 @@ impl<'a, T: PunycodeCodeUnit + Copy, C: PunycodeCaller> Iterator for Decode<'a,
     }
 }
 
-impl<'a, T: PunycodeCodeUnit + Copy, C: PunycodeCaller> ExactSizeIterator for Decode<'a, T, C> {
+impl<T: PunycodeCodeUnit + Copy, C: PunycodeCaller> ExactSizeIterator for Decode<'_, T, C> {
     fn len(&self) -> usize {
         self.len - self.position
     }
diff --git a/percent_encoding/src/lib.rs b/percent_encoding/src/lib.rs
index 11b6296e6..f4b23af61 100644
--- a/percent_encoding/src/lib.rs
+++ b/percent_encoding/src/lib.rs
@@ -181,7 +181,7 @@ impl<'a> Iterator for PercentEncode<'a> {
     }
 }
 
-impl<'a> fmt::Display for PercentEncode<'a> {
+impl fmt::Display for PercentEncode<'_> {
     fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
         for c in (*self).clone() {
             formatter.write_str(c)?
@@ -257,7 +257,7 @@ fn after_percent_sign(iter: &mut slice::Iter<'_, u8>) -> Option<u8> {
     Some(h as u8 * 0x10 + l as u8)
 }
 
-impl<'a> Iterator for PercentDecode<'a> {
+impl Iterator for PercentDecode<'_> {
     type Item = u8;
 
     fn next(&mut self) -> Option<u8> {
diff --git a/url/src/host.rs b/url/src/host.rs
index f21a253d5..f45232c5d 100644
--- a/url/src/host.rs
+++ b/url/src/host.rs
@@ -64,7 +64,7 @@ pub enum Host<S = String> {
     Ipv6(Ipv6Addr),
 }
 
-impl<'a> Host<&'a str> {
+impl Host<&str> {
     /// Return a copy of `self` that owns an allocated `String` but does not borrow an `&Url`.
     pub fn to_owned(&self) -> Host<String> {
         match *self {
diff --git a/url/src/lib.rs b/url/src/lib.rs
index e015acce7..96fa1eee7 100644
--- a/url/src/lib.rs
+++ b/url/src/lib.rs
@@ -448,7 +448,7 @@ impl Url {
     /// let base = Url::parse("https://alice.com/a")?;
     /// let url = base.join("http://eve.com/b")?;
     /// assert_eq!(url.as_str(), "http://eve.com/b"); // http instead of https
-
+    ///
     /// # Ok(())
     /// # }
     /// # run().unwrap();
     /// ```
@@ -1492,7 +1492,6 @@ impl Url {
     /// # }
     /// # run().unwrap();
     /// ```
-
     #[inline]
     pub fn query_pairs(&self) -> form_urlencoded::Parse<'_> {
         form_urlencoded::parse(self.query().unwrap_or("").as_bytes())
@@ -1555,7 +1554,7 @@ impl Url {
     /// # fn run() -> Result<(), ParseError> {
     /// let mut url = Url::parse("https://example.com/data.csv")?;
     /// assert_eq!(url.as_str(), "https://example.com/data.csv");
-
+    ///
     /// url.set_fragment(Some("cell=4,1-6,2"));
     /// assert_eq!(url.as_str(), "https://example.com/data.csv#cell=4,1-6,2");
     /// assert_eq!(url.fragment(), Some("cell=4,1-6,2"));
@@ -2674,8 +2673,7 @@ impl Url {
             fragment_start,
         };
         if cfg!(debug_assertions) {
-            url.check_invariants()
-                .map_err(|reason| Error::custom(reason))?
+            url.check_invariants().map_err(Error::custom)?
         }
         Ok(url)
     }
@@ -2892,7 +2890,7 @@ impl<'de> serde::Deserialize<'de> for Url {
         struct UrlVisitor;
 
-        impl<'de> Visitor<'de> for UrlVisitor {
+        impl Visitor<'_> for UrlVisitor {
             type Value = Url;
 
             fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
@@ -3177,7 +3175,7 @@ impl<'a> form_urlencoded::Target for UrlQuery<'a> {
     type Finished = &'a mut Url;
 }
 
-impl<'a> Drop for UrlQuery<'a> {
+impl Drop for UrlQuery<'_> {
     fn drop(&mut self) {
         if let Some(url) = self.url.take() {
             url.restore_already_parsed_fragment(self.fragment.take())
diff --git a/url/src/parser.rs b/url/src/parser.rs
index e26b50281..416484f19 100644
--- a/url/src/parser.rs
+++ b/url/src/parser.rs
@@ -301,7 +301,7 @@ impl Pattern for char {
     }
 }
 
-impl<'a> Pattern for &'a str {
+impl Pattern for &str {
     fn split_prefix(self, input: &mut Input) -> bool {
         for c in self.chars() {
             if input.next() != Some(c) {
@@ -318,7 +318,7 @@ impl<F: FnMut(char) -> bool> Pattern for F {
     }
 }
 
-impl<'i> Iterator for Input<'i> {
+impl Iterator for Input<'_> {
     type Item = char;
     fn next(&mut self) -> Option<char> {
         self.chars
diff --git a/url/src/path_segments.rs b/url/src/path_segments.rs
index 5cc8e7758..e6363c5c8 100644
--- a/url/src/path_segments.rs
+++ b/url/src/path_segments.rs
@@ -67,14 +67,14 @@ pub fn new(url: &mut Url) -> PathSegmentsMut<'_> {
     }
 }
 
-impl<'a> Drop for PathSegmentsMut<'a> {
+impl Drop for PathSegmentsMut<'_> {
     fn drop(&mut self) {
         self.url
             .restore_after_path(self.old_after_path_position, &self.after_path)
     }
 }
 
-impl<'a> PathSegmentsMut<'a> {
+impl PathSegmentsMut<'_> {
     /// Remove all segments in the path, leaving the minimal `url.path() == "/"`.
     ///
     /// Returns `&mut Self` so that method calls can be chained.