
Introduce clippy #13

Merged · 1 commit · Mar 31, 2024

Changes from all commits
5 changes: 5 additions & 0 deletions .github/workflows/ci.yml
@@ -2,6 +2,9 @@ name: continuous-integration
 
 on: [push, pull_request]
 
+env:
+  RUSTFLAGS: "-Dwarnings"
+
 jobs:
   build_and_test:
     name: Build and test
@@ -28,3 +31,5 @@ jobs:
       run: cargo build ${{ matrix.features }}
     - name: Test
       run: cargo test ${{ matrix.features }}
+    - name: Clippy
+      run: cargo clippy --all-targets --all-features
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -6,9 +6,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
 ## [Unreleased]
+### Added
+- Clippy step to CI
+
 ### Changed
 - Updated dependencies
+
+### Fixed
+- All `clippy::all` warnings and some pedantic warnings too
 
 ## [1.0.1] - 2023-11-11
 ### Changed
 - Updated dependencies
13 changes: 6 additions & 7 deletions examples/count_wikidata.rs
@@ -35,7 +35,7 @@ fn process_primitive_block(block: pbf::PrimitiveBlock) -> Result<(), Error> {
     }
 
     if let Some(dense_nodes) = &group.dense {
-        let nodes = DenseNodeReader::new(&dense_nodes)?;
+        let nodes = DenseNodeReader::new(dense_nodes)?;
 
         for node in nodes {
             let tags = new_dense_tag_reader(string_table, node?.key_value_indices);
@@ -54,17 +54,16 @@ fn parse_block(block_parser: &mut BlockParser, raw_block: RawBlock) {
     match block_parser.parse_block(raw_block) {
         Ok(block) => match block {
             Block::Header(header_block) => process_header_block(header_block),
-            Block::Primitive(primitive_block) => match process_primitive_block(primitive_block) {
-                Err(error) => {
-                    error!("Error during processing a primitive block: {:?}", error)
+            Block::Primitive(primitive_block) => {
+                if let Err(error) = process_primitive_block(primitive_block) {
+                    error!("Error during processing a primitive block: {error:?}")
                 }
-                _ => {}
-            },
+            }
             Block::Unknown(unknown_block) => {
                 warn!("Skipping unknown block of size {}", unknown_block.len())
             }
         },
-        Err(error) => error!("Error during parsing a block: {:?}", error),
+        Err(error) => error!("Error during parsing a block: {error:?}"),
     }
 }
 
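The parse_block change above is clippy's single_match guidance in action: a match with one meaningful arm and a `_ => {}` catch-all reads more directly as if let. A minimal standalone sketch of the before and after shapes (hypothetical handle function, not code from this crate):

fn handle(result: Result<(), String>) {
    // Before: a two-arm match in which only one arm does anything.
    // match result {
    //     Err(error) => println!("failed: {error:?}"),
    //     _ => {}
    // }

    // After: if let states "act only on this pattern" directly.
    if let Err(error) = result {
        println!("failed: {error:?}");
    }
}

fn main() {
    handle(Err("boom".to_string()));
}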
9 changes: 3 additions & 6 deletions examples/print_header.rs
@@ -14,12 +14,9 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
     while let Some(raw_block) = read_blob(&mut file) {
         let block = block_parser.parse_block(raw_block?)?;
 
-        match block {
-            Block::Header(header_block) => {
-                println!("{:#?}", header_block);
-                break;
-            }
-            _ => {}
+        if let Block::Header(header_block) = block {
+            println!("{:#?}", header_block);
+            break;
         }
     }
 
65 changes: 36 additions & 29 deletions src/dense.rs
@@ -37,12 +37,15 @@ struct DeltaCodedValues {
     user_sid: u32,
 }
 
+type IdDeltaIt<'a> = Iter<'a, i64>;
+type LatLonDeltaIt<'a> = Zip<Iter<'a, i64>, Iter<'a, i64>>;
+
 /// Utility for reading delta-encoded dense nodes.
 pub struct DenseNodeReader<'a> {
     data: &'a pbf::DenseNodes,
-    data_it: Enumerate<Zip<Iter<'a, i64>, Zip<Iter<'a, i64>, Iter<'a, i64>>>>, // (data_idx, (id_delta, (lat_delta, lon_delta))) iterator
-    key_value_idx: usize,      // Starting index of the next node's keys/values
-    current: DeltaCodedValues, // Current values of delta coded fields
+    data_it: Enumerate<Zip<IdDeltaIt<'a>, LatLonDeltaIt<'a>>>, // (data_idx, (id_delta, (lat_delta, lon_delta))) iterator
+    key_value_idx: usize,                                      // Starting index of the next node's keys/values
+    current: DeltaCodedValues,                                 // Current values of delta coded fields
 }
 
 impl<'a> DenseNodeReader<'a> {
@@ -70,6 +73,10 @@ impl<'a> DenseNodeReader<'a> {
     /// Ok(())
     /// }
     /// ```
+    ///
+    /// # Errors
+    ///
+    /// Will return `Err` if the latitude, longitude and ID counts in `data` do not match.
    pub fn new(data: &'a pbf::DenseNodes) -> Result<Self, Error> {
        if data.lat.len() != data.id.len() || data.lon.len() != data.id.len() {
            Err(Error::LogicError(format!(
@@ -131,12 +138,12 @@ impl<'a> Iterator for DenseNodeReader<'a> {
                 };
 
                 Some(pbf::Info {
-                    version: dense_info.version.get(data_idx).cloned(),
+                    version: dense_info.version.get(data_idx).copied(),
                     timestamp: delta_decode(&mut self.current.timestamp, dense_info.timestamp.get(data_idx)),
                     changeset: delta_decode(&mut self.current.changeset, dense_info.changeset.get(data_idx)),
                     uid: delta_decode(&mut self.current.uid, dense_info.uid.get(data_idx)),
                     user_sid,
-                    visible: dense_info.visible.get(data_idx).cloned(),
+                    visible: dense_info.visible.get(data_idx).copied(),
                 })
             }
             None => None,
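A note on what delta_decode is undoing in this hunk: dense nodes store ids, coordinates, timestamps, changesets and uids as differences from the previous entry, so the reader keeps one running value per field (the DeltaCodedValues struct above). A self-contained sketch of the decoding idea, assuming nothing about this crate's actual delta_decode signature:

// Delta decoding: each stored value is an offset from the previously
// decoded one, which keeps the varint-encoded numbers small.
fn delta_decode_all(deltas: &[i64]) -> Vec<i64> {
    let mut current: i64 = 0;
    deltas
        .iter()
        .map(|&delta| {
            current += delta;
            current
        })
        .collect()
}

fn main() {
    // Node ids 100, 101, 105 would be stored as deltas 100, 1, 4.
    assert_eq!(delta_decode_all(&[100, 1, 4]), vec![100, 101, 105]);
}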
@@ -176,6 +183,29 @@
     }
 }
 
+/// Constructs a new `TagReader` from a dense key/value index slice, and a corresponding string table.
+///
+/// See [`DenseNodeReader::new`] and [`DenseNode::key_value_indices`].
+pub fn new_dense_tag_reader<'a>(
+    string_table: &'a pbf::StringTable,
+    key_value_indices: &'a [i32],
+) -> TagReader<'a, impl Iterator<Item = (Result<usize, Error>, Result<usize, Error>)> + 'a> {
+    TagReader {
+        string_table,
+        iter: key_value_indices.chunks_exact(2).map(|s| {
+            let convert_idx = |index: i32| -> Result<usize, Error> {
+                if let Ok(index) = TryInto::<usize>::try_into(index) {
+                    Ok(index)
+                } else {
+                    Err(Error::LogicError(format!("string table index {index} is invalid")))
+                }
+            };
+
+            (convert_idx(s[0]), convert_idx(s[1]))
+        }),
+    }
+}
+
 #[cfg(test)]
 mod dense_node_reader_tests {
     use super::*;
@@ -200,7 +230,7 @@ mod dense_node_reader_tests {
         };
 
         let reader = DenseNodeReader::new(&dense_nodes).expect("dense node reader should be created on valid data");
-        let mut result: Vec<DenseNode> = reader.filter_map(|r| r.ok()).collect();
+        let mut result: Vec<DenseNode> = reader.filter_map(Result::ok).collect();
 
         assert_eq!(result.len(), 2);
         let first = &mut result[0];
@@ -270,26 +300,3 @@ mod dense_node_reader_tests {
         assert!(next.unwrap().is_err());
     }
 }
-
-/// Constructs a new `TagReader` from a dense key/value index slice, and a corresponding string table.
-///
-/// See [`DenseNodeReader::new`] and [`DenseNode::key_value_indices`].
-pub fn new_dense_tag_reader<'a>(
-    string_table: &'a pbf::StringTable,
-    key_value_indices: &'a [i32],
-) -> TagReader<'a, impl Iterator<Item = (Result<usize, Error>, Result<usize, Error>)> + 'a> {
-    TagReader {
-        string_table,
-        iter: key_value_indices.chunks_exact(2).map(|s| {
-            let convert_idx = |index: i32| -> Result<usize, Error> {
-                if let Ok(index) = TryInto::<usize>::try_into(index) {
-                    Ok(index)
-                } else {
-                    Err(Error::LogicError(format!("string table index {} is invalid", index)))
-                }
-            };
-
-            (convert_idx(s[0]), convert_idx(s[1]))
-        }),
-    }
-}
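Worth noting for readers of new_dense_tag_reader: the key/value indices arrive interleaved as [k1, v1, k2, v2, ...], so chunks_exact(2) produces one (key, value) index pair per tag and silently drops a trailing unpaired element. A standalone illustration of that slicing, using plain integers instead of string-table indices:

fn main() {
    // Interleaved key/value indices; the trailing 5 has no partner.
    let interleaved = [1, 2, 3, 4, 5];
    let pairs: Vec<(i32, i32)> = interleaved
        .chunks_exact(2) // yields &[1, 2] and &[3, 4]; the remainder [5] is ignored
        .map(|pair| (pair[0], pair[1]))
        .collect();
    assert_eq!(pairs, vec![(1, 2), (3, 4)]);
}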
81 changes: 39 additions & 42 deletions src/lib.rs
@@ -48,7 +48,7 @@ pub enum Error {
 
 impl std::fmt::Display for Error {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }
 
@@ -108,8 +108,6 @@ pub fn read_blob<Input>(pbf: &mut Input) -> Option<Result<RawBlock, Error>>
 where
     Input: std::io::Read,
 {
-    use pbf::BlobHeader;
-
     let mut header_size_buffer = [0u8; 4];
 
     if let Err(error) = pbf.read_exact(&mut header_size_buffer) {
@@ -119,41 +117,52 @@ where
         };
     }
 
-    let blob_header_size = i32::from_be_bytes(header_size_buffer);
+    Some(read_blob_inner(pbf, header_size_buffer))
+}
+
+fn read_blob_inner<Input>(pbf: &mut Input, header_size_buffer: [u8; 4]) -> Result<RawBlock, Error>
+where
+    Input: std::io::Read,
+{
+    use pbf::BlobHeader;
+
+    let blob_header_size: usize = i32::from_be_bytes(header_size_buffer)
+        .try_into()
+        .map_err(|_err| Error::InvalidBlobHeader)?;
 
-    if !(0..64 * 1024).contains(&blob_header_size) {
-        return Some(Err(Error::InvalidBlobHeader));
+    if blob_header_size >= 64 * 1024 {
+        return Err(Error::InvalidBlobHeader);
     }
 
-    let mut blob = vec![0u8; blob_header_size as usize];
+    let mut blob = vec![0u8; blob_header_size];
     if let Err(error) = pbf.read_exact(&mut blob) {
-        return Some(Err(Error::IoError(error)));
+        return Err(Error::IoError(error));
    }
 
     let blob_header = match BlobHeader::decode(&*blob) {
         Ok(blob_header) => blob_header,
-        Err(error) => return Some(Err(Error::PbfParseError(error))),
+        Err(error) => return Err(Error::PbfParseError(error)),
     };
 
     let block_type = BlockType::from(blob_header.r#type.as_ref());
-    let blob_size = blob_header.datasize;
+    let blob_size: usize = blob_header.datasize.try_into().map_err(|_err| Error::InvalidBlobData)?;
 
-    if !(0..32 * 1024 * 1024).contains(&blob_size) {
-        return Some(Err(Error::InvalidBlobData));
+    if blob_size >= 32 * 1024 * 1024 {
+        return Err(Error::InvalidBlobData);
     }
 
-    blob.resize_with(blob_size as usize, Default::default);
+    blob.resize_with(blob_size, Default::default);
 
     if let Err(error) = pbf.read_exact(&mut blob) {
-        return Some(Err(Error::IoError(error)));
+        return Err(Error::IoError(error));
     }
 
     let raw_block = RawBlock {
         r#type: block_type,
         data: blob,
     };
 
-    Some(Ok(raw_block))
+    Ok(raw_block)
 }
 
 /// Blob compression method.
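The split into read_blob and read_blob_inner is a common shape for this situation: the outer function keeps returning Option<Result<RawBlock, Error>> so that clean end-of-input maps to None, while the inner function returns a plain Result, which lets ? and map_err replace the Some(Err(...)) returns. A self-contained sketch of the pattern, using a hypothetical length-prefixed record format rather than the PBF layout:

use std::io::{ErrorKind, Read};

// Outer: None means clean EOF before a new record; Some(Err) is a real failure.
fn read_record<R: Read>(input: &mut R) -> Option<Result<Vec<u8>, std::io::Error>> {
    let mut len_buf = [0u8; 4];
    if let Err(error) = input.read_exact(&mut len_buf) {
        return match error.kind() {
            ErrorKind::UnexpectedEof => None,
            _ => Some(Err(error)),
        };
    }
    Some(read_record_inner(input, len_buf))
}

// Inner: a plain Result, so `?` keeps the happy path flat.
fn read_record_inner<R: Read>(input: &mut R, len_buf: [u8; 4]) -> Result<Vec<u8>, std::io::Error> {
    let len = u32::from_be_bytes(len_buf) as usize;
    let mut data = vec![0u8; len];
    input.read_exact(&mut data)?;
    Ok(data)
}

fn main() -> Result<(), std::io::Error> {
    let bytes = [0, 0, 0, 2, 0xAB, 0xCD]; // one record of length 2
    let mut cursor = &bytes[..];
    while let Some(record) = read_record(&mut cursor) {
        println!("{:?}", record?);
    }
    Ok(())
}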
@@ -193,7 +202,7 @@ impl Decompressor for DefaultDecompressor {
     fn decompress(method: CompressionMethod, input: &[u8], output: &mut [u8]) -> Result<(), DecompressionError> {
         match method {
             CompressionMethod::Zlib => {
-                let mut decoder = ZlibDecoder::new(input.as_ref());
+                let mut decoder = ZlibDecoder::new(input);
 
                 match decoder.read_exact(output) {
                     Ok(_) => Ok(()),
@@ -236,6 +245,10 @@ impl<D: Decompressor> BlockParser<D> {
     }
 
     /// Parses `raw_block` into a header, primitive or unknown block.
+    ///
+    /// # Errors
+    ///
+    /// Will return `Err` if an error occurs during PBF parsing, decompression or validation.
     #[allow(deprecated)]
     pub fn parse_block(&mut self, raw_block: RawBlock) -> Result<Block, Error> {
         let blob = match pbf::Blob::decode(&*raw_block.data) {
@@ -244,8 +257,8 @@ impl<D: Decompressor> BlockParser<D> {
         };
 
         if let Some(uncompressed_size) = blob.raw_size {
-            self.block_buffer
-                .resize_with(uncompressed_size as usize, Default::default);
+            let uncompressed_size: usize = uncompressed_size.try_into().map_err(|_err| Error::InvalidBlobData)?;
+            self.block_buffer.resize_with(uncompressed_size, Default::default);
         }
 
         if let Some(blob_data) = blob.data {
@@ -318,20 +331,16 @@
                 if let Ok(utf8_string) = str::from_utf8(bytes) {
                     Ok(utf8_string)
                 } else {
-                    Err(Error::LogicError(format!(
-                        "string at index {} is not valid UTF-8",
-                        index
-                    )))
+                    Err(Error::LogicError(format!("string at index {index} is not valid UTF-8")))
                 }
             } else {
                 Err(Error::LogicError(format!(
-                    "string table index {} is out of bounds ({})",
-                    index,
+                    "string table index {index} is out of bounds ({})",
                     self.string_table.s.len()
                 )))
             }
         } else {
-            Err(Error::LogicError(format!("string table index {} is invalid", index)))
+            Err(Error::LogicError(format!("string table index {index} is invalid")))
         }
     };
 
@@ -390,28 +399,16 @@ mod tag_reader_tests {
     #[test]
     fn valid_input() {
         let key_vals = ["", "key1", "val1", "key2", "val2"];
-        let mut string_table = pbf::StringTable::default();
-        string_table.s = key_vals.iter().map(|s| s.as_bytes().to_vec()).collect();
+        let string_table = pbf::StringTable {
+            s: key_vals.iter().map(|s| s.as_bytes().to_vec()).collect(),
+        };
 
         let key_indices = [1, 3];
         let value_indices = [2, 4];
         let mut reader = new_tag_reader(&string_table, &key_indices, &value_indices);
 
-        match reader.next() {
-            Some(tags) => match tags {
-                (Ok("key1"), Ok("val1")) => {}
-                _ => assert!(false),
-            },
-            None => assert!(false),
-        }
-
-        match reader.next() {
-            Some(tags) => match tags {
-                (Ok("key2"), Ok("val2")) => {}
-                _ => assert!(false),
-            },
-            None => assert!(false),
-        }
+        assert!(matches!(reader.next(), Some((Ok("key1"), Ok("val1")))));
+        assert!(matches!(reader.next(), Some((Ok("key2"), Ok("val2")))));
 
         assert!(reader.next().is_none());
     }
1 change: 1 addition & 0 deletions src/pbf.rs
@@ -1 +1,2 @@
+#![allow(clippy::pedantic)]
 include!(concat!(env!("OUT_DIR"), "/proto/osmpbf.rs"));
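The inner attribute here covers everything pulled in by the include!, so the generated protobuf bindings are exempt from pedantic lints while the hand-written crate stays fully linted. A minimal sketch of that scoping behaviour, with hypothetical module and function names:

// Hand-written code opts in to the stricter lint group...
#![warn(clippy::pedantic)]

mod generated {
    // ...while a module-level allow exempts machine-generated items,
    // just as the inner attribute in pbf.rs exempts the include!d file.
    #![allow(clippy::pedantic)]

    pub fn from_codegen(count: i32) -> i64 {
        count as i64 // pedantic cast_lossless would flag this elsewhere
    }
}

fn main() {
    println!("{}", generated::from_codegen(7));
}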