Lots of lints and clippy things
* use `[lints]` Cargo.toml sections
* added a few FIXME/TODOs to the code - we don't need to solve them now, but they should remind us to look at them at some point; they might point to real bugs.
nyurik committed Nov 20, 2023
1 parent e7f9444 commit 739b9ee
Showing 6 changed files with 48 additions and 24 deletions.
10 changes: 10 additions & 0 deletions Cargo.toml
@@ -43,3 +43,13 @@ tokio = { version = "1", features = ["test-util", "macros", "rt"] }

[package.metadata.docs.rs]
all-features = true

[lints.rust]
unsafe_code = "forbid"
unused_qualifications = "warn"

[lints.clippy]
pedantic = { level = "warn", priority = -1 }
missing_errors_doc = "allow"
module_name_repetitions = "allow"
similar_names = "allow"
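
Note on `priority = -1` above: Cargo applies `[lints]` entries from lowest priority to highest, so the `pedantic` group sets a `warn` baseline first, and the individually listed lints (default priority 0) then override it with `allow`.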
20 changes: 12 additions & 8 deletions src/async_reader.rs
@@ -1,3 +1,7 @@
// FIXME: This seems like a bug - there are lots of u64 to usize conversions in this file,
// so any file larger than 4GB, or an untrusted file with bad data may crash.
#![allow(clippy::cast_possible_truncation)]

#[cfg(feature = "mmap-async-tokio")]
use std::path::Path;

@@ -30,18 +34,18 @@ pub struct AsyncPmTilesReader<B, C = NoCache> {
}

impl<B: AsyncBackend + Sync + Send> AsyncPmTilesReader<B, NoCache> {
/// Creates a new reader from a specified source and validates the provided PMTiles archive is valid.
/// Creates a new reader from a specified source and validates the provided `PMTiles` archive is valid.
///
/// Note: Prefer using new_with_* methods.
/// Note: Prefer using `new_with_*` methods.
pub async fn try_from_source(backend: B) -> Result<Self, PmtError> {
Self::try_from_cached_source(backend, NoCache).await
}
}

impl<B: AsyncBackend + Sync + Send, C: DirectoryCache + Sync + Send> AsyncPmTilesReader<B, C> {
/// Creates a new cached reader from a specified source and validates the provided PMTiles archive is valid.
/// Creates a new cached reader from a specified source and validates the provided `PMTiles` archive is valid.
///
/// Note: Prefer using new_with_* methods.
/// Note: Prefer using `new_with_*` methods.
pub async fn try_from_cached_source(backend: B, cache: C) -> Result<Self, PmtError> {
// Read the first 127 and up to 16,384 bytes to ensure we can initialize the header and root directory.
let mut initial_bytes = backend.read(0, MAX_INITIAL_BYTES).await?;
@@ -216,7 +220,7 @@ impl<B: AsyncBackend + Sync + Send, C: DirectoryCache + Sync + Send> AsyncPmTilesReader<B, C> {

#[cfg(feature = "http-async")]
impl AsyncPmTilesReader<HttpBackend, NoCache> {
/// Creates a new PMTiles reader from a URL using the Reqwest backend.
/// Creates a new `PMTiles` reader from a URL using the Reqwest backend.
///
/// Fails if [url] does not exist or is an invalid archive. (Note: HTTP requests are made to validate it.)
pub async fn new_with_url<U: IntoUrl>(client: Client, url: U) -> Result<Self, PmtError> {
@@ -226,7 +230,7 @@ impl AsyncPmTilesReader<HttpBackend, NoCache> {

#[cfg(feature = "http-async")]
impl<C: DirectoryCache + Sync + Send> AsyncPmTilesReader<HttpBackend, C> {
/// Creates a new PMTiles reader with cache from a URL using the Reqwest backend.
/// Creates a new `PMTiles` reader with cache from a URL using the Reqwest backend.
///
/// Fails if [url] does not exist or is an invalid archive. (Note: HTTP requests are made to validate it.)
pub async fn new_with_cached_url<U: IntoUrl>(
@@ -242,7 +246,7 @@ impl<C: DirectoryCache + Sync + Send> AsyncPmTilesReader<HttpBackend, C> {

#[cfg(feature = "mmap-async-tokio")]
impl AsyncPmTilesReader<MmapBackend, NoCache> {
/// Creates a new PMTiles reader from a file path using the async mmap backend.
/// Creates a new `PMTiles` reader from a file path using the async mmap backend.
///
/// Fails if [p] does not exist or is an invalid archive.
pub async fn new_with_path<P: AsRef<Path>>(path: P) -> Result<Self, PmtError> {
@@ -252,7 +256,7 @@ impl AsyncPmTilesReader<MmapBackend, NoCache> {

#[cfg(feature = "mmap-async-tokio")]
impl<C: DirectoryCache + Sync + Send> AsyncPmTilesReader<MmapBackend, C> {
/// Creates a new cached PMTiles reader from a file path using the async mmap backend.
/// Creates a new cached `PMTiles` reader from a file path using the async mmap backend.
///
/// Fails if [p] does not exist or is an invalid archive.
pub async fn new_with_cached_path<P: AsRef<Path>>(cache: C, path: P) -> Result<Self, PmtError> {
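
A note on the truncation FIXME at the top of this file: one possible direction is to make each u64-to-usize conversion fallible instead of allowing silent truncation. A minimal sketch, assuming a dedicated error variant (`PmtError::UnsupportedOffset` is hypothetical, not part of the crate):

// Sketch only: fail on offsets that do not fit in usize (e.g. >4GB archives
// on 32-bit targets) instead of truncating with `as usize`.
// The error variant is hypothetical.
fn checked_usize(offset: u64) -> Result<usize, PmtError> {
    usize::try_from(offset).map_err(|_| PmtError::UnsupportedOffset(offset))
}

This would surface a malformed or oversized archive as an error rather than a wrapped offset.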
2 changes: 1 addition & 1 deletion src/cache.rs
@@ -44,7 +44,7 @@ impl DirectoryCache for NoCache {
async fn insert_dir(&self, _offset: usize, _directory: Directory) {}
}

/// A simple HashMap-based implementation of a PMTiles directory cache.
/// A simple HashMap-based implementation of a `PMTiles` directory cache.
#[derive(Default)]
pub struct HashMapCache {
pub cache: Arc<RwLock<HashMap<usize, Directory>>>,
17 changes: 9 additions & 8 deletions src/directory.rs
@@ -18,6 +18,7 @@ impl Debug for Directory {

impl Directory {
#[cfg(any(feature = "http-async", feature = "mmap-async-tokio"))]
#[must_use]
pub fn find_tile_id(&self, tile_id: u64) -> Option<&DirEntry> {
match self.entries.binary_search_by(|e| e.tile_id.cmp(&tile_id)) {
Ok(idx) => self.entries.get(idx),
@@ -27,7 +28,7 @@
if next_id > 0 {
let previous_tile = self.entries.get(next_id - 1)?;
if previous_tile.is_leaf()
|| tile_id - previous_tile.tile_id < previous_tile.run_length as u64
|| tile_id - previous_tile.tile_id < u64::from(previous_tile.run_length)
{
return Some(previous_tile);
}
@@ -49,28 +50,28 @@ impl TryFrom<Bytes> for Directory {

// Read tile IDs
let mut next_tile_id = 0;
for entry in entries.iter_mut() {
for entry in &mut entries {
next_tile_id += buffer.read_u64_varint()?;
entry.tile_id = next_tile_id;
}

// Read Run Lengths
for entry in entries.iter_mut() {
for entry in &mut entries {
entry.run_length = buffer.read_u32_varint()?;
}

// Read Lengths
for entry in entries.iter_mut() {
for entry in &mut entries {
entry.length = buffer.read_u32_varint()?;
}

// Read Offsets
let mut last_entry: Option<&DirEntry> = None;
for entry in entries.iter_mut() {
for entry in &mut entries {
let offset = buffer.read_u64_varint()?;
entry.offset = if offset == 0 {
let e = last_entry.ok_or(PmtError::InvalidEntry)?;
e.offset + e.length as u64
e.offset + u64::from(e.length)
} else {
offset - 1
};
@@ -116,7 +117,7 @@ mod tests {
reader.read_exact(header_bytes.as_mut()).unwrap();

let header = Header::try_from_bytes(header_bytes.freeze()).unwrap();
let mut directory_bytes = BytesMut::zeroed(header.root_length as usize);
let mut directory_bytes = BytesMut::zeroed(usize::try_from(header.root_length).unwrap());
reader.read_exact(directory_bytes.as_mut()).unwrap();

let mut decompressed = BytesMut::zeroed(directory_bytes.len() * 2);
Expand All @@ -136,7 +137,7 @@ mod tests {
// ...it breaks pattern on the 59th tile
assert_eq!(directory.entries[58].tile_id, 58);
assert_eq!(directory.entries[58].run_length, 2);
assert_eq!(directory.entries[58].offset, 422070);
assert_eq!(directory.entries[58].offset, 422_070);
assert_eq!(directory.entries[58].length, 850);
}
}
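
For context on the `TryFrom<Bytes>` hunk above: tile IDs are stored as varint deltas, and an encoded offset of 0 means "immediately after the previous entry" (any other offset is stored as the real value plus 1). A minimal standalone sketch of the delta rule, using a plain array in place of the varint reader:

// Sketch: reconstruct absolute tile IDs the same way the loop above does.
let deltas: [u64; 3] = [5, 1, 2];   // already varint-decoded
let mut tile_ids = Vec::new();
let mut next_tile_id = 0u64;
for delta in deltas {
    next_tile_id += delta;          // running sum turns deltas into absolute IDs
    tile_ids.push(next_tile_id);    // -> [5, 6, 8]
}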
20 changes: 14 additions & 6 deletions src/header.rs
@@ -49,6 +49,7 @@ pub enum Compression {
}

impl Compression {
#[must_use]
pub fn content_encoding(&self) -> Option<&'static str> {
Some(match self {
Compression::Gzip => "gzip",
@@ -75,6 +76,7 @@ impl TryInto<Compression> for u8 {

#[cfg(feature = "tilejson")]
impl Header {
#[must_use]
pub fn get_tilejson(&self, sources: Vec<String>) -> tilejson::TileJSON {
tilejson::tilejson! {
tiles: sources,
@@ -85,19 +87,21 @@ impl Header {
}
}

#[must_use]
pub fn get_bounds(&self) -> tilejson::Bounds {
tilejson::Bounds::new(
self.min_longitude as f64,
self.min_latitude as f64,
self.max_longitude as f64,
self.max_latitude as f64,
f64::from(self.min_longitude),
f64::from(self.min_latitude),
f64::from(self.max_longitude),
f64::from(self.max_latitude),
)
}

#[must_use]
pub fn get_center(&self) -> tilejson::Center {
tilejson::Center::new(
self.center_longitude as f64,
self.center_latitude as f64,
f64::from(self.center_longitude),
f64::from(self.center_latitude),
self.center_zoom,
)
}
Expand All @@ -113,6 +117,7 @@ pub enum TileType {
}

impl TileType {
#[must_use]
pub fn content_type(&self) -> &'static str {
match self {
TileType::Mvt => "application/vnd.mapbox-vector-tile",
@@ -143,7 +148,9 @@ static V3_MAGIC: &str = "PMTiles";
static V2_MAGIC: &str = "PM";

impl Header {
#[allow(clippy::cast_precision_loss)]
fn read_coordinate_part<B: Buf>(mut buf: B) -> f32 {
// TODO: would it be more precise to do `((value as f64) / 10_000_000.) as f32` ?
buf.get_i32_le() as f32 / 10_000_000.
}

@@ -195,6 +202,7 @@ impl Header {

#[cfg(test)]
mod tests {
#![allow(clippy::unreadable_literal, clippy::float_cmp)]
use std::fs::File;
use std::io::Read;
use std::num::NonZeroU64;
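
On the precision TODO in `read_coordinate_part`: casting the raw `i32` straight to `f32` loses low bits once the magnitude exceeds 2^24, whereas dividing in `f64` first rounds only once at the very end. A standalone sketch comparing the two orders (whether the results actually differ depends on the raw value):

// Sketch: the two conversion orders from the TODO, side by side.
let raw: i32 = 1_799_999_999;                        // ~179.9999999 degrees, stored as 1e-7 deg
let direct = raw as f32 / 10_000_000.;               // i32 -> f32 cast first (lossy above 2^24)
let via_f64 = (f64::from(raw) / 10_000_000.) as f32; // divide in f64, round to f32 once
println!("direct = {direct}, via f64 = {via_f64}");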
3 changes: 2 additions & 1 deletion src/tile.rs
@@ -4,7 +4,8 @@ pub(crate) fn tile_id(z: u8, x: u64, y: u64) -> u64 {
return 0;
}

let base_id: u64 = 1 + (1..z).map(|i| 4u64.pow(i as u32)).sum::<u64>();
// TODO: minor optimization with bit shifting
let base_id: u64 = 1 + (1..z).map(|i| 4u64.pow(u32::from(i))).sum::<u64>();

let tile_id = hilbert_2d::u64::xy2h_discrete(x, y, z.into(), hilbert_2d::Variant::Hilbert);

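
On the bit-shifting TODO above: `1 + 4^1 + … + 4^(z-1)` is a geometric series with closed form `(4^z - 1) / 3`, and `4^z` is `1 << (2*z)`. A sketch of that variant, valid only on this `z >= 1` path and for `z < 32` (larger shifts would overflow `u64`):

// Sketch: closed-form replacement for the summation above.
fn base_id_shifted(z: u8) -> u64 {
    debug_assert!((1u8..32).contains(&z)); // z == 0 returns early; 2*z must stay below 64
    ((1u64 << (2 * u32::from(z))) - 1) / 3
}

For z = 1, 2, 3 this yields 1, 5, 21, matching the iterator version.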
