From 5a17f81c261825d00d7c678d5cb4a41f10c4501f Mon Sep 17 00:00:00 2001 From: Jonathan Behrens Date: Wed, 14 Aug 2024 13:19:11 -0700 Subject: [PATCH] Bump MSRV to 1.80 --- .github/workflows/rust.yml | 8 ++++---- Cargo.toml | 2 +- src/decoder.rs | 27 ++++++++++----------------- src/encoder.rs | 2 +- src/lossless_transform.rs | 13 +------------ 5 files changed, 17 insertions(+), 35 deletions(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index f6003db..522604e 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -14,14 +14,14 @@ jobs: strategy: fail-fast: false matrix: - rust: ["1.67.1", nightly, beta, stable] + rust: ["1.80.1", nightly, beta, stable] steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@nightly - if: ${{ matrix.rust == '1.67.1' }} + if: ${{ matrix.rust == '1.80.1' }} - name: Generate Cargo.lock with minimal-version dependencies - if: ${{ matrix.rust == '1.67.1' }} + if: ${{ matrix.rust == '1.80.1' }} run: cargo -Zminimal-versions generate-lockfile - uses: dtolnay/rust-toolchain@v1 @@ -34,7 +34,7 @@ jobs: - name: build run: cargo build -v - name: test - if: ${{ matrix.rust != '1.67.1' }} + if: ${{ matrix.rust != '1.80.1' }} run: cargo test -v && cargo doc -v - name: bench if: ${{ matrix.rust == 'nightly' }} diff --git a/Cargo.toml b/Cargo.toml index 6c71792..afdac53 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,7 +3,7 @@ name = "image-webp" version = "0.2.0" edition = "2021" license = "MIT OR Apache-2.0" -rust-version = "1.67.1" +rust-version = "1.80.1" description = "WebP encoding and decoding in pure Rust" homepage = "https://github.com/image-rs/image-webp" diff --git a/src/decoder.rs b/src/decoder.rs index 357bed9..7977918 100644 --- a/src/decoder.rs +++ b/src/decoder.rs @@ -2,7 +2,7 @@ use byteorder_lite::{LittleEndian, ReadBytesExt}; use quick_error::quick_error; use std::collections::HashMap; -use std::io::{self, BufRead, BufReader, Cursor, Read, Seek}; +use std::io::{self, BufRead, 
Cursor, Read, Seek}; use std::num::NonZeroU16; use std::ops::Range; @@ -385,15 +385,8 @@ impl<R: Read + Seek> WebPDecoder<R> { let max_position = position + riff_size.saturating_sub(12); self.r.seek(io::SeekFrom::Start(position))?; - // Resist denial of service attacks by using a BufReader. In most images there - // should be a very small number of chunks. However, nothing prevents a malicious - // image from having an extremely large number of "unknown" chunks. Issuing - // millions of reads and seeks against the underlying reader might be very - // expensive. - let mut reader = BufReader::with_capacity(64 << 10, &mut self.r); - while position < max_position { - match read_chunk_header(&mut reader) { + match read_chunk_header(&mut self.r) { Ok((chunk, chunk_size, chunk_size_rounded)) => { let range = position + 8..position + 8 + chunk_size; position += 8 + chunk_size_rounded; @@ -408,8 +401,8 @@ impl<R: Read + Seek> WebPDecoder<R> { return Err(DecodingError::InvalidChunkSize); } - reader.seek_relative(12)?; - let duration = reader.read_u32::<LittleEndian>()? & 0xffffff; + self.r.seek_relative(12)?; + let duration = self.r.read_u32::<LittleEndian>()? & 0xffffff; self.loop_duration = self.loop_duration.wrapping_add(u64::from(duration)); @@ -419,19 +412,19 @@ impl<R: Read + Seek> WebPDecoder<R> { // and the spec says that lossless images SHOULD NOT contain ALPH // chunks, so we treat both as indicators of lossy images. if !self.is_lossy { - let (subchunk, ..) = read_chunk_header(&mut reader)?; + let (subchunk, ..) 
= read_chunk_header(&mut self.r)?; if let WebPRiffChunk::VP8 | WebPRiffChunk::ALPH = subchunk { self.is_lossy = true; } - reader.seek_relative(chunk_size_rounded as i64 - 24)?; + self.r.seek_relative(chunk_size_rounded as i64 - 24)?; } else { - reader.seek_relative(chunk_size_rounded as i64 - 16)?; + self.r.seek_relative(chunk_size_rounded as i64 - 16)?; } continue; } - reader.seek_relative(chunk_size_rounded as i64)?; + self.r.seek_relative(chunk_size_rounded as i64)?; } Err(DecodingError::IoError(e)) if e.kind() == io::ErrorKind::UnexpectedEof => @@ -885,13 +878,13 @@ pub(crate) fn range_reader<R: Read + Seek>( Ok(r.take(range.end - range.start)) } -pub(crate) fn read_fourcc<R: BufRead>(mut r: R) -> Result<WebPRiffChunk, DecodingError> { +pub(crate) fn read_fourcc<R: Read>(mut r: R) -> Result<WebPRiffChunk, DecodingError> { let mut chunk_fourcc = [0; 4]; r.read_exact(&mut chunk_fourcc)?; Ok(WebPRiffChunk::from_fourcc(chunk_fourcc)) } -pub(crate) fn read_chunk_header<R: BufRead>( +pub(crate) fn read_chunk_header<R: Read>( mut r: R, ) -> Result<(WebPRiffChunk, u64, u64), DecodingError> { let chunk = read_fourcc(&mut r)?; diff --git a/src/encoder.rs b/src/encoder.rs index 0300bd1..b84a217 100644 --- a/src/encoder.rs +++ b/src/encoder.rs @@ -286,7 +286,7 @@ fn write_huffman_tree( const fn length_to_symbol(len: u16) -> (u16, u8) { let len = len - 1; - let highest_bit = 15 - len.leading_zeros() as u16; // TODO: use ilog2 once MSRV >= 1.67 + let highest_bit = len.ilog2() as u16; let second_highest_bit = (len >> (highest_bit - 1)) & 1; let extra_bits = highest_bit - 1; let symbol = 2 * highest_bit + second_highest_bit; diff --git a/src/lossless_transform.rs b/src/lossless_transform.rs index 5efde37..009b206 100644 --- a/src/lossless_transform.rs +++ b/src/lossless_transform.rs @@ -386,17 +386,6 @@ pub(crate) fn apply_color_indexing_transform( table_size: u16, table_data: &[u8], ) { - // TODO: Replace with built-in div_ceil when MSRV is 1.73+ - const fn div_ceil(a: u16, b: u16) -> u16 { - let d = a / b; - let r = a % b; - if r > 0 && b > 0 { - d + 1 - } else { - d - } - } - if table_size 
> 16 { let mut table = table_data.chunks_exact(4).collect::<Vec<_>>(); table.resize(256, &[0; 4]); @@ -434,7 +423,7 @@ pub(crate) fn apply_color_indexing_transform( let table = table.chunks_exact(4 << width_bits).collect::<Vec<_>>(); let entry_size = 4 << width_bits; - let index_image_width = div_ceil(width, 1 << width_bits) as usize; + let index_image_width = width.div_ceil(1 << width_bits) as usize; let final_entry_size = width as usize * 4 - entry_size * (index_image_width - 1); for y in (0..height as usize).rev() {