Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for AVIF #130

Open
wants to merge 9 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions jest.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,5 @@ export default {
moduleNameMapper: {
"(.+)\\.js": "$1"
},
testTimeout: 30000,
};
1,400 changes: 693 additions & 707 deletions package-lock.json

Large diffs are not rendered by default.

4 changes: 4 additions & 0 deletions scripts/live-traffic/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
images/
results.json
yarn-error.log
yarn.lock
143 changes: 143 additions & 0 deletions scripts/live-traffic/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,143 @@
/**
 * Compare WebP and AVIF files from live access logs data.
 *
 * This script will simulate the effectiveness of AVIF images over WebP based off
 * live traffic data from CloudFront Access logs. This is to provide a measurement of
 * best defaults when both WebP and AVIF is supported.
 *
 * This script expects a file path to JSON data of CloudFront requests, formatted by the Altis Cloud
 * Dashboard API (one JSON object per line).
 *
 * Example usage: node index.js ./tachyon-access-logs.json --bucket=hmn-uploads --prefix=humanmade-production
 *
 * This will match tachyon access log entries with the s3 file, download the S3 files and generate webp and avif images
 * at the size and args in the query string from the access log. The script will take the top 1000 most popular image
 * requests from the access log, and generate the predicted total data usage comparison between WebP and AVIF.
 *
 * Images will be downloaded to the "./images" directory, as a cache. This means the script can be tweaked, or implementation changed
 * without needing to re-download S3 images.
 */
const fs = require('fs');
const AWS = require('aws-sdk');
const cliProgress = require('cli-progress');
// Tachyon's own resize implementation, loaded from the repository root.
const tachyon = require('../../index');
// CLI arguments: one positional log-file path, plus --bucket and --prefix flags.
const args = require('yargs/yargs')(require('yargs/helpers').hideBin(process.argv)).argv
// No credentials needed: objects are fetched with makeUnauthenticatedRequest below.
const S3 = new AWS.S3();

const filePath = args._[0]; // Path to the newline-delimited JSON access log.
const s3Bucket = args.bucket; // S3 bucket holding the original images.
const s3Prefix = args.prefix; // Key prefix prepended to each request path.

// Read the whole log and split into one raw JSON string per line.
let requests = String( fs.readFileSync( filePath ) ).split( "\n" );

// Reduce each raw log line to just the URI path and query string we need.
const parsed = requests.map( rawLine => {
	// Lines exported as a JSON array fragment carry a trailing comma; strip it.
	const trimmed = rawLine.endsWith( ',' ) ? rawLine.slice( 0, -1 ) : rawLine;
	let entry;
	try {
		entry = JSON.parse( trimmed );
	} catch {
		console.error( "Unable to parse log line " + trimmed )
		return null;
	}
	return { file: entry['cs-uri-stem'], args: entry['cs-uri-query'] };
} );

// Keep only Tachyon image requests, dropping unparseable lines.
const tachyonRequests = parsed.filter( entry => {
	if ( ! entry ) {
		return false;
	}
	return entry.file.indexOf( '/tachyon/' ) === 0;
} );

// Count unique file+args combinations.
const requestCounts = {};
for ( const entry of tachyonRequests ) {
	const key = `${ entry.file }?${ entry.args || '' }`;
	requestCounts[ key ] = requestCounts[ key ] ? requestCounts[ key ] + 1 : 1;
}

// Take the top N most-requested images.
const topRequests = Object.entries( requestCounts )
	.sort( ( a, b ) => a[1] > b[1] ? -1 : 1 )
	.slice( 0, 1000 );

// Expand each "path?query" key back into a path, parsed args object and count.
requests = topRequests.map( ( [ key, count ] ) => {
	let [ path, query ] = key.split( '?' );
	const queryArgs = query.length === 0 ? {} : query.split( '&' ).reduce( ( all, pair ) => {
		// Log values are double-encoded, so decode twice.
		all[ pair.split('=')[0] ] = decodeURIComponent(decodeURIComponent(pair.split('=')[1]));
		return all;
	}, {} );
	path = path.replace( '/tachyon/', '/uploads/' );
	return {
		path,
		args: queryArgs,
		count,
	};
} );


(async () => {
	// The images cache directory is git-ignored (see .gitignore), so create it
	// up front — otherwise writeFileSync below throws ENOENT on a fresh checkout.
	fs.mkdirSync( `${ __dirname }/images`, { recursive: true } );

	const progress = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);
	progress.start( requests.length, 0 )
	for (let index = 0; index < requests.length; index++) {
		const item = requests[index];
		// Flatten the S3 key into a single cache file name.
		item.filePath = `${ __dirname }/images/${ item.path.split( '/' ).join( '-' ) }`;
		let fileBuffer = null;
		progress.increment();
		if ( ! fs.existsSync( item.filePath ) ) {
			try {
				const file = await S3.makeUnauthenticatedRequest( 'getObject', { Bucket: s3Bucket, Key: s3Prefix + item.path } ).promise();
				fs.writeFileSync( item.filePath, file.Body );
				fileBuffer = file.Body;
			} catch ( e ) {
				// Deleted or private objects can't be measured; drop the entry and move on.
				console.warn( `Unable to fetch file ${ s3Prefix + item.path }. ${ e.message }`)
				requests[index] = null;
				continue;
			}
		} else {
			// Cached from a previous run; skip the network round-trip.
			fileBuffer = fs.readFileSync( item.filePath );
		}
		// Re-run the original request's resize args in both formats, timing each.
		const startWebp = new Date().getTime();
		const webp = await tachyon.resizeBuffer( fileBuffer, { ...item.args, webp: true } );
		const endWebp = new Date().getTime();

		const startAvif = new Date().getTime();
		const avif = await tachyon.resizeBuffer( fileBuffer, { ...item.args, avif: true } );
		const endAvif = new Date().getTime();

		item.webp = webp.info.size;
		item.webpTime = endWebp - startWebp;
		item.avif = avif.info.size;
		item.avifTime = endAvif - startAvif;
		// > 1 means AVIF produced a smaller file than WebP.
		item.ratio = item.webp / item.avif;
	}
	progress.stop();
	// Remove entries that failed to download.
	requests = requests.filter( Boolean );
	let bytesOfWebp = 0;
	let bytesOfAvif = 0;
	let timeWebp = 0;
	let timeAvif = 0;
	let totalSmaller = 0;

	fs.writeFileSync( __dirname + '/results.json', JSON.stringify( requests, null, 4 ) );

	// Weight each image's encoded size by how often it was requested, to
	// estimate total transfer over live traffic.
	requests.forEach( item => {
		bytesOfWebp += item.webp * item.count;
		bytesOfAvif += item.avif * item.count;
		timeWebp += item.webpTime;
		timeAvif += item.avifTime;
		if ( item.avif < item.webp ) {
			totalSmaller += 1;
		}
	} );

	console.log( `Based off request counts, WebP total data: ${ ( bytesOfWebp / 1024 / 1024 / 1024 ).toFixed(2) }GB vs Avif: ${ ( bytesOfAvif / 1024 / 1024 / 1024 ).toFixed(2) }GB` );
	console.log( `Avif files ${ 100 - ( Math.round( bytesOfAvif / bytesOfWebp * 100 ) ) }% smaller in total` );
	console.log( `${ totalSmaller } of ${ requests.length } avif files were smaller than webp.` );
	console.log( `${ timeWebp }ms in webp, ${ timeAvif }ms in avif.` );

	// NOTE(review): sorted worst-compressing-first, but the sorted array is never
	// printed or written — results.json above is saved unsorted. Intentional?
	requests.sort( (a,b) => a.ratio > b.ratio ? -1 : 1 );
})();
11 changes: 11 additions & 0 deletions scripts/live-traffic/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
{
"name": "live-traffic",
"version": "1.0.0",
"main": "index.js",
"license": "MIT",
"dependencies": {
"aws-sdk": "^2.1000.0",
"cli-progress": "^3.8.2",
"yargs": "^16.2.0"
}
}
3 changes: 3 additions & 0 deletions src/lambda-handler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,9 @@ const streamify_handler: StreamifyHandler = async ( event, response ) => {
if ( typeof args.webp === 'undefined' ) {
args.webp = !! ( event.headers && Object.keys( event.headers ).find( key => key.toLowerCase() === 'x-webp' ) );
}
if ( typeof args.avif === 'undefined' ) {
args.avif = !! ( event.headers && event.headers['X-Avif'] );
}

// If there is a presign param, we need to decode it and add it to the args. This is to provide a secondary way to pass pre-sign params,
// as using them in a Lambda function URL invocation will trigger a Lambda error.
Expand Down
46 changes: 45 additions & 1 deletion src/lib.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ export interface Args {
quality?: string | number;
w?: string;
webp?: string | boolean;
avif?: string | boolean;
zoom?: string;
'X-Amz-Algorithm'?: string;
'X-Amz-Content-Sha256'?: string;
Expand Down Expand Up @@ -112,6 +113,40 @@ function applyZoomCompression( defaultValue: number, zoom: number ): number {
return clamp( value, min, defaultValue );
}

/**
 * Map a JPEG/WebP-scale quality value to a perceptually equivalent AVIF quality.
 *
 * Quality numbers are not comparable between formats, so a lookup table of
 * measured equivalences is used, with linear interpolation between anchor
 * points. See https://www.industrialempathy.com/posts/avif-webp-quality-settings/
 * for the JPG / WebP / AVIF quality comparison the table is based on.
 *
 * @param jpegQuality Quality on the JPEG scale; clamped to the 0-100 range.
 * @returns Equivalent AVIF quality (0-100, possibly fractional).
 */
function normalizeAVIFQuality(jpegQuality: number): number {
	const qualityMap: { [key: number]: number } = {
		0: 0,
		50: 48,
		60: 51,
		70: 58,
		80: 64,
		82: 65,
		100: 100
	}

	// Clamp out-of-range input: values above 100 previously ran off the end of
	// the table and produced NaN (0/0 interpolation ratio).
	jpegQuality = Math.min(100, Math.max(0, jpegQuality));

	// Explicit undefined check rather than truthiness, so a quality of 0
	// (which maps to 0, a falsy value) takes the direct table hit.
	if (qualityMap[jpegQuality] !== undefined) {
		return qualityMap[jpegQuality];
	}

	// Interpolate linearly between the two anchor points surrounding the input.
	let rangeJPEGStart = 0, rangeJPEGEnd = 0, rangeAVIFStart = 0, rangeAVIFEnd = 0;
	for (const jpeg in qualityMap) {
		rangeJPEGEnd = parseInt(jpeg, 10);
		rangeAVIFEnd = qualityMap[rangeJPEGEnd];
		if (jpegQuality < rangeJPEGEnd) {
			break;
		}
		rangeJPEGStart = rangeJPEGEnd;
		rangeAVIFStart = rangeAVIFEnd;
	}
	const jpegSpan = rangeJPEGEnd - rangeJPEGStart;
	const avifSpan = rangeAVIFEnd - rangeAVIFStart;
	const ratio = avifSpan / jpegSpan;
	return rangeAVIFStart + (jpegQuality - rangeJPEGStart) * ratio;
}

type ResizeBufferResult = {
data: Buffer;
info: sharp.OutputInfo & {
Expand Down Expand Up @@ -306,7 +341,16 @@ export async function resizeBuffer(
}

// allow override of compression quality
if ( args.webp ) {
if (args.avif) {
// If we are using the default quality (82), then map it to an equivalent default
// for AVIF, as quality values between formats are not comparable. See
// https://www.industrialempathy.com/posts/avif-webp-quality-settings/ for
// a comparison of JPG, WebP and AVIF quality.
let quality = normalizeAVIFQuality( args.quality as number );
image.avif({
quality: Math.round( clamp( quality, 0, 100 ) ),
});
} else if (args.webp) {
image.webp( {
quality: Math.round( clamp( args.quality, 0, 100 ) ),
} );
Expand Down
Loading
Loading