revert: avif support (#8511)

This commit is contained in:
Erika 2023-09-11 23:00:43 +02:00 committed by GitHub
parent cda7d80ac5
commit bf341d6762
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
35 changed files with 1406 additions and 102 deletions

View file

@ -1,5 +0,0 @@
---
'astro': patch
---
Support AVIF input assets

View file

@ -108,10 +108,6 @@ declare module '*.svg' {
const metadata: ImageMetadata;
export default metadata;
}
declare module '*.avif' {
const metadata: ImageMetadata;
export default metadata;
}
declare module 'astro:transitions' {
type TransitionModule = typeof import('./dist/transitions/index.js');

View file

@ -157,7 +157,6 @@
"p-limit": "^4.0.0",
"path-to-regexp": "^6.2.1",
"preferred-pm": "^3.1.2",
"probe-image-size": "^7.2.3",
"prompts": "^2.4.2",
"rehype": "^12.0.1",
"resolve": "^1.22.4",
@ -198,7 +197,6 @@
"@types/js-yaml": "^4.0.5",
"@types/mime": "^3.0.1",
"@types/mocha": "^10.0.1",
"@types/probe-image-size": "^7.2.0",
"@types/prompts": "^2.4.4",
"@types/resolve": "^1.20.2",
"@types/send": "^0.17.1",

View file

@ -1,6 +1,14 @@
export const VIRTUAL_MODULE_ID = 'astro:assets';
export const VIRTUAL_SERVICE_ID = 'virtual:image-service';
export const VALID_INPUT_FORMATS = [
// TODO: `image-size` does not support the following formats, so users can't import them.
// However, it would be immensely useful to add, for three reasons:
// - `heic` and `heif` are common formats, especially among Apple users.
// - AVIF is a common format on the web that's bound to become more and more common.
// - It's totally reasonable for a user's provided image service to want to support more image types.
//'heic',
//'heif',
//'avif',
'jpeg',
'jpg',
'png',
@ -8,7 +16,6 @@ export const VALID_INPUT_FORMATS = [
'webp',
'gif',
'svg',
'avif',
] as const;
/**
* Valid formats that our base services support.
@ -22,6 +29,5 @@ export const VALID_SUPPORTED_FORMATS = [
'webp',
'gif',
'svg',
'avif',
] as const;
export const VALID_OUTPUT_FORMATS = ['avif', 'png', 'webp', 'jpeg', 'jpg', 'svg'] as const;

View file

@ -1,13 +1,8 @@
import probe from 'probe-image-size';
import type { ImageInputFormat, ImageMetadata } from '../types.js';
import imageSize from '../vendor/image-size/index.js';
export async function imageMetadata(data: Buffer): Promise<Omit<ImageMetadata, 'src'> | undefined> {
const result = probe.sync(data);
if (result === null) {
throw new Error('Failed to probe image size.');
}
const { width, height, type, orientation } = result;
const { width, height, type, orientation } = imageSize(data);
const isPortrait = (orientation || 0) >= 5;
if (!width || !height || !type) {

View file

@ -0,0 +1,3 @@
Vendored version of `image-size` and `queue` because we had issues with the CJS nature of those packages.
Should hopefully be fixed by https://github.com/image-size/image-size/pull/370

View file

@ -0,0 +1,9 @@
The MIT License (MIT)
Copyright © 2017 Aditya Yadav, http://netroy.in
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View file

@ -0,0 +1,30 @@
import { typeHandlers, type imageType } from './types.js'
// Every registered handler key, probed in order as a fallback.
const keys = Object.keys(typeHandlers) as imageType[]

// This map helps avoid validating for every single image type: most
// formats can be identified by their very first byte.
const firstBytes: { [byte: number]: imageType } = {
  0x38: 'psd',
  0x42: 'bmp',
  0x44: 'dds',
  0x47: 'gif',
  0x49: 'tiff',
  0x4d: 'tiff',
  0x52: 'webp',
  0x69: 'icns',
  0x89: 'png',
  0xff: 'jpg'
}

/**
 * Guess the image type of a buffer from its signature. The first-byte
 * shortcut is tried before falling back to probing every handler.
 */
export function detector(buffer: Buffer): imageType | undefined {
  const candidate = firstBytes[buffer[0]]
  if (candidate !== undefined && typeHandlers[candidate].validate(buffer)) {
    return candidate
  }
  return keys.find((key) => typeHandlers[key].validate(buffer))
}

View file

@ -0,0 +1,146 @@
import * as fs from "node:fs";
import * as path from "node:path";
import Queue from "../queue/queue.js";
import { detector } from "./detector.js";
import { typeHandlers, type imageType } from "./types.js";
import type { ISizeCalculationResult } from "./types/interface.js";
// Signature of the optional async callback accepted by `imageSize`.
type CallbackFn = (e: Error | null, r?: ISizeCalculationResult) => void;
// Maximum buffer size, with a default of 512 kilobytes.
// TO-DO: make this adaptive based on the initial signature of the image
const MaxBufferSize = 512 * 1024;
// This queue is for async `fs` operations, to avoid reaching file-descriptor limits
const queue = new Queue({ concurrency: 100, autostart: true });
// Module-wide toggles, mutated by `disableFS` / `disableTypes` below.
interface Options {
  disabledFS: boolean; // when true, only Buffer input is accepted
  disabledTypes: imageType[]; // image types rejected even when detected
}
const globalOptions: Options = {
  disabledFS: false,
  disabledTypes: [],
};
/**
 * Return size information based on a buffer.
 * Throws a TypeError when the type is disabled, unknown, or when the
 * matching handler cannot extract dimensions.
 *
 * @param {Buffer} buffer
 * @param {String} filepath
 * @returns {Object}
 */
function lookup(buffer: Buffer, filepath?: string): ISizeCalculationResult {
  // Detect the type from the bytes themselves — never trust the extension.
  const detected = detector(buffer);
  if (typeof detected !== "undefined") {
    if (globalOptions.disabledTypes.indexOf(detected) > -1) {
      throw new TypeError("disabled file type: " + detected);
    }
    // Find an appropriate handler for this file type.
    if (detected in typeHandlers) {
      const result = typeHandlers[detected].calculate(buffer, filepath);
      if (result !== undefined) {
        result.type = detected;
        return result;
      }
    }
  }
  // Reached when detection failed or the handler returned nothing.
  throw new TypeError(
    "unsupported file type: " + detected + " (file: " + filepath + ")"
  );
}
/**
 * Reads (at most `MaxBufferSize` bytes of) a file into a buffer.
 *
 * The handle is closed in a `finally` block: the original version
 * leaked the file descriptor when `handle.stat()` or `handle.read()`
 * rejected, which defeats the purpose of the fd-limiting queue.
 *
 * @param {String} filepath
 * @returns {Promise<Buffer>}
 */
async function asyncFileToBuffer(filepath: string): Promise<Buffer> {
  const handle = await fs.promises.open(filepath, "r");
  try {
    const { size } = await handle.stat();
    if (size <= 0) {
      throw new Error("Empty file");
    }
    // Only the leading bytes are needed to determine dimensions.
    const bufferSize = Math.min(size, MaxBufferSize);
    const buffer = Buffer.alloc(bufferSize);
    await handle.read(buffer, 0, bufferSize, 0);
    return buffer;
  } finally {
    // Close on every path — success, empty file, or read/stat failure.
    await handle.close();
  }
}
/**
* Synchronously reads a file into a buffer, blocking the nodejs process.
*
* @param {String} filepath
* @returns {Buffer}
*/
function syncFileToBuffer(filepath: string): Buffer {
// read from the file, synchronously
const descriptor = fs.openSync(filepath, "r");
const { size } = fs.fstatSync(descriptor);
if (size <= 0) {
fs.closeSync(descriptor);
throw new Error("Empty file");
}
const bufferSize = Math.min(size, MaxBufferSize);
const buffer = Buffer.alloc(bufferSize);
fs.readSync(descriptor, buffer, 0, bufferSize, 0);
fs.closeSync(descriptor);
return buffer;
}
export default imageSize;
// Overload: synchronous usage — a Buffer, or a path read synchronously.
export function imageSize(input: Buffer | string): ISizeCalculationResult;
// Overload: async usage — the path is read through the fd-limited queue.
export function imageSize(input: string, callback: CallbackFn): void;
/**
 * @param {Buffer|string} input - buffer or relative/absolute path of the image file
 * @param {Function=} [callback] - optional function for async detection
 */
export function imageSize(
  input: Buffer | string,
  callback?: CallbackFn
): ISizeCalculationResult | void {
  // Handle buffer input
  if (Buffer.isBuffer(input)) {
    return lookup(input);
  }
  // input should be a string at this point
  if (typeof input !== "string" || globalOptions.disabledFS) {
    throw new TypeError("invalid invocation. input should be a Buffer");
  }
  // resolve the file path
  const filepath = path.resolve(input);
  if (typeof callback === "function") {
    // Queue the read to cap concurrent open file descriptors. Success is
    // delivered as (null, result) on the next tick; failures (read errors
    // or a throwing lookup) go straight to the callback as the error.
    queue.push(() =>
      asyncFileToBuffer(filepath)
        .then((buffer) =>
          process.nextTick(callback, null, lookup(buffer, filepath))
        )
        .catch(callback)
    );
  } else {
    // No callback: read synchronously and return the result directly.
    const buffer = syncFileToBuffer(filepath);
    return lookup(buffer, filepath);
  }
}
/** Disable filesystem access; afterwards only Buffer input is accepted. */
export function disableFS(v: boolean): void {
  globalOptions.disabledFS = v;
}
/** Reject the given image types even when detection succeeds. */
export function disableTypes(types: imageType[]): void {
  globalOptions.disabledTypes = types;
}
/** Tune how many queued async `fs` reads may run at once. */
export function setConcurrency(c: number): void {
  queue.concurrency = c;
}
/** Names of every registered image type handler. */
export const types = Object.keys(typeHandlers);

View file

@ -0,0 +1,10 @@
type Bits = 16 | 32
type MethodName = 'readUInt16BE' | 'readUInt16LE' | 'readUInt32BE' | 'readUInt32LE'

/**
 * Read an unsigned 16- or 32-bit integer from `buffer` at `offset`,
 * honouring the requested endianness by dispatching to the matching
 * built-in Buffer reader.
 */
export function readUInt(buffer: Buffer, bits: Bits, offset: number, isBigEndian: boolean): number {
  const method = ('readUInt' + bits + (isBigEndian ? 'BE' : 'LE')) as MethodName
  return buffer[method](offset || 0)
}

View file

@ -0,0 +1,38 @@
// load all available handlers explicitly for browserify support
import { BMP } from './types/bmp.js'
import { CUR } from './types/cur.js'
import { DDS } from './types/dds.js'
import { GIF } from './types/gif.js'
import { ICNS } from './types/icns.js'
import { ICO } from './types/ico.js'
import { J2C } from './types/j2c.js'
import { JP2 } from './types/jp2.js'
import { JPG } from './types/jpg.js'
import { KTX } from './types/ktx.js'
import { PNG } from './types/png.js'
import { PNM } from './types/pnm.js'
import { PSD } from './types/psd.js'
import { SVG } from './types/svg.js'
import { TIFF } from './types/tiff.js'
import { WEBP } from './types/webp.js'
// Registry mapping each supported type key to its handler. `detector`
// probes these via `validate`; `lookup` then calls `calculate`.
export const typeHandlers = {
  bmp: BMP,
  cur: CUR,
  dds: DDS,
  gif: GIF,
  icns: ICNS,
  ico: ICO,
  j2c: J2C,
  jp2: JP2,
  jpg: JPG,
  ktx: KTX,
  png: PNG,
  pnm: PNM,
  psd: PSD,
  svg: SVG,
  tiff: TIFF,
  webp: WEBP,
}
// Union of every supported type key ('bmp' | 'cur' | ...).
export type imageType = keyof typeof typeHandlers

View file

@ -0,0 +1,14 @@
import type { IImage } from './interface'
/**
 * Windows bitmap. The signature is the two ASCII bytes "BM"; width and
 * height live in the BITMAPINFOHEADER at bytes 18 and 22 respectively.
 */
export const BMP: IImage = {
  validate(buffer) {
    return buffer.toString('ascii', 0, 2) === 'BM'
  },
  calculate(buffer) {
    const width = buffer.readUInt32LE(18)
    // Height is signed: negative values encode top-down row order.
    const height = Math.abs(buffer.readInt32LE(22))
    return { height, width }
  }
}

View file

@ -0,0 +1,16 @@
import { ICO } from './ico.js'
import type { IImage } from './interface'
// Value of the image-type header field that marks a cursor (.CUR) file.
const TYPE_CURSOR = 2

/**
 * Windows cursor. Same container as .ico — only the type field in the
 * header differs — so size calculation is delegated to the ICO handler.
 */
export const CUR: IImage = {
  validate(buffer) {
    // The reserved field must be zero, then the type field selects cursor.
    const reserved = buffer.readUInt16LE(0)
    return reserved === 0 && buffer.readUInt16LE(2) === TYPE_CURSOR
  },
  calculate(buffer) {
    return ICO.calculate(buffer)
  }
}

View file

@ -0,0 +1,14 @@
import type { IImage } from './interface'
/**
 * DirectDraw Surface. The magic is the little-endian dword "DDS "
 * (0x20534444); height and width follow in the header at bytes 12/16.
 */
export const DDS: IImage = {
  validate(buffer) {
    return 0x20534444 === buffer.readUInt32LE(0)
  },
  calculate(buffer) {
    const height = buffer.readUInt32LE(12)
    const width = buffer.readUInt32LE(16)
    return { height, width }
  }
}

View file

@ -0,0 +1,16 @@
import type { IImage } from './interface'
// The 6-byte header must read "GIF87a" or "GIF89a".
const gifRegexp = /^GIF8[79]a/

/**
 * GIF. Validates the version signature, then reads the logical-screen
 * dimensions stored little-endian immediately after it.
 */
export const GIF: IImage = {
  validate(buffer) {
    return gifRegexp.test(buffer.toString('ascii', 0, 6))
  },
  calculate(buffer) {
    const width = buffer.readUInt16LE(6)
    const height = buffer.readUInt16LE(8)
    return { height, width }
  }
}

View file

@ -0,0 +1,113 @@
import type { IImage, ISize } from './interface'
/**
 * ICNS Header
 *
 * | Offset | Size | Purpose |
 * | 0 | 4 | Magic literal, must be "icns" (0x69, 0x63, 0x6e, 0x73) |
 * | 4 | 4 | Length of file, in bytes, msb first. |
 *
 */
const SIZE_HEADER = 4 + 4 // 8
const FILE_LENGTH_OFFSET = 4 // MSB => BIG ENDIAN
/**
 * Image Entry
 *
 * | Offset | Size | Purpose |
 * | 0 | 4 | Icon type, see OSType below. |
 * | 4 | 4 | Length of data, in bytes (including type and length), msb first. |
 * | 8 | n | Icon data |
 */
const ENTRY_LENGTH_OFFSET = 4 // MSB => BIG ENDIAN
// Square pixel size implied by each known OSType tag.
const ICON_TYPE_SIZE: { [key: string]: number } = {
  ICON: 32,
  'ICN#': 32,
  // m => 16 x 16
  'icm#': 16,
  icm4: 16,
  icm8: 16,
  // s => 16 x 16
  'ics#': 16,
  ics4: 16,
  ics8: 16,
  is32: 16,
  s8mk: 16,
  icp4: 16,
  // l => 32 x 32
  icl4: 32,
  icl8: 32,
  il32: 32,
  l8mk: 32,
  icp5: 32,
  ic11: 32,
  // h => 48 x 48
  ich4: 48,
  ich8: 48,
  ih32: 48,
  h8mk: 48,
  // . => 64 x 64
  icp6: 64,
  ic12: 32,
  // t => 128 x 128
  it32: 128,
  t8mk: 128,
  ic07: 128,
  // . => 256 x 256
  ic08: 256,
  ic13: 256,
  // . => 512 x 512
  ic09: 512,
  ic14: 512,
  // . => 1024 x 1024
  ic10: 1024,
}
// Read one entry header at `imageOffset`:
// returns [OSType tag, entry length in bytes (including this header)].
function readImageHeader(buffer: Buffer, imageOffset: number): [string, number] {
  const imageLengthOffset = imageOffset + ENTRY_LENGTH_OFFSET
  return [
    buffer.toString('ascii', imageOffset, imageLengthOffset),
    buffer.readUInt32BE(imageLengthOffset)
  ]
}
// Map an OSType tag to its (square) width/height via ICON_TYPE_SIZE.
function getImageSize(type: string): ISize {
  const size = ICON_TYPE_SIZE[type]
  return { width: size, height: size, type }
}
export const ICNS: IImage = {
validate(buffer) {
return ('icns' === buffer.toString('ascii', 0, 4))
},
calculate(buffer) {
const bufferLength = buffer.length
const fileLength = buffer.readUInt32BE(FILE_LENGTH_OFFSET)
let imageOffset = SIZE_HEADER
let imageHeader = readImageHeader(buffer, imageOffset)
let imageSize = getImageSize(imageHeader[0])
imageOffset += imageHeader[1]
if (imageOffset === fileLength) {
return imageSize
}
const result = {
height: imageSize.height,
images: [imageSize],
width: imageSize.width
}
while (imageOffset < fileLength && imageOffset < bufferLength) {
imageHeader = readImageHeader(buffer, imageOffset)
imageSize = getImageSize(imageHeader[0])
imageOffset += imageHeader[1]
result.images.push(imageSize)
}
return result
}
}

View file

@ -0,0 +1,76 @@
import type { IImage, ISize, ISizeCalculationResult } from './interface'
const TYPE_ICON = 1 // value of the image-type field for .ICO files
/**
 * ICON Header
 *
 * | Offset | Size | Purpose |
 * | 0 | 2 | Reserved. Must always be 0. |
 * | 2 | 2 | Image type: 1 for icon (.ICO) image, 2 for cursor (.CUR) image. Other values are invalid. |
 * | 4 | 2 | Number of images in the file. |
 *
 */
const SIZE_HEADER = 2 + 2 + 2 // 6
/**
 * Image Entry
 *
 * | Offset | Size | Purpose |
 * | 0 | 1 | Image width in pixels. Can be any number between 0 and 255. Value 0 means width is 256 pixels. |
 * | 1 | 1 | Image height in pixels. Can be any number between 0 and 255. Value 0 means height is 256 pixels. |
 * | 2 | 1 | Number of colors in the color palette. Should be 0 if the image does not use a color palette. |
 * | 3 | 1 | Reserved. Should be 0. |
 * | 4 | 2 | ICO format: Color planes. Should be 0 or 1. |
 * | | | CUR format: The horizontal coordinates of the hotspot in number of pixels from the left. |
 * | 6 | 2 | ICO format: Bits per pixel. |
 * | | | CUR format: The vertical coordinates of the hotspot in number of pixels from the top. |
 * | 8 | 4 | The size of the image's data in bytes |
 * | 12 | 4 | The offset of BMP or PNG data from the beginning of the ICO/CUR file |
 *
 */
const SIZE_IMAGE_ENTRY = 1 + 1 + 1 + 1 + 2 + 2 + 4 + 4 // 16
// A stored dimension byte of 0 encodes 256 pixels.
function getSizeFromOffset(buffer: Buffer, offset: number): number {
  const value = buffer.readUInt8(offset)
  return value === 0 ? 256 : value
}
// Width/height of the directory entry at `imageIndex`.
function getImageSize(buffer: Buffer, imageIndex: number): ISize {
  const offset = SIZE_HEADER + (imageIndex * SIZE_IMAGE_ENTRY)
  return {
    height: getSizeFromOffset(buffer, offset + 1),
    width: getSizeFromOffset(buffer, offset)
  }
}
export const ICO: IImage = {
validate(buffer) {
if (buffer.readUInt16LE(0) !== 0) {
return false
}
return buffer.readUInt16LE(2) === TYPE_ICON
},
calculate(buffer) {
const nbImages = buffer.readUInt16LE(4)
const imageSize = getImageSize(buffer, 0)
if (nbImages === 1) {
return imageSize
}
const imgs: ISize[] = [imageSize]
for (let imageIndex = 1; imageIndex < nbImages; imageIndex += 1) {
imgs.push(getImageSize(buffer, imageIndex))
}
const result: ISizeCalculationResult = {
height: imageSize.height,
images: imgs,
width: imageSize.width
}
return result
}
}

View file

@ -0,0 +1,15 @@
// Basic width/height result shared by every handler.
export interface ISize {
  width: number | undefined
  height: number | undefined
  // EXIF orientation value, when present (set by the JPG handler).
  orientation?: number
  // Detected image type key (filled in by `lookup` / icns helpers).
  type?: string
}
// Full result: container formats (ico/cur/icns) also expose every
// embedded image via `images`.
export interface ISizeCalculationResult extends ISize {
  images?: ISize[]
}
// Contract each image-type handler implements: a cheap signature check
// plus a dimension calculation (TIFF additionally needs the filepath).
export interface IImage {
  validate: (buffer: Buffer) => boolean
  calculate: (buffer: Buffer, filepath?: string) => ISizeCalculationResult
}

View file

@ -0,0 +1,15 @@
import type { IImage } from './interface'
/**
 * Raw JPEG 2000 codestream (no JP2 container). Identified by the
 * SOC+SIZ marker sequence ff4f ff51; dimensions are read big-endian
 * from the SIZ segment.
 */
export const J2C: IImage = {
  validate(buffer) {
    // TODO: this doesn't seem right. SIZ marker doesn't have to be right after the SOC
    return 'ff4fff51' === buffer.toString('hex', 0, 4)
  },
  calculate(buffer) {
    const width = buffer.readUInt32BE(8)
    const height = buffer.readUInt32BE(12)
    return { height, width }
  }
}

View file

@ -0,0 +1,61 @@
import type { IImage, ISize } from './interface'
// Hex-encoded 4CC tags of the JP2 box types handled below.
const BoxTypes = {
  ftyp: '66747970',
  ihdr: '69686472',
  jp2h: '6a703268',
  jp__: '6a502020',
  rreq: '72726571',
  xml_: '786d6c20'
}
// Total byte length of an 'rreq' (reader requirements) box body,
// derived from its mask size, standard flags and vendor features.
const calculateRREQLength = (box: Buffer): number => {
  const unit = box.readUInt8(0)
  let offset = 1 + (2 * unit)
  const numStdFlags = box.readUInt16BE(offset)
  const flagsLength = numStdFlags * (2 + unit)
  offset = offset + 2 + flagsLength
  const numVendorFeatures = box.readUInt16BE(offset)
  const featuresLength = numVendorFeatures * (16 + unit)
  return offset + 2 + featuresLength
}
// Height/width are stored big-endian at bytes 4/8 of the 'ihdr' box.
const parseIHDR = (box: Buffer): ISize => {
  return {
    height: box.readUInt32BE(4),
    width: box.readUInt32BE(8),
  }
}
export const JP2: IImage = {
  validate(buffer) {
    // The file must open with a JP2 signature box, followed by 'ftyp'.
    const signature = buffer.toString('hex', 4, 8)
    const signatureLength = buffer.readUInt32BE(0)
    if (signature !== BoxTypes.jp__ || signatureLength < 1) {
      return false
    }
    const ftypeBoxStart = signatureLength + 4
    const ftypBoxLength = buffer.readUInt32BE(signatureLength)
    const ftypBox = buffer.slice(ftypeBoxStart, ftypeBoxStart + ftypBoxLength)
    return ftypBox.toString('hex', 0, 4) === BoxTypes.ftyp
  },
  calculate(buffer) {
    const signatureLength = buffer.readUInt32BE(0)
    const ftypBoxLength = buffer.readUInt16BE(signatureLength + 2)
    let offset = signatureLength + 4 + ftypBoxLength
    const nextBoxType = buffer.toString('hex', offset, offset + 4)
    switch (nextBoxType) {
      // An 'rreq' box may precede 'jp2h'; skip over it first.
      case BoxTypes.rreq:
        // WHAT ARE THESE 4 BYTES?????
        const MAGIC = 4
        offset = offset + 4 + MAGIC + calculateRREQLength(buffer.slice(offset + 4))
        return parseIHDR(buffer.slice(offset + 8, offset + 24))
      case BoxTypes.jp2h:
        return parseIHDR(buffer.slice(offset + 8, offset + 24))
      default:
        throw new TypeError('Unsupported header found: ' + buffer.toString('ascii', offset, offset + 4))
    }
  }
}

View file

@ -0,0 +1,151 @@
// NOTE: we only support baseline and progressive JPGs here
// due to the structure of the loader class, we only get a buffer
// with a maximum size of 4096 bytes. so if the SOF marker is outside
// if this range we can't detect the file size correctly.
import { readUInt } from '../readUInt.js'
import type { IImage, ISize } from './interface'
// 'Exif' in ASCII, hex-encoded.
const EXIF_MARKER = '45786966'
const APP1_DATA_SIZE_BYTES = 2
const EXIF_HEADER_BYTES = 6
const TIFF_BYTE_ALIGN_BYTES = 2
const BIG_ENDIAN_BYTE_ALIGN = '4d4d' // 'MM'
const LITTLE_ENDIAN_BYTE_ALIGN = '4949' // 'II'
// Each entry is exactly 12 bytes
const IDF_ENTRY_BYTES = 12
const NUM_DIRECTORY_ENTRIES_BYTES = 2
// True when the block carries the 'Exif' marker right after its
// 2-byte length word.
function isEXIF(buffer: Buffer): boolean {
  return (buffer.toString('hex', 2, 6) === EXIF_MARKER)
}
/**
 * Read the big-endian height/width pair stored at `index` inside a
 * JPEG SOF block (height first, then width).
 */
function extractSize(buffer: Buffer, index: number): ISize {
  const height = buffer.readUInt16BE(index)
  const width = buffer.readUInt16BE(index + 2)
  return { height, width }
}
// Scan the EXIF IFD entries for the orientation tag (0x0112) and return
// its value; returns undefined when absent or malformed.
function extractOrientation(exifBlock: Buffer, isBigEndian: boolean) {
  // TODO: assert that this contains 0x002A
  // let STATIC_MOTOROLA_TIFF_HEADER_BYTES = 2
  // let TIFF_IMAGE_FILE_DIRECTORY_BYTES = 4
  // TODO: derive from TIFF_IMAGE_FILE_DIRECTORY_BYTES
  const idfOffset = 8
  // IDF offset works from right after the header bytes
  // (so the offset includes the tiff byte align)
  const offset = EXIF_HEADER_BYTES + idfOffset
  const idfDirectoryEntries = readUInt(exifBlock, 16, offset, isBigEndian)
  for (let directoryEntryNumber = 0; directoryEntryNumber < idfDirectoryEntries; directoryEntryNumber++) {
    const start = offset + NUM_DIRECTORY_ENTRIES_BYTES + (directoryEntryNumber * IDF_ENTRY_BYTES)
    const end = start + IDF_ENTRY_BYTES
    // Skip on corrupt EXIF blocks
    if (start > exifBlock.length) {
      return
    }
    const block = exifBlock.slice(start, end)
    const tagNumber = readUInt(block, 16, 0, isBigEndian)
    // 0x0112 (decimal: 274) is the `orientation` tag ID
    if (tagNumber === 274) {
      const dataFormat = readUInt(block, 16, 2, isBigEndian)
      // Orientation must be stored as an unsigned short (format 3).
      if (dataFormat !== 3) {
        return
      }
      // unsigned short has 2 bytes per component;
      // if there were more than 4 bytes in total it would be a pointer
      const numberOfComponents = readUInt(block, 32, 4, isBigEndian)
      if (numberOfComponents !== 1) {
        return
      }
      return readUInt(block, 16, 8, isBigEndian)
    }
  }
}
// Extract the orientation from an APP1/EXIF block, provided its
// byte-alignment marker ('MM' big-endian / 'II' little-endian) is valid.
function validateExifBlock(buffer: Buffer, index: number) {
  // Skip APP1 Data Size
  const exifBlock = buffer.slice(APP1_DATA_SIZE_BYTES, index)
  // Consider byte alignment
  const byteAlign = exifBlock.toString('hex', EXIF_HEADER_BYTES, EXIF_HEADER_BYTES + TIFF_BYTE_ALIGN_BYTES)
  // Ignore Empty EXIF. Validate byte alignment
  const isBigEndian = byteAlign === BIG_ENDIAN_BYTE_ALIGN
  const isLittleEndian = byteAlign === LITTLE_ENDIAN_BYTE_ALIGN
  if (isBigEndian || isLittleEndian) {
    return extractOrientation(exifBlock, isBigEndian)
  }
}
/**
 * Sanity-check that `index` lies inside `buffer` and points at a JPEG
 * marker (every block must begin with 0xFF). Throws on corruption.
 */
function validateBuffer(buffer: Buffer, index: number): void {
  if (index > buffer.length) {
    throw new TypeError('Corrupt JPG, exceeded buffer limits')
  }
  const marker = buffer[index]
  if (marker !== 0xff) {
    throw new TypeError('Invalid JPG, marker table corrupted')
  }
}
/**
 * JPEG. Walks the marker segments looking for an SOF block to read the
 * dimensions from, collecting the EXIF orientation along the way. Only
 * baseline/progressive SOF0-SOF2 markers are handled (see the file-top
 * comment about the 4096-byte buffer window).
 */
export const JPG: IImage = {
  validate(buffer) {
    // SOI (start-of-image) marker.
    const SOIMarker = buffer.toString('hex', 0, 2)
    return ('ffd8' === SOIMarker)
  },
  calculate(buffer) {
    // Skip 4 chars, they are for signature
    buffer = buffer.slice(4)
    let orientation: number | undefined
    let next: number
    while (buffer.length) {
      // read length of the next block
      const i = buffer.readUInt16BE(0)
      if (isEXIF(buffer)) {
        orientation = validateExifBlock(buffer, i)
      }
      // ensure correct format
      validateBuffer(buffer, i)
      // 0xFFC0 is baseline standard(SOF)
      // 0xFFC1 is baseline optimized(SOF)
      // 0xFFC2 is progressive(SOF2)
      next = buffer[i + 1]
      if (next === 0xC0 || next === 0xC1 || next === 0xC2) {
        // Dimensions sit 5 bytes into the SOF segment.
        const size = extractSize(buffer, i + 5)
        // TODO: is orientation=0 a valid answer here?
        if (!orientation) {
          return size
        }
        return {
          height: size.height,
          orientation,
          width: size.width
        }
      }
      // move to the next block
      buffer = buffer.slice(i + 2)
    }
    throw new TypeError('Invalid JPG, no size found')
  }
}

View file

@ -0,0 +1,16 @@
import type { IImage } from './interface'
// Bytes 1-6 of the 12-byte KTX file identifier.
const SIGNATURE = 'KTX 11'

/**
 * Khronos KTX texture container. Pixel width/height are stored
 * little-endian at fixed offsets (36 and 40) in the header.
 */
export const KTX: IImage = {
  validate(buffer) {
    return buffer.toString('ascii', 1, 7) === SIGNATURE
  },
  calculate(buffer) {
    const width = buffer.readUInt32LE(36)
    const height = buffer.readUInt32LE(40)
    return { height, width }
  }
}

View file

@ -0,0 +1,36 @@
import type { IImage } from './interface'
// Bytes 1-7 of the 8-byte PNG signature (the leading 0x89 is skipped).
const pngSignature = 'PNG\r\n\x1a\n'
const pngImageHeaderChunkName = 'IHDR'
// Used to detect "fried" png's: http://www.jongware.com/pngdefry.html
const pngFriedChunkName = 'CgBI'

/**
 * PNG. After the 8-byte signature the IHDR chunk holds big-endian
 * width/height. Apple "fried" PNGs insert a CgBI chunk first, shifting
 * IHDR (and therefore the dimensions) 16 bytes further in.
 */
export const PNG: IImage = {
  validate(buffer) {
    if (buffer.toString('ascii', 1, 8) !== pngSignature) {
      return false
    }
    let chunkName = buffer.toString('ascii', 12, 16)
    if (chunkName === pngFriedChunkName) {
      chunkName = buffer.toString('ascii', 28, 32)
    }
    if (chunkName !== pngImageHeaderChunkName) {
      throw new TypeError('Invalid PNG')
    }
    return true
  },
  calculate(buffer) {
    const fried = buffer.toString('ascii', 12, 16) === pngFriedChunkName
    const widthOffset = fried ? 32 : 16
    return {
      height: buffer.readUInt32BE(widthOffset + 4),
      width: buffer.readUInt32BE(widthOffset)
    }
  }
}

View file

@ -0,0 +1,80 @@
import type { IImage, ISize } from './interface'
// Two-letter signature -> netpbm flavour.
const PNMTypes: { [signature: string]: string } = {
  P1: 'pbm/ascii',
  P2: 'pgm/ascii',
  P3: 'ppm/ascii',
  P4: 'pbm',
  P5: 'pgm',
  P6: 'ppm',
  P7: 'pam',
  PF: 'pfm'
}
const Signatures = Object.keys(PNMTypes)
// A handler consumes the header lines and yields the dimensions.
type Handler = (type: string[]) => ISize
const handlers: { [type: string]: Handler} = {
  // Every flavour except PAM: the first non-comment header line is
  // "<width> <height>".
  default: (lines) => {
    let dimensions: string[] = []
    while (lines.length > 0) {
      const line = lines.shift()!
      // Lines starting with '#' are comments.
      if (line[0] === '#') {
        continue
      }
      dimensions = line.split(' ')
      break
    }
    if (dimensions.length === 2) {
      return {
        height: parseInt(dimensions[1], 10),
        width: parseInt(dimensions[0], 10),
      }
    } else {
      throw new TypeError('Invalid PNM')
    }
  },
  // PAM: the header is "KEY value" lines; WIDTH and HEIGHT are needed.
  pam: (lines) => {
    const size: { [key: string]: number } = {}
    while (lines.length > 0) {
      const line = lines.shift()!
      // Skip anything that cannot be a short ASCII header line.
      if (line.length > 16 || line.charCodeAt(0) > 128) {
        continue
      }
      const [key, value] = line.split(' ')
      if (key && value) {
        size[key.toLowerCase()] = parseInt(value, 10)
      }
      if (size.height && size.width) {
        break
      }
    }
    if (size.height && size.width) {
      return {
        height: size.height,
        width: size.width
      }
    } else {
      throw new TypeError('Invalid PAM')
    }
  }
}
export const PNM: IImage = {
  validate(buffer) {
    const signature = buffer.toString('ascii', 0, 2)
    return Signatures.includes(signature)
  },
  calculate(buffer) {
    const signature = buffer.toString('ascii', 0, 2)
    const type = PNMTypes[signature]
    // TODO: this probably generates garbage. move to a stream based parser
    const lines = buffer.toString('ascii', 3).split(/[\r\n]+/)
    const handler = handlers[type] || handlers.default
    return handler(lines)
  }
}

View file

@ -0,0 +1,14 @@
import type { IImage } from './interface'
/**
 * Adobe Photoshop document. Signature is "8BPS"; the file header
 * stores height (byte 14) then width (byte 18) as big-endian dwords.
 */
export const PSD: IImage = {
  validate(buffer) {
    return buffer.toString('ascii', 0, 4) === '8BPS'
  },
  calculate(buffer) {
    const height = buffer.readUInt32BE(14)
    const width = buffer.readUInt32BE(18)
    return { height, width }
  }
}

View file

@ -0,0 +1,106 @@
import type { IImage, ISize } from './interface'
// Parsed width/height/viewBox attributes of the root <svg> element.
interface IAttributes {
  width: number | null
  height: number | null
  viewbox?: IAttributes | null
}
const svgReg = /<svg\s([^>"']|"[^"]*"|'[^']*')*>/
const extractorRegExps = {
  // '%' is excluded by the regex: percentage lengths can't become pixels.
  height: /\sheight=(['"])([^%]+?)\1/,
  root: svgReg,
  viewbox: /\sviewBox=(['"])(.+?)\1/i,
  width: /\swidth=(['"])([^%]+?)\1/,
}
const INCH_CM = 2.54
// CSS unit -> pixel conversion factors.
const units: { [unit: string]: number } = {
  in: 96,
  cm: 96 / INCH_CM,
  em: 16,
  ex: 8,
  m: 96 / INCH_CM * 100,
  mm: 96 / INCH_CM / 10,
  pc: 96 / 72 / 12,
  pt: 96 / 72,
  px: 1
}
const unitsReg = new RegExp(`^([0-9.]+(?:e\\d+)?)(${Object.keys(units).join('|')})?$`)
// Parse a CSS length such as "2.5cm" into rounded pixels;
// returns undefined when the string is unparsable.
function parseLength(len: string) {
  const m = unitsReg.exec(len)
  if (!m) {
    return undefined
  }
  return Math.round(Number(m[1]) * (units[m[2]] || 1))
}
// viewBox is "min-x min-y width height"; only the last two are used.
function parseViewbox(viewbox: string): IAttributes {
  const bounds = viewbox.split(' ')
  return {
    height: parseLength(bounds[3])!,
    width: parseLength(bounds[2])!
  }
}
function parseAttributes(root: string): IAttributes {
  const width = root.match(extractorRegExps.width)
  const height = root.match(extractorRegExps.height)
  const viewbox = root.match(extractorRegExps.viewbox)
  return {
    height: height && parseLength(height[2])!,
    viewbox: viewbox && parseViewbox(viewbox[2])!,
    width: width && parseLength(width[2])!,
  }
}
// Explicit width+height attributes win over the viewBox.
function calculateByDimensions(attrs: IAttributes): ISize {
  return {
    height: attrs.height!,
    width: attrs.width!,
  }
}
// Derive the missing dimension from the viewBox aspect ratio.
function calculateByViewbox(attrs: IAttributes, viewbox: IAttributes): ISize {
  const ratio = (viewbox.width!) / (viewbox.height!)
  if (attrs.width) {
    return {
      height: Math.floor(attrs.width / ratio),
      width: attrs.width,
    }
  }
  if (attrs.height) {
    return {
      height: attrs.height,
      width: Math.floor(attrs.height * ratio),
    }
  }
  return {
    height: viewbox.height!,
    width: viewbox.width!,
  }
}
export const SVG: IImage = {
  validate(buffer) {
    const str = String(buffer)
    return svgReg.test(str)
  },
  calculate(buffer) {
    const root = buffer.toString('utf8').match(extractorRegExps.root)
    if (root) {
      const attrs = parseAttributes(root[0])
      if (attrs.width && attrs.height) {
        return calculateByDimensions(attrs)
      }
      if (attrs.viewbox) {
        return calculateByViewbox(attrs, attrs.viewbox)
      }
    }
    throw new TypeError('Invalid SVG')
  }
}

View file

@ -0,0 +1,115 @@
// based on http://www.compix.com/fileformattif.htm
// TO-DO: support big-endian as well
import * as fs from 'node:fs'
import { readUInt } from '../readUInt.js'
import type { IImage } from './interface'
// Read IFD (image-file-directory) into a buffer
function readIFD(buffer: Buffer, filepath: string, isBigEndian: boolean) {
  // Offset of the first IFD is stored at byte 4 of the TIFF header.
  const ifdOffset = readUInt(buffer, 32, 4, isBigEndian)
  // read only till the end of the file
  let bufferSize = 1024
  const fileSize = fs.statSync(filepath).size
  if (ifdOffset + bufferSize > fileSize) {
    bufferSize = fileSize - ifdOffset - 10
  }
  // populate the buffer
  const endBuffer = Buffer.alloc(bufferSize)
  const descriptor = fs.openSync(filepath, 'r')
  fs.readSync(descriptor, endBuffer, 0, bufferSize, ifdOffset)
  fs.closeSync(descriptor)
  // Skip the 2-byte entry count; callers iterate raw 12-byte entries.
  return endBuffer.slice(2)
}
// TIFF values seem to be messed up on Big-Endian, this helps
function readValue(buffer: Buffer, isBigEndian: boolean): number {
  const low = readUInt(buffer, 16, 8, isBigEndian)
  const high = readUInt(buffer, 16, 10, isBigEndian)
  return (high << 16) + low
}
// move to the next tag (each IFD entry is 12 bytes)
function nextTag(buffer: Buffer) {
  if (buffer.length > 24) {
    return buffer.slice(12)
  }
}
// Extract IFD tags from TIFF metadata
function extractTags(buffer: Buffer, isBigEndian: boolean) {
  const tags: {[key: number]: number} = {}
  let temp: Buffer | undefined = buffer
  while (temp?.length) {
    const code = readUInt(temp, 16, 0, isBigEndian)
    const type = readUInt(temp, 16, 2, isBigEndian)
    const length = readUInt(temp, 32, 4, isBigEndian)
    // 0 means end of IFD
    if (code === 0) {
      break
    } else {
      // 256 is width, 257 is height
      // if (code === 256 || code === 257) {
      // Only single SHORT (3) / LONG (4) values are collected — enough
      // for the width/height tags this handler needs.
      if (length === 1 && (type === 3 || type === 4)) {
        tags[code] = readValue(temp, isBigEndian)
      }
      // move to the next tag
      temp = nextTag(temp)
    }
  }
  return tags
}
// Test if the TIFF is Big Endian or Little Endian
function determineEndianness(buffer: Buffer) {
  const signature = buffer.toString('ascii', 0, 2)
  if ('II' === signature) {
    return 'LE'
  } else if ('MM' === signature) {
    return 'BE'
  }
}
const signatures = [
  // '492049', // currently not supported
  '49492a00', // Little endian
  '4d4d002a', // Big Endian
  // '4d4d002a', // BigTIFF > 4GB. currently not supported
]
export const TIFF: IImage = {
  validate(buffer) {
    return signatures.includes(buffer.toString('hex', 0, 4))
  },
  calculate(buffer, filepath) {
    // The IFD can live anywhere in the file, so a filepath is required
    // to re-read that region from disk.
    if (!filepath) {
      throw new TypeError('Tiff doesn\'t support buffer')
    }
    // Determine BE/LE
    const isBigEndian = determineEndianness(buffer) === 'BE'
    // read the IFD
    const ifdBuffer = readIFD(buffer, filepath, isBigEndian)
    // extract the tags from the IFD
    const tags = extractTags(ifdBuffer, isBigEndian)
    const width = tags[256]
    const height = tags[257]
    if (!width || !height) {
      throw new TypeError('Invalid Tiff. Missing tags')
    }
    return { height, width }
  }
}

View file

@ -0,0 +1,65 @@
// based on https://developers.google.com/speed/webp/docs/riff_container
import type { IImage, ISize } from './interface'
// VP8X (extended): 24-bit little-endian width/height, stored minus one.
function calculateExtended(buffer: Buffer): ISize {
  return {
    height: 1 + buffer.readUIntLE(7, 3),
    width: 1 + buffer.readUIntLE(4, 3)
  }
}
// VP8L (lossless): dimensions are bit-packed 14-bit values minus one.
function calculateLossless(buffer: Buffer): ISize {
  return {
    height: 1 + (((buffer[4] & 0xF) << 10) | (buffer[3] << 2) | ((buffer[2] & 0xC0) >> 6)),
    width: 1 + (((buffer[2] & 0x3F) << 8) | buffer[1])
  }
}
// VP8 (lossy): 14-bit little-endian dimensions.
function calculateLossy(buffer: Buffer): ISize {
  // `& 0x3fff` returns the last 14 bits
  // TO-DO: include webp scaling in the calculations
  return {
    height: buffer.readInt16LE(8) & 0x3fff,
    width: buffer.readInt16LE(6) & 0x3fff
  }
}
export const WEBP: IImage = {
  validate(buffer) {
    const riffHeader = 'RIFF' === buffer.toString('ascii', 0, 4)
    const webpHeader = 'WEBP' === buffer.toString('ascii', 8, 12)
    const vp8Header = 'VP8' === buffer.toString('ascii', 12, 15)
    return (riffHeader && webpHeader && vp8Header)
  },
  calculate(buffer) {
    const chunkHeader = buffer.toString('ascii', 12, 16)
    buffer = buffer.slice(20, 30)
    // Extended webp stream signature
    if (chunkHeader === 'VP8X') {
      const extendedHeader = buffer[0]
      const validStart = (extendedHeader & 0xc0) === 0
      const validEnd = (extendedHeader & 0x01) === 0
      if (validStart && validEnd) {
        return calculateExtended(buffer)
      } else {
        // TODO: breaking change
        throw new TypeError('Invalid WebP')
      }
    }
    // Lossy webp stream signature — NOTE(review): the original comment
    // said "Lossless" here, but 'VP8 ' chunks are the lossy format
    // (this branch calls calculateLossy).
    if (chunkHeader === 'VP8 ' && buffer[0] !== 0x2f) {
      return calculateLossy(buffer)
    }
    // Lossless webp stream signature — NOTE(review): the original
    // comment said "Lossy"; 'VP8L' chunks are lossless.
    const signature = buffer.toString('hex', 3, 6)
    if (chunkHeader === 'VP8L' && signature !== '9d012a') {
      return calculateLossless(buffer)
    }
    throw new TypeError('Invalid WebP')
  }
}

View file

@ -0,0 +1,8 @@
The MIT License (MIT)
Copyright (c) 2014 Jesse Tane <jesse.tane@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View file

@ -0,0 +1,225 @@
// Cached `hasOwnProperty` so the per-job option lookup (`has.call(job, 'timeout')`
// in Queue#start) works even for job functions with unusual prototypes.
const has = Object.prototype.hasOwnProperty
/**
 * Minimal stand-in for `CustomEvent`, which only became available in
 * Node.js 19: a plain `Event` subclass that carries an arbitrary
 * `detail` payload alongside the event name.
 * @see https://github.com/nodejs/node/issues/40678
 */
export class QueueEvent extends Event {
  constructor(name, detail) {
    super(name)
    this.detail = detail
  }
}
export default class Queue extends EventTarget {
constructor (options = {}) {
super()
const { concurrency = Infinity, timeout = 0, autostart = false, results = null } = options
this.concurrency = concurrency
this.timeout = timeout
this.autostart = autostart
this.results = results
this.pending = 0
this.session = 0
this.running = false
this.jobs = []
this.timers = []
this.addEventListener('error', this._errorHandler)
}
_errorHandler(evt) {
this.end(evt.detail.error);
}
pop () {
return this.jobs.pop()
}
shift () {
return this.jobs.shift()
}
indexOf (searchElement, fromIndex) {
return this.jobs.indexOf(searchElement, fromIndex)
}
lastIndexOf (searchElement, fromIndex) {
if (fromIndex !== undefined) { return this.jobs.lastIndexOf(searchElement, fromIndex) }
return this.jobs.lastIndexOf(searchElement)
}
slice (start, end) {
this.jobs = this.jobs.slice(start, end)
return this
}
reverse () {
this.jobs.reverse()
return this
}
push (...workers) {
const methodResult = this.jobs.push(...workers)
if (this.autostart) {
this.start()
}
return methodResult
}
unshift (...workers) {
const methodResult = this.jobs.unshift(...workers)
if (this.autostart) {
this.start()
}
return methodResult
}
splice (start, deleteCount, ...workers) {
this.jobs.splice(start, deleteCount, ...workers)
if (this.autostart) {
this.start()
}
return this
}
get length () {
return this.pending + this.jobs.length
}
start (callback) {
let awaiter;
if (callback) {
this._addCallbackToEndEvent(callback)
} else {
awaiter = this._createPromiseToEndEvent();
}
this.running = true
if (this.pending >= this.concurrency) {
return
}
if (this.jobs.length === 0) {
if (this.pending === 0) {
this.done()
}
return
}
const job = this.jobs.shift()
const session = this.session
const timeout = (job !== undefined) && has.call(job, 'timeout') ? job.timeout : this.timeout
let once = true
let timeoutId = null
let didTimeout = false
let resultIndex = null
const next = (error, ...result) => {
if (once && this.session === session) {
once = false
this.pending--
if (timeoutId !== null) {
this.timers = this.timers.filter((tID) => tID !== timeoutId)
clearTimeout(timeoutId)
}
if (error) {
this.dispatchEvent(new QueueEvent('error', { error, job }))
} else if (!didTimeout) {
if (resultIndex !== null && this.results !== null) {
this.results[resultIndex] = [...result]
}
this.dispatchEvent(new QueueEvent('success', { result: [...result], job }))
}
if (this.session === session) {
if (this.pending === 0 && this.jobs.length === 0) {
this.done()
} else if (this.running) {
this.start()
}
}
}
}
if (timeout) {
timeoutId = setTimeout(() => {
didTimeout = true
this.dispatchEvent(new QueueEvent('timeout', { next, job }))
next()
}, timeout)
this.timers.push(timeoutId)
}
if (this.results != null) {
resultIndex = this.results.length
this.results[resultIndex] = null
}
this.pending++
this.dispatchEvent(new QueueEvent('start', { job }))
const promise = job(next)
if (promise !== undefined && typeof promise.then === 'function') {
promise.then(function (result) {
return next(undefined, result)
}).catch(function (err) {
return next(err || true)
})
}
if (this.running && this.jobs.length > 0) {
return this.start()
}
return awaiter;
}
stop () {
this.running = false
}
end (error) {
this.clearTimers()
this.jobs.length = 0
this.pending = 0
this.done(error)
}
clearTimers () {
this.timers.forEach((timer) => {
clearTimeout(timer)
})
this.timers = []
}
_addCallbackToEndEvent (cb) {
const onend = (evt) => {
this.removeEventListener('end', onend)
cb(evt.detail.error, this.results)
}
this.addEventListener('end', onend)
}
_createPromiseToEndEvent() {
return new Promise((resolve) => {
this._addCallbackToEndEvent((error, results) => {
resolve({ error, results });
});
});
}
done (error) {
this.session++
this.running = false
this.dispatchEvent(new QueueEvent('end', { error }))
}
}

View file

@ -14,7 +14,7 @@ import { hashTransform, propsToFilename } from './utils/transformToPath.js';
const resolvedVirtualModuleId = '\0' + VIRTUAL_MODULE_ID;
const assetRegex = new RegExp(`\\.(${VALID_INPUT_FORMATS.join('|')})$`, 'i');
const assetRegex = new RegExp(`\.(${VALID_INPUT_FORMATS.join('|')})$`, 'i');
export default function assets({
settings,

View file

@ -174,22 +174,6 @@ describe('astro:image', () => {
expect(res.status).to.equal(200);
expect(loading).to.not.be.undefined;
});
it('supports avif', async () => {
let res = await fixture.fetch('/avif');
let html = await res.text();
$ = cheerio.load(html);
console.log(html);
let $img = $('img');
expect($img).to.have.a.lengthOf(1);
let src = $img.attr('src');
res = await fixture.fetch(src);
expect(res.status).to.equal(200);
expect(res.headers.get('content-type')).to.equal('image/avif');
});
});
describe('vite-isms', () => {

View file

@ -1,5 +0,0 @@
---
import light_walrus from "../assets/light_walrus.avif";
---
<img src={light_walrus.src} />

View file

@ -596,9 +596,6 @@ importers:
preferred-pm:
specifier: ^3.1.2
version: 3.1.2
probe-image-size:
specifier: ^7.2.3
version: 7.2.3
prompts:
specifier: ^2.4.2
version: 2.4.2
@ -709,9 +706,6 @@ importers:
'@types/mocha':
specifier: ^10.0.1
version: 10.0.1
'@types/probe-image-size':
specifier: ^7.2.0
version: 7.2.0
'@types/prompts':
specifier: ^2.4.4
version: 2.4.4
@ -8896,12 +8890,6 @@ packages:
/@types/ms@0.7.31:
resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==}
/@types/needle@3.2.0:
resolution: {integrity: sha512-6XzvzEyJ2ozFNfPajFmqH9JOt0Hp+9TawaYpJT59iIP/zR0U37cfWCRwosyIeEBBZBi021Osq4jGAD3AOju5fg==}
dependencies:
'@types/node': 18.17.8
dev: true
/@types/nlcst@1.0.0:
resolution: {integrity: sha512-3TGCfOcy8R8mMQ4CNSNOe3PG66HttvjcLzCoOpvXvDtfWOTi+uT/rxeOKm/qEwbM4SNe1O/PjdiBK2YcTjU4OQ==}
dependencies:
@ -8943,13 +8931,6 @@ packages:
resolution: {integrity: sha512-ZTaqn/qSqUuAq1YwvOFQfVW1AR/oQJlLSZVustdjwI+GZ8kr0MSHBj0tsXPW1EqHubx50gtBEjbPGsdZwQwCjQ==}
dev: true
/@types/probe-image-size@7.2.0:
resolution: {integrity: sha512-R5H3vw62gHNHrn+JGZbKejb+Z2D/6E5UNVlhCzIaBBLroMQMOFqy5Pap2gM+ZZHdqBtVU0/cx/M6to+mOJcoew==}
dependencies:
'@types/needle': 3.2.0
'@types/node': 18.17.8
dev: true
/@types/prompts@2.4.4:
resolution: {integrity: sha512-p5N9uoTH76lLvSAaYSZtBCdEXzpOOufsRjnhjVSrZGXikVGHX9+cc9ERtHRV4hvBKHyZb1bg4K+56Bd2TqUn4A==}
dependencies:
@ -10774,17 +10755,6 @@ packages:
dependencies:
ms: 2.0.0
/debug@3.2.7:
resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
dependencies:
ms: 2.1.3
dev: false
/debug@4.3.4(supports-color@8.1.1):
resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==}
engines: {node: '>=6.0'}
@ -12547,6 +12517,7 @@ packages:
engines: {node: '>=0.10.0'}
dependencies:
safer-buffer: 2.1.2
dev: true
/iconv-lite@0.6.3:
resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==}
@ -13277,6 +13248,7 @@ packages:
/lodash.merge@4.6.2:
resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==}
dev: true
/lodash.sortby@4.7.0:
resolution: {integrity: sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==}
@ -14362,18 +14334,6 @@ packages:
resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==}
dev: true
/needle@2.9.1:
resolution: {integrity: sha512-6R9fqJ5Zcmf+uYaFgdIHmLwNldn5HbK8L5ybn7Uz+ylX/rnOsSp1AHcvQSrCaFN+qNM1wpymHqD7mVasEOlHGQ==}
engines: {node: '>= 4.4.x'}
hasBin: true
dependencies:
debug: 3.2.7
iconv-lite: 0.4.24
sax: 1.2.4
transitivePeerDependencies:
- supports-color
dev: false
/negotiator@0.6.3:
resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==}
engines: {node: '>= 0.6'}
@ -15471,16 +15431,6 @@ packages:
engines: {node: '>=6'}
dev: false
/probe-image-size@7.2.3:
resolution: {integrity: sha512-HubhG4Rb2UH8YtV4ba0Vp5bQ7L78RTONYu/ujmCu5nBI8wGv24s4E9xSKBi0N1MowRpxk76pFCpJtW0KPzOK0w==}
dependencies:
lodash.merge: 4.6.2
needle: 2.9.1
stream-parser: 0.3.1
transitivePeerDependencies:
- supports-color
dev: false
/progress@2.0.3:
resolution: {integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==}
engines: {node: '>=0.4.0'}
@ -16115,6 +16065,7 @@ packages:
/safer-buffer@2.1.2:
resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
dev: true
/sass-formatter@0.7.7:
resolution: {integrity: sha512-axtQ7c7Cf4UgHsD8e4okhIkkc90+tdgBIfUMx69+qJuMNq9EOo2k+RH/mDKj0XeA5z3nC1Ca5TCntuxRhI+1MA==}
@ -16534,14 +16485,6 @@ packages:
engines: {node: '>=4', npm: '>=6'}
dev: true
/stream-parser@0.3.1:
resolution: {integrity: sha512-bJ/HgKq41nlKvlhccD5kaCr/P+Hu0wPNKPJOH7en+YrJu/9EgqUF+88w5Jb6KNcjOFMhfX4B2asfeAtIGuHObQ==}
dependencies:
debug: 2.6.9
transitivePeerDependencies:
- supports-color
dev: false
/stream-transform@2.1.3:
resolution: {integrity: sha512-9GHUiM5hMiCi6Y03jD2ARC1ettBXkQBoQAe7nJsPknnI0ow10aXjTnew8QtYQmLjzn974BnmWEAJgCY6ZP1DeQ==}
dependencies: