diff --git a/.aegir.js b/.aegir.js index 9752d511..d171722f 100644 --- a/.aegir.js +++ b/.aegir.js @@ -1,3 +1,50 @@ +import { dirname, join } from 'node:path' +import { createReadStream } from 'node:fs'; +import { fileURLToPath } from 'node:url'; +import * as http from 'node:http'; +import getPort from 'aegir/get-port' + +const __dirname = dirname(fileURLToPath(import.meta.url)); + +// create an http server that will host the fixture data files. When receiving a request for a fileName, it will return './src/fixtures/data/${fileName}' +async function createFixtureServer() { + const port = await getPort(3333) + const fixturesDataFolder = join(__dirname, 'fixture') + const server = await new Promise((resolve, _reject) => { + const s = http.createServer(async (req, res) => { + // Set CORS headers + res.setHeader('Access-Control-Allow-Origin', '*'); + res.setHeader('Access-Control-Request-Method', '*'); + res.setHeader('Access-Control-Allow-Methods', 'OPTIONS, GET'); + res.setHeader('Access-Control-Allow-Headers', '*'); + if ( req.method === 'OPTIONS' ) { + res.writeHead(200); + res.end(); + return; + } + const fileName = req.url?.split('/').pop() + if (fileName) { + try { + createReadStream(join(fixturesDataFolder, fileName)).pipe(res) + res.writeHead(200, {'Content-Type': 'application/octet-stream'}) + } catch (e) { + console.error(e) + res.writeHead(500, e.message) + res.end() + } + } else { + res.writeHead(404) + res.end() + } + }).listen(port, () => resolve(s)) + }) + + return { + server, + port + } +} + /** @type {import('aegir').PartialOptions} */ export default { build: { @@ -15,6 +62,22 @@ export default { 'test.js', '**/*.test-d.ts', '**/*.spec.[tj]s', + 'test/**' ] - } + }, + test: { + async before(_options) { + const { server: httpServer, port: httpPort } = await createFixtureServer() + return { + env: { + FIXTURE_DATA_SERVER: `http://127.0.0.1:${httpPort}` + }, + httpServer + } + }, + after: async (_options, {httpServer}) => { + await 
httpServer.closeAllConnections() + await httpServer.close() + } + }, } diff --git a/package.json b/package.json index 48d40669..0922ccec 100644 --- a/package.json +++ b/package.json @@ -63,7 +63,13 @@ "lint": "aegir lint", "lint:fix": "aegir lint --fix", "release": "aegir release", - "test": "npm run build && ava --fail-fast", + "test": "aegir test", + "test:chrome": "aegir test -t browser --cov", + "test:chrome-webworker": "aegir test -t webworker", + "test:firefox": "aegir test -t browser -- --browser firefox", + "test:firefox-webworker": "aegir test -t webworker -- --browser firefox", + "test:node": "aegir test -t node --cov", + "test:electron-main": "aegir test -t electron-main", "test:tsd": "tsd" }, "eslintConfig": { diff --git a/src/basic-token-type.ts b/src/basic-token-type.ts index d68fbc4b..84cf0934 100644 --- a/src/basic-token-type.ts +++ b/src/basic-token-type.ts @@ -1,5 +1,6 @@ import * as ieee754 from 'ieee754' import type { IToken, IGetToken } from '@tokenizer/token' +import { bufferToString } from './buffer-dataview-tools.js' // import { Buffer } from 'node:buffer'; // Primitive types @@ -431,8 +432,8 @@ export class BufferType implements IGetToken { public constructor (public len: number) { } - public get (uint8Array: Uint8Array, off: number): Buffer { - return Buffer.from(uint8Array.subarray(off, off + this.len)) + public get (uint8Array: Uint8Array, off: number): Uint8Array { + return uint8Array.subarray(off, off + this.len) } } @@ -444,7 +445,7 @@ export class StringType implements IGetToken { } public get (uint8Array: Uint8Array, offset: number): string { - return Buffer.from(uint8Array).toString(this.encoding, offset, offset + this.len) + return bufferToString(uint8Array, this.encoding, offset, offset + this.len) } } diff --git a/src/core.js b/src/core.js index e1aa27de..0e1e36d1 100644 --- a/src/core.js +++ b/src/core.js @@ -30,14 +30,9 @@ function _check (buffer, headers, options) { offset: 0, ...options } - // console.group('_check') - // 
console.log('options: ', options) - // console.log('headers: ', headers) let result = true for (const [index, header] of headers.entries()) { - // console.log('index: ', index) - // console.log('header: ', header) // If a bitmask is set if (options.mask) { // If header doesn't equal `buf` with bits masked off @@ -51,8 +46,6 @@ function _check (buffer, headers, options) { break } } - // console.log('result: ', result) - // console.groupEnd() return result } @@ -1234,7 +1227,7 @@ export class FileTypeParser { // ASF_Header_Object first 80 bytes if (this.check([0x30, 0x26, 0xB2, 0x75, 0x8E, 0x66, 0xCF, 0x11, 0xA6, 0xD9])) { async function readHeader () { - const guid = Buffer.alloc(16) + const guid = alloc(16) await tokenizer.readBuffer(guid) return { id: guid, @@ -1249,7 +1242,7 @@ export class FileTypeParser { let payload = header.size - 24 if (_check(header.id, [0x91, 0x07, 0xDC, 0xB7, 0xB7, 0xA9, 0xCF, 0x11, 0x8E, 0xE6, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65])) { // Sync on Stream-Properties-Object (B7DC0791-A9B7-11CF-8EE6-00C00C205365) - const typeId = Buffer.alloc(16) + const typeId = alloc(16) payload -= await tokenizer.readBuffer(typeId) if (_check(typeId, [0x40, 0x9E, 0x69, 0xF8, 0x4D, 0x5B, 0xCF, 0x11, 0xA8, 0xFD, 0x00, 0x80, 0x5F, 0x5C, 0x44, 0x2B])) { diff --git a/test/file-type.spec.ts b/test/file-type.spec.ts index 00833f3b..ea486aa8 100644 --- a/test/file-type.spec.ts +++ b/test/file-type.spec.ts @@ -1,29 +1,14 @@ /* eslint-env mocha */ - -// import process from 'node:process'; -import { Buffer } from 'node:buffer' -import fs from 'node:fs' -import path from 'node:path' -// import stream from 'node:stream' -import { fileURLToPath } from 'node:url' -// import test from 'ava' -import { Parser as ReadmeParser } from 'commonmark' -// import { readableNoopStream } from 'noop-stream' import { fileTypeFromBuffer, - // fileTypeFromStream, - // fileTypeFromFile, - // fileTypeFromBlob, FileTypeParser, - // fileTypeStream, supportedExtensions, supportedMimeTypes } 
from '../src/index.js' import { expect } from 'aegir/chai' +import { getFixtureDataUint8Array } from './get-fixture-data.js' +import { alloc } from 'uint8arrays/alloc' -const __dirname = path.dirname(fileURLToPath(import.meta.url)) - -const fixturePath = path.join(__dirname, '..', '..', 'fixture') const missingTests = new Set([ 'mpc' @@ -297,9 +282,8 @@ async function checkBufferLike (type, bufferLike) { async function testFromBuffer (ext: string, name?: string) { const fixtureName = `${(name ?? 'fixture')}.${ext}` + const chunk = await getFixtureDataUint8Array(fixtureName) - const file = path.join(fixturePath, fixtureName) - const chunk = fs.readFileSync(file) await checkBufferLike(ext, chunk) await checkBufferLike(ext, new Uint8Array(chunk)) await checkBufferLike(ext, chunk.buffer.slice(chunk.byteOffset, chunk.byteOffset + chunk.byteLength)) @@ -314,16 +298,8 @@ async function testFromBuffer (ext: string, name?: string) { // } async function testFalsePositive (ext, name) { - const file = path.join(fixturePath, `${name}.${ext}`) - const fileContent = fs.readFileSync(file) + const chunk = await getFixtureDataUint8Array(`${name}.${ext}`) - // await t.is(await fileTypeFromBuffer(fileContent), undefined) - await expect(fileTypeFromBuffer(fileContent)).to.eventually.be(undefined) - - const chunk = fs.readFileSync(file) - // t.is(await fileTypeFromBuffer(chunk), undefined) - // t.is(await fileTypeFromBuffer(new Uint8Array(chunk)), undefined) - // t.is(await fileTypeFromBuffer(chunk.buffer), undefined) await expect(fileTypeFromBuffer(chunk)).to.eventually.be(undefined) await expect(fileTypeFromBuffer(new Uint8Array(chunk))).to.eventually.be(undefined) await expect(fileTypeFromBuffer(chunk.buffer)).to.eventually.be(undefined) @@ -473,183 +449,16 @@ it('validate the input argument type', async () => { // await t.notThrowsAsync(fileTypeFromBuffer(Buffer.from('x'))) // await t.notThrowsAsync(fileTypeFromBuffer(new Uint8Array())) // await 
t.notThrowsAsync(fileTypeFromBuffer(new ArrayBuffer())) - await expect(fileTypeFromBuffer(Buffer.from('x'))).to.eventually.not.be.rejected() await expect(fileTypeFromBuffer(new Uint8Array())).to.eventually.not.be.rejected() + await expect(fileTypeFromBuffer(new Uint8Array().buffer)).to.eventually.not.be.rejected() await expect(fileTypeFromBuffer(new ArrayBuffer(0))).to.eventually.not.be.rejected() }) -it('validate the repo has all extensions and mimes in sync', () => { - // File: core.js (base truth) - function readIndexJS () { - const core = fs.readFileSync('src/core.js', { encoding: 'utf8' }) - const extArray = core.match(/(?<=ext:\s')(.*)(?=',)/g) - const mimeArray = core.match(/(?<=mime:\s')(.*)(?=')/g) - const exts = new Set(extArray) - const mimes = new Set(mimeArray) - - return { - exts, - mimes - } - } - - // File: core.d.ts - function readIndexDTS () { - const core = fs.readFileSync('core.d.ts', { encoding: 'utf8' }) - const matches = core.match(/(?<=\|\s')(.*)(?=')/g) - const extArray: string[] = [] - const mimeArray: string[] = [] - if (matches == null) { - return { - extArray, - mimeArray - } - } - - for (const match of matches) { - if (match.includes('/')) { - mimeArray.push(match) - } else { - extArray.push(match) - } - } - - return { - extArray, - mimeArray - } - } - - // File: package.json - function readPackageJSON () { - const packageJson = fs.readFileSync('package.json', { encoding: 'utf8' }) - const { keywords } = JSON.parse(packageJson) - - const allowedExtras = new Set([ - 'mime', - 'file', - 'type', - 'magic', - 'archive', - 'image', - 'img', - 'pic', - 'picture', - 'flash', - 'photo', - 'video', - 'detect', - 'check', - 'is', - 'exif', - 'binary', - 'buffer', - 'uint8array', - 'webassembly' - ]) - - const extArray = keywords.filter(keyword => !allowedExtras.has(keyword)) - return extArray - } - - // File: readme.md - function readReadmeMD () { - const index = fs.readFileSync('readme.md', { encoding: 'utf8' }) - const extArray = 
index.match(/(?<=-\s\[`)(.*)(?=`)/g) - return extArray - } - - // Helpers - // Find extensions/mimes that are defined twice in a file - function findDuplicates (input) { - // TODO: Fix this. - return input.reduce((accumulator, element, index, array) => { - if (array.indexOf(element) !== index && !accumulator.includes(element)) { - accumulator.push(element) - } - - return accumulator - }, []) - } - - // Find extensions/mimes that are in another file but not in `core.js` - function findExtras (array, set) { - return array.filter(element => !set.has(element)) - } - - // Find extensions/mimes that are in `core.js` but missing from another file - function findMissing (array: string[], set) { - const missing: string[] = [] - const other = new Set(array) - for (const element of set) { - if (!other.has(element)) { - missing.push(element) - } - } - - return missing - } - - // Test runner - function validate (found, baseTruth, fileName, extOrMime) { - const duplicates = findDuplicates(found) - const extras = findExtras(found, baseTruth) - const missing = findMissing(found, baseTruth) - // t.is(duplicates.length, 0, `Found duplicate ${extOrMime}: ${duplicates} in ${fileName}.`) - // t.is(extras.length, 0, `Extra ${extOrMime}: ${extras} in ${fileName}.`) - // t.is(missing.length, 0, `Missing ${extOrMime}: ${missing} in ${fileName}.`) - expect(duplicates).to.have.length(0, `Found duplicate ${extOrMime}: ${duplicates} in ${fileName}.`) - expect(extras).to.have.length(0, `Extra ${extOrMime}: ${extras} in ${fileName}.`) - expect(missing).to.have.length(0, `Missing ${extOrMime}: ${missing} in ${fileName}.`) - } - - // Get the base truth of extensions and mimes supported from core.js - const { exts, mimes } = readIndexJS() - - // Validate all extensions - const filesWithExtensions = { - 'core.d.ts': readIndexDTS().extArray, - 'supported.js': [...supportedExtensions], - 'package.json': readPackageJSON(), - 'readme.md': readReadmeMD() - } - - for (const fileName in 
filesWithExtensions) { - if (filesWithExtensions[fileName]) { - const foundExtensions = filesWithExtensions[fileName] - validate(foundExtensions, exts, fileName, 'extensions') - } - } - - // Validate all mimes - const filesWithMimeTypes = { - 'core.d.ts': readIndexDTS().mimeArray, - 'supported.js': [...supportedMimeTypes] - } - - for (const fileName in filesWithMimeTypes) { - if (filesWithMimeTypes[fileName]) { - const foundMimeTypes = filesWithMimeTypes[fileName] - validate(foundMimeTypes, mimes, fileName, 'mimes') - } - } -}) - -// class BufferedStream extends stream.Readable { -// constructor (buffer) { -// super() -// this.push(buffer) -// this.push(null) -// } - -// _read () {} -// } - it('odd file sizes', async () => { const oddFileSizes = [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 255, 256, 257, 511, 512, 513] for (const size of oddFileSizes) { - const buffer = Buffer.alloc(size) + const buffer = alloc(size) // await t.notThrowsAsync(fileTypeFromBuffer(buffer), `fromBuffer: File size: ${size} bytes`) await expect(fileTypeFromBuffer(buffer)).to.eventually.not.be.rejected(`fromBuffer: File size: ${size} bytes`) } @@ -661,47 +470,17 @@ it('odd file sizes', async () => { // } }) -it('supported files types are listed alphabetically', async () => { - const readme = await fs.promises.readFile('readme.md', { encoding: 'utf8' }) - let currentNode = new ReadmeParser().parse(readme).firstChild - - while (currentNode) { - if (currentNode.type === 'heading' && currentNode.firstChild.literal === 'Supported file types') { - // Header → List → First list item - currentNode = currentNode.next.firstChild - break - } - - currentNode = currentNode.next - } - - let previousFileType - - while (currentNode) { - // List item → Paragraph → Link → Inline code → Text - const currentFileType = currentNode.firstChild.firstChild.firstChild.literal - - if (previousFileType) { - // t.true(currentFileType > previousFileType, `${currentFileType} should be listed before ${previousFileType}`) - 
expect(currentFileType > previousFileType).to.be.true(`${currentFileType} should be listed before ${previousFileType}`) - } - - previousFileType = currentFileType - currentNode = currentNode.next - } -}) it('corrupt MKV throws', async () => { - const filePath = path.join(fixturePath, 'fixture-corrupt.mkv') - const fileContent = fs.readFileSync(filePath) - // await t.throwsAsync(fileTypeFromBuffer(fileContent), { message: /End-Of-Stream/ }) + const fileContent = await getFixtureDataUint8Array('fixture-corrupt.mkv') + await expect(fileTypeFromBuffer(fileContent)).to.eventually.be.rejectedWith(/End-Of-Stream/) }) // Create a custom detector for the just made up "unicorn" file type const unicornDetector = async tokenizer => { const unicornHeader = [85, 78, 73, 67, 79, 82, 78] // "UNICORN" as decimal string - const buffer = Buffer.alloc(7) + const buffer = alloc(7) await tokenizer.peekBuffer(buffer, { length: unicornHeader.length, mayBeLess: true }) if (unicornHeader.every((value, index) => value === buffer[index])) { return { ext: 'unicorn', mime: 'application/unicorn' } @@ -731,28 +510,25 @@ it('fileTypeFromBlob should detect custom file type "unicorn" using custom detec }) it('fileTypeFromBlob should keep detecting default file types when no custom detector matches', async () => { - const file = path.join(fixturePath, 'fixture.png') - const chunk = fs.readFileSync(file) + const chunk = await getFixtureDataUint8Array('fixture.png') + const blob = new Blob([chunk]) const customDetectors = [unicornDetector] const parser = new FileTypeParser({ customDetectors }) const result = await parser.fromBlob(blob) - // t.deepEqual(result, { ext: 'png', mime: 'image/png' }) expect(result).to.deep.equal({ ext: 'png', mime: 'image/png' }) }) it('fileTypeFromBlob should allow overriding default file type detectors', async () => { - const file = path.join(fixturePath, 'fixture.png') - const chunk = fs.readFileSync(file) + const chunk = await getFixtureDataUint8Array('fixture.png') 
const blob = new Blob([chunk]) const customDetectors = [mockPngDetector] const parser = new FileTypeParser({ customDetectors }) const result = await parser.fromBlob(blob) - // t.deepEqual(result, { ext: 'mockPng', mime: 'image/mockPng' }) expect(result).to.deep.equal({ ext: 'mockPng', mime: 'image/mockPng' }) }) @@ -764,31 +540,26 @@ it('fileTypeFromBuffer should detect custom file type "unicorn" using custom det const parser = new FileTypeParser({ customDetectors }) const result = await parser.fromBuffer(uint8ArrayContent) - // t.deepEqual(result, { ext: 'unicorn', mime: 'application/unicorn' }) expect(result).to.deep.equal({ ext: 'unicorn', mime: 'application/unicorn' }) }) it('fileTypeFromBuffer should keep detecting default file types when no custom detector matches', async () => { - const file = path.join(fixturePath, 'fixture.png') - const uint8ArrayContent = fs.readFileSync(file) + const uint8ArrayContent = await getFixtureDataUint8Array('fixture.png') const customDetectors = [unicornDetector] const parser = new FileTypeParser({ customDetectors }) const result = await parser.fromBuffer(uint8ArrayContent) - // t.deepEqual(result, { ext: 'png', mime: 'image/png' }) expect(result).to.deep.equal({ ext: 'png', mime: 'image/png' }) }) it('fileTypeFromBuffer should allow overriding default file type detectors', async () => { - const file = path.join(fixturePath, 'fixture.png') - const uint8ArrayContent = fs.readFileSync(file) + const uint8ArrayContent = await getFixtureDataUint8Array('fixture.png') const customDetectors = [mockPngDetector] const parser = new FileTypeParser({ customDetectors }) const result = await parser.fromBuffer(uint8ArrayContent) - // t.deepEqual(result, { ext: 'mockPng', mime: 'image/mockPng' }) expect(result).to.deep.equal({ ext: 'mockPng', mime: 'image/mockPng' }) }) diff --git a/test/get-fixture-data.ts b/test/get-fixture-data.ts new file mode 100644 index 00000000..b7a6f5b1 --- /dev/null +++ b/test/get-fixture-data.ts @@ -0,0 +1,15 @@ 
+async function getFixtureData (filename: string): Promise<Response> {
+
+  const fixtureDataResp = await fetch(`${process.env.FIXTURE_DATA_SERVER}/${filename}`, { method: 'GET' })
+
+  if (!fixtureDataResp.ok) throw new Error(`Failed to fetch ${filename}: ${fixtureDataResp.statusText}`)
+  if (fixtureDataResp.body == null) throw new Error(`Failed to fetch ${filename}: no body`)
+
+  return fixtureDataResp
+}
+
+export async function getFixtureDataUint8Array (filename: string): Promise<Uint8Array> {
+  const fixtureDataResp = await getFixtureData(filename)
+
+  return new Uint8Array(await fixtureDataResp.arrayBuffer())
+}
diff --git a/test/node.ts b/test/node.ts
new file mode 100644
index 00000000..1a3337d0
--- /dev/null
+++ b/test/node.ts
@@ -0,0 +1,213 @@
+/* eslint-env mocha */
+
+// import process from 'node:process';
+// import { Buffer } from 'node:buffer'
+import fs from 'node:fs'
+// import path from 'node:path'
+// import stream from 'node:stream'
+// import { fileURLToPath } from 'node:url'
+// import test from 'ava'
+import { Parser as ReadmeParser } from 'commonmark'
+// import { readableNoopStream } from 'noop-stream'
+import {
+  // fileTypeFromBuffer,
+  // fileTypeFromStream,
+  // fileTypeFromFile,
+  // fileTypeFromBlob,
+  // FileTypeParser,
+  // fileTypeStream,
+  supportedExtensions,
+  supportedMimeTypes
+} from '../src/index.js'
+import { expect } from 'aegir/chai'
+
+import './file-type.spec.js'
+// import loadFixture from 'aegir/fixtures'
+
+it('validate the repo has all extensions and mimes in sync', () => {
+  // File: core.js (base truth)
+  function readIndexJS () {
+    const core = fs.readFileSync('src/core.js', { encoding: 'utf8' })
+
+    const extArray = core.match(/(?<=ext:\s')(.*)(?=',)/g)
+    const mimeArray = core.match(/(?<=mime:\s')(.*)(?=')/g)
+    const exts = new Set(extArray)
+    const mimes = new Set(mimeArray)
+
+    return {
+      exts,
+      mimes
+    }
+  }
+
+  // File: core.d.ts
+  function readIndexDTS () {
+    const core = fs.readFileSync('core.d.ts', { encoding: 'utf8' })
+
const matches = core.match(/(?<=\|\s')(.*)(?=')/g) + const extArray: string[] = [] + const mimeArray: string[] = [] + if (matches == null) { + return { + extArray, + mimeArray + } + } + + for (const match of matches) { + if (match.includes('/')) { + mimeArray.push(match) + } else { + extArray.push(match) + } + } + + return { + extArray, + mimeArray + } + } + + // File: package.json + function readPackageJSON () { + const packageJson = fs.readFileSync('package.json', { encoding: 'utf8' }) + const { keywords } = JSON.parse(packageJson) + + const allowedExtras = new Set([ + 'mime', + 'file', + 'type', + 'magic', + 'archive', + 'image', + 'img', + 'pic', + 'picture', + 'flash', + 'photo', + 'video', + 'detect', + 'check', + 'is', + 'exif', + 'binary', + 'buffer', + 'uint8array', + 'webassembly' + ]) + + const extArray = keywords.filter(keyword => !allowedExtras.has(keyword)) + return extArray + } + + // File: readme.md + function readReadmeMD () { + const index = fs.readFileSync('readme.md', { encoding: 'utf8' }) + const extArray = index.match(/(?<=-\s\[`)(.*)(?=`)/g) + return extArray + } + + // Helpers + // Find extensions/mimes that are defined twice in a file + function findDuplicates (input) { + // TODO: Fix this. 
+ return input.reduce((accumulator, element, index, array) => { + if (array.indexOf(element) !== index && !accumulator.includes(element)) { + accumulator.push(element) + } + + return accumulator + }, []) + } + + // Find extensions/mimes that are in another file but not in `core.js` + function findExtras (array, set) { + return array.filter(element => !set.has(element)) + } + + // Find extensions/mimes that are in `core.js` but missing from another file + function findMissing (array: string[], set) { + const missing: string[] = [] + const other = new Set(array) + for (const element of set) { + if (!other.has(element)) { + missing.push(element) + } + } + + return missing + } + + // Test runner + function validate (found, baseTruth, fileName, extOrMime) { + const duplicates = findDuplicates(found) + const extras = findExtras(found, baseTruth) + const missing = findMissing(found, baseTruth) + // t.is(duplicates.length, 0, `Found duplicate ${extOrMime}: ${duplicates} in ${fileName}.`) + // t.is(extras.length, 0, `Extra ${extOrMime}: ${extras} in ${fileName}.`) + // t.is(missing.length, 0, `Missing ${extOrMime}: ${missing} in ${fileName}.`) + expect(duplicates).to.have.length(0, `Found duplicate ${extOrMime}: ${duplicates} in ${fileName}.`) + expect(extras).to.have.length(0, `Extra ${extOrMime}: ${extras} in ${fileName}.`) + expect(missing).to.have.length(0, `Missing ${extOrMime}: ${missing} in ${fileName}.`) + } + + // Get the base truth of extensions and mimes supported from core.js + const { exts, mimes } = readIndexJS() + + // Validate all extensions + const filesWithExtensions = { + 'core.d.ts': readIndexDTS().extArray, + 'supported.js': [...supportedExtensions], + 'package.json': readPackageJSON(), + 'readme.md': readReadmeMD() + } + + for (const fileName in filesWithExtensions) { + if (filesWithExtensions[fileName]) { + const foundExtensions = filesWithExtensions[fileName] + validate(foundExtensions, exts, fileName, 'extensions') + } + } + + // Validate all mimes 
+ const filesWithMimeTypes = { + 'core.d.ts': readIndexDTS().mimeArray, + 'supported.js': [...supportedMimeTypes] + } + + for (const fileName in filesWithMimeTypes) { + if (filesWithMimeTypes[fileName]) { + const foundMimeTypes = filesWithMimeTypes[fileName] + validate(foundMimeTypes, mimes, fileName, 'mimes') + } + } +}) + +it('supported files types are listed alphabetically', async () => { + const readme = await fs.promises.readFile('readme.md', { encoding: 'utf8' }) + let currentNode = new ReadmeParser().parse(readme).firstChild + + while (currentNode) { + if (currentNode.type === 'heading' && currentNode.firstChild.literal === 'Supported file types') { + // Header → List → First list item + currentNode = currentNode.next.firstChild + break + } + + currentNode = currentNode.next + } + + let previousFileType + + while (currentNode) { + // List item → Paragraph → Link → Inline code → Text + const currentFileType = currentNode.firstChild.firstChild.firstChild.literal + + if (previousFileType) { + // t.true(currentFileType > previousFileType, `${currentFileType} should be listed before ${previousFileType}`) + expect(currentFileType > previousFileType).to.be.true(`${currentFileType} should be listed before ${previousFileType}`) + } + + previousFileType = currentFileType + currentNode = currentNode.next + } +})