Fix Windows tests (#212)
* Fix Windows fetchContent()
* Fix Windows bundling & config loading
* Fix astro-prettier formatting for Windows
parent 8f1acf57a5
commit 9d092b56c7
19 changed files with 307 additions and 300 deletions
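The common thread in these fixes: instead of joining OS-specific path strings, the build now works with file:// URLs and tiny-glob, whose matches are always '/'-separated and relative to the cwd you pass it. A minimal sketch of the pattern, in the spirit of the allPages() change below (listPages is an illustrative name, not an astro export; the root URL is assumed to end with a trailing slash):

import { fileURLToPath } from 'url';
import glob from 'tiny-glob';

/** List .astro/.md pages under a directory given as a file:// URL. */
async function listPages(root: URL): Promise<URL[]> {
  const cwd = fileURLToPath(root); // OS-specific path, e.g. C:\project\src\pages on Windows
  const files = await glob('**/*.{astro,md}', { cwd, filesOnly: true }); // matches are '/'-separated
  return files.map((f) => new URL(f, root)); // resolve matches back to portable file URLs
}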
.changeset/itchy-carrots-visit.md (new file, 5 lines)

@@ -0,0 +1,5 @@
+---
+'astro': patch
+---
+
+Bugfix: Windows collection API path bug
@@ -76,12 +76,14 @@
     "rehype-parse": "^7.0.1",
     "rollup": "^2.43.1",
     "rollup-plugin-terser": "^7.0.2",
-    "sass": "^1.32.8",
+    "sass": "^1.32.13",
     "shorthash": "^0.0.2",
+    "slash": "^4.0.0",
     "snowpack": "^3.3.7",
     "source-map-support": "^0.5.19",
     "string-width": "^5.0.0",
     "svelte": "^3.35.0",
+    "tiny-glob": "^0.2.8",
     "unified": "^9.2.1",
     "vue": "^3.0.10",
     "yargs-parser": "^20.2.7"
@@ -100,8 +102,7 @@
     "@types/react-dom": "^17.0.2",
     "@types/sass": "^1.16.0",
     "@types/yargs-parser": "^20.2.0",
-    "astro-scripts": "0.0.1",
-    "slash": "^4.0.0"
+    "astro-scripts": "0.0.1"
   },
   "engines": {
     "node": ">=14.0.0",
@@ -10,7 +10,7 @@ import cheerio from 'cheerio';
 import del from 'del';
 import { bold, green, yellow } from 'kleur/colors';
 import mime from 'mime';
-import { fdir } from 'fdir';
+import glob from 'tiny-glob';
 import { bundleCSS } from './build/bundle/css.js';
 import { bundleJS, collectJSImports } from './build/bundle/js';
 import { buildCollectionPage, buildStaticPage, getPageType } from './build/page.js';
@@ -26,13 +26,10 @@ const logging: LogOptions = {
 };
 
 /** Return contents of src/pages */
-async function allPages(root: URL) {
-  const api = new fdir()
-    .filter((p) => /\.(astro|md)$/.test(p))
-    .withFullPaths()
-    .crawl(fileURLToPath(root));
-  const files = await api.withPromise();
-  return files as string[];
+async function allPages(root: URL): Promise<URL[]> {
+  const cwd = fileURLToPath(root);
+  const files = await glob('**/*.{astro,md}', { cwd, filesOnly: true });
+  return files.map((f) => new URL(f, root));
 }
 
 /** Is this URL remote? */
@@ -63,22 +60,20 @@ export async function build(astroConfig: AstroConfig): Promise<0 | 1> {
   const { runtimeConfig } = runtime;
   const { backendSnowpack: snowpack } = runtimeConfig;
 
-  const pages = await allPages(pageRoot);
-
-  // 0. erase build directory
-  await del(fileURLToPath(dist));
-
-  /**
-   * 1. Build Pages
-   * Source files are built in parallel and stored in memory. Most assets are also gathered here, too.
-   */
-  timer.build = performance.now();
   try {
+    // 0. erase build directory
+    await del(fileURLToPath(dist));
+
+    /**
+     * 1. Build Pages
+     * Source files are built in parallel and stored in memory. Most assets are also gathered here, too.
+     */
+    timer.build = performance.now();
+    const pages = await allPages(pageRoot);
     info(logging, 'build', yellow('! building pages...'));
     const release = trapWarn(); // Vue also console.warns, this silences it.
     await Promise.all(
-      pages.map(async (pathname) => {
-        const filepath = new URL(`file://${pathname}`);
+      pages.map(async (filepath) => {
         const buildPage = getPageType(filepath) === 'collection' ? buildCollectionPage : buildStaticPage;
         await buildPage({
           astroConfig,
@@ -94,152 +89,140 @@ export async function build(astroConfig: AstroConfig): Promise<0 | 1> {
     );
     info(logging, 'build', green('✔'), 'pages built.');
     release();
-  } catch (err) {
-    error(logging, 'generate', err);
-    await runtime.shutdown();
-    return 1;
-  }
-  debug(logging, 'build', `built pages [${stopTimer(timer.build)}]`);
+    debug(logging, 'build', `built pages [${stopTimer(timer.build)}]`);
 
     // after pages are built, build depTree
     timer.deps = performance.now();
     const scanPromises: Promise<void>[] = [];
     for (const id of Object.keys(buildState)) {
       if (buildState[id].contentType !== 'text/html') continue; // only scan HTML files
       const pageDeps = findDeps(buildState[id].contents as string, {
         astroConfig,
         srcPath: buildState[id].srcPath,
       });
       depTree[id] = pageDeps;
 
       // while scanning we will find some unbuilt files; make sure those are all built while scanning
       for (const url of [...pageDeps.js, ...pageDeps.css, ...pageDeps.images]) {
         if (!buildState[url])
           scanPromises.push(
             runtime.load(url).then((result) => {
               if (result.statusCode !== 200) {
                 // there shouldn’t be a build error here
                 throw (result as any).error || new Error(`unexpected status ${result.statusCode} when loading ${url}`);
               }
               buildState[url] = {
                 srcPath: new URL(url, projectRoot),
                 contents: result.contents,
                 contentType: result.contentType || mime.getType(url) || '',
               };
             })
           );
+      }
     }
-  }
-  try {
     await Promise.all(scanPromises);
-  } catch (err) {
-    error(logging, 'build', err);
-    return 1;
-  }
-  debug(logging, 'build', `scanned deps [${stopTimer(timer.deps)}]`);
+    debug(logging, 'build', `scanned deps [${stopTimer(timer.deps)}]`);
 
     /**
      * 2. Bundling 1st Pass: In-memory
      * Bundle CSS, and anything else that can happen in memory (for now, JS bundling happens after writing to disk)
      */
     info(logging, 'build', yellow('! optimizing css...'));
     timer.prebundle = performance.now();
     await Promise.all([
       bundleCSS({ buildState, astroConfig, logging, depTree }).then(() => {
         debug(logging, 'build', `bundled CSS [${stopTimer(timer.prebundle)}]`);
       }),
       // TODO: optimize images?
     ]);
     // TODO: minify HTML?
     info(logging, 'build', green('✔'), 'css optimized.');
 
     /**
      * 3. Write to disk
      * Also clear in-memory bundle
      */
     // collect stats output
     const urlStats = await collectBundleStats(buildState, depTree);
 
     // collect JS imports for bundling
     const jsImports = await collectJSImports(buildState);
 
     // write sitemap
     if (astroConfig.buildOptions.sitemap && astroConfig.buildOptions.site) {
       timer.sitemap = performance.now();
       info(logging, 'build', yellow('! creating sitemap...'));
       const sitemap = generateSitemap(buildState, astroConfig.buildOptions.site);
       const sitemapPath = new URL('sitemap.xml', dist);
       await fs.promises.mkdir(path.dirname(fileURLToPath(sitemapPath)), { recursive: true });
       await fs.promises.writeFile(sitemapPath, sitemap, 'utf8');
       info(logging, 'build', green('✔'), 'sitemap built.');
       debug(logging, 'build', `built sitemap [${stopTimer(timer.sitemap)}]`);
     }
 
     // write to disk and free up memory
     timer.write = performance.now();
-  await Promise.all(
-    Object.keys(buildState).map(async (id) => {
-      const outPath = new URL(`.${id}`, dist);
-      const parentDir = path.posix.dirname(fileURLToPath(outPath));
-      await fs.promises.mkdir(parentDir, { recursive: true });
-      await fs.promises.writeFile(outPath, buildState[id].contents, buildState[id].encoding);
-      delete buildState[id];
-      delete depTree[id];
-    })
-  );
-  debug(logging, 'build', `wrote files to disk [${stopTimer(timer.write)}]`);
-
-  /**
-   * 4. Copy Public Assets
-   */
-  if (fs.existsSync(astroConfig.public)) {
-    info(logging, 'build', yellow(`! copying public folder...`));
-    timer.public = performance.now();
-    const pub = astroConfig.public;
-    const publicFiles = (await new fdir().withFullPaths().crawl(fileURLToPath(pub)).withPromise()) as string[];
     await Promise.all(
-      publicFiles.map(async (filepath) => {
-        const fileUrl = new URL(`file://${filepath}`);
-        const rel = path.relative(fileURLToPath(pub), fileURLToPath(fileUrl));
-        const outPath = new URL('./' + rel, dist);
-        await fs.promises.mkdir(path.dirname(fileURLToPath(outPath)), { recursive: true });
-        await fs.promises.copyFile(fileUrl, outPath);
+      Object.keys(buildState).map(async (id) => {
+        const outPath = new URL(`.${id}`, dist);
+        const parentDir = path.dirname(fileURLToPath(outPath));
+        await fs.promises.mkdir(parentDir, { recursive: true });
+        await fs.promises.writeFile(outPath, buildState[id].contents, buildState[id].encoding);
+        delete buildState[id];
+        delete depTree[id];
       })
     );
-    debug(logging, 'build', `copied public folder [${stopTimer(timer.public)}]`);
-    info(logging, 'build', green('✔'), 'public folder copied.');
-  } else {
-    if (path.basename(astroConfig.public.toString()) !== 'public') {
-      info(logging, 'tip', yellow(`! no public folder ${astroConfig.public} found...`));
-    }
-  }
+    debug(logging, 'build', `wrote files to disk [${stopTimer(timer.write)}]`);
 
     /**
-   * 5. Bundling 2nd Pass: On disk
-   * Bundle JS, which requires hard files to optimize
-   */
-  info(logging, 'build', yellow(`! bundling...`));
-  if (jsImports.size > 0) {
-    try {
+     * 4. Copy Public Assets
+     */
+    if (fs.existsSync(astroConfig.public)) {
+      info(logging, 'build', yellow(`! copying public folder...`));
+      timer.public = performance.now();
+      const cwd = fileURLToPath(astroConfig.public);
+      const publicFiles = await glob('**/*', { cwd, filesOnly: true });
+      await Promise.all(
+        publicFiles.map(async (filepath) => {
+          const srcPath = new URL(filepath, astroConfig.public);
+          const distPath = new URL(filepath, dist);
+          await fs.promises.mkdir(path.dirname(fileURLToPath(distPath)), { recursive: true });
+          await fs.promises.copyFile(srcPath, distPath);
+        })
+      );
+      debug(logging, 'build', `copied public folder [${stopTimer(timer.public)}]`);
+      info(logging, 'build', green('✔'), 'public folder copied.');
+    } else {
+      if (path.basename(astroConfig.public.toString()) !== 'public') {
+        info(logging, 'tip', yellow(`! no public folder ${astroConfig.public} found...`));
+      }
+    }
+
+    /**
+     * 5. Bundling 2nd Pass: On disk
+     * Bundle JS, which requires hard files to optimize
+     */
+    info(logging, 'build', yellow(`! bundling...`));
+    if (jsImports.size > 0) {
       timer.bundleJS = performance.now();
       const jsStats = await bundleJS(jsImports, { dist: new URL(dist + '/', projectRoot), runtime });
       mapBundleStatsToURLStats({ urlStats, depTree, bundleStats: jsStats });
       debug(logging, 'build', `bundled JS [${stopTimer(timer.bundleJS)}]`);
       info(logging, 'build', green(`✔`), 'bundling complete.');
-    } catch (err) {
-      error(logging, 'build', err);
-      await runtime.shutdown();
-      return 1;
-    }
     }
-  }
 
     /**
      * 6. Print stats
      */
     logURLStats(logging, urlStats);
     await runtime.shutdown();
     info(logging, 'build', bold(green('▶ Build Complete!')));
     return 0;
+  } catch (err) {
+    error(logging, 'build', err);
+    await runtime.shutdown();
+    return 1;
+  }
 }
 
 /** Given an HTML string, collect <link> and <img> tags */
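A detail worth calling out in the public-folder copy above: both the source and destination of each file are computed by resolving tiny-glob's relative match against a base URL, so no platform-specific separators are ever concatenated by hand. A hedged sketch of that idiom (copyDir is an illustrative helper, not an astro export; both URLs are assumed to end with '/'):

import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import glob from 'tiny-glob';

/** Copy every file under src into dest, where both are directory file:// URLs. */
async function copyDir(src: URL, dest: URL): Promise<void> {
  const files = await glob('**/*', { cwd: fileURLToPath(src), filesOnly: true });
  await Promise.all(
    files.map(async (rel) => {
      const from = new URL(rel, src); // relative match resolved against the source URL
      const to = new URL(rel, dest);  // ...and against the destination URL
      await fs.promises.mkdir(path.dirname(fileURLToPath(to)), { recursive: true });
      await fs.promises.copyFile(from, to); // fs helpers accept file:// URL objects directly
    })
  );
}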
@@ -37,9 +37,10 @@ export function getPageType(filepath: URL): 'collection' | 'static' {
 
 /** Build collection */
 export async function buildCollectionPage({ astroConfig, filepath, logging, mode, runtime, site, resolvePackageUrl, buildState }: PageBuildOptions): Promise<void> {
-  const rel = path.posix.relative(fileURLToPath(astroConfig.astroRoot) + '/pages', fileURLToPath(filepath)); // pages/index.astro
-  const pagePath = `/${rel.replace(/\$([^.]+)\.astro$/, '$1')}`;
-  const srcPath = new URL('pages/' + rel, astroConfig.astroRoot);
+  const pagesPath = new URL('./pages/', astroConfig.astroRoot);
+  const srcURL = filepath.pathname.replace(pagesPath.pathname, '/');
+  const outURL = srcURL.replace(/\$([^.]+)\.astro$/, '$1');
 
   const builtURLs = new Set<string>(); // !important: internal cache that prevents building the same URLs
 
   /** Recursively build collection URLs */
@@ -48,9 +49,9 @@ export async function buildCollectionPage({ astroConfig, filepath, logging, mode
     const result = await runtime.load(url);
     builtURLs.add(url);
     if (result.statusCode === 200) {
-      const outPath = path.posix.join('/', url, 'index.html');
+      const outPath = path.posix.join(url, '/index.html');
       buildState[outPath] = {
-        srcPath,
+        srcPath: filepath,
         contents: result.contents,
         contentType: 'text/html',
         encoding: 'utf8',
@@ -60,7 +61,7 @@ export async function buildCollectionPage({ astroConfig, filepath, logging, mode
   }
 
   const [result] = await Promise.all([
-    loadCollection(pagePath) as Promise<LoadResult>, // first run will always return a result so assert type here
+    loadCollection(outURL) as Promise<LoadResult>, // first run will always return a result so assert type here
     gatherRuntimes({ astroConfig, buildState, filepath, logging, resolvePackageUrl, mode, runtime }),
   ]);
 
@@ -68,7 +69,7 @@ export async function buildCollectionPage({ astroConfig, filepath, logging, mode
     throw new Error((result as any).error);
   }
   if (result.statusCode === 200 && !result.collectionInfo) {
-    throw new Error(`[${rel}]: Collection page must export createCollection() function`);
+    throw new Error(`[${srcURL}]: Collection page must export createCollection() function`);
   }
 
   // note: for pages that require params (/tag/:tag), we will get a 404 but will still get back collectionInfo that tell us what the URLs should be
@@ -87,11 +88,12 @@ export async function buildCollectionPage({ astroConfig, filepath, logging, mode
   );
 
   if (result.collectionInfo.rss) {
-    if (!site) throw new Error(`[${rel}] createCollection() tried to generate RSS but "buildOptions.site" missing in astro.config.mjs`);
-    const rss = generateRSS({ ...(result.collectionInfo.rss as any), site }, rel.replace(/\$([^.]+)\.astro$/, '$1'));
-    const feedURL = path.posix.join('/feed', `${pagePath}.xml`);
+    if (!site) throw new Error(`[${srcURL}] createCollection() tried to generate RSS but "buildOptions.site" missing in astro.config.mjs`);
+    let feedURL = outURL === '/' ? '/index' : outURL;
+    feedURL = '/feed' + feedURL + '.xml';
+    const rss = generateRSS({ ...(result.collectionInfo.rss as any), site }, { srcFile: srcURL, feedURL });
     buildState[feedURL] = {
-      srcPath,
+      srcPath: filepath,
       contents: rss,
       contentType: 'application/rss+xml',
       encoding: 'utf8',
@@ -102,22 +104,20 @@ export async function buildCollectionPage({ astroConfig, filepath, logging, mode
 
 /** Build static page */
 export async function buildStaticPage({ astroConfig, buildState, filepath, logging, mode, resolvePackageUrl, runtime }: PageBuildOptions): Promise<void> {
-  const rel = path.posix.relative(fileURLToPath(astroConfig.astroRoot) + '/pages', fileURLToPath(filepath)); // pages/index.astro
-  const pagePath = `/${rel.replace(/\.(astro|md)$/, '')}`;
-
-  let relPath = path.posix.join('/', rel.replace(/\.(astro|md)$/, '.html'));
-  if (!relPath.endsWith('index.html')) {
-    relPath = relPath.replace(/\.html$/, '/index.html');
-  }
-
-  const srcPath = new URL('pages/' + rel, astroConfig.astroRoot);
+  const pagesPath = new URL('./pages/', astroConfig.astroRoot);
+  const url = filepath.pathname.replace(pagesPath.pathname, '/').replace(/(index)?\.(astro|md)$/, '');
 
   // build page in parallel with gathering runtimes
   await Promise.all([
-    runtime.load(pagePath).then((result) => {
-      if (result.statusCode === 200) {
-        buildState[relPath] = { srcPath, contents: result.contents, contentType: 'text/html', encoding: 'utf8' };
-      }
+    runtime.load(url).then((result) => {
+      if (result.statusCode !== 200) throw new Error((result as any).error);
+      const outFile = path.posix.join(url, '/index.html');
+      buildState[outFile] = {
+        srcPath: filepath,
+        contents: result.contents,
+        contentType: 'text/html',
+        encoding: 'utf8',
+      };
     }),
     gatherRuntimes({ astroConfig, buildState, filepath, logging, resolvePackageUrl, mode, runtime }),
   ]);
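The route computation above no longer round-trips through fileURLToPath() and path.posix.relative(); it compares URL pathnames, which are '/'-separated on every platform. A small sketch of that idea (pageRoute is an illustrative name; the real logic lives inline in buildStaticPage and buildCollectionPage):

import { URL } from 'url';

/** Derive the request path for a page file from URL pathnames alone. */
function pageRoute(filepath: URL, astroRoot: URL): string {
  const pagesPath = new URL('./pages/', astroRoot);
  return filepath.pathname
    .replace(pagesPath.pathname, '/')      // strip the .../src/pages/ prefix
    .replace(/(index)?\.(astro|md)$/, ''); // drop the extension (and a trailing 'index')
}

For example, with astroRoot = file:///C:/proj/src/ and filepath = file:///C:/proj/src/pages/about.astro, this yields '/about' on Windows and POSIX alike.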
@@ -3,17 +3,25 @@ import parser from 'fast-xml-parser';
 import { canonicalURL } from './util.js';
 
 /** Validates createCollection.rss */
-export function validateRSS(rss: CollectionRSS, filename: string): void {
-  if (!rss.title) throw new Error(`[${filename}] rss.title required`);
-  if (!rss.description) throw new Error(`[${filename}] rss.description required`);
-  if (typeof rss.item !== 'function') throw new Error(`[${filename}] rss.item() function required`);
+export function validateRSS(rss: CollectionRSS, srcFile: string): void {
+  if (!rss.title) throw new Error(`[${srcFile}] rss.title required`);
+  if (!rss.description) throw new Error(`[${srcFile}] rss.description required`);
+  if (typeof rss.item !== 'function') throw new Error(`[${srcFile}] rss.item() function required`);
+}
+
+type RSSInput<T> = { data: T[]; site: string } & CollectionRSS<T>;
+interface RSSOptions {
+  srcFile: string;
+  feedURL: string;
 }
 
 /** Generate RSS 2.0 feed */
-export function generateRSS<T>(input: { data: T[]; site: string } & CollectionRSS<T>, filename: string): string {
-  let xml = `<?xml version="1.0" encoding="UTF-8"?><rss version="2.0"`;
+export function generateRSS<T>(input: RSSInput<T>, options: RSSOptions): string {
+  const { srcFile, feedURL } = options;
 
-  validateRSS(input as any, filename);
+  validateRSS(input as any, srcFile);
+
+  let xml = `<?xml version="1.0" encoding="UTF-8"?><rss version="2.0"`;
 
   // xmlns
   if (input.xmlns) {
@@ -27,18 +35,18 @@ export function generateRSS<T>(input: { data: T[]; site: string } & CollectionRS
   // title, description, customData
   xml += `<title><![CDATA[${input.title}]]></title>`;
   xml += `<description><![CDATA[${input.description}]]></description>`;
-  xml += `<link>${canonicalURL('/feed/' + filename + '.xml', input.site).href}</link>`;
+  xml += `<link>${canonicalURL(feedURL, input.site).href}</link>`;
   if (typeof input.customData === 'string') xml += input.customData;
 
   // items
-  if (!Array.isArray(input.data) || !input.data.length) throw new Error(`[${filename}] data() returned no items. Can’t generate RSS feed.`);
+  if (!Array.isArray(input.data) || !input.data.length) throw new Error(`[${srcFile}] data() returned no items. Can’t generate RSS feed.`);
   for (const item of input.data) {
     xml += `<item>`;
     const result = input.item(item);
     // validate
-    if (typeof result !== 'object') throw new Error(`[${filename}] rss.item() expected to return an object, returned ${typeof result}.`);
-    if (!result.title) throw new Error(`[${filename}] rss.item() returned object but required "title" is missing.`);
-    if (!result.link) throw new Error(`[${filename}] rss.item() returned object but required "link" is missing.`);
+    if (typeof result !== 'object') throw new Error(`[${srcFile}] rss.item() expected to return an object, returned ${typeof result}.`);
+    if (!result.title) throw new Error(`[${srcFile}] rss.item() returned object but required "title" is missing.`);
+    if (!result.link) throw new Error(`[${srcFile}] rss.item() returned object but required "link" is missing.`);
     xml += `<title><![CDATA[${result.title}]]></title>`;
     xml += `<link>${canonicalURL(result.link, input.site).href}</link>`;
     if (result.description) xml += `<description><![CDATA[${result.description}]]></description>`;
@@ -2,13 +2,14 @@ import type { AstroConfig } from '../@types/astro';
 import { performance } from 'perf_hooks';
 
 import path from 'path';
-import { fileURLToPath, URL } from 'url';
+import { URL } from 'url';
 
 /** Normalize URL to its canonical form */
 export function canonicalURL(url: string, base?: string): URL {
   let pathname = url.replace(/\/index.html$/, ''); // index.html is not canonical
   pathname = pathname.replace(/\/1\/?$/, ''); // neither is a trailing /1/ (impl. detail of collections)
   if (!path.extname(pathname)) pathname = pathname.replace(/(\/+)?$/, '/'); // add trailing slash if there’s no extension
+  pathname = pathname.replace(/\/+/g, '/'); // remove duplicate slashes (URL() won’t)
   return new URL(pathname, base);
 }
 
@@ -20,12 +21,14 @@ export function sortSet(set: Set<string>): Set<string> {
 /** Resolve final output URL */
 export function getDistPath(specifier: string, { astroConfig, srcPath }: { astroConfig: AstroConfig; srcPath: URL }): string {
   if (specifier[0] === '/') return specifier; // assume absolute URLs are correct
+  const pagesDir = path.join(astroConfig.astroRoot.pathname, 'pages');
 
-  const fileLoc = path.posix.join(path.posix.dirname(fileURLToPath(srcPath)), specifier);
-  const projectLoc = path.posix.relative(fileURLToPath(astroConfig.astroRoot), fileLoc);
-  const pagesDir = fileURLToPath(new URL('/pages', astroConfig.astroRoot));
+  const fileLoc = path.posix.join(path.posix.dirname(srcPath.pathname), specifier);
+  const projectLoc = path.posix.relative(astroConfig.astroRoot.pathname, fileLoc);
 
+  const isPage = fileLoc.includes(pagesDir);
   // if this lives above src/pages, return that URL
-  if (fileLoc.includes(pagesDir)) {
+  if (isPage) {
     const [, publicURL] = projectLoc.split(pagesDir);
     return publicURL || '/index.html'; // if this is missing, this is the root
   }
@@ -1,16 +1,13 @@
 import path from 'path';
-import { fdir, PathsOutput } from 'fdir';
-import { fileURLToPath } from 'url';
+import glob from 'tiny-glob/sync.js';
 import slash from 'slash';
 
 /**
  * Handling for import.meta.glob and import.meta.globEager
  */
 
 interface GlobOptions {
   namespace: string;
   filename: string;
-  projectRoot: URL;
 }
 
 interface GlobResult {
@@ -20,36 +17,15 @@ interface GlobResult {
   code: string;
 }
 
-const crawler = new fdir();
-
 /** General glob handling */
 function globSearch(spec: string, { filename }: { filename: string }): string[] {
   try {
-    // Note: fdir’s glob requires you to do some work finding the closest non-glob folder.
-    // For example, this fails: .glob("./post/*.md").crawl("/…/src/pages") ❌
-    // …but this doesn’t: .glob("*.md").crawl("/…/src/pages/post") ✅
-    let globDir = '';
-    let glob = spec;
-    for (const part of spec.split('/')) {
-      if (!part.includes('*')) {
-        // iterate through spec until first '*' is reached
-        globDir = path.posix.join(globDir, part); // this must be POSIX-style
-        glob = glob.replace(`${part}/`, ''); // move parent dirs off spec, and onto globDir
-      } else {
-        // at first '*', exit
-        break;
-      }
-    }
-
-    const cwd = path.join(path.dirname(filename), globDir.replace(/\//g, path.sep)); // this must match OS (could be '/' or '\')
-    let found = crawler.glob(glob).crawlWithOptions(cwd, { includeBasePath: true }).sync() as PathsOutput;
+    const cwd = path.dirname(filename);
+    let found = glob(spec, { cwd, filesOnly: true });
     if (!found.length) {
       throw new Error(`No files matched "${spec}" from ${filename}`);
    }
-    return found.map((importPath) => {
-      if (importPath.startsWith('http') || importPath.startsWith('.')) return importPath;
-      return './' + path.posix.join(globDir, path.posix.relative(slash(cwd), importPath));
-    });
+    return found.map((f) => slash(f[0] === '.' ? f : `./${f}`));
   } catch (err) {
     throw new Error(`No files matched "${spec}" from ${filename}`);
   }
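With tiny-glob doing the matching, globSearch() above hands the spec straight to the library and only normalizes the results: slash() turns any backslashes into forward slashes, so the import specifiers the compiler generates stay valid ESM on Windows. Roughly, the new behavior looks like this (resolveContentGlob is an illustrative name for the same logic):

import path from 'path';
import slash from 'slash';
import glob from 'tiny-glob/sync.js';

/** Resolve an Astro.fetchContent() glob relative to the calling file. */
function resolveContentGlob(spec: string, filename: string): string[] {
  const cwd = path.dirname(filename);                 // works whether filename uses '\' or '/'
  const found = glob(spec, { cwd, filesOnly: true }); // matches are relative to cwd
  return found.map((f) => slash(f[0] === '.' ? f : `./${f}`)); // e.g. './post/nested/a.md'
}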
@@ -1,13 +1,11 @@
-import type { CompileOptions } from '../../@types/compiler';
-import type { AstroConfig, ValidExtensionPlugins } from '../../@types/astro';
 import type { Ast, Script, Style, TemplateNode } from 'astro-parser';
-import type { TransformResult } from '../../@types/astro';
+import type { CompileOptions } from '../../@types/compiler';
+import type { AstroConfig, TransformResult, ValidExtensionPlugins } from '../../@types/astro';
 
 import 'source-map-support/register.js';
 import eslexer from 'es-module-lexer';
 import esbuild from 'esbuild';
 import path from 'path';
-import { fileURLToPath } from 'url';
 import { walk } from 'estree-walker';
 import _babelGenerator from '@babel/generator';
 import babelParser from '@babel/parser';
@@ -20,9 +18,9 @@ import { isFetchContent } from './utils.js';
 import { yellow } from 'kleur/colors';
 
 const traverse: typeof babelTraverse.default = (babelTraverse.default as any).default;
-const babelGenerator: typeof _babelGenerator =
 // @ts-ignore
-  _babelGenerator.default;
+const babelGenerator: typeof _babelGenerator = _babelGenerator.default;
 const { transformSync } = esbuild;
 
 interface Attribute {
@@ -453,7 +451,6 @@ function compileModule(module: Script, state: CodegenState, compileOptions: Comp
 
   // handle createCollection, if any
   if (createCollection) {
-    // TODO: improve this? while transforming in-place isn’t great, this happens at most once per-route
     const ast = babelParser.parse(createCollection, {
       sourceType: 'module',
     });
@@ -484,7 +481,7 @@ function compileModule(module: Script, state: CodegenState, compileOptions: Comp
           const spec = (init as any).arguments[0].value;
           if (typeof spec !== 'string') break;
 
-          const globResult = fetchContent(spec, { namespace, filename: state.filename, projectRoot: compileOptions.astroConfig.projectRoot });
+          const globResult = fetchContent(spec, { namespace, filename: state.filename });
 
           let imports = '';
           for (const importStatement of globResult.imports) {
@@ -503,7 +500,7 @@ function compileModule(module: Script, state: CodegenState, compileOptions: Comp
 
   // Astro.fetchContent()
   for (const [namespace, { spec }] of contentImports.entries()) {
-    const globResult = fetchContent(spec, { namespace, filename: state.filename, projectRoot: compileOptions.astroConfig.projectRoot });
+    const globResult = fetchContent(spec, { namespace, filename: state.filename });
     for (const importStatement of globResult.imports) {
      state.importExportStatements.add(importStatement);
    }
@@ -605,7 +602,7 @@ function compileHtml(enterNode: TemplateNode, state: CodegenState, compileOption
       outSource += `h(${wrapper}, ${attributes ? generateAttributes(attributes) : 'null'}`;
     } catch (err) {
       // handle errors in scope with filename
-      const rel = filename.replace(fileURLToPath(astroConfig.projectRoot), '');
+      const rel = filename.replace(astroConfig.projectRoot.pathname, '');
       // TODO: return actual codeframe here
       error(compileOptions.logging, rel, err.toString());
     }
@@ -1,7 +1,7 @@
 import type { AstroConfig } from './@types/astro';
 
 import 'source-map-support/register.js';
-import { join as pathJoin, resolve as pathResolve } from 'path';
+import path from 'path';
 import { existsSync } from 'fs';
 
 /** Type util */
@@ -73,17 +73,13 @@ function normalizeConfig(userConfig: any, root: string): AstroConfig {
 
 /** Attempt to load an `astro.config.mjs` file */
 export async function loadConfig(rawRoot: string | undefined, configFileName = 'astro.config.mjs'): Promise<AstroConfig> {
-  if (typeof rawRoot === 'undefined') {
-    rawRoot = process.cwd();
-  }
-
-  const root = pathResolve(rawRoot);
-  const astroConfigPath = pathJoin(root, configFileName);
+  const root = rawRoot ? path.resolve(rawRoot) : process.cwd();
+  const astroConfigPath = new URL(`./${configFileName}`, `file://${root}/`);
 
   // load
   let config: any;
   if (existsSync(astroConfigPath)) {
-    config = configDefaults((await import(astroConfigPath)).default);
+    config = configDefaults((await import(astroConfigPath.href)).default);
   } else {
     config = configDefaults();
   }
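The config-loading change matters on Windows because dynamic import() of a raw absolute path such as C:\project\astro.config.mjs fails (the drive letter parses as a URL scheme), while importing a file:// URL's href works everywhere. A sketch of the same idea, spelled here with pathToFileURL for clarity, whereas the diff builds the `file://${root}/` base string by hand (importConfig is an illustrative name, not the astro API):

import path from 'path';
import { existsSync } from 'fs';
import { pathToFileURL } from 'url';

/** Locate and import an ESM config file inside the project root. */
async function importConfig(rawRoot: string | undefined, fileName = 'astro.config.mjs') {
  const root = rawRoot ? path.resolve(rawRoot) : process.cwd();
  const configURL = new URL(`./${fileName}`, `${pathToFileURL(root).href}/`);
  if (!existsSync(configURL)) return undefined; // fs helpers accept file:// URL objects
  return (await import(configURL.href)).default; // import by URL href, not by OS path
}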
@@ -2,7 +2,7 @@ import 'source-map-support/register.js';
 import { existsSync } from 'fs';
 import path from 'path';
 import { fileURLToPath } from 'url';
-import { fdir, PathsOutput } from 'fdir';
+import glob from 'tiny-glob/sync.js';
 
 interface PageLocation {
   fileURL: URL;
@@ -108,14 +108,9 @@ export function searchForPage(url: URL, astroRoot: URL): SearchResult {
   };
 }
 
-const crawler = new fdir();
-
 /** load a collection route */
 function loadCollection(url: string, astroRoot: URL): { currentPage?: number; location: PageLocation } | undefined {
-  const pages = (crawler
-    .glob('**/*')
-    .crawl(path.join(fileURLToPath(astroRoot), 'pages'))
-    .sync() as PathsOutput).filter((filepath) => filepath.startsWith('$') || filepath.includes('/$'));
+  const pages = glob('**/$*.astro', { cwd: path.join(fileURLToPath(astroRoot), 'pages'), filesOnly: true });
   for (const pageURL of pages) {
     const reqURL = new RegExp('^/' + pageURL.replace(/\$([^/]+)\.astro/, '$1') + '/?(.*)');
     const match = url.match(reqURL);
@@ -7,8 +7,8 @@ const Collections = suite('Collections');
 
 setup(Collections, './fixtures/astro-collection');
 
-Collections('generates list & sorts successfully', async ({ runtime }) => {
-  const result = await runtime.load('/posts');
+Collections('shallow selector (*.md)', async ({ runtime }) => {
+  const result = await runtime.load('/shallow');
   if (result.error) throw new Error(result.error);
   const $ = doc(result.contents);
   const urls = [
@@ -16,11 +16,24 @@ Collections('generates list & sorts successfully', async ({ runtime }) => {
       return $(this).attr('href');
     }),
   ];
-  assert.equal(urls, ['/post/nested/a', '/post/three', '/post/two']);
+  // assert they loaded in newest -> oldest order (not alphabetical)
+  assert.equal(urls, ['/post/three', '/post/two', '/post/one']);
+});
+
+Collections('deep selector (**/*.md)', async ({ runtime }) => {
+  const result = await runtime.load('/nested');
+  if (result.error) throw new Error(result.error);
+  const $ = doc(result.contents);
+  const urls = [
+    ...$('#posts a').map(function () {
+      return $(this).attr('href');
+    }),
+  ];
+  assert.equal(urls, ['/post/nested/a', '/post/three', '/post/two', '/post/one']);
 });
 
 Collections('generates pagination successfully', async ({ runtime }) => {
-  const result = await runtime.load('/posts');
+  const result = await runtime.load('/paginated');
   if (result.error) throw new Error(result.error);
   const $ = doc(result.contents);
   const prev = $('#prev-page');
@@ -1,47 +1,14 @@
-import { existsSync, promises as fsPromises } from 'fs';
-import { join } from 'path';
-import { fileURLToPath } from 'url';
 import { suite } from 'uvu';
 import * as assert from 'uvu/assert';
-import { createRuntime } from '#astro/runtime';
-import { build } from '#astro/build';
-import { loadConfig } from '#astro/config';
 import { doc } from './test-utils.js';
-
-const { rmdir, readFile } = fsPromises;
+import { setup, setupBuild } from './helpers.js';
 
 const Markdown = suite('Astro Markdown');
 
-let runtime, setupError, fixturePath, astroConfig;
-
-Markdown.before(async () => {
-  fixturePath = fileURLToPath(new URL('./fixtures/astro-markdown', import.meta.url));
-
-  astroConfig = await loadConfig(fixturePath);
-
-  const logging = {
-    level: 'error',
-    dest: process.stderr,
-  };
-
-  try {
-    runtime = await createRuntime(astroConfig, { logging });
-  } catch (err) {
-    console.error(err);
-    setupError = err;
-  }
-});
-
-Markdown.after(async () => {
-  (await runtime) && runtime.shutdown();
-  rmdir(join(fixturePath, 'dist'), { recursive: true });
-});
-
-Markdown('No errors creating a runtime', () => {
-  assert.equal(setupError, undefined);
-});
-
-Markdown('Can load markdown pages with hmx', async () => {
+setup(Markdown, './fixtures/astro-markdown');
+setupBuild(Markdown, './fixtures/astro-markdown');
+
+Markdown('Can load markdown pages with hmx', async ({ runtime }) => {
   const result = await runtime.load('/post');
   if (result.error) throw new Error(result.error);
 
@@ -50,7 +17,7 @@ Markdown('Can load markdown pages with hmx', async () => {
   assert.ok($('#test').length, 'There is a div added via a component from markdown');
 });
 
-Markdown('Can load more complex jsxy stuff', async () => {
+Markdown('Can load more complex jsxy stuff', async ({ runtime }) => {
   const result = await runtime.load('/complex');
   if (result.error) throw new Error(result.error);
 
@@ -59,13 +26,14 @@ Markdown('Can load more complex jsxy stuff', async () => {
   assert.equal($el.text(), 'Hello world');
 });
 
-Markdown('Bundles client-side JS for prod', async () => {
-  await build(astroConfig);
+Markdown('Bundles client-side JS for prod', async (context) => {
+  await context.build();
 
-  const complexHtml = await readFile(join(fixturePath, './dist/complex/index.html'), 'utf-8');
-
+  const complexHtml = await context.readFile('/complex/index.html');
   assert.match(complexHtml, `import("/_astro/components/Counter.js"`);
-  assert.ok(existsSync(join(fixturePath, `./dist/_astro/components/Counter.js`)), 'Counter.jsx is bundled for prod');
+  const counterJs = await context.readFile('/_astro/components/Counter.js');
+  assert.ok(counterJs, 'Counter.jsx is bundled for prod');
 });
 
 Markdown.run();
@@ -6,9 +6,7 @@ const RSS = suite('RSS Generation');
 
 setupBuild(RSS, './fixtures/astro-rss');
 
-const snapshot =
-  [same XML as below, previously split across a template literal and a '+'-concatenated string]
+const snapshot = `<?xml version="1.0" encoding="UTF-8"?><rss version="2.0" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:content="http://purl.org/rss/1.0/modules/content/"><channel><title><![CDATA[MF Doomcast]]></title><description><![CDATA[The podcast about the things you find on a picnic, or at a picnic table]]></description><link>https://mysite.dev/feed/episodes.xml</link><language>en-us</language><itunes:author>MF Doom</itunes:author><item><title><![CDATA[Rap Snitch Knishes (feat. Mr. Fantastik)]]></title><link>https://mysite.dev/episode/rap-snitch-knishes/</link><description><![CDATA[Complex named this song the “22nd funniest rap song of all time.”]]></description><pubDate>Tue, 16 Nov 2004 00:00:00 GMT</pubDate><itunes:episodeType>music</itunes:episodeType><itunes:duration>172</itunes:duration><itunes:explicit>true</itunes:explicit></item><item><title><![CDATA[Fazers]]></title><link>https://mysite.dev/episode/fazers/</link><description><![CDATA[Rhapsody ranked Take Me to Your Leader 17th on its list “Hip-Hop’s Best Albums of the Decade”]]></description><pubDate>Thu, 03 Jul 2003 00:00:00 GMT</pubDate><itunes:episodeType>music</itunes:episodeType><itunes:duration>197</itunes:duration><itunes:explicit>true</itunes:explicit></item><item><title><![CDATA[Rhymes Like Dimes (feat. Cucumber Slice)]]></title><link>https://mysite.dev/episode/rhymes-like-dimes/</link><description><![CDATA[Operation: Doomsday has been heralded as an underground classic that established MF Doom's rank within the underground hip-hop scene during the early to mid-2000s.\n]]></description><pubDate>Tue, 19 Oct 1999 00:00:00 GMT</pubDate><itunes:episodeType>music</itunes:episodeType><itunes:duration>259</itunes:duration><itunes:explicit>true</itunes:explicit></item></channel></rss>`;
 
 RSS('Generates RSS correctly', async (context) => {
   await context.build();
@@ -1,3 +1,4 @@
+import { fileURLToPath } from 'url';
 import { suite } from 'uvu';
 import * as assert from 'uvu/assert';
 import { runDevServer } from './helpers.js';
@@ -6,8 +7,9 @@ import { loadConfig } from '#astro/config';
 const ConfigPort = suite('Config path');
 
 const root = new URL('./fixtures/config-port/', import.meta.url);
 
 ConfigPort('can be specified in the astro config', async (context) => {
-  const astroConfig = await loadConfig(root.pathname);
+  const astroConfig = await loadConfig(fileURLToPath(root));
   assert.equal(astroConfig.devOptions.port, 3001);
 });

packages/astro/test/fixtures/astro-collection/src/pages/$nested.astro (new file, 27 lines)

@@ -0,0 +1,27 @@
+---
+export let collection: any;
+
+export async function createCollection() {
+  return {
+    async data() {
+      let data = Astro.fetchContent('./post/**/*.md');
+      data.sort((a, b) => new Date(b.date) - new Date(a.date));
+      return data;
+    }
+  };
+}
+---
+
+<div id="posts">
+  {collection.data.map((post) => (
+    <article>
+      <h1>{post.title}</h1>
+      <a href={post.url}>Read more</a>
+    </article>
+  ))}
+</div>
+
+<nav>
+  {collection.url.prev && <a id="prev-page" href={collection.url.prev}>Previous page</a>}
+  {collection.url.next && <a id="next-page" href={collection.url.next}>Next page</a>}
+</nav>

packages/astro/test/fixtures/astro-collection/src/pages/$paginated.astro (new file, 28 lines)

@@ -0,0 +1,28 @@
+---
+export let collection: any;
+
+export async function createCollection() {
+  return {
+    async data() {
+      let data = Astro.fetchContent('./post/**/*.md');
+      data.sort((a, b) => new Date(b.date) - new Date(a.date));
+      return data;
+    },
+    pageSize: 1
+  };
+}
+---
+
+<div id="posts">
+  {collection.data.map((post) => (
+    <article>
+      <h1>{post.title}</h1>
+      <a href={post.url}>Read more</a>
+    </article>
+  ))}
+</div>
+
+<nav>
+  {collection.url.prev && <a id="prev-page" href={collection.url.prev}>Previous page</a>}
+  {collection.url.next && <a id="next-page" href={collection.url.next}>Next page</a>}
+</nav>
@@ -8,7 +8,7 @@ export async function createCollection() {
       data.sort((a, b) => new Date(b.date) - new Date(a.date));
       return data;
     },
-    pageSize: 3
+    pageSize: 4
   };
 }
 ---
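The next hunk makes the Prettier fixture reader line-ending agnostic: on Windows, git may check fixture files out with CRLF endings while Prettier itself always emits '\n', so a raw string comparison can fail for formatting-identical output. Normalizing on read keeps the assertion about content only, roughly:

import { promises as fs } from 'fs';
import { fileURLToPath } from 'url';

// Read a fixture and normalize CRLF to LF before comparing against Prettier output.
const readFixture = (relPath: string): Promise<string> =>
  fs
    .readFile(fileURLToPath(new URL(`./fixtures${relPath}`, import.meta.url)), 'utf8')
    .then((raw) => raw.replace(/\r\n/g, '\n'));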
@@ -5,7 +5,7 @@ import { promises as fs } from 'fs';
 import { fileURLToPath } from 'url';
 const Prettier = suite('Prettier formatting');
 
-const readFile = (path) => fs.readFile(fileURLToPath(new URL(`./fixtures${path}`, import.meta.url))).then((res) => res.toString());
+const readFile = (path) => fs.readFile(fileURLToPath(new URL(`./fixtures${path}`, import.meta.url))).then((res) => res.toString().replace(/\r\n/g, '\n'));
 
 /**
  * Utility to get `[src, out]` files
yarn.lock (13 lines changed)

@@ -4626,7 +4626,7 @@ fd-slicer@~1.1.0:
 
 fdir@^5.0.0:
   version "5.0.0"
-  resolved "https://registry.npmjs.org/fdir/-/fdir-5.0.0.tgz"
+  resolved "https://registry.yarnpkg.com/fdir/-/fdir-5.0.0.tgz#a40b5d9adfb530daeca55558e8ad87ec14a44769"
   integrity sha512-cteqwWMA43lEmgwOg5HSdvhVFD39vHjQDhZkRMlKmeoNPtSSgUw1nUypydiY2upMdGiBFBZvNBDbnoBh0yCzaQ==
 
 figures@^3.0.0, figures@^3.2.0:
@@ -9801,13 +9801,20 @@ safe-regex@^1.1.0:
   resolved "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz"
   integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
 
-sass@^1.3.0, sass@^1.32.8:
+sass@^1.3.0:
   version "1.32.12"
   resolved "https://registry.npmjs.org/sass/-/sass-1.32.12.tgz"
   integrity sha512-zmXn03k3hN0KaiVTjohgkg98C3UowhL1/VSGdj4/VAAiMKGQOE80PFPxFP2Kyq0OUskPKcY5lImkhBKEHlypJA==
   dependencies:
     chokidar ">=3.0.0 <4.0.0"
 
+sass@^1.32.13:
+  version "1.32.13"
+  resolved "https://registry.yarnpkg.com/sass/-/sass-1.32.13.tgz#8d29c849e625a415bce71609c7cf95e15f74ed00"
+  integrity sha512-dEgI9nShraqP7cXQH+lEXVf73WOPCse0QlFzSD8k+1TcOxCMwVXfQlr0jtoluZysQOyJGnfr21dLvYKDJq8HkA==
+  dependencies:
+    chokidar ">=3.0.0 <4.0.0"
+
 scheduler@^0.18.0:
   version "0.18.0"
   resolved "https://registry.npmjs.org/scheduler/-/scheduler-0.18.0.tgz"
@@ -11050,7 +11057,7 @@ tiny-emitter@^2.0.0:
 
 tiny-glob@^0.2.8:
   version "0.2.8"
-  resolved "https://registry.npmjs.org/tiny-glob/-/tiny-glob-0.2.8.tgz"
+  resolved "https://registry.yarnpkg.com/tiny-glob/-/tiny-glob-0.2.8.tgz#b2792c396cc62db891ffa161fe8b33e76123e531"
   integrity sha512-vkQP7qOslq63XRX9kMswlby99kyO5OvKptw7AMwBVMjXEI7Tb61eoI5DydyEMOseyGS5anDN1VPoVxEvH01q8w==
   dependencies:
     globalyzer "0.1.0"