Compare commits: main...cloudflare (1 commit)

Commit: 325080c543

176 changed files with 351 additions and 5638 deletions
.Dockerfile (new file, +8)

@@ -0,0 +1,8 @@
FROM gitpod/workspace-node

# Install latest pnpm
RUN curl -fsSL https://get.pnpm.io/install.sh | SHELL=`which bash` bash -

# Install deno in gitpod
RUN curl -fsSL https://deno.land/x/install/install.sh | sh
RUN /home/gitpod/.deno/bin/deno completions bash > /home/gitpod/.bashrc.d/90-deno && echo 'export DENO_INSTALL="/home/gitpod/.deno"' >> /home/gitpod/.bashrc.d/90-deno && echo 'export PATH="$DENO_INSTALL/bin:$PATH"' >> /home/gitpod/.bashrc.d/90-deno
@@ -1,5 +0,0 @@
---
'astro': patch
---

Fixed an issue where the transitions router did not work within framework components.
@@ -1,5 +0,0 @@
---
'@astrojs/cloudflare': patch
---

fixes `AdvancedRuntime` & `DirectoryRuntime` types to work with Cloudflare caches
@@ -6,6 +6,10 @@ RUN npm install -g @playwright/test
# Install latest pnpm
RUN npm install -g pnpm

# Install deno
ENV DENO_INSTALL=/usr/local
RUN curl -fsSL https://deno.land/x/install/install.sh | sh

RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
  && curl -sSL https://dl.google.com/linux/direct/google-chrome-stable_current_$(dpkg --print-architecture).deb -o /tmp/chrome.deb \
  && apt-get -y install /tmp/chrome.deb
.devcontainer/deno/devcontainer.json (new file, +34)

@@ -0,0 +1,34 @@
{
  "name": "Deno",
  "build": {
    "dockerfile": "../examples.deno.Dockerfile"
  },

  "workspaceFolder": "/workspaces/astro/examples/deno",

  "portsAttributes": {
    "4321": {
      "label": "Application",
      "onAutoForward": "openPreview"
    }
  },

  "forwardPorts": [4321],

  "postCreateCommand": "pnpm install && cd /workspaces/astro && pnpm run build",

  "waitFor": "postCreateCommand",

  "postAttachCommand": {
    "Server": "pnpm start --host"
  },

  "customizations": {
    "codespaces": {
      "openFiles": ["src/pages/index.astro"]
    },
    "vscode": {
      "extensions": ["astro-build.astro-vscode", "esbenp.prettier-vscode"]
    }
  }
}
.devcontainer/examples.deno.Dockerfile (new file, +10)

@@ -0,0 +1,10 @@
FROM mcr.microsoft.com/devcontainers/javascript-node:0-18

# Install latest pnpm
RUN npm install -g pnpm

# Install deno
ENV DENO_INSTALL=/usr/local
RUN curl -fsSL https://deno.land/x/install/install.sh | sh

COPY example-welcome-message.txt /usr/local/etc/vscode-dev-containers/first-run-notice.txt
.github/workflows/ci.yml (vendored, 5 lines changed)

@@ -133,6 +133,11 @@ jobs:
          node-version: ${{ matrix.NODE_VERSION }}
          cache: "pnpm"

      - name: Use Deno
        uses: denoland/setup-deno@v1
        with:
          deno-version: v1.35.0

      - name: Install dependencies
        run: pnpm install
.github/workflows/snapshot-release.yml (vendored, 5 lines changed)

@@ -19,11 +19,6 @@ jobs:
    name: Create a snapshot release of a pull request
    if: ${{ github.repository_owner == 'withastro' && github.event.issue.pull_request && startsWith(github.event.comment.body, '!preview') }}
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write
      issues: write
      pull-requests: write
    steps:
      - name: "Check if user has admin access (only admins can publish snapshot releases)."
        uses: "lannonbr/repo-permission-check-action@2.0.0"
.github/workflows/test-hosts.yml (vendored, 24 lines changed)

@@ -11,6 +11,8 @@ env:
  VERCEL_ORG_ID: ${{ secrets.VERCEL_TEST_ORG_ID }}
  VERCEL_PROJECT_ID: ${{ secrets.VERCEL_TEST_PROJECT_ID }}
  VERCEL_TOKEN: ${{ secrets.VERCEL_TEST_TOKEN }}
  NETLIFY_SITE_ID: ${{ secrets.NETLIFY_TEST_SITE_ID }}
  NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_TEST_AUTH_TOKEN }}
  FORCE_COLOR: true

jobs:

@@ -32,20 +34,22 @@ jobs:
      - name: Install dependencies
        run: pnpm install

      - name: Build Astro
        run: pnpm turbo build --filter astro --filter @astrojs/vercel

      - name: Build test project
      - name: Install Hosts CLIs
        run: pnpm install --global netlify-cli vercel

      - name: Deploy Vercel
        working-directory: ./packages/integrations/vercel/test/hosted/hosted-astro-project
        run:
          pnpm run build

      - name: Deploy to Vercel
        working-directory: ./packages/integrations/vercel/test/hosted/hosted-astro-project
        run:
          pnpm dlx vercel --prod --prebuilt
          vercel --prod --prebuilt

      - name: Test
      - name: Deploy Netlify
        working-directory: ./packages/integrations/netlify/test/hosted/hosted-astro-project
        run:
          pnpm run build
          netlify deploy --prod

      - name: Test both hosts
        run:
          pnpm run test:e2e:hosts
@@ -1,4 +1,6 @@
---
image:
  file: .Dockerfile
# Commands to start on workspace startup
tasks:
  - before: |
@@ -3,9 +3,6 @@
# Convert context URL to an array
mapfile -t CONTEXT_URL_ITEMS < <(echo "$GITPOD_WORKSPACE_CONTEXT_URL" | tr '/' '\n')

# Install latest pnpm
curl -fsSL https://get.pnpm.io/install.sh | SHELL=`which bash` bash -

# Check if Gitpod started from a specific example directory in the repository
if [ "${CONTEXT_URL_ITEMS[7]}" = "examples" ]; then
  EXAMPLE_PROJECT=${CONTEXT_URL_ITEMS[8]}
@@ -52,6 +52,8 @@ Join us on [Discord](https://astro.build/chat) to meet other maintainers. We'll
| [@astrojs/svelte](packages/integrations/svelte) | [![astro version](https://img.shields.io/npm/v/@astrojs/svelte.svg?label=%20)](packages/integrations/svelte/CHANGELOG.md) |
| [@astrojs/vue](packages/integrations/vue) | [![astro version](https://img.shields.io/npm/v/@astrojs/vue.svg?label=%20)](packages/integrations/vue/CHANGELOG.md) |
| [@astrojs/lit](packages/integrations/lit) | [![astro version](https://img.shields.io/npm/v/@astrojs/lit.svg?label=%20)](packages/integrations/lit/CHANGELOG.md) |
| [@astrojs/deno](packages/integrations/deno) | [![astro version](https://img.shields.io/npm/v/@astrojs/deno.svg?label=%20)](packages/integrations/deno/CHANGELOG.md) |
| [@astrojs/netlify](packages/integrations/netlify) | [![astro version](https://img.shields.io/npm/v/@astrojs/netlify.svg?label=%20)](packages/integrations/netlify/CHANGELOG.md) |
| [@astrojs/node](packages/integrations/node) | [![astro version](https://img.shields.io/npm/v/@astrojs/node.svg?label=%20)](packages/integrations/node/CHANGELOG.md) |
| [@astrojs/vercel](packages/integrations/vercel) | [![astro version](https://img.shields.io/npm/v/@astrojs/vercel.svg?label=%20)](packages/integrations/vercel/CHANGELOG.md) |
| [@astrojs/cloudflare](packages/integrations/cloudflare) | [![astro version](https://img.shields.io/npm/v/@astrojs/cloudflare.svg?label=%20)](packages/integrations/cloudflare/CHANGELOG.md) |
|
@ -11,6 +11,6 @@
|
|||
"astro": "astro"
|
||||
},
|
||||
"dependencies": {
|
||||
"astro": "^3.2.4"
|
||||
"astro": "^3.2.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,6 +14,6 @@
|
|||
"@astrojs/mdx": "^1.1.1",
|
||||
"@astrojs/rss": "^3.0.0",
|
||||
"@astrojs/sitemap": "^3.0.1",
|
||||
"astro": "^3.2.4"
|
||||
"astro": "^3.2.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
],
|
||||
"scripts": {},
|
||||
"devDependencies": {
|
||||
"astro": "^3.2.4"
|
||||
"astro": "^3.2.3"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"astro": "^2.0.0-beta.0"
|
||||
|
|
|
@ -14,6 +14,6 @@
|
|||
"@astrojs/alpinejs": "^0.3.1",
|
||||
"@types/alpinejs": "^3.7.2",
|
||||
"alpinejs": "^3.12.3",
|
||||
"astro": "^3.2.4"
|
||||
"astro": "^3.2.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
"dependencies": {
|
||||
"@astrojs/lit": "^3.0.1",
|
||||
"@webcomponents/template-shadowroot": "^0.2.1",
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"lit": "^2.8.0"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
"@astrojs/solid-js": "^3.0.2",
|
||||
"@astrojs/svelte": "^4.0.3",
|
||||
"@astrojs/vue": "^3.0.1",
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"preact": "^10.17.1",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
"dependencies": {
|
||||
"@astrojs/preact": "^3.0.1",
|
||||
"@preact/signals": "^1.2.1",
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"preact": "^10.17.1"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
"@astrojs/react": "^3.0.3",
|
||||
"@types/react": "^18.2.21",
|
||||
"@types/react-dom": "^18.2.7",
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0"
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@astrojs/solid-js": "^3.0.2",
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"solid-js": "^1.7.11"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@astrojs/svelte": "^4.0.3",
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"svelte": "^4.2.0"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@astrojs/vue": "^3.0.1",
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"vue": "^3.3.4"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,6 +12,6 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@astrojs/node": "^6.0.3",
|
||||
"astro": "^3.2.4"
|
||||
"astro": "^3.2.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,9 +10,9 @@ export default function createIntegration(): AstroIntegration {
|
|||
// See the @astrojs/react integration for an example
|
||||
// https://github.com/withastro/astro/blob/main/packages/integrations/react/src/index.ts
|
||||
},
|
||||
'astro:build:setup': ({ pages, updateConfig }) => {
|
||||
// See the @astrojs/lit integration for an example
|
||||
// https://github.com/withastro/astro/blob/main/packages/integrations/lit/src/index.ts
|
||||
'astro:build:setup': ({ config, updateConfig }) => {
|
||||
// See the @astrojs/netlify integration for an example
|
||||
// https://github.com/withastro/astro/blob/main/packages/integrations/netlify/src/integration-functions.ts
|
||||
},
|
||||
'astro:build:done': ({ dir, routes }) => {
|
||||
// See the @astrojs/partytown integration for an example
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
],
|
||||
"scripts": {},
|
||||
"devDependencies": {
|
||||
"astro": "^3.2.4"
|
||||
"astro": "^3.2.3"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"astro": "^2.0.0-beta.0"
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@astrojs/node": "^6.0.3",
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"html-minifier": "^4.0.0"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,6 +11,6 @@
|
|||
"astro": "astro"
|
||||
},
|
||||
"dependencies": {
|
||||
"astro": "^3.2.4"
|
||||
"astro": "^3.2.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,6 +11,6 @@
|
|||
"astro": "astro"
|
||||
},
|
||||
"dependencies": {
|
||||
"astro": "^3.2.4"
|
||||
"astro": "^3.2.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,6 +11,6 @@
|
|||
"astro": "astro"
|
||||
},
|
||||
"dependencies": {
|
||||
"astro": "^3.2.4"
|
||||
"astro": "^3.2.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
"dependencies": {
|
||||
"@astrojs/node": "^6.0.3",
|
||||
"@astrojs/svelte": "^4.0.3",
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"svelte": "^4.2.0"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,8 +10,8 @@
|
|||
"astro": "astro"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@astrojs/tailwind": "^5.0.2",
|
||||
"@astrojs/tailwind": "^5.0.1",
|
||||
"@astrojs/node": "^6.0.3",
|
||||
"astro": "^3.2.4"
|
||||
"astro": "^3.2.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,6 +12,6 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@astrojs/markdoc": "^0.5.2",
|
||||
"astro": "^3.2.4"
|
||||
"astro": "^3.2.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@astrojs/markdown-remark": "^3.2.1",
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"hast-util-select": "^5.0.5",
|
||||
"rehype-autolink-headings": "^6.1.1",
|
||||
"rehype-slug": "^5.1.0",
|
||||
|
|
|
@ -11,6 +11,6 @@
|
|||
"astro": "astro"
|
||||
},
|
||||
"dependencies": {
|
||||
"astro": "^3.2.4"
|
||||
"astro": "^3.2.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
"dependencies": {
|
||||
"@astrojs/mdx": "^1.1.1",
|
||||
"@astrojs/preact": "^3.0.1",
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"preact": "^10.17.1"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
"dependencies": {
|
||||
"@astrojs/preact": "^3.0.1",
|
||||
"@nanostores/preact": "^0.5.0",
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"nanostores": "^0.9.3",
|
||||
"preact": "^10.17.1"
|
||||
}
|
||||
|
|
|
@ -12,9 +12,9 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@astrojs/mdx": "^1.1.1",
|
||||
"@astrojs/tailwind": "^5.0.2",
|
||||
"@astrojs/tailwind": "^5.0.1",
|
||||
"@types/canvas-confetti": "^1.6.0",
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"autoprefixer": "^10.4.15",
|
||||
"canvas-confetti": "^1.6.0",
|
||||
"postcss": "^8.4.28",
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
"astro": "astro"
|
||||
},
|
||||
"dependencies": {
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"vite-plugin-pwa": "0.16.4",
|
||||
"workbox-window": "^7.0.0"
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
"test": "vitest"
|
||||
},
|
||||
"dependencies": {
|
||||
"astro": "^3.2.4",
|
||||
"astro": "^3.2.3",
|
||||
"vitest": "^0.34.2"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,25 +1,5 @@
|
|||
# astro
|
||||
|
||||
## 3.2.4
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [#8638](https://github.com/withastro/astro/pull/8638) [`160d1cd75`](https://github.com/withastro/astro/commit/160d1cd755e70af1d8ec294d01dd2cb32d60db50) Thanks [@florian-lefebvre](https://github.com/florian-lefebvre)! - The `@astrojs/tailwind` integration now creates a `tailwind.config.mjs` file by default
|
||||
|
||||
- [#8767](https://github.com/withastro/astro/pull/8767) [`30de32436`](https://github.com/withastro/astro/commit/30de324361bc261956eb9fc08fe60a82ff602a9b) Thanks [@martrapp](https://github.com/martrapp)! - Revert fix #8472
|
||||
|
||||
[#8472](https://github.com/withastro/astro/pull/8472) caused some style files from previous pages to not be cleanly deleted on view transitions. For a discussion of a future fix for the original issue [#8144](https://github.com/withastro/astro/issues/8114) see [#8745](https://github.com/withastro/astro/pull/8745).
|
||||
|
||||
- [#8741](https://github.com/withastro/astro/pull/8741) [`c4a7ec425`](https://github.com/withastro/astro/commit/c4a7ec4255e7acb9555cb8bb74ea13c5fbb2ac17) Thanks [@lilnasy](https://github.com/lilnasy)! - Fixed an issue on Windows where lowercase drive letters in current working directory led to missing scripts and styles.
|
||||
|
||||
- [#8772](https://github.com/withastro/astro/pull/8772) [`c24f70d91`](https://github.com/withastro/astro/commit/c24f70d91601dd3a6b5a84f04d61824e775e9b44) Thanks [@martrapp](https://github.com/martrapp)! - Fix flickering during view transitions
|
||||
|
||||
- [#8754](https://github.com/withastro/astro/pull/8754) [`93b092266`](https://github.com/withastro/astro/commit/93b092266febfad16a48575f8eee12d5910bf071) Thanks [@bluwy](https://github.com/bluwy)! - Make CSS chunk names less confusing
|
||||
|
||||
- [#8776](https://github.com/withastro/astro/pull/8776) [`29cdfa024`](https://github.com/withastro/astro/commit/29cdfa024886dd581cb207586f7dfec6966bdd4e) Thanks [@martrapp](https://github.com/martrapp)! - Fix transition attributes on islands
|
||||
|
||||
- [#8773](https://github.com/withastro/astro/pull/8773) [`eaed844ea`](https://github.com/withastro/astro/commit/eaed844ea8f2f52e0c9caa40bb3ec7377e10595f) Thanks [@sumimakito](https://github.com/sumimakito)! - Fix an issue where HTML attributes do not render if getHTMLAttributes in an image service returns a Promise
|
||||
|
||||
## 3.2.3
|
||||
|
||||
### Patch Changes
|
||||
|
|
|
@ -32,13 +32,6 @@ async function main() {
|
|||
}
|
||||
}
|
||||
|
||||
// windows drive letters can sometimes be lowercase, which vite cannot process
|
||||
if (process.platform === 'win32') {
|
||||
const cwd = process.cwd();
|
||||
const correctedCwd = cwd.slice(0, 1).toUpperCase() + cwd.slice(1);
|
||||
if (correctedCwd !== cwd) process.chdir(correctedCwd);
|
||||
}
|
||||
|
||||
return import('./dist/cli/index.js')
|
||||
.then(({ cli }) => cli(process.argv))
|
||||
.catch((error) => {
|
||||
|
|
|
@ -1,5 +0,0 @@
|
|||
import React from 'react';
|
||||
import { navigate } from "astro:transitions/client";
|
||||
export default function ClickToNavigate({ to, id }) {
|
||||
return <button id={id} onClick={() => navigate(to)}>Navigate to `{to}`</button>;
|
||||
}
|
|
@ -1,34 +0,0 @@
|
|||
---
|
||||
import { ViewTransitions } from 'astro:transitions';
|
||||
---
|
||||
<html>
|
||||
<head>
|
||||
<ViewTransitions/>
|
||||
</head>
|
||||
<body>
|
||||
<p>Local transitions</p>
|
||||
<slot/>
|
||||
<script>
|
||||
document.addEventListener("astro:after-swap", () => {
|
||||
document.querySelector("p").addEventListener("transitionstart", () => {
|
||||
console.info("transitionstart");
|
||||
});
|
||||
document.documentElement.setAttribute("class", "blue");
|
||||
});
|
||||
document.dispatchEvent(new Event("astro:after-swap"));
|
||||
</script>
|
||||
</body>
|
||||
<style>
|
||||
p {
|
||||
transition: background-color 1s;
|
||||
}
|
||||
p {
|
||||
background-color: #0ee;
|
||||
color: red;
|
||||
}
|
||||
.blue p {
|
||||
background-color: #ee0;
|
||||
color: blue;
|
||||
}
|
||||
</style>
|
||||
</html>
|
|
@ -1,12 +0,0 @@
|
|||
---
|
||||
import ClickToNavigate from "../components/ClickToNavigate.jsx"
|
||||
import { ViewTransitions } from "astro:transitions";
|
||||
---
|
||||
<html>
|
||||
<head>
|
||||
<ViewTransitions />
|
||||
</head>
|
||||
<body>
|
||||
<ClickToNavigate id="react-client-load-navigate-button" to="/two" client:load/>
|
||||
</body>
|
||||
</html>
|
|
@ -1,6 +0,0 @@
|
|||
---
|
||||
import ListenerLayout from '../components/listener-layout.astro';
|
||||
---
|
||||
<ListenerLayout>
|
||||
<a id="totwo" href="/listener-two">Go to listener two</a>
|
||||
</ListenerLayout>
|
|
@ -1,6 +0,0 @@
|
|||
---
|
||||
import ListenerLayout from '../components/listener-layout.astro';
|
||||
---
|
||||
<ListenerLayout>
|
||||
<a id="toone" href="/listener-one">Go to listener one</a>
|
||||
</ListenerLayout>
|
|
@ -230,28 +230,6 @@ test.describe('View Transitions', () => {
|
|||
await expect(h, 'imported CSS updated').toHaveCSS('background-color', 'rgba(0, 0, 0, 0)');
|
||||
});
|
||||
|
||||
test('No page rendering during swap()', async ({ page, astro }) => {
|
||||
let transitions = 0;
|
||||
page.on('console', (msg) => {
|
||||
if (msg.type() === 'info' && msg.text() === 'transitionstart') ++transitions;
|
||||
});
|
||||
|
||||
// Go to page 1
|
||||
await page.goto(astro.resolveUrl('/listener-one'));
|
||||
let p = page.locator('#totwo');
|
||||
await expect(p, 'should have content').toHaveText('Go to listener two');
|
||||
// on load a CSS transition is started triggered by a class on the html element
|
||||
expect(transitions).toEqual(1);
|
||||
|
||||
// go to page 2
|
||||
await page.click('#totwo');
|
||||
p = page.locator('#toone');
|
||||
await expect(p, 'should have content').toHaveText('Go to listener one');
|
||||
// swap() resets that class, the after-swap listener sets it again.
|
||||
// the temporarily missing class must not trigger page rendering
|
||||
expect(transitions).toEqual(1);
|
||||
});
|
||||
|
||||
test('click hash links does not do navigation', async ({ page, astro }) => {
|
||||
// Go to page 1
|
||||
await page.goto(astro.resolveUrl('/one'));
|
||||
|
@ -670,7 +648,7 @@ test.describe('View Transitions', () => {
|
|||
expect(loads.length, 'There should be 2 page loads').toEqual(2);
|
||||
});
|
||||
|
||||
test.skip('client:only styles are retained on transition', async ({ page, astro }) => {
|
||||
test('client:only styles are retained on transition', async ({ page, astro }) => {
|
||||
const totalExpectedStyles = 7;
|
||||
|
||||
// Go to page 1
|
||||
|
@ -753,21 +731,6 @@ test.describe('View Transitions', () => {
|
|||
await expect(p, 'should have content').toHaveText('Page 1');
|
||||
});
|
||||
|
||||
test('Use the client side router in framework components', async ({ page, astro }) => {
|
||||
await page.goto(astro.resolveUrl('/client-load'));
|
||||
|
||||
// the button is set to navigate() to /two
|
||||
const button = page.locator('#react-client-load-navigate-button');
|
||||
|
||||
await expect(button, 'should have content').toHaveText('Navigate to `/two`');
|
||||
|
||||
await button.click();
|
||||
|
||||
const p = page.locator('#two');
|
||||
|
||||
await expect(p, 'should have content').toHaveText('Page 2');
|
||||
});
|
||||
|
||||
test('body inline scripts do not re-execute on navigation', async ({ page, astro }) => {
|
||||
const errors = [];
|
||||
page.addListener('pageerror', (err) => {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "astro",
|
||||
"version": "3.2.4",
|
||||
"version": "3.2.3",
|
||||
"description": "Astro is a modern site builder with web best practices, performance, and DX front-of-mind.",
|
||||
"type": "module",
|
||||
"author": "withastro",
|
||||
|
|
|
@ -111,7 +111,7 @@ export async function getImage(
|
|||
src: imageURL,
|
||||
attributes:
|
||||
service.getHTMLAttributes !== undefined
|
||||
? await service.getHTMLAttributes(validatedOptions, imageConfig)
|
||||
? service.getHTMLAttributes(validatedOptions, imageConfig)
|
||||
: {},
|
||||
};
|
||||
}
|
||||
|
|
|
@ -50,7 +50,7 @@ const ALIASES = new Map([
|
|||
]);
|
||||
const ASTRO_CONFIG_STUB = `import { defineConfig } from 'astro/config';\n\nexport default defineConfig({});`;
|
||||
const TAILWIND_CONFIG_STUB = `/** @type {import('tailwindcss').Config} */
|
||||
export default {
|
||||
module.exports = {
|
||||
content: ['./src/**/*.{astro,html,js,jsx,md,mdx,svelte,ts,tsx,vue}'],
|
||||
theme: {
|
||||
extend: {},
|
||||
|
@ -74,6 +74,7 @@ const OFFICIAL_ADAPTER_TO_IMPORT_MAP: Record<string, string> = {
|
|||
vercel: '@astrojs/vercel/serverless',
|
||||
cloudflare: '@astrojs/cloudflare',
|
||||
node: '@astrojs/node',
|
||||
deno: '@astrojs/deno',
|
||||
};
|
||||
|
||||
// Users might lack access to the global npm registry, this function
|
||||
|
@ -159,7 +160,7 @@ export async function add(names: string[], { flags }: AddOptions) {
|
|||
'./tailwind.config.mjs',
|
||||
'./tailwind.config.js',
|
||||
],
|
||||
defaultConfigFile: './tailwind.config.mjs',
|
||||
defaultConfigFile: './tailwind.config.cjs',
|
||||
defaultConfigContent: TAILWIND_CONFIG_STUB,
|
||||
});
|
||||
}
|
||||
|
|
|
@ -30,7 +30,7 @@ type RawContentEvent = { name: ChokidarEvent; entry: string };
|
|||
type ContentEvent = { name: ChokidarEvent; entry: URL };
|
||||
|
||||
type DataEntryMetadata = Record<string, never>;
|
||||
type ContentEntryMetadata = { slug: string, path: string };
|
||||
type ContentEntryMetadata = { slug: string };
|
||||
type CollectionEntryMap = {
|
||||
[collection: string]:
|
||||
| {
|
||||
|
@ -276,7 +276,7 @@ export async function createContentTypesGenerator({
|
|||
if (!(entryKey in collectionEntryMap[collectionKey].entries)) {
|
||||
collectionEntryMap[collectionKey] = {
|
||||
type: 'content',
|
||||
entries: { ...collectionInfo.entries, [entryKey]: { slug: addedSlug, path: event.entry.toString() } },
|
||||
entries: { ...collectionInfo.entries, [entryKey]: { slug: addedSlug } },
|
||||
};
|
||||
}
|
||||
return { shouldGenerateTypes: true };
|
||||
|
@ -453,15 +453,7 @@ async function writeContentFiles({
|
|||
)}] }`;
|
||||
|
||||
const slugType = JSON.stringify(entryMetadata.slug);
|
||||
contentTypesStr += [
|
||||
`${entryKey}: {`,
|
||||
` id: ${entryKey};`,
|
||||
` slug: ${slugType};`,
|
||||
` path: ${JSON.stringify(entryMetadata.path)};`,
|
||||
` body: string;`,
|
||||
` collection: ${collectionKey};`,
|
||||
` data: ${dataType}`,
|
||||
`} & ${renderType};`].join("\n");
|
||||
contentTypesStr += `${entryKey}: {\n id: ${entryKey};\n slug: ${slugType};\n body: string;\n collection: ${collectionKey};\n data: ${dataType}\n} & ${renderType};\n`;
|
||||
}
|
||||
contentTypesStr += `};\n`;
|
||||
break;
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import type { GetModuleInfo, ModuleInfo } from 'rollup';
|
||||
import type { GetModuleInfo } from 'rollup';
|
||||
|
||||
import crypto from 'node:crypto';
|
||||
import npath from 'node:path';
|
||||
|
@ -6,29 +6,20 @@ import type { AstroSettings } from '../../@types/astro.js';
|
|||
import { viteID } from '../util.js';
|
||||
import { getTopLevelPages } from './graph.js';
|
||||
|
||||
// These pages could be used as base names for the chunk hashed name, but they are confusing
|
||||
// and should be avoided if possible
|
||||
const confusingBaseNames = ['404', '500'];
|
||||
|
||||
// The short name for when the hash can be included
|
||||
// We could get rid of this and only use the createSlugger implementation, but this creates
|
||||
// slightly prettier names.
|
||||
export function shortHashedName(id: string, ctx: { getModuleInfo: GetModuleInfo }): string {
|
||||
const parents = Array.from(getTopLevelPages(id, ctx));
|
||||
return createNameHash(
|
||||
getFirstParentId(parents),
|
||||
parents.map(([page]) => page.id)
|
||||
);
|
||||
}
|
||||
const firstParentId = parents[0]?.[0].id;
|
||||
const firstParentName = firstParentId ? npath.parse(firstParentId).name : 'index';
|
||||
|
||||
export function createNameHash(baseId: string | undefined, hashIds: string[]): string {
|
||||
const baseName = baseId ? prettifyBaseName(npath.parse(baseId).name) : 'index';
|
||||
const hash = crypto.createHash('sha256');
|
||||
for (const id of hashIds) {
|
||||
hash.update(id, 'utf-8');
|
||||
for (const [page] of parents) {
|
||||
hash.update(page.id, 'utf-8');
|
||||
}
|
||||
const h = hash.digest('hex').slice(0, 8);
|
||||
const proposedName = baseName + '.' + h;
|
||||
const proposedName = firstParentName + '.' + h;
|
||||
return proposedName;
|
||||
}
|
||||
|
||||
|
@ -43,7 +34,7 @@ export function createSlugger(settings: AstroSettings) {
|
|||
.map(([page]) => page.id)
|
||||
.sort()
|
||||
.join('-');
|
||||
const firstParentId = getFirstParentId(parents) || indexPage;
|
||||
const firstParentId = parents[0]?.[0].id || indexPage;
|
||||
|
||||
// Use the last two segments, for ex /docs/index
|
||||
let dir = firstParentId;
|
||||
|
@ -54,7 +45,7 @@ export function createSlugger(settings: AstroSettings) {
|
|||
break;
|
||||
}
|
||||
|
||||
const name = prettifyBaseName(npath.parse(npath.basename(dir)).name);
|
||||
const name = npath.parse(npath.basename(dir)).name;
|
||||
key = key.length ? name + sep + key : name;
|
||||
dir = npath.dirname(dir);
|
||||
i++;
|
||||
|
@ -85,32 +76,3 @@ export function createSlugger(settings: AstroSettings) {
|
|||
return name;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the first parent id from `parents` where its name is not confusing.
|
||||
* Returns undefined if there's no parents.
|
||||
*/
|
||||
function getFirstParentId(parents: [ModuleInfo, number, number][]) {
|
||||
for (const parent of parents) {
|
||||
const id = parent[0].id;
|
||||
const baseName = npath.parse(id).name;
|
||||
if (!confusingBaseNames.includes(baseName)) {
|
||||
return id;
|
||||
}
|
||||
}
|
||||
// If all parents are confusing, just use the first one. Or if there's no
|
||||
// parents, this will return undefined.
|
||||
return parents[0]?.[0].id;
|
||||
}
|
||||
|
||||
const charsToReplaceRe = /[.\[\]]/g;
|
||||
const underscoresRe = /_+/g;
|
||||
/**
|
||||
* Prettify base names so they're easier to read:
|
||||
* - index -> index
|
||||
* - [slug] -> _slug_
|
||||
* - [...spread] -> _spread_
|
||||
*/
|
||||
function prettifyBaseName(str: string) {
|
||||
return str.replace(charsToReplaceRe, '_').replace(underscoresRe, '_');
|
||||
}
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
import * as crypto from 'node:crypto';
|
||||
import * as npath from 'node:path';
|
||||
import type { GetModuleInfo } from 'rollup';
|
||||
import { type ResolvedConfig, type Plugin as VitePlugin } from 'vite';
|
||||
import { isBuildableCSSRequest } from '../../../vite-plugin-astro-server/util.js';
|
||||
|
@ -91,7 +93,7 @@ function rollupPluginAstroBuildCSS(options: PluginOptions): VitePlugin[] {
|
|||
if (new URL(pageInfo.id, 'file://').searchParams.has(PROPAGATED_ASSET_FLAG)) {
|
||||
// Split delayed assets to separate modules
|
||||
// so they can be injected where needed
|
||||
const chunkId = assetName.createNameHash(id, [id]);
|
||||
const chunkId = createNameHash(id, [id]);
|
||||
internals.cssModuleToChunkIdMap.set(id, chunkId);
|
||||
return chunkId;
|
||||
}
|
||||
|
@ -270,6 +272,17 @@ function rollupPluginAstroBuildCSS(options: PluginOptions): VitePlugin[] {
|
|||
|
||||
/***** UTILITY FUNCTIONS *****/
|
||||
|
||||
function createNameHash(baseId: string, hashIds: string[]): string {
|
||||
const baseName = baseId ? npath.parse(baseId).name : 'index';
|
||||
const hash = crypto.createHash('sha256');
|
||||
for (const id of hashIds) {
|
||||
hash.update(id, 'utf-8');
|
||||
}
|
||||
const h = hash.digest('hex').slice(0, 8);
|
||||
const proposedName = baseName + '.' + h;
|
||||
return proposedName;
|
||||
}
|
||||
|
||||
function* getParentClientOnlys(
|
||||
id: string,
|
||||
ctx: { getModuleInfo: GetModuleInfo },
|
||||
|
|
|
@ -15,13 +15,10 @@ export interface HydrationMetadata {
|
|||
componentExport: { value: string };
|
||||
}
|
||||
|
||||
type Props = Record<string | number | symbol, any>;
|
||||
|
||||
interface ExtractedProps {
|
||||
isPage: boolean;
|
||||
hydration: HydrationMetadata | null;
|
||||
props: Props;
|
||||
propsWithoutTransitionAttributes: Props;
|
||||
props: Record<string | number | symbol, any>;
|
||||
}
|
||||
|
||||
const transitionDirectivesToCopyOnIsland = Object.freeze([
|
||||
|
@ -32,14 +29,13 @@ const transitionDirectivesToCopyOnIsland = Object.freeze([
|
|||
// Used to extract the directives, aka `client:load` information about a component.
|
||||
// Finds these special props and removes them from what gets passed into the component.
|
||||
export function extractDirectives(
|
||||
inputProps: Props,
|
||||
inputProps: Record<string | number | symbol, any>,
|
||||
clientDirectives: SSRResult['clientDirectives']
|
||||
): ExtractedProps {
|
||||
let extracted: ExtractedProps = {
|
||||
isPage: false,
|
||||
hydration: null,
|
||||
props: {},
|
||||
propsWithoutTransitionAttributes: {},
|
||||
};
|
||||
for (const [key, value] of Object.entries(inputProps)) {
|
||||
if (key.startsWith('server:')) {
|
||||
|
@ -100,14 +96,10 @@ export function extractDirectives(
|
|||
}
|
||||
} else {
|
||||
extracted.props[key] = value;
|
||||
if (!transitionDirectivesToCopyOnIsland.includes(key)) {
|
||||
extracted.propsWithoutTransitionAttributes[key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const sym of Object.getOwnPropertySymbols(inputProps)) {
|
||||
extracted.props[sym] = inputProps[sym];
|
||||
extracted.propsWithoutTransitionAttributes[sym] = inputProps[sym];
|
||||
}
|
||||
|
||||
return extracted;
|
||||
|
|
|
@ -92,10 +92,7 @@ async function renderFrameworkComponent(
|
|||
displayName,
|
||||
};
|
||||
|
||||
const { hydration, isPage, props, propsWithoutTransitionAttributes } = extractDirectives(
|
||||
_props,
|
||||
clientDirectives
|
||||
);
|
||||
const { hydration, isPage, props } = extractDirectives(_props, clientDirectives);
|
||||
let html = '';
|
||||
let attrs: Record<string, string> | undefined = undefined;
|
||||
|
||||
|
@ -220,7 +217,7 @@ async function renderFrameworkComponent(
|
|||
({ html, attrs } = await renderer.ssr.renderToStaticMarkup.call(
|
||||
{ result },
|
||||
Component,
|
||||
propsWithoutTransitionAttributes,
|
||||
props,
|
||||
children,
|
||||
metadata
|
||||
));
|
||||
|
@ -245,7 +242,7 @@ If you're still stuck, please open an issue on GitHub or join us at https://astr
|
|||
({ html, attrs } = await renderer.ssr.renderToStaticMarkup.call(
|
||||
{ result },
|
||||
Component,
|
||||
propsWithoutTransitionAttributes,
|
||||
props,
|
||||
children,
|
||||
metadata
|
||||
));
|
||||
|
|
|
@ -13,14 +13,9 @@ type Events = 'astro:page-load' | 'astro:after-swap';
|
|||
// only update history entries that are managed by us
|
||||
// leave other entries alone and do not accidentally add state.
|
||||
const persistState = (state: State) => history.state && history.replaceState(state, '');
|
||||
|
||||
const inBrowser = import.meta.env.SSR === false;
|
||||
|
||||
export const supportsViewTransitions = inBrowser && !!document.startViewTransition;
|
||||
|
||||
export const supportsViewTransitions = !!document.startViewTransition;
|
||||
export const transitionEnabledOnThisPage = () =>
|
||||
inBrowser && !!document.querySelector('[name="astro-view-transitions-enabled"]');
|
||||
|
||||
!!document.querySelector('[name="astro-view-transitions-enabled"]');
|
||||
const samePage = (otherLocation: URL) =>
|
||||
location.pathname === otherLocation.pathname && location.search === otherLocation.search;
|
||||
const triggerEvent = (name: Events) => document.dispatchEvent(new Event(name));
|
||||
|
@ -45,27 +40,26 @@ const announce = () => {
|
|||
60
|
||||
);
|
||||
};
|
||||
|
||||
const PERSIST_ATTR = 'data-astro-transition-persist';
|
||||
|
||||
let parser: DOMParser;
|
||||
const parser = new DOMParser();
|
||||
// explained at its usage
|
||||
let noopEl: HTMLDivElement;
|
||||
if (import.meta.env.DEV) {
|
||||
noopEl = document.createElement('div');
|
||||
}
|
||||
|
||||
// The History API does not tell you if navigation is forward or back, so
|
||||
// you can figure it using an index. On pushState the index is incremented so you
|
||||
// can use that to determine popstate if going forward or back.
|
||||
let currentHistoryIndex = 0;
|
||||
|
||||
if (inBrowser) {
|
||||
if (history.state) {
|
||||
// we reloaded a page with history state
|
||||
// (e.g. history navigation from non-transition page or browser reload)
|
||||
currentHistoryIndex = history.state.index;
|
||||
scrollTo({ left: history.state.scrollX, top: history.state.scrollY });
|
||||
} else if (transitionEnabledOnThisPage()) {
|
||||
history.replaceState({ index: currentHistoryIndex, scrollX, scrollY, intraPage: false }, '');
|
||||
}
|
||||
if (history.state) {
|
||||
// we reloaded a page with history state
|
||||
// (e.g. history navigation from non-transition page or browser reload)
|
||||
currentHistoryIndex = history.state.index;
|
||||
scrollTo({ left: history.state.scrollX, top: history.state.scrollY });
|
||||
} else if (transitionEnabledOnThisPage()) {
|
||||
history.replaceState({ index: currentHistoryIndex, scrollX, scrollY, intraPage: false }, '');
|
||||
}
|
||||
|
||||
const throttle = (cb: (...args: any[]) => any, delay: number) => {
|
||||
let wait = false;
|
||||
// During the waiting time additional events are lost.
|
||||
|
@ -157,24 +151,18 @@ function isInfinite(animation: Animation) {
|
|||
|
||||
const updateHistoryAndScrollPosition = (toLocation: URL, replace: boolean, intraPage: boolean) => {
|
||||
const fresh = !samePage(toLocation);
|
||||
let scrolledToTop = false;
|
||||
if (toLocation.href !== location.href) {
|
||||
if (replace) {
|
||||
history.replaceState({ ...history.state }, '', toLocation.href);
|
||||
} else {
|
||||
history.replaceState({ ...history.state, intraPage }, '');
|
||||
history.pushState(
|
||||
{ index: ++currentHistoryIndex, scrollX: 0, scrollY: 0 },
|
||||
'',
|
||||
toLocation.href
|
||||
);
|
||||
history.pushState({ index: ++currentHistoryIndex, scrollX, scrollY }, '', toLocation.href);
|
||||
}
|
||||
// now we are on the new page for non-history navigations!
|
||||
// (with history navigation page change happens before popstate is fired)
|
||||
// freshly loaded pages start from the top
|
||||
if (fresh) {
|
||||
scrollTo({ left: 0, top: 0, behavior: 'instant' });
|
||||
scrolledToTop = true;
|
||||
}
|
||||
}
|
||||
if (toLocation.hash) {
|
||||
|
@ -183,9 +171,7 @@ const updateHistoryAndScrollPosition = (toLocation: URL, replace: boolean, intra
|
|||
// that won't reload the page but instead scroll to the fragment
|
||||
location.href = toLocation.href;
|
||||
} else {
|
||||
if (!scrolledToTop) {
|
||||
scrollTo({ left: 0, top: 0, behavior: 'instant' });
|
||||
}
|
||||
scrollTo({ left: 0, top: 0, behavior: 'instant' });
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -212,6 +198,22 @@ async function updateDOM(
|
|||
const href = el.getAttribute('href');
|
||||
return newDocument.head.querySelector(`link[rel=stylesheet][href="${href}"]`);
|
||||
}
|
||||
// What follows is a fix for an issue (#8472) with missing client:only styles after transition.
|
||||
// That problem exists only in dev mode where styles are injected into the page by Vite.
|
||||
// Returning a noop element ensures that the styles are not removed from the old document.
|
||||
// Guarding the code below with the dev mode check
|
||||
// allows tree shaking to remove this code in production.
|
||||
if (import.meta.env.DEV) {
|
||||
if (el.tagName === 'STYLE' && el.dataset.viteDevId) {
|
||||
const devId = el.dataset.viteDevId;
|
||||
// If this same style tag exists, remove it from the new page
|
||||
return (
|
||||
newDocument.querySelector(`style[data-vite-dev-id="${devId}"]`) ||
|
||||
// Otherwise, keep it anyways. This is client:only styles.
|
||||
noopEl
|
||||
);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
|
@ -347,8 +349,6 @@ async function transition(
|
|||
toLocation = new URL(response.redirected);
|
||||
}
|
||||
|
||||
parser ??= new DOMParser();
|
||||
|
||||
const newDocument = parser.parseFromString(response.html, response.mediaType);
|
||||
// The next line might look like a hack,
|
||||
// but it is actually necessary as noscript elements
|
||||
|
@ -385,22 +385,7 @@ async function transition(
|
|||
}
|
||||
}
|
||||
|
||||
let navigateOnServerWarned = false;
|
||||
|
||||
export function navigate(href: string, options?: Options) {
|
||||
if (inBrowser === false) {
|
||||
if (!navigateOnServerWarned) {
|
||||
// instantiate an error for the stacktrace to show to user.
|
||||
const warning = new Error(
|
||||
'The view transitions client API was called during a server side render. This may be unintentional as the navigate() function is expected to be called in response to user interactions. Please make sure that your usage is correct.'
|
||||
);
|
||||
warning.name = 'Warning';
|
||||
console.warn(warning);
|
||||
navigateOnServerWarned = true;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// not ours
|
||||
if (!transitionEnabledOnThisPage()) {
|
||||
location.href = href;
|
||||
|
@ -418,61 +403,58 @@ export function navigate(href: string, options?: Options) {
|
|||
}
|
||||
}
|
||||
|
||||
function onPopState(ev: PopStateEvent) {
|
||||
if (!transitionEnabledOnThisPage() && ev.state) {
|
||||
// The current page doesn't have View Transitions enabled
|
||||
// but the page we navigate to does (because it set the state).
|
||||
// Do a full page refresh to reload the client-side router from the new page.
|
||||
// Scroll restoration will then happen during the reload when the router's code is re-executed
|
||||
if (supportsViewTransitions || getFallback() !== 'none') {
|
||||
addEventListener('popstate', (ev) => {
|
||||
if (!transitionEnabledOnThisPage() && ev.state) {
|
||||
// The current page doesn't have View Transitions enabled
|
||||
// but the page we navigate to does (because it set the state).
|
||||
// Do a full page refresh to reload the client-side router from the new page.
|
||||
// Scroll restauration will then happen during the reload when the router's code is re-executed
|
||||
if (history.scrollRestoration) {
|
||||
history.scrollRestoration = 'manual';
|
||||
}
|
||||
location.reload();
|
||||
return;
|
||||
}
|
||||
|
||||
// History entries without state are created by the browser (e.g. for hash links)
|
||||
// Our view transition entries always have state.
|
||||
// Just ignore stateless entries.
|
||||
// The browser will handle navigation fine without our help
|
||||
if (ev.state === null) {
|
||||
if (history.scrollRestoration) {
|
||||
history.scrollRestoration = 'auto';
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// With the default "auto", the browser will jump to the old scroll position
|
||||
// before the ViewTransition is complete.
|
||||
if (history.scrollRestoration) {
|
||||
history.scrollRestoration = 'manual';
|
||||
}
|
||||
location.reload();
|
||||
return;
|
||||
}
|
||||
|
||||
// History entries without state are created by the browser (e.g. for hash links)
|
||||
// Our view transition entries always have state.
|
||||
// Just ignore stateless entries.
|
||||
// The browser will handle navigation fine without our help
|
||||
if (ev.state === null) {
|
||||
if (history.scrollRestoration) {
|
||||
history.scrollRestoration = 'auto';
|
||||
const state: State = history.state;
|
||||
if (state.intraPage) {
|
||||
// this is non transition intra-page scrolling
|
||||
scrollTo(state.scrollX, state.scrollY);
|
||||
} else {
|
||||
const nextIndex = state.index;
|
||||
const direction: Direction = nextIndex > currentHistoryIndex ? 'forward' : 'back';
|
||||
currentHistoryIndex = nextIndex;
|
||||
transition(direction, new URL(location.href), {}, state);
|
||||
}
|
||||
return;
|
||||
}
|
||||
});
|
||||
|
||||
// With the default "auto", the browser will jump to the old scroll position
|
||||
// before the ViewTransition is complete.
|
||||
if (history.scrollRestoration) {
|
||||
history.scrollRestoration = 'manual';
|
||||
}
|
||||
addEventListener('load', onPageLoad);
|
||||
// There's not a good way to record scroll position before a back button.
|
||||
// So the way we do it is by listening to scrollend if supported, and if not continuously record the scroll position.
|
||||
const updateState = () => {
|
||||
persistState({ ...history.state, scrollX, scrollY });
|
||||
};
|
||||
|
||||
const state: State = history.state;
|
||||
if (state.intraPage) {
|
||||
// this is non transition intra-page scrolling
|
||||
scrollTo(state.scrollX, state.scrollY);
|
||||
} else {
|
||||
const nextIndex = state.index;
|
||||
const direction: Direction = nextIndex > currentHistoryIndex ? 'forward' : 'back';
|
||||
currentHistoryIndex = nextIndex;
|
||||
transition(direction, new URL(location.href), {}, state);
|
||||
}
|
||||
}
|
||||
|
||||
if (inBrowser) {
|
||||
if (supportsViewTransitions || getFallback() !== 'none') {
|
||||
addEventListener('popstate', onPopState);
|
||||
addEventListener('load', onPageLoad);
|
||||
// There's not a good way to record scroll position before a back button.
|
||||
// So the way we do it is by listening to scrollend if supported, and if not continuously record the scroll position.
|
||||
const updateState = () => {
|
||||
persistState({ ...history.state, scrollX, scrollY });
|
||||
};
|
||||
|
||||
if ('onscrollend' in window) addEventListener('scrollend', updateState);
|
||||
else addEventListener('scroll', throttle(updateState, 300));
|
||||
|
||||
markScriptsExec();
|
||||
}
|
||||
if ('onscrollend' in window) addEventListener('scrollend', updateState);
|
||||
else addEventListener('scroll', throttle(updateState, 300));
|
||||
|
||||
markScriptsExec();
|
||||
}
|
||||
|
|
|
@ -1,7 +0,0 @@
|
|||
import { defineConfig } from 'astro/config';
|
||||
import react from '@astrojs/react';
|
||||
|
||||
// https://astro.build/config
|
||||
export default defineConfig({
|
||||
integrations: [react()],
|
||||
});
|
|
@ -3,9 +3,6 @@
|
|||
"version": "0.0.0",
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"astro": "workspace:*",
|
||||
"@astrojs/react": "workspace:*",
|
||||
"react": "^18.1.0",
|
||||
"react-dom": "^18.1.0"
|
||||
"astro": "workspace:*"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,11 +0,0 @@
|
|||
.counter {
|
||||
display: grid;
|
||||
font-size: 2em;
|
||||
grid-template-columns: repeat(3, minmax(0, 1fr));
|
||||
margin-top: 2em;
|
||||
place-items: center;
|
||||
}
|
||||
|
||||
.counter-message {
|
||||
text-align: center;
|
||||
}
|
|
@ -1,19 +0,0 @@
|
|||
import React, { useState } from 'react';
|
||||
import './Island.css';
|
||||
|
||||
export default function Counter({ children, count: initialCount, id }) {
|
||||
const [count, setCount] = useState(initialCount);
|
||||
const add = () => setCount((i) => i + 1);
|
||||
const subtract = () => setCount((i) => i - 1);
|
||||
|
||||
return (
|
||||
<>
|
||||
<div id={id} className="counter">
|
||||
<button className="decrement" onClick={subtract}>-</button>
|
||||
<pre>{count}</pre>
|
||||
<button className="increment" onClick={add}>+</button>
|
||||
</div>
|
||||
<div className="counter-message">{children}</div>
|
||||
</>
|
||||
);
|
||||
}
|
|
@ -1,10 +0,0 @@
|
|||
---
|
||||
import Island from '../components/Island.jsx';
|
||||
---
|
||||
<html>
|
||||
<head>
|
||||
</head>
|
||||
<body>
|
||||
<Island id="1" count="{1}" children="Greetings!" transition:persist="here" client:load/>
|
||||
</body>
|
||||
</html>
|
|
@ -22,14 +22,4 @@ describe('View Transitions styles', () => {
|
|||
|
||||
expect($('head style')).to.have.a.lengthOf(3);
|
||||
});
|
||||
|
||||
it('should not duplicate transition attributes on island contents', async () => {
|
||||
let res = await fixture.fetch('/hasIsland');
|
||||
let html = await res.text();
|
||||
let $ = cheerio.load(html);
|
||||
expect($('astro-island[data-astro-transition-persist]')).to.have.a.lengthOf(1);
|
||||
expect(
|
||||
$('astro-island[data-astro-transition-persist] > [data-astro-transition-persist]')
|
||||
).to.have.a.lengthOf(0);
|
||||
});
|
||||
});
|
||||
|
|
packages/integrations/cloudflare/.gitignore (vendored, deleted, 3 lines)

@@ -1,3 +0,0 @@
# Astro cloudflare directory mode creates a function directory
functions
.mf

(File diff suppressed because one or more lines are too long)
@ -1,404 +1,3 @@
|
|||
# @astrojs/cloudflare
|
||||
|
||||
An SSR adapter for use with Cloudflare Pages Functions targets. Write your code in Astro/JavaScript and deploy to Cloudflare Pages.
|
||||
|
||||
## Install
|
||||
|
||||
Add the Cloudflare adapter to enable SSR in your Astro project with the following `astro add` command. This will install the adapter and make the appropriate changes to your `astro.config.mjs` file in one step.
|
||||
|
||||
```sh
|
||||
# Using NPM
|
||||
npx astro add cloudflare
|
||||
# Using Yarn
|
||||
yarn astro add cloudflare
|
||||
# Using PNPM
|
||||
pnpm astro add cloudflare
|
||||
```
|
||||
|
||||
If you prefer to install the adapter manually instead, complete the following two steps:
|
||||
|
||||
1. Add the Cloudflare adapter to your project's dependencies using your preferred package manager. If you’re using npm or aren’t sure, run this in the terminal:
|
||||
|
||||
```bash
|
||||
npm install @astrojs/cloudflare
|
||||
```
|
||||
|
||||
2. Add the following to your `astro.config.mjs` file:
|
||||
|
||||
```diff lang="js"
|
||||
// astro.config.mjs
|
||||
import { defineConfig } from 'astro/config';
|
||||
+ import cloudflare from '@astrojs/cloudflare';
|
||||
|
||||
export default defineConfig({
|
||||
+ output: 'server',
|
||||
+ adapter: cloudflare(),
|
||||
});
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
### `mode`
|
||||
|
||||
`mode: "advanced" | "directory"`
|
||||
|
||||
default `"advanced"`
|
||||
|
||||
This configuration option defines how your Astro project is deployed to Cloudflare Pages.
|
||||
|
||||
- `advanced` mode picks up the `_worker.js` file in the `dist` folder
|
||||
- `directory` mode picks up the files in the `functions` folder, by default only one `[[path]].js` file is generated
|
||||
|
||||
Switching to directory mode allows you to add additional files manually such as [Cloudflare Pages Plugins](https://developers.cloudflare.com/pages/platform/functions/plugins/), [Cloudflare Pages Middleware](https://developers.cloudflare.com/pages/platform/functions/middleware/) or custom functions using [Cloudflare Pages Functions Routing](https://developers.cloudflare.com/pages/platform/functions/routing/).
|
||||
|
||||
```js
|
||||
// astro.config.mjs
|
||||
export default defineConfig({
|
||||
adapter: cloudflare({ mode: 'directory' }),
|
||||
});
|
||||
```
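For context, a hand-written function dropped into the `functions` folder might look like the following sketch. The file name, route, and response are purely illustrative and not part of this diff; it simply follows Cloudflare Pages Functions' `onRequest*` convention:

```js
// functions/greeting.js — an illustrative, hand-written Pages Function served at /greeting.
// Files added here must use names that do not collide with the files Astro emits.
export function onRequestGet(context) {
  // context.request is the incoming Request; return a standard Response.
  return new Response(`Hello from ${new URL(context.request.url).pathname}`);
}
```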
To compile a separate bundle for each page, set the `functionPerRoute` option in your Cloudflare adapter config. This option requires some manual maintenance of the `functions` folder. Files emitted by Astro will overwrite existing files with identical names in the `functions` folder, so you must choose unique file names for each file you manually add. Additionally, the adapter will never empty the `functions` folder of outdated files, so you must clean up the folder manually when you remove pages.
|
||||
|
||||
```diff lang="js"
|
||||
// astro.config.mjs
|
||||
import {defineConfig} from "astro/config";
|
||||
import cloudflare from '@astrojs/cloudflare';
|
||||
|
||||
export default defineConfig({
|
||||
adapter: cloudflare({
|
||||
mode: 'directory',
|
||||
+ functionPerRoute: true
|
||||
})
|
||||
})
|
||||
```
|
||||
|
||||
This adapter doesn't support the [`edgeMiddleware`](https://docs.astro.build/en/reference/adapter-reference/#edgemiddleware) option.
|
||||
|
||||
### `routes.strategy`
|
||||
|
||||
`routes.strategy: "auto" | "include" | "exclude"`
|
||||
|
||||
default `"auto"`
|
||||
|
||||
Determines how `routes.json` will be generated if no [custom `_routes.json`](#custom-_routesjson) is provided.
|
||||
|
||||
There are three options available:
|
||||
|
||||
- **`"auto"` (default):** Will automatically select the strategy that generates the fewest entries. This should almost always be sufficient, so choose this option unless you have a specific reason not to.
|
||||
|
||||
- **`include`:** Pages and endpoints that are not pre-rendered are listed as `include` entries, telling Cloudflare to invoke these routes as functions. `exclude` entries are only used to resolve conflicts. Usually the best strategy when your website has mostly static pages and only a few dynamic pages or endpoints.
|
||||
|
||||
Example: For `src/pages/index.astro` (static), `src/pages/company.astro` (static), `src/pages/users/faq.astro` (static) and `/src/pages/users/[id].astro` (SSR) this will produce the following `_routes.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"version": 1,
|
||||
"include": [
|
||||
"/_image", // Astro's image endpoint
|
||||
"/users/*" // Dynamic route
|
||||
],
|
||||
"exclude": [
|
||||
// Static routes that need to be exempted from the dynamic wildcard route above
|
||||
"/users/faq/",
|
||||
"/users/faq/index.html"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
- **`exclude`:** Pre-rendered pages are listed as `exclude` entries (telling Cloudflare to handle these routes as static assets). Usually the best strategy when your website has mostly dynamic pages or endpoints and only a few static pages.
|
||||
|
||||
Example: For the same pages as in the previous example this will produce the following `_routes.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"version": 1,
|
||||
"include": [
|
||||
"/*" // Handle everything as function except the routes below
|
||||
],
|
||||
"exclude": [
|
||||
// All static assets
|
||||
"/",
|
||||
"/company/",
|
||||
"/index.html",
|
||||
"/users/faq/",
|
||||
"/favicon.png",
|
||||
"/company/index.html",
|
||||
"/users/faq/index.html"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### `routes.include`
|
||||
|
||||
`routes.include: string[]`
|
||||
|
||||
default `[]`
|
||||
|
||||
If you want to use the automatic `_routes.json` generation, but want to include additional routes (e.g. when having custom functions in the `functions` folder), you can use the `routes.include` option to add additional routes to the `include` array.
|
||||
|
||||
### `routes.exclude`
|
||||
|
||||
`routes.exclude: string[]`
|
||||
|
||||
default `[]`
|
||||
|
||||
If you want to use the automatic `_routes.json` generation, but want to exclude additional routes, you can use the `routes.exclude` option to add additional routes to the `exclude` array.
|
||||
|
||||
The following example automatically generates `_routes.json` while including and excluding additional routes. Note that this is only necessary if you have custom functions in the `functions` folder that are not handled by Astro.
|
||||
|
||||
```diff lang="js"
|
||||
// astro.config.mjs
|
||||
export default defineConfig({
|
||||
adapter: cloudflare({
|
||||
mode: 'directory',
|
||||
+ routes: {
|
||||
+ strategy: 'include',
|
||||
+ include: ['/users/*'], // handled by custom function: functions/users/[id].js
|
||||
+ exclude: ['/users/faq'], // handled by static page: pages/users/faq.astro
|
||||
+ },
|
||||
}),
|
||||
});
|
||||
```
|
||||
|
||||
### `wasmModuleImports`
|
||||
|
||||
`wasmModuleImports: boolean`
|
||||
|
||||
default: `false`
|
||||
|
||||
Whether or not to import `.wasm` files [directly as ES modules](https://github.com/WebAssembly/esm-integration/tree/main/proposals/esm-integration) using the `.wasm?module` import syntax.
|
||||
|
||||
Add `wasmModuleImports: true` to `astro.config.mjs` to enable this functionality in both the Cloudflare build and the Astro dev server. Read more about [using Wasm modules](#use-wasm-modules).
|
||||
|
||||
```diff lang="js"
|
||||
// astro.config.mjs
|
||||
import {defineConfig} from "astro/config";
|
||||
import cloudflare from '@astrojs/cloudflare';
|
||||
|
||||
export default defineConfig({
|
||||
adapter: cloudflare({
|
||||
+ wasmModuleImports: true
|
||||
}),
|
||||
output: 'server'
|
||||
})
|
||||
```
|
||||
|
||||
### `runtime`
|
||||
|
||||
`runtime: "off" | "local"`
|
||||
|
||||
default `"off"`
|
||||
|
||||
Determines whether and how the Cloudflare Runtime is added to `astro dev`.
|
||||
|
||||
The Cloudflare Runtime includes [Cloudflare bindings](https://developers.cloudflare.com/pages/platform/functions/bindings), [environment variables](https://developers.cloudflare.com/pages/platform/functions/bindings/#environment-variables), and the [cf object](https://developers.cloudflare.com/workers/runtime-apis/request/#incomingrequestcfproperties). Read more about [accessing the Cloudflare Runtime](#cloudflare-runtime).
|
||||
|
||||
- `local`: uses bindings mocking and locally static placeholders
|
||||
- `off`: no access to the Cloudflare runtime using `astro dev`. You can alternatively use [Preview with Wrangler](#preview-with-wrangler)
|
||||
|
||||
```diff lang="js"
|
||||
// astro.config.mjs
|
||||
import { defineConfig } from 'astro/config';
|
||||
import cloudflare from '@astrojs/cloudflare';
|
||||
|
||||
export default defineConfig({
|
||||
output: 'server',
|
||||
adapter: cloudflare({
|
||||
+ runtime: 'local',
|
||||
}),
|
||||
});
|
||||
```
|
||||
|
||||
## Cloudflare runtime
|
||||
|
||||
Gives you access to [environment variables](https://developers.cloudflare.com/pages/platform/functions/bindings/#environment-variables), and [Cloudflare bindings](https://developers.cloudflare.com/pages/platform/functions/bindings).
|
||||
|
||||
Currently supported bindings:
|
||||
|
||||
- [Cloudflare D1](https://developers.cloudflare.com/d1/)
|
||||
- [Cloudflare R2](https://developers.cloudflare.com/r2/)
|
||||
- [Cloudflare Workers KV](https://developers.cloudflare.com/kv/)
|
||||
- [Cloudflare Durable Objects](https://developers.cloudflare.com/durable-objects/)
|
||||
|
||||
You can access the runtime from Astro components through `Astro.locals` inside any `.astro` file.
|
||||
|
||||
```astro
|
||||
---
|
||||
// src/pages/index.astro
|
||||
const runtime = Astro.locals.runtime;
|
||||
---
|
||||
|
||||
<pre>{JSON.stringify(runtime.env)}</pre>
|
||||
```
|
||||
|
||||
You can access the runtime from API endpoints through `context.locals`:
|
||||
|
||||
```js
|
||||
// src/pages/api/someFile.js
|
||||
export function GET(context) {
|
||||
const runtime = context.locals.runtime;
|
||||
|
||||
return new Response('Some body');
|
||||
}
|
||||
```
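As a rough sketch of how this is typically used, the endpoint below reads a value from a hypothetical KV namespace exposed on `runtime.env`. The `MY_KV` binding name and the key are purely illustrative — configure your own binding in your Cloudflare project:

```js
// src/pages/api/kvExample.js
export async function GET(context) {
  const { env } = context.locals.runtime;

  // `MY_KV` is a hypothetical KV namespace binding configured on your Cloudflare project
  const value = await env.MY_KV.get('greeting');

  return new Response(value ?? 'No value stored yet');
}
```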
|
||||
|
||||
### Typing
|
||||
|
||||
If you have configured `mode: advanced`, you can type the `runtime` object using `AdvancedRuntime`:
|
||||
|
||||
```ts
|
||||
// src/env.d.ts
|
||||
/// <reference types="astro/client" />
|
||||
|
||||
type KVNamespace = import('@cloudflare/workers-types/experimental').KVNamespace;
|
||||
type ENV = {
|
||||
SERVER_URL: string;
|
||||
KV_BINDING: KVNamespace;
|
||||
};
|
||||
|
||||
type Runtime = import('@astrojs/cloudflare').AdvancedRuntime<ENV>;
|
||||
|
||||
declare namespace App {
|
||||
interface Locals extends Runtime {
|
||||
user: {
|
||||
name: string;
|
||||
surname: string;
|
||||
};
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
If you have configured `mode: directory`, you can type the `runtime` object using `DirectoryRuntime`:
|
||||
|
||||
```ts
|
||||
// src/env.d.ts
|
||||
/// <reference types="astro/client" />
|
||||
|
||||
type KVNamespace = import('@cloudflare/workers-types/experimental').KVNamespace;
|
||||
type ENV = {
|
||||
SERVER_URL: string;
|
||||
KV_BINDING: KVNamespace;
|
||||
};
|
||||
|
||||
type Runtime = import('@astrojs/cloudflare').DirectoryRuntime<ENV>;
|
||||
|
||||
declare namespace App {
|
||||
interface Locals extends Runtime {
|
||||
user: {
|
||||
name: string;
|
||||
surname: string;
|
||||
};
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Platform
|
||||
|
||||
### Headers
|
||||
|
||||
You can attach [custom headers](https://developers.cloudflare.com/pages/platform/headers/) to your responses by adding a `_headers` file in your Astro project's `public/` folder. This file will be copied to your build output directory.
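For example, a minimal `_headers` file could look like the following sketch (the paths and header values are purely illustrative):

```plaintext
# public/_headers
/*
  X-Frame-Options: DENY
  X-Content-Type-Options: nosniff
```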
|
||||
|
||||
### Redirects
|
||||
|
||||
You can declare [custom redirects](https://developers.cloudflare.com/pages/platform/redirects/) using Cloudflare Pages. This allows you to redirect requests to a different URL. You can add a `_redirects` file in your Astro project's `public/` folder. This file will be copied to your build output directory.
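A minimal `_redirects` file might look like this (the paths are illustrative; each line is `source destination status`):

```plaintext
# public/_redirects
/old-page /new-page 301
```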
|
||||
|
||||
### Routes
|
||||
|
||||
You can define which routes are invoking functions and which are static assets, using [Cloudflare routing](https://developers.cloudflare.com/pages/platform/functions/routing/#functions-invocation-routes) via a `_routes.json` file. This file is automatically generated by Astro.
|
||||
|
||||
#### Custom `_routes.json`
|
||||
|
||||
By default, `@astrojs/cloudflare` will generate a `_routes.json` file with `include` and `exclude` rules based on your application's dynamic and static routes.
|
||||
This will enable Cloudflare to serve files and process static redirects without a function invocation. Creating a custom `_routes.json` will override this automatic optimization. See [Cloudflare's documentation on creating a custom `routes.json`](https://developers.cloudflare.com/pages/platform/functions/routing/#create-a-_routesjson-file) for more details.
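For reference, a hand-written `_routes.json` that only invokes functions for a few dynamic routes could look roughly like this (the paths are illustrative):

```json
{
  "version": 1,
  "include": ["/api/*", "/users/*"],
  "exclude": ["/users/faq"]
}
```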
|
||||
|
||||
## Use Wasm modules
|
||||
|
||||
The following example imports a Wasm module and then responds to requests by adding the request's numeric parameters together.
|
||||
|
||||
```js
|
||||
// pages/add/[a]/[b].js
|
||||
import mod from '../util/add.wasm?module';
|
||||
|
||||
// instantiate ahead of time to share module
|
||||
const addModule = new WebAssembly.Instance(mod);
|
||||
|
||||
export async function GET(context) {
|
||||
const a = Number.parseInt(context.params.a);
|
||||
const b = Number.parseInt(context.params.b);
|
||||
return new Response(`${addModule.exports.add(a, b)}`);
|
||||
}
|
||||
```
|
||||
|
||||
While this example is trivial, Wasm can be used to accelerate computationally intensive operations that do not involve significant I/O, such as embedding an image processing library.
|
||||
|
||||
## Node.js compatibility
|
||||
|
||||
Astro's Cloudflare adapter allows you to use any Node.js runtime API supported by Cloudflare:
|
||||
|
||||
- assert
|
||||
- AsyncLocalStorage
|
||||
- Buffer
|
||||
- Crypto
|
||||
- Diagnostics Channel
|
||||
- EventEmitter
|
||||
- path
|
||||
- process
|
||||
- Streams
|
||||
- StringDecoder
|
||||
- util
|
||||
|
||||
To use these APIs, your page or endpoint must be server-side rendered (not pre-rendered) and must use the `import {} from 'node:*'` import syntax.
|
||||
|
||||
```js
|
||||
// pages/api/endpoint.js
|
||||
export const prerender = false;
|
||||
import { Buffer } from 'node:buffer';
|
||||
```
|
||||
|
||||
Additionally, you'll need to enable the Compatibility Flag in Cloudflare. The configuration for this flag may vary based on where you deploy your Astro site. For detailed guidance, please refer to the [Cloudflare documentation on enabling Node.js compatibility](https://developers.cloudflare.com/workers/runtime-apis/nodejs).
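As one example, if your project is configured through a `wrangler.toml` file, enabling the flag might look like the sketch below (this assumes the `nodejs_compat` flag applies to your deployment target — verify against the linked Cloudflare docs):

```toml
# wrangler.toml
compatibility_flags = [ "nodejs_compat" ]
```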
|
||||
|
||||
## Cloudflare module support
|
||||
|
||||
All Cloudflare namespaced packages (e.g. `cloudflare:sockets`) are allowlisted for use. Note that the package `cloudflare:sockets` does not work locally without using Wrangler dev mode.
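As a minimal sketch, importing from a namespaced package looks like a normal ESM import (here the documented `connect` helper from `cloudflare:sockets`; remember it only works when deployed or when running through Wrangler):

```js
import { connect } from 'cloudflare:sockets';
```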
|
||||
|
||||
## Preview with Wrangler
|
||||
|
||||
To use [`wrangler`](https://developers.cloudflare.com/workers/wrangler/) to run your application locally, update the preview script:
|
||||
|
||||
```json
|
||||
//package.json
|
||||
"preview": "wrangler pages dev ./dist"
|
||||
```
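After updating the script, build your site and then run `npm run preview` (or the equivalent command for your package manager) to serve the existing `./dist` output through Wrangler's local Pages emulation.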
|
||||
|
||||
[`wrangler`](https://developers.cloudflare.com/workers/wrangler/) gives you access to [Cloudflare bindings](https://developers.cloudflare.com/pages/platform/functions/bindings), [environment variables](https://developers.cloudflare.com/pages/platform/functions/bindings/#environment-variables), and the [cf object](https://developers.cloudflare.com/workers/runtime-apis/request/#incomingrequestcfproperties). Getting hot reloading or the astro dev server to work with Wrangler might require custom setup. See [community examples](https://github.com/withastro/roadmap/discussions/590).
|
||||
|
||||
### Meaningful error messages
|
||||
|
||||
Currently, errors thrown while running your application in Wrangler are not very useful due to the minification of your code. For better debugging, you can add the `vite.build.minify = false` setting to your `astro.config.mjs`.
|
||||
|
||||
```diff lang="js"
|
||||
// astro.config.mjs
|
||||
export default defineConfig({
|
||||
adapter: cloudflare(),
|
||||
output: 'server',
|
||||
|
||||
+ vite: {
|
||||
+ build: {
|
||||
+ minify: false,
|
||||
+ },
|
||||
+ },
|
||||
});
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
For help, check out the `#support` channel on [Discord](https://astro.build/chat). Our friendly Support Squad members are here to help!
|
||||
|
||||
You can also check our [Astro Integration Documentation][astro-integration] for more on integrations.
|
||||
|
||||
## Contributing
|
||||
|
||||
This package is maintained by Astro's Core team. You're welcome to submit an issue or PR!
|
||||
|
||||
[astro-integration]: https://docs.astro.build/en/guides/integrations-guide/
|
||||
The Cloudflare adapter package has moved. Please see [the new repository for the Cloudflare adapter](https://github.com/withastro/adapters/tree/main/packages/cloudflare).
|
||||
|
|
|
@ -1,65 +1,7 @@
|
|||
{
|
||||
"name": "@astrojs/cloudflare",
|
||||
"description": "Deploy your site to Cloudflare Workers/Pages",
|
||||
"version": "7.5.2",
|
||||
"type": "module",
|
||||
"types": "./dist/index.d.ts",
|
||||
"author": "withastro",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/withastro/astro.git",
|
||||
"directory": "packages/integrations/cloudflare"
|
||||
},
|
||||
"keywords": [
|
||||
"withastro",
|
||||
"astro-adapter"
|
||||
],
|
||||
"bugs": "https://github.com/withastro/astro/issues",
|
||||
"homepage": "https://docs.astro.build/en/guides/integrations-guide/cloudflare/",
|
||||
"exports": {
|
||||
".": "./dist/index.js",
|
||||
"./entrypoints/server.advanced.js": "./dist/entrypoints/server.advanced.js",
|
||||
"./entrypoints/server.directory.js": "./dist/entrypoints/server.directory.js",
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "astro-scripts build \"src/**/*.ts\" && tsc",
|
||||
"build:ci": "astro-scripts build \"src/**/*.ts\"",
|
||||
"dev": "astro-scripts dev \"src/**/*.ts\"",
|
||||
"test": "mocha --exit --timeout 30000 test/",
|
||||
"test:match": "mocha --exit --timeout 30000 -g"
|
||||
},
|
||||
"dependencies": {
|
||||
"@astrojs/underscore-redirects": "workspace:*",
|
||||
"@cloudflare/workers-types": "^4.20230821.0",
|
||||
"miniflare": "^3.20230918.0",
|
||||
"@iarna/toml": "^2.2.5",
|
||||
"@miniflare/cache": "^2.14.1",
|
||||
"@miniflare/shared": "^2.14.1",
|
||||
"@miniflare/storage-memory": "^2.14.1",
|
||||
"dotenv": "^16.3.1",
|
||||
"esbuild": "^0.19.2",
|
||||
"find-up": "^6.3.0",
|
||||
"tiny-glob": "^0.2.9",
|
||||
"vite": "^4.4.9"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"astro": "workspace:^3.2.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/iarna__toml": "^2.0.2",
|
||||
"astro": "workspace:*",
|
||||
"astro-scripts": "workspace:*",
|
||||
"chai": "^4.3.7",
|
||||
"cheerio": "1.0.0-rc.12",
|
||||
"mocha": "^10.2.0",
|
||||
"wrangler": "^3.5.1"
|
||||
},
|
||||
"publishConfig": {
|
||||
"provenance": true
|
||||
}
|
||||
"version": "0.0.0",
|
||||
"private": true,
|
||||
"keywords": [],
|
||||
"dont_remove": "This is a placeholder for the same of the docs smoke test"
|
||||
}
|
||||
|
|
|
@ -1,79 +0,0 @@
|
|||
import type {
|
||||
Request as CFRequest,
|
||||
CacheStorage,
|
||||
ExecutionContext,
|
||||
} from '@cloudflare/workers-types';
|
||||
import type { SSRManifest } from 'astro';
|
||||
import { App } from 'astro/app';
|
||||
import { getProcessEnvProxy, isNode } from '../util.js';
|
||||
|
||||
if (!isNode) {
|
||||
process.env = getProcessEnvProxy();
|
||||
}
|
||||
|
||||
type Env = {
|
||||
ASSETS: { fetch: (req: Request) => Promise<Response> };
|
||||
};
|
||||
|
||||
export interface AdvancedRuntime<T extends object = object> {
|
||||
runtime: {
|
||||
waitUntil: (promise: Promise<any>) => void;
|
||||
env: Env & T;
|
||||
cf: CFRequest['cf'];
|
||||
caches: CacheStorage;
|
||||
};
|
||||
}
|
||||
|
||||
export function createExports(manifest: SSRManifest) {
|
||||
const app = new App(manifest);
|
||||
|
||||
const fetch = async (request: Request & CFRequest, env: Env, context: ExecutionContext) => {
|
||||
// TODO: remove this any cast in the future
|
||||
// REF: the type cast to any is needed because the Cloudflare Env Type is not assignable to type 'ProcessEnv'
|
||||
process.env = env as any;
|
||||
|
||||
const { pathname } = new URL(request.url);
|
||||
|
||||
// static assets fallback, in case default _routes.json is not used
|
||||
if (manifest.assets.has(pathname)) {
|
||||
return env.ASSETS.fetch(request);
|
||||
}
|
||||
|
||||
let routeData = app.match(request, { matchNotFound: true });
|
||||
if (routeData) {
|
||||
Reflect.set(
|
||||
request,
|
||||
Symbol.for('astro.clientAddress'),
|
||||
request.headers.get('cf-connecting-ip')
|
||||
);
|
||||
|
||||
const locals: AdvancedRuntime = {
|
||||
runtime: {
|
||||
waitUntil: (promise: Promise<any>) => {
|
||||
context.waitUntil(promise);
|
||||
},
|
||||
env: env,
|
||||
cf: request.cf,
|
||||
caches: caches as unknown as CacheStorage,
|
||||
},
|
||||
};
|
||||
|
||||
let response = await app.render(request, routeData, locals);
|
||||
|
||||
if (app.setCookieHeaders) {
|
||||
for (const setCookieHeader of app.setCookieHeaders(response)) {
|
||||
response.headers.append('Set-Cookie', setCookieHeader);
|
||||
}
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
return new Response(null, {
|
||||
status: 404,
|
||||
statusText: 'Not found',
|
||||
});
|
||||
};
|
||||
|
||||
return { default: { fetch } };
|
||||
}
|
|
@ -1,72 +0,0 @@
|
|||
import type { Request as CFRequest, CacheStorage, EventContext } from '@cloudflare/workers-types';
|
||||
import type { SSRManifest } from 'astro';
|
||||
import { App } from 'astro/app';
|
||||
import { getProcessEnvProxy, isNode } from '../util.js';
|
||||
|
||||
if (!isNode) {
|
||||
process.env = getProcessEnvProxy();
|
||||
}
|
||||
export interface DirectoryRuntime<T extends object = object> {
|
||||
runtime: {
|
||||
waitUntil: (promise: Promise<any>) => void;
|
||||
env: EventContext<unknown, string, unknown>['env'] & T;
|
||||
cf: CFRequest['cf'];
|
||||
caches: CacheStorage;
|
||||
};
|
||||
}
|
||||
|
||||
export function createExports(manifest: SSRManifest) {
|
||||
const app = new App(manifest);
|
||||
|
||||
const onRequest = async (context: EventContext<unknown, string, unknown>) => {
|
||||
const request = context.request as CFRequest & Request;
|
||||
const { env } = context;
|
||||
|
||||
// TODO: remove this any cast in the future
|
||||
// REF: the type cast to any is needed because the Cloudflare Env Type is not assignable to type 'ProcessEnv'
|
||||
process.env = env as any;
|
||||
|
||||
const { pathname } = new URL(request.url);
|
||||
// static assets fallback, in case default _routes.json is not used
|
||||
if (manifest.assets.has(pathname)) {
|
||||
return env.ASSETS.fetch(request);
|
||||
}
|
||||
|
||||
let routeData = app.match(request, { matchNotFound: true });
|
||||
if (routeData) {
|
||||
Reflect.set(
|
||||
request,
|
||||
Symbol.for('astro.clientAddress'),
|
||||
request.headers.get('cf-connecting-ip')
|
||||
);
|
||||
|
||||
const locals: DirectoryRuntime = {
|
||||
runtime: {
|
||||
waitUntil: (promise: Promise<any>) => {
|
||||
context.waitUntil(promise);
|
||||
},
|
||||
env: context.env,
|
||||
cf: request.cf,
|
||||
caches: caches as unknown as CacheStorage,
|
||||
},
|
||||
};
|
||||
|
||||
let response = await app.render(request, routeData, locals);
|
||||
|
||||
if (app.setCookieHeaders) {
|
||||
for (const setCookieHeader of app.setCookieHeaders(response)) {
|
||||
response.headers.append('Set-Cookie', setCookieHeader);
|
||||
}
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
return new Response(null, {
|
||||
status: 404,
|
||||
statusText: 'Not found',
|
||||
});
|
||||
};
|
||||
|
||||
return { onRequest, manifest };
|
||||
}
|
|
@ -1,40 +0,0 @@
|
|||
import type { AstroAdapter, AstroFeatureMap } from 'astro';
|
||||
|
||||
export function getAdapter({
|
||||
isModeDirectory,
|
||||
functionPerRoute,
|
||||
}: {
|
||||
isModeDirectory: boolean;
|
||||
functionPerRoute: boolean;
|
||||
}): AstroAdapter {
|
||||
const astroFeatures = {
|
||||
hybridOutput: 'stable',
|
||||
staticOutput: 'unsupported',
|
||||
serverOutput: 'stable',
|
||||
assets: {
|
||||
supportKind: 'stable',
|
||||
isSharpCompatible: false,
|
||||
isSquooshCompatible: false,
|
||||
},
|
||||
} satisfies AstroFeatureMap;
|
||||
|
||||
if (isModeDirectory) {
|
||||
return {
|
||||
name: '@astrojs/cloudflare',
|
||||
serverEntrypoint: '@astrojs/cloudflare/entrypoints/server.directory.js',
|
||||
exports: ['onRequest', 'manifest'],
|
||||
adapterFeatures: {
|
||||
functionPerRoute,
|
||||
edgeMiddleware: false,
|
||||
},
|
||||
supportedAstroFeatures: astroFeatures,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
name: '@astrojs/cloudflare',
|
||||
serverEntrypoint: '@astrojs/cloudflare/entrypoints/server.advanced.js',
|
||||
exports: ['default'],
|
||||
supportedAstroFeatures: astroFeatures,
|
||||
};
|
||||
}
|
|
@ -1,615 +0,0 @@
|
|||
import type { AstroConfig, AstroIntegration, RouteData } from 'astro';
|
||||
|
||||
import { createRedirectsFromAstroRoutes } from '@astrojs/underscore-redirects';
|
||||
import { AstroError } from 'astro/errors';
|
||||
import esbuild from 'esbuild';
|
||||
import { Miniflare } from 'miniflare';
|
||||
import * as fs from 'node:fs';
|
||||
import * as os from 'node:os';
|
||||
import { dirname, relative, sep } from 'node:path';
|
||||
import { fileURLToPath, pathToFileURL } from 'node:url';
|
||||
import glob from 'tiny-glob';
|
||||
import { getAdapter } from './getAdapter.js';
|
||||
import { deduplicatePatterns } from './utils/deduplicatePatterns.js';
|
||||
import { getCFObject } from './utils/getCFObject.js';
|
||||
import {
|
||||
getD1Bindings,
|
||||
getDOBindings,
|
||||
getEnvVars,
|
||||
getKVBindings,
|
||||
getR2Bindings,
|
||||
} from './utils/parser.js';
|
||||
import { prependForwardSlash } from './utils/prependForwardSlash.js';
|
||||
import { rewriteWasmImportPath } from './utils/rewriteWasmImportPath.js';
|
||||
import { wasmModuleLoader } from './utils/wasm-module-loader.js';
|
||||
|
||||
export type { AdvancedRuntime } from './entrypoints/server.advanced.js';
|
||||
export type { DirectoryRuntime } from './entrypoints/server.directory.js';
|
||||
|
||||
type Options = {
|
||||
mode?: 'directory' | 'advanced';
|
||||
functionPerRoute?: boolean;
|
||||
/** Configure automatic `routes.json` generation */
|
||||
routes?: {
|
||||
/** Strategy for generating `include` and `exclude` patterns
|
||||
* - `auto`: Will use the strategy that generates the fewest entries.
|
||||
* - `include`: For each page or endpoint in your application that is not prerendered, an entry in the `include` array will be generated. For each page that is prerendered and whose path is matched by an `include` entry, an entry in the `exclude` array will be generated.
|
||||
* - `exclude`: One `"/*"` entry in the `include` array will be generated. For each page that is prerendered, an entry in the `exclude` array will be generated.
|
||||
* */
|
||||
strategy?: 'auto' | 'include' | 'exclude';
|
||||
/** Additional `include` patterns */
|
||||
include?: string[];
|
||||
/** Additional `exclude` patterns */
|
||||
exclude?: string[];
|
||||
};
|
||||
/**
|
||||
* 'off': current behaviour (wrangler is needed)
|
||||
* 'local': use a static req.cf object, and env vars defined in wrangler.toml & .dev.vars (astro dev is enough)
|
||||
* 'remote': use a dynamic, live req.cf object, and env vars defined in wrangler.toml & .dev.vars (astro dev is enough)
|
||||
*/
|
||||
runtime?: 'off' | 'local' | 'remote';
|
||||
wasmModuleImports?: boolean;
|
||||
};
|
||||
|
||||
interface BuildConfig {
|
||||
server: URL;
|
||||
client: URL;
|
||||
assets: string;
|
||||
serverEntry: string;
|
||||
split?: boolean;
|
||||
}
|
||||
|
||||
export default function createIntegration(args?: Options): AstroIntegration {
|
||||
let _config: AstroConfig;
|
||||
let _buildConfig: BuildConfig;
|
||||
let _mf: Miniflare;
|
||||
let _entryPoints = new Map<RouteData, URL>();
|
||||
|
||||
const SERVER_BUILD_FOLDER = '/$server_build/';
|
||||
|
||||
const isModeDirectory = args?.mode === 'directory';
|
||||
const functionPerRoute = args?.functionPerRoute ?? false;
|
||||
const runtimeMode = args?.runtime ?? 'off';
|
||||
|
||||
return {
|
||||
name: '@astrojs/cloudflare',
|
||||
hooks: {
|
||||
'astro:config:setup': ({ config, updateConfig }) => {
|
||||
updateConfig({
|
||||
build: {
|
||||
client: new URL(`.${config.base}`, config.outDir),
|
||||
server: new URL(`.${SERVER_BUILD_FOLDER}`, config.outDir),
|
||||
serverEntry: '_worker.mjs',
|
||||
redirects: false,
|
||||
},
|
||||
vite: {
|
||||
// load .wasm files as WebAssembly modules
|
||||
plugins: [
|
||||
wasmModuleLoader({
|
||||
disabled: !args?.wasmModuleImports,
|
||||
assetsDirectory: config.build.assets,
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
},
|
||||
'astro:config:done': ({ setAdapter, config }) => {
|
||||
setAdapter(getAdapter({ isModeDirectory, functionPerRoute }));
|
||||
_config = config;
|
||||
_buildConfig = config.build;
|
||||
|
||||
if (_config.output === 'static') {
|
||||
throw new AstroError(
|
||||
'[@astrojs/cloudflare] `output: "server"` or `output: "hybrid"` is required to use this adapter. Otherwise, this adapter is not necessary to deploy a static site to Cloudflare.'
|
||||
);
|
||||
}
|
||||
|
||||
if (_config.base === SERVER_BUILD_FOLDER) {
|
||||
throw new AstroError(
|
||||
'[@astrojs/cloudflare] `base: "${SERVER_BUILD_FOLDER}"` is not allowed. Please change your `base` config to something else.'
|
||||
);
|
||||
}
|
||||
},
|
||||
'astro:server:setup': ({ server }) => {
|
||||
if (runtimeMode !== 'off') {
|
||||
server.middlewares.use(async function middleware(req, res, next) {
|
||||
try {
|
||||
const cf = await getCFObject(runtimeMode);
|
||||
const vars = await getEnvVars();
|
||||
const D1Bindings = await getD1Bindings();
|
||||
const R2Bindings = await getR2Bindings();
|
||||
const KVBindings = await getKVBindings();
|
||||
const DOBindings = await getDOBindings();
|
||||
let bindingsEnv = new Object({});
|
||||
|
||||
// fix for the error "kj/filesystem-disk-unix.c++:1709: warning: PWD environment variable doesn't match current directory."
|
||||
// note: This mismatch might be primarily due to the test runner.
|
||||
const originalPWD = process.env.PWD;
|
||||
process.env.PWD = process.cwd();
|
||||
|
||||
_mf = new Miniflare({
|
||||
modules: true,
|
||||
script: '',
|
||||
cache: true,
|
||||
cachePersist: true,
|
||||
cacheWarnUsage: true,
|
||||
d1Databases: D1Bindings,
|
||||
d1Persist: true,
|
||||
r2Buckets: R2Bindings,
|
||||
r2Persist: true,
|
||||
kvNamespaces: KVBindings,
|
||||
kvPersist: true,
|
||||
durableObjects: DOBindings,
|
||||
durableObjectsPersist: true,
|
||||
});
|
||||
await _mf.ready;
|
||||
|
||||
for (const D1Binding of D1Bindings) {
|
||||
const db = await _mf.getD1Database(D1Binding);
|
||||
Reflect.set(bindingsEnv, D1Binding, db);
|
||||
}
|
||||
for (const R2Binding of R2Bindings) {
|
||||
const bucket = await _mf.getR2Bucket(R2Binding);
|
||||
Reflect.set(bindingsEnv, R2Binding, bucket);
|
||||
}
|
||||
for (const KVBinding of KVBindings) {
|
||||
const namespace = await _mf.getKVNamespace(KVBinding);
|
||||
Reflect.set(bindingsEnv, KVBinding, namespace);
|
||||
}
|
||||
for (const key in DOBindings) {
|
||||
if (Object.prototype.hasOwnProperty.call(DOBindings, key)) {
|
||||
const DO = await _mf.getDurableObjectNamespace(key);
|
||||
Reflect.set(bindingsEnv, key, DO);
|
||||
}
|
||||
}
|
||||
const mfCache = await _mf.getCaches();
|
||||
|
||||
process.env.PWD = originalPWD;
|
||||
const clientLocalsSymbol = Symbol.for('astro.locals');
|
||||
Reflect.set(req, clientLocalsSymbol, {
|
||||
runtime: {
|
||||
env: {
|
||||
// default binding for static assets will be dynamic once we support mocking of bindings
|
||||
ASSETS: {},
|
||||
// this is just a VAR for CF to change build behavior, on dev it should be 0
|
||||
CF_PAGES: '0',
|
||||
// will be fetched from git dynamically once we support mocking of bindings
|
||||
CF_PAGES_BRANCH: 'TBA',
|
||||
// will be fetched from git dynamically once we support mocking of bindings
|
||||
CF_PAGES_COMMIT_SHA: 'TBA',
|
||||
CF_PAGES_URL: `http://${req.headers.host}`,
|
||||
...bindingsEnv,
|
||||
...vars,
|
||||
},
|
||||
cf: cf,
|
||||
waitUntil: (_promise: Promise<any>) => {
|
||||
return;
|
||||
},
|
||||
caches: mfCache,
|
||||
},
|
||||
});
|
||||
next();
|
||||
} catch {
|
||||
next();
|
||||
}
|
||||
});
|
||||
}
|
||||
},
|
||||
'astro:server:done': async ({ logger }) => {
|
||||
if (_mf) {
|
||||
logger.info('Cleaning up the Miniflare instance, and shutting down the workerd server.');
|
||||
await _mf.dispose();
|
||||
}
|
||||
},
|
||||
'astro:build:setup': ({ vite, target }) => {
|
||||
if (target === 'server') {
|
||||
vite.resolve ||= {};
|
||||
vite.resolve.alias ||= {};
|
||||
|
||||
const aliases = [{ find: 'react-dom/server', replacement: 'react-dom/server.browser' }];
|
||||
|
||||
if (Array.isArray(vite.resolve.alias)) {
|
||||
vite.resolve.alias = [...vite.resolve.alias, ...aliases];
|
||||
} else {
|
||||
for (const alias of aliases) {
|
||||
(vite.resolve.alias as Record<string, string>)[alias.find] = alias.replacement;
|
||||
}
|
||||
}
|
||||
vite.ssr ||= {};
|
||||
vite.ssr.target = 'webworker';
|
||||
|
||||
// Cloudflare env is only available per request. This isn't feasible for code that access env vars
|
||||
// in a global way, so we shim their access as `process.env.*`. We will populate `process.env` later
|
||||
// in its fetch handler.
|
||||
vite.define = {
|
||||
'process.env': 'process.env',
|
||||
...vite.define,
|
||||
};
|
||||
}
|
||||
},
|
||||
'astro:build:ssr': ({ entryPoints }) => {
|
||||
_entryPoints = entryPoints;
|
||||
},
|
||||
'astro:build:done': async ({ pages, routes, dir }) => {
|
||||
const functionsUrl = new URL('functions/', _config.root);
|
||||
const assetsUrl = new URL(_buildConfig.assets, _buildConfig.client);
|
||||
|
||||
if (isModeDirectory) {
|
||||
await fs.promises.mkdir(functionsUrl, { recursive: true });
|
||||
}
|
||||
|
||||
// TODO: remove _buildConfig.split in Astro 4.0
|
||||
if (isModeDirectory && (_buildConfig.split || functionPerRoute)) {
|
||||
const entryPointsURL = [..._entryPoints.values()];
|
||||
const entryPaths = entryPointsURL.map((entry) => fileURLToPath(entry));
|
||||
const outputUrl = new URL('$astro', _buildConfig.server);
|
||||
const outputDir = fileURLToPath(outputUrl);
|
||||
//
|
||||
// Sadly, when wasmModuleImports is enabled, this needs to run a separate esbuild build for each depth of routes/entrypoints
|
||||
// independently so that relative import paths to the assets are the correct depth of '../' traversals
|
||||
// This is inefficient, so wasmModuleImports is opt-in. This could potentially be improved in the future by
|
||||
// taking advantage of the esbuild "onEnd" hook to rewrite import code per entry point relative to where the final
|
||||
// destination of the entrypoint is
|
||||
const entryPathsGroupedByDepth = !args.wasmModuleImports
|
||||
? [entryPaths]
|
||||
: entryPaths
|
||||
.reduce((sum, thisPath) => {
|
||||
const depthFromRoot = thisPath.split(sep).length;
|
||||
sum.set(depthFromRoot, (sum.get(depthFromRoot) || []).concat(thisPath));
|
||||
return sum;
|
||||
}, new Map<number, string[]>())
|
||||
.values();
|
||||
|
||||
for (const pathsGroup of entryPathsGroupedByDepth) {
|
||||
// for some reason this exports to "entry.pages" on windows instead of "pages" on unix environments.
|
||||
// This deduces the name of the "pages" build directory
|
||||
const pagesDirname = relative(fileURLToPath(_buildConfig.server), pathsGroup[0]).split(
|
||||
sep
|
||||
)[0];
|
||||
const absolutePagesDirname = fileURLToPath(new URL(pagesDirname, _buildConfig.server));
|
||||
const urlWithinFunctions = new URL(
|
||||
relative(absolutePagesDirname, pathsGroup[0]),
|
||||
functionsUrl
|
||||
);
|
||||
const relativePathToAssets = relative(
|
||||
dirname(fileURLToPath(urlWithinFunctions)),
|
||||
fileURLToPath(assetsUrl)
|
||||
);
|
||||
await esbuild.build({
|
||||
target: 'es2022',
|
||||
platform: 'browser',
|
||||
conditions: ['workerd', 'worker', 'browser'],
|
||||
external: [
|
||||
'node:assert',
|
||||
'node:async_hooks',
|
||||
'node:buffer',
|
||||
'node:crypto',
|
||||
'node:diagnostics_channel',
|
||||
'node:events',
|
||||
'node:path',
|
||||
'node:process',
|
||||
'node:stream',
|
||||
'node:string_decoder',
|
||||
'node:util',
|
||||
'cloudflare:*',
|
||||
],
|
||||
entryPoints: pathsGroup,
|
||||
outbase: absolutePagesDirname,
|
||||
outdir: outputDir,
|
||||
allowOverwrite: true,
|
||||
format: 'esm',
|
||||
bundle: true,
|
||||
minify: _config.vite?.build?.minify !== false,
|
||||
banner: {
|
||||
js: `globalThis.process = {
|
||||
argv: [],
|
||||
env: {},
|
||||
};`,
|
||||
},
|
||||
logOverride: {
|
||||
'ignored-bare-import': 'silent',
|
||||
},
|
||||
plugins: !args?.wasmModuleImports
|
||||
? []
|
||||
: [rewriteWasmImportPath({ relativePathToAssets })],
|
||||
});
|
||||
}
|
||||
|
||||
const outputFiles: Array<string> = await glob(`**/*`, {
|
||||
cwd: outputDir,
|
||||
filesOnly: true,
|
||||
});
|
||||
|
||||
// move the files into the functions folder
|
||||
// & make sure the file names match Cloudflare syntax for routing
|
||||
for (const outputFile of outputFiles) {
|
||||
const path = outputFile.split(sep);
|
||||
|
||||
const finalSegments = path.map((segment) =>
|
||||
segment
|
||||
.replace(/(\_)(\w+)(\_)/g, (_, __, prop) => {
|
||||
return `[${prop}]`;
|
||||
})
|
||||
.replace(/(\_\-\-\-)(\w+)(\_)/g, (_, __, prop) => {
|
||||
return `[[${prop}]]`;
|
||||
})
|
||||
);
|
||||
|
||||
finalSegments[finalSegments.length - 1] = finalSegments[finalSegments.length - 1]
|
||||
.replace('entry.', '')
|
||||
.replace(/(.*)\.(\w+)\.(\w+)$/g, (_, fileName, __, newExt) => {
|
||||
return `${fileName}.${newExt}`;
|
||||
});
|
||||
|
||||
const finalDirPath = finalSegments.slice(0, -1).join(sep);
|
||||
const finalPath = finalSegments.join(sep);
|
||||
|
||||
const newDirUrl = new URL(finalDirPath, functionsUrl);
|
||||
await fs.promises.mkdir(newDirUrl, { recursive: true });
|
||||
|
||||
const oldFileUrl = new URL(`$astro/${outputFile}`, outputUrl);
|
||||
const newFileUrl = new URL(finalPath, functionsUrl);
|
||||
await fs.promises.rename(oldFileUrl, newFileUrl);
|
||||
}
|
||||
} else {
|
||||
const entryPath = fileURLToPath(new URL(_buildConfig.serverEntry, _buildConfig.server));
|
||||
const entryUrl = new URL(_buildConfig.serverEntry, _config.outDir);
|
||||
const buildPath = fileURLToPath(entryUrl);
|
||||
// A URL for the final build path after renaming
|
||||
const finalBuildUrl = pathToFileURL(buildPath.replace(/\.mjs$/, '.js'));
|
||||
|
||||
await esbuild.build({
|
||||
target: 'es2022',
|
||||
platform: 'browser',
|
||||
conditions: ['workerd', 'worker', 'browser'],
|
||||
external: [
|
||||
'node:assert',
|
||||
'node:async_hooks',
|
||||
'node:buffer',
|
||||
'node:crypto',
|
||||
'node:diagnostics_channel',
|
||||
'node:events',
|
||||
'node:path',
|
||||
'node:process',
|
||||
'node:stream',
|
||||
'node:string_decoder',
|
||||
'node:util',
|
||||
'cloudflare:*',
|
||||
],
|
||||
entryPoints: [entryPath],
|
||||
outfile: buildPath,
|
||||
allowOverwrite: true,
|
||||
format: 'esm',
|
||||
bundle: true,
|
||||
minify: _config.vite?.build?.minify !== false,
|
||||
banner: {
|
||||
js: `globalThis.process = {
|
||||
argv: [],
|
||||
env: {},
|
||||
};`,
|
||||
},
|
||||
logOverride: {
|
||||
'ignored-bare-import': 'silent',
|
||||
},
|
||||
plugins: !args?.wasmModuleImports
|
||||
? []
|
||||
: [
|
||||
rewriteWasmImportPath({
|
||||
relativePathToAssets: isModeDirectory
|
||||
? relative(fileURLToPath(functionsUrl), fileURLToPath(assetsUrl))
|
||||
: relative(fileURLToPath(_buildConfig.client), fileURLToPath(assetsUrl)),
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
// Rename to worker.js
|
||||
await fs.promises.rename(buildPath, finalBuildUrl);
|
||||
|
||||
if (isModeDirectory) {
|
||||
const directoryUrl = new URL('[[path]].js', functionsUrl);
|
||||
await fs.promises.rename(finalBuildUrl, directoryUrl);
|
||||
}
|
||||
}
|
||||
|
||||
// throw the server folder in the bin
|
||||
const serverUrl = new URL(_buildConfig.server);
|
||||
await fs.promises.rm(serverUrl, { recursive: true, force: true });
|
||||
|
||||
// move cloudflare specific files to the root
|
||||
const cloudflareSpecialFiles = ['_headers', '_redirects', '_routes.json'];
|
||||
|
||||
if (_config.base !== '/') {
|
||||
for (const file of cloudflareSpecialFiles) {
|
||||
try {
|
||||
await fs.promises.rename(
|
||||
new URL(file, _buildConfig.client),
|
||||
new URL(file, _config.outDir)
|
||||
);
|
||||
} catch (e) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Also add the worker file so it's excluded from the _routes.json generation
|
||||
if (!isModeDirectory) {
|
||||
cloudflareSpecialFiles.push('_worker.js');
|
||||
}
|
||||
|
||||
const routesExists = await fs.promises
|
||||
.stat(new URL('./_routes.json', _config.outDir))
|
||||
.then((stat) => stat.isFile())
|
||||
.catch(() => false);
|
||||
|
||||
// this creates a _routes.json, in case there is none present to enable
|
||||
// cloudflare to handle static files and support _redirects configuration
|
||||
if (!routesExists) {
|
||||
/**
|
||||
* These route types are candidates for being part of the `_routes.json` `include` array.
|
||||
*/
|
||||
const potentialFunctionRouteTypes = ['endpoint', 'page'];
|
||||
|
||||
const functionEndpoints = routes
|
||||
// Certain route types, when their prerender option is set to false, run on the server as function invocations
|
||||
.filter((route) => potentialFunctionRouteTypes.includes(route.type) && !route.prerender)
|
||||
.map((route) => {
|
||||
const includePattern =
|
||||
'/' +
|
||||
route.segments
|
||||
.flat()
|
||||
.map((segment) => (segment.dynamic ? '*' : segment.content))
|
||||
.join('/');
|
||||
|
||||
const regexp = new RegExp(
|
||||
'^\\/' +
|
||||
route.segments
|
||||
.flat()
|
||||
.map((segment) => (segment.dynamic ? '(.*)' : segment.content))
|
||||
.join('\\/') +
|
||||
'$'
|
||||
);
|
||||
|
||||
return {
|
||||
includePattern,
|
||||
regexp,
|
||||
};
|
||||
});
|
||||
|
||||
const staticPathList: Array<string> = (
|
||||
await glob(`${fileURLToPath(_buildConfig.client)}/**/*`, {
|
||||
cwd: fileURLToPath(_config.outDir),
|
||||
filesOnly: true,
|
||||
dot: true,
|
||||
})
|
||||
)
|
||||
.filter((file: string) => cloudflareSpecialFiles.indexOf(file) < 0)
|
||||
.map((file: string) => `/${file.replace(/\\/g, '/')}`);
|
||||
|
||||
for (let page of pages) {
|
||||
let pagePath = prependForwardSlash(page.pathname);
|
||||
if (_config.base !== '/') {
|
||||
const base = _config.base.endsWith('/') ? _config.base.slice(0, -1) : _config.base;
|
||||
pagePath = `${base}${pagePath}`;
|
||||
}
|
||||
staticPathList.push(pagePath);
|
||||
}
|
||||
|
||||
const redirectsExists = await fs.promises
|
||||
.stat(new URL('./_redirects', _config.outDir))
|
||||
.then((stat) => stat.isFile())
|
||||
.catch(() => false);
|
||||
|
||||
// convert all redirect source paths into a list of routes
|
||||
// and add them to the static path
|
||||
if (redirectsExists) {
|
||||
const redirects = (
|
||||
await fs.promises.readFile(new URL('./_redirects', _config.outDir), 'utf-8')
|
||||
)
|
||||
.split(os.EOL)
|
||||
.map((line) => {
|
||||
const parts = line.split(' ');
|
||||
if (parts.length < 2) {
|
||||
return null;
|
||||
} else {
|
||||
// convert /products/:id to /products/*
|
||||
return (
|
||||
parts[0]
|
||||
.replace(/\/:.*?(?=\/|$)/g, '/*')
|
||||
// remove query params as they are not supported by cloudflare
|
||||
.replace(/\?.*$/, '')
|
||||
);
|
||||
}
|
||||
})
|
||||
.filter(
|
||||
(line, index, arr) => line !== null && arr.indexOf(line) === index
|
||||
) as string[];
|
||||
|
||||
if (redirects.length > 0) {
|
||||
staticPathList.push(...redirects);
|
||||
}
|
||||
}
|
||||
|
||||
const redirectRoutes: [RouteData, string][] = routes
|
||||
.filter((r) => r.type === 'redirect')
|
||||
.map((r) => {
|
||||
return [r, ''];
|
||||
});
|
||||
const trueRedirects = createRedirectsFromAstroRoutes({
|
||||
config: _config,
|
||||
routeToDynamicTargetMap: new Map(Array.from(redirectRoutes)),
|
||||
dir,
|
||||
});
|
||||
if (!trueRedirects.empty()) {
|
||||
await fs.promises.appendFile(
|
||||
new URL('./_redirects', _config.outDir),
|
||||
trueRedirects.print()
|
||||
);
|
||||
}
|
||||
|
||||
staticPathList.push(...routes.filter((r) => r.type === 'redirect').map((r) => r.route));
|
||||
|
||||
const strategy = args?.routes?.strategy ?? 'auto';
|
||||
|
||||
// Strategy `include`: include all function endpoints, and then exclude static paths that would be matched by an include pattern
|
||||
const includeStrategy =
|
||||
strategy === 'exclude'
|
||||
? undefined
|
||||
: {
|
||||
include: deduplicatePatterns(
|
||||
functionEndpoints
|
||||
.map((endpoint) => endpoint.includePattern)
|
||||
.concat(args?.routes?.include ?? [])
|
||||
),
|
||||
exclude: deduplicatePatterns(
|
||||
staticPathList
|
||||
.filter((file: string) =>
|
||||
functionEndpoints.some((endpoint) => endpoint.regexp.test(file))
|
||||
)
|
||||
.concat(args?.routes?.exclude ?? [])
|
||||
),
|
||||
};
|
||||
|
||||
// Cloudflare requires at least one include pattern:
|
||||
// https://developers.cloudflare.com/pages/platform/functions/routing/#limits
|
||||
// So we add a pattern that we immediately exclude again
|
||||
if (includeStrategy?.include.length === 0) {
|
||||
includeStrategy.include = ['/'];
|
||||
includeStrategy.exclude = ['/'];
|
||||
}
|
||||
|
||||
// Strategy `exclude`: include everything, and then exclude all static paths
|
||||
const excludeStrategy =
|
||||
strategy === 'include'
|
||||
? undefined
|
||||
: {
|
||||
include: ['/*'],
|
||||
exclude: deduplicatePatterns(staticPathList.concat(args?.routes?.exclude ?? [])),
|
||||
};
|
||||
|
||||
const includeStrategyLength = includeStrategy
|
||||
? includeStrategy.include.length + includeStrategy.exclude.length
|
||||
: Infinity;
|
||||
|
||||
const excludeStrategyLength = excludeStrategy
|
||||
? excludeStrategy.include.length + excludeStrategy.exclude.length
|
||||
: Infinity;
|
||||
|
||||
const winningStrategy =
|
||||
includeStrategyLength <= excludeStrategyLength ? includeStrategy : excludeStrategy;
|
||||
|
||||
await fs.promises.writeFile(
|
||||
new URL('./_routes.json', _config.outDir),
|
||||
JSON.stringify(
|
||||
{
|
||||
version: 1,
|
||||
...winningStrategy,
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
);
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
|
@ -1,19 +0,0 @@
|
|||
export const isNode =
|
||||
typeof process === 'object' && Object.prototype.toString.call(process) === '[object process]';
|
||||
|
||||
export function getProcessEnvProxy() {
|
||||
return new Proxy(
|
||||
{},
|
||||
{
|
||||
get: (target, prop) => {
|
||||
console.warn(
|
||||
// NOTE: \0 prevents Vite replacement
|
||||
`Unable to access \`import.meta\0.env.${prop.toString()}\` on initialization ` +
|
||||
`as the Cloudflare platform only provides the environment variables per request. ` +
|
||||
`Please move the environment variable access inside a function ` +
|
||||
`that's only called after a request has been received.`
|
||||
);
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
|
@ -1,26 +0,0 @@
|
|||
/**
|
||||
* Remove duplicates and redundant patterns from an `include` or `exclude` list.
|
||||
* Otherwise Cloudflare will throw an error on deployment. It also reduces the number of entries.
|
||||
* E.g. `['/foo/*', '/foo/*', '/foo/bar'] => ['/foo/*']`
|
||||
* @param patterns a list of `include` or `exclude` patterns
|
||||
* @returns a deduplicated list of patterns
|
||||
*/
|
||||
export function deduplicatePatterns(patterns: string[]) {
|
||||
const openPatterns: RegExp[] = [];
|
||||
|
||||
// A value in the set may only occur once; it is unique in the set's collection.
|
||||
// ref: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set
|
||||
return [...new Set(patterns)]
|
||||
.sort((a, b) => a.length - b.length)
|
||||
.filter((pattern) => {
|
||||
if (openPatterns.some((p) => p.test(pattern))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (pattern.endsWith('*')) {
|
||||
openPatterns.push(new RegExp(`^${pattern.replace(/(\*\/)*\*$/g, '.*')}`));
|
||||
}
|
||||
|
||||
return true;
|
||||
});
|
||||
}
|
|
@ -1,70 +0,0 @@
|
|||
import type { IncomingRequestCfProperties } from '@cloudflare/workers-types/experimental';
|
||||
|
||||
export async function getCFObject(
|
||||
runtimeMode: string
|
||||
): Promise<IncomingRequestCfProperties | void> {
|
||||
const CF_ENDPOINT = 'https://workers.cloudflare.com/cf.json';
|
||||
const CF_FALLBACK: IncomingRequestCfProperties = {
|
||||
asOrganization: '',
|
||||
asn: 395747,
|
||||
colo: 'DFW',
|
||||
city: 'Austin',
|
||||
region: 'Texas',
|
||||
regionCode: 'TX',
|
||||
metroCode: '635',
|
||||
postalCode: '78701',
|
||||
country: 'US',
|
||||
continent: 'NA',
|
||||
timezone: 'America/Chicago',
|
||||
latitude: '30.27130',
|
||||
longitude: '-97.74260',
|
||||
clientTcpRtt: 0,
|
||||
httpProtocol: 'HTTP/1.1',
|
||||
requestPriority: 'weight=192;exclusive=0',
|
||||
tlsCipher: 'AEAD-AES128-GCM-SHA256',
|
||||
tlsVersion: 'TLSv1.3',
|
||||
tlsClientAuth: {
|
||||
certPresented: '0',
|
||||
certVerified: 'NONE',
|
||||
certRevoked: '0',
|
||||
certIssuerDN: '',
|
||||
certSubjectDN: '',
|
||||
certIssuerDNRFC2253: '',
|
||||
certSubjectDNRFC2253: '',
|
||||
certIssuerDNLegacy: '',
|
||||
certSubjectDNLegacy: '',
|
||||
certSerial: '',
|
||||
certIssuerSerial: '',
|
||||
certSKI: '',
|
||||
certIssuerSKI: '',
|
||||
certFingerprintSHA1: '',
|
||||
certFingerprintSHA256: '',
|
||||
certNotBefore: '',
|
||||
certNotAfter: '',
|
||||
},
|
||||
edgeRequestKeepAliveStatus: 0,
|
||||
hostMetadata: undefined,
|
||||
clientTrustScore: 99,
|
||||
botManagement: {
|
||||
corporateProxy: false,
|
||||
verifiedBot: false,
|
||||
ja3Hash: '25b4882c2bcb50cd6b469ff28c596742',
|
||||
staticResource: false,
|
||||
detectionIds: [],
|
||||
score: 99,
|
||||
},
|
||||
};
|
||||
|
||||
if (runtimeMode === 'local') {
|
||||
return CF_FALLBACK;
|
||||
} else if (runtimeMode === 'remote') {
|
||||
try {
|
||||
const res = await fetch(CF_ENDPOINT);
|
||||
const cfText = await res.text();
|
||||
const storedCf = JSON.parse(cfText);
|
||||
return storedCf;
|
||||
} catch (e: any) {
|
||||
return CF_FALLBACK;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,191 +0,0 @@
|
|||
/**
|
||||
* This file is a derivative work of wrangler by Cloudflare
|
||||
* An upstream request for exposing this API was made here:
|
||||
* https://github.com/cloudflare/workers-sdk/issues/3897
|
||||
*
|
||||
* Until further notice, we will be using this file as a workaround
|
||||
* TODO: Tackle this file once there is a decision on the upstream request
|
||||
*/
|
||||
|
||||
import type {} from '@cloudflare/workers-types/experimental';
|
||||
import TOML from '@iarna/toml';
|
||||
import dotenv from 'dotenv';
|
||||
import { findUpSync } from 'find-up';
|
||||
import * as fs from 'node:fs';
|
||||
import { dirname, resolve } from 'node:path';
|
||||
let _wrangler: any;
|
||||
|
||||
function findWranglerToml(
|
||||
referencePath: string = process.cwd(),
|
||||
preferJson = false
|
||||
): string | undefined {
|
||||
if (preferJson) {
|
||||
return (
|
||||
findUpSync(`wrangler.json`, { cwd: referencePath }) ??
|
||||
findUpSync(`wrangler.toml`, { cwd: referencePath })
|
||||
);
|
||||
}
|
||||
return findUpSync(`wrangler.toml`, { cwd: referencePath });
|
||||
}
|
||||
type File = {
|
||||
file?: string;
|
||||
fileText?: string;
|
||||
};
|
||||
type Location = File & {
|
||||
line: number;
|
||||
column: number;
|
||||
length?: number;
|
||||
lineText?: string;
|
||||
suggestion?: string;
|
||||
};
|
||||
type Message = {
|
||||
text: string;
|
||||
location?: Location;
|
||||
notes?: Message[];
|
||||
kind?: 'warning' | 'error';
|
||||
};
|
||||
class ParseError extends Error implements Message {
|
||||
readonly text: string;
|
||||
readonly notes: Message[];
|
||||
readonly location?: Location;
|
||||
readonly kind: 'warning' | 'error';
|
||||
|
||||
constructor({ text, notes, location, kind }: Message) {
|
||||
super(text);
|
||||
this.name = this.constructor.name;
|
||||
this.text = text;
|
||||
this.notes = notes ?? [];
|
||||
this.location = location;
|
||||
this.kind = kind ?? 'error';
|
||||
}
|
||||
}
|
||||
const TOML_ERROR_NAME = 'TomlError';
|
||||
const TOML_ERROR_SUFFIX = ' at row ';
|
||||
type TomlError = Error & {
|
||||
line: number;
|
||||
col: number;
|
||||
};
|
||||
function parseTOML(input: string, file?: string): TOML.JsonMap | never {
|
||||
try {
|
||||
// Normalize CRLF to LF to avoid hitting https://github.com/iarna/iarna-toml/issues/33.
|
||||
const normalizedInput = input.replace(/\r\n/g, '\n');
|
||||
return TOML.parse(normalizedInput);
|
||||
} catch (err) {
|
||||
const { name, message, line, col } = err as TomlError;
|
||||
if (name !== TOML_ERROR_NAME) {
|
||||
throw err;
|
||||
}
|
||||
const text = message.substring(0, message.lastIndexOf(TOML_ERROR_SUFFIX));
|
||||
const lineText = input.split('\n')[line];
|
||||
const location = {
|
||||
lineText,
|
||||
line: line + 1,
|
||||
column: col - 1,
|
||||
file,
|
||||
fileText: input,
|
||||
};
|
||||
throw new ParseError({ text, location });
|
||||
}
|
||||
}
|
||||
|
||||
export interface DotEnv {
|
||||
path: string;
|
||||
parsed: dotenv.DotenvParseOutput;
|
||||
}
|
||||
function tryLoadDotEnv(path: string): DotEnv | undefined {
|
||||
try {
|
||||
const parsed = dotenv.parse(fs.readFileSync(path));
|
||||
return { path, parsed };
|
||||
} catch (e) {
|
||||
// logger.debug(`Failed to load .env file "${path}":`, e);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Loads a dotenv file from <path>, preferring to read <path>.<environment> if
|
||||
* <environment> is defined and that file exists.
|
||||
*/
|
||||
|
||||
export function loadDotEnv(path: string): DotEnv | undefined {
|
||||
return tryLoadDotEnv(path);
|
||||
}
|
||||
function getVarsForDev(config: any, configPath: string | undefined): any {
|
||||
const configDir = resolve(dirname(configPath ?? '.'));
|
||||
const devVarsPath = resolve(configDir, '.dev.vars');
|
||||
const loaded = loadDotEnv(devVarsPath);
|
||||
if (loaded !== undefined) {
|
||||
return {
|
||||
...config.vars,
|
||||
...loaded.parsed,
|
||||
};
|
||||
} else {
|
||||
return config.vars;
|
||||
}
|
||||
}
|
||||
|
||||
function parseConfig() {
|
||||
if (_wrangler) return _wrangler;
|
||||
let rawConfig;
|
||||
const configPath = findWranglerToml(process.cwd(), false); // false = args.experimentalJsonConfig
|
||||
if (!configPath) {
|
||||
throw new Error('Could not find wrangler.toml');
|
||||
}
|
||||
// Load the configuration from disk if available
|
||||
if (configPath?.endsWith('toml')) {
|
||||
rawConfig = parseTOML(fs.readFileSync(configPath).toString(), configPath);
|
||||
}
|
||||
_wrangler = { rawConfig, configPath };
|
||||
return { rawConfig, configPath };
|
||||
}
|
||||
|
||||
export async function getEnvVars() {
|
||||
const { rawConfig, configPath } = parseConfig();
|
||||
const vars = getVarsForDev(rawConfig, configPath);
|
||||
return vars;
|
||||
}
|
||||
|
||||
export async function getD1Bindings() {
|
||||
const { rawConfig } = parseConfig();
|
||||
if (!rawConfig) return [];
|
||||
if (!rawConfig?.d1_databases) return [];
|
||||
const bindings = (rawConfig?.d1_databases as []).map(
|
||||
(binding: { binding: string }) => binding.binding
|
||||
);
|
||||
return bindings;
|
||||
}
|
||||
|
||||
export async function getR2Bindings() {
|
||||
const { rawConfig } = parseConfig();
|
||||
if (!rawConfig) return [];
|
||||
if (!rawConfig?.r2_buckets) return [];
|
||||
const bindings = (rawConfig?.r2_buckets as []).map(
|
||||
(binding: { binding: string }) => binding.binding
|
||||
);
|
||||
return bindings;
|
||||
}
|
||||
|
||||
export async function getKVBindings() {
|
||||
const { rawConfig } = parseConfig();
|
||||
if (!rawConfig) return [];
|
||||
if (!rawConfig?.kv_namespaces) return [];
|
||||
const bindings = (rawConfig?.kv_namespaces as []).map(
|
||||
(binding: { binding: string }) => binding.binding
|
||||
);
|
||||
return bindings;
|
||||
}
|
||||
|
||||
export function getDOBindings(): Record<
|
||||
string,
|
||||
{ scriptName?: string | undefined; unsafeUniqueKey?: string | undefined; className: string }
|
||||
> {
|
||||
const { rawConfig } = parseConfig();
|
||||
if (!rawConfig) return {};
|
||||
if (!rawConfig?.durable_objects) return {};
|
||||
const output = new Object({}) as Record<
|
||||
string,
|
||||
{ scriptName?: string | undefined; unsafeUniqueKey?: string | undefined; className: string }
|
||||
>;
|
||||
for (const binding of rawConfig?.durable_objects.bindings) {
|
||||
Reflect.set(output, binding.name, { className: binding.class_name });
|
||||
}
|
||||
return output;
|
||||
}
|
|
@ -1,3 +0,0 @@
|
|||
export function prependForwardSlash(path: string) {
|
||||
return path[0] === '/' ? path : '/' + path;
|
||||
}
|
|
@ -1,29 +0,0 @@
|
|||
import esbuild from 'esbuild';
|
||||
import { basename } from 'node:path';
|
||||
|
||||
/**
|
||||
*
|
||||
* @param relativePathToAssets - relative path from the final location for the current esbuild output bundle, to the assets directory.
|
||||
*/
|
||||
export function rewriteWasmImportPath({
|
||||
relativePathToAssets,
|
||||
}: {
|
||||
relativePathToAssets: string;
|
||||
}): esbuild.Plugin {
|
||||
return {
|
||||
name: 'wasm-loader',
|
||||
setup(build) {
|
||||
build.onResolve({ filter: /.*\.wasm.mjs$/ }, (args) => {
|
||||
const updatedPath = [
|
||||
relativePathToAssets.replaceAll('\\', '/'),
|
||||
basename(args.path).replace(/\.mjs$/, ''),
|
||||
].join('/');
|
||||
|
||||
return {
|
||||
path: updatedPath,
|
||||
external: true, // mark it as external in the bundle
|
||||
};
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
|
@ -1,119 +0,0 @@
|
|||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import { type Plugin } from 'vite';
|
||||
|
||||
/**
|
||||
* Loads '*.wasm?module' imports as WebAssembly modules, which is the only way to load WASM in cloudflare workers.
|
||||
* Current proposal for WASM modules: https://github.com/WebAssembly/esm-integration/tree/main/proposals/esm-integration
|
||||
* Cloudflare worker WASM from javascript support: https://developers.cloudflare.com/workers/runtime-apis/webassembly/javascript/
|
||||
* @param disabled - if true, throws a helpful error message when wasm is encountered and wasm imports are not enabled,
|
||||
* otherwise it will error obscurely in the esbuild and vite builds
|
||||
* @param assetsDirectory - the folder name for the assets directory in the build directory. Usually '_astro'
|
||||
* @returns Vite plugin to load WASM tagged with '?module' as a WASM modules
|
||||
*/
|
||||
export function wasmModuleLoader({
|
||||
disabled,
|
||||
assetsDirectory,
|
||||
}: {
|
||||
disabled: boolean;
|
||||
assetsDirectory: string;
|
||||
}): Plugin {
|
||||
const postfix = '.wasm?module';
|
||||
let isDev = false;
|
||||
|
||||
return {
|
||||
name: 'vite:wasm-module-loader',
|
||||
enforce: 'pre',
|
||||
configResolved(config) {
|
||||
isDev = config.command === 'serve';
|
||||
},
|
||||
config(_, __) {
|
||||
// let vite know that the file format and the magic import string are intentional and will be handled by this plugin
|
||||
return {
|
||||
assetsInclude: ['**/*.wasm?module'],
|
||||
build: { rollupOptions: { external: /^__WASM_ASSET__.+\.wasm\.mjs$/i } },
|
||||
};
|
||||
},
|
||||
|
||||
load(id, _) {
|
||||
if (!id.endsWith(postfix)) {
|
||||
return;
|
||||
}
|
||||
if (disabled) {
|
||||
throw new Error(
|
||||
`WASM modules cannot be loaded unless you add \`wasmModuleImports: true\` to your astro config.`
|
||||
);
|
||||
}
|
||||
|
||||
const filePath = id.slice(0, -1 * '?module'.length);
|
||||
|
||||
const data = fs.readFileSync(filePath);
|
||||
const base64 = data.toString('base64');
|
||||
|
||||
const base64Module = `
|
||||
const wasmModule = new WebAssembly.Module(Uint8Array.from(atob("${base64}"), c => c.charCodeAt(0)));
|
||||
export default wasmModule
|
||||
`;
|
||||
if (isDev) {
|
||||
// no need to wire up the assets in dev mode, just rewrite
|
||||
return base64Module;
|
||||
} else {
|
||||
// just some shared ID
|
||||
let hash = hashString(base64);
|
||||
// emit the wasm binary as an asset file, to be picked up later by the esbuild bundle for the worker.
|
||||
// give it a shared deterministic name to make things easy for esbuild to switch on later
|
||||
const assetName = path.basename(filePath).split('.')[0] + '.' + hash + '.wasm';
|
||||
this.emitFile({
|
||||
type: 'asset',
|
||||
// put it explicitly in the _astro assets directory with `fileName` rather than `name` so that
|
||||
// vite doesn't give it a random id in its name. We need to be able to easily rewrite from
|
||||
// the .mjs loader and the actual wasm asset later in the ESbuild for the worker
|
||||
fileName: path.join(assetsDirectory, assetName),
|
||||
source: fs.readFileSync(filePath),
|
||||
});
|
||||
|
||||
// however, by default, the SSG generator cannot import the .wasm as a module, so embed as a base64 string
|
||||
const chunkId = this.emitFile({
|
||||
type: 'prebuilt-chunk',
|
||||
fileName: assetName + '.mjs',
|
||||
code: base64Module,
|
||||
});
|
||||
|
||||
return `
|
||||
import wasmModule from "__WASM_ASSET__${chunkId}.wasm.mjs";
|
||||
export default wasmModule;
|
||||
`;
|
||||
}
|
||||
},
|
||||
|
||||
// output original wasm file relative to the chunk
|
||||
renderChunk(code, chunk, _) {
|
||||
if (isDev) return;
|
||||
|
||||
if (!/__WASM_ASSET__/g.test(code)) return;
|
||||
|
||||
const final = code.replaceAll(/__WASM_ASSET__([a-z\d]+).wasm.mjs/g, (s, assetId) => {
|
||||
const fileName = this.getFileName(assetId);
|
||||
const relativePath = path
|
||||
.relative(path.dirname(chunk.fileName), fileName)
|
||||
.replaceAll('\\', '/'); // fix windows paths for import
|
||||
return `./${relativePath}`;
|
||||
});
|
||||
|
||||
return { code: final };
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a deterministic 32 bit hash code from a string
|
||||
*/
|
||||
function hashString(str: string): string {
|
||||
let hash = 0;
|
||||
for (let i = 0; i < str.length; i++) {
|
||||
const char = str.charCodeAt(i);
|
||||
hash = (hash << 5) - hash + char;
|
||||
hash &= hash; // Convert to 32bit integer
|
||||
}
|
||||
return new Uint32Array([hash])[0].toString(36);
|
||||
}
|
|
@ -1,39 +0,0 @@
|
|||
import { loadFixture, runCLI } from './test-utils.js';
|
||||
import { expect } from 'chai';
|
||||
import * as cheerio from 'cheerio';
|
||||
|
||||
describe('Basic app', () => {
|
||||
/** @type {import('./test-utils').Fixture} */
|
||||
let fixture;
|
||||
/** @type {import('./test-utils').WranglerCLI} */
|
||||
let cli;
|
||||
|
||||
before(async function () {
|
||||
fixture = await loadFixture({
|
||||
root: './fixtures/basics/',
|
||||
});
|
||||
await fixture.build();
|
||||
|
||||
cli = await runCLI('./fixtures/basics/', {
|
||||
silent: true,
|
||||
onTimeout: (ex) => {
|
||||
console.log(ex);
|
||||
// if it fails to start, skip for now as it's very flaky
|
||||
this.skip();
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
await cli?.stop();
|
||||
});
|
||||
|
||||
it('can render', async () => {
|
||||
let res = await fetch(`http://127.0.0.1:${cli.port}/`);
|
||||
expect(res.status).to.equal(200);
|
||||
let html = await res.text();
|
||||
let $ = cheerio.load(html);
|
||||
expect($('h1').text()).to.equal('Testing');
|
||||
expect($('#env').text()).to.equal('secret');
|
||||
});
|
||||
});
|
|
@ -1,127 +0,0 @@
|
|||
import { loadFixture, runCLI } from './test-utils.js';
|
||||
import { expect } from 'chai';
|
||||
import * as cheerio from 'cheerio';
|
||||
import cloudflare from '../dist/index.js';
|
||||
|
||||
describe('Wrangler Cloudflare Runtime', () => {
|
||||
/** @type {import('./test-utils').Fixture} */
|
||||
let fixture;
|
||||
/** @type {import('./test-utils').WranglerCLI} */
|
||||
let cli;
|
||||
|
||||
before(async function () {
|
||||
fixture = await loadFixture({
|
||||
root: './fixtures/cf/',
|
||||
output: 'server',
|
||||
adapter: cloudflare(),
|
||||
});
|
||||
await fixture.build();
|
||||
|
||||
cli = await runCLI('./fixtures/cf/', {
|
||||
silent: true,
|
||||
onTimeout: (ex) => {
|
||||
console.log(ex);
|
||||
// if it fails to start, skip for now as it's very flaky
|
||||
this.skip();
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
await cli?.stop();
|
||||
});
|
||||
|
||||
it('Load cf and caches API', async () => {
|
||||
let res = await fetch(`http://127.0.0.1:${cli.port}/`);
|
||||
expect(res.status).to.equal(200);
|
||||
let html = await res.text();
|
||||
let $ = cheerio.load(html);
|
||||
|
||||
expect($('#hasRuntime').text()).to.equal('true');
|
||||
expect($('#hasCache').text()).to.equal('true');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Astro Cloudflare Runtime', () => {
|
||||
/** @type {import('./test-utils').Fixture} */
|
||||
let fixture;
|
||||
let devServer;
|
||||
|
||||
before(async () => {
|
||||
fixture = await loadFixture({
|
||||
root: './fixtures/cf/',
|
||||
output: 'server',
|
||||
adapter: cloudflare({
|
||||
runtime: 'local',
|
||||
}),
|
||||
});
|
||||
process.chdir('./test/fixtures/cf');
|
||||
devServer = await fixture.startDevServer();
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
await devServer?.stop();
|
||||
});
|
||||
|
||||
it('adds cf object', async () => {
|
||||
let res = await fixture.fetch('/');
|
||||
expect(res.status).to.equal(200);
|
||||
let html = await res.text();
|
||||
let $ = cheerio.load(html);
|
||||
expect($('#hasCF').text()).to.equal('true');
|
||||
});
|
||||
|
||||
it('adds cache mocking', async () => {
|
||||
let res = await fixture.fetch('/caches');
|
||||
expect(res.status).to.equal(200);
|
||||
let html = await res.text();
|
||||
let $ = cheerio.load(html);
|
||||
expect($('#hasCACHE').text()).to.equal('true');
|
||||
});
|
||||
|
||||
it('adds D1 mocking', async () => {
|
||||
expect(await fixture.pathExists('../.mf/d1')).to.be.true;
|
||||
|
||||
let res = await fixture.fetch('/d1');
|
||||
expect(res.status).to.equal(200);
|
||||
let html = await res.text();
|
||||
let $ = cheerio.load(html);
|
||||
expect($('#hasDB').text()).to.equal('true');
|
||||
expect($('#hasPRODDB').text()).to.equal('true');
|
||||
expect($('#hasACCESS').text()).to.equal('true');
|
||||
});
|
||||
|
||||
it('adds R2 mocking', async () => {
|
||||
expect(await fixture.pathExists('../.mf/r2')).to.be.true;
|
||||
|
||||
let res = await fixture.fetch('/r2');
|
||||
expect(res.status).to.equal(200);
|
||||
let html = await res.text();
|
||||
let $ = cheerio.load(html);
|
||||
expect($('#hasBUCKET').text()).to.equal('true');
|
||||
expect($('#hasPRODBUCKET').text()).to.equal('true');
|
||||
expect($('#hasACCESS').text()).to.equal('true');
|
||||
});
|
||||
|
||||
it('adds KV mocking', async () => {
|
||||
expect(await fixture.pathExists('../.mf/kv')).to.be.true;
|
||||
|
||||
let res = await fixture.fetch('/kv');
|
||||
expect(res.status).to.equal(200);
|
||||
let html = await res.text();
|
||||
let $ = cheerio.load(html);
|
||||
expect($('#hasKV').text()).to.equal('true');
|
||||
expect($('#hasPRODKV').text()).to.equal('true');
|
||||
expect($('#hasACCESS').text()).to.equal('true');
|
||||
});
|
||||
|
||||
it('adds DO mocking', async () => {
|
||||
expect(await fixture.pathExists('../.mf/do')).to.be.true;
|
||||
|
||||
let res = await fixture.fetch('/do');
|
||||
expect(res.status).to.equal(200);
|
||||
let html = await res.text();
|
||||
let $ = cheerio.load(html);
|
||||
expect($('#hasDO').text()).to.equal('true');
|
||||
});
|
||||
});
|
|
@@ -1,36 +0,0 @@
import { loadFixture } from './test-utils.js';
import { expect } from 'chai';
import cloudflare from '../dist/index.js';

/** @type {import('./test-utils').Fixture} */
describe('mode: "directory"', () => {
  /** @type {import('./test-utils').Fixture} */
  let fixture;

  before(async () => {
    fixture = await loadFixture({
      root: './fixtures/basics/',
      output: 'server',
      adapter: cloudflare({ mode: 'directory' }),
      redirects: {
        '/old': '/',
      },
    });
    await fixture.build();
  });

  it('generates functions folder inside the project root', async () => {
    expect(await fixture.pathExists('../functions')).to.be.true;
    expect(await fixture.pathExists('../functions/[[path]].js')).to.be.true;
  });

  it('generates a redirects file', async () => {
    try {
      let _redirects = await fixture.readFile('/_redirects');
      let parts = _redirects.split(/\s+/);
      expect(parts).to.deep.equal(['/old', '/', '301']);
    } catch {
      expect(false).to.equal(true);
    }
  });
});
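Note (not part of the diff): the redirects assertion above splits the generated `_redirects` file on whitespace and expects `['/old', '/', '301']`, so the file the adapter writes would look roughly like the single line below. This is a sketch inferred from the test expectation, not the literal build output.

/old / 301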
@@ -1,10 +0,0 @@
import { defineConfig } from 'astro/config';
import cloudflare from '@astrojs/cloudflare';

// test env var
process.env.SECRET_STUFF = 'secret'

export default defineConfig({
  adapter: cloudflare(),
  output: 'server'
});
@@ -1,9 +0,0 @@
{
  "name": "@test/astro-cloudflare-basics",
  "version": "0.0.0",
  "private": true,
  "dependencies": {
    "@astrojs/cloudflare": "workspace:*",
    "astro": "workspace:*"
  }
}
@@ -1,9 +0,0 @@
<html>
  <head>
    <title>Testing</title>
  </head>
  <body>
    <h1>Testing</h1>
    <div id="env">{import.meta.env.SECRET_STUFF}</div>
  </body>
</html>
@@ -1 +0,0 @@
DATABASE_URL="postgresql://lorem"
@@ -1,9 +0,0 @@
{
  "name": "@test/astro-cloudflare-cf",
  "version": "0.0.0",
  "private": true,
  "dependencies": {
    "@astrojs/cloudflare": "workspace:*",
    "astro": "workspace:*"
  }
}
@@ -1,15 +0,0 @@
---
const runtime = Astro.locals.runtime;
---

<!doctype html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>CACHES</title>
  </head>
  <body>
    <pre id="hasCACHE">{!!runtime.caches}</pre>
  </body>
</html>
@@ -1,21 +0,0 @@
---
const runtime = Astro.locals.runtime;
const db = runtime.env?.D1;
await db.exec("CREATE TABLE IF NOT EXISTS test (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT)");
await db.exec("INSERT INTO test (name) VALUES ('true')");
const result = await db.prepare("SELECT * FROM test").all();
---

<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>D1</title>
  </head>
  <body>
    <pre id="hasDB">{!!runtime.env?.D1}</pre>
    <pre id="hasPRODDB">{!!runtime.env?.D1_PROD}</pre>
    <pre id="hasACCESS">{!!result.results[0].name}</pre>
  </body>
</html>
@@ -1,15 +0,0 @@
---
const runtime = Astro.locals.runtime;
---

<!doctype html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>DO</title>
  </head>
  <body>
    <pre id="hasDO">{!!runtime.env.DO}</pre>
  </body>
</html>
@@ -1,12 +0,0 @@
---
const runtime = Astro.locals.runtime;
---
<html>
  <head>
    <title>Testing</title>
  </head>
  <body>
    <h1>Testing</h1>
    <div id="hasCF">{!!runtime.cf?.colo}</div>
  </body>
</html>
@@ -1,20 +0,0 @@
---
const runtime = Astro.locals.runtime;
const kv = runtime.env?.KV;
await kv.put("test", "true");
const result = await kv.get("test")
---

<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>KV</title>
  </head>
  <body>
    <pre id="hasKV">{!!runtime.env?.KV}</pre>
    <pre id="hasPRODKV">{!!runtime.env?.KV_PROD}</pre>
    <pre id="hasACCESS">{!!result}</pre>
  </body>
</html>
@@ -1,20 +0,0 @@
---
const runtime = Astro.locals.runtime;
const bucket = runtime.env?.R2;
await bucket.put("test", "true");
const result = await (await bucket.get("test")).text()
---

<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>R2</title>
  </head>
  <body>
    <pre id="hasBUCKET">{!!runtime.env?.R2}</pre>
    <pre id="hasPRODBUCKET">{!!runtime.env?.R2_PROD}</pre>
    <pre id="hasACCESS">{!!result}</pre>
  </body>
</html>
@@ -1,37 +0,0 @@
name = "test"

kv_namespaces = [
  { binding = "KV", id = "<YOUR_ID>", preview_id = "<YOUR_ID>" },
  { binding = "KV_PROD", id = "<YOUR_ID>", preview_id = "<YOUR_ID>" }
]

[vars]
COOL = "ME"

[[d1_databases]]
binding = "D1" # Should match preview_database_id, i.e. available in your Worker on env.DB
database_name = "<DATABASE_NAME>"
database_id = "<unique-ID-for-your-database>"
preview_database_id = "D1" # Required for Pages local development

[[d1_databases]]
binding = "D1_PROD" # Should match preview_database_id
database_name = "<DATABASE_NAME>"
database_id = "<unique-ID-for-your-database>"
preview_database_id = "D1_PROD" # Required for Pages local development

[[r2_buckets]]
binding = 'R2' # <~ valid JavaScript variable name
bucket_name = '<YOUR_BUCKET_NAME>'

[[r2_buckets]]
binding = 'R2_PROD' # <~ valid JavaScript variable name
bucket_name = '<YOUR_BUCKET_NAME>'

[[durable_objects.bindings]]
name = "DO"
class_name = "DurableObjectExample"

[[durable_objects.bindings]]
name = "DO_PROD"
class_name = "DurableObjectProductionExample"
@@ -1,15 +0,0 @@
import { defineConfig } from 'astro/config';
import cloudflare from '@astrojs/cloudflare';

export default defineConfig({
  adapter: cloudflare({
    mode: 'directory',
    functionPerRoute: true
  }),
  output: 'server',
  vite: {
    build: {
      minify: false,
    },
  },
});
@@ -1,9 +0,0 @@
{
  "name": "@test/astro-cloudflare-function-per-route",
  "version": "0.0.0",
  "private": true,
  "dependencies": {
    "@astrojs/cloudflare": "workspace:*",
    "astro": "workspace:*"
  }
}
@@ -1,10 +0,0 @@
import { defineMiddleware } from "astro/middleware";

export const onRequest = defineMiddleware(({ locals, request }, next) => {
  // intercept response data from a request
  // optionally, transform the response by modifying `locals`
  locals.title = "New title"

  // return a Response or the result of calling `next()`
  return next()
});
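Note (not part of the diff): the middleware above stores a value on `locals` before calling `next()`. As a rough illustration of how such a value could be read downstream, a page's frontmatter can access it through `Astro.locals`; the snippet below is a hypothetical sketch, not a file from this fixture.

---
// hypothetical page: reads the value the middleware stored on locals
const { title } = Astro.locals;
---
<h1>{title}</h1>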
@@ -1,37 +0,0 @@
---
const files = [
  {
    slug: undefined,
    title: 'Root level',
  },
  {
    slug: 'test.png',
    title: "One level"
  },
  {
    slug: 'assets/test.png',
    title: "Two levels"
  },
  {
    slug: 'assets/images/test.png',
    title: 'Three levels',
  }
];

const { path } = Astro.params;
const page = files.find((page) => page.slug === path);
const { title } = page;

---
<html>
  <body>
    <h1>Files / Rest Parameters / {title}</h1>
    <p>DEBUG: {path} </p>
    <p><a href="/">index</a></p>
  </body>
  <style>
    h1 {
      background-color: yellow;
    }
  </style>
</html>
@@ -1,14 +0,0 @@
---
const { person, car } = Astro.params;
---
<html>
  <body>
    <h1> {person} / {car}</h1>
    <p><a href="/">index</a></p>
  </body>
  <style>
    h1 {
      background-color: blue;
    }
  </style>
</html>
@@ -1,14 +0,0 @@
---
const { post } = Astro.params;
---
<html>
  <body>
    <h1>Blog / {post}</h1>
    <p><a href="/">index</a></p>
  </body>
  <style>
    h1 {
      background-color: pink;
    }
  </style>
</html>
@@ -1,11 +0,0 @@
<html>
  <body>
    <h1>Blog / Cool</h1>
    <p><a href="/">index</a></p>
  </body>
  <style>
    h1 {
      background-color: orange;
    }
  </style>
</html>
Some files were not shown because too many files have changed in this diff.