Merge remote-tracking branch 'origin/canary' into xp/04-24-refactor_electron_nestjsfy

Peng Xiao 2025-06-24 11:50:48 +08:00
commit 8f3035d7a3
No known key found for this signature in database
GPG Key ID: 21D49B4CA2B82E60
427 changed files with 11915 additions and 10370 deletions

View File

@ -1,7 +1,7 @@
name: affine
services:
affine:
image: ghcr.io/toeverything/affine-graphql:${AFFINE_REVISION:-stable}
image: ghcr.io/toeverything/affine:${AFFINE_REVISION:-stable}
container_name: affine_server
ports:
- '${PORT:-3010}:3010'
@ -25,7 +25,7 @@ services:
restart: unless-stopped
affine_migration:
image: ghcr.io/toeverything/affine-graphql:${AFFINE_REVISION:-stable}
image: ghcr.io/toeverything/affine:${AFFINE_REVISION:-stable}
container_name: affine_migration_job
volumes:
# custom configurations

View File

@ -864,22 +864,6 @@
}
}
},
"customerIo": {
"type": "object",
"description": "Configuration for customerIo module",
"properties": {
"enabled": {
"type": "boolean",
"description": "Enable customer.io integration\n@default false",
"default": false
},
"token": {
"type": "string",
"description": "Customer.io token\n@default \"\"",
"default": ""
}
}
},
"indexer": {
"type": "object",
"description": "Configuration for indexer module",
@ -921,6 +905,22 @@
}
}
},
"customerIo": {
"type": "object",
"description": "Configuration for customerIo module",
"properties": {
"enabled": {
"type": "boolean",
"description": "Enable customer.io integration\n@default false",
"default": false
},
"token": {
"type": "string",
"description": "Customer.io token\n@default \"\"",
"default": ""
}
}
},
"oauth": {
"type": "object",
"description": "Configuration for oauth module",

View File

@ -1,9 +1,6 @@
name: 'Deploy to Cluster'
description: 'Deploy AFFiNE Cloud to cluster'
inputs:
build-type:
description: 'Align with App build type, canary|beta|stable|internal'
default: 'canary'
gcp-project-number:
description: 'GCP project number'
required: true
@ -36,5 +33,3 @@ runs:
- name: Deploy
shell: bash
run: node ./.github/actions/deploy/deploy.mjs
env:
BUILD_TYPE: '${{ inputs.build-type }}'

View File

@ -0,0 +1,42 @@
name: Prepare Release
description: 'Prepare Release'
outputs:
APP_VERSION:
description: 'App Version'
value: ${{ steps.get-version.outputs.APP_VERSION }}
GIT_SHORT_HASH:
description: 'Git Short Hash'
value: ${{ steps.get-version.outputs.GIT_SHORT_HASH }}
BUILD_TYPE:
description: 'Build Type'
value: ${{ steps.get-version.outputs.BUILD_TYPE }}
runs:
using: 'composite'
steps:
- name: Get Version
id: get-version
shell: bash
run: |
GIT_SHORT_HASH=$(git rev-parse --short HEAD)
if [ "${{ github.ref_type }}" == "tag" ]; then
APP_VERSION=$(echo "${{ github.ref_name }}" | sed 's/^v//')
else
PACKAGE_VERSION=$(node -p "require('./package.json').version")
APP_VERSION=$PACKAGE_VERSION-canary.$GIT_SHORT_HASH
fi
if [[ "$APP_VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
BUILD_TYPE=stable
elif [[ "$APP_VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+-beta\.[0-9]+$ ]]; then
BUILD_TYPE=beta
elif [[ "$APP_VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+-canary\.[0-9a-f]+$ ]]; then
BUILD_TYPE=canary
else
echo "Error: unsupported version string: $APP_VERSION" >&2
exit 1
fi
echo $APP_VERSION
echo $GIT_SHORT_HASH
echo $BUILD_TYPE
echo "APP_VERSION=$APP_VERSION" >> "$GITHUB_OUTPUT"
echo "GIT_SHORT_HASH=$GIT_SHORT_HASH" >> "$GITHUB_OUTPUT"
echo "BUILD_TYPE=$BUILD_TYPE" >> "$GITHUB_OUTPUT"

View File

@ -1,24 +1,12 @@
name: Setup Version
description: 'Setup Version'
outputs:
APP_VERSION:
inputs:
app-version:
description: 'App Version'
value: ${{ steps.version.outputs.APP_VERSION }}
required: true
runs:
using: 'composite'
steps:
- name: 'Write Version'
id: version
shell: bash
run: |
if [ "${{ github.ref_type }}" == "tag" ]; then
APP_VERSION=$(echo "${{ github.ref_name }}" | sed 's/^v//')
else
PACKAGE_VERSION=$(node -p "require('./package.json').version")
TIME_VERSION=$(date +%Y%m%d%H%M)
GIT_SHORT_HASH=$(git rev-parse --short HEAD)
APP_VERSION=$PACKAGE_VERSION-nightly-$GIT_SHORT_HASH
fi
echo $APP_VERSION
echo "APP_VERSION=$APP_VERSION" >> "$GITHUB_OUTPUT"
./scripts/set-version.sh $APP_VERSION
run: ./scripts/set-version.sh ${{ inputs.app-version }}

View File

@ -1,6 +1,6 @@
replicaCount: 1
image:
repository: ghcr.io/toeverything/affine-graphql
repository: ghcr.io/toeverything/affine
pullPolicy: IfNotPresent
tag: ''

View File

@ -1,6 +1,6 @@
replicaCount: 1
image:
repository: ghcr.io/toeverything/affine-graphql
repository: ghcr.io/toeverything/affine
pullPolicy: IfNotPresent
tag: ''

View File

@ -1,6 +1,6 @@
replicaCount: 1
image:
repository: ghcr.io/toeverything/affine-graphql
repository: ghcr.io/toeverything/affine
pullPolicy: IfNotPresent
tag: ''

View File

@ -1,6 +1,6 @@
replicaCount: 1
image:
repository: ghcr.io/toeverything/affine-graphql
repository: ghcr.io/toeverything/affine
pullPolicy: IfNotPresent
tag: ''

View File

@ -3,7 +3,13 @@ name: Build Images
on:
workflow_call:
inputs:
flavor:
build-type:
type: string
required: true
app-version:
type: string
required: true
git-short-hash:
type: string
required: true
@ -16,12 +22,13 @@ jobs:
build-web:
name: Build @affine/web
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
environment: ${{ inputs.build-type }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Core
@ -30,11 +37,11 @@ jobs:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
BUILD_TYPE: ${{ github.event.inputs.flavor }}
BUILD_TYPE: ${{ inputs.build-type }}
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: 'affine-web'
SENTRY_RELEASE: ${{ steps.version.outputs.APP_VERSION }}
SENTRY_RELEASE: ${{ inputs.app-version }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
PERFSEE_TOKEN: ${{ secrets.PERFSEE_TOKEN }}
@ -49,12 +56,13 @@ jobs:
build-admin:
name: Build @affine/admin
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
environment: ${{ inputs.build-type }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Admin
@ -63,7 +71,7 @@ jobs:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
BUILD_TYPE: ${{ github.event.inputs.flavor }}
BUILD_TYPE: ${{ inputs.build-type }}
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: 'affine-admin'
@ -81,12 +89,13 @@ jobs:
build-mobile:
name: Build @affine/mobile
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
environment: ${{ inputs.build-type }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Mobile
@ -95,7 +104,7 @@ jobs:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
BUILD_TYPE: ${{ github.event.inputs.flavor }}
BUILD_TYPE: ${{ inputs.build-type }}
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: 'affine-mobile'
@ -113,7 +122,7 @@ jobs:
build-server-native:
name: Build Server native - ${{ matrix.targets.name }}
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
environment: ${{ inputs.build-type }}
strategy:
fail-fast: false
matrix:
@ -128,8 +137,9 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@ -161,8 +171,9 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@ -202,16 +213,6 @@ jobs:
with:
name: server-dist
path: ./packages/backend/server/dist
- name: Setup env
run: |
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
if [ -z "${{ inputs.flavor }}" ]
then
echo "RELEASE_FLAVOR=canary" >> "$GITHUB_ENV"
else
echo "RELEASE_FLAVOR=${{ inputs.flavor }}" >> "$GITHUB_ENV"
fi
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
@ -263,8 +264,9 @@ jobs:
run: mv ./node_modules ./packages/backend/server
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
- name: Build front Dockerfile
uses: docker/build-push-action@v6
@ -275,7 +277,7 @@ jobs:
platforms: linux/amd64,linux/arm64
provenance: true
file: .github/deployment/front/Dockerfile
tags: ghcr.io/toeverything/affine-front:${{env.RELEASE_FLAVOR}}-${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-front:${{env.RELEASE_FLAVOR}}
tags: ghcr.io/toeverything/affine-front:${{inputs.build-type}}-${{ inputs.git-short-hash }}
- name: Build graphql Dockerfile
uses: docker/build-push-action@v6
@ -286,4 +288,4 @@ jobs:
platforms: linux/amd64,linux/arm64,linux/arm/v7
provenance: true
file: .github/deployment/node/Dockerfile
tags: ghcr.io/toeverything/affine-graphql:${{env.RELEASE_FLAVOR}}-${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-graphql:${{env.RELEASE_FLAVOR}}
tags: ghcr.io/toeverything/affine:${{inputs.build-type}}-${{ inputs.git-short-hash }}

View File

@ -1,25 +0,0 @@
name: Build Selfhost Image
on:
workflow_dispatch:
inputs:
flavor:
description: 'Select distribution to build'
type: choice
default: canary
options:
- canary
- beta
- stable
permissions:
contents: 'write'
id-token: 'write'
packages: 'write'
jobs:
build-image:
name: Build Image
uses: ./.github/workflows/build-images.yml
with:
flavor: ${{ github.event.inputs.flavor }}

View File

@ -11,6 +11,7 @@ on:
paths-ignore:
- README.md
pull_request:
merge_group:
env:
DEBUG: napi:*
@ -734,7 +735,9 @@ jobs:
toolchain: nightly
components: miri
- name: Install latest nextest release
uses: taiki-e/install-action@nextest
uses: taiki-e/install-action@v2
with:
tool: nextest@0.9.98
- name: Miri Code Check
continue-on-error: true
@ -756,7 +759,9 @@ jobs:
with:
toolchain: stable
- name: Install latest nextest release
uses: taiki-e/install-action@nextest
uses: taiki-e/install-action@v2
with:
tool: nextest@0.9.98
- name: Loom Thread Test
run: |
@ -855,7 +860,9 @@ jobs:
no-build: 'true'
- name: Install latest nextest release
uses: taiki-e/install-action@nextest
uses: taiki-e/install-action@v2
with:
tool: nextest@0.9.98
- name: Run tests
run: cargo nextest run --workspace --exclude affine_server_native --features use-as-lib --release --no-fail-fast
@ -1352,15 +1359,6 @@ jobs:
run: |
yarn affine @affine/electron node ./scripts/macos-arm64-output-check.ts
test-build-mobile-app:
uses: ./.github/workflows/release-mobile.yml
with:
build-type: canary
build-target: development
secrets: inherit
permissions:
id-token: 'write'
test-done:
needs:
- analyze
@ -1391,7 +1389,6 @@ jobs:
- desktop-test
- desktop-bundle-check
- cloud-e2e-test
- test-build-mobile-app
if: always()
runs-on: ubuntu-latest
name: 3, 2, 1 Launch

View File

@ -1,32 +0,0 @@
name: Deploy Automatically
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-canary.[0-9]+'
schedule:
- cron: '0 9 * * *'
permissions:
contents: write
pull-requests: write
actions: write
jobs:
dispatch-deploy:
runs-on: ubuntu-latest
name: Setup Deploy
steps:
- name: dispatch deploy by tag
if: ${{ github.event_name == 'push' }}
uses: benc-uk/workflow-dispatch@v1
with:
workflow: deploy.yml
inputs: '{ "flavor": "canary" }'
- name: dispatch deploy by schedule
if: ${{ github.event_name == 'schedule' }}
uses: benc-uk/workflow-dispatch@v1
with:
workflow: deploy.yml
inputs: '{ "flavor": "canary" }'
ref: canary

View File

@ -1,189 +0,0 @@
name: Deploy
on:
workflow_dispatch:
inputs:
flavor:
description: 'Select which environment to deploy to'
type: choice
default: canary
options:
- canary
- beta
- stable
- internal
permissions:
contents: 'write'
id-token: 'write'
packages: 'write'
jobs:
output-prev-version:
name: Output previous version
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
outputs:
prev: ${{ steps.print.outputs.version }}
namespace: ${{ steps.print.outputs.namespace }}
steps:
- uses: actions/checkout@v4
- name: Auth to Cluster
uses: './.github/actions/cluster-auth'
with:
gcp-project-number: ${{ secrets.GCP_PROJECT_NUMBER }}
gcp-project-id: ${{ secrets.GCP_PROJECT_ID }}
service-account: ${{ secrets.GCP_HELM_DEPLOY_SERVICE_ACCOUNT }}
cluster-name: ${{ secrets.GCP_CLUSTER_NAME }}
cluster-location: ${{ secrets.GCP_CLUSTER_LOCATION }}
- name: Output previous version
id: print
run: |
namespace=""
if [ "${{ github.event.inputs.flavor }}" = "canary" ]; then
namespace="dev"
elif [ "${{ github.event.inputs.flavor }}" = "beta" ]; then
namespace="beta"
elif [ "${{ github.event.inputs.flavor }}" = "stable" ]; then
namespace="production"
else
echo "Invalid flavor: ${{ github.event.inputs.flavor }}"
exit 1
fi
echo "Namespace set to: $namespace"
# Get the previous version from the deployment
prev_version=$(kubectl get deployment -n $namespace affine-graphql -o=jsonpath='{.spec.template.spec.containers[0].image}' | awk -F '-' '{print $3}')
echo "Previous version: $prev_version"
echo "version=$prev_version" >> $GITHUB_OUTPUT
echo "namesapce=$namespace" >> $GITHUB_OUTPUT
build-images:
name: Build Images
uses: ./.github/workflows/build-images.yml
secrets: inherit
with:
flavor: ${{ github.event.inputs.flavor }}
deploy:
name: Deploy to cluster
if: ${{ github.event_name == 'workflow_dispatch' }}
environment: ${{ github.event.inputs.flavor }}
needs:
- build-images
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Deploy to ${{ github.event.inputs.flavor }}
uses: ./.github/actions/deploy
with:
build-type: ${{ github.event.inputs.flavor }}
gcp-project-number: ${{ secrets.GCP_PROJECT_NUMBER }}
gcp-project-id: ${{ secrets.GCP_PROJECT_ID }}
service-account: ${{ secrets.GCP_HELM_DEPLOY_SERVICE_ACCOUNT }}
cluster-name: ${{ secrets.GCP_CLUSTER_NAME }}
cluster-location: ${{ secrets.GCP_CLUSTER_LOCATION }}
env:
APP_VERSION: ${{ steps.version.outputs.APP_VERSION }}
DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
CANARY_DEPLOY_HOST: ${{ secrets.CANARY_DEPLOY_HOST }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DATABASE_URL: ${{ secrets.DATABASE_URL }}
DATABASE_USERNAME: ${{ secrets.DATABASE_USERNAME }}
DATABASE_PASSWORD: ${{ secrets.DATABASE_PASSWORD }}
DATABASE_NAME: ${{ secrets.DATABASE_NAME }}
GCLOUD_CONNECTION_NAME: ${{ secrets.GCLOUD_CONNECTION_NAME }}
REDIS_SERVER_HOST: ${{ secrets.REDIS_SERVER_HOST }}
REDIS_SERVER_PASSWORD: ${{ secrets.REDIS_SERVER_PASSWORD }}
CLOUD_SQL_IAM_ACCOUNT: ${{ secrets.CLOUD_SQL_IAM_ACCOUNT }}
APP_IAM_ACCOUNT: ${{ secrets.APP_IAM_ACCOUNT }}
STATIC_IP_NAME: ${{ secrets.STATIC_IP_NAME }}
AFFINE_INDEXER_SEARCH_PROVIDER: ${{ secrets.AFFINE_INDEXER_SEARCH_PROVIDER }}
AFFINE_INDEXER_SEARCH_ENDPOINT: ${{ secrets.AFFINE_INDEXER_SEARCH_ENDPOINT }}
AFFINE_INDEXER_SEARCH_API_KEY: ${{ secrets.AFFINE_INDEXER_SEARCH_API_KEY }}
deploy-done:
needs:
- output-prev-version
- build-images
- deploy
if: always()
runs-on: ubuntu-latest
name: Post deploy message
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/checkout@v4
with:
repository: toeverything/blocksuite
path: blocksuite
fetch-depth: 0
fetch-tags: true
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: 'workspaces focus @affine/changelog'
electron-install: false
- name: Output deployed info
if: ${{ always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') }}
id: set_info
run: |
if [ "${{ github.event.inputs.flavor }}" = "canary" ]; then
echo "deployed_url=https://affine.fail" >> $GITHUB_OUTPUT
elif [ "${{ github.event.inputs.flavor }}" = "beta" ]; then
echo "deployed_url=https://insider.affine.pro" >> $GITHUB_OUTPUT
elif [ "${{ github.event.inputs.flavor }}" = "stable" ]; then
echo "deployed_url=https://app.affine.pro" >> $GITHUB_OUTPUT
else
exit 1
fi
env:
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
- name: Post Success event to a Slack channel
if: ${{ always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') }}
run: node ./tools/changelog/index.js
env:
CHANNEL_ID: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
DEPLOYED_URL: ${{ steps.set_info.outputs.deployed_url }}
PREV_VERSION: ${{ needs.output-prev-version.outputs.prev }}
NAMESPACE: ${{ needs.output-prev-version.outputs.namespace }}
DEPLOYMENT: 'SERVER'
FLAVOR: ${{ github.event.inputs.flavor }}
BLOCKSUITE_REPO_PATH: ${{ github.workspace }}/blocksuite
- name: Post Failed event to a Slack channel
id: failed-slack
uses: slackapi/slack-github-action@v2.1.0
if: ${{ always() && contains(needs.*.result, 'failure') }}
with:
method: chat.postMessage
token: ${{ secrets.SLACK_BOT_TOKEN }}
payload: |
channel: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
text: "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Backend deploy failed `${{ github.event.inputs.flavor }}`>"
blocks:
- type: section
text:
type: mrkdwn
text: "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Backend deploy failed `${{ github.event.inputs.flavor }}`>"
- name: Post Cancel event to a Slack channel
id: cancel-slack
uses: slackapi/slack-github-action@v2.1.0
if: ${{ always() && contains(needs.*.result, 'cancelled') && !contains(needs.*.result, 'failure') }}
with:
token: ${{ secrets.SLACK_BOT_TOKEN }}
method: chat.postMessage
payload: |
channel: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
text: "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Backend deploy cancelled `${{ github.event.inputs.flavor }}`>"
blocks:
- type: section
text:
type: mrkdwn
text: "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Backend deploy cancelled `${{ github.event.inputs.flavor }}`>"

View File

@ -1,66 +0,0 @@
name: Release Charts
on:
push:
branches: [canary]
paths:
- '.github/helm/**/Chart.yml'
jobs:
release:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Checkout Helm chart repo
uses: actions/checkout@v4
with:
repository: toeverything/helm-charts
path: .helm-chart-repo
ref: gh-pages
token: ${{ secrets.HELM_RELEASER_TOKEN }}
- name: Install Helm
uses: azure/setup-helm@v4
- name: Install chart releaser
run: |
set -e
arch="$(dpkg --print-architecture)"
curl -s https://api.github.com/repos/helm/chart-releaser/releases/latest \
| yq --indent 0 --no-colors --input-format json --unwrapScalar \
".assets[] | select(.name | test("\""^chart-releaser_.+_linux_${arch}\.tar\.gz$"\"")) | .browser_download_url" \
| xargs curl -SsL \
| tar zxf - -C /usr/local/bin
- name: Package charts
working-directory: .helm-chart-repo
run: |
mkdir -p .cr-index
helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo update
helm dependencies build ../.github/helm/affine
helm dependencies build ../.github/helm/affine-cloud
cr package ../.github/helm/affine
cr package ../.github/helm/affine-cloud
- name: Publish charts
working-directory: .helm-chart-repo
run: |
set -ex
git config --local user.name "$GITHUB_ACTOR"
git config --local user.email "$GITHUB_ACTOR@users.noreply.github.com"
owner=$(cut -d '/' -f 1 <<< '${{ github.repository }}')
repo=helm-charts
git_hash=$(git rev-parse HEAD)
cr upload --commit "$git_hash" \
--git-repo "$repo" --owner "$owner" \
--token '${{ secrets.HELM_RELEASER_TOKEN }}' \
--skip-existing
cr index --git-repo "$repo" --owner "$owner" \
--token '${{ secrets.HELM_RELEASER_TOKEN }}' \
--index-path .cr-index --push

View File

@ -1,19 +0,0 @@
name: Label Checker
on:
pull_request:
types:
- opened
- labeled
- unlabeled
branches:
- canary
jobs:
check_labels:
name: PR should not have a blocked label
runs-on: ubuntu-latest
steps:
- uses: docker://agilepathway/pull-request-label-checker:latest
with:
none_of: blocked
repo_token: ${{ secrets.GITHUB_TOKEN }}

View File

@ -1,12 +0,0 @@
name: Pull request auto assign
# on: pull_request
on:
pull_request:
types: [opened, ready_for_review]
jobs:
add-reviews:
runs-on: ubuntu-latest
steps:
- uses: kentaro-m/auto-assign-action@v2.0.0

View File

@ -1,38 +0,0 @@
name: Release Desktop/Mobile Automatically
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-canary.[0-9]+'
schedule:
- cron: '0 9 * * *'
permissions:
contents: write
pull-requests: write
actions: write
jobs:
dispatch-release-desktop:
runs-on: ubuntu-latest
name: Setup Release Desktop
steps:
- name: dispatch desktop release by tag
if: ${{ github.event_name == 'push' }}
uses: benc-uk/workflow-dispatch@v1
with:
workflow: release-desktop.yml
inputs: '{ "build-type": "canary", "is-draft": false, "is-pre-release": true }'
- name: dispatch desktop release by schedule
if: ${{ github.event_name == 'schedule' }}
uses: benc-uk/workflow-dispatch@v1
with:
workflow: release-desktop.yml
inputs: '{ "build-type": "canary", "is-draft": false, "is-pre-release": true }'
ref: canary
- name: dispatch desktop release by tag
uses: benc-uk/workflow-dispatch@v1
with:
workflow: release-mobile.yml
inputs: '{ "build-type": "canary", "build-target": "distribution" }'

.github/workflows/release-cloud.yml
View File

@ -0,0 +1,66 @@
name: Release Cloud
on:
workflow_call:
inputs:
build-type:
required: true
type: string
app-version:
required: true
type: string
git-short-hash:
required: true
type: string
permissions:
contents: 'write'
id-token: 'write'
packages: 'write'
jobs:
build-images:
name: Build Images
uses: ./.github/workflows/build-images.yml
secrets: inherit
with:
build-type: ${{ inputs.build-type }}
app-version: ${{ inputs.app-version }}
git-short-hash: ${{ inputs.git-short-hash }}
deploy:
name: Deploy to cluster
environment: ${{ inputs.build-type }}
needs:
- build-images
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Deploy to ${{ inputs.build-type }}
uses: ./.github/actions/deploy
with:
gcp-project-number: ${{ secrets.GCP_PROJECT_NUMBER }}
gcp-project-id: ${{ secrets.GCP_PROJECT_ID }}
service-account: ${{ secrets.GCP_HELM_DEPLOY_SERVICE_ACCOUNT }}
cluster-name: ${{ secrets.GCP_CLUSTER_NAME }}
cluster-location: ${{ secrets.GCP_CLUSTER_LOCATION }}
env:
BUILD_TYPE: ${{ inputs.build-type }}
APP_VERSION: ${{ inputs.app-version }}
GIT_SHORT_HASH: ${{ inputs.git-short-hash }}
DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
CANARY_DEPLOY_HOST: ${{ secrets.CANARY_DEPLOY_HOST }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DATABASE_URL: ${{ secrets.DATABASE_URL }}
DATABASE_USERNAME: ${{ secrets.DATABASE_USERNAME }}
DATABASE_PASSWORD: ${{ secrets.DATABASE_PASSWORD }}
DATABASE_NAME: ${{ secrets.DATABASE_NAME }}
GCLOUD_CONNECTION_NAME: ${{ secrets.GCLOUD_CONNECTION_NAME }}
REDIS_SERVER_HOST: ${{ secrets.REDIS_SERVER_HOST }}
REDIS_SERVER_PASSWORD: ${{ secrets.REDIS_SERVER_PASSWORD }}
CLOUD_SQL_IAM_ACCOUNT: ${{ secrets.CLOUD_SQL_IAM_ACCOUNT }}
APP_IAM_ACCOUNT: ${{ secrets.APP_IAM_ACCOUNT }}
STATIC_IP_NAME: ${{ secrets.STATIC_IP_NAME }}
AFFINE_INDEXER_SEARCH_PROVIDER: ${{ secrets.AFFINE_INDEXER_SEARCH_PROVIDER }}
AFFINE_INDEXER_SEARCH_ENDPOINT: ${{ secrets.AFFINE_INDEXER_SEARCH_ENDPOINT }}
AFFINE_INDEXER_SEARCH_API_KEY: ${{ secrets.AFFINE_INDEXER_SEARCH_API_KEY }}

View File

@ -1,27 +1,17 @@
name: Release Desktop App
name: Release Desktop
on:
workflow_dispatch:
workflow_call:
inputs:
build-type:
description: 'Build Type'
type: choice
required: true
default: canary
options:
- canary
- beta
- stable
is-draft:
description: 'Draft Release?'
type: boolean
type: string
app-version:
required: true
default: true
is-pre-release:
description: 'Pre Release? (labeled as "PreRelease")'
type: boolean
type: string
git-short-hash:
required: true
default: true
type: string
permissions:
actions: write
@ -31,7 +21,8 @@ permissions:
attestations: write
env:
BUILD_TYPE: ${{ github.event.inputs.build-type }}
BUILD_TYPE: ${{ inputs.build-type }}
RELEASE_VERSION: ${{ inputs.app-version }}
DEBUG: 'affine:*,napi:*'
APP_NAME: affine
MACOSX_DEPLOYMENT_TARGET: '10.13'
@ -39,14 +30,13 @@ env:
jobs:
before-make:
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.build-type }}
outputs:
RELEASE_VERSION: ${{ steps.version.outputs.APP_VERSION }}
environment: ${{ inputs.build-type }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Setup @sentry/cli
@ -58,14 +48,14 @@ jobs:
SENTRY_PROJECT: 'affine'
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
SENTRY_RELEASE: ${{ steps.version.outputs.APP_VERSION }}
RELEASE_VERSION: ${{ steps.version.outputs.APP_VERSION }}
SENTRY_RELEASE: ${{ inputs.app-version }}
RELEASE_VERSION: ${{ inputs.app-version }}
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
- name: Upload web artifact
uses: actions/upload-artifact@v4
with:
name: web
name: desktop-web
path: packages/frontend/apps/electron/resources/web-static
make-distribution:
@ -87,7 +77,7 @@ jobs:
target: x86_64-unknown-linux-gnu
runs-on: ${{ matrix.spec.runner }}
needs: before-make
environment: ${{ github.event.inputs.build-type }}
environment: ${{ inputs.build-type }}
env:
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
@ -97,13 +87,14 @@ jobs:
SENTRY_PROJECT: 'affine'
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
SENTRY_RELEASE: ${{ needs.before-make.outputs.RELEASE_VERSION }}
SENTRY_RELEASE: ${{ inputs.app-version }}
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
- name: Setup Node.js
timeout-minutes: 10
uses: ./.github/actions/setup-node
@ -119,7 +110,7 @@ jobs:
package: '@affine/native'
- uses: actions/download-artifact@v4
with:
name: web
name: desktop-web
path: packages/frontend/apps/electron/resources/web-static
- name: Build Desktop Layers
@ -165,31 +156,31 @@ jobs:
if: ${{ matrix.spec.platform == 'darwin' }}
run: |
mkdir -p builds
mv packages/frontend/apps/electron/out/*/make/*.dmg ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
mv packages/frontend/apps/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
mv packages/frontend/apps/electron/out/*/make/*.dmg ./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
mv packages/frontend/apps/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
- name: Save artifacts (linux)
if: ${{ matrix.spec.platform == 'linux' }}
run: |
mkdir -p builds
mv packages/frontend/apps/electron/out/*/make/zip/linux/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-${{ matrix.spec.arch }}.zip
mv packages/frontend/apps/electron/out/*/make/*.AppImage ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-${{ matrix.spec.arch }}.appimage
mv packages/frontend/apps/electron/out/*/make/deb/${{ matrix.spec.arch }}/*.deb ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-${{ matrix.spec.arch }}.deb
mv packages/frontend/apps/electron/out/*/make/flatpak/*/*.flatpak ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-${{ matrix.spec.arch }}.flatpak
mv packages/frontend/apps/electron/out/*/make/zip/linux/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-${{ matrix.spec.arch }}.zip
mv packages/frontend/apps/electron/out/*/make/*.AppImage ./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-${{ matrix.spec.arch }}.appimage
mv packages/frontend/apps/electron/out/*/make/deb/${{ matrix.spec.arch }}/*.deb ./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-${{ matrix.spec.arch }}.deb
mv packages/frontend/apps/electron/out/*/make/flatpak/*/*.flatpak ./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-${{ matrix.spec.arch }}.flatpak
- uses: actions/attest-build-provenance@v2
if: ${{ matrix.spec.platform == 'darwin' }}
with:
subject-path: |
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
- uses: actions/attest-build-provenance@v2
if: ${{ matrix.spec.platform == 'linux' }}
with:
subject-path: |
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.zip
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.appimage
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.deb
./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.zip
./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.appimage
./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.deb
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
@ -197,7 +188,7 @@ jobs:
path: builds
package-distribution-windows:
environment: ${{ github.event.inputs.build-type }}
environment: ${{ inputs.build-type }}
strategy:
fail-fast: false
matrix:
@ -221,13 +212,14 @@ jobs:
SENTRY_PROJECT: 'affine'
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
SENTRY_RELEASE: ${{ needs.before-make.outputs.RELEASE_VERSION }}
SENTRY_RELEASE: ${{ inputs.app-version }}
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
- name: Setup Node.js
timeout-minutes: 10
uses: ./.github/actions/setup-node
@ -242,7 +234,7 @@ jobs:
package: '@affine/native'
- uses: actions/download-artifact@v4
with:
name: web
name: desktop-web
path: packages/frontend/apps/electron/resources/web-static
- name: Build Desktop Layers
@ -314,8 +306,9 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
- name: Setup Node.js
timeout-minutes: 10
uses: ./.github/actions/setup-node
@ -399,16 +392,16 @@ jobs:
- name: Save artifacts
run: |
mkdir -p builds
mv packages/frontend/apps/electron/out/*/make/zip/win32/${{ matrix.spec.arch }}/AFFiNE*-win32-${{ matrix.spec.arch }}-*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.zip
mv packages/frontend/apps/electron/out/*/make/squirrel.windows/${{ matrix.spec.arch }}/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.exe
mv packages/frontend/apps/electron/out/*/make/nsis.windows/${{ matrix.spec.arch }}/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.nsis.exe
mv packages/frontend/apps/electron/out/*/make/zip/win32/${{ matrix.spec.arch }}/AFFiNE*-win32-${{ matrix.spec.arch }}-*.zip ./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.zip
mv packages/frontend/apps/electron/out/*/make/squirrel.windows/${{ matrix.spec.arch }}/*.exe ./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.exe
mv packages/frontend/apps/electron/out/*/make/nsis.windows/${{ matrix.spec.arch }}/*.exe ./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.nsis.exe
- uses: actions/attest-build-provenance@v2
with:
subject-path: |
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.zip
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.exe
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.nsis.exe
./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.zip
./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.exe
./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.nsis.exe
- name: Upload Artifact
uses: actions/upload-artifact@v4
@ -424,7 +417,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
name: web
name: desktop-web
path: web-static
- name: Zip web-static
run: zip -r web-static.zip web-static
@ -465,32 +458,14 @@ jobs:
run: |
node ./scripts/generate-release-yml.mjs
env:
RELEASE_VERSION: ${{ needs.before-make.outputs.RELEASE_VERSION }}
- name: Create Release Draft
if: ${{ github.ref_type == 'tag' }}
RELEASE_VERSION: ${{ env.RELEASE_VERSION }}
- name: Create GitHub Release
uses: softprops/action-gh-release@v2
with:
name: ${{ needs.before-make.outputs.RELEASE_VERSION }}
body: ''
draft: ${{ github.event.inputs.is-draft }}
prerelease: ${{ github.event.inputs.is-pre-release }}
files: |
./release/*
./release/.env.example
- name: Create Nightly Release Draft
if: ${{ github.ref_type == 'branch' }}
uses: softprops/action-gh-release@v2
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
with:
# Temporarily treat releases from a branch as nightly releases; artifacts are saved to AFFiNE-Releases.
# Need to improve internal build and nightly release logic.
repository: 'toeverything/AFFiNE-Releases'
name: ${{ needs.before-make.outputs.RELEASE_VERSION }}
tag_name: ${{ needs.before-make.outputs.RELEASE_VERSION }}
body: ''
draft: false
prerelease: true
name: ${{ env.RELEASE_VERSION }}
draft: ${{ inputs.build-type == 'stable' }}
prerelease: ${{ inputs.build-type != 'stable' }}
tag_name: ${{ env.RELEASE_VERSION }}
files: |
./release/*
./release/.env.example

View File

@ -1,68 +1,33 @@
name: Release Mobile App
name: Release Mobile
on:
workflow_call:
inputs:
build-target:
description: 'Build Target'
app-version:
type: string
required: true
git-short-hash:
type: string
required: true
build-type:
description: 'Build Type'
type: string
required: true
workflow_dispatch:
inputs:
build-target:
description: 'Build Target'
type: choice
required: true
default: distribution
options:
- development
- distribution
build-type:
description: 'Build Type'
type: choice
required: true
default: canary
options:
- canary
- beta
- stable
env:
BUILD_TYPE: ${{ inputs.build-type || github.event.inputs.build-type }}
BUILD_TARGET: ${{ inputs.build-target || github.event.inputs.build-target }}
BUILD_TYPE: ${{ inputs.build-type }}
DEBUG: napi:*
KEYCHAIN_NAME: ${{ github.workspace }}/signing_temp
jobs:
output-env:
runs-on: ubuntu-latest
outputs:
ENVIRONMENT: ${{ steps.env.outputs.ENVIRONMENT }}
steps:
- name: Output Environment
id: env
run: |
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
echo "ENVIRONMENT=${{ github.event.inputs.build-type }}" >> $GITHUB_OUTPUT
else
echo "ENVIRONMENT=" >> $GITHUB_OUTPUT
fi
build-ios-web:
needs:
- output-env
runs-on: ubuntu-24.04-arm
environment: ${{ needs.output-env.outputs.ENVIRONMENT }}
outputs:
RELEASE_VERSION: ${{ steps.version.outputs.APP_VERSION }}
runs-on: ubuntu-latest
environment: ${{ inputs.build-type }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Setup @sentry/cli
@ -76,8 +41,8 @@ jobs:
SENTRY_PROJECT: 'affine'
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
SENTRY_RELEASE: ${{ steps.version.outputs.APP_VERSION }}
RELEASE_VERSION: ${{ steps.version.outputs.APP_VERSION }}
SENTRY_RELEASE: ${{ inputs.app-version }}
RELEASE_VERSION: ${{ inputs.app-version }}
- name: Upload ios artifact
uses: actions/upload-artifact@v4
with:
@ -85,17 +50,13 @@ jobs:
path: packages/frontend/apps/ios/dist
build-android-web:
runs-on: ubuntu-24.04-arm
needs:
- output-env
environment: ${{ needs.output-env.outputs.ENVIRONMENT }}
outputs:
RELEASE_VERSION: ${{ steps.version.outputs.APP_VERSION }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Setup @sentry/cli
@ -109,8 +70,7 @@ jobs:
SENTRY_PROJECT: 'affine'
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
SENTRY_RELEASE: ${{ steps.version.outputs.APP_VERSION }}
RELEASE_VERSION: ${{ steps.version.outputs.APP_VERSION }}
SENTRY_RELEASE: ${{ inputs.app-version }}
- name: Upload android artifact
uses: actions/upload-artifact@v4
with:
@ -180,8 +140,9 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
- name: Download mobile artifact
uses: actions/download-artifact@v4
with:
@ -214,7 +175,6 @@ jobs:
- name: Auth gcloud
id: auth
uses: google-github-actions/auth@v2
if: ${{ env.BUILD_TARGET == 'distribution' }}
with:
workload_identity_provider: 'projects/${{ secrets.GCP_PROJECT_NUMBER }}/locations/global/workloadIdentityPools/github-actions/providers/github-actions-helm-deploy'
service_account: '${{ secrets.GCP_HELM_DEPLOY_SERVICE_ACCOUNT }}'
@ -228,7 +188,6 @@ jobs:
cache: 'gradle'
- name: Auto increment version code
id: bump
if: ${{ env.BUILD_TARGET == 'distribution' }}
run: yarn affine @affine/playstore-auto-bump bump
env:
GOOGLE_APPLICATION_CREDENTIALS: ${{ steps.auth.outputs.credentials_file_path }}
@ -240,14 +199,13 @@ jobs:
AFFINE_ANDROID_KEYSTORE_PASSWORD: ${{ secrets.AFFINE_ANDROID_KEYSTORE_PASSWORD }}
AFFINE_ANDROID_KEYSTORE_ALIAS_PASSWORD: ${{ secrets.AFFINE_ANDROID_KEYSTORE_ALIAS_PASSWORD }}
AFFINE_ANDROID_SIGN_KEYSTORE: ${{ secrets.AFFINE_ANDROID_SIGN_KEYSTORE }}
VERSION_NAME: ${{ steps.version.outputs.APP_VERSION }}
VERSION_NAME: ${{ inputs.app-version }}
- name: Upload to Google Play
uses: r0adkll/upload-google-play@v1
if: ${{ env.BUILD_TARGET == 'distribution' }}
with:
serviceAccountJson: ${{ steps.auth.outputs.credentials_file_path }}
packageName: app.affine.pro
releaseName: ${{ steps.version.outputs.APP_VERSION }}
releaseName: ${{ inputs.app-version }}
releaseFiles: packages/frontend/apps/android/App/app/build/outputs/bundle/${{ env.BUILD_TYPE }}Release/app-${{ env.BUILD_TYPE }}-release-signed.aab
track: internal
status: draft

.github/workflows/release.yml
View File

@ -0,0 +1,123 @@
name: Release
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-canary.*'
schedule:
- cron: '0 9 * * *'
workflow_dispatch:
inputs:
web:
description: 'Release Web?'
required: true
type: boolean
default: false
desktop:
description: 'Release Desktop?'
required: true
type: boolean
default: false
mobile:
description: 'Release Mobile?'
required: true
type: boolean
default: false
permissions:
contents: write
pull-requests: write
actions: write
id-token: write
packages: write
security-events: write
attestations: write
jobs:
prepare:
name: Prepare
runs-on: ubuntu-latest
outputs:
APP_VERSION: ${{ steps.prepare.outputs.APP_VERSION }}
GIT_SHORT_HASH: ${{ steps.prepare.outputs.GIT_SHORT_HASH }}
BUILD_TYPE: ${{ steps.prepare.outputs.BUILD_TYPE }}
steps:
- uses: actions/checkout@v4
- name: Prepare Release
id: prepare
uses: ./.github/actions/prepare-release
cloud:
name: Release Cloud
if: ${{ inputs.web || github.event_name != 'workflow_dispatch' }}
needs:
- prepare
uses: ./.github/workflows/release-cloud.yml
secrets: inherit
with:
build-type: ${{ needs.prepare.outputs.BUILD_TYPE }}
app-version: ${{ needs.prepare.outputs.APP_VERSION }}
git-short-hash: ${{ needs.prepare.outputs.GIT_SHORT_HASH }}
image:
name: Release Docker Image
runs-on: ubuntu-latest
needs:
- prepare
- cloud
steps:
- uses: trstringer/manual-approval@v1
if: ${{ needs.prepare.outputs.BUILD_TYPE == 'stable' }}
name: Wait for approval
with:
secret: ${{ secrets.GITHUB_TOKEN }}
approvers: forehalo,fengmk2
fail-on-denial: true
issue-title: Please confirm to release docker image
issue-body: |
Env: ${{ needs.prepare.outputs.BUILD_TYPE }}
Candidate: ghcr.io/toeverything/affine:${{ needs.prepare.outputs.BUILD_TYPE }}-${{ needs.prepare.outputs.GIT_SHORT_HASH }}
Tag: ghcr.io/toeverything/affine:${{ needs.prepare.outputs.BUILD_TYPE }}
> comment with "approve", "approved", "lgtm", "yes" to approve
> comment with "deny", "deny", "no" to deny
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
logout: false
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Tag Image
run: |
docker tag ghcr.io/toeverything/affine:${{needs.prepare.outputs.BUILD_TYPE}}-${{needs.prepare.outputs.GIT_SHORT_HASH}} ghcr.io/toeverything/affine:${{needs.prepare.outputs.BUILD_TYPE}}
docker push ghcr.io/toeverything/affine:${{needs.prepare.outputs.BUILD_TYPE}}
desktop:
name: Release Desktop
if: ${{ inputs.desktop || github.event_name != 'workflow_dispatch' }}
needs:
- prepare
uses: ./.github/workflows/release-desktop.yml
secrets: inherit
with:
build-type: ${{ needs.prepare.outputs.BUILD_TYPE }}
app-version: ${{ needs.prepare.outputs.APP_VERSION }}
git-short-hash: ${{ needs.prepare.outputs.GIT_SHORT_HASH }}
mobile:
name: Release Mobile
if: ${{ inputs.mobile }}
needs:
- prepare
uses: ./.github/workflows/release-mobile.yml
secrets: inherit
with:
build-type: ${{ needs.prepare.outputs.BUILD_TYPE }}
app-version: ${{ needs.prepare.outputs.APP_VERSION }}
git-short-hash: ${{ needs.prepare.outputs.GIT_SHORT_HASH }}

View File

@ -28,6 +28,7 @@
"lit": "^3.2.0",
"lodash-es": "^4.17.21",
"rxjs": "^7.8.1",
"vitest": "^3.2.3",
"yjs": "^13.6.21",
"zod": "^3.23.8"
},

View File

@ -0,0 +1,120 @@
import { describe, expect, it, vi } from 'vitest';
import { TableHotkeysController } from '../view-presets/table/pc/controller/hotkeys.js';
import { TableHotkeysController as VirtualHotkeysController } from '../view-presets/table/pc-virtual/controller/hotkeys.js';
import {
TableViewAreaSelection,
TableViewRowSelection,
} from '../view-presets/table/selection';
function createLogic() {
const view = {
rowsDelete: vi.fn(),
rows$: { value: [] },
groupTrait: { groupsDataList$: { value: [] } },
};
const ui = { disposables: { add: vi.fn() }, requestUpdate: vi.fn() };
const selectionController = {
selection: undefined as any,
getCellContainer: vi.fn(),
insertRowAfter: vi.fn(),
focusToCell: vi.fn(),
rowSelectionChange: vi.fn(),
areaToRows: vi.fn().mockReturnValue([]),
rowsToArea: vi.fn(),
navigateRowSelection: vi.fn(),
selectionAreaUp: vi.fn(),
selectionAreaDown: vi.fn(),
selectionAreaLeft: vi.fn(),
selectionAreaRight: vi.fn(),
isRowSelection: vi.fn().mockReturnValue(false),
};
const logic: any = {
view,
ui$: { value: ui },
selectionController,
bindHotkey: vi.fn((hotkeys: any) => {
logic.hotkeys = hotkeys;
return { dispose: vi.fn() };
}),
handleEvent: vi.fn((name: string, handler: any) => {
if (name === 'keyDown') logic.keyDown = handler;
return { dispose: vi.fn() };
}),
};
return { logic, view, ui, selectionController };
}
describe('TableHotkeysController', () => {
it('deletes rows on Backspace', () => {
const { logic, view, ui, selectionController } = createLogic();
const ctrl = new TableHotkeysController(logic as any);
ctrl.hostConnected();
selectionController.selection = TableViewRowSelection.create({
rows: [{ id: 'r1' }],
});
logic.hotkeys.Backspace();
expect(selectionController.selection).toBeUndefined();
expect(view.rowsDelete).toHaveBeenCalledWith(['r1']);
expect(ui.requestUpdate).toHaveBeenCalled();
});
it('starts editing on character key', () => {
const { logic, selectionController } = createLogic();
const ctrl = new TableHotkeysController(logic as any);
ctrl.hostConnected();
const cell = {
rowId: 'r1',
dataset: { rowId: 'r1', columnId: 'c1' },
column: { valueSetFromString: vi.fn() },
};
selectionController.getCellContainer.mockReturnValue(cell);
selectionController.selection = TableViewAreaSelection.create({
focus: { rowIndex: 0, columnIndex: 0 },
isEditing: false,
});
const evt = {
key: 'A',
metaKey: false,
ctrlKey: false,
altKey: false,
preventDefault: vi.fn(),
};
logic.keyDown({ get: () => ({ raw: evt }) });
expect(cell.column.valueSetFromString).toHaveBeenCalledWith('r1', 'A');
expect(selectionController.selection.isEditing).toBe(true);
expect(evt.preventDefault).toHaveBeenCalled();
});
});
describe('Virtual TableHotkeysController', () => {
it('writes character to cell', () => {
const { logic, selectionController } = createLogic();
const ctrl = new VirtualHotkeysController(logic as any);
ctrl.hostConnected();
const cell = {
rowId: 'r1',
dataset: { rowId: 'r1', columnId: 'c1' },
column$: { value: { valueSetFromString: vi.fn() } },
};
selectionController.getCellContainer.mockReturnValue(cell);
selectionController.selection = TableViewAreaSelection.create({
focus: { rowIndex: 1, columnIndex: 0 },
isEditing: false,
});
const evt = {
key: 'b',
metaKey: false,
ctrlKey: false,
altKey: false,
preventDefault: vi.fn(),
};
logic.keyDown({ get: () => ({ raw: evt }) });
expect(cell.column$.value.valueSetFromString).toHaveBeenCalledWith(
'r1',
'b'
);
expect(selectionController.selection.isEditing).toBe(true);
expect(evt.preventDefault).toHaveBeenCalled();
});
});

View File

@ -3,6 +3,8 @@ import { DisposableGroup } from '@blocksuite/global/disposable';
import type { ReactiveController } from 'lit';
import { TableViewAreaSelection, TableViewRowSelection } from '../../selection';
import { handleCharStartEdit } from '../../utils.js';
import type { DatabaseCellContainer } from '../row/cell.js';
import { popRowMenu } from '../row/menu';
import type { VirtualTableViewUILogic } from '../table-view-ui-logic';
@ -138,7 +140,11 @@ export class TableHotkeysController implements ReactiveController {
});
}
} else if (selection.isEditing) {
return false;
this.selectionController.selection = {
...selection,
isEditing: false,
};
this.selectionController.focusToCell('down');
} else {
this.selectionController.selection = {
...selection,
@ -172,27 +178,31 @@ export class TableHotkeysController implements ReactiveController {
},
Tab: ctx => {
const selection = this.selectionController.selection;
if (
!selection ||
TableViewRowSelection.is(selection) ||
selection.isEditing
) {
if (!selection || TableViewRowSelection.is(selection)) {
return false;
}
ctx.get('keyboardState').raw.preventDefault();
if (selection.isEditing) {
this.selectionController.selection = {
...selection,
isEditing: false,
};
}
this.selectionController.focusToCell('right');
return true;
},
'Shift-Tab': ctx => {
const selection = this.selectionController.selection;
if (
!selection ||
TableViewRowSelection.is(selection) ||
selection.isEditing
) {
if (!selection || TableViewRowSelection.is(selection)) {
return false;
}
ctx.get('keyboardState').raw.preventDefault();
if (selection.isEditing) {
this.selectionController.selection = {
...selection,
isEditing: false,
};
}
this.selectionController.focusToCell('left');
return true;
},
@ -390,5 +400,19 @@ export class TableHotkeysController implements ReactiveController {
},
})
);
this.disposables.add(
this.logic.handleEvent('keyDown', ctx => {
const event = ctx.get('keyboardState').raw;
return handleCharStartEdit<DatabaseCellContainer>({
event,
selection: this.selectionController.selection,
getCellContainer: this.selectionController.getCellContainer.bind(
this.selectionController
),
updateSelection: sel => (this.selectionController.selection = sel),
getColumn: cell => cell.column$.value,
});
})
);
}
}

View File

@ -2,6 +2,8 @@ import { popupTargetFromElement } from '@blocksuite/affine-components/context-me
import type { ReactiveController } from 'lit';
import { TableViewAreaSelection, TableViewRowSelection } from '../../selection';
import { handleCharStartEdit } from '../../utils.js';
import type { TableViewCellContainer } from '../cell.js';
import { popRowMenu } from '../menu.js';
import type { TableViewUILogic } from '../table-view-ui-logic';
@ -136,7 +138,11 @@ export class TableHotkeysController implements ReactiveController {
});
}
} else if (selection.isEditing) {
return false;
this.selectionController.selection = {
...selection,
isEditing: false,
};
this.selectionController.focusToCell('down');
} else {
this.selectionController.selection = {
...selection,
@ -170,27 +176,31 @@ export class TableHotkeysController implements ReactiveController {
},
Tab: ctx => {
const selection = this.selectionController.selection;
if (
!selection ||
TableViewRowSelection.is(selection) ||
selection.isEditing
) {
if (!selection || TableViewRowSelection.is(selection)) {
return false;
}
ctx.get('keyboardState').raw.preventDefault();
if (selection.isEditing) {
this.selectionController.selection = {
...selection,
isEditing: false,
};
}
this.selectionController.focusToCell('right');
return true;
},
'Shift-Tab': ctx => {
const selection = this.selectionController.selection;
if (
!selection ||
TableViewRowSelection.is(selection) ||
selection.isEditing
) {
if (!selection || TableViewRowSelection.is(selection)) {
return false;
}
ctx.get('keyboardState').raw.preventDefault();
if (selection.isEditing) {
this.selectionController.selection = {
...selection,
isEditing: false,
};
}
this.selectionController.focusToCell('left');
return true;
},
@ -388,5 +398,19 @@ export class TableHotkeysController implements ReactiveController {
},
})
);
this.host?.disposables.add(
this.logic.handleEvent('keyDown', ctx => {
const event = ctx.get('keyboardState').raw;
return handleCharStartEdit<TableViewCellContainer>({
event,
selection: this.selectionController.selection,
getCellContainer: this.selectionController.getCellContainer.bind(
this.selectionController
),
updateSelection: sel => (this.selectionController.selection = sel),
getColumn: cell => cell.column,
});
})
);
}
}

View File

@ -0,0 +1,58 @@
import type { TableViewSelectionWithType } from './selection';
import { TableViewRowSelection } from './selection';
export interface TableCell {
rowId: string;
}
export type ColumnAccessor<T extends TableCell> = (
cell: T
) => { valueSetFromString(rowId: string, value: string): void } | undefined;
export interface StartEditOptions<T extends TableCell> {
event: KeyboardEvent;
selection: TableViewSelectionWithType | undefined;
getCellContainer: (
groupKey: string | undefined,
rowIndex: number,
columnIndex: number
) => T | undefined;
updateSelection: (sel: TableViewSelectionWithType) => void;
getColumn: ColumnAccessor<T>;
}
export function handleCharStartEdit<T extends TableCell>(
options: StartEditOptions<T>
): boolean {
const { event, selection, getCellContainer, updateSelection, getColumn } =
options;
const target = event.target as HTMLElement | null;
if (target && (target.tagName === 'INPUT' || target.tagName === 'TEXTAREA')) {
return false;
}
if (
selection &&
!TableViewRowSelection.is(selection) &&
!selection.isEditing &&
!event.metaKey &&
!event.ctrlKey &&
!event.altKey &&
event.key.length === 1
) {
const cell = getCellContainer(
selection.groupKey,
selection.focus.rowIndex,
selection.focus.columnIndex
);
if (cell) {
const column = getColumn(cell);
column?.valueSetFromString(cell.rowId, event.key);
updateSelection({ ...selection, isEditing: true });
event.preventDefault();
return true;
}
}
return false;
}
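
For reference, a minimal usage sketch of this helper. DemoCell, DemoController, and onKeyDown are illustrative names only; the real call sites are the two keyDown registrations in the hotkeys controllers above, which pass DatabaseCellContainer and TableViewCellContainer respectively.

import type { TableViewSelectionWithType } from './selection';
import { handleCharStartEdit, type TableCell } from './utils.js';

// Hypothetical cell shape; real cells expose either `column` (pc view)
// or `column$.value` (pc-virtual view).
interface DemoCell extends TableCell {
  column: { valueSetFromString(rowId: string, value: string): void };
}

interface DemoController {
  selection: TableViewSelectionWithType | undefined;
  getCellContainer(
    groupKey: string | undefined,
    rowIndex: number,
    columnIndex: number
  ): DemoCell | undefined;
}

// Returns true when the pressed character was written into the focused
// cell and editing started; false lets other handlers run.
function onKeyDown(event: KeyboardEvent, ctrl: DemoController): boolean {
  return handleCharStartEdit<DemoCell>({
    event,
    selection: ctrl.selection,
    getCellContainer: ctrl.getCellContainer.bind(ctrl),
    updateSelection: sel => (ctrl.selection = sel),
    getColumn: cell => cell.column,
  });
}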

View File

@ -0,0 +1,25 @@
import { defineConfig } from 'vitest/config';
export default defineConfig({
esbuild: {
target: 'es2018',
},
test: {
globalSetup: '../../scripts/vitest-global.js',
include: ['src/__tests__/**/*.unit.spec.ts'],
testTimeout: 1000,
coverage: {
provider: 'istanbul',
reporter: ['lcov'],
reportsDirectory: '../../.coverage/data-view',
},
onConsoleLog(log, type) {
if (log.includes('lit.dev/msg/dev-mode')) {
return false;
}
console.warn(`Unexpected ${type} log`, log);
throw new Error(log);
},
environment: 'happy-dom',
},
});
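
Note the include glob: only files under src/__tests__/ ending in .unit.spec.ts are collected, which is presumably where the hotkeys spec above lives. A minimal skeleton following that convention (file name illustrative):

// src/__tests__/example.unit.spec.ts
import { describe, expect, it } from 'vitest';

describe('example', () => {
  it('adds numbers', () => {
    expect(1 + 1).toBe(2);
  });
});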

View File

@ -0,0 +1,11 @@
import { DomElementRendererExtension } from '@blocksuite/affine-block-surface';
import { connectorDomRenderer } from './connector-dom/index.js';
/**
* Extension to register the DOM-based renderer for 'connector' elements.
*/
export const ConnectorDomRendererExtension = DomElementRendererExtension(
'connector',
connectorDomRenderer
);

View File

@ -0,0 +1,367 @@
import type { DomRenderer } from '@blocksuite/affine-block-surface';
import {
type ConnectorElementModel,
ConnectorMode,
DefaultTheme,
type PointStyle,
} from '@blocksuite/affine-model';
import { PointLocation, SVGPathBuilder } from '@blocksuite/global/gfx';
import { isConnectorWithLabel } from '../../connector-manager.js';
import { DEFAULT_ARROW_SIZE } from '../utils.js';
interface PathBounds {
minX: number;
minY: number;
maxX: number;
maxY: number;
}
function calculatePathBounds(path: PointLocation[]): PathBounds {
if (path.length === 0) {
return { minX: 0, minY: 0, maxX: 0, maxY: 0 };
}
let minX = path[0][0];
let minY = path[0][1];
let maxX = path[0][0];
let maxY = path[0][1];
for (const point of path) {
minX = Math.min(minX, point[0]);
minY = Math.min(minY, point[1]);
maxX = Math.max(maxX, point[0]);
maxY = Math.max(maxY, point[1]);
}
return { minX, minY, maxX, maxY };
}
function createConnectorPath(
points: PointLocation[],
mode: ConnectorMode
): string {
if (points.length < 2) return '';
const pathBuilder = new SVGPathBuilder();
pathBuilder.moveTo(points[0][0], points[0][1]);
if (mode === ConnectorMode.Curve) {
// Use bezier curves
for (let i = 1; i < points.length; i++) {
const prev = points[i - 1];
const curr = points[i];
pathBuilder.curveTo(
prev.absOut[0],
prev.absOut[1],
curr.absIn[0],
curr.absIn[1],
curr[0],
curr[1]
);
}
} else {
// Use straight lines
for (let i = 1; i < points.length; i++) {
pathBuilder.lineTo(points[i][0], points[i][1]);
}
}
return pathBuilder.build();
}
function createArrowMarker(
id: string,
style: PointStyle,
color: string,
strokeWidth: number,
isStart: boolean = false
): SVGMarkerElement {
const marker = document.createElementNS(
'http://www.w3.org/2000/svg',
'marker'
);
const size = DEFAULT_ARROW_SIZE * (strokeWidth / 2);
marker.id = id;
marker.setAttribute('viewBox', '0 0 20 20');
marker.setAttribute('refX', isStart ? '20' : '0');
marker.setAttribute('refY', '10');
marker.setAttribute('markerWidth', String(size));
marker.setAttribute('markerHeight', String(size));
marker.setAttribute('orient', 'auto');
marker.setAttribute('markerUnits', 'strokeWidth');
switch (style) {
case 'Arrow': {
const path = document.createElementNS(
'http://www.w3.org/2000/svg',
'path'
);
path.setAttribute(
'd',
isStart ? 'M 20 5 L 10 10 L 20 15 Z' : 'M 0 5 L 10 10 L 0 15 Z'
);
path.setAttribute('fill', color);
path.setAttribute('stroke', color);
marker.append(path);
break;
}
case 'Triangle': {
const path = document.createElementNS(
'http://www.w3.org/2000/svg',
'path'
);
path.setAttribute(
'd',
isStart ? 'M 20 7 L 12 10 L 20 13 Z' : 'M 0 7 L 8 10 L 0 13 Z'
);
path.setAttribute('fill', color);
path.setAttribute('stroke', color);
marker.append(path);
break;
}
case 'Circle': {
const circle = document.createElementNS(
'http://www.w3.org/2000/svg',
'circle'
);
circle.setAttribute('cx', '10');
circle.setAttribute('cy', '10');
circle.setAttribute('r', '4');
circle.setAttribute('fill', color);
circle.setAttribute('stroke', color);
marker.append(circle);
break;
}
case 'Diamond': {
const path = document.createElementNS(
'http://www.w3.org/2000/svg',
'path'
);
path.setAttribute('d', 'M 10 6 L 14 10 L 10 14 L 6 10 Z');
path.setAttribute('fill', color);
path.setAttribute('stroke', color);
marker.append(path);
break;
}
}
return marker;
}
function renderConnectorLabel(
model: ConnectorElementModel,
container: HTMLElement,
renderer: DomRenderer,
zoom: number
) {
if (!isConnectorWithLabel(model) || !model.labelXYWH) {
return;
}
const [lx, ly, lw, lh] = model.labelXYWH;
const {
labelStyle: {
color,
fontSize,
fontWeight,
fontStyle,
fontFamily,
textAlign,
},
} = model;
// Create label element
const labelElement = document.createElement('div');
labelElement.style.position = 'absolute';
labelElement.style.left = `${lx * zoom}px`;
labelElement.style.top = `${ly * zoom}px`;
labelElement.style.width = `${lw * zoom}px`;
labelElement.style.height = `${lh * zoom}px`;
labelElement.style.pointerEvents = 'none';
labelElement.style.overflow = 'hidden';
labelElement.style.display = 'flex';
labelElement.style.alignItems = 'center';
labelElement.style.justifyContent =
textAlign === 'center'
? 'center'
: textAlign === 'right'
? 'flex-end'
: 'flex-start';
// Style the text
labelElement.style.color = renderer.getColorValue(
color,
DefaultTheme.black,
true
);
labelElement.style.fontSize = `${fontSize * zoom}px`;
labelElement.style.fontWeight = fontWeight;
labelElement.style.fontStyle = fontStyle;
labelElement.style.fontFamily = fontFamily;
labelElement.style.textAlign = textAlign;
labelElement.style.lineHeight = '1.2';
labelElement.style.whiteSpace = 'pre-wrap';
labelElement.style.wordWrap = 'break-word';
// Add text content
if (model.text) {
labelElement.textContent = model.text.toString();
}
container.append(labelElement);
}
/**
* Renders a ConnectorElementModel to a given HTMLElement using DOM/SVG.
* This function is intended to be registered via the DomElementRendererExtension.
*
* @param model - The connector element model containing rendering properties.
* @param element - The HTMLElement to apply the connector's styles to.
 * @param renderer - The main DomRenderer instance, providing access to viewport and color utilities.
*/
export const connectorDomRenderer = (
model: ConnectorElementModel,
element: HTMLElement,
renderer: DomRenderer
): void => {
const { zoom } = renderer.viewport;
const {
mode,
path: points,
strokeStyle,
frontEndpointStyle,
rearEndpointStyle,
strokeWidth,
stroke,
} = model;
// Clear previous content
element.innerHTML = '';
// Early return if no path points
if (!points || points.length < 2) {
return;
}
// Calculate bounds for the SVG viewBox
const pathBounds = calculatePathBounds(points);
const padding = Math.max(strokeWidth * 2, 20); // Add padding for arrows
const svgWidth = (pathBounds.maxX - pathBounds.minX + padding * 2) * zoom;
const svgHeight = (pathBounds.maxY - pathBounds.minY + padding * 2) * zoom;
const offsetX = pathBounds.minX - padding;
const offsetY = pathBounds.minY - padding;
// Create SVG element
const svg = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
svg.style.position = 'absolute';
svg.style.left = `${offsetX * zoom}px`;
svg.style.top = `${offsetY * zoom}px`;
svg.style.width = `${svgWidth}px`;
svg.style.height = `${svgHeight}px`;
svg.style.overflow = 'visible';
svg.style.pointerEvents = 'none';
svg.setAttribute('viewBox', `0 0 ${svgWidth / zoom} ${svgHeight / zoom}`);
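// Note: the CSS size above is scaled by zoom while the viewBox spans the
// unscaled bounds, so the path below can stay in model coordinates and the
// browser handles the scaling.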
// Create defs for markers
const defs = document.createElementNS('http://www.w3.org/2000/svg', 'defs');
svg.append(defs);
const strokeColor = renderer.getColorValue(
stroke,
DefaultTheme.connectorColor,
true
);
// Create markers for endpoints
let startMarkerId = '';
let endMarkerId = '';
if (frontEndpointStyle !== 'None') {
startMarkerId = `start-marker-${model.id}`;
const startMarker = createArrowMarker(
startMarkerId,
frontEndpointStyle,
strokeColor,
strokeWidth,
true
);
defs.append(startMarker);
}
if (rearEndpointStyle !== 'None') {
endMarkerId = `end-marker-${model.id}`;
const endMarker = createArrowMarker(
endMarkerId,
rearEndpointStyle,
strokeColor,
strokeWidth,
false
);
defs.append(endMarker);
}
// Create path element
const pathElement = document.createElementNS(
'http://www.w3.org/2000/svg',
'path'
);
// Adjust points relative to the SVG coordinate system
const adjustedPoints = points.map(point => {
const adjustedPoint = new PointLocation([
point[0] - offsetX,
point[1] - offsetY,
]);
if (point.absIn) {
adjustedPoint.in = [
point.absIn[0] - offsetX - adjustedPoint[0],
point.absIn[1] - offsetY - adjustedPoint[1],
];
}
if (point.absOut) {
adjustedPoint.out = [
point.absOut[0] - offsetX - adjustedPoint[0],
point.absOut[1] - offsetY - adjustedPoint[1],
];
}
return adjustedPoint;
});
const pathData = createConnectorPath(adjustedPoints, mode);
pathElement.setAttribute('d', pathData);
pathElement.setAttribute('stroke', strokeColor);
pathElement.setAttribute('stroke-width', String(strokeWidth));
pathElement.setAttribute('fill', 'none');
pathElement.setAttribute('stroke-linecap', 'round');
pathElement.setAttribute('stroke-linejoin', 'round');
// Apply stroke style
if (strokeStyle === 'dash') {
pathElement.setAttribute('stroke-dasharray', '12,12');
}
// Apply markers
if (startMarkerId) {
pathElement.setAttribute('marker-start', `url(#${startMarkerId})`);
}
if (endMarkerId) {
pathElement.setAttribute('marker-end', `url(#${endMarkerId})`);
}
svg.append(pathElement);
element.append(svg);
// Set element size and position
element.style.width = `${model.w * zoom}px`;
element.style.height = `${model.h * zoom}px`;
element.style.overflow = 'visible';
element.style.pointerEvents = 'none';
// Set z-index for layering
element.style.zIndex = renderer.layerManager.getZIndex(model).toString();
// Render label if present
renderConnectorLabel(model, element, renderer, zoom);
};

View File

@ -2,6 +2,7 @@ export * from './adapter';
export * from './connector-manager';
export * from './connector-tool';
export * from './element-renderer';
export { ConnectorDomRendererExtension } from './element-renderer/connector-dom';
export * from './element-transform';
export * from './text';
export * from './toolbar/config';

View File

@ -7,6 +7,7 @@ import { ConnectionOverlay } from './connector-manager';
import { ConnectorTool } from './connector-tool';
import { effects } from './effects';
import { ConnectorElementRendererExtension } from './element-renderer';
import { ConnectorDomRendererExtension } from './element-renderer/connector-dom';
import { ConnectorFilter } from './element-transform';
import { connectorToolbarExtension } from './toolbar/config';
import { connectorQuickTool } from './toolbar/quick-tool';
@ -24,6 +25,7 @@ export class ConnectorViewExtension extends ViewExtensionProvider {
super.setup(context);
context.register(ConnectorElementView);
context.register(ConnectorElementRendererExtension);
context.register(ConnectorDomRendererExtension);
if (this.isEdgeless(context.scope)) {
context.register(ConnectorTool);
context.register(ConnectorFilter);

View File

@ -1,6 +1,7 @@
import type { DomRenderer } from '@blocksuite/affine-block-surface';
import type { ShapeElementModel } from '@blocksuite/affine-model';
import { DefaultTheme } from '@blocksuite/affine-model';
import { SVGShapeBuilder } from '@blocksuite/global/gfx';
import { manageClassNames, setStyles } from './utils';
@ -122,25 +123,22 @@ export const shapeDomRenderer = (
element.style.backgroundColor = 'transparent'; // Host element is transparent
const strokeW = model.strokeWidth;
const halfStroke = strokeW / 2; // Calculate half stroke width for point adjustment
let svgPoints = '';
if (model.shapeType === 'diamond') {
// Adjusted points for diamond
svgPoints = [
`${unscaledWidth / 2},${halfStroke}`,
`${unscaledWidth - halfStroke},${unscaledHeight / 2}`,
`${unscaledWidth / 2},${unscaledHeight - halfStroke}`,
`${halfStroke},${unscaledHeight / 2}`,
].join(' ');
// Generate diamond points using shared utility
svgPoints = SVGShapeBuilder.diamond(
unscaledWidth,
unscaledHeight,
strokeW
);
} else {
// triangle
// Adjusted points for triangle
svgPoints = [
`${unscaledWidth / 2},${halfStroke}`,
`${unscaledWidth - halfStroke},${unscaledHeight - halfStroke}`,
`${halfStroke},${unscaledHeight - halfStroke}`,
].join(' ');
// triangle - generate triangle points using shared utility
svgPoints = SVGShapeBuilder.triangle(
unscaledWidth,
unscaledHeight,
strokeW
);
}
// Determine if stroke should be visible and its color

View File

@ -59,6 +59,7 @@ export class AffineLink extends WithDisposable(ShadowlessElement) {
refNodeSlotsProvider.docLinkClicked.next({
...referenceInfo,
openMode: e?.button === 1 ? 'open-in-new-tab' : undefined,
host: this.std.host,
});
};
@ -149,6 +150,7 @@ export class AffineLink extends WithDisposable(ShadowlessElement) {
target="_blank"
style=${styleMap(style)}
@click=${this.openLink}
@auxclick=${this.openLink}
@mouseup=${this._onMouseUp}
><v-text .str=${this.delta.insert}></v-text
></a>`;

View File

@ -154,6 +154,8 @@ export class AffineReference extends WithDisposable(ShadowlessElement) {
this.std.getOptional(RefNodeSlotsProvider)?.docLinkClicked.next({
...this.referenceInfo,
...event,
openMode:
event?.event?.button === 1 ? 'open-in-new-tab' : event?.openMode,
host: this.std.host,
});
};
@ -285,6 +287,7 @@ export class AffineReference extends WithDisposable(ShadowlessElement) {
class="affine-reference"
style=${styleMap(style)}
@click=${(event: MouseEvent) => this.open({ event })}
@auxclick=${(event: MouseEvent) => this.open({ event })}
>${content}<v-text .str=${ZERO_WIDTH_FOR_EMBED_NODE}></v-text
></span>`;
}

View File

@ -21,6 +21,7 @@ export interface BlockSuiteFlags {
enable_table_virtual_scroll: boolean;
enable_turbo_renderer: boolean;
enable_dom_renderer: boolean;
enable_web_container: boolean;
}
export class FeatureFlagService extends StoreExtension {
@ -46,6 +47,7 @@ export class FeatureFlagService extends StoreExtension {
enable_table_virtual_scroll: false,
enable_turbo_renderer: false,
enable_dom_renderer: false,
enable_web_container: false,
});
setFlag(key: keyof BlockSuiteFlags, value: boolean) {

View File

@ -0,0 +1,73 @@
import { describe, expect, test } from 'vitest';
import { SVGPathBuilder, SVGShapeBuilder } from '../gfx/svg-path.js';
describe('SVGPathBuilder', () => {
test('should build a simple path', () => {
const pathBuilder = new SVGPathBuilder();
const result = pathBuilder.moveTo(10, 20).lineTo(30, 40).build();
expect(result).toBe('M 10 20 L 30 40');
});
test('should build a path with curves', () => {
const pathBuilder = new SVGPathBuilder();
const result = pathBuilder
.moveTo(0, 0)
.curveTo(10, 0, 10, 10, 20, 10)
.build();
expect(result).toBe('M 0 0 C 10 0 10 10 20 10');
});
test('should build a closed path', () => {
const pathBuilder = new SVGPathBuilder();
const result = pathBuilder
.moveTo(0, 0)
.lineTo(10, 0)
.lineTo(5, 10)
.closePath()
.build();
expect(result).toBe('M 0 0 L 10 0 L 5 10 Z');
});
test('should clear commands', () => {
const pathBuilder = new SVGPathBuilder();
pathBuilder.moveTo(10, 20).lineTo(30, 40);
pathBuilder.clear();
const result = pathBuilder.moveTo(0, 0).build();
expect(result).toBe('M 0 0');
});
});
describe('SVGShapeBuilder', () => {
test('should generate diamond polygon points', () => {
const result = SVGShapeBuilder.diamond(100, 80, 2);
expect(result).toBe('50,1 99,40 50,79 1,40');
});
test('should generate triangle polygon points', () => {
const result = SVGShapeBuilder.triangle(100, 80, 2);
expect(result).toBe('50,1 99,79 1,79');
});
test('should generate diamond path', () => {
const result = SVGShapeBuilder.diamondPath(100, 80, 2);
expect(result).toBe('M 50 1 L 99 40 L 50 79 L 1 40 Z');
});
test('should generate triangle path', () => {
const result = SVGShapeBuilder.trianglePath(100, 80, 2);
expect(result).toBe('M 50 1 L 99 79 L 1 79 Z');
});
test('should handle zero stroke width', () => {
const diamondResult = SVGShapeBuilder.diamond(100, 80, 0);
expect(diamondResult).toBe('50,0 100,40 50,80 0,40');
const triangleResult = SVGShapeBuilder.triangle(100, 80, 0);
expect(triangleResult).toBe('50,0 100,80 0,80');
});
});

View File

@ -26,4 +26,6 @@ export const IS_IPAD =
export const IS_WINDOWS = /Win/.test(platform) || /win32/.test(platform);
export const IS_LINUX = /Linux/.test(platform);
export const IS_MOBILE = IS_IOS || IS_IPAD || IS_ANDROID;

View File

@ -4,4 +4,5 @@ export * from './math.js';
export * from './model/index.js';
export * from './perfect-freehand/index.js';
export * from './polyline.js';
export * from './svg-path.js';
export * from './xywh.js';

View File

@ -0,0 +1,160 @@
interface PathCommand {
command: string;
coordinates: number[];
}
/**
 * A utility class for building SVG path strings using a command-based API.
 * Supports moveTo, lineTo, and curveTo operations and can build complete path strings.
*/
export class SVGPathBuilder {
private commands: PathCommand[] = [];
/**
* Move to a specific point without drawing
*/
moveTo(x: number, y: number): this {
this.commands.push({
command: 'M',
coordinates: [x, y],
});
return this;
}
/**
* Draw a line to a specific point
*/
lineTo(x: number, y: number): this {
this.commands.push({
command: 'L',
coordinates: [x, y],
});
return this;
}
/**
* Draw a cubic Bézier curve
*/
curveTo(
cp1x: number,
cp1y: number,
cp2x: number,
cp2y: number,
x: number,
y: number
): this {
this.commands.push({
command: 'C',
coordinates: [cp1x, cp1y, cp2x, cp2y, x, y],
});
return this;
}
/**
* Close the current path
*/
closePath(): this {
this.commands.push({
command: 'Z',
coordinates: [],
});
return this;
}
/**
* Build the complete SVG path string
*/
build(): string {
const pathSegments = this.commands.map(cmd => {
const coords = cmd.coordinates.join(' ');
return coords ? `${cmd.command} ${coords}` : cmd.command;
});
return pathSegments.join(' ');
}
/**
* Clear all commands and reset the builder
*/
clear(): this {
this.commands = [];
return this;
}
}
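// Usage sketch (mirrors the unit tests in this diff):
//   new SVGPathBuilder().moveTo(10, 20).lineTo(30, 40).build();
//   // => 'M 10 20 L 30 40'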
/**
 * Helpers for creating SVG polygon point strings and path strings for common shapes.
 */
export class SVGShapeBuilder {
/**
* Generate diamond (rhombus) polygon points
*/
static diamond(
width: number,
height: number,
strokeWidth: number = 0
): string {
const halfStroke = strokeWidth / 2;
return [
`${width / 2},${halfStroke}`,
`${width - halfStroke},${height / 2}`,
`${width / 2},${height - halfStroke}`,
`${halfStroke},${height / 2}`,
].join(' ');
}
/**
* Generate triangle polygon points
*/
static triangle(
width: number,
height: number,
strokeWidth: number = 0
): string {
const halfStroke = strokeWidth / 2;
return [
`${width / 2},${halfStroke}`,
`${width - halfStroke},${height - halfStroke}`,
`${halfStroke},${height - halfStroke}`,
].join(' ');
}
/**
* Generate diamond path using SVGPathBuilder
*/
static diamondPath(
width: number,
height: number,
strokeWidth: number = 0
): string {
const halfStroke = strokeWidth / 2;
const pathBuilder = new SVGPathBuilder();
return pathBuilder
.moveTo(width / 2, halfStroke)
.lineTo(width - halfStroke, height / 2)
.lineTo(width / 2, height - halfStroke)
.lineTo(halfStroke, height / 2)
.closePath()
.build();
}
/**
* Generate triangle path using SVGPathBuilder
*/
static trianglePath(
width: number,
height: number,
strokeWidth: number = 0
): string {
const halfStroke = strokeWidth / 2;
const pathBuilder = new SVGPathBuilder();
return pathBuilder
.moveTo(width / 2, halfStroke)
.lineTo(width - halfStroke, height - halfStroke)
.lineTo(halfStroke, height - halfStroke)
.closePath()
.build();
}
}
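// Usage sketch (values taken from the unit tests in this diff):
//   SVGShapeBuilder.diamond(100, 80, 2);      // => '50,1 99,40 50,79 1,40'
//   SVGShapeBuilder.trianglePath(100, 80, 2); // => 'M 50 1 L 99 79 L 1 79 Z'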

View File

@ -0,0 +1,158 @@
import { DomRenderer } from '@blocksuite/affine-block-surface';
import { beforeEach, describe, expect, test } from 'vitest';
import { wait } from '../utils/common.js';
import { getSurface } from '../utils/edgeless.js';
import { setupEditor } from '../utils/setup.js';
describe('Connector rendering with DOM renderer', () => {
beforeEach(async () => {
const cleanup = await setupEditor('edgeless', [], {
enableDomRenderer: true,
});
return cleanup;
});
test('should use DomRenderer when enable_dom_renderer flag is true', async () => {
const surface = getSurface(doc, editor);
expect(surface).not.toBeNull();
expect(surface?.renderer).toBeInstanceOf(DomRenderer);
});
test('should render a connector element as a DOM node', async () => {
const surfaceView = getSurface(window.doc, window.editor);
const surfaceModel = surfaceView.model;
// Create two shapes to connect
const shape1Id = surfaceModel.addElement({
type: 'shape',
xywh: '[100, 100, 80, 60]',
});
const shape2Id = surfaceModel.addElement({
type: 'shape',
xywh: '[300, 200, 80, 60]',
});
// Create a connector between the shapes
const connectorProps = {
type: 'connector',
source: { id: shape1Id },
target: { id: shape2Id },
stroke: '#000000',
strokeWidth: 2,
};
const connectorId = surfaceModel.addElement(connectorProps);
await wait(100);
const connectorElement = surfaceView?.renderRoot.querySelector(
`[data-element-id="${connectorId}"]`
);
expect(connectorElement).not.toBeNull();
expect(connectorElement).toBeInstanceOf(HTMLElement);
// Check if SVG element is present for connector rendering
const svgElement = connectorElement?.querySelector('svg');
expect(svgElement).not.toBeNull();
});
test('should render connector with different stroke styles', async () => {
const surfaceView = getSurface(window.doc, window.editor);
const surfaceModel = surfaceView.model;
// Create a dashed connector
const connectorProps = {
type: 'connector',
source: { position: [100, 100] },
target: { position: [200, 200] },
strokeStyle: 'dash',
stroke: '#ff0000',
strokeWidth: 4,
};
const connectorId = surfaceModel.addElement(connectorProps);
// Wait for path generation and rendering
await wait(500);
const connectorElement = surfaceView?.renderRoot.querySelector(
`[data-element-id="${connectorId}"]`
);
expect(connectorElement).not.toBeNull();
const svgElement = connectorElement?.querySelector('svg');
expect(svgElement).not.toBeNull();
// Find the main path element (not the ones inside markers)
const pathElements = svgElement?.querySelectorAll('path');
// The main connector path should be the last one (after marker paths)
const pathElement = pathElements?.[pathElements.length - 1];
expect(pathElement).not.toBeNull();
// Check stroke-dasharray attribute
const strokeDasharray = pathElement!.getAttribute('stroke-dasharray');
expect(strokeDasharray).toBe('12,12');
});
test('should render connector with arrow endpoints', async () => {
const surfaceView = getSurface(window.doc, window.editor);
const surfaceModel = surfaceView.model;
const connectorProps = {
type: 'connector',
source: { position: [100, 100] },
target: { position: [200, 200] },
frontEndpointStyle: 'Triangle',
rearEndpointStyle: 'Arrow',
};
const connectorId = surfaceModel.addElement(connectorProps);
await wait(100);
const connectorElement = surfaceView?.renderRoot.querySelector(
`[data-element-id="${connectorId}"]`
);
expect(connectorElement).not.toBeNull();
// Check for markers in defs
const defsElement = connectorElement?.querySelector('defs');
expect(defsElement).not.toBeNull();
const markers = defsElement?.querySelectorAll('marker');
expect(markers?.length).toBeGreaterThan(0);
});
test('should remove connector DOM node when element is deleted', async () => {
const surfaceView = getSurface(window.doc, window.editor);
const surfaceModel = surfaceView.model;
expect(surfaceView.renderer).toBeInstanceOf(DomRenderer);
const connectorProps = {
type: 'connector',
source: { position: [50, 50] },
target: { position: [150, 150] },
};
const connectorId = surfaceModel.addElement(connectorProps);
await wait(100);
let connectorElement = surfaceView.renderRoot.querySelector(
`[data-element-id="${connectorId}"]`
);
expect(connectorElement).not.toBeNull();
surfaceModel.deleteElement(connectorId);
await wait(100);
connectorElement = surfaceView.renderRoot.querySelector(
`[data-element-id="${connectorId}"]`
);
expect(connectorElement).toBeNull();
});
});

View File

@ -86,7 +86,7 @@ Run the following script. It will build the native module at [`/packages/fronten
This could take a while if you build it for the first time.
Note: use the system `strip` instead of the one from `binutils` if you are running macOS. [see problem here](https://github.com/toeverything/AFFiNE/discussions/2840)
```
```sh
yarn affine @affine/native build
```
@ -102,7 +102,8 @@ Adding test cases is strongly encouraged when you contribute new features and bu
We use [Playwright](https://playwright.dev/) for E2E tests and [vitest](https://vitest.dev/) for unit tests.
To test locally, please make sure browser binaries are already installed via `npx playwright install`.
Also make sure you have built the `@affine/core` workspace before running E2E tests.
Start server before tests by following [`docs/developing-server.md`](./developing-server.md) first.
### Unit Test
@ -115,6 +116,5 @@ yarn test
```shell
# there are `affine-local`, `affine-migration`, `affine-prototype` e2e tests,
# which are run under different situations.
cd tests/affine-local
yarn e2e
yarn workspace @affine-test/affine-local e2e
```

View File

@ -37,6 +37,8 @@ On Windows, you must enable symbolic links for this code repo. See [#### Windows](./
## Build, package & make the desktop client app
> repos/AFFiNE/.github/workflows/release-desktop.yml contains the real order used to build the desktop client app, but here we explain the steps in more detail. Kept up to date.
### 0. Build the native modules
Please refer to the `Build Native Dependencies` section in [BUILDING.md](./BUILDING.md#Build-Native-Dependencies) to build the native modules.
@ -46,6 +48,8 @@ Please refer to `Build Native Dependencies` section in [BUILDING.md](./BUILDING.
On Mac & Linux
```shell
BUILD_TYPE=canary yarn affine @affine/electron build
BUILD_TYPE=canary yarn affine @affine/electron generate-assets
```
@ -110,7 +114,7 @@ Once the build is complete, you can find the paths to the binaries in the termin
```
Finished 2 bundles at:
Artifacts available at: <affine-repo>/packages/frontend/electron/out/make
Artifacts available at: <affine-repo>/packages/frontend/apps/electron/out/canary/make
```
## CI

View File

@ -0,0 +1,23 @@
-- AlterTable
ALTER TABLE "ai_sessions_metadata" ALTER COLUMN "doc_id" DROP NOT NULL;
-- AlterTable
ALTER TABLE "ai_sessions_metadata" ADD COLUMN "pinned" BOOLEAN NOT NULL DEFAULT false;
-- AlterTable
ALTER TABLE "ai_sessions_metadata" ADD COLUMN "prompt_action" VARCHAR(32) DEFAULT '';
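-- NOTE: the two partial unique indexes below cannot be declared in the Prisma
-- schema (Prisma has no partial-index support), so they live only in this
-- migration; see the matching comment on the AiSession model.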
-- CreateIndex
CREATE UNIQUE INDEX "ai_session_unique_pinned_idx" ON "ai_sessions_metadata" (user_id, workspace_id) WHERE pinned = true AND deleted_at IS NULL;
-- CreateIndex
CREATE UNIQUE INDEX "ai_session_unique_doc_session_idx" ON "ai_sessions_metadata" (user_id, workspace_id, doc_id) WHERE prompt_action IS NULL AND parent_session_id IS NULL AND doc_id IS NOT NULL AND deleted_at IS NULL;
-- CreateIndex
CREATE INDEX "ai_sessions_metadata_prompt_name_idx" ON "ai_sessions_metadata"("prompt_name");
-- DropIndex
DROP INDEX "ai_sessions_metadata_user_id_workspace_id_idx";
-- CreateIndex
CREATE INDEX "ai_sessions_metadata_user_id_workspace_id_doc_id_idx" ON "ai_sessions_metadata"("user_id", "workspace_id", "doc_id");

View File

@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "ai_sessions_messages" ADD COLUMN "streamObjects" JSON;

View File

@ -414,14 +414,15 @@ model AiPrompt {
}
model AiSessionMessage {
id String @id @default(uuid()) @db.VarChar
sessionId String @map("session_id") @db.VarChar
role AiPromptRole
content String @db.Text
attachments Json? @db.Json
params Json? @db.Json
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
id String @id @default(uuid()) @db.VarChar
sessionId String @map("session_id") @db.VarChar
role AiPromptRole
content String @db.Text
streamObjects Json? @db.Json
attachments Json? @db.Json
params Json? @db.Json
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
session AiSession @relation(fields: [sessionId], references: [id], onDelete: Cascade)
@ -433,8 +434,10 @@ model AiSession {
id String @id @default(uuid()) @db.VarChar
userId String @map("user_id") @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
docId String @map("doc_id") @db.VarChar
docId String? @map("doc_id") @db.VarChar
promptName String @map("prompt_name") @db.VarChar(32)
promptAction String? @default("") @map("prompt_action") @db.VarChar(32)
pinned Boolean @default(false)
// the session id of the parent session if this session is a forked session
parentSessionId String? @map("parent_session_id") @db.VarChar
messageCost Int @default(0)
@ -447,8 +450,14 @@ model AiSession {
messages AiSessionMessage[]
context AiContext[]
// NOTE:
// unrecorded indexes:
// @@index([userId, workspaceId]) where pinned = true and deleted_at is null
// @@index([userId, workspaceId, docId]) where prompt_action is null and parent_session_id is null and doc_id is not null and deleted_at is null
// since Prisma does not support partial indexes, these indexes exist only in the migration files.
@@index([promptName])
@@index([userId])
@@index([userId, workspaceId])
@@index([userId, workspaceId, docId])
@@map("ai_sessions_metadata")
}

View File

@ -135,3 +135,31 @@ Generated by [AVA](https://avajs.dev).
],
},
]
## should create different session types and validate prompt constraints
> should create session with should create workspace session with text prompt
[
{
pinned: false,
},
]
> should create session with should create pinned session with text prompt
[
{
docId: 'pinned-doc',
pinned: true,
},
]
> should create session with should create doc session with text prompt
[
{
docId: 'normal-doc',
pinned: false,
},
]

View File

@ -1,5 +1,6 @@
import type { ExecutionContext, TestFn } from 'ava';
import ava from 'ava';
import { z } from 'zod';
import { ServerFeature, ServerService } from '../core';
import { AuthService } from '../core/auth';
@ -9,6 +10,8 @@ import { prompts, PromptService } from '../plugins/copilot/prompt';
import {
CopilotProviderFactory,
CopilotProviderType,
StreamObject,
StreamObjectSchema,
} from '../plugins/copilot/providers';
import { TranscriptionResponseSchema } from '../plugins/copilot/transcript/types';
import {
@ -183,6 +186,16 @@ const checkUrl = (url: string) => {
}
};
const checkStreamObjects = (result: string) => {
try {
const streamObjects = JSON.parse(result);
z.array(StreamObjectSchema).parse(streamObjects);
return true;
} catch {
return false;
}
};
const retry = async (
action: string,
t: ExecutionContext<Tester>,
@ -387,6 +400,20 @@ The term **“CRDT”** was first introduced by Marc Shapiro, Nuno Preguiça, Ca
},
type: 'text' as const,
},
{
name: 'stream objects',
promptName: ['Chat With AFFiNE AI'],
messages: [
{
role: 'user' as const,
content: 'what is AFFiNE AI',
},
],
verifier: (t: ExecutionContext<Tester>, result: string) => {
t.truthy(checkStreamObjects(result), 'should be valid stream objects');
},
type: 'object' as const,
},
{
name: 'Should transcribe short audio',
promptName: ['Transcript audio'],
@ -680,6 +707,27 @@ for (const {
verifier?.(t, result);
break;
}
case 'object': {
const streamObjects: StreamObject[] = [];
for await (const chunk of provider.streamObject(
{ modelId: prompt.model },
[
...prompt.finish(
messages.reduce(
(acc, m) => Object.assign(acc, (m as any).params || {}),
{}
)
),
...messages,
],
finalConfig
)) {
streamObjects.push(chunk);
}
t.truthy(streamObjects, 'should return result');
verifier?.(t, JSON.stringify(streamObjects));
break;
}
case 'image': {
const finalMessage = [...messages];
const params = {};

View File

@ -39,6 +39,7 @@ import {
array2sse,
audioTranscription,
chatWithImages,
chatWithStreamObject,
chatWithText,
chatWithTextStream,
chatWithWorkflow,
@ -47,7 +48,11 @@ import {
createCopilotContext,
createCopilotMessage,
createCopilotSession,
createDocCopilotSession,
createPinnedCopilotSession,
createWorkspaceCopilotSession,
forkCopilotSession,
getCopilotSession,
getHistories,
listContext,
listContextDocAndFiles,
@ -301,12 +306,8 @@ test('should fork session correctly', async t => {
// prepare session
const { id } = await createWorkspace(app);
const sessionId = await createCopilotSession(
app,
id,
randomUUID(),
textPromptName
);
const docId = randomUUID();
const sessionId = await createCopilotSession(app, id, docId, textPromptName);
let forkedSessionId: string;
// should be able to fork session
@ -315,7 +316,7 @@ test('should fork session correctly', async t => {
const messageId = await createCopilotMessage(app, sessionId);
await chatWithText(app, sessionId, messageId);
}
const histories = await getHistories(app, { workspaceId: id });
const histories = await getHistories(app, { workspaceId: id, docId });
const latestMessageId = histories[0].messages.findLast(
m => m.role === 'assistant'
)?.id;
@ -374,7 +375,7 @@ test('should fork session correctly', async t => {
});
await app.switchUser(u1);
const histories = await getHistories(app, { workspaceId: id });
const histories = await getHistories(app, { workspaceId: id, docId });
const latestMessageId = histories
.find(h => h.sessionId === forkedSessionId)
?.messages.findLast(m => m.role === 'assistant')?.id;
@ -512,6 +513,28 @@ test('should be able to chat with api', async t => {
);
}
{
const sessionId = await createCopilotSession(
app,
id,
randomUUID(),
textPromptName
);
const messageId = await createCopilotMessage(app, sessionId);
const ret4 = await chatWithStreamObject(app, sessionId, messageId);
const objects = Array.from('generate text to object stream').map(data =>
JSON.stringify({ type: 'text-delta', textDelta: data })
);
t.is(
ret4,
textToEventStream(objects, messageId),
'should be able to chat with stream object'
);
}
Sinon.restore();
});
@ -589,10 +612,11 @@ test('should be able to retry with api', async t => {
// normal chat
{
const { id } = await createWorkspace(app);
const docId = randomUUID();
const sessionId = await createCopilotSession(
app,
id,
randomUUID(),
docId,
textPromptName
);
const messageId = await createCopilotMessage(app, sessionId);
@ -600,7 +624,7 @@ test('should be able to retry with api', async t => {
await chatWithText(app, sessionId, messageId);
await chatWithText(app, sessionId, messageId);
const histories = await getHistories(app, { workspaceId: id });
const histories = await getHistories(app, { workspaceId: id, docId });
t.deepEqual(
histories.map(h => h.messages.map(m => m.content)),
[['generate text to text', 'generate text to text']],
@ -611,10 +635,11 @@ test('should be able to retry with api', async t => {
// retry chat
{
const { id } = await createWorkspace(app);
const docId = randomUUID();
const sessionId = await createCopilotSession(
app,
id,
randomUUID(),
docId,
textPromptName
);
const messageId = await createCopilotMessage(app, sessionId);
@ -623,7 +648,7 @@ test('should be able to retry with api', async t => {
await chatWithText(app, sessionId);
// should only have 1 message
const histories = await getHistories(app, { workspaceId: id });
const histories = await getHistories(app, { workspaceId: id, docId });
t.snapshot(
cleanObject(histories),
'should be able to list history after retry'
@ -633,10 +658,11 @@ test('should be able to retry with api', async t => {
// retry chat with new message id
{
const { id } = await createWorkspace(app);
const docId = randomUUID();
const sessionId = await createCopilotSession(
app,
id,
randomUUID(),
docId,
textPromptName
);
const messageId = await createCopilotMessage(app, sessionId);
@ -646,7 +672,7 @@ test('should be able to retry with api', async t => {
await chatWithText(app, sessionId, newMessageId, '', true);
// should only have 1 message
const histories = await getHistories(app, { workspaceId: id });
const histories = await getHistories(app, { workspaceId: id, docId });
t.snapshot(
cleanObject(histories),
'should be able to list history after retry'
@ -723,10 +749,11 @@ test('should be able to list history', async t => {
const { app } = t.context;
const { id: workspaceId } = await createWorkspace(app);
const docId = randomUUID();
const sessionId = await createCopilotSession(
app,
workspaceId,
randomUUID(),
docId,
textPromptName
);
@ -734,7 +761,7 @@ test('should be able to list history', async t => {
await chatWithText(app, sessionId, messageId);
{
const histories = await getHistories(app, { workspaceId });
const histories = await getHistories(app, { workspaceId, docId });
t.deepEqual(
histories.map(h => h.messages.map(m => m.content)),
[['hello', 'generate text to text']],
@ -745,6 +772,7 @@ test('should be able to list history', async t => {
{
const histories = await getHistories(app, {
workspaceId,
docId,
options: { messageOrder: 'desc' },
});
t.deepEqual(
@ -786,17 +814,18 @@ test('should reject request that user have not permission', async t => {
}
{
const docId = randomUUID();
const sessionId = await createCopilotSession(
app,
workspaceId,
randomUUID(),
docId,
textPromptName
);
const messageId = await createCopilotMessage(app, sessionId);
await chatWithText(app, sessionId, messageId);
const histories = await getHistories(app, { workspaceId });
const histories = await getHistories(app, { workspaceId, docId });
t.deepEqual(
histories.map(h => h.messages.map(m => m.content)),
[['generate text to text']],
@ -1049,3 +1078,93 @@ test('should be able to transcript', async t => {
}
}
});
test('should create different session types and validate prompt constraints', async t => {
const { app } = t.context;
const { id: workspaceId } = await createWorkspace(app);
const validateSession = async (
description: string,
workspaceId: string,
createPromise: Promise<string>
) => {
const sessionId = await createPromise;
t.truthy(sessionId, description);
t.snapshot(
cleanObject(
[await getCopilotSession(app, workspaceId, sessionId)],
['id', 'workspaceId', 'promptName']
),
`should create session with ${description}`
);
return sessionId;
};
await validateSession(
'should create workspace session with text prompt',
workspaceId,
createWorkspaceCopilotSession(app, workspaceId, textPromptName)
);
await validateSession(
'should create pinned session with text prompt',
workspaceId,
createPinnedCopilotSession(app, workspaceId, 'pinned-doc', textPromptName)
);
await validateSession(
'should create doc session with text prompt',
workspaceId,
createDocCopilotSession(app, workspaceId, 'normal-doc', textPromptName)
);
});
test('should list histories for different session types correctly', async t => {
const { app } = t.context;
const { id: workspaceId } = await createWorkspace(app);
const pinnedDocId = 'pinned-doc';
const docId = 'normal-doc';
// create sessions and add messages
const [workspaceSessionId, pinnedSessionId, docSessionId] = await Promise.all(
[
createWorkspaceCopilotSession(app, workspaceId, textPromptName),
createPinnedCopilotSession(app, workspaceId, pinnedDocId, textPromptName),
createDocCopilotSession(app, workspaceId, docId, textPromptName),
]
);
await Promise.all([
createCopilotMessage(app, workspaceSessionId, 'workspace message'),
createCopilotMessage(app, pinnedSessionId, 'pinned message'),
createCopilotMessage(app, docSessionId, 'doc message'),
]);
const testHistoryQuery = async (
queryDocId: string | undefined,
expectedSessionId: string,
description: string
) => {
const histories = await getHistories(app, {
workspaceId,
docId: queryDocId,
});
t.is(histories.length, 1, `should return ${description}`);
t.is(
histories[0].sessionId,
expectedSessionId,
`should return correct ${description}`
);
};
await testHistoryQuery(
undefined,
workspaceSessionId,
'workspace session history'
);
await testHistoryQuery(
pinnedDocId,
pinnedSessionId,
'pinned session history'
);
await testHistoryQuery(docId, docSessionId, 'doc session history');
});

View File

@ -275,7 +275,7 @@ test('should be able to manage chat session', async t => {
]);
const params = { word: 'world' };
const commonParams = { docId: 'test', workspaceId: 'test' };
const commonParams = { docId: 'test', workspaceId: 'test', pinned: false };
const sessionId = await session.create({
userId,
@ -342,11 +342,12 @@ test('should be able to update chat session prompt', async t => {
docId: 'test',
workspaceId: 'test',
userId,
pinned: false,
});
t.truthy(sessionId, 'should create session');
// Update the session
const updatedSessionId = await session.updateSessionPrompt({
const updatedSessionId = await session.updateSession({
sessionId,
promptName: 'Search With AFFiNE AI',
userId,
@ -371,7 +372,7 @@ test('should be able to fork chat session', async t => {
]);
const params = { word: 'world' };
const commonParams = { docId: 'test', workspaceId: 'test' };
const commonParams = { docId: 'test', workspaceId: 'test', pinned: false };
// create session
const sessionId = await session.create({
userId,
@ -494,6 +495,7 @@ test('should be able to process message id', async t => {
workspaceId: 'test',
userId,
promptName: 'prompt',
pinned: false,
});
const s = (await session.get(sessionId))!;
@ -537,6 +539,7 @@ test('should be able to generate with message id', async t => {
workspaceId: 'test',
userId,
promptName: 'prompt',
pinned: false,
});
const s = (await session.get(sessionId))!;
@ -559,6 +562,7 @@ test('should be able to generate with message id', async t => {
workspaceId: 'test',
userId,
promptName: 'prompt',
pinned: false,
});
const s = (await session.get(sessionId))!;
@ -586,6 +590,7 @@ test('should be able to generate with message id', async t => {
workspaceId: 'test',
userId,
promptName: 'prompt',
pinned: false,
});
const s = (await session.get(sessionId))!;
@ -614,6 +619,7 @@ test('should save message correctly', async t => {
workspaceId: 'test',
userId,
promptName: 'prompt',
pinned: false,
});
const s = (await session.get(sessionId))!;
@ -643,6 +649,7 @@ test('should revert message correctly', async t => {
workspaceId: 'test',
userId,
promptName: 'prompt',
pinned: false,
});
const s = (await session.get(sessionId))!;
@ -742,6 +749,7 @@ test('should handle params correctly in chat session', async t => {
workspaceId: 'test',
userId,
promptName: 'prompt',
pinned: false,
});
const s = (await session.get(sessionId))!;
@ -1506,6 +1514,7 @@ test('should be able to manage context', async t => {
workspaceId: 'test',
userId,
promptName: 'prompt',
pinned: false,
});
// use mocked embedding client
@ -1729,6 +1738,7 @@ test('should be able to manage workspace embedding', async t => {
workspaceId: ws.id,
userId,
promptName: 'prompt',
pinned: false,
});
const contextSession = await context.create(sessionId);

View File

@ -0,0 +1,118 @@
# Snapshot report for `src/__tests__/e2e/doc-service/controller.spec.ts`
The actual snapshot is saved in `controller.spec.ts.snap`.
Generated by [AVA](https://avajs.dev).
## should get doc markdown success
> Snapshot 1
{
markdown: `AFFiNE is an open source all in one workspace, an operating system for all the building blocks of your team wiki, knowledge management and digital assets and a better alternative to Notion and Miro.␊
# You own your data, with no compromises␊
## Local-first & Real-time collaborative␊
We love the idea proposed by Ink & Switch in the famous article about you owning your data, despite the cloud. Furthermore, AFFiNE is the first all-in-one workspace that keeps your data ownership with no compromises on real-time collaboration and editing experience.␊
AFFiNE is a local-first application upon CRDTs with real-time collaboration support. Your data is always stored locally while multiple nodes remain synced in real-time.␊
### Blocks that assemble your next docs, tasks kanban or whiteboard␊
There is a large overlap of their atomic "building blocks" between these apps. They are neither open source nor have a plugin system like VS Code for contributors to customize. We want to have something that contains all the features we love and goes one step further.␊
We are building AFFiNE to be a fundamental open source platform that contains all the building blocks for docs, task management and visual collaboration, hoping you can shape your next workflow with us that can make your life better and also connect others, too.␊
If you want to learn more about the product design of AFFiNE, here goes the concepts:␊
To Shape, not to adapt. AFFiNE is built for individuals & teams who care about their data, who refuse vendor lock-in, and who want to have control over their essential tools.␊
## A true canvas for blocks in any form␊
[Many editor apps](http://notion.so) claimed to be a canvas for productivity. Since _the Mother of All Demos,_ Douglas Engelbart, a creative and programable digital workspace has been a pursuit and an ultimate mission for generations of tool makers.␊
"We shape our tools and thereafter our tools shape us”. A lot of pioneers have inspired us a long the way, e.g.:␊
* Quip & Notion with their great concept of "everything is a block"␊
* Trello with their Kanban␊
* Airtable & Miro with their no-code programable datasheets␊
* Miro & Whimiscal with their edgeless visual whiteboard␊
* Remnote & Capacities with their object-based tag system␊
For more details, please refer to our [RoadMap](https://docs.affine.pro/docs/core-concepts/roadmap)␊
## Self Host␊
Self host AFFiNE␊
||Title|Tag|␊
|---|---|---|␊
|Affine Development|Affine Development|<span data-affine-option data-value="AxSe-53xjX" data-option-color="var(--affine-tag-pink)">AFFiNE</span>|␊
|For developers or installations guides, please go to AFFiNE Doc|For developers or installations guides, please go to AFFiNE Doc|<span data-affine-option data-value="0jh9gNw4Yl" data-option-color="var(--affine-tag-orange)">Developers</span>|␊
|Quip & Notion with their great concept of "everything is a block"|Quip & Notion with their great concept of "everything is a block"|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Trello with their Kanban|Trello with their Kanban|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Airtable & Miro with their no-code programable datasheets|Airtable & Miro with their no-code programable datasheets|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Miro & Whimiscal with their edgeless visual whiteboard|Miro & Whimiscal with their edgeless visual whiteboard|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Remnote & Capacities with their object-based tag system|Remnote & Capacities with their object-based tag system||␊
## Affine Development␊
For developer or installation guides, please go to [AFFiNE Development](https://docs.affine.pro/docs/development/quick-start)␊
`,
title: 'Write, Draw, Plan all at Once.',
}
## should get doc markdown return null when doc not exists
> Snapshot 1
{
code: 'Not Found',
message: 'Doc not found',
name: 'NOT_FOUND',
status: 404,
type: 'RESOURCE_NOT_FOUND',
}

View File

@ -0,0 +1,42 @@
import { randomUUID } from 'node:crypto';
import { CryptoHelper } from '../../../base';
import { app, e2e, Mockers } from '../test';
const crypto = app.get(CryptoHelper);
e2e('should get doc markdown success', async t => {
const owner = await app.signup();
const workspace = await app.create(Mockers.Workspace, {
owner,
});
const docSnapshot = await app.create(Mockers.DocSnapshot, {
workspaceId: workspace.id,
user: owner,
});
const res = await app
.GET(`/rpc/workspaces/${workspace.id}/docs/${docSnapshot.id}/markdown`)
.set('x-access-token', crypto.sign(docSnapshot.id))
.expect(200)
.expect('Content-Type', 'application/json; charset=utf-8');
t.snapshot(res.body);
});
e2e('should get doc markdown return null when doc not exists', async t => {
const owner = await app.signup();
const workspace = await app.create(Mockers.Workspace, {
owner,
});
const docId = randomUUID();
const res = await app
.GET(`/rpc/workspaces/${workspace.id}/docs/${docId}/markdown`)
.set('x-access-token', crypto.sign(docId))
.expect(404)
.expect('Content-Type', 'application/json; charset=utf-8');
t.snapshot(res.body);
});

View File

@ -0,0 +1,62 @@
import { getRecentlyUpdatedDocsQuery } from '@affine/graphql';
import { Mockers } from '../../mocks';
import { app, e2e } from '../test';
e2e('should get recently updated docs', async t => {
const owner = await app.signup();
const workspace = await app.create(Mockers.Workspace, {
owner: { id: owner.id },
});
const docSnapshot1 = await app.create(Mockers.DocSnapshot, {
workspaceId: workspace.id,
user: owner,
});
const doc1 = await app.create(Mockers.DocMeta, {
workspaceId: workspace.id,
docId: docSnapshot1.id,
title: 'doc1',
});
const docSnapshot2 = await app.create(Mockers.DocSnapshot, {
workspaceId: workspace.id,
user: owner,
});
const doc2 = await app.create(Mockers.DocMeta, {
workspaceId: workspace.id,
docId: docSnapshot2.id,
title: 'doc2',
});
const docSnapshot3 = await app.create(Mockers.DocSnapshot, {
workspaceId: workspace.id,
user: owner,
});
const doc3 = await app.create(Mockers.DocMeta, {
workspaceId: workspace.id,
docId: docSnapshot3.id,
title: 'doc3',
});
const {
workspace: { recentlyUpdatedDocs },
} = await app.gql({
query: getRecentlyUpdatedDocsQuery,
variables: {
workspaceId: workspace.id,
pagination: {
first: 10,
},
},
});
t.is(recentlyUpdatedDocs.totalCount, 3);
t.is(recentlyUpdatedDocs.edges[0].node.id, doc3.docId);
t.is(recentlyUpdatedDocs.edges[0].node.title, doc3.title);
t.is(recentlyUpdatedDocs.edges[1].node.id, doc2.docId);
t.is(recentlyUpdatedDocs.edges[1].node.title, doc2.title);
t.is(recentlyUpdatedDocs.edges[2].node.id, doc1.docId);
t.is(recentlyUpdatedDocs.edges[2].node.title, doc1.title);
});

View File

@ -0,0 +1,51 @@
# Snapshot report for `src/__tests__/e2e/indexer/search-docs.spec.ts`
The actual snapshot is saved in `search-docs.spec.ts.snap`.
Generated by [AVA](https://avajs.dev).
## should search docs by keyword
> Snapshot 1
[
{
blockId: 'block-0',
createdAt: '2025-04-22T00:00:00.000Z',
docId: 'doc-0',
highlight: 'test1 <b>hello</b>',
title: '',
updatedAt: '2025-04-22T00:00:00.000Z',
},
{
blockId: 'block-2',
createdAt: '2025-03-22T00:00:00.000Z',
docId: 'doc-2',
highlight: 'test3 <b>hello</b>',
title: '',
updatedAt: '2025-03-22T03:00:01.000Z',
},
{
blockId: 'block-1',
createdAt: '2021-04-22T00:00:00.000Z',
docId: 'doc-1',
highlight: 'test2 <b>hello</b>',
title: '',
updatedAt: '2021-04-22T00:00:00.000Z',
},
]
## should search docs by keyword with limit 1
> Snapshot 1
[
{
blockId: 'block-0',
createdAt: '2025-04-22T00:00:00.000Z',
docId: 'doc-0',
highlight: 'test1 <b>hello</b>',
title: '',
updatedAt: '2025-04-22T00:00:00.000Z',
},
]

View File

@ -0,0 +1,182 @@
import { indexerSearchDocsQuery, SearchTable } from '@affine/graphql';
import { omit } from 'lodash-es';
import { IndexerService } from '../../../plugins/indexer/service';
import { Mockers } from '../../mocks';
import { app, e2e } from '../test';
e2e('should search docs by keyword', async t => {
const owner = await app.signup();
const workspace = await app.create(Mockers.Workspace, {
owner,
});
const indexerService = app.get(IndexerService);
await indexerService.write(
SearchTable.block,
[
{
docId: 'doc-0',
workspaceId: workspace.id,
content: 'test1 hello',
flavour: 'markdown',
blockId: 'block-0',
createdByUserId: owner.id,
updatedByUserId: owner.id,
createdAt: new Date('2025-04-22T00:00:00.000Z'),
updatedAt: new Date('2025-04-22T00:00:00.000Z'),
},
{
docId: 'doc-1',
workspaceId: workspace.id,
content: 'test2 hello',
flavour: 'markdown',
blockId: 'block-1',
refDocId: ['doc-0'],
ref: ['{"foo": "bar1"}'],
createdByUserId: owner.id,
updatedByUserId: owner.id,
createdAt: new Date('2021-04-22T00:00:00.000Z'),
updatedAt: new Date('2021-04-22T00:00:00.000Z'),
},
{
docId: 'doc-2',
workspaceId: workspace.id,
content: 'test3 hello',
flavour: 'markdown',
blockId: 'block-2',
refDocId: ['doc-0', 'doc-2'],
ref: ['{"foo": "bar1"}', '{"foo": "bar3"}'],
createdByUserId: owner.id,
updatedByUserId: owner.id,
createdAt: new Date('2025-03-22T00:00:00.000Z'),
updatedAt: new Date('2025-03-22T03:00:01.000Z'),
},
],
{
refresh: true,
}
);
const result = await app.gql({
query: indexerSearchDocsQuery,
variables: {
id: workspace.id,
input: {
keyword: 'hello',
},
},
});
t.is(result.workspace.searchDocs.length, 3);
t.snapshot(
result.workspace.searchDocs.map(doc =>
omit(doc, 'createdByUser', 'updatedByUser')
)
);
});
e2e('should search docs by keyword with limit 1', async t => {
const owner = await app.signup();
const workspace = await app.create(Mockers.Workspace, {
owner,
});
const indexerService = app.get(IndexerService);
await indexerService.write(
SearchTable.block,
[
{
docId: 'doc-0',
workspaceId: workspace.id,
content: 'test1 hello',
flavour: 'markdown',
blockId: 'block-0',
createdByUserId: owner.id,
updatedByUserId: owner.id,
createdAt: new Date('2025-04-22T00:00:00.000Z'),
updatedAt: new Date('2025-04-22T00:00:00.000Z'),
},
{
docId: 'doc-1',
workspaceId: workspace.id,
content: 'test2 hello',
flavour: 'markdown',
blockId: 'block-1',
refDocId: ['doc-0'],
ref: ['{"foo": "bar1"}'],
createdByUserId: owner.id,
updatedByUserId: owner.id,
createdAt: new Date('2021-04-22T00:00:00.000Z'),
updatedAt: new Date('2021-04-22T00:00:00.000Z'),
},
{
docId: 'doc-2',
workspaceId: workspace.id,
content: 'test3 hello',
flavour: 'markdown',
blockId: 'block-2',
refDocId: ['doc-0', 'doc-2'],
ref: ['{"foo": "bar1"}', '{"foo": "bar3"}'],
createdByUserId: owner.id,
updatedByUserId: owner.id,
createdAt: new Date('2025-03-22T00:00:00.000Z'),
updatedAt: new Date('2025-03-22T03:00:01.000Z'),
},
],
{
refresh: true,
}
);
const result = await app.gql({
query: indexerSearchDocsQuery,
variables: {
id: workspace.id,
input: {
keyword: 'hello',
limit: 1,
},
},
});
t.is(result.workspace.searchDocs.length, 1);
t.snapshot(
result.workspace.searchDocs.map(doc =>
omit(doc, 'createdByUser', 'updatedByUser')
)
);
});
e2e(
'should search docs by keyword failed when workspace is no permission',
async t => {
const owner = await app.signup();
const workspace = await app.create(Mockers.Workspace, {
owner,
});
// signup another user
await app.signup();
await t.throwsAsync(
app.gql({
query: indexerSearchDocsQuery,
variables: {
id: workspace.id,
input: {
keyword: 'hello',
},
},
}),
{
message: /You do not have permission to access Space/,
}
);
}
);

View File

@ -9,6 +9,7 @@ import {
ModelInputType,
ModelOutputType,
PromptMessage,
StreamObject,
} from '../../plugins/copilot/providers';
import {
DEFAULT_DIMENSIONS,
@ -23,7 +24,7 @@ export class MockCopilotProvider extends OpenAIProvider {
capabilities: [
{
input: [ModelInputType.Text],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
defaultForOutputType: true,
},
],
@ -43,7 +44,7 @@ export class MockCopilotProvider extends OpenAIProvider {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -52,7 +53,7 @@ export class MockCopilotProvider extends OpenAIProvider {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -61,7 +62,7 @@ export class MockCopilotProvider extends OpenAIProvider {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -70,7 +71,7 @@ export class MockCopilotProvider extends OpenAIProvider {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -79,7 +80,11 @@ export class MockCopilotProvider extends OpenAIProvider {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text, ModelOutputType.Structured],
output: [
ModelOutputType.Text,
ModelOutputType.Object,
ModelOutputType.Structured,
],
},
],
},
@ -98,7 +103,11 @@ export class MockCopilotProvider extends OpenAIProvider {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text, ModelOutputType.Structured],
output: [
ModelOutputType.Text,
ModelOutputType.Object,
ModelOutputType.Structured,
],
},
],
},
@ -195,4 +204,24 @@ export class MockCopilotProvider extends OpenAIProvider {
await sleep(100);
return [Array.from(randomBytes(options.dimensions)).map(v => v % 128)];
}
override async *streamObject(
cond: ModelConditions,
messages: PromptMessage[],
options: CopilotChatOptions = {}
): AsyncIterable<StreamObject> {
const fullCond = { ...cond, outputType: ModelOutputType.Object };
await this.checkParams({ messages, cond: fullCond, options });
// leave a small time gap for the history test cases
await sleep(100);
const result = 'generate text to object stream';
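// Emit one text-delta stream object per character; consumers reassemble the
// chunks (see checkStreamObjects in the provider tests and the stream-object
// session e2e test in this diff).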
for (const data of result) {
yield { type: 'text-delta', textDelta: data } as const;
if (options.signal?.aborted) {
break;
}
}
}
}

View File

@ -0,0 +1,214 @@
# Snapshot report for `src/__tests__/models/copilot-session.spec.ts`
The actual snapshot is saved in `copilot-session.spec.ts.snap`.
Generated by [AVA](https://avajs.dev).
## should list and filter session type
> workspace sessions should include workspace and pinned sessions
[
{
docId: null,
pinned: true,
},
{
docId: null,
pinned: false,
},
]
> doc sessions should only include sessions with matching docId
[
{
docId: 'doc-id-1',
pinned: false,
promptName: 'action-prompt',
},
{
docId: 'doc-id-1',
pinned: false,
promptName: 'test-prompt',
},
]
> session type identification results
[
{
session: {
docId: null,
pinned: false,
},
type: 'workspace',
},
{
session: {
docId: undefined,
pinned: false,
},
type: 'workspace',
},
{
session: {
docId: null,
pinned: true,
},
type: 'pinned',
},
{
session: {
docId: 'doc-id-1',
pinned: false,
},
type: 'doc',
},
]
## should pin and unpin sessions
> session states after creating second pinned session
[
{
docId: null,
id: 'first-session-id',
pinned: false,
},
{
docId: null,
id: 'second-session-id',
pinned: true,
},
]
> should return false when no sessions to unpin
false
> all sessions should be unpinned after unpin operation
[
{
id: 'first-session-id',
pinned: false,
},
{
id: 'second-session-id',
pinned: false,
},
{
id: 'third-session-id',
pinned: false,
},
]
## should handle session updates and validations
> should unpin existing when pinning new session
[
{
docId: null,
id: 'session-update-id',
pinned: true,
},
{
docId: null,
id: 'existing-pinned-session-id',
pinned: false,
},
]
> session type conversion steps
[
{
session: {
docId: 'doc-update-id',
pinned: false,
},
step: 'pinned_to_doc',
type: 'doc',
},
{
session: {
docId: null,
pinned: false,
},
step: 'doc_to_workspace',
type: 'workspace',
},
{
session: {
docId: null,
pinned: true,
},
step: 'workspace_to_pinned',
type: 'pinned',
},
]
## session updates and type conversions
> session states after pinning - should unpin existing
[
{
docId: null,
id: 'session-update-id',
pinned: true,
},
{
docId: null,
id: 'existing-pinned-session-id',
pinned: false,
},
]
> session state after unpinning
{
docId: null,
id: 'session-update-id',
pinned: false,
}
> session type conversion steps
[
{
session: {
docId: 'doc-update-id',
pinned: false,
},
step: 'workspace_to_doc',
type: 'doc',
},
{
session: {
docId: 'doc-update-id',
pinned: true,
},
step: 'doc_to_pinned',
type: 'pinned',
},
{
session: {
docId: null,
pinned: false,
},
step: 'pinned_to_workspace',
type: 'workspace',
},
{
session: {
docId: null,
pinned: true,
},
step: 'workspace_to_pinned',
type: 'pinned',
},
]

View File

@ -5,12 +5,14 @@ import ava, { TestFn } from 'ava';
import Sinon from 'sinon';
import { Config } from '../../base';
import { ContextEmbedStatus } from '../../models/common/copilot';
import { CopilotContextModel } from '../../models/copilot-context';
import { CopilotSessionModel } from '../../models/copilot-session';
import { CopilotWorkspaceConfigModel } from '../../models/copilot-workspace';
import { UserModel } from '../../models/user';
import { WorkspaceModel } from '../../models/workspace';
import {
ContextEmbedStatus,
CopilotContextModel,
CopilotSessionModel,
CopilotWorkspaceConfigModel,
UserModel,
WorkspaceModel,
} from '../../models';
import { createTestingModule, type TestingModule } from '../utils';
import { cleanObject } from '../utils/copilot';
@ -46,7 +48,7 @@ let docId = 'doc1';
test.beforeEach(async t => {
await t.context.module.initTestingDB();
await t.context.copilotSession.createPrompt('prompt-name', 'gpt-4o');
await t.context.copilotSession.createPrompt('prompt-name', 'gpt-4.1');
user = await t.context.user.create({
email: 'test@affine.pro',
});
@ -57,6 +59,7 @@ test.beforeEach(async t => {
docId,
userId: user.id,
promptName: 'prompt-name',
promptAction: null,
});
});

View File

@ -0,0 +1,461 @@
import { randomUUID } from 'node:crypto';
import { PrismaClient, User, Workspace } from '@prisma/client';
import ava, { ExecutionContext, TestFn } from 'ava';
import { CopilotPromptInvalid, CopilotSessionInvalidInput } from '../../base';
import {
CopilotSessionModel,
UpdateChatSessionData,
UserModel,
WorkspaceModel,
} from '../../models';
import { createTestingModule, type TestingModule } from '../utils';
import { cleanObject } from '../utils/copilot';
interface Context {
module: TestingModule;
db: PrismaClient;
user: UserModel;
workspace: WorkspaceModel;
copilotSession: CopilotSessionModel;
}
const test = ava as TestFn<Context>;
test.before(async t => {
const module = await createTestingModule();
t.context.user = module.get(UserModel);
t.context.workspace = module.get(WorkspaceModel);
t.context.copilotSession = module.get(CopilotSessionModel);
t.context.db = module.get(PrismaClient);
t.context.module = module;
});
let user: User;
let workspace: Workspace;
test.beforeEach(async t => {
await t.context.module.initTestingDB();
user = await t.context.user.create({
email: 'test@affine.pro',
});
workspace = await t.context.workspace.create(user.id);
});
test.after(async t => {
await t.context.module.close();
});
const createTestPrompts = async (
copilotSession: CopilotSessionModel,
db: PrismaClient
) => {
await copilotSession.createPrompt('test-prompt', 'gpt-4.1');
await db.aiPrompt.create({
data: { name: 'action-prompt', model: 'gpt-4.1', action: 'edit' },
});
};
const createTestSession = async (
t: ExecutionContext<Context>,
overrides: Partial<{
sessionId: string;
userId: string;
workspaceId: string;
docId: string | null;
pinned: boolean;
promptName: string;
promptAction: string | null;
}> = {}
) => {
const sessionData = {
sessionId: randomUUID(),
userId: user.id,
workspaceId: workspace.id,
docId: null,
pinned: false,
promptName: 'test-prompt',
promptAction: null,
...overrides,
};
await t.context.copilotSession.create(sessionData);
return sessionData;
};
const getSessionState = async (db: PrismaClient, sessionId: string) => {
const session = await db.aiSession.findUnique({
where: { id: sessionId },
select: { id: true, pinned: true, docId: true },
});
return session;
};
test('should list and filter session type', async t => {
const { copilotSession, db } = t.context;
await createTestPrompts(copilotSession, db);
const docId = 'doc-id-1';
await createTestSession(t, { sessionId: randomUUID() });
await createTestSession(t, { sessionId: randomUUID(), pinned: true });
await createTestSession(t, { sessionId: randomUUID(), docId });
await createTestSession(t, {
sessionId: randomUUID(),
docId,
promptName: 'action-prompt',
promptAction: 'action',
});
// should list sessions
{
const workspaceSessions = await copilotSession.list({
userId: user.id,
workspaceId: workspace.id,
});
t.snapshot(
workspaceSessions.map(s => ({ docId: s.docId, pinned: s.pinned })),
'workspace sessions should include workspace and pinned sessions'
);
}
{
const docSessions = await copilotSession.list({
userId: user.id,
workspaceId: workspace.id,
docId,
});
t.snapshot(
cleanObject(
docSessions.toSorted((a, b) =>
a.docId!.localeCompare(b.docId!, undefined, { numeric: true })
),
['id', 'userId', 'workspaceId', 'createdAt', 'tokenCost']
),
'doc sessions should only include sessions with matching docId'
);
}
// should identify session types
{
// check get session type
const testCases = [
{ docId: null, pinned: false },
{ docId: undefined, pinned: false },
{ docId: null, pinned: true },
{ docId, pinned: false },
];
const sessionTypeResults = testCases.map(session => ({
session,
type: copilotSession.getSessionType(session),
}));
t.snapshot(sessionTypeResults, 'session type identification results');
}
});
test('should check session validation for prompts', async t => {
const { copilotSession, db } = t.context;
await createTestPrompts(copilotSession, db);
const docId = randomUUID();
const sessionTypes = [
{ name: 'workspace', session: { docId: null, pinned: false } },
{ name: 'pinned', session: { docId: null, pinned: true } },
{ name: 'doc', session: { docId, pinned: false } },
];
// non-action prompts should work for all session types
sessionTypes.forEach(({ name, session }) => {
t.notThrows(
() =>
copilotSession.checkSessionPrompt(session, 'test-prompt', undefined),
`${name} session should allow non-action prompts`
);
});
// action prompts should only work for doc session type
{
const actionPromptTests = [
{
name: 'workspace',
session: sessionTypes[0].session,
shouldThrow: true,
},
{ name: 'pinned', session: sessionTypes[1].session, shouldThrow: true },
{ name: 'doc', session: sessionTypes[2].session, shouldThrow: false },
];
actionPromptTests.forEach(({ name, session, shouldThrow }) => {
if (shouldThrow) {
t.throws(
() =>
copilotSession.checkSessionPrompt(session, 'action-prompt', 'edit'),
{ instanceOf: CopilotPromptInvalid },
`${name} session should reject action prompts`
);
} else {
t.notThrows(
() =>
copilotSession.checkSessionPrompt(session, 'action-prompt', 'edit'),
`${name} session should allow action prompts`
);
}
});
}
});
test('should pin and unpin sessions', async t => {
const { copilotSession, db } = t.context;
await createTestPrompts(copilotSession, db);
const firstSessionId = 'first-session-id';
const secondSessionId = 'second-session-id';
const thirdSessionId = 'third-session-id';
// should unpin existing pinned session when creating a new one
{
await copilotSession.create({
sessionId: firstSessionId,
userId: user.id,
workspaceId: workspace.id,
docId: null,
promptName: 'test-prompt',
promptAction: null,
pinned: true,
});
const firstSession = await copilotSession.get(firstSessionId);
t.truthy(firstSession, 'first session should be created successfully');
t.is(firstSession?.pinned, true, 'first session should be pinned');
// should unpin the first one when creating second pinned session
await copilotSession.create({
sessionId: secondSessionId,
userId: user.id,
workspaceId: workspace.id,
docId: null,
promptName: 'test-prompt',
promptAction: null,
pinned: true,
});
const sessionStatesAfterSecondPin = await Promise.all([
getSessionState(db, firstSessionId),
getSessionState(db, secondSessionId),
]);
t.snapshot(
sessionStatesAfterSecondPin,
'session states after creating second pinned session'
);
}
// should be able to unpin a pinned session
{
await createTestSession(t, { sessionId: thirdSessionId, pinned: true });
const unpinResult = await copilotSession.unpin(workspace.id, user.id);
t.is(
unpinResult,
true,
'unpin operation should return true when sessions are unpinned'
);
const unpinResultAgain = await copilotSession.unpin(workspace.id, user.id);
t.snapshot(
unpinResultAgain,
'should return false when no sessions to unpin'
);
}
// should unpin all sessions
{
const allSessionsAfterUnpin = await db.aiSession.findMany({
where: { id: { in: [firstSessionId, secondSessionId, thirdSessionId] } },
select: { pinned: true, id: true },
orderBy: { id: 'asc' },
});
t.snapshot(
allSessionsAfterUnpin,
'all sessions should be unpinned after unpin operation'
);
}
});
test('should handle session updates and validations', async t => {
const { copilotSession, db } = t.context;
await createTestPrompts(copilotSession, db);
const sessionId = 'session-update-id';
const actionSessionId = 'action-session-id';
const parentSessionId = 'parent-session-id';
const forkedSessionId = 'forked-session-id';
const docId = 'doc-update-id';
await createTestSession(t, { sessionId });
await createTestSession(t, {
sessionId: actionSessionId,
promptName: 'action-prompt',
promptAction: 'edit',
docId: 'some-doc',
});
await createTestSession(t, {
sessionId: parentSessionId,
docId: 'parent-doc',
});
await db.aiSession.create({
data: {
id: forkedSessionId,
workspaceId: workspace.id,
userId: user.id,
docId: 'forked-doc',
pinned: false,
promptName: 'test-prompt',
promptAction: null,
parentSessionId: parentSessionId,
},
});
const assertUpdateThrows = async (
t: ExecutionContext<Context>,
sessionId: string,
updateData: UpdateChatSessionData,
message: string
) => {
await t.throwsAsync(
t.context.copilotSession.update(user.id, sessionId, updateData),
{ instanceOf: CopilotSessionInvalidInput },
message
);
};
const assertUpdate = async (
t: ExecutionContext<Context>,
sessionId: string,
updateData: UpdateChatSessionData,
message: string
) => {
await t.notThrowsAsync(
t.context.copilotSession.update(user.id, sessionId, updateData),
message
);
};
// case 1: action sessions should reject all updates
{
const actionUpdates = [
{ docId: 'new-doc' },
{ pinned: true },
{ promptName: 'test-prompt' },
];
for (const data of actionUpdates) {
await assertUpdateThrows(
t,
actionSessionId,
data,
`action session should reject update: ${JSON.stringify(data)}`
);
}
}
// case 2: forked sessions should reject docId updates but allow others
{
await assertUpdate(
t,
forkedSessionId,
{ pinned: true },
'forked session should allow pinned update'
);
await assertUpdate(
t,
forkedSessionId,
{ promptName: 'test-prompt' },
'forked session should allow promptName update'
);
await assertUpdateThrows(
t,
forkedSessionId,
{ docId: 'new-doc' },
'forked session should reject docId update'
);
}
{
// case 3: prompt update validation
await assertUpdate(
t,
sessionId,
{ promptName: 'test-prompt' },
'should allow valid non-action prompt'
);
await assertUpdateThrows(
t,
sessionId,
{ promptName: 'action-prompt' },
'should reject action prompt'
);
await assertUpdateThrows(
t,
sessionId,
{ promptName: 'non-existent-prompt' },
'should reject non-existent prompt'
);
}
// case 4: session type conversions and pinning behavior
{
const existingPinnedId = 'existing-pinned-session-id';
await createTestSession(t, { sessionId: existingPinnedId, pinned: true });
// should unpin existing when pinning new session
await copilotSession.update(user.id, sessionId, { pinned: true });
const sessionStatesAfterPin = await Promise.all([
getSessionState(db, sessionId),
getSessionState(db, existingPinnedId),
]);
t.snapshot(
sessionStatesAfterPin,
'should unpin existing when pinning new session'
);
}
// test type conversions
{
const conversionSteps: any[] = [];
const convertSession = async (
step: string,
data: UpdateChatSessionData
) => {
await copilotSession.update(user.id, sessionId, data);
const session = await db.aiSession.findUnique({
where: { id: sessionId },
select: { docId: true, pinned: true },
});
conversionSteps.push({
step,
session,
type: copilotSession.getSessionType(session!),
});
};
const conversions = [
['pinned_to_doc', { docId, pinned: false }],
['doc_to_workspace', { docId: null }],
['workspace_to_pinned', { pinned: true }],
] as const;
for (const [step, data] of conversions) {
await convertSession(step, data);
}
t.snapshot(conversionSteps, 'session type conversion steps');
}
});

View File

@@ -20,8 +20,9 @@ export const cleanObject = (
export async function createCopilotSession(
app: TestingApp,
workspaceId: string,
docId: string,
promptName: string
docId: string | null,
promptName: string,
pinned: boolean = false
): Promise<string> {
const res = await app.gql(
`
@@ -29,12 +30,73 @@
createCopilotSession(options: $options)
}
`,
{ options: { workspaceId, docId, promptName } }
{ options: { workspaceId, docId, promptName, pinned } }
);
return res.createCopilotSession;
}
export async function createWorkspaceCopilotSession(
app: TestingApp,
workspaceId: string,
promptName: string
): Promise<string> {
return createCopilotSession(app, workspaceId, null, promptName);
}
export async function createPinnedCopilotSession(
app: TestingApp,
workspaceId: string,
docId: string,
promptName: string
): Promise<string> {
return createCopilotSession(app, workspaceId, docId, promptName, true);
}
export async function createDocCopilotSession(
app: TestingApp,
workspaceId: string,
docId: string,
promptName: string
): Promise<string> {
return createCopilotSession(app, workspaceId, docId, promptName);
}
export async function getCopilotSession(
app: TestingApp,
workspaceId: string,
sessionId: string
): Promise<{
id: string;
docId: string | null;
parentSessionId: string | null;
pinned: boolean;
promptName: string;
}> {
const res = await app.gql(
`
query getCopilotSession(
$workspaceId: String!
$sessionId: String!
) {
currentUser {
copilot(workspaceId: $workspaceId) {
session(sessionId: $sessionId) {
id
docId
parentSessionId
pinned
promptName
}
}
}
}`,
{ workspaceId, sessionId }
);
return res.currentUser?.copilot?.session;
}
export async function updateCopilotSession(
app: TestingApp,
sessionId: string,
@@ -582,6 +644,14 @@ export async function chatWithImages(
return chatWithText(app, sessionId, messageId, '/images');
}
export async function chatWithStreamObject(
app: TestingApp,
sessionId: string,
messageId?: string
) {
return chatWithText(app, sessionId, messageId, '/stream-object');
}
export async function unsplashSearch(
app: TestingApp,
params: Record<string, string> = {}

View File

@@ -643,6 +643,10 @@ export const USER_FRIENDLY_ERRORS = {
type: 'resource_not_found',
message: `Copilot session not found.`,
},
copilot_session_invalid_input: {
type: 'invalid_input',
message: `Copilot session input is invalid.`,
},
copilot_session_deleted: {
type: 'action_forbidden',
message: `Copilot session has been deleted.`,

View File

@@ -657,6 +657,12 @@ export class CopilotSessionNotFound extends UserFriendlyError {
}
}
export class CopilotSessionInvalidInput extends UserFriendlyError {
constructor(message?: string) {
super('invalid_input', 'copilot_session_invalid_input', message);
}
}
export class CopilotSessionDeleted extends UserFriendlyError {
constructor(message?: string) {
super('action_forbidden', 'copilot_session_deleted', message);
@@ -1145,6 +1151,7 @@ export enum ErrorNames {
WORKSPACE_ID_REQUIRED_FOR_TEAM_SUBSCRIPTION,
WORKSPACE_ID_REQUIRED_TO_UPDATE_TEAM_SUBSCRIPTION,
COPILOT_SESSION_NOT_FOUND,
COPILOT_SESSION_INVALID_INPUT,
COPILOT_SESSION_DELETED,
NO_COPILOT_PROVIDER_AVAILABLE,
COPILOT_FAILED_TO_GENERATE_TEXT,

View File

@@ -175,6 +175,7 @@ test('should get doc content in json format', async t => {
await app
.GET(`/rpc/workspaces/${workspace.id}/docs/${docId}/content`)
.set('x-access-token', t.context.crypto.sign(docId))
.expect('Content-Type', 'application/json; charset=utf-8')
.expect({
title: 'test title',
summary: 'test summary',
@@ -184,6 +185,7 @@ test('should get doc content in json format', async t => {
await app
.GET(`/rpc/workspaces/${workspace.id}/docs/${docId}/content?full=false`)
.set('x-access-token', t.context.crypto.sign(docId))
.expect('Content-Type', 'application/json; charset=utf-8')
.expect({
title: 'test title',
summary: 'test summary',
@@ -205,6 +207,7 @@ test('should get full doc content in json format', async t => {
await app
.GET(`/rpc/workspaces/${workspace.id}/docs/${docId}/content?full=true`)
.set('x-access-token', t.context.crypto.sign(docId))
.expect('Content-Type', 'application/json; charset=utf-8')
.expect({
title: 'test title',
summary: 'test summary full',
@@ -251,3 +254,44 @@ test('should get workspace content in json format', async t => {
});
t.pass();
});
test('should get doc markdown in json format', async t => {
const { app } = t.context;
mock.method(t.context.databaseDocReader, 'getDocMarkdown', async () => {
return {
title: 'test title',
markdown: 'test markdown',
};
});
const docId = randomUUID();
await app
.GET(`/rpc/workspaces/${workspace.id}/docs/${docId}/markdown`)
.set('x-access-token', t.context.crypto.sign(docId))
.expect('Content-Type', 'application/json; charset=utf-8')
.expect(200)
.expect({
title: 'test title',
markdown: 'test markdown',
});
t.pass();
});
test('should 404 when doc markdown not found', async t => {
const { app } = t.context;
const workspaceId = '123';
const docId = '123';
await app
.GET(`/rpc/workspaces/${workspaceId}/docs/${docId}/markdown`)
.set('x-access-token', t.context.crypto.sign(docId))
.expect({
status: 404,
code: 'Not Found',
type: 'RESOURCE_NOT_FOUND',
name: 'NOT_FOUND',
message: 'Doc not found',
})
.expect(404);
t.pass();
});

View File

@@ -42,6 +42,20 @@ export class DocRpcController {
res.send(doc.bin);
}
@SkipThrottle()
@Internal()
@Get('/workspaces/:workspaceId/docs/:docId/markdown')
async getDocMarkdown(
@Param('workspaceId') workspaceId: string,
@Param('docId') docId: string
) {
const result = await this.docReader.getDocMarkdown(workspaceId, docId);
if (!result) {
throw new NotFound('Doc not found');
}
return result;
}
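// Usage sketch (comments only; not part of this controller): how another
// service could call this internal route, mirroring the tests above.
// `endpoint` and `sign` stand in for the real docService config and the
// CryptoHelper wiring; the token is signed over the docId as in the tests.
//
//   async function fetchDocMarkdown(
//     endpoint: string,
//     sign: (docId: string) => string,
//     workspaceId: string,
//     docId: string
//   ): Promise<{ title: string; markdown: string } | null> {
//     const res = await fetch(
//       `${endpoint}/rpc/workspaces/${workspaceId}/docs/${docId}/markdown`,
//       { headers: { 'x-access-token': sign(docId) } }
//     );
//     if (res.status === 404) return null; // NotFound('Doc not found')
//     return (await res.json()) as { title: string; markdown: string };
//   }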
@SkipThrottle()
@Internal()
@Post('/workspaces/:workspaceId/docs/:docId/diff')

View File

@@ -0,0 +1,106 @@
# Snapshot report for `src/core/doc/__tests__/reader-from-database.spec.ts`
The actual snapshot is saved in `reader-from-database.spec.ts.snap`.
Generated by [AVA](https://avajs.dev).
## should return doc markdown success
> Snapshot 1
{
markdown: `AFFiNE is an open source all in one workspace, an operating system for all the building blocks of your team wiki, knowledge management and digital assets and a better alternative to Notion and Miro.␊
# You own your data, with no compromises␊
## Local-first & Real-time collaborative␊
We love the idea proposed by Ink & Switch in the famous article about you owning your data, despite the cloud. Furthermore, AFFiNE is the first all-in-one workspace that keeps your data ownership with no compromises on real-time collaboration and editing experience.␊
AFFiNE is a local-first application upon CRDTs with real-time collaboration support. Your data is always stored locally while multiple nodes remain synced in real-time.␊
### Blocks that assemble your next docs, tasks kanban or whiteboard␊
There is a large overlap of their atomic "building blocks" between these apps. They are neither open source nor have a plugin system like VS Code for contributors to customize. We want to have something that contains all the features we love and goes one step further.␊
We are building AFFiNE to be a fundamental open source platform that contains all the building blocks for docs, task management and visual collaboration, hoping you can shape your next workflow with us that can make your life better and also connect others, too.␊
If you want to learn more about the product design of AFFiNE, here goes the concepts:␊
To Shape, not to adapt. AFFiNE is built for individuals & teams who care about their data, who refuse vendor lock-in, and who want to have control over their essential tools.␊
## A true canvas for blocks in any form␊
[Many editor apps](http://notion.so) claimed to be a canvas for productivity. Since _the Mother of All Demos,_ Douglas Engelbart, a creative and programable digital workspace has been a pursuit and an ultimate mission for generations of tool makers.␊
"We shape our tools and thereafter our tools shape us”. A lot of pioneers have inspired us a long the way, e.g.:␊
* Quip & Notion with their great concept of "everything is a block"␊
* Trello with their Kanban␊
* Airtable & Miro with their no-code programable datasheets␊
* Miro & Whimiscal with their edgeless visual whiteboard␊
* Remnote & Capacities with their object-based tag system␊
For more details, please refer to our [RoadMap](https://docs.affine.pro/docs/core-concepts/roadmap)␊
## Self Host␊
Self host AFFiNE␊
||Title|Tag|␊
|---|---|---|␊
|Affine Development|Affine Development|<span data-affine-option data-value="AxSe-53xjX" data-option-color="var(--affine-tag-pink)">AFFiNE</span>|␊
|For developers or installations guides, please go to AFFiNE Doc|For developers or installations guides, please go to AFFiNE Doc|<span data-affine-option data-value="0jh9gNw4Yl" data-option-color="var(--affine-tag-orange)">Developers</span>|␊
|Quip & Notion with their great concept of "everything is a block"|Quip & Notion with their great concept of "everything is a block"|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Trello with their Kanban|Trello with their Kanban|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Airtable & Miro with their no-code programable datasheets|Airtable & Miro with their no-code programable datasheets|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Miro & Whimiscal with their edgeless visual whiteboard|Miro & Whimiscal with their edgeless visual whiteboard|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Remnote & Capacities with their object-based tag system|Remnote & Capacities with their object-based tag system||␊
## Affine Development␊
For developer or installation guides, please go to [AFFiNE Development](https://docs.affine.pro/docs/development/quick-start)␊
`,
title: 'Write, Draw, Plan all at Once.',
}

View File

@@ -0,0 +1,106 @@
# Snapshot report for `src/core/doc/__tests__/reader-from-rpc.spec.ts`
The actual snapshot is saved in `reader-from-rpc.spec.ts.snap`.
Generated by [AVA](https://avajs.dev).
## should return doc markdown success
> Snapshot 1
{
markdown: `AFFiNE is an open source all in one workspace, an operating system for all the building blocks of your team wiki, knowledge management and digital assets and a better alternative to Notion and Miro.␊
# You own your data, with no compromises␊
## Local-first & Real-time collaborative␊
We love the idea proposed by Ink & Switch in the famous article about you owning your data, despite the cloud. Furthermore, AFFiNE is the first all-in-one workspace that keeps your data ownership with no compromises on real-time collaboration and editing experience.␊
AFFiNE is a local-first application upon CRDTs with real-time collaboration support. Your data is always stored locally while multiple nodes remain synced in real-time.␊
### Blocks that assemble your next docs, tasks kanban or whiteboard␊
There is a large overlap of their atomic "building blocks" between these apps. They are neither open source nor have a plugin system like VS Code for contributors to customize. We want to have something that contains all the features we love and goes one step further.␊
We are building AFFiNE to be a fundamental open source platform that contains all the building blocks for docs, task management and visual collaboration, hoping you can shape your next workflow with us that can make your life better and also connect others, too.␊
If you want to learn more about the product design of AFFiNE, here goes the concepts:␊
To Shape, not to adapt. AFFiNE is built for individuals & teams who care about their data, who refuse vendor lock-in, and who want to have control over their essential tools.␊
## A true canvas for blocks in any form␊
[Many editor apps](http://notion.so) claimed to be a canvas for productivity. Since _the Mother of All Demos,_ Douglas Engelbart, a creative and programable digital workspace has been a pursuit and an ultimate mission for generations of tool makers.␊
"We shape our tools and thereafter our tools shape us”. A lot of pioneers have inspired us a long the way, e.g.:␊
* Quip & Notion with their great concept of "everything is a block"␊
* Trello with their Kanban␊
* Airtable & Miro with their no-code programable datasheets␊
* Miro & Whimiscal with their edgeless visual whiteboard␊
* Remnote & Capacities with their object-based tag system␊
For more details, please refer to our [RoadMap](https://docs.affine.pro/docs/core-concepts/roadmap)␊
## Self Host␊
Self host AFFiNE␊
||Title|Tag|␊
|---|---|---|␊
|Affine Development|Affine Development|<span data-affine-option data-value="AxSe-53xjX" data-option-color="var(--affine-tag-pink)">AFFiNE</span>|␊
|For developers or installations guides, please go to AFFiNE Doc|For developers or installations guides, please go to AFFiNE Doc|<span data-affine-option data-value="0jh9gNw4Yl" data-option-color="var(--affine-tag-orange)">Developers</span>|␊
|Quip & Notion with their great concept of "everything is a block"|Quip & Notion with their great concept of "everything is a block"|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Trello with their Kanban|Trello with their Kanban|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Airtable & Miro with their no-code programable datasheets|Airtable & Miro with their no-code programable datasheets|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Miro & Whimiscal with their edgeless visual whiteboard|Miro & Whimiscal with their edgeless visual whiteboard|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Remnote & Capacities with their object-based tag system|Remnote & Capacities with their object-based tag system||␊
## Affine Development␊
For developer or installation guides, please go to [AFFiNE Development](https://docs.affine.pro/docs/development/quick-start)␊
`,
title: 'Write, Draw, Plan all at Once.',
}

View File

@@ -257,3 +257,28 @@ test('should get workspace content with custom avatar', async t => {
avatarUrl: `http://localhost:3010/api/workspaces/${workspace.id}/blobs/${avatarKey}`,
});
});
test('should return doc markdown success', async t => {
const workspace = await module.create(Mockers.Workspace, {
owner: user,
name: '',
});
const docSnapshot = await module.create(Mockers.DocSnapshot, {
workspaceId: workspace.id,
user,
});
const result = await docReader.getDocMarkdown(workspace.id, docSnapshot.id);
t.snapshot(result);
});
test('should return null when reading markdown of a missing doc', async t => {
const workspace = await module.create(Mockers.Workspace, {
owner: user,
name: '',
});
const result = await docReader.getDocMarkdown(workspace.id, randomUUID());
t.is(result, null);
});

View File

@@ -5,13 +5,24 @@ import { User, Workspace } from '@prisma/client';
import ava, { TestFn } from 'ava';
import { applyUpdate, Doc as YDoc } from 'yjs';
import { createModule } from '../../../__tests__/create-module';
import { Mockers } from '../../../__tests__/mocks';
import { createTestingApp, type TestingApp } from '../../../__tests__/utils';
import { UserFriendlyError } from '../../../base';
import { ConfigFactory } from '../../../base/config';
import { Models } from '../../../models';
import { DatabaseDocReader, DocReader, PgWorkspaceDocStorageAdapter } from '..';
import {
DatabaseDocReader,
DocReader,
DocStorageModule,
PgWorkspaceDocStorageAdapter,
} from '..';
import { RpcDocReader } from '../reader';
const module = await createModule({
imports: [DocStorageModule],
});
const test = ava as TestFn<{
models: Models;
app: TestingApp;
@@ -68,6 +79,12 @@ test.afterEach.always(() => {
test.after.always(async t => {
await t.context.app.close();
await t.context.docApp.close();
await module.close();
});
test('should be rpc reader', async t => {
const { docReader } = t.context;
t.true(docReader instanceof RpcDocReader);
});
test('should return null when doc not found', async t => {
@@ -144,7 +161,6 @@ test('should fallback to database doc reader when endpoint network error', async
test('should return doc when found', async t => {
const { docReader } = t.context;
t.true(docReader instanceof RpcDocReader);
const docId = randomUUID();
const timestamp = Date.now();
@@ -359,3 +375,32 @@ test('should return null when workspace bin meta not exists', async t => {
const notExists = await docReader.getWorkspaceContent(randomUUID());
t.is(notExists, null);
});
test('should return doc markdown success', async t => {
const { docReader } = t.context;
const workspace = await module.create(Mockers.Workspace, {
owner: user,
name: '',
});
const docSnapshot = await module.create(Mockers.DocSnapshot, {
workspaceId: workspace.id,
user,
});
const result = await docReader.getDocMarkdown(workspace.id, docSnapshot.id);
t.snapshot(result);
});
test('should return null when reading markdown of a missing doc', async t => {
const { docReader } = t.context;
const workspace = await module.create(Mockers.Workspace, {
owner: user,
name: '',
});
const result = await docReader.getDocMarkdown(workspace.id, randomUUID());
t.is(result, null);
});

View File

@@ -18,6 +18,7 @@ import { Models } from '../../models';
import { WorkspaceBlobStorage } from '../storage';
import {
type PageDocContent,
parseDocToMarkdownFromDocSnapshot,
parsePageDoc,
parseWorkspaceDoc,
} from '../utils/blocksuite';
@@ -33,6 +34,11 @@ export interface WorkspaceDocInfo {
avatarUrl?: string;
}
export interface DocMarkdown {
title: string;
markdown: string;
}
export abstract class DocReader {
protected readonly logger = new Logger(DocReader.name);
@@ -59,6 +65,11 @@ export abstract class DocReader {
docId: string
): Promise<DocRecord | null>;
abstract getDocMarkdown(
workspaceId: string,
docId: string
): Promise<DocMarkdown | null>;
abstract getDocDiff(
spaceId: string,
docId: string,
@@ -171,6 +182,17 @@ export class DatabaseDocReader extends DocReader {
return await this.workspace.getDoc(workspaceId, docId);
}
async getDocMarkdown(
workspaceId: string,
docId: string
): Promise<DocMarkdown | null> {
const doc = await this.workspace.getDoc(workspaceId, docId);
if (!doc) {
return null;
}
return parseDocToMarkdownFromDocSnapshot(workspaceId, docId, doc.bin);
}
async getDocDiff(
spaceId: string,
docId: string,
@@ -304,6 +326,33 @@ export class RpcDocReader extends DatabaseDocReader {
}
}
override async getDocMarkdown(
workspaceId: string,
docId: string
): Promise<DocMarkdown | null> {
const url = `${this.config.docService.endpoint}/rpc/workspaces/${workspaceId}/docs/${docId}/markdown`;
const accessToken = this.crypto.sign(docId);
try {
const res = await this.fetch(accessToken, url, 'GET');
if (!res) {
return null;
}
return (await res.json()) as DocMarkdown;
} catch (e) {
if (e instanceof UserFriendlyError) {
throw e;
}
const err = e as Error;
// other error
this.logger.error(
`Failed to fetch doc markdown ${url}, fallback to database doc reader`,
err
);
// fallback to database doc reader if the error is not user friendly, like network error
return await super.getDocMarkdown(workspaceId, docId);
}
}
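// Consumer sketch (comments only; not part of this class): `null` means the
// doc does not exist — the RPC reader falls back to DatabaseDocReader only on
// transport errors, never on a miss, and rethrows UserFriendlyError as-is.
//
//   const doc = await docReader.getDocMarkdown(workspaceId, docId);
//   const rendered = doc ? `# ${doc.title}\n\n${doc.markdown}` : null;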
override async getDocDiff(
workspaceId: string,
docId: string,

View File

@@ -1,4 +1,5 @@
import { Logger } from '@nestjs/common';
import { Transactional } from '@nestjs-cls/transactional';
import {
applyUpdate,
diffUpdate,
@@ -78,42 +79,55 @@ export abstract class DocStorageAdapter extends Connection {
const updates = await this.getDocUpdates(spaceId, docId);
if (updates.length) {
this.logger.log(
`Squashing updates, spaceId: ${spaceId}, docId: ${docId}, updates: ${updates.length}`
);
const { timestamp, bin, editor } = await this.squash(
snapshot ? [snapshot, ...updates] : updates
);
const newSnapshot = {
spaceId: spaceId,
return await this.squashUpdatesToSnapshot(
spaceId,
docId,
bin,
timestamp,
editor,
};
const success = await this.setDocSnapshot(newSnapshot);
// if there is an old snapshot, create a new history record
if (success && snapshot) {
await this.createDocHistory(snapshot);
}
// always mark updates as merged unless throws
const count = await this.markUpdatesMerged(spaceId, docId, updates);
if (count > 0) {
this.logger.log(
`Marked ${count} updates as merged, spaceId: ${spaceId}, docId: ${docId}`
);
}
return newSnapshot;
updates,
snapshot
);
}
return snapshot;
}
@Transactional()
private async squashUpdatesToSnapshot(
spaceId: string,
docId: string,
updates: DocUpdate[],
snapshot: DocRecord | null
) {
this.logger.log(
`Squashing updates, spaceId: ${spaceId}, docId: ${docId}, updates: ${updates.length}`
);
const { timestamp, bin, editor } = await this.squash(
snapshot ? [snapshot, ...updates] : updates
);
const newSnapshot: DocRecord = {
spaceId,
docId,
bin,
timestamp,
editor,
};
const success = await this.setDocSnapshot(newSnapshot);
// if there is old snapshot, create a new history record
if (success && snapshot) {
await this.createDocHistory(snapshot);
}
// always mark updates as merged unless throws
const count = await this.markUpdatesMerged(spaceId, docId, updates);
this.logger.log(
`Marked ${count} updates as merged, spaceId: ${spaceId}, docId: ${docId}, timestamp: ${timestamp}`
);
return newSnapshot;
}
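// At the CRDT level, squashing amounts to merging every pending update into
// one Y.Doc and re-encoding the merged state as a single update. A minimal
// standalone sketch (the real `squash` above also tracks timestamps and
// editors, and the write runs inside the transaction):
//
//   import { applyUpdate, Doc as YDoc, encodeStateAsUpdate } from 'yjs';
//
//   function squashBins(bins: Uint8Array[]): Uint8Array {
//     const doc = new YDoc();
//     for (const bin of bins) applyUpdate(doc, bin);
//     return encodeStateAsUpdate(doc);
//   }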
async getDocDiff(
spaceId: string,
docId: string,

View File

@@ -1366,3 +1366,223 @@ Generated by [AVA](https://avajs.dev).
summary: 'AFFiNE is an open source all in one workspace, an operating system for all the building blocks of your team wiki, knowledge management and digital assets and a better alternative to Notion and Miro. You own your data, with no compromisesLocal-first & Real-time collaborativeWe love the idea proposed by Ink & Switch in the famous article about you owning your data, despite the cloud. Furthermore, AFFiNE is the first all-in-one workspace that keeps your data ownership with no compromises on real-time collaboration and editing experience.AFFiNE is a local-first application upon CRDTs with real-time collaboration support. Your data is always stored locally while multiple nodes remain synced in real-time.Blocks that assemble your next docs, tasks kanban or whiteboardThere is a large overlap of their atomic "building blocks" between these apps. They are neither open source nor have a plugin system like VS Code for contributors to customize. We want to have something that contains all the features we love and goes one step further. ',
title: 'Write, Draw, Plan all at Once.',
}
## can parse doc to markdown from doc snapshot
> Snapshot 1
{
markdown: `AFFiNE is an open source all in one workspace, an operating system for all the building blocks of your team wiki, knowledge management and digital assets and a better alternative to Notion and Miro.␊
# You own your data, with no compromises␊
## Local-first & Real-time collaborative␊
We love the idea proposed by Ink & Switch in the famous article about you owning your data, despite the cloud. Furthermore, AFFiNE is the first all-in-one workspace that keeps your data ownership with no compromises on real-time collaboration and editing experience.␊
AFFiNE is a local-first application upon CRDTs with real-time collaboration support. Your data is always stored locally while multiple nodes remain synced in real-time.␊
### Blocks that assemble your next docs, tasks kanban or whiteboard␊
There is a large overlap of their atomic "building blocks" between these apps. They are neither open source nor have a plugin system like VS Code for contributors to customize. We want to have something that contains all the features we love and goes one step further.␊
We are building AFFiNE to be a fundamental open source platform that contains all the building blocks for docs, task management and visual collaboration, hoping you can shape your next workflow with us that can make your life better and also connect others, too.␊
If you want to learn more about the product design of AFFiNE, here goes the concepts:␊
To Shape, not to adapt. AFFiNE is built for individuals & teams who care about their data, who refuse vendor lock-in, and who want to have control over their essential tools.␊
## A true canvas for blocks in any form␊
[Many editor apps](http://notion.so) claimed to be a canvas for productivity. Since _the Mother of All Demos,_ Douglas Engelbart, a creative and programable digital workspace has been a pursuit and an ultimate mission for generations of tool makers.␊
"We shape our tools and thereafter our tools shape us”. A lot of pioneers have inspired us a long the way, e.g.:␊
* Quip & Notion with their great concept of "everything is a block"␊
* Trello with their Kanban␊
* Airtable & Miro with their no-code programable datasheets␊
* Miro & Whimiscal with their edgeless visual whiteboard␊
* Remnote & Capacities with their object-based tag system␊
For more details, please refer to our [RoadMap](https://docs.affine.pro/docs/core-concepts/roadmap)␊
## Self Host␊
Self host AFFiNE␊
||Title|Tag|␊
|---|---|---|␊
|Affine Development|Affine Development|<span data-affine-option data-value="AxSe-53xjX" data-option-color="var(--affine-tag-pink)">AFFiNE</span>|␊
|For developers or installations guides, please go to AFFiNE Doc|For developers or installations guides, please go to AFFiNE Doc|<span data-affine-option data-value="0jh9gNw4Yl" data-option-color="var(--affine-tag-orange)">Developers</span>|␊
|Quip & Notion with their great concept of "everything is a block"|Quip & Notion with their great concept of "everything is a block"|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Trello with their Kanban|Trello with their Kanban|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Airtable & Miro with their no-code programable datasheets|Airtable & Miro with their no-code programable datasheets|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Miro & Whimiscal with their edgeless visual whiteboard|Miro & Whimiscal with their edgeless visual whiteboard|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Remnote & Capacities with their object-based tag system|Remnote & Capacities with their object-based tag system||␊
## Affine Development␊
For developer or installation guides, please go to [AFFiNE Development](https://docs.affine.pro/docs/development/quick-start)␊
`,
title: 'Write, Draw, Plan all at Once.',
}
## can parse doc to markdown from doc snapshot with ai editable
> Snapshot 1
{
markdown: `<!-- block_id=FoPQcAyV_m flavour=affine:paragraph -->
AFFiNE is an open source all in one workspace, an operating system for all the building blocks of your team wiki, knowledge management and digital assets and a better alternative to Notion and Miro.␊
<!-- block_id=oz48nn_zp8 flavour=affine:paragraph -->
<!-- block_id=g8a-D9-jXS flavour=affine:paragraph -->
# You own your data, with no compromises␊
<!-- block_id=J8lHN1GR_5 flavour=affine:paragraph -->
## Local-first & Real-time collaborative␊
<!-- block_id=xCuWdM0VLz flavour=affine:paragraph -->
We love the idea proposed by Ink & Switch in the famous article about you owning your data, despite the cloud. Furthermore, AFFiNE is the first all-in-one workspace that keeps your data ownership with no compromises on real-time collaboration and editing experience.␊
<!-- block_id=zElMi0tViK flavour=affine:paragraph -->
AFFiNE is a local-first application upon CRDTs with real-time collaboration support. Your data is always stored locally while multiple nodes remain synced in real-time.␊
<!-- block_id=Z4rK0OF9Wk flavour=affine:paragraph -->
<!-- block_id=DQ0Ryb-SpW flavour=affine:paragraph -->
### Blocks that assemble your next docs, tasks kanban or whiteboard␊
<!-- block_id=HAZC3URZp_ flavour=affine:paragraph -->
There is a large overlap of their atomic "building blocks" between these apps. They are neither open source nor have a plugin system like VS Code for contributors to customize. We want to have something that contains all the features we love and goes one step further.␊
<!-- block_id=0H87ypiuv8 flavour=affine:paragraph -->
We are building AFFiNE to be a fundamental open source platform that contains all the building blocks for docs, task management and visual collaboration, hoping you can shape your next workflow with us that can make your life better and also connect others, too.␊
<!-- block_id=Sp4G1KD0Wn flavour=affine:paragraph -->
If you want to learn more about the product design of AFFiNE, here goes the concepts:␊
<!-- block_id=RsUhDuEqXa flavour=affine:paragraph -->
To Shape, not to adapt. AFFiNE is built for individuals & teams who care about their data, who refuse vendor lock-in, and who want to have control over their essential tools.␊
<!-- block_id=Z2HibKzAr- flavour=affine:paragraph -->
## A true canvas for blocks in any form␊
<!-- block_id=UwvWddamzM flavour=affine:paragraph -->
[Many editor apps](http://notion.so) claimed to be a canvas for productivity. Since _the Mother of All Demos,_ Douglas Engelbart, a creative and programable digital workspace has been a pursuit and an ultimate mission for generations of tool makers.␊
<!-- block_id=g9xKUjhJj1 flavour=affine:paragraph -->
<!-- block_id=wDTn4YJ4pm flavour=affine:paragraph -->
"We shape our tools and thereafter our tools shape us”. A lot of pioneers have inspired us a long the way, e.g.:␊
<!-- block_id=xFrrdiP3-V flavour=affine:list -->
* Quip & Notion with their great concept of "everything is a block"␊
<!-- block_id=Tp9xyN4Okl flavour=affine:list -->
* Trello with their Kanban␊
<!-- block_id=K_4hUzKZFQ flavour=affine:list -->
* Airtable & Miro with their no-code programable datasheets␊
<!-- block_id=QwMzON2s7x flavour=affine:list -->
* Miro & Whimiscal with their edgeless visual whiteboard␊
<!-- block_id=FFVmit6u1T flavour=affine:list -->
* Remnote & Capacities with their object-based tag system␊
<!-- block_id=YqnG5O6AE6 flavour=affine:paragraph -->
For more details, please refer to our [RoadMap](https://docs.affine.pro/docs/core-concepts/roadmap)␊
<!-- block_id=sbDTmZMZcq flavour=affine:paragraph -->
## Self Host␊
<!-- block_id=QVvitesfbj flavour=affine:paragraph -->
Self host AFFiNE␊
<!-- block_id=U_GoHFD9At flavour=affine:database placeholder -->
<!-- block_id=NyHXrMX3R1 flavour=affine:paragraph -->
## Affine Development␊
<!-- block_id=9-K49otbCv flavour=affine:paragraph -->
For developer or installation guides, please go to [AFFiNE Development](https://docs.affine.pro/docs/development/quick-start)␊
<!-- block_id=faFteK9eG- flavour=affine:paragraph -->
`,
title: 'Write, Draw, Plan all at Once.',
}

View File

@@ -5,6 +5,7 @@ import { createModule } from '../../../__tests__/create-module';
import { Mockers } from '../../../__tests__/mocks';
import { Models } from '../../../models';
import {
parseDocToMarkdownFromDocSnapshot,
readAllBlocksFromDocSnapshot,
readAllDocIdsFromWorkspaceSnapshot,
} from '../blocksuite';
@@ -88,3 +89,24 @@ test('can read all blocks from doc snapshot without workspace snapshot', async t
blocks: result!.blocks.map(block => omit(block, ['yblock'])),
});
});
test('can parse doc to markdown from doc snapshot', async t => {
const result = parseDocToMarkdownFromDocSnapshot(
workspace.id,
docSnapshot.id,
docSnapshot.blob
);
t.snapshot(result);
});
test('can parse doc to markdown from doc snapshot with ai editable', async t => {
const result = parseDocToMarkdownFromDocSnapshot(
workspace.id,
docSnapshot.id,
docSnapshot.blob,
true
);
t.snapshot(result);
});

View File

@@ -8,6 +8,7 @@
// eslint-disable-next-line @typescript-eslint/no-restricted-imports -- import from bundle
import {
parsePageDoc as parseDocToMarkdown,
readAllBlocksFromDoc,
readAllDocIdsFromRootDoc,
} from '@affine/reader/dist';
@@ -196,3 +197,32 @@ export async function readAllBlocksFromDocSnapshot(
maxSummaryLength,
});
}
export function parseDocToMarkdownFromDocSnapshot(
workspaceId: string,
docId: string,
docSnapshot: Uint8Array,
aiEditable = false
) {
const ydoc = new YDoc({
guid: docId,
});
applyUpdate(ydoc, docSnapshot);
const parsed = parseDocToMarkdown({
workspaceId,
doc: ydoc,
buildBlobUrl: (blobId: string) => {
return `/${workspaceId}/blobs/${blobId}`;
},
buildDocUrl: (docId: string) => {
return `/workspace/${workspaceId}/${docId}`;
},
aiEditable,
});
return {
title: parsed.title,
markdown: parsed.md,
};
}
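// Usage sketch (standalone; the ids and `snapshotBin` are hypothetical —
// `snapshotBin` stands in for a Yjs update blob such as a `snapshots.blob`
// row). With `aiEditable` set to true, the markdown interleaves
// `<!-- block_id=... flavour=... -->` markers, as shown in the snapshot
// reports above; blob and doc links resolve via the URL builders.
function exampleParse(snapshotBin: Uint8Array) {
  return parseDocToMarkdownFromDocSnapshot('ws-1', 'doc-1', snapshotBin, true);
}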

View File

@@ -76,6 +76,9 @@ class DocType {
@Field(() => String, { nullable: true })
lastUpdaterId?: string;
@Field(() => String, { nullable: true })
title?: string | null;
}
@InputType()
@@ -266,6 +269,26 @@ export class WorkspaceDocResolver {
return paginate(rows, 'createdAt', pagination, count);
}
@ResolveField(() => PaginatedDocType, {
description: 'Get recently updated docs of a workspace',
})
async recentlyUpdatedDocs(
@CurrentUser() me: CurrentUser,
@Parent() workspace: WorkspaceType,
@Args('pagination', PaginationInput.decode) pagination: PaginationInput
): Promise<PaginatedDocType> {
const [count, rows] = await this.models.doc.paginateDocInfoByUpdatedAt(
workspace.id,
pagination
);
const needs = await this.ac
.user(me.id)
.workspace(workspace.id)
.docs(rows, 'Doc.Read');
return paginate(needs, 'updatedAt', pagination, count);
}
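// Query sketch for the new field (comments only). The connection shape —
// totalCount/edges/node — and the `workspace(id:)` entry point are
// assumptions based on PaginatedDocType and the paginate() helper; verify
// against the generated schema before use.
//
//   query recentlyUpdatedDocs($workspaceId: String!, $pagination: PaginationInput!) {
//     workspace(id: $workspaceId) {
//       recentlyUpdatedDocs(pagination: $pagination) {
//         totalCount
//         edges { node { title lastUpdaterId } }
//       }
//     }
//   }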
@ResolveField(() => DocType, {
description: 'Get doc with given id',
complexity: 2,

View File

@@ -1,36 +1,406 @@
import { Injectable } from '@nestjs/common';
import { Transactional } from '@nestjs-cls/transactional';
import { AiPromptRole, Prisma } from '@prisma/client';
import { omit } from 'lodash-es';
import {
CopilotPromptInvalid,
CopilotSessionDeleted,
CopilotSessionInvalidInput,
CopilotSessionNotFound,
} from '../base';
import { BaseModel } from './base';
interface ChatSessionState {
export enum SessionType {
Workspace = 'workspace', // docId is null and pinned is false
Pinned = 'pinned', // pinned is true
Doc = 'doc', // docId points to specific document
}
type ChatAttachment = { attachment: string; mimeType: string } | string;
type ChatStreamObject = {
type: 'text-delta' | 'reasoning' | 'tool-call' | 'tool-result';
textDelta?: string;
toolCallId?: string;
toolName?: string;
args?: Record<string, any>;
result?: any;
};
type ChatMessage = {
id?: string | undefined;
role: 'system' | 'assistant' | 'user';
content: string;
attachments?: ChatAttachment[] | null;
params?: Record<string, any> | null;
streamObjects?: ChatStreamObject[] | null;
createdAt: Date;
};
type ChatSession = {
sessionId: string;
workspaceId: string;
docId: string;
docId?: string | null;
pinned?: boolean;
messages?: ChatMessage[];
// connect ids
userId: string;
promptName: string;
}
promptAction: string | null;
parentSessionId?: string | null;
};
export type UpdateChatSessionData = Partial<
Pick<ChatSession, 'docId' | 'pinned' | 'promptName'>
>;
export type UpdateChatSession = Pick<ChatSession, 'userId' | 'sessionId'> &
UpdateChatSessionData;
export type ListSessionOptions = {
userId: string;
sessionId?: string;
workspaceId?: string;
docId?: string;
action?: boolean;
fork?: boolean;
limit?: number;
skip?: number;
sessionOrder?: 'asc' | 'desc';
messageOrder?: 'asc' | 'desc';
// extra condition
withPrompt?: boolean;
withMessages?: boolean;
};
// TODO(@darkskygit): not ready to replace the business code yet; only used in tests
@Injectable()
export class CopilotSessionModel extends BaseModel {
async create(state: ChatSessionState) {
getSessionType(session: Pick<ChatSession, 'docId' | 'pinned'>): SessionType {
if (session.pinned) return SessionType.Pinned;
if (!session.docId) return SessionType.Workspace;
return SessionType.Doc;
}
checkSessionPrompt(
session: Pick<ChatSession, 'docId' | 'pinned'>,
promptName: string,
promptAction: string | undefined
): boolean {
const sessionType = this.getSessionType(session);
// workspace and pinned sessions cannot use action prompts
if (
[SessionType.Workspace, SessionType.Pinned].includes(sessionType) &&
!!promptAction?.trim()
) {
throw new CopilotPromptInvalid(
`${promptName} is not allowed for ${sessionType} sessions`
);
}
return true;
}
// NOTE: just for test, remove it after copilot prompt model is ready
async createPrompt(name: string, model: string, action?: string) {
await this.db.aiPrompt.create({
data: { name, model, action: action ?? null },
});
}
@Transactional()
async create(state: ChatSession) {
if (state.pinned) {
await this.unpin(state.workspaceId, state.userId);
}
const row = await this.db.aiSession.create({
data: {
id: state.sessionId,
workspaceId: state.workspaceId,
docId: state.docId,
pinned: state.pinned ?? false,
// connect
userId: state.userId,
promptName: state.promptName,
promptAction: state.promptAction,
parentSessionId: state.parentSessionId,
},
});
return row;
}
async createPrompt(name: string, model: string) {
await this.db.aiPrompt.create({
data: { name, model },
@Transactional()
async has(
sessionId: string,
userId: string,
params?: Prisma.AiSessionCountArgs['where']
) {
return await this.db.aiSession
.count({ where: { id: sessionId, userId, ...params } })
.then(c => c > 0);
}
@Transactional()
async getChatSessionId(
state: Omit<ChatSession, 'promptName' | 'promptAction'>
) {
const extraCondition: Record<string, any> = {};
if (state.parentSessionId) {
// also check session id if provided session is forked session
extraCondition.id = state.sessionId;
extraCondition.parentSessionId = state.parentSessionId;
}
const session = await this.db.aiSession.findFirst({
where: {
userId: state.userId,
workspaceId: state.workspaceId,
docId: state.docId,
parentSessionId: null,
prompt: { action: { equals: null } },
...extraCondition,
},
select: { id: true, deletedAt: true },
});
if (session?.deletedAt) throw new CopilotSessionDeleted();
return session?.id;
}
@Transactional()
async getExists<Select extends Prisma.AiSessionSelect>(
sessionId: string,
select?: Select,
where?: Omit<Prisma.AiSessionWhereInput, 'id' | 'deletedAt'>
) {
return (await this.db.aiSession.findUnique({
where: { ...where, id: sessionId, deletedAt: null },
select,
})) as Prisma.AiSessionGetPayload<{ select: Select }>;
}
@Transactional()
async get(sessionId: string) {
return await this.getExists(sessionId, {
id: true,
userId: true,
workspaceId: true,
docId: true,
pinned: true,
parentSessionId: true,
messages: {
select: {
id: true,
role: true,
content: true,
attachments: true,
params: true,
createdAt: true,
},
orderBy: { createdAt: 'asc' },
},
promptName: true,
});
}
async list(options: ListSessionOptions) {
const { userId, sessionId, workspaceId, docId } = options;
const extraCondition = [];
if (!options?.action && options?.fork) {
// only query forked session if fork == true and action == false
extraCondition.push({
userId: { not: userId },
workspaceId: workspaceId,
docId: docId ?? null,
id: sessionId ? { equals: sessionId } : undefined,
prompt: {
action: options.action ? { not: null } : null,
},
// should only find forked sessions
parentSessionId: { not: null },
deletedAt: null,
});
}
return await this.db.aiSession.findMany({
where: {
OR: [
{
userId,
workspaceId,
docId: docId ?? null,
id: sessionId ? { equals: sessionId } : undefined,
deletedAt: null,
},
...extraCondition,
],
},
select: {
id: true,
userId: true,
workspaceId: true,
docId: true,
parentSessionId: true,
pinned: true,
promptName: true,
tokenCost: true,
createdAt: true,
messages: options.withMessages
? {
select: {
id: true,
role: true,
content: true,
attachments: true,
params: true,
streamObjects: true,
createdAt: true,
},
orderBy: {
// message order is asc by default
createdAt: options?.messageOrder === 'desc' ? 'desc' : 'asc',
},
}
: false,
},
take: options?.limit,
skip: options?.skip,
orderBy: {
// session order is desc by default
createdAt: options?.sessionOrder === 'asc' ? 'asc' : 'desc',
},
});
}
@Transactional()
async unpin(workspaceId: string, userId: string): Promise<boolean> {
const { count } = await this.db.aiSession.updateMany({
where: { userId, workspaceId, pinned: true, deletedAt: null },
data: { pinned: false },
});
return count > 0;
}
@Transactional()
async update(
userId: string,
sessionId: string,
data: UpdateChatSessionData
): Promise<string> {
const session = await this.getExists(
sessionId,
{
id: true,
workspaceId: true,
docId: true,
parentSessionId: true,
pinned: true,
prompt: true,
},
{ userId }
);
if (!session) {
throw new CopilotSessionNotFound();
}
// action sessions must not be updated
if (session.prompt.action) {
throw new CopilotSessionInvalidInput(
`Cannot update action: ${session.id}`
);
} else if (data.docId && session.parentSessionId) {
throw new CopilotSessionInvalidInput(
`Cannot update docId for forked session: ${session.id}`
);
}
if (data.promptName) {
const prompt = await this.db.aiPrompt.findFirst({
where: { name: data.promptName },
});
// never allow switching a session to an action prompt
if (!prompt || prompt.action) {
throw new CopilotSessionInvalidInput(
`Prompt ${data.promptName} not found or not available for session ${sessionId}`
);
}
}
if (data.pinned && data.pinned !== session.pinned) {
// when pinning this session, unpin the existing pinned session in the workspace
await this.unpin(session.workspaceId, userId);
}
await this.db.aiSession.update({ where: { id: sessionId }, data });
return sessionId;
}
@Transactional()
async getMessages(
sessionId: string,
select?: Prisma.AiSessionMessageSelect,
orderBy?: Prisma.AiSessionMessageOrderByWithRelationInput
) {
return this.db.aiSessionMessage.findMany({
where: { sessionId },
select,
orderBy: orderBy ?? { createdAt: 'asc' },
});
}
@Transactional()
async setMessages(
sessionId: string,
messages: ChatMessage[],
tokenCost: number
) {
await this.db.aiSessionMessage.createMany({
data: messages.map(m => ({
...m,
attachments: m.attachments || undefined,
params: omit(m.params, ['docs']) || undefined,
streamObjects: m.streamObjects || undefined,
sessionId,
})),
});
// only count messages generated by the user
const userMessages = messages.filter(m => m.role === 'user');
await this.db.aiSession.update({
where: { id: sessionId },
data: {
messageCost: { increment: userMessages.length },
tokenCost: { increment: tokenCost },
},
});
}
@Transactional()
async revertLatestMessage(
sessionId: string,
removeLatestUserMessage: boolean
) {
const id = await this.getExists(sessionId, { id: true }).then(
session => session?.id
);
if (!id) {
throw new CopilotSessionNotFound();
}
const ids = await this.getMessages(id, { id: true, role: true }).then(
roles =>
roles
.slice(
roles.findLastIndex(({ role }) => role === AiPromptRole.user) +
(removeLatestUserMessage ? 0 : 1)
)
.map(({ id }) => id)
);
if (ids.length) {
await this.db.aiSessionMessage.deleteMany({ where: { id: { in: ids } } });
}
}
}
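// Pinning invariant sketch (standalone, not part of this commit): at most one
// pinned session exists per user+workspace, because both `create` and
// `update` unpin the previous one first. `model`, the session ids, and the
// prompt name are hypothetical wiring.
async function demoPinning(
  model: CopilotSessionModel,
  userId: string,
  workspaceId: string
) {
  const base = {
    userId,
    workspaceId,
    docId: null,
    promptName: 'test-prompt',
    promptAction: null,
  };
  await model.create({ ...base, sessionId: 'a', pinned: true });
  await model.create({ ...base, sessionId: 'b', pinned: true }); // unpins 'a'
  const a = await model.get('a');
  // 'a' is now a plain workspace session again
  console.assert(a?.pinned === false);
  console.assert(model.getSessionType(a!) === SessionType.Workspace);
}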

View File

@@ -636,5 +636,61 @@ export class DocModel extends BaseModel {
return [count, rows] as const;
}
async paginateDocInfoByUpdatedAt(
workspaceId: string,
pagination: PaginationInput
) {
const count = await this.db.workspaceDoc.count({
where: {
workspaceId,
},
});
const after = pagination.after
? Prisma.sql`AND "snapshots"."updated_at" < ${new Date(pagination.after)}`
: Prisma.sql``;
const rows = await this.db.$queryRaw<
{
workspaceId: string;
docId: string;
mode: PublicDocMode;
public: boolean;
defaultRole: DocRole;
title: string | null;
createdAt: Date;
updatedAt: Date;
creatorId?: string;
lastUpdaterId?: string;
}[]
>`
SELECT
"workspace_pages"."workspace_id" as "workspaceId",
"workspace_pages"."page_id" as "docId",
"workspace_pages"."mode" as "mode",
"workspace_pages"."public" as "public",
"workspace_pages"."defaultRole" as "defaultRole",
"workspace_pages"."title" as "title",
"snapshots"."created_at" as "createdAt",
"snapshots"."updated_at" as "updatedAt",
"snapshots"."created_by" as "creatorId",
"snapshots"."updated_by" as "lastUpdaterId"
FROM "workspace_pages"
INNER JOIN "snapshots"
ON "workspace_pages"."workspace_id" = "snapshots"."workspace_id"
AND "workspace_pages"."page_id" = "snapshots"."guid"
WHERE
"workspace_pages"."workspace_id" = ${workspaceId}
${after}
ORDER BY
"snapshots"."updated_at" DESC
LIMIT ${pagination.first}
OFFSET ${pagination.offset}
`;
return [count, rows] as const;
}
// #endregion
}
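// Usage sketch (standalone, hypothetical wiring): `after` is an updated-at
// cursor compared with `<` in the raw SQL above, so pages walk backwards
// through update time. The PaginationInput literals below only set the
// fields this query reads (`first`, `offset`, optional `after`).
async function firstTwoPages(doc: DocModel, workspaceId: string) {
  const [total, page1] = await doc.paginateDocInfoByUpdatedAt(workspaceId, {
    first: 20,
    offset: 0,
  } as PaginationInput);
  const cursor = page1.at(-1)?.updatedAt.getTime();
  const [, page2] = await doc.paginateDocInfoByUpdatedAt(workspaceId, {
    first: 20,
    offset: 0,
    after: cursor,
  } as PaginationInput);
  return { total, page1, page2 };
}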

View File

@@ -102,6 +102,8 @@ export class ModelsModule {}
export * from './common';
export * from './copilot-context';
export * from './copilot-job';
export * from './copilot-session';
export * from './copilot-workspace';
export * from './doc';
export * from './doc-user';
export * from './feature';

View File

@@ -45,6 +45,10 @@ interface UserFilter {
withDisabled?: boolean;
}
export interface ItemWithUserId {
userId: string;
}
export type PublicUser = Pick<User, keyof typeof publicUserSelect>;
export type WorkspaceUser = Pick<User, keyof typeof workspaceUserSelect>;
export type { ConnectedAccount, User };
@@ -78,6 +82,19 @@ export class UserModel extends BaseModel {
});
}
async getPublicUsersMap<T extends ItemWithUserId>(
items: T[]
): Promise<Map<string, PublicUser>> {
const userIds: string[] = [];
for (const item of items) {
if (item.userId) {
userIds.push(item.userId);
}
}
const users = await this.getPublicUsers(userIds);
return new Map(users.map(user => [user.id, user]));
}
async getWorkspaceUser(id: string): Promise<WorkspaceUser | null> {
return this.db.user.findUnique({
select: workspaceUserSelect,

View File

@@ -51,6 +51,7 @@ import {
ModelInputType,
ModelOutputType,
} from './providers';
import { StreamObjectParser } from './providers/utils';
import { ChatSession, ChatSessionService } from './session';
import { CopilotStorage } from './storage';
import { ChatMessage, ChatQuerySchema } from './types';
@@ -189,6 +190,45 @@ export class CopilotController implements BeforeApplicationShutdown {
return merge(source$.pipe(finalize(() => subject$.next(null))), ping$);
}
private async prepareChatSession(
user: CurrentUser,
sessionId: string,
query: Record<string, string | string[]>,
outputType: ModelOutputType
) {
let { messageId, retry, modelId, params } = ChatQuerySchema.parse(query);
const { provider, model } = await this.chooseProvider(
outputType,
user.id,
sessionId,
messageId,
modelId
);
const [latestMessage, session] = await this.appendSessionMessage(
sessionId,
messageId,
retry
);
if (latestMessage) {
params = Object.assign({}, params, latestMessage.params, {
content: latestMessage.content,
attachments: latestMessage.attachments,
});
}
const finalMessage = session.finish(params);
return {
provider,
model,
session,
finalMessage,
};
}
@Get('/chat/:sessionId')
@CallMetric('ai', 'chat', { timer: true })
async chat(
@@ -200,36 +240,19 @@
const info: any = { sessionId, params: query };
try {
let { messageId, retry, reasoning, webSearch, modelId, params } =
ChatQuerySchema.parse(query);
const { provider, model } = await this.chooseProvider(
ModelOutputType.Text,
user.id,
sessionId,
messageId,
modelId
);
const [latestMessage, session] = await this.appendSessionMessage(
sessionId,
messageId,
retry
);
const { provider, model, session, finalMessage } =
await this.prepareChatSession(
user,
sessionId,
query,
ModelOutputType.Text
);
info.model = model;
info.finalMessage = finalMessage.filter(m => m.role !== 'system');
metrics.ai.counter('chat_calls').add(1, { model });
if (latestMessage) {
params = Object.assign({}, params, latestMessage.params, {
content: latestMessage.content,
attachments: latestMessage.attachments,
});
}
const finalMessage = session.finish(params);
info.finalMessage = finalMessage.filter(m => m.role !== 'system');
const { reasoning, webSearch } = ChatQuerySchema.parse(query);
const content = await provider.text({ modelId: model }, finalMessage, {
...session.config.promptConfig,
signal: this.getSignal(req),
@ -269,37 +292,20 @@ export class CopilotController implements BeforeApplicationShutdown {
const info: any = { sessionId, params: query, throwInStream: false };
try {
let { messageId, retry, reasoning, webSearch, modelId, params } =
ChatQuerySchema.parse(query);
const { provider, model } = await this.chooseProvider(
ModelOutputType.Text,
user.id,
sessionId,
messageId,
modelId
);
const [latestMessage, session] = await this.appendSessionMessage(
sessionId,
messageId,
retry
);
const { provider, model, session, finalMessage } =
await this.prepareChatSession(
user,
sessionId,
query,
ModelOutputType.Text
);
info.model = model;
metrics.ai.counter('chat_stream_calls').add(1, { model });
if (latestMessage) {
params = Object.assign({}, params, latestMessage.params, {
content: latestMessage.content,
attachments: latestMessage.attachments,
});
}
this.ongoingStreamCount$.next(this.ongoingStreamCount$.value + 1);
const finalMessage = session.finish(params);
info.finalMessage = finalMessage.filter(m => m.role !== 'system');
metrics.ai.counter('chat_stream_calls').add(1, { model });
this.ongoingStreamCount$.next(this.ongoingStreamCount$.value + 1);
const { messageId, reasoning, webSearch } = ChatQuerySchema.parse(query);
const source$ = from(
provider.streamText({ modelId: model }, finalMessage, {
...session.config.promptConfig,
@ -348,6 +354,83 @@ export class CopilotController implements BeforeApplicationShutdown {
}
}
@Sse('/chat/:sessionId/stream-object')
@CallMetric('ai', 'chat_object_stream', { timer: true })
async chatStreamObject(
@CurrentUser() user: CurrentUser,
@Req() req: Request,
@Param('sessionId') sessionId: string,
@Query() query: Record<string, string>
): Promise<Observable<ChatEvent>> {
const info: any = { sessionId, params: query, throwInStream: false };
try {
const { provider, model, session, finalMessage } =
await this.prepareChatSession(
user,
sessionId,
query,
ModelOutputType.Object
);
info.model = model;
info.finalMessage = finalMessage.filter(m => m.role !== 'system');
metrics.ai.counter('chat_object_stream_calls').add(1, { model });
this.ongoingStreamCount$.next(this.ongoingStreamCount$.value + 1);
const { messageId, reasoning, webSearch } = ChatQuerySchema.parse(query);
const source$ = from(
provider.streamObject({ modelId: model }, finalMessage, {
...session.config.promptConfig,
signal: this.getSignal(req),
user: user.id,
workspace: session.config.workspaceId,
reasoning,
webSearch,
})
).pipe(
connect(shared$ =>
merge(
// actual chat event stream
shared$.pipe(
map(data => ({ type: 'message' as const, id: messageId, data }))
),
// save the generated text to the session
shared$.pipe(
toArray(),
concatMap(values => {
const parser = new StreamObjectParser();
const streamObjects = parser.mergeTextDelta(values);
const content = parser.mergeContent(streamObjects);
session.push({
role: 'assistant',
content,
streamObjects,
createdAt: new Date(),
});
return from(session.save());
}),
mergeMap(() => EMPTY)
)
)
),
catchError(e => {
metrics.ai.counter('chat_object_stream_errors').add(1);
info.throwInStream = true;
return mapSseError(e, info);
}),
finalize(() => {
this.ongoingStreamCount$.next(this.ongoingStreamCount$.value - 1);
})
);
return this.mergePingStream(messageId || '', source$);
} catch (err) {
metrics.ai.counter('chat_object_stream_errors').add(1, info);
return mapSseError(err, info);
}
}
@Sse('/chat/:sessionId/workflow')
@CallMetric('ai', 'chat_workflow', { timer: true })
async chatWorkflow(

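A hypothetical client for the new stream-object endpoint (the /api/copilot mount point and the exact event framing are assumptions): each SSE message carries one StreamObject chunk, so a consumer can render text deltas immediately and handle tool events separately.

// Browser-side sketch, not part of this diff.
const source = new EventSource(
  `/api/copilot/chat/${sessionId}/stream-object?messageId=${messageId}`
);
source.onmessage = event => {
  const chunk = JSON.parse(event.data); // StreamObject: text-delta | reasoning | tool-call | tool-result
  if (chunk.type === 'text-delta') {
    appendToUi(chunk.textDelta); // hypothetical render helper
  }
};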
View File

@ -1,12 +1,10 @@
import { Injectable } from '@nestjs/common';
import { Injectable, Logger } from '@nestjs/common';
import { ModuleRef } from '@nestjs/core';
import {
AFFiNELogger,
BlobNotFound,
CallMetric,
CopilotContextFileNotSupported,
DocNotFound,
EventBus,
JobQueue,
mapAnyError,
@ -23,6 +21,7 @@ import { EMBEDDING_DIMENSIONS, EmbeddingClient } from './types';
@Injectable()
export class CopilotEmbeddingJob {
private readonly logger = new Logger(CopilotEmbeddingJob.name);
private readonly workspaceJobAbortController: Map<string, AbortController> =
new Map();
@ -33,13 +32,10 @@ export class CopilotEmbeddingJob {
private readonly moduleRef: ModuleRef,
private readonly doc: DocReader,
private readonly event: EventBus,
private readonly logger: AFFiNELogger,
private readonly models: Models,
private readonly queue: JobQueue,
private readonly storage: CopilotStorage
) {
this.logger.setContext(CopilotEmbeddingJob.name);
}
) {}
@OnEvent('config.init')
async onConfigInit() {
@ -138,7 +134,7 @@ export class CopilotEmbeddingJob {
if (enableDocEmbedding) {
const toBeEmbedDocIds =
await this.models.copilotWorkspace.findDocsToEmbed(workspaceId);
this.logger.debug(
this.logger.log(
`Triggering embedding for ${toBeEmbedDocIds.length} docs in workspace ${workspaceId}`
);
for (const docId of toBeEmbedDocIds) {
@ -163,8 +159,10 @@ export class CopilotEmbeddingJob {
}
}
@OnEvent('doc.indexer.updated')
async addDocEmbeddingQueueFromEvent(doc: Events['doc.indexer.updated']) {
@OnJob('copilot.embedding.updateDoc')
async addDocEmbeddingQueueFromEvent(
doc: Jobs['copilot.embedding.updateDoc']
) {
if (!this.supportEmbedding || !this.embeddingClient) return;
await this.queue.add(
@ -180,8 +178,10 @@ export class CopilotEmbeddingJob {
);
}
@OnEvent('doc.indexer.deleted')
async deleteDocEmbeddingQueueFromEvent(doc: Events['doc.indexer.deleted']) {
@OnJob('copilot.embedding.deleteDoc')
async deleteDocEmbeddingQueueFromEvent(
doc: Jobs['copilot.embedding.deleteDoc']
) {
await this.queue.remove(
`workspace:embedding:${doc.workspaceId}:${doc.docId}`,
'copilot.embedding.docs'
@ -342,11 +342,16 @@ export class CopilotEmbeddingJob {
workspaceId,
docId
);
this.logger.verbose(
this.logger.log(
`Check if doc ${docId} in workspace ${workspaceId} needs embedding: ${needEmbedding}`
);
if (needEmbedding) {
if (signal.aborted) return;
if (signal.aborted) {
this.logger.log(
`Embedding for doc ${docId} in workspace ${workspaceId} was aborted, skipping.`
);
return;
}
const fragment = await this.getDocFragment(workspaceId, docId);
if (fragment) {
// fail fast for empty docs; the journal feature easily creates empty docs
@ -367,12 +372,21 @@ export class CopilotEmbeddingJob {
chunks
);
}
this.logger.log(
`Doc ${docId} in workspace ${workspaceId} has a summary; embedding done.`
);
} else {
// for empty docs, insert an empty embedding
this.logger.warn(
`Doc ${docId} in workspace ${workspaceId} has no summary; falling back to an empty embedding.`
);
await this.fulfillEmptyEmbedding(workspaceId, docId);
}
} else if (contextId) {
throw new DocNotFound({ spaceId: workspaceId, docId });
this.logger.warn(
`Doc ${docId} in workspace ${workspaceId} has no fragment; falling back to an empty embedding.`
);
await this.fulfillEmptyEmbedding(workspaceId, docId);
}
}
} catch (error: any) {
@ -394,8 +408,11 @@ export class CopilotEmbeddingJob {
return;
}
// passthrough error to job queue
throw error;
// log error and skip the job
this.logger.error(
`Error embedding doc ${docId} in workspace ${workspaceId}`,
error
);
}
}
}

View File

@ -43,6 +43,16 @@ declare global {
docId: string;
};
'copilot.embedding.updateDoc': {
workspaceId: string;
docId: string;
};
'copilot.embedding.deleteDoc': {
workspaceId: string;
docId: string;
};
'copilot.embedding.files': {
contextId?: string;
userId: string;

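With the payloads declared above, handlers stay fully typed; a minimal sketch mirroring the @OnJob usage from this diff (class scaffolding assumed):

// Sketch: the declaration merge above types both the handler argument
// and the enqueue payload.
@OnJob('copilot.embedding.updateDoc')
async onUpdateDoc(job: Jobs['copilot.embedding.updateDoc']) {
  const { workspaceId, docId } = job; // both typed as string
}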
View File

@ -8,6 +8,7 @@ import { FeatureModule } from '../../core/features';
import { PermissionModule } from '../../core/permission';
import { QuotaModule } from '../../core/quota';
import { WorkspaceModule } from '../../core/workspaces';
import { IndexerModule } from '../indexer';
import {
CopilotContextResolver,
CopilotContextRootResolver,
@ -44,6 +45,7 @@ import {
PermissionModule,
ServerConfigModule,
WorkspaceModule,
IndexerModule,
],
providers: [
// providers

View File

@ -1880,5 +1880,14 @@ export async function refreshPrompts(db: PrismaClient) {
},
},
});
await db.aiSession.updateMany({
where: {
promptName: prompt.name,
},
data: {
promptAction: prompt.action ?? null,
},
});
}
}

View File

@ -13,11 +13,17 @@ import {
import { CopilotProvider } from '../provider';
import type {
CopilotChatOptions,
CopilotProviderModel,
ModelConditions,
PromptMessage,
StreamObject,
} from '../types';
import { ModelOutputType } from '../types';
import { chatToGPTMessage, TextStreamParser } from '../utils';
import {
chatToGPTMessage,
StreamObjectParser,
TextStreamParser,
} from '../utils';
export abstract class AnthropicProvider<T> extends CopilotProvider<T> {
private readonly MAX_STEPS = 20;
@ -92,21 +98,7 @@ export abstract class AnthropicProvider<T> extends CopilotProvider<T> {
try {
metrics.ai.counter('chat_text_stream_calls').add(1, { model: model.id });
const [system, msgs] = await chatToGPTMessage(messages, true, true);
const { fullStream } = streamText({
model: this.instance(model.id),
system,
messages: msgs,
abortSignal: options.signal,
providerOptions: {
anthropic: this.getAnthropicOptions(options, model.id),
},
tools: await this.getTools(options, model.id),
maxSteps: this.MAX_STEPS,
experimental_continueSteps: true,
});
const fullStream = await this.getFullStream(model, messages, options);
const parser = new TextStreamParser();
for await (const chunk of fullStream) {
const result = parser.parse(chunk);
@ -122,6 +114,60 @@ export abstract class AnthropicProvider<T> extends CopilotProvider<T> {
}
}
override async *streamObject(
cond: ModelConditions,
messages: PromptMessage[],
options: CopilotChatOptions = {}
): AsyncIterable<StreamObject> {
const fullCond = { ...cond, outputType: ModelOutputType.Object };
await this.checkParams({ cond: fullCond, messages, options });
const model = this.selectModel(fullCond);
try {
metrics.ai
.counter('chat_object_stream_calls')
.add(1, { model: model.id });
const fullStream = await this.getFullStream(model, messages, options);
const parser = new StreamObjectParser();
for await (const chunk of fullStream) {
const result = parser.parse(chunk);
if (result) {
yield result;
}
if (options.signal?.aborted) {
await fullStream.cancel();
break;
}
}
} catch (e: any) {
metrics.ai
.counter('chat_object_stream_errors')
.add(1, { model: model.id });
throw this.handleError(e);
}
}
private async getFullStream(
model: CopilotProviderModel,
messages: PromptMessage[],
options: CopilotChatOptions = {}
) {
const [system, msgs] = await chatToGPTMessage(messages, true, true);
const { fullStream } = streamText({
model: this.instance(model.id),
system,
messages: msgs,
abortSignal: options.signal,
providerOptions: {
anthropic: this.getAnthropicOptions(options, model.id),
},
tools: await this.getTools(options, model.id),
maxSteps: this.MAX_STEPS,
experimental_continueSteps: true,
});
return fullStream;
}
private getAnthropicOptions(options: CopilotChatOptions, model: string) {
const result: AnthropicProviderOptions = {};
if (options?.reasoning && this.isReasoningModel(model)) {

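A minimal consumption sketch for the new streamObject override (the model id and abort wiring are assumptions): chunks arrive as discriminated StreamObject values, and aborting the signal cancels the underlying stream.

// Hypothetical caller.
const controller = new AbortController();
for await (const chunk of provider.streamObject(
  { modelId: 'claude-sonnet-4' }, // assumed model id
  messages,
  { signal: controller.signal }
)) {
  if (chunk.type === 'text-delta') process.stdout.write(chunk.textDelta);
}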
View File

@ -20,7 +20,7 @@ export class AnthropicOfficialProvider extends AnthropicProvider<AnthropicOffici
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -29,7 +29,7 @@ export class AnthropicOfficialProvider extends AnthropicProvider<AnthropicOffici
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -38,7 +38,7 @@ export class AnthropicOfficialProvider extends AnthropicProvider<AnthropicOffici
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -47,7 +47,7 @@ export class AnthropicOfficialProvider extends AnthropicProvider<AnthropicOffici
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
defaultForOutputType: true,
},
],

View File

@ -18,7 +18,7 @@ export class AnthropicVertexProvider extends AnthropicProvider<AnthropicVertexCo
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -27,7 +27,7 @@ export class AnthropicVertexProvider extends AnthropicProvider<AnthropicVertexCo
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -36,7 +36,7 @@ export class AnthropicVertexProvider extends AnthropicProvider<AnthropicVertexCo
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -45,7 +45,7 @@ export class AnthropicVertexProvider extends AnthropicProvider<AnthropicVertexCo
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
defaultForOutputType: true,
},
],

View File

@ -21,11 +21,17 @@ import { CopilotProvider } from '../provider';
import type {
CopilotChatOptions,
CopilotImageOptions,
CopilotProviderModel,
ModelConditions,
PromptMessage,
StreamObject,
} from '../types';
import { ModelOutputType } from '../types';
import { chatToGPTMessage, TextStreamParser } from '../utils';
import {
chatToGPTMessage,
StreamObjectParser,
TextStreamParser,
} from '../utils';
export const DEFAULT_DIMENSIONS = 256;
@ -150,21 +156,7 @@ export abstract class GeminiProvider<T> extends CopilotProvider<T> {
try {
metrics.ai.counter('chat_text_stream_calls').add(1, { model: model.id });
const [system, msgs] = await chatToGPTMessage(messages);
const { fullStream } = streamText({
model: this.instance(model.id, {
useSearchGrounding: this.useSearchGrounding(options),
}),
system,
messages: msgs,
abortSignal: options.signal,
maxSteps: this.MAX_STEPS,
providerOptions: {
google: this.getGeminiOptions(options, model.id),
},
});
const fullStream = await this.getFullStream(model, messages, options);
const parser = new TextStreamParser();
for await (const chunk of fullStream) {
const result = parser.parse(chunk);
@ -180,6 +172,60 @@ export abstract class GeminiProvider<T> extends CopilotProvider<T> {
}
}
override async *streamObject(
cond: ModelConditions,
messages: PromptMessage[],
options: CopilotChatOptions = {}
): AsyncIterable<StreamObject> {
const fullCond = { ...cond, outputType: ModelOutputType.Object };
await this.checkParams({ cond: fullCond, messages, options });
const model = this.selectModel(fullCond);
try {
metrics.ai
.counter('chat_object_stream_calls')
.add(1, { model: model.id });
const fullStream = await this.getFullStream(model, messages, options);
const parser = new StreamObjectParser();
for await (const chunk of fullStream) {
const result = parser.parse(chunk);
if (result) {
yield result;
}
if (options.signal?.aborted) {
await fullStream.cancel();
break;
}
}
} catch (e: any) {
metrics.ai
.counter('chat_object_stream_errors')
.add(1, { model: model.id });
throw this.handleError(e);
}
}
private async getFullStream(
model: CopilotProviderModel,
messages: PromptMessage[],
options: CopilotChatOptions = {}
) {
const [system, msgs] = await chatToGPTMessage(messages);
const { fullStream } = streamText({
model: this.instance(model.id, {
useSearchGrounding: this.useSearchGrounding(options),
}),
system,
messages: msgs,
abortSignal: options.signal,
maxSteps: this.MAX_STEPS,
providerOptions: {
google: this.getGeminiOptions(options, model.id),
},
});
return fullStream;
}
private getGeminiOptions(options: CopilotChatOptions, model: string) {
const result: GoogleGenerativeAIProviderOptions = {};
if (options?.reasoning && this.isReasoningModel(model)) {

View File

@ -25,7 +25,11 @@ export class GeminiGenerativeProvider extends GeminiProvider<GeminiGenerativeCon
ModelInputType.Image,
ModelInputType.Audio,
],
output: [ModelOutputType.Text, ModelOutputType.Structured],
output: [
ModelOutputType.Text,
ModelOutputType.Object,
ModelOutputType.Structured,
],
defaultForOutputType: true,
},
],
@ -40,7 +44,11 @@ export class GeminiGenerativeProvider extends GeminiProvider<GeminiGenerativeCon
ModelInputType.Image,
ModelInputType.Audio,
],
output: [ModelOutputType.Text, ModelOutputType.Structured],
output: [
ModelOutputType.Text,
ModelOutputType.Object,
ModelOutputType.Structured,
],
},
],
},
@ -54,7 +62,11 @@ export class GeminiGenerativeProvider extends GeminiProvider<GeminiGenerativeCon
ModelInputType.Image,
ModelInputType.Audio,
],
output: [ModelOutputType.Text, ModelOutputType.Structured],
output: [
ModelOutputType.Text,
ModelOutputType.Object,
ModelOutputType.Structured,
],
},
],
},

View File

@ -23,7 +23,11 @@ export class GeminiVertexProvider extends GeminiProvider<GeminiVertexConfig> {
ModelInputType.Image,
ModelInputType.Audio,
],
output: [ModelOutputType.Text, ModelOutputType.Structured],
output: [
ModelOutputType.Text,
ModelOutputType.Object,
ModelOutputType.Structured,
],
},
],
},
@ -37,7 +41,11 @@ export class GeminiVertexProvider extends GeminiProvider<GeminiVertexConfig> {
ModelInputType.Image,
ModelInputType.Audio,
],
output: [ModelOutputType.Text, ModelOutputType.Structured],
output: [
ModelOutputType.Text,
ModelOutputType.Object,
ModelOutputType.Structured,
],
},
],
},

View File

@ -27,12 +27,19 @@ import type {
CopilotChatTools,
CopilotEmbeddingOptions,
CopilotImageOptions,
CopilotProviderModel,
CopilotStructuredOptions,
ModelConditions,
PromptMessage,
StreamObject,
} from './types';
import { CopilotProviderType, ModelInputType, ModelOutputType } from './types';
import { chatToGPTMessage, CitationParser, TextStreamParser } from './utils';
import {
chatToGPTMessage,
CitationParser,
StreamObjectParser,
TextStreamParser,
} from './utils';
export const DEFAULT_DIMENSIONS = 256;
@ -65,7 +72,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -75,7 +82,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -84,7 +91,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -94,7 +101,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -103,7 +110,11 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text, ModelOutputType.Structured],
output: [
ModelOutputType.Text,
ModelOutputType.Object,
ModelOutputType.Structured,
],
defaultForOutputType: true,
},
],
@ -113,7 +124,11 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text, ModelOutputType.Structured],
output: [
ModelOutputType.Text,
ModelOutputType.Object,
ModelOutputType.Structured,
],
},
],
},
@ -122,7 +137,11 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text, ModelOutputType.Structured],
output: [
ModelOutputType.Text,
ModelOutputType.Object,
ModelOutputType.Structured,
],
},
],
},
@ -131,7 +150,11 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text, ModelOutputType.Structured],
output: [
ModelOutputType.Text,
ModelOutputType.Object,
ModelOutputType.Structured,
],
},
],
},
@ -140,7 +163,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -149,7 +172,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -158,7 +181,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [ModelOutputType.Text],
output: [ModelOutputType.Text, ModelOutputType.Object],
},
],
},
@ -312,26 +335,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
try {
metrics.ai.counter('chat_text_stream_calls').add(1, { model: model.id });
const [system, msgs] = await chatToGPTMessage(messages);
const modelInstance = this.#instance.responses(model.id);
const { fullStream } = streamText({
model: modelInstance,
system,
messages: msgs,
frequencyPenalty: options.frequencyPenalty ?? 0,
presencePenalty: options.presencePenalty ?? 0,
temperature: options.temperature ?? 0,
maxTokens: options.maxTokens ?? 4096,
providerOptions: {
openai: this.getOpenAIOptions(options, model.id),
},
tools: await this.getTools(options, model.id),
maxSteps: this.MAX_STEPS,
abortSignal: options.signal,
});
const fullStream = await this.getFullStream(model, messages, options);
const citationParser = new CitationParser();
const textParser = new TextStreamParser();
for await (const chunk of fullStream) {
@ -363,6 +367,39 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
}
}
override async *streamObject(
cond: ModelConditions,
messages: PromptMessage[],
options: CopilotChatOptions = {}
): AsyncIterable<StreamObject> {
const fullCond = { ...cond, outputType: ModelOutputType.Object };
await this.checkParams({ cond: fullCond, messages, options });
const model = this.selectModel(fullCond);
try {
metrics.ai
.counter('chat_object_stream_calls')
.add(1, { model: model.id });
const fullStream = await this.getFullStream(model, messages, options);
const parser = new StreamObjectParser();
for await (const chunk of fullStream) {
const result = parser.parse(chunk);
if (result) {
yield result;
}
if (options.signal?.aborted) {
await fullStream.cancel();
break;
}
}
} catch (e: any) {
metrics.ai
.counter('chat_object_stream_errors')
.add(1, { model: model.id });
throw this.handleError(e, model.id, options);
}
}
override async structure(
cond: ModelConditions,
messages: PromptMessage[],
@ -403,6 +440,31 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
}
}
private async getFullStream(
model: CopilotProviderModel,
messages: PromptMessage[],
options: CopilotChatOptions = {}
) {
const [system, msgs] = await chatToGPTMessage(messages);
const modelInstance = this.#instance.responses(model.id);
const { fullStream } = streamText({
model: modelInstance,
system,
messages: msgs,
frequencyPenalty: options.frequencyPenalty ?? 0,
presencePenalty: options.presencePenalty ?? 0,
temperature: options.temperature ?? 0,
maxTokens: options.maxTokens ?? 4096,
providerOptions: {
openai: this.getOpenAIOptions(options, model.id),
},
tools: await this.getTools(options, model.id),
maxSteps: this.MAX_STEPS,
abortSignal: options.signal,
});
return fullStream;
}
// ====== text to image ======
private async *generateImageWithAttachments(
model: string,

View File

@ -10,9 +10,12 @@ import {
OnEvent,
} from '../../../base';
import { AccessController } from '../../../core/permission';
import { IndexerService } from '../../indexer';
import { CopilotContextService } from '../context';
import {
buildDocKeywordSearchGetter,
buildDocSearchGetter,
createDocKeywordSearchTool,
createDocSemanticSearchTool,
createExaCrawlTool,
createExaSearchTool,
@ -33,6 +36,7 @@ import {
ModelInputType,
type PromptMessage,
PromptMessageSchema,
StreamObject,
} from './types';
@Injectable()
@ -124,6 +128,7 @@ export abstract class CopilotProvider<C = any> {
): Promise<ToolSet> {
const tools: ToolSet = {};
if (options?.tools?.length) {
this.logger.debug(`getTools: ${JSON.stringify(options.tools)}`);
for (const tool of options.tools) {
const toolDef = this.getProviderSpecificTools(tool, model);
if (toolDef) {
@ -142,6 +147,24 @@ export abstract class CopilotProvider<C = any> {
);
break;
}
case 'docKeywordSearch': {
if (this.AFFiNEConfig.indexer.enabled) {
const ac = this.moduleRef.get(AccessController, {
strict: false,
});
const indexerService = this.moduleRef.get(IndexerService, {
strict: false,
});
const searchDocs = buildDocKeywordSearchGetter(
ac,
indexerService
);
tools.doc_keyword_search = createDocKeywordSearchTool(
searchDocs.bind(null, options)
);
}
break;
}
case 'webSearch': {
tools.web_search_exa = createExaSearchTool(this.AFFiNEConfig);
tools.web_crawl_exa = createExaCrawlTool(this.AFFiNEConfig);
@ -225,6 +248,17 @@ export abstract class CopilotProvider<C = any> {
options?: CopilotChatOptions
): AsyncIterable<string>;
streamObject(
_model: ModelConditions,
_messages: PromptMessage[],
_options?: CopilotChatOptions
): AsyncIterable<StreamObject> {
throw new CopilotProviderNotSupported({
provider: this.type,
kind: 'object',
});
}
structure(
_cond: ModelConditions,
_messages: PromptMessage[],

View File

@ -118,8 +118,33 @@ export const ChatMessageAttachment = z.union([
}),
]);
export const StreamObjectSchema = z.discriminatedUnion('type', [
z.object({
type: z.literal('text-delta'),
textDelta: z.string(),
}),
z.object({
type: z.literal('reasoning'),
textDelta: z.string(),
}),
z.object({
type: z.literal('tool-call'),
toolCallId: z.string(),
toolName: z.string(),
args: z.record(z.any()),
}),
z.object({
type: z.literal('tool-result'),
toolCallId: z.string(),
toolName: z.string(),
args: z.record(z.any()),
result: z.any(),
}),
]);
export const PureMessageSchema = z.object({
content: z.string(),
streamObjects: z.array(StreamObjectSchema).optional().nullable(),
attachments: z.array(ChatMessageAttachment).optional().nullable(),
params: z.record(z.any()).optional().nullable(),
});
@ -129,6 +154,7 @@ export const PromptMessageSchema = PureMessageSchema.extend({
}).strict();
export type PromptMessage = z.infer<typeof PromptMessageSchema>;
export type PromptParams = NonNullable<PromptMessage['params']>;
export type StreamObject = z.infer<typeof StreamObjectSchema>;
// ========== options ==========
@ -187,6 +213,7 @@ export enum ModelInputType {
export enum ModelOutputType {
Text = 'text',
Object = 'object',
Embedding = 'embedding',
Image = 'image',
Structured = 'structured',

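A quick validation sketch against the new schema (values invented for illustration): StreamObjectSchema.parse narrows a raw chunk to the discriminated union and rejects unknown types.

const chunk = StreamObjectSchema.parse({ type: 'text-delta', textDelta: 'Hello' });
// chunk: StreamObject, narrowed by the 'type' discriminator
StreamObjectSchema.parse({ type: 'oops' }); // throws ZodError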
View File

@ -1,3 +1,4 @@
import { Logger } from '@nestjs/common';
import {
CoreAssistantMessage,
CoreUserMessage,
@ -10,11 +11,12 @@ import {
import { ZodType } from 'zod';
import {
createDocKeywordSearchTool,
createDocSemanticSearchTool,
createExaCrawlTool,
createExaSearchTool,
} from '../tools';
import { PromptMessage } from './types';
import { PromptMessage, StreamObject } from './types';
type ChatMessage = CoreUserMessage | CoreAssistantMessage;
@ -381,13 +383,31 @@ export class CitationParser {
export interface CustomAITools extends ToolSet {
doc_semantic_search: ReturnType<typeof createDocSemanticSearchTool>;
doc_keyword_search: ReturnType<typeof createDocKeywordSearchTool>;
web_search_exa: ReturnType<typeof createExaSearchTool>;
web_crawl_exa: ReturnType<typeof createExaCrawlTool>;
}
type ChunkType = TextStreamPart<CustomAITools>['type'];
export function parseUnknownError(error: unknown) {
if (typeof error === 'string') {
throw new Error(error);
} else if (error instanceof Error) {
throw error;
} else if (
typeof error === 'object' &&
error !== null &&
'message' in error
) {
throw new Error(String(error.message));
} else {
throw new Error(JSON.stringify(error));
}
}
export class TextStreamParser {
private readonly logger = new Logger(TextStreamParser.name);
private readonly CALLOUT_PREFIX = '\n[!]\n';
private lastType: ChunkType | undefined;
@ -412,6 +432,9 @@ export class TextStreamParser {
break;
}
case 'tool-call': {
this.logger.debug(
`[tool-call] toolName: ${chunk.toolName}, toolCallId: ${chunk.toolCallId}`
);
result = this.addPrefix(result);
switch (chunk.toolName) {
case 'web_search_exa': {
@ -422,11 +445,18 @@ export class TextStreamParser {
result += `\nCrawling the web "${chunk.args.url}"\n`;
break;
}
case 'doc_keyword_search': {
result += `\nSearching the keyword "${chunk.args.query}"\n`;
break;
}
}
result = this.markAsCallout(result);
break;
}
case 'tool-result': {
this.logger.debug(
`[tool-result] toolName: ${chunk.toolName}, toolCallId: ${chunk.toolCallId}`
);
result = this.addPrefix(result);
switch (chunk.toolName) {
case 'doc_semantic_search': {
@ -435,6 +465,13 @@ export class TextStreamParser {
}
break;
}
case 'doc_keyword_search': {
if (Array.isArray(chunk.result)) {
result += `\nFound ${chunk.result.length} document${chunk.result.length !== 1 ? 's' : ''} related to “${chunk.args.query}”.\n`;
result += `\n${this.getKeywordSearchLinks(chunk.result)}\n`;
}
break;
}
case 'web_search_exa': {
if (Array.isArray(chunk.result)) {
result += `\n${this.getWebSearchLinks(chunk.result)}\n`;
@ -446,8 +483,8 @@ export class TextStreamParser {
break;
}
case 'error': {
const error = chunk.error as { type: string; message: string };
throw new Error(error.message);
parseUnknownError(chunk.error);
break;
}
}
this.lastType = chunk.type;
@ -489,4 +526,81 @@ export class TextStreamParser {
}, '');
return links;
}
private getKeywordSearchLinks(
list: {
docId: string;
title: string;
}[]
): string {
const links = list.reduce((acc, result) => {
return acc + `\n\n[${result.title}](${result.docId})\n\n`;
}, '');
return links;
}
}
export class StreamObjectParser {
public parse(chunk: TextStreamPart<CustomAITools>) {
switch (chunk.type) {
case 'reasoning':
case 'text-delta':
case 'tool-call':
case 'tool-result': {
return chunk;
}
case 'error': {
parseUnknownError(chunk.error);
return null;
}
default: {
return null;
}
}
}
public mergeTextDelta(chunks: StreamObject[]): StreamObject[] {
return chunks.reduce((acc, curr) => {
const prev = acc.at(-1);
switch (curr.type) {
case 'reasoning':
case 'text-delta': {
if (prev && prev.type === curr.type) {
prev.textDelta += curr.textDelta;
} else {
acc.push(curr);
}
break;
}
case 'tool-result': {
const index = acc.findIndex(
item =>
item.type === 'tool-call' &&
item.toolCallId === curr.toolCallId &&
item.toolName === curr.toolName
);
if (index !== -1) {
acc[index] = curr;
} else {
acc.push(curr);
}
break;
}
default: {
acc.push(curr);
break;
}
}
return acc;
}, [] as StreamObject[]);
}
public mergeContent(chunks: StreamObject[]): string {
return chunks.reduce((acc, curr) => {
if (curr.type === 'text-delta') {
acc += curr.textDelta;
}
return acc;
}, '');
}
}
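
A worked example of the merge behavior above (values invented for illustration): consecutive text deltas collapse into one chunk, and a tool-result replaces its matching tool-call in place.

const parser = new StreamObjectParser();
const merged = parser.mergeTextDelta([
  { type: 'text-delta', textDelta: 'Hel' },
  { type: 'text-delta', textDelta: 'lo' },
  { type: 'tool-call', toolCallId: '1', toolName: 'web_search_exa', args: { query: 'affine' } },
  { type: 'tool-result', toolCallId: '1', toolName: 'web_search_exa', args: { query: 'affine' }, result: [] },
]);
// merged: [{ type: 'text-delta', textDelta: 'Hello' }, { type: 'tool-result', ... }]
parser.mergeContent(merged); // => 'Hello'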
