perf(e2e): reduce e2e runtime (#4280)

* chore: reuse page instance

* refactor: optimize

* chore: revert

* chore: ignore workflows

* chore: upgrade playwright to 1.44.0

* chore: fix failed tests

* refactor: optimize e2eUtils

* chore: make tests pass

* chore: optimize workflow for e2e

* chore: fix failed tests

* chore: optimize workflows

* chore: optimize

* chore: fix path

* chore: fix

* chore: test

* chore: fix

* chore: test

* chore: test

* chore: add bot for e2e

* chore: wait to revert

* chore: wait to revert

* chore: fix

* chore: fix

* chore: fix bot

* chore: test

* chore: ignore pro-plugins when the PR author is not a member

* chore: optimize

* chore: test

* chore: test

* chore: test bot

* chore: remove title link

* chore: fix

* chore: fix error and cache yarn

* chore: optimize md

* chore: add new workflows
Zeke Zhang 2024-05-10 20:51:11 +08:00 committed by GitHub
parent e2922bed9e
commit df0d3ddee3
17 changed files with 526 additions and 112 deletions

View File

@@ -16,7 +16,6 @@ on:
     branches:
       - '**'
     paths:
-      - 'packages/core/client/**'
       - 'packages/core/client/docs/**'
       - '.github/workflows/deploy-client-docs.yml'
@@ -25,11 +24,12 @@ jobs:
     name: Build
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-node@v3
+      - uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
         with:
-          node-version: "18"
-      - run: yarn install
+          node-version: 18
+          cache: 'yarn'
+      - run: yarn --frozen-lockfile
       - name: Build zh-CN
         run: yarn doc build core/client --lang=zh-CN
       - name: Build en-US

.github/workflows/merge.config.ts (new file, +5)
View File

@@ -0,0 +1,5 @@
export default {
// Look for test files in the "tests" directory, relative to this configuration file.
testDir: 'packages',
reporter: [['markdown'], ['html', { outputFolder: `../../e2e-report`, open: 'never' }]]
};
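
This config is consumed by Playwright's merge-reports step that the workflows below run after each job has produced a blob report. As a hypothetical convenience (not part of this commit), the same merge could be reproduced locally with a small TypeScript helper, assuming the blob reports have already been gathered under ./storage/playwright/tests-report-blob:

// merge-local.ts — hypothetical helper, not part of this commit.
// It shells out to the same command the CI jobs below execute.
import { execSync } from 'node:child_process';

execSync(
  'npx playwright merge-reports --config .github/workflows/merge.config.ts ./storage/playwright/tests-report-blob',
  { stdio: 'inherit', env: { ...process.env, NODE_OPTIONS: '--max-old-space-size=4096' } },
);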

View File

@@ -10,29 +10,43 @@ on:
       - 'main'
       - 'develop'
     paths:
-      - '.github/workflows/nocobase-build-test.yml'
       - 'packages/**'
+      - '.github/workflows/nocobase-build-test.yml'
   pull_request:
     branches:
       - '**'
     paths:
-      - '.github/workflows/nocobase-build-test.yml'
       - 'packages/**'
+      - '.github/workflows/nocobase-build-test.yml'

 jobs:
   build-test:
-    strategy:
-      matrix:
-        node_version: [ '18' ]
     runs-on: ubuntu-latest
-    container: node:${{ matrix.node_version }}
     steps:
-      - uses: actions/checkout@v2
-      - name: Use Node.js ${{ matrix.node_version }}
-        uses: actions/setup-node@v2
+      - uses: actions/checkout@v4
+      - name: Checkout pro-plugins
+        continue-on-error: true # PRs from external contributors have no access to this repo, so this step may fail; continue-on-error lets the remaining steps run
+        uses: actions/checkout@v4
         with:
-          node-version: ${{ matrix.node_version }}
+          repository: nocobase/pro-plugins
+          ref: main
+          path: packages/pro-plugins
+          ssh-key: ${{ secrets.SUBMODULE_SSH_KEY }}
+      - name: Use Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: 18
           cache: 'yarn'
-      - run: yarn install
+      - run: yarn --frozen-lockfile
       - run: yarn build
+        env:
+          __E2E__: true # e2e will be reusing this workflow, so we need to set this flag to true
+      - uses: actions/upload-artifact@v4
+        with:
+          name: build-artifact
+          path: |
+            packages/**/es/
+            packages/**/lib/
+            packages/**/dist/
+            !packages/**/node_modules/**
     timeout-minutes: 30

.github/workflows/nocobase-e2e.yml (new file, +144)
View File

@@ -0,0 +1,144 @@
name: E2E without workflows
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
on:
workflow_run:
workflows: [Nocobase Build Test]
types: [completed]
jobs:
e2e-test-postgres:
runs-on: ubuntu-latest
container: node:18
services:
# Label used to access the service container
postgres:
# Docker Hub image
image: postgres:11
# Provide the password for postgres
env:
POSTGRES_USER: nocobase
POSTGRES_PASSWORD: password
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v4
- name: Checkout pro-plugins
continue-on-error: true # PRs from external contributors have no access to this repo, so this step may fail; continue-on-error lets the remaining steps run
uses: actions/checkout@v4
with:
repository: nocobase/pro-plugins
ref: main
path: packages/pro-plugins
ssh-key: ${{ secrets.SUBMODULE_SSH_KEY }}
- name: Set variables
continue-on-error: true # PRs from external contributors have no access to this repo, so this step may fail; continue-on-error lets the remaining steps run
run: |
APPEND_PRESET_LOCAL_PLUGINS=$(find ./packages/pro-plugins/@nocobase -mindepth 1 -maxdepth 1 -type d -exec basename {} \; | sed 's/^plugin-//' | tr '\n' ',' | sed 's/,$//')
echo "var2=$APPEND_PRESET_LOCAL_PLUGINS" >> $GITHUB_OUTPUT
id: vars
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v4
id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- run: yarn --frozen-lockfile
- name: Download build artifact
uses: actions/download-artifact@v4
with:
name: build-artifact
- run: npx playwright install chromium --with-deps
- name: Test with postgres
run: yarn e2e p-test --ignore 'packages/**/{plugin-workflow,plugin-workflow-*}/**/__e2e__/**/*.test.ts'
env:
__E2E__: true
APP_ENV: production
LOGGER_LEVEL: error
DB_DIALECT: postgres
DB_HOST: postgres
DB_PORT: 5432
DB_USER: nocobase
DB_PASSWORD: password
DB_DATABASE: nocobase
APPEND_PRESET_LOCAL_PLUGINS: ${{ steps.vars.outputs.var2 }}
- name: Merge reports
run: |
node scripts/moveE2EReportFiles.js && npx playwright merge-reports --config .github/workflows/merge.config.ts ./storage/playwright/tests-report-blob
env:
NODE_OPTIONS: --max-old-space-size=4096
- name: Upload e2e-report
uses: actions/upload-artifact@v4
id: artifact-upload-step
with:
name: e2e-report
path: ./e2e-report/index.html
- name: Comment on PR
uses: actions/github-script@v6
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const jobName = 'E2E without workflows';
const fs = require('fs');
const prNumber = '${{ github.event.workflow_run.pull_requests.number }}';
if (!prNumber) {
core.error('No pull request found for commit ' + context.sha + ' and workflow triggered by: ' + jobName);
return;
}
{
// Mark previous comments as outdated by minimizing them.
const { data: comments } = await github.rest.issues.listComments({
...context.repo,
issue_number: prNumber,
});
for (const comment of comments) {
if (comment.user.login === 'github-actions[bot]' && comment.body.includes(jobName)) {
await github.graphql(`
mutation {
minimizeComment(input: {subjectId: "${comment.node_id}", classifier: OUTDATED}) {
clientMutationId
}
}
`);
}
}
}
const reportUrl = '${{ steps.artifact-upload-step.outputs.artifact-url }}';
core.notice('Report url: ' + reportUrl);
const mergeWorkflowUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}`;
const reportMd = await fs.promises.readFile('report.md', 'utf8');
function formatComment(lines) {
let body = lines.join('\n');
if (body.length > 65535)
body = body.substring(0, 65000) + `... ${body.length - 65000} more characters`;
return body;
}
const { data: response } = await github.rest.issues.createComment({
...context.repo,
issue_number: prNumber,
body: formatComment([
`### Test results for "${jobName}"`,
reportMd,
'',
`Full [HTML report](${reportUrl}). Merge [workflow run](${mergeWorkflowUrl}).`
]),
});
core.info('Posted comment: ' + response.html_url);
timeout-minutes: 180

View File

@@ -21,11 +21,8 @@ on:
 jobs:
   e2e-test-postgres:
-    strategy:
-      matrix:
-        node_version: ['18']
     runs-on: ubuntu-latest
-    container: node:${{ matrix.node_version }}
+    container: node:18
     services:
       # Label used to access the service container
       postgres:
@@ -42,28 +39,26 @@ jobs:
           --health-timeout 5s
           --health-retries 5
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Checkout pro-plugins
-        uses: actions/checkout@v3
+        continue-on-error: true # PRs from external contributors have no access to this repo, so this step may fail; continue-on-error lets the remaining steps run
+        uses: actions/checkout@v4
         with:
           repository: nocobase/pro-plugins
           ref: main
           path: packages/pro-plugins
           ssh-key: ${{ secrets.SUBMODULE_SSH_KEY }}
       - name: Set variables
+        continue-on-error: true # PRs from external contributors have no access to this repo, so this step may fail; continue-on-error lets the remaining steps run
         run: |
           APPEND_PRESET_LOCAL_PLUGINS=$(find ./packages/pro-plugins/@nocobase -mindepth 1 -maxdepth 1 -type d -exec basename {} \; | sed 's/^plugin-//' | tr '\n' ',' | sed 's/,$//')
           echo "var2=$APPEND_PRESET_LOCAL_PLUGINS" >> $GITHUB_OUTPUT
         id: vars
-      - name: Use Node.js ${{ matrix.node_version }}
-        uses: actions/setup-node@v3
-        with:
-          node-version: ${{ matrix.node_version }}
-          cache: 'yarn'
       - name: Get yarn cache directory path
         id: yarn-cache-dir-path
         run: echo "::set-output name=dir::$(yarn cache dir)"
-      - uses: actions/cache@v3
+      - uses: actions/cache@v4
        id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
        with:
          path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
@@ -71,7 +66,7 @@ jobs:
        restore-keys: |
          ${{ runner.os }}-yarn-
-      - run: yarn install
+      - run: yarn --frozen-lockfile
       - name: yarn build
         run: yarn build
         env:
@@ -90,4 +85,70 @@ jobs:
          DB_PASSWORD: password
          DB_DATABASE: nocobase
          APPEND_PRESET_LOCAL_PLUGINS: ${{ steps.vars.outputs.var2 }}
+      - name: Merge reports
+        run: |
+          node scripts/moveE2EReportFiles.js && npx playwright merge-reports --config .github/workflows/merge.config.ts ./storage/playwright/tests-report-blob
+        env:
+          NODE_OPTIONS: --max-old-space-size=4096
+      - name: Upload e2e-report
+        uses: actions/upload-artifact@v4
+        id: artifact-upload-step
+        with:
+          name: e2e-report
+          path: ./e2e-report/index.html
+      - name: Comment on PR
+        uses: actions/github-script@v6
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            const jobName = 'NocoBase E2E Test';
+            const fs = require('fs');
+            const prNumber = '${{ github.event.pull_request.number }}';
+            if (!prNumber) {
+              core.error('No pull request found for commit ' + context.sha + ' and workflow triggered by: ' + jobName);
+              return;
+            }
+            {
+              // Mark previous comments as outdated by minimizing them.
+              const { data: comments } = await github.rest.issues.listComments({
+                ...context.repo,
+                issue_number: prNumber,
+              });
+              for (const comment of comments) {
+                if (comment.user.login === 'github-actions[bot]' && comment.body.includes(jobName)) {
+                  await github.graphql(`
+                    mutation {
+                      minimizeComment(input: {subjectId: "${comment.node_id}", classifier: OUTDATED}) {
+                        clientMutationId
+                      }
+                    }
+                  `);
+                }
+              }
+            }
+            const reportUrl = '${{ steps.artifact-upload-step.outputs.artifact-url }}';
+            core.notice('Report url: ' + reportUrl);
+            const mergeWorkflowUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}`;
+            const reportMd = await fs.promises.readFile('report.md', 'utf8');
+            function formatComment(lines) {
+              let body = lines.join('\n');
+              if (body.length > 65535)
+                body = body.substring(0, 65000) + `... ${body.length - 65000} more characters`;
+              return body;
+            }
+            const { data: response } = await github.rest.issues.createComment({
+              ...context.repo,
+              issue_number: prNumber,
+              body: formatComment([
+                `### Test results for "${jobName}"`,
+                reportMd,
+                '',
+                `Full [HTML report](${reportUrl}). Merge [workflow run](${mergeWorkflowUrl}).`
+              ]),
+            });
+            core.info('Posted comment: ' + response.html_url);
     timeout-minutes: 180

View File

@@ -0,0 +1,144 @@
name: Workflows E2E
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
on:
workflow_run:
workflows: [Nocobase Build Test]
types: [completed]
jobs:
e2e-test-postgres:
runs-on: ubuntu-latest
container: node:18
services:
# Label used to access the service container
postgres:
# Docker Hub image
image: postgres:11
# Provide the password for postgres
env:
POSTGRES_USER: nocobase
POSTGRES_PASSWORD: password
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v4
- name: Checkout pro-plugins
continue-on-error: true # PRs from external contributors have no access to this repo, so this step may fail; continue-on-error lets the remaining steps run
uses: actions/checkout@v4
with:
repository: nocobase/pro-plugins
ref: main
path: packages/pro-plugins
ssh-key: ${{ secrets.SUBMODULE_SSH_KEY }}
- name: Set variables
continue-on-error: true # PRs from external contributors have no access to this repo, so this step may fail; continue-on-error lets the remaining steps run
run: |
APPEND_PRESET_LOCAL_PLUGINS=$(find ./packages/pro-plugins/@nocobase -mindepth 1 -maxdepth 1 -type d -exec basename {} \; | sed 's/^plugin-//' | tr '\n' ',' | sed 's/,$//')
echo "var2=$APPEND_PRESET_LOCAL_PLUGINS" >> $GITHUB_OUTPUT
id: vars
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v4
id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- run: yarn --frozen-lockfile
- name: Download build artifact
uses: actions/download-artifact@v4
with:
name: build-artifact
- run: npx playwright install chromium --with-deps
- name: Test with postgres
run: yarn e2e p-test --match 'packages/**/{plugin-workflow,plugin-workflow-*}/**/__e2e__/**/*.test.ts'
env:
__E2E__: true
APP_ENV: production
LOGGER_LEVEL: error
DB_DIALECT: postgres
DB_HOST: postgres
DB_PORT: 5432
DB_USER: nocobase
DB_PASSWORD: password
DB_DATABASE: nocobase
APPEND_PRESET_LOCAL_PLUGINS: ${{ steps.vars.outputs.var2 }}
- name: Merge reports
run: |
node scripts/moveE2EReportFiles.js && npx playwright merge-reports --config .github/workflows/merge.config.ts ./storage/playwright/tests-report-blob
env:
NODE_OPTIONS: --max-old-space-size=4096
- name: Upload e2e-report
uses: actions/upload-artifact@v4
id: artifact-upload-step
with:
name: e2e-report
path: ./e2e-report/index.html
- name: Comment on PR
uses: actions/github-script@v6
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const jobName = 'Workflows E2E';
const fs = require('fs');
const prNumber = '${{ github.event.workflow_run.pull_requests.number }}';
if (!prNumber) {
core.error('No pull request found for commit ' + context.sha + ' and workflow triggered by: ' + jobName);
return;
}
{
// Mark previous comments as outdated by minimizing them.
const { data: comments } = await github.rest.issues.listComments({
...context.repo,
issue_number: prNumber,
});
for (const comment of comments) {
if (comment.user.login === 'github-actions[bot]' && comment.body.includes(jobName)) {
await github.graphql(`
mutation {
minimizeComment(input: {subjectId: "${comment.node_id}", classifier: OUTDATED}) {
clientMutationId
}
}
`);
}
}
}
const reportUrl = '${{ steps.artifact-upload-step.outputs.artifact-url }}';
core.notice('Report url: ' + reportUrl);
const mergeWorkflowUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}`;
const reportMd = await fs.promises.readFile('report.md', 'utf8');
function formatComment(lines) {
let body = lines.join('\n');
if (body.length > 65535)
body = body.substring(0, 65000) + `... ${body.length - 65000} more characters`;
return body;
}
const { data: response } = await github.rest.issues.createComment({
...context.repo,
issue_number: prNumber,
body: formatComment([
`### Test results for "${jobName}"`,
reportMd,
'',
`Full [HTML report](${reportUrl}). Merge [workflow run](${mergeWorkflowUrl}).`
]),
});
core.info('Posted comment: ' + response.html_url);
timeout-minutes: 180

View File

@@ -148,9 +148,6 @@ const filterArgv = () => {
     if (element.startsWith('--url=')) {
       continue;
     }
-    if (element === '--skip-reporter') {
-      continue;
-    }
     if (element === '--build') {
       continue;
     }
@@ -178,7 +175,6 @@ module.exports = (cli) => {
     .command('test')
     .allowUnknownOption()
     .option('--url [url]')
-    .option('--skip-reporter')
     .option('--build')
     .option('--production')
     .action(async (options) => {
@@ -190,9 +186,6 @@ module.exports = (cli) => {
         process.env.APP_ENV = 'production';
         await run('yarn', ['build']);
       }
-      if (options.skipReporter) {
-        process.env.PLAYWRIGHT_SKIP_REPORTER = true;
-      }
       if (options.url) {
         process.env.APP_BASE_URL = options.url.replace('localhost', '127.0.0.1');
       } else {

View File

@@ -43,7 +43,7 @@ async function runApp(dir, index = 0) {
   await client.query(`DROP DATABASE IF EXISTS "${database}"`);
   await client.query(`CREATE DATABASE "${database}";`);
   await client.end();
-  return execa('yarn', ['nocobase', 'e2e', 'test', dir, '--skip-reporter'], {
+  return execa('yarn', ['nocobase', 'e2e', 'test', dir], {
     shell: true,
     stdio: 'inherit',
     env: {
@@ -58,6 +58,7 @@ async function runApp(dir, index = 0) {
       SOCKET_PATH: `storage/e2e/gateway-e2e-${index}.sock`,
       PM2_HOME: resolve(process.cwd(), `storage/e2e/.pm2-${index}`),
       PLAYWRIGHT_AUTH_FILE: resolve(process.cwd(), `storage/playwright/.auth/admin-${index}.json`),
+      E2E_JOB_ID: index,
     },
   });
 }

View File

@@ -19,7 +19,7 @@ test.describe('bulk-destroy', () => {
     await expect(page.getByLabel('block-item-CardItem-general-').getByText('No data')).not.toBeVisible();

     // 1. Create a bulk delete button
-    await page.getByLabel('schema-initializer-ActionBar-').click();
+    await page.getByLabel('schema-initializer-ActionBar-').hover();
     await page.getByRole('menuitem', { name: 'Delete' }).click();

     // 2. Select all rows
@@ -39,7 +39,7 @@ test.describe('bulk-destroy', () => {
     await expect(page.getByLabel('block-item-CardItem-general-').getByText('No data')).not.toBeVisible();

     // 1. Create a bulk delete button and turn off the confirmation step
-    await page.getByLabel('schema-initializer-ActionBar-').click();
+    await page.getByLabel('schema-initializer-ActionBar-').hover();
     await page.getByRole('menuitem', { name: 'Delete' }).click();
     await page.getByLabel('action-Action-Delete-destroy-').hover();
     await page.getByLabel('designer-schema-settings-Action-actionSettings:bulkDelete-general').hover();

View File

@@ -702,6 +702,16 @@ export const T3686: PageConfig = {
         },
       ],
     },
+    {
+      name: 'parentCollection',
+      fields: [
+        {
+          name: 'parentAssociationField',
+          interface: 'm2m',
+          target: 'parentTargetCollection',
+        },
+      ],
+    },
     {
       name: 'childCollection',
       inherits: ['parentCollection'],
@@ -713,16 +723,6 @@ export const T3686: PageConfig = {
         },
       ],
     },
-    {
-      name: 'parentCollection',
-      fields: [
-        {
-          name: 'parentAssociationField',
-          interface: 'm2m',
-          target: 'parentTargetCollection',
-        },
-      ],
-    },
   ],
   pageSchema: {
     _isJSONSchemaObject: true,

View File

@@ -52,7 +52,7 @@
   "dependencies": {
     "@faker-js/faker": "8.1.0",
     "@nocobase/server": "1.0.0-alpha.9",
-    "@playwright/test": "^1.42.1",
+    "@playwright/test": "^1.44.0",
     "@testing-library/jest-dom": "^6.4.2",
     "@testing-library/react": "^14.0.0",
     "@testing-library/react-hooks": "^8.0.1",

View File

@@ -38,9 +38,9 @@ export const defineConfig = (config?: PlaywrightTestConfig) => {
     maxFailures: 0,
     // Reporter to use
-    reporter: process.env.PLAYWRIGHT_SKIP_REPORTER
-      ? undefined
-      : [['html', { outputFolder: './storage/playwright/tests-report' }]],
+    reporter: process.env.CI
+      ? [['blob', { outputDir: `./storage/playwright/tests-report-blob/blob-${process.env.E2E_JOB_ID}` }]]
+      : [['html', { outputFolder: `./storage/playwright/tests-report-html`, open: 'never' }]],
     outputDir: './storage/playwright/test-results',

View File

@@ -9,7 +9,7 @@
 import { faker } from '@faker-js/faker';
 import { uid } from '@formily/shared';
-import { Page, test as base, expect, request } from '@playwright/test';
+import { Browser, Page, test as base, expect, request } from '@playwright/test';
 import _ from 'lodash';
 import { defineConfig } from './defineConfig';
@@ -193,6 +193,7 @@ interface CreatePageOptions {
 }

 interface ExtendUtils {
+  page?: Page;
   /**
    * NocoBase
    * @param pageConfig
@@ -304,20 +305,27 @@ export class NocoPage {
   }

   async init() {
+    const waitList = [];
     if (this.options?.collections?.length) {
       const collections: any = omitSomeFields(this.options.collections);
       this.collectionsName = collections.map((item) => item.name);
-      await createCollections(collections);
+      waitList.push(createCollections(collections));
     }
-    this.uid = await createPage({
-      type: this.options?.type,
-      name: this.options?.name,
-      pageSchema: this.options?.pageSchema,
-      url: this.options?.url,
-      keepUid: this.options?.keepUid,
-    });
+    waitList.push(
+      createPage({
+        type: this.options?.type,
+        name: this.options?.name,
+        pageSchema: this.options?.pageSchema,
+        url: this.options?.url,
+        keepUid: this.options?.keepUid,
+      }),
+    );
+    const result = await Promise.all(waitList);
+    this.uid = result[result.length - 1];
     this.url = `${this.options?.basePath || '/admin/'}${this.uid}`;
   }
@@ -340,22 +348,35 @@ export class NocoPage {
   }

   async destroy() {
+    const waitList: any[] = [];
     if (this.uid) {
-      await deletePage(this.uid);
+      waitList.push(deletePage(this.uid));
       this.uid = undefined;
     }
     if (this.collectionsName?.length) {
-      await deleteCollections(this.collectionsName);
+      waitList.push(deleteCollections(this.collectionsName));
       this.collectionsName = undefined;
     }
+    await Promise.all(waitList);
   }
 }

+let _page: Page;
+const getPage = async (browser: Browser) => {
+  if (!_page) {
+    _page = await browser.newPage();
+  }
+  return _page;
+};

 const _test = base.extend<ExtendUtils>({
-  mockPage: async ({ page }, use) => {
+  page: async ({ browser }, use) => {
+    await use(await getPage(browser));
+  },
+  mockPage: async ({ browser }, use) => {
     // Keep faker's random values identical on every test run
     // faker.seed(1);
+    const page = await getPage(browser);
     const nocoPages: NocoPage[] = [];
     const mockPage = (config?: PageConfig) => {
       const nocoPage = new NocoPage(config, page);
@@ -365,13 +386,18 @@ const _test = base.extend<ExtendUtils>({
     await use(mockPage);

+    const waitList = [];
     // Automatically destroy the pages created during the test
     for (const nocoPage of nocoPages) {
+      // Not pushed into waitList here because doing so makes the acl tests fail
       await nocoPage.destroy();
-      await setDefaultRole('root');
     }
+    waitList.push(setDefaultRole('root'));
     // Remove users whose id is not 1 and roles other than root, admin and member
-    await removeRedundantUserAndRoles();
+    waitList.push(removeRedundantUserAndRoles());
+    await Promise.all(waitList);
   },
   mockManualDestroyPage: async ({ browser }, use) => {
     const mockManualDestroyPage = (config?: PageConfig) => {
@@ -381,7 +407,7 @@ const _test = base.extend<ExtendUtils>({
     await use(mockManualDestroyPage);
   },
-  createCollections: async ({ page }, use) => {
+  createCollections: async ({ browser }, use) => {
     let collectionsName: string[] = [];
     const _createCollections = async (collectionSettings: CollectionSetting | CollectionSetting[]) => {
@@ -398,7 +424,7 @@ const _test = base.extend<ExtendUtils>({
       await deleteCollections(_.uniq(collectionsName));
     }
   },
-  mockCollections: async ({ page }, use) => {
+  mockCollections: async ({ browser }, use) => {
     let collectionsName: string[] = [];
     const destroy = async () => {
       if (collectionsName.length) {
@@ -415,7 +441,7 @@ const _test = base.extend<ExtendUtils>({
     await use(mockCollections);
     await destroy();
   },
-  mockCollection: async ({ page }, use) => {
+  mockCollection: async ({ browser }, use) => {
     let collectionsName: string[] = [];
     const destroy = async () => {
       if (collectionsName.length) {
@@ -432,7 +458,7 @@ const _test = base.extend<ExtendUtils>({
     await use(mockCollection);
     await destroy();
   },
-  mockRecords: async ({ page }, use) => {
+  mockRecords: async ({ browser }, use) => {
     const mockRecords = async (collectionName: string, count: any = 3, data?: any) => {
       let maxDepth: number;
       if (_.isNumber(data)) {
@@ -448,7 +474,7 @@ const _test = base.extend<ExtendUtils>({
     await use(mockRecords);
   },
-  mockRecord: async ({ page }, use) => {
+  mockRecord: async ({ browser }, use) => {
     const mockRecord = async (collectionName: string, data?: any, maxDepth?: any) => {
       if (_.isNumber(data)) {
         maxDepth = data;
@@ -461,7 +487,9 @@ const _test = base.extend<ExtendUtils>({
     await use(mockRecord);
   },
-  deletePage: async ({ page }, use) => {
+  deletePage: async ({ browser }, use) => {
+    const page = await getPage(browser);
     const deletePage = async (pageName: string) => {
       await page.getByText(pageName, { exact: true }).hover();
       await page.getByRole('button', { name: 'designer-schema-settings-' }).hover();
@@ -471,18 +499,14 @@ const _test = base.extend<ExtendUtils>({
     await use(deletePage);
   },
-  mockRole: async ({ page }, use) => {
+  mockRole: async ({ browser }, use) => {
     const mockRole = async (roleSetting: AclRoleSetting) => {
       return createRole(roleSetting);
     };
     await use(mockRole);
   },
-  updateRole: async ({ page }, use) => {
-    async (roleSetting: AclRoleSetting) => {
-      return updateRole(roleSetting);
-    };
+  updateRole: async ({ browser }, use) => {
     await use(updateRole);
   },
   mockExternalDataSource: async ({ browser }, use) => {
@@ -499,7 +523,7 @@ const _test = base.extend<ExtendUtils>({
     await use(destoryDataSource);
   },
-  clearBlockTemplates: async ({ page }, use) => {
+  clearBlockTemplates: async ({ browser }, use) => {
     const clearBlockTemplates = async () => {
       const api = await request.newContext({
         storageState: process.env.PLAYWRIGHT_AUTH_FILE,
@@ -811,11 +835,16 @@ const setDefaultRole = async (name) => {
   });
   const state = await api.storageState();
   const headers = getHeaders(state);
-  await api.post(`/api/users:setDefaultRole`, {
+  const result = await api.post(`/api/users:setDefaultRole`, {
     headers,
     data: { roleName: name },
   });
+  if (!result.ok()) {
+    throw new Error(await result.text());
+  }
 };

 /**
  *
  * @paramn
@@ -834,8 +863,7 @@ const createExternalDataSource = async (dataSourceSetting: DataSourceSetting) =>
   if (!result.ok()) {
     throw new Error(await result.text());
   }
-  const dataSourceData = (await result.json()).data;
-  return dataSourceData;
+  return (await result.json()).data;
 };

 /**
@@ -855,8 +883,7 @@ const destoryExternalDataSource = async (key) => {
   if (!result.ok()) {
     throw new Error(await result.text());
   }
-  const dataSourceData = (await result.json()).data;
-  return dataSourceData;
+  return (await result.json()).data;
 };

 /**
  * collection Faker
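
The heart of the runtime reduction in e2eUtils is the shared-page pattern above: one Page is cached per worker (getPage) and the built-in page fixture is overridden to hand it out, instead of opening a fresh page for every test. A minimal standalone sketch of that pattern, assuming only @playwright/test; names such as sharedPage, getSharedPage and testWithSharedPage are illustrative, not the ones used in the repo:

import { Browser, Page, test as base } from '@playwright/test';

// One Page per worker process, created lazily on first use.
let sharedPage: Page | undefined;

const getSharedPage = async (browser: Browser): Promise<Page> => {
  if (!sharedPage) {
    sharedPage = await browser.newPage();
  }
  return sharedPage;
};

// Override the built-in `page` fixture so every test in this worker reuses
// the same Page instead of paying the cost of opening a new one each time.
export const testWithSharedPage = base.extend({
  page: async ({ browser }, use) => {
    await use(await getSharedPage(browser));
  },
});

// Usage looks exactly like a normal Playwright test:
testWithSharedPage('navigates with the shared page', async ({ page }) => {
  await page.goto('https://example.com');
});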

View File

@@ -1,9 +0,0 @@
/**
* This file is part of the NocoBase (R) project.
* Copyright (c) 2020-2024 NocoBase Co., Ltd.
* Authors: NocoBase Team.
*
* This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
* For more information, please refer to: https://www.nocobase.com/agreement.
*/

View File

@@ -196,7 +196,7 @@ export class PluginMockCollectionsServer extends Plugin {
       return options;
     };

-    this.app.resourcer.registerActions({
+    this.app.resourceManager.registerActionHandlers({
       mock: async (ctx, next) => {
         const { resourceName } = ctx.action;
         const { values, count = 10, maxDepth = 4 } = ctx.action.params;
@@ -228,7 +228,7 @@ export class PluginMockCollectionsServer extends Plugin {
         );
         return count == 1 ? items[0] : items;
       };
-        const repository = ctx.db.getRepository(resourceName);
+        const repository = ctx.db.getRepository(resourceName) as CollectionRepository;
         let size = count;
         if (Array.isArray(values)) {
           size = values.length;
@@ -302,9 +302,12 @@ export class PluginMockCollectionsServer extends Plugin {
         }
       }
     });

     await collectionsRepository.load();
-    await db.sync();
+    for (const collection of collections) {
+      await db.getRepository(collection.name).collection.sync();
+    }

     const records = await collectionsRepository.find({
       filter: {
         name: collections.map((c) => c.name),

View File

@@ -0,0 +1,31 @@
const fs = require('fs');
const path = require('path');
const glob = require('glob');
// Source pattern and target directory
const sourcePattern = './storage/playwright/tests-report-blob/blob-*/*';
const targetDir = './storage/playwright/tests-report-blob/';
// Make sure the target directory exists
fs.mkdirSync(targetDir, { recursive: true });
// Match files with the glob module
glob(sourcePattern, (err, files) => {
if (err) {
console.error('Error matching files:', err);
return;
}
// Move each matched file
files.forEach((file) => {
const targetFile = path.join(targetDir, path.basename(file));
fs.rename(file, targetFile, (err) => {
if (err) {
console.error(`Error moving file ${file}:`, err);
} else {
console.log(`Moved file ${file} to ${targetDir}`);
}
});
});
});

View File

@@ -4954,12 +4954,12 @@
     picocolors "^1.0.0"
     tslib "^2.6.0"

-"@playwright/test@^1.42.1":
-  version "1.42.1"
-  resolved "https://registry.npmmirror.com/@playwright/test/-/test-1.42.1.tgz#9eff7417bcaa770e9e9a00439e078284b301f31c"
-  integrity sha512-Gq9rmS54mjBL/7/MvBaNOBwbfnh7beHvS6oS4srqXFcQHpQCV1+c8JXWE8VLPyRDhgS3H8x8A7hztqI9VnwrAQ==
+"@playwright/test@^1.44.0":
+  version "1.44.0"
+  resolved "https://registry.npmmirror.com/@playwright/test/-/test-1.44.0.tgz#ac7a764b5ee6a80558bdc0fcbc525fcb81f83465"
+  integrity sha512-rNX5lbNidamSUorBhB4XZ9SQTjAqfe5M+p37Z8ic0jPFBMo5iCtQz1kRWkEMg+rYOKSlVycpQmpqjSFq7LXOfg==
   dependencies:
-    playwright "1.42.1"
+    playwright "1.44.0"

 "@pm2/agent@~2.0.0":
   version "2.0.3"
@@ -20664,17 +20664,17 @@ platform@^1.3.1:
   resolved "https://registry.npmmirror.com/platform/-/platform-1.3.6.tgz#48b4ce983164b209c2d45a107adb31f473a6e7a7"
   integrity sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==

-playwright-core@1.42.1:
-  version "1.42.1"
-  resolved "https://registry.npmmirror.com/playwright-core/-/playwright-core-1.42.1.tgz#13c150b93c940a3280ab1d3fbc945bc855c9459e"
-  integrity sha512-mxz6zclokgrke9p1vtdy/COWBH+eOZgYUVVU34C73M+4j4HLlQJHtfcqiqqxpP0o8HhMkflvfbquLX5dg6wlfA==
+playwright-core@1.44.0:
+  version "1.44.0"
+  resolved "https://registry.npmmirror.com/playwright-core/-/playwright-core-1.44.0.tgz#316c4f0bca0551ffb88b6eb1c97bc0d2d861b0d5"
+  integrity sha512-ZTbkNpFfYcGWohvTTl+xewITm7EOuqIqex0c7dNZ+aXsbrLj0qI8XlGKfPpipjm0Wny/4Lt4CJsWJk1stVS5qQ==

-playwright@1.42.1:
-  version "1.42.1"
-  resolved "https://registry.npmmirror.com/playwright/-/playwright-1.42.1.tgz#79c828b51fe3830211137550542426111dc8239f"
-  integrity sha512-PgwB03s2DZBcNRoW+1w9E+VkLBxweib6KTXM0M3tkiT4jVxKSi6PmVJ591J+0u10LUrgxB7dLRbiJqO5s2QPMg==
+playwright@1.44.0:
+  version "1.44.0"
+  resolved "https://registry.npmmirror.com/playwright/-/playwright-1.44.0.tgz#22894e9b69087f6beb639249323d80fe2b5087ff"
+  integrity sha512-F9b3GUCLQ3Nffrfb6dunPOkE5Mh68tR7zN32L4jCk4FjQamgesGay7/dAAe1WaMEGV04DkdJfcJzjoCKygUaRQ==
   dependencies:
-    playwright-core "1.42.1"
+    playwright-core "1.44.0"
   optionalDependencies:
     fsevents "2.3.2"