fix: merge develop and fix conflict

aaaaaajie 2025-04-02 15:52:55 +08:00
commit a5c9a62ae3
22 changed files with 546 additions and 127 deletions

View File

@ -30,9 +30,64 @@ export function mergeRole(roles: ACLRole[]) {
}
}
result.snippets = mergeRoleSnippets(allSnippets);
adjustActionByStrategy(roles, result);
return result;
}
/**
 * When merging permissions from multiple roles, actions allowed by strategy.actions
 * take priority over the per-resource action configurations.
 * For example, merging:
 * [{
 *   actions: {
 *     'users:view': {...},
 *     'users:create': {...}
 *   },
 *   strategy: {
 *     actions: ['view']
 *   }
 * }]
 * yields:
 * [{
 *   actions: {
 *     'users:view': {},
 *     'users:create': {...}
 *   },
 *   strategy: {
 *     actions: ['view']
 *   }
 * }]
 */
function adjustActionByStrategy(
roles,
result: {
actions?: Record<string, object>;
strategy?: { actions?: string[] };
resources?: string[];
},
) {
const { actions, strategy } = result;
const actionSet = getAdjustActions(roles);
if (!_.isEmpty(actions) && !_.isEmpty(strategy?.actions) && !_.isEmpty(result.resources)) {
for (const resource of result.resources) {
for (const action of strategy.actions) {
if (actionSet.has(action)) {
actions[`${resource}:${action}`] = {};
}
}
}
}
}
function getAdjustActions(roles: ACLRole[]) {
const actionSet = new Set<string>();
for (const role of roles) {
const jsonRole = role.toJSON();
// Within the same role, actions have higher priority than strategy.actions.
if (!_.isEmpty(jsonRole.strategy?.['actions']) && _.isEmpty(jsonRole.actions)) {
jsonRole.strategy['actions'].forEach((x) => !x.includes('own') && actionSet.add(x));
}
}
return actionSet;
}
function mergeRoleNames(sourceRoleNames, newRoleName) {
return newRoleName ? sourceRoleNames.concat(newRoleName) : sourceRoleNames;
}

View File

@ -62,6 +62,12 @@ export class InputFieldInterface extends CollectionFieldInterface {
hasDefaultValue = true;
properties = {
...defaultProps,
trim: {
type: 'boolean',
'x-content': '{{t("Automatically remove heading and tailing spaces")}}',
'x-decorator': 'FormItem',
'x-component': 'Checkbox',
},
layout: {
type: 'void',
title: '{{t("Index")}}',

View File

@ -259,6 +259,7 @@
"Parent collection fields": "父表字段",
"Basic": "基本类型",
"Single line text": "单行文本",
"Automatically remove heading and tailing spaces": "自动去除首尾空白字符",
"Long text": "多行文本",
"Phone": "手机号码",
"Email": "电子邮箱",

View File

@ -33,7 +33,7 @@ import { RemoteSelect, RemoteSelectProps } from '../remote-select';
import useServiceOptions, { useAssociationFieldContext } from './hooks';
const removeIfKeyEmpty = (obj, filterTargetKey) => {
if (!obj || typeof obj !== 'object' || !filterTargetKey) return obj;
if (!obj || typeof obj !== 'object' || !filterTargetKey || Array.isArray(obj)) return obj;
return !obj[filterTargetKey] ? null : obj;
};
@ -132,7 +132,7 @@ const InternalAssociationSelect = observer(
return () => {
form.removeEffects(id);
};
}, []);
}, [] as any);
const handleCreateAction = async (props) => {
const { search: value, callBack } = props;

View File

@ -105,4 +105,18 @@ describe('string field', () => {
name2: 'n2111',
});
});
it('trim', async () => {
const collection = db.collection({
name: 'tests',
fields: [{ type: 'string', name: 'name', trim: true }],
});
await db.sync();
const model = await collection.model.create({
name: ' n1\n ',
});
expect(model.toJSON()).toMatchObject({
name: 'n1',
});
});
});

View File

@ -54,19 +54,18 @@ export class BelongsToArrayAssociation {
return this.db.getModel(this.targetName);
}
generateInclude() {
if (this.db.sequelize.getDialect() !== 'postgres') {
throw new Error('Filtering by many to many (array) associations is only supported on postgres');
}
generateInclude(parentAs?: string) {
const targetCollection = this.db.getCollection(this.targetName);
const targetField = targetCollection.getField(this.targetKey);
const sourceCollection = this.db.getCollection(this.source.name);
const foreignField = sourceCollection.getField(this.foreignKey);
const queryInterface = this.db.sequelize.getQueryInterface();
const left = queryInterface.quoteIdentifiers(`${this.as}.${targetField.columnName()}`);
const right = queryInterface.quoteIdentifiers(`${this.source.collection.name}.${foreignField.columnName()}`);
const asLeft = parentAs ? `${parentAs}->${this.as}` : this.as;
const asRight = parentAs || this.source.collection.name;
const left = queryInterface.quoteIdentifiers(`${asLeft}.${targetField.columnName()}`);
const right = queryInterface.quoteIdentifiers(`${asRight}.${foreignField.columnName()}`);
return {
on: this.db.sequelize.literal(`${left}=any(${right})`),
on: this.db.queryInterface.generateJoinOnForJSONArray(left, right),
};
}

View File

@ -82,6 +82,31 @@ const queryParentSQL = (options: {
SELECT ${q(targetKeyField)} AS ${q(targetKey)}, ${q(foreignKeyField)} AS ${q(foreignKey)} FROM cte`;
};
const processIncludes = (includes: any[], model: any, parentAs = '') => {
includes.forEach((include: { association: string; include?: any[] }, index: number) => {
// Process current level
const association = model.associations[include.association];
if (association?.generateInclude) {
includes[index] = {
...include,
...association.generateInclude(parentAs),
};
}
// Recursively process nested includes if they exist
if (include.include && Array.isArray(include.include) && include.include.length > 0) {
// Get the associated model for the next level
const nextModel = association?.target;
if (!nextModel) {
return;
}
processIncludes(include.include, nextModel, parentAs ? `${parentAs}->${association.as}` : association.as);
}
});
return includes;
};
export class EagerLoadingTree {
public root: EagerLoadingNode;
db: Database;
@ -252,28 +277,20 @@ export class EagerLoadingTree {
throw new Error(`Model ${node.model.name} does not have primary key`);
}
includeForFilter.forEach((include: { association: string }, index: number) => {
const association = node.model.associations[include.association];
if (association?.associationType == 'BelongsToArray') {
includeForFilter[index] = {
...include,
...association.generateInclude(),
};
}
});
const options = {
...this.rootQueryOptions,
includeIgnoreAttributes: false,
attributes: [primaryKeyField],
group: `${node.model.name}.${primaryKeyField}`,
transaction,
include: includeForFilter,
};
include: processIncludes(includeForFilter, node.model),
} as any;
if (node.model.database.options.dialect === 'mssql' && options.order) {
options.order = null;
}
// find all ids
const ids = (await node.model.findAll(options)).map((row) => {
const ids = (await node.model.findAll(options)).map((row) => {
return { row, pk: row[primaryKeyField] };
});
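
The alias handling in processIncludes matters because Sequelize names nested join targets with the parent->child convention, and the BelongsToArray on-condition must reference that alias explicitly. A rough sketch of the transformation for a projects -> users (belongsTo) -> tags (belongsToArray) chain like the one exercised in the tests below; model and association names are illustrative.

// Include tree passed in for filtering:
const includeForFilter = [
  { association: 'users', include: [{ association: 'tags' }] },
];

// processIncludes(includeForFilter, ProjectModel) leaves the plain belongsTo
// include alone, recurses into it with parentAs = 'users', and rewrites the
// BelongsToArray include roughly into:
// [
//   {
//     association: 'users',
//     include: [
//       {
//         association: 'tags',
//         // on-condition built by generateInclude('users'), comparing the
//         // "users->tags" target key column against the "users" array column
//         on: /* db.queryInterface.generateJoinOnForJSONArray(left, right) */,
//       },
//     ],
//   },
// ]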

View File

@ -8,7 +8,7 @@
*/
import { DataTypes } from 'sequelize';
import { BaseColumnFieldOptions, Field } from './field';
import { BaseColumnFieldOptions, Field, FieldContext } from './field';
export class StringField extends Field {
get dataType() {
@ -18,9 +18,20 @@ export class StringField extends Field {
return DataTypes.STRING;
}
additionalSequelizeOptions() {
const { name, trim } = this.options;
return {
set(value) {
this.setDataValue(name, trim ? value?.trim() : value);
},
};
}
}
export interface StringFieldOptions extends BaseColumnFieldOptions {
type: 'string';
length?: number;
trim?: boolean;
}
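
The trim option is implemented as a Sequelize attribute setter rather than a model hook, so every write path (build, create, update) passes through it. A rough standalone equivalent in plain Sequelize, outside the NocoBase Field abstraction; the connection string and model name are illustrative.

import { DataTypes, Model, Sequelize } from 'sequelize';

const sequelize = new Sequelize('sqlite::memory:');

class Test extends Model {}
Test.init(
  {
    name: {
      type: DataTypes.STRING,
      // Same idea as additionalSequelizeOptions(): trim on assignment via a setter.
      set(value: any) {
        this.setDataValue('name', value?.trim());
      },
    },
  },
  { sequelize, modelName: 'test' },
);

// Test.build({ name: ' n1\n ' }).get('name') === 'n1'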

View File

@ -142,6 +142,10 @@ export default class MysqlQueryInterface extends QueryInterface {
}
}
public generateJoinOnForJSONArray(left: string, right: string) {
return this.db.sequelize.literal(`JSON_CONTAINS(${right}, JSON_ARRAY(${left}))`);
}
changeColumnDefaultValueSQL(options: ChangeColumnOptions): Promise<string> {
return null;
}

View File

@ -233,6 +233,10 @@ $BODY$
return res[0]['show_create_table'];
}
public generateJoinOnForJSONArray(left: string, right: string) {
return this.db.sequelize.literal(`${left}=any(${right})`);
}
changeColumnDefaultValueSQL(options: ChangeColumnOptions): Promise<string> {
return null;
}

View File

@ -126,6 +126,11 @@ export default abstract class QueryInterface {
return this.db.sequelize.getQueryInterface().queryGenerator.quoteIdentifier(identifier);
}
public generateJoinOnForJSONArray(left: string, right: string) {
const dialect = this.db.sequelize.getDialect();
throw new Error(`Filtering by many to many (array) associations is not supported on ${dialect}`);
}
public async changeColumn(options: ChangeColumnOptions) {
if (!options.actions.length) {
throw new Error('Actions invalid');

View File

@ -147,6 +147,10 @@ export default class SqliteQueryInterface extends QueryInterface {
await this.db.sequelize.query(sql, { transaction });
}
public generateJoinOnForJSONArray(left: string, right: string) {
return this.db.sequelize.literal(`${left} in (SELECT value from json_each(${right}))`);
}
changeColumnDefaultValueSQL(options: ChangeColumnOptions): Promise<string> {
return null;
}
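
Moving the join-condition generation into the per-dialect QueryInterface classes is what replaces the old hard postgres-only check in BelongsToArrayAssociation. A minimal usage sketch based only on the literals above; left and right stand for the already-quoted identifiers built in generateInclude, and the wrapper function is hypothetical.

import { Database } from '@nocobase/database';

// left/right are the already-quoted identifiers built in generateInclude().
function buildArrayJoinOn(db: Database, left: string, right: string) {
  return db.queryInterface.generateJoinOnForJSONArray(left, right);
}

// buildArrayJoinOn(db, 'users->tags.id', 'users.tag_ids') resolves per dialect to roughly:
// postgres -> users->tags.id=any(users.tag_ids)
// mysql    -> JSON_CONTAINS(users.tag_ids, JSON_ARRAY(users->tags.id))
// sqlite   -> users->tags.id in (SELECT value from json_each(users.tag_ids))
// any other dialect throws: Filtering by many to many (array) associations is not supported on <dialect>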

View File

@ -315,6 +315,34 @@ describe('merge strategy', () => {
key1: 'val1 + val2',
});
});
it('case 2', () => {
const obj = assign(
{
filter: { a: 'a2' },
},
{},
{
filter: () => '123',
},
);
expect(obj).toMatchObject({
filter: '123',
});
});
it('case 3', () => {
const obj = assign(
{},
{
filter: { a: 'a2' },
},
{
filter: () => '123',
},
);
expect(obj).toMatchObject({
filter: '123',
});
});
});
describe('merge', () => {
@ -416,4 +444,32 @@ describe('merge strategy', () => {
});
});
});
describe('source is empty', () => {
it('case 1', () => {
const obj = assign(
{
resourceName: 'uiSchemas',
resourceIndex: 'n0jylid5rqa',
actionName: 'getJsonSchema',
values: {},
},
{},
{
filter: 'andMerge',
fields: 'intersect',
except: 'union',
whitelist: 'intersect',
blacklist: 'intersect',
sort: 'overwrite',
},
);
expect(obj).toMatchObject({
resourceName: 'uiSchemas',
resourceIndex: 'n0jylid5rqa',
actionName: 'getJsonSchema',
values: {},
});
});
});
});

View File

@ -8,7 +8,7 @@
*/
import deepmerge from 'deepmerge';
import lodash from 'lodash';
import _ from 'lodash';
import { isPlainObject } from './common';
type MergeStrategyType = 'merge' | 'deepMerge' | 'overwrite' | 'andMerge' | 'orMerge' | 'intersect' | 'union';
@ -22,7 +22,9 @@ export interface MergeStrategies {
function getEnumerableOwnPropertySymbols(target: any): any[] {
return Object.getOwnPropertySymbols
? Object.getOwnPropertySymbols(target).filter((symbol) => target.propertyIsEnumerable(symbol))
? Object.getOwnPropertySymbols(target).filter((symbol) =>
Object.prototype.propertyIsEnumerable.call(target, symbol),
)
: [];
}
@ -88,7 +90,7 @@ mergeStrategies.set('union', (x, y) => {
if (typeof y === 'string') {
y = y.split(',');
}
return lodash.uniq((x || []).concat(y || [])).filter(Boolean);
return _.uniq((x || []).concat(y || [])).filter(Boolean);
});
mergeStrategies.set('intersect', (x, y) =>
@ -110,15 +112,22 @@ mergeStrategies.set('intersect', (x, y) =>
);
export function assign(target: any, source: any, strategies: MergeStrategies = {}) {
getKeys(source).forEach((sourceKey) => {
const sourceKeys = getKeys(source);
const targetKeys = getKeys(target);
_.uniq([...sourceKeys, ...targetKeys]).forEach((sourceKey) => {
const strategy = strategies[sourceKey];
let func = mergeStrategies.get('deepMerge');
let func: any;
if (typeof strategy === 'function') {
func = strategy;
} else if (typeof strategy === 'string' && mergeStrategies.has(strategy as any)) {
func = mergeStrategies.get(strategy as any);
}
if (func) {
target[sourceKey] = func(target[sourceKey], source[sourceKey]);
} else if (sourceKeys.includes(sourceKey)) {
const func = mergeStrategies.get('deepMerge');
target[sourceKey] = func(target[sourceKey], source[sourceKey]);
}
});
return target;
}
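
The behavioural change in assign() is that it now iterates the union of target and source keys, so a per-key strategy (a function or a named strategy) still runs when the source omits that key, while keys without a strategy keep the old behaviour and are only deep-merged when the source actually provides them. A small sketch mirroring the new test cases; the import path is assumed.

import { assign } from '@nocobase/utils';

// A function strategy now runs even though the source (second argument) has no `filter` key:
assign({ filter: { a: 'a2' } }, {}, { filter: () => '123' });
// -> { filter: '123' }

// Named strategies for keys present in neither object never fire, so an empty
// source leaves the target untouched:
assign({ values: {} }, {}, { filter: 'andMerge', sort: 'overwrite' });
// -> { values: {} }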

View File

@ -530,4 +530,150 @@ describe('union role: full permissions', async () => {
expect(createRoleResponse.statusCode).toBe(200);
expect(createRoleResponse.body.data.role).not.toBe(UNION_ROLE_KEY);
});
it('should let general action permissions override specific resource permissions when using union role #1924', async () => {
const rootAgent = await app.agent().login(rootUser);
await rootAgent
.post(`/dataSources/main/roles:update`)
.query({
filterByTk: role1.name,
})
.send({
roleName: role1.name,
strategy: {
actions: ['view'],
},
dataSourceKey: 'main',
});
const ownDataSourceScopeRole = await db.getRepository('dataSourcesRolesResourcesScopes').findOne({
where: {
key: 'own',
dataSourceKey: 'main',
},
});
const scopeFields = ['id', 'createdBy', 'createdById'];
const dataSourceResourcesResponse = await rootAgent
.post(`/roles/${role2.name}/dataSourceResources:create`)
.query({
filterByTk: 'users',
filter: {
dataSourceKey: 'main',
name: 'users',
},
})
.send({
usingActionsConfig: true,
actions: [
{
name: 'view',
fields: scopeFields,
scope: {
id: ownDataSourceScopeRole.id,
createdAt: '2025-02-19T08:57:17.385Z',
updatedAt: '2025-02-19T08:57:17.385Z',
key: 'own',
dataSourceKey: 'main',
name: '{{t("Own records")}}',
resourceName: null,
scope: {
createdById: '{{ ctx.state.currentUser.id }}',
},
},
},
],
name: 'users',
dataSourceKey: 'main',
});
expect(dataSourceResourcesResponse.statusCode).toBe(200);
agent = await app.agent().login(user, UNION_ROLE_KEY);
const rolesResponse = await agent.resource('roles').check();
expect(rolesResponse.status).toBe(200);
expect(rolesResponse.body.data.actions['users:view']).toStrictEqual({});
});
it('should verify actions configuration for union role with specific scopes', async () => {
const rootAgent = await app.agent().login(rootUser);
await rootAgent
.post(`/dataSources/main/roles:update`)
.query({
filterByTk: role1.name,
})
.send({
roleName: role1.name,
strategy: {
actions: ['view', 'create:own', 'update'],
},
dataSourceKey: 'main',
});
const ownDataSourceScopeRole = await db.getRepository('dataSourcesRolesResourcesScopes').findOne({
where: {
key: 'own',
dataSourceKey: 'main',
},
});
const scopeFields = ['id', 'createdBy', 'createdById'];
const dataSourceResourcesResponse = await rootAgent
.post(`/roles/${role2.name}/dataSourceResources:create`)
.query({
filterByTk: 'users',
filter: {
dataSourceKey: 'main',
name: 'users',
},
})
.send({
usingActionsConfig: true,
actions: [
{
name: 'view',
fields: scopeFields,
scope: {
id: ownDataSourceScopeRole.id,
createdAt: '2025-02-19T08:57:17.385Z',
updatedAt: '2025-02-19T08:57:17.385Z',
key: 'own',
dataSourceKey: 'main',
name: '{{t("Own records")}}',
resourceName: null,
scope: {
createdById: '{{ ctx.state.currentUser.id }}',
},
},
},
{
name: 'create',
fields: scopeFields,
scope: {
id: ownDataSourceScopeRole.id,
createdAt: '2025-02-19T08:57:17.385Z',
updatedAt: '2025-02-19T08:57:17.385Z',
key: 'own',
dataSourceKey: 'main',
name: '{{t("Own records")}}',
resourceName: null,
scope: {
createdById: '{{ ctx.state.currentUser.id }}',
},
},
},
],
name: 'users',
dataSourceKey: 'main',
});
expect(dataSourceResourcesResponse.statusCode).toBe(200);
agent = await app.agent().login(user, UNION_ROLE_KEY);
const rolesResponse = await agent.resource('roles').check();
expect(rolesResponse.status).toBe(200);
expect(rolesResponse.body.data.actions).toHaveProperty('users:create');
expect(rolesResponse.body.data.actions).toHaveProperty('users:view');
expect(rolesResponse.body.data.actions['users:view']).toStrictEqual({});
expect(rolesResponse.body.data.actions).not.toHaveProperty('users:create:own');
expect(rolesResponse.body.data.actions['users:create']).toHaveProperty('filter');
expect(rolesResponse.body.data.actions['users:create']).toHaveProperty('whitelist');
expect(rolesResponse.body.data.actions['users:update']).toStrictEqual({});
});
});

View File

@ -324,4 +324,100 @@ describe('issues', () => {
}
expect(res.status).toBe(200);
});
test('filtering by fields of a relation collection with m2m array field', async () => {
await db.getRepository('collections').create({
values: {
name: 'tags',
fields: [
{
name: 'id',
type: 'bigInt',
autoIncrement: true,
primaryKey: true,
allowNull: false,
},
{
name: 'title',
type: 'string',
},
],
},
});
await db.getRepository('collections').create({
values: {
name: 'users',
fields: [
{
name: 'id',
type: 'bigInt',
autoIncrement: true,
primaryKey: true,
allowNull: false,
},
{
name: 'username',
type: 'string',
},
{
name: 'tags',
type: 'belongsToArray',
foreignKey: 'tag_ids',
target: 'tags',
targetKey: 'id',
},
],
},
});
await db.getRepository('collections').create({
values: {
name: 'projects',
fields: [
{
name: 'id',
type: 'bigInt',
autoIncrement: true,
primaryKey: true,
allowNull: false,
},
{
name: 'title',
type: 'string',
},
{
name: 'users',
type: 'belongsTo',
foreignKey: 'user_id',
target: 'users',
},
],
},
});
// @ts-ignore
await db.getRepository('collections').load();
await db.sync();
await db.getRepository('tags').create({
values: [{ title: 'a' }, { title: 'b' }, { title: 'c' }],
});
await db.getRepository('users').create({
values: { id: 1, username: 'a' },
});
await db.getRepository('projects').create({
values: { id: 1, title: 'p1', user_id: 1 },
});
await expect(
db.getRepository('projects').findOne({
appends: ['users', 'users.tags'],
filter: {
$and: [
{
users: {
username: 'a',
},
},
],
},
}),
).resolves.toBeTruthy();
});
});

View File

@ -207,15 +207,8 @@ describe('m2m array api, bigInt targetKey', () => {
},
},
});
if (db.sequelize.getDialect() === 'postgres') {
const res = await search;
expect(res.length).toBe(1);
} else {
expect(search).rejects.toThrowError();
}
if (db.sequelize.getDialect() !== 'postgres') {
return;
}
const res1 = await search;
expect(res1.length).toBe(1);
const search2 = db.getRepository('users').find({
filter: {
'tags.title': {
@ -223,12 +216,8 @@ describe('m2m array api, bigInt targetKey', () => {
},
},
});
if (db.sequelize.getDialect() === 'postgres') {
const res = await search2;
expect(res.length).toBe(2);
} else {
expect(search2).rejects.toThrowError();
}
const res2 = await search2;
expect(res2.length).toBe(2);
});
it('should create with belongsToArray', async () => {

View File

@ -186,15 +186,8 @@ describe('m2m array api, string targetKey', () => {
},
},
});
if (db.sequelize.getDialect() === 'postgres') {
const res = await search;
expect(res.length).toBe(1);
} else {
expect(search).rejects.toThrowError();
}
if (db.sequelize.getDialect() !== 'postgres') {
return;
}
const res1 = await search;
expect(res1.length).toBe(1);
const search2 = db.getRepository('users').find({
filter: {
'tags.title': {
@ -202,12 +195,8 @@ describe('m2m array api, string targetKey', () => {
},
},
});
if (db.sequelize.getDialect() === 'postgres') {
const res = await search2;
expect(res.length).toBe(2);
} else {
expect(search2).rejects.toThrowError();
}
const res2 = await search2;
expect(res2.length).toBe(2);
});
it('should create with belongsToArray', async () => {

View File

@ -42,7 +42,7 @@ function getRepeatTypeValue(v) {
return 'none';
}
function CommonRepeatField({ value, onChange }) {
function CommonRepeatField({ value, onChange, disabled }) {
const { t } = useWorkflowTranslation();
const option = getNumberOption(value);
@ -59,11 +59,12 @@ function CommonRepeatField({ value, onChange }) {
addonBefore={t('Every')}
addonAfter={t(option.unitText)}
className="auto-width"
disabled={disabled}
/>
);
}
export function RepeatField({ value = null, onChange }) {
export function RepeatField({ value = null, onChange, disabled }) {
const { t } = useWorkflowTranslation();
const typeValue = getRepeatTypeValue(value);
const onTypeChange = useCallback(
@ -114,20 +115,23 @@ export function RepeatField({ value = null, onChange }) {
}
`}
>
<Select value={typeValue} onChange={onTypeChange} className="auto-width">
<Select value={typeValue} onChange={onTypeChange} className="auto-width" disabled={disabled}>
{RepeatOptions.map((item) => (
<Select.Option key={item.value} value={item.value}>
{t(item.text)}
</Select.Option>
))}
</Select>
{typeof typeValue === 'number' ? <CommonRepeatField value={value} onChange={onChange} /> : null}
{typeof typeValue === 'number' ? (
<CommonRepeatField value={value} onChange={onChange} disabled={disabled} />
) : null}
{typeValue === 'cron' ? (
<Cron
value={value.trim().split(/\s+/).slice(1).join(' ')}
setValue={(v) => onChange(`0 ${v}`)}
clearButton={false}
locale={window['cronLocale']}
disabled={disabled}
/>
) : null}
</fieldset>

View File

@ -376,11 +376,16 @@ export default class PluginWorkflowServer extends Plugin {
const prev = workflow.previous();
if (prev.config) {
trigger.off({ ...workflow.get(), ...prev });
this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id} based on previous configuration`);
}
trigger.on(workflow);
this.getLogger(workflow.id).info(`toggle ON workflow ${workflow.id}`);
this.enabledCache.set(workflow.id, workflow);
} else {
trigger.off(workflow);
this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id}`);
this.enabledCache.delete(workflow.id);
}
if (!silent) {

View File

@ -104,8 +104,7 @@ export default class DateFieldScheduleTrigger {
// caching workflows in range, default to 5min
cacheCycle = 300_000;
constructor(public workflow: Plugin) {
workflow.app.on('afterStart', async () => {
onAfterStart = () => {
if (this.timer) {
return;
}
@ -113,9 +112,9 @@ export default class DateFieldScheduleTrigger {
this.timer = setInterval(() => this.reload(), this.cacheCycle);
this.reload();
});
};
workflow.app.on('beforeStop', () => {
onBeforeStop = () => {
if (this.timer) {
clearInterval(this.timer);
}
@ -124,31 +123,36 @@ export default class DateFieldScheduleTrigger {
clearTimeout(timer);
this.cache.delete(key);
}
});
};
constructor(public workflow: Plugin) {
workflow.app.on('afterStart', this.onAfterStart);
workflow.app.on('beforeStop', this.onBeforeStop);
}
reload() {
for (const [key, timer] of this.cache.entries()) {
clearTimeout(timer);
this.cache.delete(key);
}
async reload() {
const workflows = Array.from(this.workflow.enabledCache.values()).filter(
(item) => item.type === 'schedule' && item.config.mode === SCHEDULE_MODE.DATE_FIELD,
);
// NOTE: clear jobs cached from the last cycle
this.cache = new Map();
this.inspect(workflows);
workflows.forEach((workflow) => {
this.inspect(workflow);
});
}
inspect(workflows: WorkflowModel[]) {
async inspect(workflow: WorkflowModel) {
const now = new Date();
workflows.forEach(async (workflow) => {
const records = await this.loadRecordsToSchedule(workflow, now);
this.workflow.getLogger(workflow.id).info(`[Schedule on date field] ${records.length} records to schedule`);
records.forEach((record) => {
const nextTime = this.getRecordNextTime(workflow, record);
this.schedule(workflow, record, nextTime, Boolean(nextTime));
});
});
}
// 1. startsOn in range -> yes
@ -233,8 +237,6 @@ export default class DateFieldScheduleTrigger {
[Op.gte]: new Date(endTimestamp),
},
});
} else {
this.workflow.getLogger(id).warn(`[Schedule on date field] "endsOn.field" is not configured`);
}
}
}
@ -367,7 +369,7 @@ export default class DateFieldScheduleTrigger {
}
on(workflow: WorkflowModel) {
this.inspect([workflow]);
this.inspect(workflow);
const { collection } = workflow.config;
const [dataSourceName, collectionName] = parseCollectionName(collection);

View File

@ -18,26 +18,30 @@ const MAX_SAFE_INTERVAL = 2147483647;
export default class StaticScheduleTrigger {
private timers: Map<string, NodeJS.Timeout | null> = new Map();
constructor(public workflow: Plugin) {
workflow.app.on('afterStart', async () => {
onAfterStart = () => {
const workflows = Array.from(this.workflow.enabledCache.values()).filter(
(item) => item.type === 'schedule' && item.config.mode === SCHEDULE_MODE.STATIC,
);
this.inspect(workflows);
workflows.forEach((workflow) => {
this.inspect(workflow);
});
};
workflow.app.on('beforeStop', () => {
onBeforeStop = () => {
for (const timer of this.timers.values()) {
clearInterval(timer);
}
});
};
constructor(public workflow: Plugin) {
workflow.app.on('afterStart', this.onAfterStart);
workflow.app.on('beforeStop', this.onBeforeStop);
}
inspect(workflows: WorkflowModel[]) {
inspect(workflow: WorkflowModel) {
const now = new Date();
workflows.forEach((workflow) => {
const nextTime = this.getNextTime(workflow, now);
if (nextTime) {
this.workflow
@ -47,7 +51,6 @@ export default class StaticScheduleTrigger {
this.workflow.getLogger(workflow.id).info('workflow will not be scheduled');
}
this.schedule(workflow, nextTime, nextTime >= now.getTime());
});
}
getNextTime({ config, stats }: WorkflowModel, currentDate: Date, nextSecond = false) {
@ -130,7 +133,7 @@ export default class StaticScheduleTrigger {
}
on(workflow) {
this.inspect([workflow]);
this.inspect(workflow);
}
off(workflow) {