fix: merge develop and fix conflict
This commit is contained in:
commit a5c9a62ae3
@@ -30,9 +30,64 @@ export function mergeRole(roles: ACLRole[]) {
     }
   }
   result.snippets = mergeRoleSnippets(allSnippets);
+  adjustActionByStrategy(roles, result);
   return result;
 }
 
+/**
+ * When merging permissions from multiple roles, if strategy.actions allows certain actions, then those actions have higher priority.
+ * For example, [
+ * {
+ *   actions: {
+ *     'users:view': {...},
+ *     'users:create': {...}
+ *   },
+ *   strategy: {
+ *     actions: ['view']
+ *   }
+ * }]
+ * finally result: [{
+ *   actions: {
+ *     'users:create': {...},
+ *   },
+ * {
+ *   strategy: {
+ *     actions: ['view']
+ * }]
+ **/
+function adjustActionByStrategy(
+  roles,
+  result: {
+    actions?: Record<string, object>;
+    strategy?: { actions?: string[] };
+    resources?: string[];
+  },
+) {
+  const { actions, strategy } = result;
+  const actionSet = getAdjustActions(roles);
+  if (!_.isEmpty(actions) && !_.isEmpty(strategy?.actions) && !_.isEmpty(result.resources)) {
+    for (const resource of result.resources) {
+      for (const action of strategy.actions) {
+        if (actionSet.has(action)) {
+          actions[`${resource}:${action}`] = {};
+        }
+      }
+    }
+  }
+}
+
+function getAdjustActions(roles: ACLRole[]) {
+  const actionSet = new Set<string>();
+  for (const role of roles) {
+    const jsonRole = role.toJSON();
+    // Within the same role, actions have higher priority than strategy.actions.
+    if (!_.isEmpty(jsonRole.strategy?.['actions']) && _.isEmpty(jsonRole.actions)) {
+      jsonRole.strategy['actions'].forEach((x) => !x.includes('own') && actionSet.add(x));
+    }
+  }
+  return actionSet;
+}
+
 function mergeRoleNames(sourceRoleNames, newRoleName) {
   return newRoleName ? sourceRoleNames.concat(newRoleName) : sourceRoleNames;
 }
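A minimal, self-contained TypeScript sketch of the rule the new adjustActionByStrategy/getAdjustActions pair implements; the standalone function, the simplified role shapes and the sample data below are illustrative assumptions, not the plugin's actual API:

import _ from 'lodash';

type MergedRole = {
  actions: Record<string, object>;
  strategy: { actions: string[] };
  resources: string[];
};

// Simplified stand-in: any action name allowed by some role's strategy
// (and not an ":own" variant) wins over per-resource restrictions.
function adjustByStrategy(strategyOnlyRoles: string[][], result: MergedRole) {
  const globallyAllowed = new Set<string>();
  strategyOnlyRoles.forEach((actions) => actions.forEach((x) => !x.includes('own') && globallyAllowed.add(x)));
  if (_.isEmpty(result.actions) || _.isEmpty(result.strategy.actions) || _.isEmpty(result.resources)) return;
  for (const resource of result.resources) {
    for (const action of result.strategy.actions) {
      if (globallyAllowed.has(action)) {
        result.actions[`${resource}:${action}`] = {}; // empty object = unrestricted
      }
    }
  }
}

// Mirrors the JSDoc example: one role restricts 'users:view', another role's
// strategy allows 'view' globally, so the merged 'users:view' becomes unrestricted.
const merged: MergedRole = {
  actions: { 'users:view': { filter: { createdById: 1 } }, 'users:create': {} },
  strategy: { actions: ['view'] },
  resources: ['users'],
};
adjustByStrategy([['view']], merged);
console.log(merged.actions['users:view']); // {}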
@@ -62,6 +62,12 @@ export class InputFieldInterface extends CollectionFieldInterface {
   hasDefaultValue = true;
   properties = {
     ...defaultProps,
+    trim: {
+      type: 'boolean',
+      'x-content': '{{t("Automatically remove heading and tailing spaces")}}',
+      'x-decorator': 'FormItem',
+      'x-component': 'Checkbox',
+    },
     layout: {
       type: 'void',
       title: '{{t("Index")}}',
@@ -259,6 +259,7 @@
   "Parent collection fields": "父表字段",
   "Basic": "基本类型",
   "Single line text": "单行文本",
+  "Automatically remove heading and tailing spaces": "自动去除首尾空白字符",
   "Long text": "多行文本",
   "Phone": "手机号码",
   "Email": "电子邮箱",
@@ -33,7 +33,7 @@ import { RemoteSelect, RemoteSelectProps } from '../remote-select';
 import useServiceOptions, { useAssociationFieldContext } from './hooks';
 
 const removeIfKeyEmpty = (obj, filterTargetKey) => {
-  if (!obj || typeof obj !== 'object' || !filterTargetKey) return obj;
+  if (!obj || typeof obj !== 'object' || !filterTargetKey || Array.isArray(obj)) return obj;
   return !obj[filterTargetKey] ? null : obj;
 };
 
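The added Array.isArray guard matters when the value is a list (to-many data): an array is an object without the target key, so the old code nulled it out. A small standalone sketch of the helper's behavior (sample inputs are illustrative):

const removeIfKeyEmpty = (obj: any, filterTargetKey: string) => {
  if (!obj || typeof obj !== 'object' || !filterTargetKey || Array.isArray(obj)) return obj;
  return !obj[filterTargetKey] ? null : obj;
};

console.log(removeIfKeyEmpty({ id: 1, name: 'a' }, 'id'));   // { id: 1, name: 'a' }
console.log(removeIfKeyEmpty({ name: 'a' }, 'id'));          // null (target key missing/empty)
console.log(removeIfKeyEmpty([{ id: 1 }, { id: 2 }], 'id')); // arrays now pass through unchanged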
@@ -132,7 +132,7 @@ const InternalAssociationSelect = observer(
     return () => {
       form.removeEffects(id);
     };
-  }, []);
+  }, [] as any);
 
   const handleCreateAction = async (props) => {
     const { search: value, callBack } = props;
@@ -105,4 +105,18 @@ describe('string field', () => {
       name2: 'n2111',
     });
   });
+
+  it('trim', async () => {
+    const collection = db.collection({
+      name: 'tests',
+      fields: [{ type: 'string', name: 'name', trim: true }],
+    });
+    await db.sync();
+    const model = await collection.model.create({
+      name: ' n1\n ',
+    });
+    expect(model.toJSON()).toMatchObject({
+      name: 'n1',
+    });
+  });
 });
@@ -54,19 +54,18 @@ export class BelongsToArrayAssociation {
     return this.db.getModel(this.targetName);
   }
 
-  generateInclude() {
-    if (this.db.sequelize.getDialect() !== 'postgres') {
-      throw new Error('Filtering by many to many (array) associations is only supported on postgres');
-    }
+  generateInclude(parentAs?: string) {
     const targetCollection = this.db.getCollection(this.targetName);
     const targetField = targetCollection.getField(this.targetKey);
     const sourceCollection = this.db.getCollection(this.source.name);
     const foreignField = sourceCollection.getField(this.foreignKey);
     const queryInterface = this.db.sequelize.getQueryInterface();
-    const left = queryInterface.quoteIdentifiers(`${this.as}.${targetField.columnName()}`);
-    const right = queryInterface.quoteIdentifiers(`${this.source.collection.name}.${foreignField.columnName()}`);
+    const asLeft = parentAs ? `${parentAs}->${this.as}` : this.as;
+    const asRight = parentAs || this.source.collection.name;
+    const left = queryInterface.quoteIdentifiers(`${asLeft}.${targetField.columnName()}`);
+    const right = queryInterface.quoteIdentifiers(`${asRight}.${foreignField.columnName()}`);
     return {
-      on: this.db.sequelize.literal(`${left}=any(${right})`),
+      on: this.db.queryInterface.generateJoinOnForJSONArray(left, right),
     };
   }
 
@@ -82,6 +82,31 @@ const queryParentSQL = (options: {
   SELECT ${q(targetKeyField)} AS ${q(targetKey)}, ${q(foreignKeyField)} AS ${q(foreignKey)} FROM cte`;
 };
 
+const processIncludes = (includes: any[], model: any, parentAs = '') => {
+  includes.forEach((include: { association: string; include?: any[] }, index: number) => {
+    // Process current level
+    const association = model.associations[include.association];
+    if (association?.generateInclude) {
+      includes[index] = {
+        ...include,
+        ...association.generateInclude(parentAs),
+      };
+    }
+
+    // Recursively process nested includes if they exist
+    if (include.include && Array.isArray(include.include) && include.include.length > 0) {
+      // Get the associated model for the next level
+      const nextModel = association?.target;
+      if (!nextModel) {
+        return;
+      }
+      processIncludes(include.include, nextModel, parentAs ? `${parentAs}->${association.as}` : association.as);
+    }
+  });
+
+  return includes;
+};
+
 export class EagerLoadingTree {
   public root: EagerLoadingNode;
   db: Database;
@@ -252,28 +277,20 @@ export class EagerLoadingTree {
       throw new Error(`Model ${node.model.name} does not have primary key`);
     }
 
-    includeForFilter.forEach((include: { association: string }, index: number) => {
-      const association = node.model.associations[include.association];
-      if (association?.associationType == 'BelongsToArray') {
-        includeForFilter[index] = {
-          ...include,
-          ...association.generateInclude(),
-        };
-      }
-    });
     const options = {
       ...this.rootQueryOptions,
       includeIgnoreAttributes: false,
       attributes: [primaryKeyField],
       group: `${node.model.name}.${primaryKeyField}`,
       transaction,
-      include: includeForFilter,
-    };
+      include: processIncludes(includeForFilter, node.model),
+    } as any;
     if (node.model.database.options.dialect === 'mssql' && options.order) {
       options.order = null;
     }
 
     // find all ids
-    const ids = (await node.model.findAll(options)).map((row) => {
+    const ids = (await node.model.findAll()).map((row) => {
       return { row, pk: row[primaryKeyField] };
     });
 
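A minimal sketch of how processIncludes prefixes nested aliases before the include tree is handed to the query; the mock association and model objects below are illustrative stand-ins for real Sequelize associations, and only the parentAs chaining is the point:

type MockAssociation = {
  as: string;
  target?: MockModel;
  generateInclude?: (parentAs?: string) => { on: string };
};
type MockModel = { associations: Record<string, MockAssociation> };

// Simplified restatement of the processIncludes added above.
const processIncludes = (includes: any[], model: MockModel, parentAs = '') => {
  includes.forEach((include, index) => {
    const association = model.associations[include.association];
    if (association?.generateInclude) {
      includes[index] = { ...include, ...association.generateInclude(parentAs) };
    }
    if (Array.isArray(include.include) && include.include.length > 0 && association?.target) {
      processIncludes(include.include, association.target, parentAs ? `${parentAs}->${association.as}` : association.as);
    }
  });
  return includes;
};

// projects -> users (plain belongsTo) -> tags (belongsToArray needing a custom join):
const tags: MockAssociation = {
  as: 'tags',
  generateInclude: (parentAs) => ({ on: `join for alias "${parentAs ? `${parentAs}->tags` : 'tags'}"` }),
};
const usersModel: MockModel = { associations: { tags } };
const users: MockAssociation = { as: 'users', target: usersModel };
const projectsModel: MockModel = { associations: { users } };

const include = [{ association: 'users', include: [{ association: 'tags' }] }];
console.log(JSON.stringify(processIncludes(include, projectsModel), null, 2));
// The nested tags include now carries on: 'join for alias "users->tags"'.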
@@ -8,7 +8,7 @@
  */
 
 import { DataTypes } from 'sequelize';
-import { BaseColumnFieldOptions, Field } from './field';
+import { BaseColumnFieldOptions, Field, FieldContext } from './field';
 
 export class StringField extends Field {
   get dataType() {
@@ -18,9 +18,20 @@ export class StringField extends Field {
 
     return DataTypes.STRING;
   }
+
+  additionalSequelizeOptions() {
+    const { name, trim } = this.options;
+
+    return {
+      set(value) {
+        this.setDataValue(name, trim ? value?.trim() : value);
+      },
+    };
+  }
 }
 
 export interface StringFieldOptions extends BaseColumnFieldOptions {
   type: 'string';
   length?: number;
+  trim?: boolean;
 }
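The trim option only installs a Sequelize attribute setter, so the behavior can be reproduced with plain Sequelize; a rough sketch under that assumption (the model and field names below are made up for illustration):

import { DataTypes, Sequelize } from 'sequelize';

const sequelize = new Sequelize('sqlite::memory:', { logging: false });

// Rough equivalent of a collection field declared as { type: 'string', name: 'name', trim: true }.
const Test = sequelize.define('test', {
  name: {
    type: DataTypes.STRING,
    set(value: unknown) {
      // Same idea as StringField.additionalSequelizeOptions(): trim on write.
      this.setDataValue('name', typeof value === 'string' ? value.trim() : value);
    },
  },
});

async function main() {
  await sequelize.sync();
  const row = await Test.create({ name: ' n1\n ' });
  console.log(JSON.stringify(row.get('name'))); // "n1"
}

main();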
@@ -142,6 +142,10 @@ export default class MysqlQueryInterface extends QueryInterface {
     }
   }
 
+  public generateJoinOnForJSONArray(left: string, right: string) {
+    return this.db.sequelize.literal(`JSON_CONTAINS(${right}, JSON_ARRAY(${left}))`);
+  }
+
   changeColumnDefaultValueSQL(options: ChangeColumnOptions): Promise<string> {
     return null;
   }
@@ -233,6 +233,10 @@ $BODY$
     return res[0]['show_create_table'];
   }
 
+  public generateJoinOnForJSONArray(left: string, right: string) {
+    return this.db.sequelize.literal(`${left}=any(${right})`);
+  }
+
   changeColumnDefaultValueSQL(options: ChangeColumnOptions): Promise<string> {
     return null;
   }
@@ -126,6 +126,11 @@ export default abstract class QueryInterface {
     return this.db.sequelize.getQueryInterface().queryGenerator.quoteIdentifier(identifier);
   }
 
+  public generateJoinOnForJSONArray(left: string, right: string) {
+    const dialect = this.db.sequelize.getDialect();
+    throw new Error(`Filtering by many to many (array) associations is not supported on ${dialect}`);
+  }
+
   public async changeColumn(options: ChangeColumnOptions) {
     if (!options.actions.length) {
       throw new Error('Actions invalid');
@@ -147,6 +147,10 @@ export default class SqliteQueryInterface extends QueryInterface {
     await this.db.sequelize.query(sql, { transaction });
   }
 
+  public generateJoinOnForJSONArray(left: string, right: string) {
+    return this.db.sequelize.literal(`${left} in (SELECT value from json_each(${right}))`);
+  }
+
   changeColumnDefaultValueSQL(options: ChangeColumnOptions): Promise<string> {
     return null;
   }
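Taken together, the query-interface hunks above give each dialect its own join condition for filtering through a belongsToArray field, with the abstract base class throwing for anything else. A condensed view of what generateJoinOnForJSONArray produces; the SQL fragments are copied from the hunks, the dispatch helper itself is only for illustration:

// left  = quoted "<alias>.<targetKey>" column on the target table
// right = quoted "<parentAlias>.<foreignKey>" array/JSON column on the source table
function joinOnForJSONArray(dialect: string, left: string, right: string): string {
  switch (dialect) {
    case 'postgres':
      return `${left}=any(${right})`;                              // native array column
    case 'mysql':
      return `JSON_CONTAINS(${right}, JSON_ARRAY(${left}))`;       // JSON column
    case 'sqlite':
      return `${left} in (SELECT value from json_each(${right}))`; // JSON1 extension
    default:
      throw new Error(`Filtering by many to many (array) associations is not supported on ${dialect}`);
  }
}

console.log(joinOnForJSONArray('postgres', '"users->tags"."id"', '"users"."tag_ids"'));
// "users->tags"."id"=any("users"."tag_ids")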
@@ -315,6 +315,34 @@ describe('merge strategy', () => {
         key1: 'val1 + val2',
       });
     });
+    it('case 2', () => {
+      const obj = assign(
+        {
+          filter: { a: 'a2' },
+        },
+        {},
+        {
+          filter: () => '123',
+        },
+      );
+      expect(obj).toMatchObject({
+        filter: '123',
+      });
+    });
+    it('case 3', () => {
+      const obj = assign(
+        {},
+        {
+          filter: { a: 'a2' },
+        },
+        {
+          filter: () => '123',
+        },
+      );
+      expect(obj).toMatchObject({
+        filter: '123',
+      });
+    });
   });
 
   describe('merge', () => {
@@ -416,4 +444,32 @@ describe('merge strategy', () => {
       });
     });
   });
+
+  describe('source is empty', () => {
+    it('case 1', () => {
+      const obj = assign(
+        {
+          resourceName: 'uiSchemas',
+          resourceIndex: 'n0jylid5rqa',
+          actionName: 'getJsonSchema',
+          values: {},
+        },
+        {},
+        {
+          filter: 'andMerge',
+          fields: 'intersect',
+          except: 'union',
+          whitelist: 'intersect',
+          blacklist: 'intersect',
+          sort: 'overwrite',
+        },
+      );
+      expect(obj).toMatchObject({
+        resourceName: 'uiSchemas',
+        resourceIndex: 'n0jylid5rqa',
+        actionName: 'getJsonSchema',
+        values: {},
+      });
+    });
+  });
 });
@@ -8,7 +8,7 @@
  */
 
 import deepmerge from 'deepmerge';
-import lodash from 'lodash';
+import _ from 'lodash';
 import { isPlainObject } from './common';
 
 type MergeStrategyType = 'merge' | 'deepMerge' | 'overwrite' | 'andMerge' | 'orMerge' | 'intersect' | 'union';
@@ -22,7 +22,9 @@ export interface MergeStrategies {
 
 function getEnumerableOwnPropertySymbols(target: any): any[] {
   return Object.getOwnPropertySymbols
-    ? Object.getOwnPropertySymbols(target).filter((symbol) => target.propertyIsEnumerable(symbol))
+    ? Object.getOwnPropertySymbols(target).filter((symbol) =>
+        Object.prototype.propertyIsEnumerable.call(target, symbol),
+      )
     : [];
 }
 
@@ -88,7 +90,7 @@ mergeStrategies.set('union', (x, y) => {
   if (typeof y === 'string') {
     y = y.split(',');
   }
-  return lodash.uniq((x || []).concat(y || [])).filter(Boolean);
+  return _.uniq((x || []).concat(y || [])).filter(Boolean);
 });
 
 mergeStrategies.set('intersect', (x, y) =>
@@ -110,15 +112,22 @@ mergeStrategies.set('intersect', (x, y) =>
 );
 
 export function assign(target: any, source: any, strategies: MergeStrategies = {}) {
-  getKeys(source).forEach((sourceKey) => {
+  const sourceKeys = getKeys(source);
+  const targetKeys = getKeys(target);
+  _.uniq([...sourceKeys, ...targetKeys]).forEach((sourceKey) => {
     const strategy = strategies[sourceKey];
-    let func = mergeStrategies.get('deepMerge');
+    let func: any;
     if (typeof strategy === 'function') {
       func = strategy;
     } else if (typeof strategy === 'string' && mergeStrategies.has(strategy as any)) {
      func = mergeStrategies.get(strategy as any);
     }
+    if (func) {
       target[sourceKey] = func(target[sourceKey], source[sourceKey]);
+    } else if (sourceKeys.includes(sourceKey)) {
+      const func = mergeStrategies.get('deepMerge');
+      target[sourceKey] = func(target[sourceKey], source[sourceKey]);
+    }
   });
   return target;
 }
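The assign() change above is what the new 'case 2/3' and 'source is empty' tests exercise: a per-key strategy now runs over the union of target and source keys, while keys without a strategy fall back to deepMerge only when the source actually provides them. A short usage sketch (values are illustrative; the import path assumes assign is exported from the utils package):

import { assign } from '@nocobase/utils'; // assumption: assign is re-exported here

// A function strategy applies even when the source object has no such key,
// overwriting the value already on the target (matches the new test cases).
const obj = assign({ filter: { a: 'a2' } }, {}, { filter: () => '123' });
console.log(obj); // { filter: '123' }

// Keys without a strategy are only deep-merged when the source provides them,
// so an empty source leaves the target untouched (see the 'source is empty' test).
const kept = assign({ resourceName: 'uiSchemas', values: {} }, {}, { filter: 'andMerge' });
console.log(kept); // { resourceName: 'uiSchemas', values: {} }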
@@ -530,4 +530,150 @@ describe('union role: full permissions', async () => {
     expect(createRoleResponse.statusCode).toBe(200);
     expect(createRoleResponse.body.data.role).not.toBe(UNION_ROLE_KEY);
   });
+
+  it('should general action permissions override specific resource permissions when using union role #1924', async () => {
+    const rootAgent = await app.agent().login(rootUser);
+    await rootAgent
+      .post(`/dataSources/main/roles:update`)
+      .query({
+        filterByTk: role1.name,
+      })
+      .send({
+        roleName: role1.name,
+        strategy: {
+          actions: ['view'],
+        },
+        dataSourceKey: 'main',
+      });
+
+    const ownDataSourceScopeRole = await db.getRepository('dataSourcesRolesResourcesScopes').findOne({
+      where: {
+        key: 'own',
+        dataSourceKey: 'main',
+      },
+    });
+    const scopeFields = ['id', 'createdBy', 'createdById'];
+    const dataSourceResourcesResponse = await rootAgent
+      .post(`/roles/${role2.name}/dataSourceResources:create`)
+      .query({
+        filterByTk: 'users',
+        filter: {
+          dataSourceKey: 'main',
+          name: 'users',
+        },
+      })
+      .send({
+        usingActionsConfig: true,
+        actions: [
+          {
+            name: 'view',
+            fields: scopeFields,
+            scope: {
+              id: ownDataSourceScopeRole.id,
+              createdAt: '2025-02-19T08:57:17.385Z',
+              updatedAt: '2025-02-19T08:57:17.385Z',
+              key: 'own',
+              dataSourceKey: 'main',
+              name: '{{t("Own records")}}',
+              resourceName: null,
+              scope: {
+                createdById: '{{ ctx.state.currentUser.id }}',
+              },
+            },
+          },
+        ],
+        name: 'users',
+        dataSourceKey: 'main',
+      });
+    expect(dataSourceResourcesResponse.statusCode).toBe(200);
+
+    agent = await app.agent().login(user, UNION_ROLE_KEY);
+    const rolesResponse = await agent.resource('roles').check();
+    expect(rolesResponse.status).toBe(200);
+    expect(rolesResponse.body.data.actions['users:view']).toStrictEqual({});
+  });
+
+  it('should verify actions configuration for union role with specific scopes', async () => {
+    const rootAgent = await app.agent().login(rootUser);
+    await rootAgent
+      .post(`/dataSources/main/roles:update`)
+      .query({
+        filterByTk: role1.name,
+      })
+      .send({
+        roleName: role1.name,
+        strategy: {
+          actions: ['view', 'create:own', 'update'],
+        },
+        dataSourceKey: 'main',
+      });
+
+    const ownDataSourceScopeRole = await db.getRepository('dataSourcesRolesResourcesScopes').findOne({
+      where: {
+        key: 'own',
+        dataSourceKey: 'main',
+      },
+    });
+    const scopeFields = ['id', 'createdBy', 'createdById'];
+    const dataSourceResourcesResponse = await rootAgent
+      .post(`/roles/${role2.name}/dataSourceResources:create`)
+      .query({
+        filterByTk: 'users',
+        filter: {
+          dataSourceKey: 'main',
+          name: 'users',
+        },
+      })
+      .send({
+        usingActionsConfig: true,
+        actions: [
+          {
+            name: 'view',
+            fields: scopeFields,
+            scope: {
+              id: ownDataSourceScopeRole.id,
+              createdAt: '2025-02-19T08:57:17.385Z',
+              updatedAt: '2025-02-19T08:57:17.385Z',
+              key: 'own',
+              dataSourceKey: 'main',
+              name: '{{t("Own records")}}',
+              resourceName: null,
+              scope: {
+                createdById: '{{ ctx.state.currentUser.id }}',
+              },
+            },
+          },
+          {
+            name: 'create',
+            fields: scopeFields,
+            scope: {
+              id: ownDataSourceScopeRole.id,
+              createdAt: '2025-02-19T08:57:17.385Z',
+              updatedAt: '2025-02-19T08:57:17.385Z',
+              key: 'own',
+              dataSourceKey: 'main',
+              name: '{{t("Own records")}}',
+              resourceName: null,
+              scope: {
+                createdById: '{{ ctx.state.currentUser.id }}',
+              },
+            },
+          },
+        ],
+        name: 'users',
+        dataSourceKey: 'main',
+      });
+    expect(dataSourceResourcesResponse.statusCode).toBe(200);
+
+    agent = await app.agent().login(user, UNION_ROLE_KEY);
+    const rolesResponse = await agent.resource('roles').check();
+    expect(rolesResponse.status).toBe(200);
+    expect(rolesResponse.body.data.actions).toHaveProperty('users:create');
+    expect(rolesResponse.body.data.actions).toHaveProperty('users:view');
+    expect(rolesResponse.body.data.actions['users:view']).toStrictEqual({});
+    expect(rolesResponse.body.data.actions).not.toHaveProperty('users:create:own');
+    expect(rolesResponse.body.data.actions['users:create']).toHaveProperty('filter');
+    expect(rolesResponse.body.data.actions['users:create']).toHaveProperty('whitelist');
+    expect(rolesResponse.body.data.actions['users:update']).toStrictEqual({});
+  });
 });
@@ -324,4 +324,100 @@ describe('issues', () => {
     }
     expect(res.status).toBe(200);
   });
+
+  test('filtering by fields of a relation collection with m2m array field', async () => {
+    await db.getRepository('collections').create({
+      values: {
+        name: 'tags',
+        fields: [
+          {
+            name: 'id',
+            type: 'bigInt',
+            autoIncrement: true,
+            primaryKey: true,
+            allowNull: false,
+          },
+          {
+            name: 'title',
+            type: 'string',
+          },
+        ],
+      },
+    });
+    await db.getRepository('collections').create({
+      values: {
+        name: 'users',
+        fields: [
+          {
+            name: 'id',
+            type: 'bigInt',
+            autoIncrement: true,
+            primaryKey: true,
+            allowNull: false,
+          },
+          {
+            name: 'username',
+            type: 'string',
+          },
+          {
+            name: 'tags',
+            type: 'belongsToArray',
+            foreignKey: 'tag_ids',
+            target: 'tags',
+            targetKey: 'id',
+          },
+        ],
+      },
+    });
+    await db.getRepository('collections').create({
+      values: {
+        name: 'projects',
+        fields: [
+          {
+            name: 'id',
+            type: 'bigInt',
+            autoIncrement: true,
+            primaryKey: true,
+            allowNull: false,
+          },
+          {
+            name: 'title',
+            type: 'string',
+          },
+          {
+            name: 'users',
+            type: 'belongsTo',
+            foreignKey: 'user_id',
+            target: 'users',
+          },
+        ],
+      },
+    });
+    // @ts-ignore
+    await db.getRepository('collections').load();
+    await db.sync();
+    await db.getRepository('tags').create({
+      values: [{ title: 'a' }, { title: 'b' }, { title: 'c' }],
+    });
+    await db.getRepository('users').create({
+      values: { id: 1, username: 'a' },
+    });
+    await db.getRepository('projects').create({
+      values: { id: 1, title: 'p1', user_id: 1 },
+    });
+    await expect(
+      db.getRepository('projects').findOne({
+        appends: ['users', 'users.tags'],
+        filter: {
+          $and: [
+            {
+              users: {
+                username: 'a',
+              },
+            },
+          ],
+        },
+      }),
+    ).resolves.toBeTruthy();
+  });
 });
@@ -207,15 +207,8 @@ describe('m2m array api, bigInt targetKey', () => {
         },
       },
     });
-    if (db.sequelize.getDialect() === 'postgres') {
-      const res = await search;
-      expect(res.length).toBe(1);
-    } else {
-      expect(search).rejects.toThrowError();
-    }
-    if (db.sequelize.getDialect() !== 'postgres') {
-      return;
-    }
+    const res1 = await search;
+    expect(res1.length).toBe(1);
     const search2 = db.getRepository('users').find({
       filter: {
         'tags.title': {
@@ -223,12 +216,8 @@ describe('m2m array api, bigInt targetKey', () => {
         },
       },
     });
-    if (db.sequelize.getDialect() === 'postgres') {
-      const res = await search2;
-      expect(res.length).toBe(2);
-    } else {
-      expect(search2).rejects.toThrowError();
-    }
+    const res2 = await search2;
+    expect(res2.length).toBe(2);
   });
 
   it('should create with belongsToArray', async () => {
@@ -186,15 +186,8 @@ describe('m2m array api, string targetKey', () => {
         },
       },
     });
-    if (db.sequelize.getDialect() === 'postgres') {
-      const res = await search;
-      expect(res.length).toBe(1);
-    } else {
-      expect(search).rejects.toThrowError();
-    }
-    if (db.sequelize.getDialect() !== 'postgres') {
-      return;
-    }
+    const res1 = await search;
+    expect(res1.length).toBe(1);
     const search2 = db.getRepository('users').find({
       filter: {
         'tags.title': {
@@ -202,12 +195,8 @@ describe('m2m array api, string targetKey', () => {
         },
      },
     });
-    if (db.sequelize.getDialect() === 'postgres') {
-      const res = await search2;
-      expect(res.length).toBe(2);
-    } else {
-      expect(search2).rejects.toThrowError();
-    }
+    const res2 = await search2;
+    expect(res2.length).toBe(2);
   });
 
   it('should create with belongsToArray', async () => {
@@ -42,7 +42,7 @@ function getRepeatTypeValue(v) {
   return 'none';
 }
 
-function CommonRepeatField({ value, onChange }) {
+function CommonRepeatField({ value, onChange, disabled }) {
   const { t } = useWorkflowTranslation();
   const option = getNumberOption(value);
 
@@ -59,11 +59,12 @@ function CommonRepeatField({ value, onChange }) {
       addonBefore={t('Every')}
       addonAfter={t(option.unitText)}
       className="auto-width"
+      disabled={disabled}
     />
   );
 }
 
-export function RepeatField({ value = null, onChange }) {
+export function RepeatField({ value = null, onChange, disabled }) {
   const { t } = useWorkflowTranslation();
   const typeValue = getRepeatTypeValue(value);
   const onTypeChange = useCallback(
@@ -114,20 +115,23 @@ export function RepeatField({ value = null, onChange }) {
         }
       `}
     >
-      <Select value={typeValue} onChange={onTypeChange} className="auto-width">
+      <Select value={typeValue} onChange={onTypeChange} className="auto-width" disabled={disabled}>
        {RepeatOptions.map((item) => (
          <Select.Option key={item.value} value={item.value}>
            {t(item.text)}
          </Select.Option>
        ))}
      </Select>
-      {typeof typeValue === 'number' ? <CommonRepeatField value={value} onChange={onChange} /> : null}
+      {typeof typeValue === 'number' ? (
+        <CommonRepeatField value={value} onChange={onChange} disabled={disabled} />
+      ) : null}
      {typeValue === 'cron' ? (
        <Cron
          value={value.trim().split(/\s+/).slice(1).join(' ')}
          setValue={(v) => onChange(`0 ${v}`)}
          clearButton={false}
          locale={window['cronLocale']}
+          disabled={disabled}
        />
      ) : null}
    </fieldset>
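With the new prop threaded through, a read-only workflow view can render the same editor in a non-editable state; a tiny usage sketch (the import path and the wrapper component are illustrative assumptions):

import React from 'react';
import { RepeatField } from './RepeatField'; // path is illustrative

export function ScheduleRepeatReadOnly({ value }: { value: string | number | null }) {
  // `disabled` is forwarded to the repeat-type Select, the interval input and the Cron editor.
  return <RepeatField value={value} onChange={() => {}} disabled />;
}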
@@ -376,11 +376,16 @@ export default class PluginWorkflowServer extends Plugin {
       const prev = workflow.previous();
       if (prev.config) {
         trigger.off({ ...workflow.get(), ...prev });
+        this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id} based on configuration before updated`);
       }
       trigger.on(workflow);
+      this.getLogger(workflow.id).info(`toggle ON workflow ${workflow.id}`);
+
       this.enabledCache.set(workflow.id, workflow);
     } else {
       trigger.off(workflow);
+      this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id}`);
+
       this.enabledCache.delete(workflow.id);
     }
     if (!silent) {
@@ -104,8 +104,7 @@ export default class DateFieldScheduleTrigger {
   // caching workflows in range, default to 5min
   cacheCycle = 300_000;
 
-  constructor(public workflow: Plugin) {
-    workflow.app.on('afterStart', async () => {
+  onAfterStart = () => {
     if (this.timer) {
       return;
     }
@@ -113,9 +112,9 @@ export default class DateFieldScheduleTrigger {
     this.timer = setInterval(() => this.reload(), this.cacheCycle);
 
     this.reload();
-    });
+  };
 
-    workflow.app.on('beforeStop', () => {
+  onBeforeStop = () => {
     if (this.timer) {
       clearInterval(this.timer);
     }
@@ -124,31 +123,36 @@ export default class DateFieldScheduleTrigger {
       clearTimeout(timer);
       this.cache.delete(key);
     }
-    });
+  };
 
+  constructor(public workflow: Plugin) {
+    workflow.app.on('afterStart', this.onAfterStart);
+    workflow.app.on('beforeStop', this.onBeforeStop);
+  }
+
+  reload() {
+    for (const [key, timer] of this.cache.entries()) {
+      clearTimeout(timer);
+      this.cache.delete(key);
     }
 
-  async reload() {
     const workflows = Array.from(this.workflow.enabledCache.values()).filter(
       (item) => item.type === 'schedule' && item.config.mode === SCHEDULE_MODE.DATE_FIELD,
     );
 
-    // NOTE: clear cached jobs in last cycle
-    this.cache = new Map();
-    this.inspect(workflows);
+    workflows.forEach((workflow) => {
+      this.inspect(workflow);
+    });
   }
 
-  inspect(workflows: WorkflowModel[]) {
+  async inspect(workflow: WorkflowModel) {
     const now = new Date();
 
-    workflows.forEach(async (workflow) => {
     const records = await this.loadRecordsToSchedule(workflow, now);
     this.workflow.getLogger(workflow.id).info(`[Schedule on date field] ${records.length} records to schedule`);
     records.forEach((record) => {
      const nextTime = this.getRecordNextTime(workflow, record);
      this.schedule(workflow, record, nextTime, Boolean(nextTime));
    });
-    });
   }
 
   // 1. startsOn in range -> yes
@@ -233,8 +237,6 @@ export default class DateFieldScheduleTrigger {
           [Op.gte]: new Date(endTimestamp),
         },
       });
-    } else {
-      this.workflow.getLogger(id).warn(`[Schedule on date field] "endsOn.field" is not configured`);
     }
   }
 }
@@ -367,7 +369,7 @@ export default class DateFieldScheduleTrigger {
   }
 
   on(workflow: WorkflowModel) {
-    this.inspect([workflow]);
+    this.inspect(workflow);
 
     const { collection } = workflow.config;
     const [dataSourceName, collectionName] = parseCollectionName(collection);
@@ -18,26 +18,30 @@ const MAX_SAFE_INTERVAL = 2147483647;
 export default class StaticScheduleTrigger {
   private timers: Map<string, NodeJS.Timeout | null> = new Map();
 
-  constructor(public workflow: Plugin) {
-    workflow.app.on('afterStart', async () => {
+  onAfterStart = () => {
     const workflows = Array.from(this.workflow.enabledCache.values()).filter(
       (item) => item.type === 'schedule' && item.config.mode === SCHEDULE_MODE.STATIC,
     );
 
-      this.inspect(workflows);
+    workflows.forEach((workflow) => {
+      this.inspect(workflow);
     });
+  };
 
-    workflow.app.on('beforeStop', () => {
+  onBeforeStop = () => {
     for (const timer of this.timers.values()) {
       clearInterval(timer);
     }
-    });
+  };
 
+  constructor(public workflow: Plugin) {
+    workflow.app.on('afterStart', this.onAfterStart);
+    workflow.app.on('beforeStop', this.onBeforeStop);
   }
 
-  inspect(workflows: WorkflowModel[]) {
+  inspect(workflow: WorkflowModel) {
     const now = new Date();
 
-    workflows.forEach((workflow) => {
     const nextTime = this.getNextTime(workflow, now);
     if (nextTime) {
       this.workflow
@@ -47,7 +51,6 @@ export default class StaticScheduleTrigger {
       this.workflow.getLogger(workflow.id).info('workflow will not be scheduled');
     }
     this.schedule(workflow, nextTime, nextTime >= now.getTime());
-    });
   }
 
   getNextTime({ config, stats }: WorkflowModel, currentDate: Date, nextSecond = false) {
@@ -130,7 +133,7 @@ export default class StaticScheduleTrigger {
   }
 
   on(workflow) {
-    this.inspect([workflow]);
+    this.inspect(workflow);
   }
 
   off(workflow) {
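Both schedule triggers above move their 'afterStart'/'beforeStop' logic out of inline closures and into class-property handlers registered in the constructor. A generic sketch of that pattern on a plain Node EventEmitter (not the NocoBase app object; names below are illustrative): arrow-function fields keep `this` bound and give each handler one stable reference.

import { EventEmitter } from 'node:events';

class ScheduleLikeTrigger {
  private timer: NodeJS.Timeout | null = null;

  // Stable handler references, usable for both registration and later removal.
  onAfterStart = () => {
    if (this.timer) return;
    this.timer = setInterval(() => this.reload(), 1000);
    this.reload();
  };

  onBeforeStop = () => {
    if (this.timer) clearInterval(this.timer);
  };

  constructor(app: EventEmitter) {
    app.on('afterStart', this.onAfterStart);
    app.on('beforeStop', this.onBeforeStop);
  }

  reload() {
    // per-workflow inspection happens here in the real triggers
  }
}

const app = new EventEmitter();
new ScheduleLikeTrigger(app);
app.emit('afterStart');
app.emit('beforeStop');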