diff --git a/migrations/20241011063712-datasources.js b/migrations/20241011063712-datasources.js new file mode 100644 index 0000000..b62cba7 --- /dev/null +++ b/migrations/20241011063712-datasources.js @@ -0,0 +1,212 @@ +'use strict'; + +var dbm; +var type; +var seed; + +/** + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. + */ +exports.setup = function(options, seedLink) { + dbm = options.dbmigrate; + type = dbm.dataType; + seed = seedLink; +}; + +exports.up = async function(db) { + await db.runSql( + "ALTER TABLE datasource_fields DROP CONSTRAINT ds_fields;" + + "DROP TABLE IF EXISTS datasource;" + + "DELETE FROM datasource_fields;"+ + "CREATE TABLE datasource (" + + "id bigserial primary key," + + "account_id int NOT NULL REFERENCES accounts(id) ON DELETE NO ACTION ON UPDATE CASCADE,"+ + "alias varchar(150)," + + "title varchar(150)," + + "type varchar(20)," + + "is_system bool," + + "is_tree bool," + + "script text," + + "context text," + + "version int default 1," + + "permissions jsonb not null default '{}'::json," + + "created_at TIMESTAMP(3) default now()," + + "created_by int REFERENCES users(id) ON DELETE NO ACTION ON UPDATE CASCADE," + + "updated_at TIMESTAMP(3) default now()," + + "updated_by int REFERENCES users(id) ON DELETE NO ACTION ON UPDATE CASCADE," + + "deleted_at TIMESTAMP(3),"+ + "deleted_by int REFERENCES users(id) ON DELETE NO ACTION ON UPDATE CASCADE," + + "UNIQUE(account_id, alias)" + + ");" + + "ALTER TABLE datasource_fields ADD CONSTRAINT ds_fields FOREIGN KEY (datasource_id) REFERENCES datasource (id);" + + "ALTER TABLE datasource_fields ADD UNIQUE(datasource_id, alias);"+ + "ALTER TABLE datasource_fields RENAME COLUMN required TO nullable;" + ) + + await populateDs(db) + await populateFields(db) + +}; + +const populateDs = async(db) => { + let dss = await getDatasource(db) + //console.log(dss) + + for (let i in dss) { + let ds = dss[i] + console.log(ds) + let query = "INSERT INTO datasource (" + + "id," + + "account_id," + + "alias," + + "title," + + "type," + + "is_system,"+ + "is_tree," + + "script," + + "context," + + "version," + + "permissions," + + "created_by," + + "updated_by" + + ") VALUES (" + + `'${ds.id}', + 1, + '${ds.data.alias}', + '${ds.data.title}', + '${ds.data.source}', + false, + ${!!ds.data.isTree }, + $1, + '${ds.data.context ? ds.data.context : ''}', + ${ds.version}, + ${ds.data.permissions ? 
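populateDs above binds only the `script` value as a positional parameter (`$1`) and interpolates every other value directly into the SQL string. A minimal sketch of the same datasource INSERT with every value bound — column order follows the migration; the fully parameterized form is only an illustrative alternative, not what this migration runs:

```ts
// Sketch: same columns as the INSERT above, all values passed as bound parameters.
// Relies on db.runSql(sql, params), the same call signature the migration already
// uses for the script value.
const insertDatasourceRow = async (db: any, ds: any) => {
  const sql =
    "INSERT INTO datasource (id, account_id, alias, title, type, is_system, " +
    "is_tree, script, context, version, permissions, created_by, updated_by) " +
    "VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11::jsonb, $12, $13)";

  await db.runSql(sql, [
    ds.id,
    1,                                      // account_id, hard-coded as in the migration
    ds.data.alias,
    ds.data.title,
    ds.data.source,
    false,                                  // is_system
    !!ds.data.isTree,
    ds.data.script,
    ds.data.context ? ds.data.context : "",
    ds.version,
    JSON.stringify(ds.data.permissions ? ds.data.permissions : {}),
    ds.created_by,
    ds.updated_by,
  ]);
};
```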
"'" + JSON.stringify(ds.data.permissions) + "'::jsonb": "'{}'::jsonb"}, + ${ds.created_by}, + ${ds.updated_by})` + await db.runSql(query, [ds.data.script]) + + } +} + +const getDatasource = async(db) => { + return new Promise((resolve, reject) => { + db.runSql("SELECT * FROM config WHERE alias = 'datasource' and deleted_at is null", (err, res) => { + if (err) { + reject(err) + return + } + + resolve(res.rows) + }) + }) +} + +const populateFields = async (db) => { + let fields = await getFields(db) + //console.log(fields) + + for (let i in fields) { + let field = fields[i] + let query = "INSERT INTO datasource_fields (" + + "datasource_id," + + "datasource_alias," + + "alias," + + "type," + + "title," + + "searchable," + + "filterable," + + "sortable," + + "is_multiple," + + "default_value," + + "datasource_ref," + + "autoincrement," + + "nullable," + + "precision," + + "format," + + "enum_values," + + "created_by," + + "updated_by," + + "account_id," + + "version) VALUES (" + + `${field.datasource_id},` + + `'${field.datasource_alias}',` + + `'${field.alias}',` + + `'${field.type}',` + + `'${field.title}',` + + `${field.searchable},` + + `${field.filterable},` + + `${field.sortable},` + + `${field.is_multiple},` + + `'${field.default_value}',` + + `${field.datasource_ref ? "'" + field.datasource_ref + "'" : null },` + + `${field.autoincrement},` + + `${field.nullable},` + + `${field.precision},` + + `'${field.format}',` + + `${ field.enum_values ? "'" + JSON.stringify(field.enum_values) + "'::jsonb": null},` + + `${field.created_by},` + + `${field.updated_by},` + + `${field.account_id},` + + `${field.version})` + await db.runSql(query) + } + + +} + +const getFields = async(db) => { + return new Promise((resolve, reject) => { + db.runSql("SELECT * FROM config WHERE alias = 'datasource' and deleted_at is null", (err, res) => { + if (err) { + reject(err) + return + } + + + let fields = [] + for(let i in res.rows) { + let ds = res.rows[i] + if (ds.data.fields) { + for(let j in ds.data.fields) { + + let field = { + "datasource_id": ds.id, + "datasource_alias": ds.data.alias, + "account_id": 1 + } + let f = ds.data.fields[j] + field.alias = f.alias + field.type = f.type + field.title = f.title + field.searchable = !!f.searchable + field.filterable = !!f.filterable + field.sortable = !!f.sortable + field.is_multiple = !!f.isMultiple + field.default_value = f.default ? f.default : "" + field.datasource_ref = f.datasource + field.autoincrement = !!f.autoincrement + field.nullable = !!f.required + field.enum_values = f.values + field.precision = f.precision ? f.precision : null + field.format = f.format ? 
f.format : "" + field.created_by = ds.created_by + field.updated_by = ds.updated_by + field.version = 1 + fields.push(field) + } + } + } + + resolve(fields) + }) + }) +} + +exports.down = async function(db) { + await db.runSql("ALTER TABLE datasource_fields RENAME COLUMN nullable TO required;") +}; + +exports._meta = { + "version": 1 +}; diff --git a/src/app.module.ts b/src/app.module.ts index 4f1aa00..c467708 100644 --- a/src/app.module.ts +++ b/src/app.module.ts @@ -43,6 +43,7 @@ import { JobsModule } from './jobs/jobs.module'; import { Job } from "./jobs/jobs.entity"; import { ScheduleModule } from "@nestjs/schedule"; import { PageEntity } from "./pages/entities/pages.entity"; +import { DatasourceV2Entity } from "./datasources/entities/datasourceV2.entity"; @Module({ imports: [ @@ -76,7 +77,8 @@ import { PageEntity } from "./pages/entities/pages.entity"; DatasourceField, Variable, Job, - PageEntity + PageEntity, + DatasourceV2Entity ], }), BullModule.forRoot({ diff --git a/src/auth/jwt-auth.guard.ts b/src/auth/jwt-auth.guard.ts index 2a6fd82..8171980 100644 --- a/src/auth/jwt-auth.guard.ts +++ b/src/auth/jwt-auth.guard.ts @@ -43,6 +43,8 @@ export class JwtAuthGuard extends AuthGuard('jwt') { request.username = payload.username request.userId = Number(payload.userId) + + let user = await this.userService.getSettings(payload.userId) if (!user) return false diff --git a/src/data-indexer/data-indexer.adapter.ts b/src/data-indexer/data-indexer.adapter.ts new file mode 100644 index 0000000..e533023 --- /dev/null +++ b/src/data-indexer/data-indexer.adapter.ts @@ -0,0 +1,15 @@ +import { DataSource } from "typeorm"; +import { MeiliSearch } from "meilisearch"; +import { Context } from "../entities/context"; +import { DataSourceV2Dto } from "../datasources/dto/datasourceV2.dto"; + +export abstract class IndexerDataAdapter { + protected constructor(dataSource: DataSource, searchClient: MeiliSearch) { + this.dataSource = dataSource + this.searchClient = searchClient + } + protected readonly dataSource: DataSource + protected readonly searchClient: MeiliSearch + + abstract getData(ds: DataSourceV2Dto, context: Context, ids?: string[]) : Promise +} \ No newline at end of file diff --git a/src/data-indexer/data-indexer.dto.ts b/src/data-indexer/data-indexer.dto.ts index 199a2db..06f92c7 100644 --- a/src/data-indexer/data-indexer.dto.ts +++ b/src/data-indexer/data-indexer.dto.ts @@ -1,8 +1,9 @@ import { Context } from "../entities/context"; -import { DataSourceConfigInterface } from "../datasources/entities/datasource.entity"; +import { DataSourceV2Dto } from "../datasources/dto/datasourceV2.dto"; export class DataIndexJob { context: Context - datasource: DataSourceConfigInterface + datasource: DataSourceV2Dto ids?: string[] -} \ No newline at end of file +} + diff --git a/src/data-indexer/data-indexer.ts b/src/data-indexer/data-indexer.ts index 3c99deb..e4927ee 100644 --- a/src/data-indexer/data-indexer.ts +++ b/src/data-indexer/data-indexer.ts @@ -3,24 +3,25 @@ import { DataSource } from "typeorm"; import { Index, MeiliSearch } from "meilisearch"; import { DataReindexDto, + DataSourceType, + DataSourceV2Dto, GetDataManyDto, - GetDataManyParamsDto, - SystemFields + GetDataManyRequestDto } from "../datasources/dto/datasourceV2.dto"; import { Context } from "../entities/context"; import { DataItem } from "../datasources/entities/dataitem.entity"; -import { DataSourceConfigInterface } from "../datasources/entities/datasource.entity"; -import { FieldConfigInterface } from "../entities/field"; import * as 
dayjs from "dayjs"; -import { ConfigItem } from "../config/entities/config.entity"; import { SearchParams } from "meilisearch/src/types/types"; import { FilterItemInterface } from "../datasources/dto/datasource.dto"; import * as utc from "dayjs/plugin/utc"; import * as timezone from "dayjs/plugin/timezone"; - import { ConfigService } from "@nestjs/config"; import { Logger } from "@nestjs/common"; import { DatasourceField } from "../datasources/entities/field.entity"; +import { IndexerDataAdapter } from "./data-indexer.adapter"; +import { InternalAdapter } from "./internal.adapter"; +import { InternalDbAdapter } from "./internal-db.adapter"; + dayjs.extend(utc); dayjs.extend(timezone); @@ -31,13 +32,17 @@ export class DataIndexer { private datasource: DataSource) { this.searchClient = new MeiliSearch({ host: configService.get('MAILISEARCH_HOST'), - apiKey: configService.get('MAILISEARCH_MASTER_KEY') , + apiKey: configService.get('MAILISEARCH_MASTER_KEY') }) + this.internalAdapter = new InternalAdapter(this.datasource, this.searchClient); + this.internalDbAdapter = new InternalDbAdapter(this.datasource, this.searchClient); } private readonly logger = new Logger(DataIndexer.name); private readonly searchClient:MeiliSearch = null private timezone = 'Europe/Moscow' + private readonly internalAdapter: InternalAdapter + private readonly internalDbAdapter: InternalDbAdapter public setTimezone(val) { this.timezone = val dayjs.tz.setDefault(this.timezone) @@ -45,15 +50,23 @@ export class DataIndexer { public getIndexUid = (alias, context) => `${context.accountId}_${alias}` - public async getDataMany(params: GetDataManyParamsDto, context: Context) : Promise { + private getAdapter(type: DataSourceType) { + switch (type) { + case DataSourceType.internal: return this.internalAdapter; + case DataSourceType.internalDB: return this.internalDbAdapter; + default: return null + } + } + + public async getDataMany(params: GetDataManyRequestDto, dataSourceConfig: DataSourceV2Dto , context: Context) : Promise { let index:Index try { - index = await this.searchClient.getIndex(this.getIndexUid(params.dataSourceConfig.alias, context)) + index = await this.searchClient.getIndex(this.getIndexUid(dataSourceConfig.alias, context)) } catch (e) { throw e } - let dsFields = params.dataSourceConfig.fields + let dsFields = dataSourceConfig.fields const allFields = new Map(dsFields.map(i => [i.alias, i])) let filterBy = params.filterBy @@ -83,13 +96,15 @@ export class DataIndexer { } public async dataReindex(params: DataReindexDto, context: Context) : Promise { - if (params.dataSourceConfig.source !== 'internal') - throw `DataSource is not an internal source` + let adapter: IndexerDataAdapter = this.getAdapter(params.dataSourceConfig.type) + + if (!adapter) + throw `DataSource has no ability to index` const indexUid = this.getIndexUid(params.dataSourceConfig.alias, context) - this.logger.log(`Indexing data to index ${indexUid}, ids: ${params.ids?.join(',')}`, ) + this.logger.log(`Indexing data for datasource ${params.dataSourceConfig.alias} to index ${indexUid}, ids: ${params.ids?.join(',')}`, ) let newIndexUid = "" let index: Index = await this.getIndex(indexUid) @@ -105,38 +120,11 @@ export class DataIndexer { await this.updateIndexSettings(index, params.dataSourceConfig.fields) } - const rep = this.datasource.getRepository(DataItem) - let query = rep - .createQueryBuilder() - .select('data, ' + params.dataSourceConfig.fields.filter(f=>f.isSystem).map(f => f.alias).join(',')) - .where( - `alias = :alias`, - { alias: 
params.dataSourceConfig.alias } - ) - - if (params.ids && params.ids.length > 0) { - query.andWhere(`id IN (${params.ids.join(',')})`) - } - - let items = await query.getRawMany() + let docs = await adapter.getData(params.dataSourceConfig, context, params.ids) + docs = await this.replaceEnumDataToItems(params.dataSourceConfig, docs) + docs = await this.replaceLinkedDataToItems(params.dataSourceConfig, docs, context) - let dsFields = params.dataSourceConfig.fields - // Get all data source configs for link nesting - let linkDataSource: Map = new Map() - for (let i in dsFields) { - const f = dsFields[i] - if (f.type === 'link' && !f.isSystem) { - let config = await this.getConfigByAlias(f.datasource) - if (config) - linkDataSource.set(f.datasource, config) - } - } - - let docs = [] - for (let i in items) { - docs.push(await this.prepareItemForIndex(items[i], dsFields, linkDataSource)) - } let taskUid = (await index.addDocuments(docs)).taskUid @@ -150,29 +138,102 @@ export class DataIndexer { if (params.ids && params.ids.length > 0) { - await this.updateLinkedData(params, context) + await this.updateLinkedData(docs, params.dataSourceConfig, context) } - return items.length + return docs.length } - public async updateLinkedData(params: DataReindexDto, context: Context) { - if (params.dataSourceConfig.source !== 'internal') - throw `DataSource is not an internal source` + private async replaceEnumDataToItems(ds: DataSourceV2Dto, items: any[]) { + let enums = ds.fields.filter(f => f.type === 'enum') + if (!enums.length) + return items - if (!params.ids.length) - return + for(let i in enums) { + let field = enums[i] + for(let j in items) { + let item = items[j] - const itemsRep = await this.datasource.getRepository(DataItem) - let items = await itemsRep - .createQueryBuilder() - .select('id, data') - .where( - `alias = :alias AND id IN (${params.ids.join(',')})`, - { alias: params.dataSourceConfig.alias } - ).getRawMany() + if (item[field.alias] === null || item[field.alias] === undefined) + continue + + if (field.isMultiple) { + let arr = [] + item[field.alias].forEach(d => { + let val = field.enumValues.find(f => f.key === d) + + arr.push({ id: val ? val.key : d, title: val ? val.title : d }) + + }) + + item[field.alias] = arr + } else { + let val = field.enumValues.find(f => f.key === item[field.alias]) + item[field.alias] = val ? 
{ id: val.key, title: val.title } : null + } + } + } + + return items + } + + private async replaceLinkedDataToItems(ds: DataSourceV2Dto, items: any[], context: Context) { + let linksField = ds.fields.filter(f => f.type === 'link') + if (!linksField.length) + return items + + for(const i in linksField) { + const field = linksField[i] + if (field.isSystem) + continue + + const indexUid = this.getIndexUid(field.datasourceReference, context) + const fields = await this.getFieldForLinkIndex(field.datasourceReference) + const index = await this.getIndex(indexUid) + if (!index) { + this.logger.warn(`Index ${indexUid} for add linked data into item`) + continue + } + + for(let j in items) { + items[j][field.alias] = await this.replaceLinkedDataToItem(items[j][field.alias], index, fields) + } + + } + + return items + } + + private async replaceLinkedDataToItem(id: string, index: Index, fields: string[]) { + if (typeof id === 'string') { + let doc + try { + doc = await index.getDocument(id, { + fields: fields + }) + return doc + } catch (e) { + this.logger.warn(`Document with id ${id} not found in index ${index.uid}`) + return { + id: id + } + } + + + } else { + this.logger.warn(`Item id must be a type of string`) + return { + id: id + } + } + } + + public async updateLinkedData(items: any[], ds: DataSourceV2Dto, context: Context) { + let adapter: IndexerDataAdapter = this.getAdapter(ds.type) + if (!adapter) + throw `DataSource is not an internal or internal-db source` - this.logger.log(`Indexing linked docs to ${params.dataSourceConfig.alias}`) + this.logger.log(`Indexing linked docs to ${ds.alias}`) let rep = this.datasource.getRepository(DatasourceField) @@ -181,18 +242,13 @@ export class DataIndexer { .select('datasource_alias, alias as field_alias') .where( `datasource_ref = :ref`, - { ref: params.dataSourceConfig.alias } + { ref: ds.alias } ) .getRawMany() - - for(let i in links) { - let link = links[i] - await this.updateLinkItem(items, link, context) + await this.updateLinkItem(items, links[i], context) } - - } private async updateLinkItem(items: any[], link: {datasource_alias: string, field_alias: string}, context: Context) { @@ -208,8 +264,6 @@ export class DataIndexer { for(let i in items) { let item = items[i] - - let linkedItems = await rep.createQueryBuilder() .select('id') .where(`data ->> '${link.field_alias}' = '${item.id}' AND alias = '${link.datasource_alias}'`) @@ -238,113 +292,22 @@ export class DataIndexer { } - - private async prepareItemForIndex(item: any, fields: FieldConfigInterface[], linkDs: Map): Promise { - let o = {} - - for(let i in fields) { - const field = fields[i] - let val = field.isSystem ? item[field.alias] : item.data[field.alias] - - if(['datetime', 'time', 'date'].includes(field.type)) { - val = val ? dayjs(val).utc(false).valueOf() : null - } - - if (field.type === 'number') { - val = Number(val) - } - - if (field.type === 'link' && !field.isSystem) { - val = await this.getLinkItemForIndex(val, field, linkDs) - } - - if (!field.isMultiple && field.type === 'string' ) { - val = val !== undefined && val !== null ? 
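To make the enrichment above concrete: `replaceEnumDataToItems` swaps a stored enum key for `{ id, title }` taken from the field's `enumValues`, and `replaceLinkedDataToItems` swaps a stored link id for the referenced document fetched from its Meilisearch index, falling back to `{ id }` when the document is missing. A hypothetical document with an enum field `status` and a link field `customer` — the field names and values are illustrative:

```ts
// Raw document as returned by the adapter:
const raw = { id: "42", status: "open", customer: "1001" };

// After enum enrichment, with enumValues [{ key: "open", title: "Open" }]:
const withEnum = { id: "42", status: { id: "open", title: "Open" }, customer: "1001" };

// After link enrichment, when document 1001 exists in the linked index (trimmed):
const indexed = {
  id: "42",
  status: { id: "open", title: "Open" },
  customer: { id: "1001", title: "ACME Ltd." },
};

// updateIndexSettings() later in this file also registers "status.id" and
// "customer.id" as filterable — per its comment, so that documents embedding a
// linked item can be found and re-indexed after that item changes.
```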
String(val) : "" - } - - o[field.alias] = val - } - return o - } - - private async getLinkItemForIndex(id: string | string[], field: FieldConfigInterface, linkDs: Map): Promise { - if (!id) - return null - - let ds = linkDs.get(field.datasource) - if (!ds) return { - id - } - - const rep = this.datasource.getRepository(DataItem) - - let val = >id - if (field.isMultiple) { - if (!Array.isArray(id)) - val = [id] - - if (!val.length) - return [] - - let items = await rep - .createQueryBuilder() - .select('data') - .where( - `alias = :alias AND id in (${val.join(',')})`, - { alias: ds.alias } - ) - .getRawMany() - - val = items.map(i => i.data) - } else { - let item = await rep - .createQueryBuilder() - .select('data') - .where( - `alias = :alias AND id = :id`, - { alias: ds.alias, id } - ) - .getRawOne() - - val = item.data - } - - return val - } - - private async getConfigByAlias(alias: string): Promise { - const rep = this.datasource.getRepository(ConfigItem) - let item = await rep - .createQueryBuilder() - .where( - `alias = 'datasource' AND (data ->> 'alias')::varchar = :alias and deleted_at IS NULL`, - { alias: alias } - ) - .getOne() - - if (item) { - item.data.fields.push(...SystemFields) - } - - return item ? item.data : null - } - - private prepareItemForUser(item: any, fields: Map, timezone = this.timezone):DataItem { + private prepareItemForUser(item: any, fields: Map, timezone = this.timezone):DataItem { let o = item Object.keys(item).forEach(alias => { - if (fields.has(alias) && ['datetime', 'time', 'date'].includes(fields.get(alias).type)) { + if (fields.has(alias) && ['datetime', 'date'].includes(fields.get(alias).type)) { item[alias] = item[alias] ? dayjs(Number(item[alias])).tz(timezone).format() : null } }) return o } - private convertFilterToSearch(filter: FilterItemInterface[], fields: Map) { + private convertFilterToSearch(filter: FilterItemInterface[], fields: Map) { let query:string = "" const andWhere = (where) => { if (where) query += query.length > 0 ? ` AND ${where}` : where } - const convertValue = (val, field: FieldConfigInterface) => { + const convertValue = (val, field: DatasourceField) => { if(['datetime', 'time', 'date'].includes(field.type)) { return val ? 
dayjs(val).utc(false).valueOf() : null } @@ -421,11 +384,16 @@ export class DataIndexer { throw task.error } - async updateIndexSettings(index: Index, fields: FieldConfigInterface[]) { + async updateIndexSettings(index: Index, fields: DatasourceField[]) { let sort = fields.filter(f=>f.sortable).map(f=>f.alias) let filter = fields.filter(f=>f.filterable).map(f=>f.alias) let search = fields.filter(f=>f.searchable).map(f=>f.alias) + // Need add filterable link field id to index linked data after update linked item + fields.filter(f => f.type === 'link' || f.type === 'enum').forEach(i => { + filter.push(`${i.alias}.id`) + }) + let taskUid = (await index.updateSettings({ sortableAttributes: sort, displayedAttributes: ['*'], @@ -464,4 +432,18 @@ export class DataIndexer { return true } + + async getFieldForLinkIndex(alias): Promise { + let rep = this.datasource.getRepository(DatasourceField) + let fields = await rep.createQueryBuilder() + .select('alias') + .where(`datasource_alias = :alias AND deleted_at IS NULL and type <> 'table'`, + { alias: alias}) + .getRawMany() + + let f = fields.map(f=> f.alias) + f.push('id') + return f + + } } \ No newline at end of file diff --git a/src/data-indexer/internal-db.adapter.ts b/src/data-indexer/internal-db.adapter.ts new file mode 100644 index 0000000..a22931a --- /dev/null +++ b/src/data-indexer/internal-db.adapter.ts @@ -0,0 +1,61 @@ +import { IndexerDataAdapter } from "./data-indexer.adapter"; +import { DataSource } from "typeorm"; +import { MeiliSearch } from "meilisearch"; +import { Context } from "../entities/context"; +import { DataSourceV2Dto } from "../datasources/dto/datasourceV2.dto"; +import { DatasourceField } from "../datasources/entities/field.entity"; +import * as dayjs from "dayjs"; + +export class InternalDbAdapter extends IndexerDataAdapter { + constructor(dataSource: DataSource, searchClient: MeiliSearch) { + super(dataSource, searchClient) + } + + // Get items from database to index documents into meilisearch + async getData(ds: DataSourceV2Dto, context: Context, ids?: string[]) : Promise { + const schema = `"account_data${context.accountId}"."${ds.alias}"` + console.log("Reindex for schema ", schema) + + let fields = ds.fields.map(f => `"${f.alias}"` ) + let query = `SELECT ${fields.join(', ')} FROM ${schema}` + + if (ids && ids.length > 0) { + query += ` WHERE id IN (${ids.join(',')})` + } + + let items = await this.dataSource.query(query) + + let docs = [] + for (let i in items) { + docs.push(await this.prepareItemForIndex(items[i], ds.fields)) + } + return docs + } + + private async prepareItemForIndex(item: any, fields: DatasourceField[]): Promise { + let o = {} + for(let i in fields) { + const field = fields[i] + let val = item[field.alias] + + if (field.isMultiple) { + val = JSON.parse(val) + } else { + if(['datetime', 'date'].includes(field.type)) { + val = val ? dayjs(val).utc(false).valueOf() : null + } + + if (field.type === 'number') { + val = Number(val) + } + + if (field.type === 'string' ) { + val = val !== undefined && val !== null ? 
String(val) : "" + } + } + + o[field.alias] = val + } + return o + } +} \ No newline at end of file diff --git a/src/data-indexer/internal.adapter.ts b/src/data-indexer/internal.adapter.ts new file mode 100644 index 0000000..0c90d85 --- /dev/null +++ b/src/data-indexer/internal.adapter.ts @@ -0,0 +1,62 @@ +import { IndexerDataAdapter } from "./data-indexer.adapter"; +import { DataSource } from "typeorm"; +import { MeiliSearch } from "meilisearch"; +import { Context } from "../entities/context"; +import { DataSourceV2Dto } from "../datasources/dto/datasourceV2.dto"; +import { DatasourceField } from "../datasources/entities/field.entity"; +import * as dayjs from "dayjs"; +import { DataItem } from "../datasources/entities/dataitem.entity"; + +export class InternalAdapter extends IndexerDataAdapter { + constructor(dataSource: DataSource, searchClient: MeiliSearch) { + super(dataSource, searchClient) + } + + async getData(ds: DataSourceV2Dto, context: Context, ids?: string[]) : Promise { + const rep = this.dataSource.getRepository(DataItem) + let query = rep + .createQueryBuilder() + .select('data, ' + ds.fields.filter(f=>f.isSystem).map(f => f.alias).join(',')) + .where( + `alias = :alias`, + { alias: ds.alias } + ) + + if (ids && ids.length > 0) { + query.andWhere(`id IN (${ids.join(',')})`) + } + + let items = await query.getRawMany() + + let docs = [] + for (let i in items) { + docs.push(await this.prepareItemForIndex(items[i], ds.fields/*, linkDataSource*/)) + } + return docs + } + + private async prepareItemForIndex(item: any, fields: DatasourceField[]): Promise { + let o = {} + + for(let i in fields) { + const field = fields[i] + let val = field.isSystem ? item[field.alias] : item.data[field.alias] + + if(['datetime', 'time', 'date'].includes(field.type)) { + val = val ? dayjs(val).utc(false).valueOf() : null + } + + if (field.type === 'number') { + val = Number(val) + } + + if (!field.isMultiple && field.type === 'string' ) { + val = val !== undefined && val !== null ? 
String(val) : "" + } + + o[field.alias] = val + } + return o + } + +} \ No newline at end of file diff --git a/src/datasources/datasourceV2.contreller.ts b/src/datasources/datasourceV2.contreller.ts index 9113ad0..16d4ddd 100644 --- a/src/datasources/datasourceV2.contreller.ts +++ b/src/datasources/datasourceV2.contreller.ts @@ -1,11 +1,11 @@ import { Body, - Controller, + Controller, Delete, Get, HttpCode, HttpException, HttpStatus, Param, - Post, Query, + Post, Put, Query, Req, UseGuards, UseInterceptors, @@ -18,10 +18,16 @@ import { DataSourceV2Service } from "./datasourceV2.service"; import { DataIndexRequestDto, DataIndexResponseDto, + DeleteDataSourceDataRequestDto, GetDataManyRequestDto, GetDataManyResponseDto, GetManyResponseDto, - GetRevisionsResponseDto + GetRevisionsResponseDto, + InsertDataSourceRequestDto, + InsertDataSourceResponseDto, + ResponseDto, + UpsertDataSourceDataRequestDto, + UpsertDataSourceDataResponseDto } from "./dto/datasourceV2.dto"; import { Context } from "../entities/context"; import { DataSourceInterceptor } from "./datasourceV2.interceptor"; @@ -46,23 +52,9 @@ export class DataSourceV2Controller { @Body() body: GetDataManyRequestDto, @Req() req: Request, ): Promise { - let config = req['datasource.config'] - - if (config.source !== 'internal') { - throw new HttpException( - { - success: false, - statusCode: HttpStatus.BAD_REQUEST, - error: `DataSource ${alias} is not an internal source` - }, HttpStatus.BAD_REQUEST) - } - let res try { - res = await this.dsService.getDataMany({ - ...body, - dataSourceConfig: config - }, this.getContext(req)) + res = await this.dsService.getDataMany(alias, body, this.getContext(req)) } catch (e) { throw new HttpException( { @@ -72,9 +64,6 @@ export class DataSourceV2Controller { ) } - - - return { statusCode: 200, ...res @@ -96,10 +85,9 @@ export class DataSourceV2Controller { ): Promise { let config = req['datasource.config'] - if (config.source !== 'internal') { + if (config.type !== 'internal' || config.type !== 'internal-db') { throw new HttpException( { - success: false, statusCode: HttpStatus.BAD_REQUEST, error: `DataSource ${alias} is not an internal source` }, HttpStatus.BAD_REQUEST) @@ -207,6 +195,33 @@ export class DataSourceV2Controller { } } + @UseGuards(JwtAuthGuard) + @Version(['2']) + @Get('') + @HttpCode(200) + @ApiOperation({ summary: 'Get many datasource' }) + async getManyV2( + @Body() body: GetDataManyRequestDto, + @Req() req: Request, + ): Promise { + try { + let items = await this.dsService.getManyV2(this.getContext(req)) + return { + statusCode: 200, + items: items.items, + count: items.count + } + } catch (e) { + throw new HttpException( + { + statusCode: HttpStatus.INTERNAL_SERVER_ERROR, + error: e.toString(), + }, + HttpStatus.INTERNAL_SERVER_ERROR + ) + } + } + @UseGuards(JwtAuthGuard) @UseInterceptors(DataSourceInterceptor) @Version(['2']) @@ -236,6 +251,160 @@ export class DataSourceV2Controller { } } + @UseGuards(JwtAuthGuard) + @Version(['2']) + @Post('') + @HttpCode(200) + @ApiOperation({ summary: 'Insert a new datasource' }) + async insertDataSource( + @Req() req: Request, + @Body() body: InsertDataSourceRequestDto + ): Promise { + if (body.type === 'internal') + throw new HttpException( + { + statusCode: HttpStatus.NOT_ACCEPTABLE, + error: "Datasource with internal type is not allowed in api v2", + }, + HttpStatus.INTERNAL_SERVER_ERROR + ) + try { + let data = await this.dsService.insertDataSource(body, this.getContext(req)) + + return { + id: data.id, + statusCode: HttpStatus.OK + } + } 
catch (e) { + throw new HttpException( + { + statusCode: HttpStatus.INTERNAL_SERVER_ERROR, + error: e.toString(), + }, + HttpStatus.INTERNAL_SERVER_ERROR + ) + } + } + + @UseGuards(JwtAuthGuard) + @Version(['2']) + @Put('/:alias') + @HttpCode(200) + @UseInterceptors(DataSourceInterceptor) + @ApiOperation({ summary: 'Update datasource by alias' }) + async updateDataSource( + @Req() req: Request, + @Param('alias') alias: string, + @Body() body: InsertDataSourceRequestDto + ): Promise { + let config = req['datasource.config'] + if (config.type !== 'internal-db') + throw new HttpException( + { + statusCode: HttpStatus.NOT_ACCEPTABLE, + error: "Datasource with type internal-db only allowed to update in api v2", + }, + HttpStatus.INTERNAL_SERVER_ERROR + ) + try { + let data = await this.dsService.updateDataSource(alias, body, this.getContext(req)) + return { + id: data.id, + statusCode: HttpStatus.OK + } + } catch (e) { + throw new HttpException( + { + statusCode: HttpStatus.INTERNAL_SERVER_ERROR, + error: e.toString(), + }, + HttpStatus.INTERNAL_SERVER_ERROR + ) + } + } + + @UseGuards(JwtAuthGuard) + @Version(['2']) + @Put('/:alias/data') + @HttpCode(200) + @UseInterceptors(DataSourceInterceptor) + @ApiOperation({ summary: 'Update items in datasource' }) + async upsertDataSourceItems( + @Req() req: Request, + @Param('alias') alias: string, + @Body() body: UpsertDataSourceDataRequestDto + ): Promise { + let config = req['datasource.config'] + if (config.type !== 'internal-db') + throw new HttpException( + { + statusCode: HttpStatus.NOT_ACCEPTABLE, + error: "Datasource with type internal-db only allowed to update items in api v2", + }, + HttpStatus.INTERNAL_SERVER_ERROR + ) + try { + let data = await this.dsService.upsertDataSourceItems(alias, body, this.getContext(req)) + return { + statusCode: HttpStatus.OK, + items: data + } + } catch (e) { + if (e instanceof Object) { + throw new HttpException( + { + statusCode: HttpStatus.BAD_REQUEST, + message: e, + }, + HttpStatus.BAD_REQUEST + ) + } else + throw new HttpException( + { + statusCode: HttpStatus.INTERNAL_SERVER_ERROR, + error: e.toString(), + }, + HttpStatus.INTERNAL_SERVER_ERROR + ) + } + } + + @UseGuards(JwtAuthGuard) + @Version(['2']) + @Delete('/:alias/data') + @HttpCode(200) + @UseInterceptors(DataSourceInterceptor) + @ApiOperation({ summary: 'Delete items in datasource by id or condition' }) + async deleteDataSourceItems( + @Req() req: Request, + @Param('alias') alias: string, + @Body() body: DeleteDataSourceDataRequestDto + ): Promise { + let config = req['datasource.config'] + if (config.type !== 'internal-db') + throw new HttpException( + { + statusCode: HttpStatus.NOT_ACCEPTABLE, + error: "Datasource with type internal-db only allowed to delete items in api v2", + }, + HttpStatus.INTERNAL_SERVER_ERROR + ) + try { + await this.dsService.deleteDataSourceItems(alias, body, this.getContext(req)) + return { + statusCode: HttpStatus.OK + } + } catch (e) { + throw new HttpException( + { + statusCode: HttpStatus.INTERNAL_SERVER_ERROR, + error: e.toString(), + }, + HttpStatus.INTERNAL_SERVER_ERROR + ) + } + } + getContext(req: Request) : Context { return { accountId: req['accountId'], diff --git a/src/datasources/datasourceV2.interceptor.ts b/src/datasources/datasourceV2.interceptor.ts index ed80ce8..2c5f754 100644 --- a/src/datasources/datasourceV2.interceptor.ts +++ b/src/datasources/datasourceV2.interceptor.ts @@ -7,8 +7,8 @@ import { CallHandler } from "@nestjs/common"; import { Observable } from 'rxjs'; -import { 
DataSourceConfigInterface } from "./entities/datasource.entity"; import { DataSourceV2Service } from "./datasourceV2.service"; +import { DataSourceV2Dto } from "./dto/datasourceV2.dto"; @Injectable() export class DataSourceInterceptor implements NestInterceptor { @@ -19,14 +19,15 @@ export class DataSourceInterceptor implements NestInterceptor { ): Promise> { const request = context.switchToHttp().getRequest(); - let config: DataSourceConfigInterface = await this.dsService.getConfigByAlias(request.params['alias']) + console.log() + let config: DataSourceV2Dto = await this.dsService.getConfigByAlias(request.params['alias']) if (!config) { throw new HttpException( { success: false, statusCode: HttpStatus.NOT_FOUND, - error: `DataSource1 ${request.params['alias']} is not exists` + error: `DataSource ${request.params['alias']} doesn't exist` }, HttpStatus.NOT_FOUND) } let req = context.switchToHttp().getRequest() diff --git a/src/datasources/datasourceV2.internal.ts b/src/datasources/datasourceV2.internal.ts new file mode 100644 index 0000000..f51ee33 --- /dev/null +++ b/src/datasources/datasourceV2.internal.ts @@ -0,0 +1,304 @@ +import { DataSourceV2Dto } from "./dto/datasourceV2.dto"; +import { DataSource, QueryRunner } from "typeorm"; +import { Context } from "../entities/context"; +import { Logger } from "@nestjs/common"; +import { flakeId } from "../flake-id"; +import * as dayjs from "dayjs"; +import * as utc from "dayjs/plugin/utc"; +import * as timezone from "dayjs/plugin/timezone"; +import { DataIndexer } from "../data-indexer/data-indexer"; +import { EventEmitter2 } from "@nestjs/event-emitter"; +dayjs.extend(utc); +dayjs.extend(timezone); + +export class UpsertParams { + items: any[] + returnItems?: boolean +} + +export class CreateParamsDto { + config: DataSourceV2Dto + dataSource: DataSource + context: Context + logger: Logger + indexer: DataIndexer + timezone?: string + eventEmitter: EventEmitter2 +} + +export class InternalDBDatasource { + constructor(params: CreateParamsDto) { + this.config = params.config + this.datasource = params.dataSource + this.context = params.context + this.logger = params.logger + this.indexer = params.indexer + this.timezone = params.timezone || 'Europe/Moscow' + this.eventEmitter = params.eventEmitter + } + + private readonly config: DataSourceV2Dto = null + private readonly datasource: DataSource = null + private readonly context: Context = null + private readonly logger: Logger + private readonly indexer: DataIndexer + private readonly timezone + readonly eventEmitter: EventEmitter2 + private schema = () => { + return `"account_data${this.context.accountId}"."${this.config.alias}"` + } + private schemaC = () => { + return `account_data${this.context.accountId}.${this.config.alias}` + } + + async getMany(params) { + return await this.indexer.getDataMany(params, this.config, this.context) + } + + async getById(id) { + + let fields = this.config.fields.map(f => `"${f.alias}"` ) + let query = `SELECT ${fields.join(', ')} FROM ${this.schema()} WHERE id = '${id}'` + + let data = await this.datasource.query(query) + + this.logger.log(`getById, ${query}`) + + if (!data.length) + return null + + let item = data[0] + for(let i in this.config.fields) { + + let field = this.config.fields[i] + switch (field.type) { + case 'datetime': + item[field.alias] = item[field.alias] + ? dayjs(item[field.alias]).tz(this.timezone).format() + : null + break + case 'date': + item[field.alias] = item[field.alias] + ? 
dayjs(item[field.alias]).tz(this.timezone).format('YYYY-MM-DD') + :null + break; + case 'text': + case 'link': + case 'enum': + case 'string': + case 'bool': + case 'time': + break; + case 'number': + item[field.alias] = Number(item[field.alias]) + } + } + + return item + } + + async upsert(params: UpsertParams) { + this.logger.log(`upsert, ${params.items.length} items`) + + + let queryRunner = this.datasource.createQueryRunner() + await queryRunner.startTransaction() + try { + + let items = [] + for(let i in params.items) { + let upsItem = params.items[i] + + let fItem = null + let existItem = null + + if (upsItem.id) { + existItem = await this.getById(upsItem.id) + } + + if (!existItem) { + fItem = await this.insertItem(queryRunner, upsItem, Number(i)) + } else { + fItem = await this.updateItem(queryRunner, upsItem, existItem) + } + + if (params.returnItems) + items.push(fItem) + } + + await queryRunner.commitTransaction() + + this.eventEmitter.emit(`data-update.${this.config.alias}.updated`, { + ids: items.map(i => i.id), + alias: this.config.alias, + context: this.context + }) + + return params.returnItems ? items : undefined + } catch (e) { + await queryRunner.rollbackTransaction() + this.logger.error(e) + throw e + } finally { + await queryRunner.release() + } + } + + async deleteById(params: { + ids: string[] + soft?: boolean + }) { + this.logger.log("deleteById", params) + if (!params.ids.length) + throw "Ids must by not empty" + + let queryRunner = this.datasource.createQueryRunner() + await queryRunner.startTransaction() + try { + + if (params.soft) { + await queryRunner.manager.query(`UPDATE ${this.schema()} SET deleted_at = $1, deleted_by = $2 WHERE id IN (${params.ids.join(',')})`, + [new Date(), this.context.userId]) + } else { + await queryRunner.manager.query(`DELETE FROM ${this.schema()} WHERE id IN (${params.ids.join(',')})`) + } + + await queryRunner.commitTransaction() + } catch (e) { + await queryRunner.rollbackTransaction() + this.logger.error(e) + throw e + } finally { + await queryRunner.release() + } + } + + async deleteBy(params: {where: string, soft?: boolean}) { + if (!params || !params.where) + throw "Where must by not null" + + this.logger.log("deleteBy", params) + let queryRunner = this.datasource.createQueryRunner() + await queryRunner.startTransaction() + try { + + if (params.soft) { + await queryRunner.manager.query(`UPDATE ${this.schema()} SET deleted_at = $1, deleted_by = $2 WHERE ${params.where}`, + [new Date(), this.context.userId]) + } else { + + let query = `DELETE FROM ${this.schema()} WHERE ${params.where}` + this.logger.log(query) + await queryRunner.manager.query(query) + } + + await queryRunner.commitTransaction() + + + } catch (e) { + await queryRunner.rollbackTransaction() + this.logger.error(e) + throw e + } finally { + await queryRunner.release() + } + } + + private async insertItem(queryRunner:QueryRunner, item: any, id?: number) { + let notValid = this.isNotValid(item) + if (notValid) + throw notValid + + item = this.convertToDBValue(item) + + item['created_by'] = this.context.userId + item['updated_by'] = this.context.userId + item['created_at'] = new Date() + item['updated_at'] = new Date() + item['id'] = flakeId(id).toString() + item['version'] = 1 + + + console.log(item) + + try{ + await queryRunner.manager.insert(this.schemaC(), item) + } catch (e) { + this.logger.error(e) + throw e + } + + return item + } + + private async updateItem(queryRunner:QueryRunner, item: any, old: any) { + let notValid = this.isNotValid(item) + if 
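When `upsert` above commits, it emits `data-update.<alias>.updated`; `DataSourceV2Service.handleDataSourceDataUpdate` further down in this diff subscribes to `data-update.**` and queues a reindex for the affected ids. A condensed sketch of that contract — the payload shape mirrors the emit call above, and the listener body is reduced to a comment:

```ts
import { OnEvent } from "@nestjs/event-emitter";
import { Context } from "../entities/context";

// Emitted by InternalDBDatasource.upsert():
//   this.eventEmitter.emit(`data-update.${alias}.updated`, { ids, alias, context })
interface DataUpdateEvent {
  ids: string[];    // ids of the rows that were inserted or updated
  alias: string;    // datasource alias, used to look the config up again
  context: Context; // accountId / userId of the caller
}

class DataUpdateListenerSketch {
  @OnEvent("data-update.**", { async: true })
  async handleDataUpdate(event: DataUpdateEvent) {
    // DataSourceV2Service resolves the datasource config by event.alias and calls
    // dataReindex({ dataSourceConfig, ids: event.ids }, event.context), which
    // enqueues the indexing job on the 'datasource-data-indexing' queue.
  }
}
```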
(notValid) + throw notValid + + item = this.convertToDBValue(item) + + item['updated_by'] = Number(this.context.userId) + item['updated_at'] = dayjs().tz(this.timezone).format() + item['version'] = Number(old['version']) + 1 + + let params = [] + let upd = "" + + let cnt = 1 + for (const [key, value] of Object.entries(item)) { + + // If field not found, we don't insert into query that + if (this.config.fields.find(f => f.alias === key)) { + if (upd) upd += ', ' + + upd += `"${key}" = $${cnt++}` + params.push(value) + old[key] = value + } + } + + + try{ + let query = `UPDATE ${this.schema()} SET ${upd} WHERE id = ${old.id}` + await queryRunner.query(query, params) + return old + } catch (e) { + this.logger.error(e) + throw e + } + } + + isNotValid(item: any) { + let errors = [] + for(const i in this.config.fields) { + const field = this.config.fields[i] + + if (!item[field.alias]) { + continue + } + + if (field.isMultiple && !Array.isArray(item[field.alias])) { + errors.push({ + field: field.alias, + error: `Value must be an array` + }) + } + } + return errors.length ? errors : null + } + + convertToDBValue(item) { + for (const [key, value] of Object.entries(item)) { + let field = this.config.fields.find(f => f.alias === key) + + if (!field) { + delete item[key] + } else if (field.isMultiple && value){ + item[key] = JSON.stringify(value) + } + } + + return item + } +} \ No newline at end of file diff --git a/src/datasources/datasourceV2.service.ts b/src/datasources/datasourceV2.service.ts index a704726..b009efd 100644 --- a/src/datasources/datasourceV2.service.ts +++ b/src/datasources/datasourceV2.service.ts @@ -1,25 +1,35 @@ -import { forwardRef, Inject, Injectable } from '@nestjs/common' +import { forwardRef, Inject, Injectable, Logger } from "@nestjs/common"; import { InjectDataSource } from '@nestjs/typeorm' -import { DataSource, QueryRunner } from "typeorm"; +import { DataSource, QueryRunner, Table, TableColumn } from "typeorm"; import { Context } from '../entities/context' import { FunctionsService } from '../functions/functions.service' -import { RoomsService } from '../rooms/rooms.service' import { - DataReindexDto, GetDataManyParamsDto, GetFieldsManyDto, + DataReindexDto, + DatasourceFieldDto, + DataSourceV2Dto, + DeleteDataSourceDataRequestDto, + GetDataManyRequestDto, + GetFieldsManyDto, GetRevisionByIdDto, GetRevisionsDto, - GetRevisionsResponseDto, SystemFields + GetRevisionsResponseDto, + InsertDataSourceRequestDto, + SystemFields, + UpsertDataSourceDataRequestDto, + FieldType } from "./dto/datasourceV2.dto"; import { DataItem, Revision } from "./entities/dataitem.entity"; import { User } from "../users/entities/user.entity"; -import { DataSourceConfigInterface } from "./entities/datasource.entity"; import { ConfigItem } from "../config/entities/config.entity"; import { InjectQueue } from "@nestjs/bullmq"; import { Queue } from "bullmq"; import { DataIndexer } from "../data-indexer/data-indexer"; import { ConfigService } from "@nestjs/config"; -import { OnEvent } from "@nestjs/event-emitter"; +import { EventEmitter2, OnEvent } from "@nestjs/event-emitter"; import { DatasourceField } from "./entities/field.entity"; +import { DatasourceV2Entity } from "./entities/datasourceV2.entity"; +import {flakeId} from "../flake-id" +import { InternalDBDatasource } from "./datasourceV2.internal"; @@ -30,21 +40,20 @@ export class DataSourceV2Service { private functionsService: FunctionsService, @InjectDataSource('default') private datasource: DataSource, - @Inject(RoomsService) - private 
rooms: RoomsService, @InjectQueue('datasource-data-indexing') private dataIndexingQueue: Queue, - private configService: ConfigService + private configService: ConfigService, + private eventEmitter: EventEmitter2 ) { this.indexer = new DataIndexer(configService, datasource) - this.indexer.setTimezone(configService.get('DEFAULT_TIMEZONE') || 'Europe/Moscow') - } - - private readonly indexer:DataIndexer = null + this.timezone = configService.get('DEFAULT_TIMEZONE') || 'Europe/Moscow' + this.indexer.setTimezone(this.timezone) - async getDataMany(params: GetDataManyParamsDto, context: Context) { - return await this.indexer.getDataMany(params, context) } + private readonly logger = new Logger(DataSourceV2Service.name); + private readonly indexer : DataIndexer = null + private readonly timezone : string + async getRevisions(params: GetRevisionsDto, context: Context) : Promise { let rep = this.datasource.getRepository(Revision) const query = await rep @@ -107,8 +116,8 @@ export class DataSourceV2Service { } async dataReindex(params: DataReindexDto, context: Context) : Promise<{ jobId: string }> { - if (params.dataSourceConfig.source !== 'internal') - throw `DataSource is not an internal source` + if (params.dataSourceConfig.type !== 'internal' && params.dataSourceConfig.type !== 'internal-db') + throw `DataSource is not an internal or internal-db source` // await this.indexer.dataReindex(params, context) // return { @@ -125,21 +134,50 @@ export class DataSourceV2Service { } } - async getConfigByAlias(alias: string): Promise { - const rep = this.datasource.getRepository(ConfigItem) + // For internal use without context + // Don't use it to response to user + async getConfigByAlias(alias: string): Promise { + const rep = this.datasource.getRepository(DatasourceV2Entity) let item = await rep - .createQueryBuilder() + .createQueryBuilder('ds') .where( - `alias = 'datasource' AND (data ->> 'alias')::varchar = :alias and deleted_at IS NULL`, + `alias = :alias and deleted_at IS NULL`, { alias: alias } ) .getOne() if (item) { - item.data.fields.push(...SystemFields) + const rep = this.datasource.getRepository(DatasourceField) + item.fields = await rep + .createQueryBuilder() + .where( + `datasource_alias = :alias AND deleted_at IS NULL`, + { alias: alias } + ) + .getMany() } - return item ? item.data : null + if (item) { + item.fields.push(...SystemFields) + } + + return item ? 
item : null + } + + async getDataSource(alias: string, context: Context) { + let ds = await this.getConfigByAlias(alias) + if (!ds) + throw `Datasource ${alias} not found` + + return new InternalDBDatasource({ + config: ds, + dataSource: this.datasource, + context: context, + logger: this.logger, + indexer: this.indexer, + timezone: this.timezone, + eventEmitter: this.eventEmitter + }) } async getManyV1() { @@ -159,6 +197,21 @@ export class DataSourceV2Service { } } + async getManyV2(context: Context) { + const rep = this.datasource.getRepository(DatasourceV2Entity) + let items = await rep + .createQueryBuilder() + .select('id, version, title, type, alias') + .where('account_id = :account', { account: context.accountId }) + .orderBy('title') + .getRawMany() + + return { + items, + count: items.length + } + } + async getFieldsMany(params: GetFieldsManyDto, context: Context) { console.log(params, context) @@ -170,15 +223,19 @@ export class DataSourceV2Service { `datasource_alias = :alias AND deleted_at IS NULL`, { alias: alias } ) - .getMany() + .getMany() as DatasourceField[] } let items = await getFields(params.datasource) + + items.push(...SystemFields) + + // Need to collect all nested fields in linked datasource if (params.nested) { - let links = items.filter(f => f.type === 'link' && f.datasourceReference) + let links = items.filter(f => f.type === 'link' && f.datasourceReference && !f.isSystem) for(let i in links) { const field = links[i] let linked = (await getFields(field.datasourceReference)) @@ -197,10 +254,9 @@ export class DataSourceV2Service { items, count: items.length } - } - async updateField(queryRunner:QueryRunner, item: DataItem, context: Context) { + async updateFieldsFromV1(queryRunner:QueryRunner, item: DataItem, context: Context) { if (!item.data?.fields) { return } @@ -221,7 +277,7 @@ export class DataSourceV2Service { d.defaultValue = field.default ? field.default : "" d.datasourceReference = field.datasource d.autoincrement = !!field.autoincrement - d.required = !!field.required + d.isNullable = !!field.required d.enumValues = field.values d.precision = field.precision ? field.precision : null d.format = field.format ? 
field.format : "" @@ -261,7 +317,370 @@ export class DataSourceV2Service { .execute() } + async updateDatasourceFromV1(queryRunner:QueryRunner, ds: any, context: Context) { + console.log(context) + let data: DatasourceV2Entity = { + accountId: 1, + alias: ds.data.alias, + context: ds.data.context, + createdAt: ds.createdAt, + createdBy: ds.createdBy, + deletedAt: ds.deletedAt, + deletedBy: ds.deletedBy, + isSystem: false, + isTree: ds.data.isTree, + permissions: ds.data.permissions, + script: ds.data.script, + type: ds.data.source, + updatedAt: ds.updatedAt, + updatedBy: ds.updatedBy, + version: ds.version, + id: ds.id, + title: ds.data.title + } + + await queryRunner.manager + .createQueryBuilder() + .insert() + .into(DatasourceV2Entity) + .values(data) + .orUpdate(['alias', 'title', 'script', 'context', 'is_tree', + 'permissions', 'updated_by', 'updated_at', 'deleted_at', + 'type', 'deleted_by', 'version'], + ['id']) + .execute() + + } + + async insertDataSource(ds: InsertDataSourceRequestDto, context: Context) { + if (await this.dataSourceAliasExists(ds.alias)) { + throw `Datasource with alias '${ds.alias}' exists` + } + + let queryRunner = this.datasource.createQueryRunner() + await queryRunner.startTransaction() + try { + + let data: DatasourceV2Entity = { + id: flakeId().toString(), + accountId: context.accountId, + version: 1, + type: ds.type, + title: ds.title, + isSystem: false, + isTree: ds.isTree, + alias: ds.alias, + createdBy: context.userId, + updatedBy: context.userId, + permissions: ds.permissions, + createdAt: new Date(), + updatedAt: new Date() + } + await queryRunner.manager + .createQueryBuilder() + .insert() + .into(DatasourceV2Entity) + .values(data) + .execute() + + await this.upsertFields(queryRunner, data, ds.fields, context) + await this.addDataSourceSchema({ + queryRunner, + ds: data, + fields: ds.fields, + context, + }) + + await queryRunner.commitTransaction() + await this.upsertIndex(ds.alias, context) + + return { + id: data.id + } + } catch (e) { + await queryRunner.rollbackTransaction() + this.logger.error(e) + throw e + } finally { + await queryRunner.release() + } + } + + async updateDataSource(alias: string, ds: InsertDataSourceRequestDto, context: Context) { + console.log(alias) + let currentDs = await this.getConfigByAlias(alias) + if (!currentDs) { + throw `DataSource with alias '${alias}' doesn't exist` + } + + let queryRunner = this.datasource.createQueryRunner() + await queryRunner.startTransaction() + try { + + let data: DatasourceV2Entity = { + id: currentDs.id, + accountId: context.accountId, + version: currentDs.version + 1, + type: ds.type, + title: ds.title, + isSystem: false, + isTree: ds.isTree, + alias: ds.alias, + createdBy: currentDs.createdBy, + createdAt: currentDs.createdAt, + updatedAt: new Date(), + updatedBy: context.userId, + permissions: ds.permissions ? 
ds.permissions : undefined + } + await queryRunner.manager + .createQueryBuilder() + .update(DatasourceV2Entity) + .set(data) + .where("id = :id", {id: currentDs.id}) + .execute() + + await this.upsertFields(queryRunner, data, ds.fields, context) + + await this.updateDataSourceSchema({queryRunner, ds: data, fields: ds.fields, oldDs: currentDs, context}) + + await queryRunner.commitTransaction() + + try { + await this.upsertIndex(ds.alias, context) + } catch (e) { + this.logger.error(e) + } + + return { + id: currentDs.id + } + } catch (e) { + await queryRunner.rollbackTransaction() + this.logger.error(e) + throw e + } finally { + await queryRunner.release() + } + } + + private async upsertIndex(alias: string, context: Context) { + let ds = await this.getConfigByAlias(alias) + let indexUid = this.indexer.getIndexUid(ds.alias, context) + let index = await this.indexer.getIndex(indexUid) + if (!index) { + await this.indexer.createIndex(indexUid) + index = await this.indexer.getIndex(indexUid) + } + await this.indexer.updateIndexSettings(index, ds.fields) + await this.indexer.dataReindex({dataSourceConfig: ds}, context) + } + + async updateDataSourceSchema(params: {queryRunner:QueryRunner, ds: DatasourceV2Entity, oldDs: DataSourceV2Dto, fields: DatasourceFieldDto[], context: Context}) { + const schema = `account_data${params.context.accountId}` + + //Rename old datasource table if exists + if(params.oldDs.alias !== params.ds.alias) { + let table = new Table({ + schema: schema, + name: params.oldDs.alias + }) + if (await params.queryRunner.hasTable(table)) { + await params.queryRunner.renameTable(table, params.ds.alias) + } + } + + let table = new Table({ + schema: schema, + name: params.ds.alias + }) + + if (!(await params.queryRunner.hasTable(table))) { + await this.addDataSourceSchema({ + queryRunner: params.queryRunner, + ds: params.ds, + fields: params.fields, + context: params.context + }) + return + } + + let oldTable = await params.queryRunner.getTable(`${schema}.${params.ds.alias}`) + + for(let i in params.fields) { + const field = params.fields[i] + const column = oldTable.columns.find(c => c.name === field.alias) + if (!column) { + await params.queryRunner.addColumn(oldTable, new TableColumn({ + name: field.alias, + type: this.dataSourceTypeToColumnType(field.type), + comment: field.title, + isNullable: !!field.isNullable || true, + default: field.defaultValue ? field.defaultValue : undefined + })) + } else { + await params.queryRunner.changeColumn(oldTable, column, new TableColumn({ + name: field.alias, + type: this.dataSourceTypeToColumnType(field.type), + comment: field.title, + isNullable: !!field.isNullable || true, + default: field.defaultValue ? 
field.defaultValue : undefined + })) + } + } + } + + async addDataSourceSchema(params: {queryRunner:QueryRunner, ds: DatasourceV2Entity, context: Context, fields: DatasourceFieldDto[]}) { + const schema = `account_data${params.context.accountId}` + + let columns: any[] = [{ + name: "id", + type: "bigserial", + comment: "", + isPrimary: true, + }] + + columns.push(...params.fields.map(f => { + return { + name: f.alias, + type: this.dataSourceTypeToColumnType(f.type), + comment: f.title, + default: f.defaultValue + } + })) + SystemFields.forEach(f => { + if (f.alias !== 'id') { + columns.push({ + name: f.alias, + type: this.dataSourceTypeToColumnType(f.type), + comment: f.title, + isNullable: !!f.isNullable, + default: f.defaultValue + }) + } + }) + + let table = new Table({ + schema: schema, + name: params.ds.alias, + columns: columns + }) + + if (await params.queryRunner.hasTable(table)) { + throw `Table ${params.ds.alias} in schema ${schema} exists` + } + + await params.queryRunner.createSchema(schema, true) + await params.queryRunner.createTable(table) + } + + dataSourceTypeToColumnType(type: FieldType) : string { + switch (type) { + case "number": return "numeric" + case "date": return "date" + case "datetime": return "timestamp with time zone" + case "time": return "time" + case "text": return "text" + case "bool": return "bool" + case "table":return "jsonb" + case "link": return "bigint" + case "string": + case "enum": + case "image": + case "file": return "varchar" + default: return "varchar" + } + } + + + + async upsertFields(queryRunner:QueryRunner, ds: DatasourceV2Entity, fields: DatasourceFieldDto[], context: Context) { + if (!fields.length) { + return + } + + for(const i in fields) { + let field = fields[i] + let d: DatasourceField = new DatasourceField() + + d.datasourceId = ds.id + d.datasourceAlias = ds.alias + d.alias = field.alias + d.type = field.type + d.title = field.title + d.searchable = !!field.searchable + d.filterable = !!field.filterable + d.sortable = !!field.sortable + d.isMultiple = !!field.isMultiple + d.defaultValue = field.defaultValue + d.datasourceReference = field.datasourceReference + d.autoincrement = field.autoincrement + d.isNullable = field.isNullable + d.enumValues = field.enumValues + d.precision = field.precision + d.format = field.format + d.accountId = context.accountId + d.updatedAt = new Date() + d.createdBy = context.userId + d.updatedBy = context.userId + d.version = ds.version + d.deletedAt = null + d.deletedBy = null + + await queryRunner.manager + .createQueryBuilder() + .insert() + .into(DatasourceField) + .values(d) + .orUpdate(['type', 'title', 'searchable', 'filterable', + 'sortable', 'is_multiple','default_value', 'datasource_ref', + 'autoincrement', 'nullable', 'enum_values', 'precision', + 'format', 'updated_by', 'updated_at', 'deleted_at', + 'deleted_by', 'version', 'datasource_alias'], + ['datasource_id', 'alias']) + .execute() + } + + let del = { + deletedBy: context.userId, + deletedAt: new Date() + } + + await queryRunner.manager + .createQueryBuilder() + .update(DatasourceField) + .set(del) + .where(`version <> ${ds.version} AND datasource_id = '${ds.id}'` + ) + .execute() + } + + async dataSourceAliasExists(alias: string) : Promise { + const rep = this.datasource.getRepository(DatasourceV2Entity) + let cnt = await rep.countBy({alias: alias}) + return cnt > 0 + } + + async getDataMany(alias: string, params: GetDataManyRequestDto, context: Context) { + let inst = await this.getDataSource(alias, context) + return 
inst.getMany(params) + } + + async upsertDataSourceItems(alias: string, data: UpsertDataSourceDataRequestDto, context: Context) { + let inst = await this.getDataSource(alias, context) + return inst.upsert(data) + } + + async deleteDataSourceItems(alias, data: DeleteDataSourceDataRequestDto, context: Context) { + let inst = await this.getDataSource(alias, context) + if (data.ids) { + return inst.deleteById({ids: data.ids, soft: data.soft !== undefined ? data.soft : true}) + } else if (data.where) { + return inst.deleteBy({where: data.where, soft: data.soft !== undefined ? data.soft : true}) + } else + throw "No items to delete" + + } @OnEvent('config-update.datasource.*', {async: true}) async handleDataSourceConfigUpdate(data) { @@ -269,7 +688,8 @@ export class DataSourceV2Service { let queryRunner = this.datasource.createQueryRunner() await queryRunner.startTransaction() try { - await this.updateField(queryRunner, data.item, data.context) + await this.updateFieldsFromV1(queryRunner, data.item, data.context) + await this.updateDatasourceFromV1(queryRunner, data.item, data.context) await queryRunner.commitTransaction() } catch (e) { await queryRunner.rollbackTransaction() @@ -286,11 +706,15 @@ export class DataSourceV2Service { @OnEvent('data-update.**', {async: true}) async handleDataSourceDataUpdate(data) { - //console.log(data) - let config = await this.getConfigByAlias(data.item.alias) + if (!data.alias) { + this.logger.error("Event data-update.** doesn't have alias of datasource") + return; + } + + let config = await this.getConfigByAlias(data.alias) await this.dataReindex({ dataSourceConfig: config, - ids: [data.item.id] + ids: data.ids }, data.context) } diff --git a/src/datasources/datasources.service.ts b/src/datasources/datasources.service.ts index d69f8e5..50e0f47 100644 --- a/src/datasources/datasources.service.ts +++ b/src/datasources/datasources.service.ts @@ -130,6 +130,7 @@ export class DataSourcesService { } async getByAlias(alias: string, context: Context) { + console.log('getByAlias') let config = await this.getConfig(alias) if (config.source !== 'internal') { diff --git a/src/datasources/dto/datasourceV2.dto.ts b/src/datasources/dto/datasourceV2.dto.ts index fa7d218..8818d13 100644 --- a/src/datasources/dto/datasourceV2.dto.ts +++ b/src/datasources/dto/datasourceV2.dto.ts @@ -2,14 +2,176 @@ import { ArrayMaxSize, ArrayMinSize, - IsArray, - IsInt, + IsArray, IsBoolean, IsEnum, + IsInt, IsNotEmpty, IsNumber, IsObject, IsOptional, - IsString + IsString, Matches, MinLength, ValidateIf, ValidateNested } from "class-validator"; -import { DataSourceConfigInterface } from "../entities/datasource.entity"; import { FilterItemInterface } from "./datasource.dto"; -import { FieldConfigInterface } from "../../entities/field"; +import { DatasourceField } from "../entities/field.entity"; +import { Type } from "class-transformer"; + +export enum DataSourceType { + internal = 'internal', + internalDB = 'internal-db', + custom = 'custom', + field = 'field', +} + +export interface EnumValue { + key: string + title: string +} + +export type AccessType = 'all' | 'roles' | 'nobody' +export enum Access { + 'canSelect' = 'canSelect', + 'canAdd' = 'canAdd', + 'canUpdate' = 'canUpdate', + 'canDelete' = 'canDelete' +} + +export interface DataSourceV2Dto { + id?: string + alias: string + title: string + type: DataSourceType + isTree: boolean + isSystem: boolean + permissions: { + [key in Access]: { + type: AccessType + roles?: string[] + } + } + script?: string // Only for type = custom + context?: 
string + fields?: DatasourceField[] + version: number + createdBy: number + updatedBy: number + deletedBy?: number + createdAt: Date + updatedAt: Date + deletedAt?: Date +} + +export enum FieldType { + 'number' = 'number', + 'string' = 'string', + 'bool' = 'bool', + 'text' = 'text', + 'enum' = 'enum', + 'image' = 'image', + 'file' = 'file', + 'datetime' = 'datetime', + 'date' = 'date', + 'time' = 'time', + 'link' = 'link', + 'table' = 'table' +} + + +export class DatasourceFieldDto { + @IsString() + @Matches(`^[a-zA-Z_][a-zA-Z0-9_]*$`,"", + {message: "Alias is not valid"}) + alias: string + + @IsEnum(FieldType) + type: FieldType + + @IsString() + title: string + + @IsBoolean() + @IsOptional() + searchable?: boolean + + @IsBoolean() + @IsOptional() + filterable?: boolean + + @IsBoolean() + @IsOptional() + sortable?: boolean + + @IsBoolean() + @IsOptional() + isMultiple?: boolean + + @IsString() + @IsOptional() + defaultValue?: string + + @IsString() + @ValidateIf(o => o.type === 'link' || o.type === 'table') + datasourceReference?: string + + @IsBoolean() + @IsOptional() + @ValidateIf(o => o.type === 'number') + autoincrement?: boolean + + @IsBoolean() + @IsOptional() + isNullable?: boolean + + @IsNumber() + @IsOptional() + @ValidateIf(o => o.type === 'number') + precision?: number + + @IsString() + @IsOptional() + format?: string + + @IsArray() + @ValidateIf(o => o.type === 'enum') + enumValues?: EnumValue[] +} + +export class InsertDataSourceRequestDto { + @IsString() + @Matches(`^[a-zA-Z_][a-zA-Z0-9_]*$`,"", + {message: "Alias is not valid"}) + alias: string + + @IsString() + title: string + + @IsEnum(DataSourceType) + type: DataSourceType + + @IsBoolean() + @IsOptional() + isTree: boolean + + @IsObject() + @IsOptional() + permissions: { + [key in Access]: { + type: AccessType + roles?: string[] + } + } + + @IsString() + @IsOptional() + script?: string // Only for type = custom + + @IsString() + @IsOptional() + context?: string + + @IsArray() + @IsNotEmpty() + @ValidateNested({each: true}) + @Type(() => DatasourceFieldDto) + fields?: DatasourceFieldDto[] +} + + export class GetRevisionsDto { dataSourceAlias: string @@ -50,6 +212,10 @@ export class ResponseDto { message?: string[] } +export class InsertDataSourceResponseDto extends ResponseDto { + id: string +} + export class DataIndexResponseDto extends ResponseDto { jobId: string } @@ -72,7 +238,7 @@ export class GetFieldsManyDto { } export class DataReindexDto extends DataIndexRequestDto{ - dataSourceConfig: DataSourceConfigInterface + dataSourceConfig: DataSourceV2Dto } export class GetDataManyDto { @@ -128,83 +294,122 @@ export class GetDataManyRequestDto { parentId?: string } -export class GetDataManyParamsDto extends GetDataManyRequestDto{ - dataSourceConfig: DataSourceConfigInterface +export class UpsertDataSourceDataRequestDto { + @IsArray() + @ArrayMinSize(1) + @ArrayMaxSize(2) + items: any[] + + @IsOptional() + @IsBoolean() + returnItems?: boolean +} + +export class DeleteDataSourceDataRequestDto { + @IsArray() + @ArrayMinSize(1) + @ValidateIf(o => o.where === undefined || !o.where ) + ids?: string[] + + + @IsString() + @ValidateIf(o => o.ids === undefined) + @MinLength(3) + where?: string + + @IsOptional() + @IsBoolean() + soft?: boolean +} + +export class UpsertDataSourceDataResponseDto extends ResponseDto { + items: any[] } -export const SystemFields:FieldConfigInterface[] = [{ + +export const SystemFields:DatasourceField[] = [{ + id: "1", alias: "id", - type: "string", + type: FieldType.string, title: "Id", sortable: true, 
filterable: true, searchable: true, - isSystem: true + isSystem: true, + isNullable: false },{ + id: "2", alias: "version", - type: "number", + type: FieldType.number, title: "Version", sortable: true, filterable: true, - isSystem: true -},{ - alias: "rev", - type: "string", - title: "Revision", - sortable: true, - filterable: true, - isSystem: true + isSystem: true, + isNullable: false },{ + id: "3", alias: "parent_id", - type: "link", + type: FieldType.link, title: "Parent Id", sortable: true, filterable: true, - isSystem: true + isSystem: true, + isNullable: true },{ + id: "4", alias: "created_at", - type: "datetime", + type: FieldType.datetime, title: "Created At", sortable: true, filterable: true, - isSystem: true + isSystem: true, + defaultValue: "now()" },{ + id: "5", alias: "updated_at", - type: "datetime", + type: FieldType.datetime, title: "Updated At", sortable: true, filterable: true, - isSystem: true + isSystem: true, + defaultValue: "now()" + },{ + id: "6", alias: "created_by", - type: "link", - datasource: "users", + type: FieldType.link, + datasourceAlias: "users", title: "Created By", sortable: true, filterable: true, isSystem: true },{ + id: "7", alias: "updated_by", - type: "link", + type: FieldType.link, title: "Updated By", - datasource: "users", + datasourceAlias: "users", sortable: true, filterable: true, isSystem: true },{ + id: "8", alias: "deleted_at", - type: "datetime", + type: FieldType.datetime, title: "Deleted At", sortable: true, filterable: true, - isSystem: true + isSystem: true, + isNullable: true },{ + id: "9", alias: "deleted_by", - type: "link", - datasource: "users", + type: FieldType.link, + datasourceAlias: "users", title: "Deleted By", sortable: true, filterable: true, - isSystem: true + isSystem: true, + isNullable: true }] \ No newline at end of file diff --git a/src/datasources/entities/datasource.entity.ts b/src/datasources/entities/datasource.entity.ts index 9cc97d2..4a2ce69 100644 --- a/src/datasources/entities/datasource.entity.ts +++ b/src/datasources/entities/datasource.entity.ts @@ -34,6 +34,7 @@ export interface EventHandlerInterface { event: DataSourceEvent handler: HandlerInterface } + export interface HandlerInterface { type: HandlerType script?: string @@ -448,8 +449,6 @@ export class InternalDataSource { query.andWhere(`account_id = ${this.context.accountId}`) } - console.log('getByIdRaw', query.getQuery()) - return await query.getOne() } @@ -485,7 +484,8 @@ export class InternalDataSource { await queryRunner.commitTransaction() this.eventEmitter.emit(`data-update.${this.config.alias}.inserted`, { - item, + ids: [item.id], + alias: this.config.alias, context: this.context }) } catch (e) { @@ -498,7 +498,7 @@ export class InternalDataSource { if (invokeEvents) { await this.invokeEvents('onAdd', { old: null, - new: item, + new: item }) } @@ -551,7 +551,8 @@ export class InternalDataSource { await queryRunner.commitTransaction() this.eventEmitter.emit(`data-update.${this.config.alias}.updated`, { - item, + ids: [item.id], + alias: this.config.alias, context: this.context }) } catch (e) { @@ -725,7 +726,8 @@ export class InternalDataSource { await queryRunner.commitTransaction() this.eventEmitter.emit(`data-update.${this.config.alias}.removed`, { - item, + ids: [item.id], + alias: this.config.alias, context: this.context }) } catch (e) { @@ -911,7 +913,8 @@ export class InternalDataSource { await queryRunner.commitTransaction() this.eventEmitter.emit(`data-update.${this.config.alias}.imported`, { - context: this.context + context: 
this.context, + alias: this.config.alias, + }) + } catch (e) { + await queryRunner.rollbackTransaction() diff --git a/src/datasources/entities/datasourceV2.entity.ts b/src/datasources/entities/datasourceV2.entity.ts new file mode 100644 index 0000000..7eb051b --- /dev/null +++ b/src/datasources/entities/datasourceV2.entity.ts @@ -0,0 +1,72 @@ +import { Column, CreateDateColumn, Entity, PrimaryColumn, UpdateDateColumn } from "typeorm"; +import { Access, AccessType, DataSourceType, DataSourceV2Dto } from "../dto/datasourceV2.dto"; +import { DatasourceField } from "./field.entity"; + +@Entity({ name: 'datasource' }) +export class DatasourceV2Entity implements DataSourceV2Dto { + @PrimaryColumn({ type: 'bigint' }) + id: string + + @Column({ type: 'bigint', name: 'version' }) + version: number + + @Column({ type: 'int', name: 'account_id', select: false }) + accountId: number + + @Column({ type: 'varchar', name: 'alias' }) + alias: string + + @Column({ type: 'varchar', name: 'title' }) + title: string + + @Column({ type: 'boolean', name: 'is_tree' }) + isTree: boolean + + @Column({ type: 'boolean', name: 'is_system' }) + isSystem: boolean + + @Column({ type: 'varchar' }) + type: DataSourceType + + @Column({ type: 'jsonb' }) + permissions: { + [key in Access]: { + type: AccessType + roles?: string[] + } + } + + @Column({ type: 'text' }) + script?: string + + @Column({ type: 'text' }) + context?: string + + fields?: DatasourceField[]; + + @CreateDateColumn({ + type: 'timestamp', + name: 'created_at', + default: () => 'CURRENT_TIMESTAMP(3)', + }) + public createdAt: Date + + @UpdateDateColumn({ + type: 'timestamp', + name: 'updated_at', + default: () => 'CURRENT_TIMESTAMP(3)', + }) + public updatedAt: Date + + @Column({ type: 'timestamp', name: 'deleted_at' }) + public deletedAt?: Date + + @Column({ type: 'int', name: 'created_by' }) + public createdBy: number + + @Column({ type: 'int', name: 'updated_by' }) + public updatedBy: number + + @Column({ type: 'int', name: 'deleted_by' }) + public deletedBy?: number +} \ No newline at end of file diff --git a/src/datasources/entities/field.entity.ts b/src/datasources/entities/field.entity.ts index d3fa31a..6a81f23 100644 --- a/src/datasources/entities/field.entity.ts +++ b/src/datasources/entities/field.entity.ts @@ -3,9 +3,9 @@ import { Column, CreateDateColumn, UpdateDateColumn, - PrimaryGeneratedColumn, -} from 'typeorm' -import { FieldType } from "../../entities/field"; + PrimaryGeneratedColumn +} from "typeorm"; +import {FieldType, EnumValue} from "../dto/datasourceV2.dto"; @Entity({ name: 'datasource_fields' }) export class DatasourceField { @@ -13,19 +13,19 @@ export class DatasourceField { id: string @Column({ type: 'int', name: 'account_id' }) - accountId: number + accountId?: number @Column({ type: 'int', name: 'version' }) - version: number + version?: number @Column({ type: 'varchar', name: 'alias' }) alias: string @Column({ type: 'varchar', name: 'datasource_alias' }) - datasourceAlias: string + datasourceAlias?: string @Column({ type: 'bigint', name: 'datasource_id' }) - datasourceId: string + datasourceId?: string @Column({ type: 'varchar', name: 'type' }) type: FieldType @@ -34,13 +34,13 @@ export class DatasourceField { title: string @Column({ type: 'boolean', name: 'searchable' }) - searchable: boolean + searchable?: boolean @Column({ type: 'boolean', name: 'filterable' }) - filterable: boolean + filterable?: boolean @Column({ type: 'boolean', name: 'sortable' }) - sortable: boolean + sortable?: boolean @Column({ type: 'boolean', name: 'is_multiple' })
isMultiple?: boolean @@ -52,43 +52,45 @@ export class DatasourceField { datasourceReference?: string @Column({ type: 'boolean', name: 'autoincrement' }) - autoincrement: boolean + autoincrement?: boolean - @Column({ type: 'boolean', name: 'required' }) - required: boolean + @Column({ type: 'boolean', name: 'nullable' }) + isNullable?: boolean @Column({ type: 'int', name: 'precision' }) - precision: number + precision?: number @Column({ type: 'varchar', name: 'format' }) - format: string + format?: string @Column({ type: 'jsonb', name: 'enum_values' }) - enumValues: object[] + enumValues?: EnumValue[] @CreateDateColumn({ type: 'timestamp', name: 'created_at', default: () => 'CURRENT_TIMESTAMP(3)', }) - public createdAt: Date + public createdAt?: Date @UpdateDateColumn({ type: 'timestamp', name: 'updated_at', default: () => 'CURRENT_TIMESTAMP(3)', }) - public updatedAt: Date + public updatedAt?: Date @Column({ type: 'timestamp', name: 'deleted_at' }) public deletedAt?: Date @Column({ type: 'int', name: 'created_by' }) - public createdBy: number + public createdBy?: number @Column({ type: 'int', name: 'updated_by' }) - public updatedBy: number + public updatedBy?: number @Column({ type: 'int', name: 'deleted_by' }) public deletedBy?: number + + public isSystem?: boolean } \ No newline at end of file diff --git a/src/entities/field.ts b/src/entities/field.ts index 449cde8..419538a 100644 --- a/src/entities/field.ts +++ b/src/entities/field.ts @@ -59,6 +59,7 @@ export interface EnumValuesInterface { } export interface FieldConfigInterface { + id: string title: string // Using in table and editor titles alias: string // Using in calculations type: FieldType diff --git a/src/flake-id.ts b/src/flake-id.ts index 6d5797b..e19e2db 100644 --- a/src/flake-id.ts +++ b/src/flake-id.ts @@ -63,3 +63,7 @@ export class FlakeId { return id.readBigInt64BE(0) } } + +export function flakeId(id?) 
{ + return (new FlakeId(id)).generateId() + } diff --git a/src/functions/functions.controller.ts b/src/functions/functions.controller.ts index 5927ff3..ad0ecef 100644 --- a/src/functions/functions.controller.ts +++ b/src/functions/functions.controller.ts @@ -4,11 +4,14 @@ import { Body, Param, HttpException, - HttpStatus, -} from '@nestjs/common' + HttpStatus, Req, UseGuards +} from "@nestjs/common"; import { FunctionsService } from './functions.service' import { ApiOperation } from '@nestjs/swagger' import { RunScriptDto } from "./dto/call-function.dto"; +import { Request } from "express"; +import { Context } from "../entities/context"; +import { JwtAuthGuard } from "../auth/jwt-auth.guard"; @Controller('functions') export class FunctionsController { @@ -38,17 +41,19 @@ export class FunctionsController { } } + @UseGuards(JwtAuthGuard) @Post('script/run') @ApiOperation({ summary: 'Call a script with context' }) - async runScript(@Body() body: RunScriptDto) { + async runScript(@Req() req: Request, + @Body() body: RunScriptDto) { try { + console.log(this.getContext(req)) let res = await this.functionsService.runScript({ - context: body.context, + context: Object.assign(this.getContext(req), body.context), script: body.script, room: body.room }) - console.log(res) return { success: true, data: res, @@ -60,4 +65,11 @@ export class FunctionsController { } } } + + getContext(req: Request) : Context { + return { + accountId: req['accountId'], + userId: req['userId'], + } + } } diff --git a/src/functions/functions.service.ts b/src/functions/functions.service.ts index 098848c..a9f67d2 100644 --- a/src/functions/functions.service.ts +++ b/src/functions/functions.service.ts @@ -9,24 +9,25 @@ import { Context } from '../entities/context' import * as process from 'process' import { DataSourcesService } from '../datasources/datasources.service' import { AggregationsService } from '../aggregations/aggregations.service' -import * as Sentry from '@sentry/node' import { UsersService } from '../users/users.service' -import { RoomsService } from "../rooms/rooms.service"; import { VariablesService } from "../variables/variables.service"; +import {DataSourceV2Service } from "../datasources/datasourceV2.service"; +import { EventEmitter2 } from "@nestjs/event-emitter"; @Injectable() export class FunctionsService { constructor( @Inject(forwardRef(() => DataSourcesService)) private dataSourcesService: DataSourcesService, + private dataSourceV2Service: DataSourceV2Service, @InjectRepository(ConfigItem) private configRepository: Repository<ConfigItem>, @InjectDataSource('default') private datasource: DataSource, private userService: UsersService, private aggService: AggregationsService, - private rooms: RoomsService, - private variables: VariablesService + private variables: VariablesService, + private eventEmitter: EventEmitter2 ) {} async getByAlias(alias: string) { @@ -106,6 +107,7 @@ export class FunctionsService { ) const usr = new UserScriptHelper(this.userService, params.context) const vars = new VariablesHelper(this.variables, params.context) + const ds2 = new DataSourceV2ScriptHelper(this.dataSourceV2Service, params.context) const vm = new NodeVM({ timeout: 5000, @@ -115,6 +117,7 @@ export class FunctionsService { sandbox: { ctx: params.context, dataSources: dsHelper, + datasource: ds2, request: requestHelper, utilities: utils, aggregations: agg, @@ -125,12 +128,26 @@ export class FunctionsService { if(params.room) { vm.on('console.log', (...args) => { - this.rooms.logToRoom({ + console.log("<<<< console.log",
this.eventEmitter) + + this.eventEmitter.emit(`functions.logs`, { room: params.room, level: "log", message: args }) }) + + vm.on('console.error', (...args) => { + console.log("<<<< console.error", this.eventEmitter) + + this.eventEmitter.emit(`functions.logs`, { + room: params.room, + level: "error", + message: args + }) + }) + + } if (!!params.vmConsole) vm.on('console.log', params.vmConsole) @@ -140,26 +157,26 @@ export class FunctionsService { let res = null try { res = await vm.run(params.script) + return res instanceof Promise ? await res : res } catch (e) { console.error(`Run script error: `, e) if (!!params.vmConsole) params.vmConsole(e.toString()) if(params.room) { - this.rooms.logToRoom({ + this.eventEmitter.emit(`functions.logs`, { room: params.room, level: "error", message: e.toString() }) } - Sentry.captureException(e) throw `${e.toString()}` } finally { if (!!params.vmConsole) params.vmConsole('Function finished') process.off('uncaughtException', uncaughtException) } - let rooms = this.rooms + let ee = this.eventEmitter function uncaughtException(err) { console.error('Asynchronous error caught.', err.toString()) @@ -168,17 +185,13 @@ export class FunctionsService { } if(params.room) { - rooms.logToRoom({ + ee.emit(`functions.logs`, { room: params.room, level: "error", message: err.toString() }) } - - Sentry.captureException(err) } - - return res instanceof Promise ? await res : res } } @@ -198,7 +211,29 @@ class DataSourcesScriptHelper { readonly context: Context async getByAlias(alias: string) { - return await this.dataSourcesService.getByAlias(alias, this.context) + console.log('getByAliasV2', alias) + try { + return await this.dataSourcesService.getByAlias(alias, this.context) + } catch (e) { + console.error(e) + } + + + } +} + +class DataSourceV2ScriptHelper { + constructor(dataSourcesService: DataSourceV2Service, context: Context) { + this.service = dataSourcesService + this.context = context + } + + private readonly service: DataSourceV2Service + private readonly context: Context + + async getByAlias(alias: string) { + console.log('getByAliasV2', alias) + return await this.service.getDataSource(alias, this.context) } } @@ -221,7 +256,6 @@ class AggregationScriptHelper { return await this.aggService.conduct(params, this.context) } catch (e) { console.error(e) - Sentry.captureException(e) if (this.vmConsole) this.vmConsole(e.toString()) } } @@ -231,7 +265,6 @@ class AggregationScriptHelper { return await this.aggService.reverse(params, this.context) } catch (e) { console.error(e) - Sentry.captureException(e) if (this.vmConsole) this.vmConsole(e.toString()) } } @@ -299,7 +332,6 @@ class RequestScriptHelper { return res.data } catch (e) { console.error(e) - Sentry.captureException(e) throw new Error('Error while execution script: ' + e.toString()) } } diff --git a/src/pages/dto/pages.dto.ts b/src/pages/dto/pages.dto.ts index 7d7aee9..6017b6a 100644 --- a/src/pages/dto/pages.dto.ts +++ b/src/pages/dto/pages.dto.ts @@ -5,6 +5,9 @@ export class ResponseDto { } export type AccessType = 'all' | 'roles' | 'nobody' +export enum Access { + 'canView' = 'canView' +} export class PageInterface { id: string @@ -13,8 +16,10 @@ export class PageInterface { elements: ElementInterface[] headerActions: PageActionInterface[] permissions: { - access: AccessType, - accessRoles?: string[], + [key in Access]: { + type: AccessType + roles?: string[] + } } type: 'edit' | 'list' | 'dashboard' | 'select' datasets: { diff --git a/src/pages/pages.service.ts b/src/pages/pages.service.ts index 
56cc22a..ece5f2e 100644 --- a/src/pages/pages.service.ts +++ b/src/pages/pages.service.ts @@ -59,6 +59,11 @@ export class PagesService { delete n.createdAt delete n.createdBy delete n.accountId + delete n.deletedAt + delete n.deletedBy + n.version++ + + console.log(n) let rep = this.datasource.getRepository(PageEntity) await rep diff --git a/src/rooms/rooms.gateway.ts b/src/rooms/rooms.gateway.ts index b74fb8e..284d077 100644 --- a/src/rooms/rooms.gateway.ts +++ b/src/rooms/rooms.gateway.ts @@ -27,10 +27,12 @@ export class RoomsGateway @SubscribeMessage('rooms/join') join(@MessageBody() rooms: string[], @ConnectedSocket() client: Socket) { + console.log("rooms/join") client.join(rooms) + console.log(client.rooms) } - @SubscribeMessage('rooms/join') + @SubscribeMessage('rooms/leave') leave(@MessageBody() room: string, @ConnectedSocket() client: Socket) { client.leave(room) } @@ -53,6 +55,7 @@ export class RoomsGateway } } + console.log(client.rooms) //await client.join('updates') } diff --git a/src/rooms/rooms.module.ts b/src/rooms/rooms.module.ts index 81c1079..db0575b 100644 --- a/src/rooms/rooms.module.ts +++ b/src/rooms/rooms.module.ts @@ -2,10 +2,11 @@ import { Module } from '@nestjs/common' import { RoomsGateway } from './rooms.gateway' import { RoomsService } from './rooms.service' import { AuthModule } from '../auth/auth.module' +import { UsersModule } from "../users/users.module"; @Module({ providers: [RoomsGateway, RoomsService], - imports: [AuthModule], + imports: [AuthModule, UsersModule], exports: [RoomsService], }) export class RoomsModule {} diff --git a/src/rooms/rooms.service.ts b/src/rooms/rooms.service.ts index 91f2372..e5bea98 100644 --- a/src/rooms/rooms.service.ts +++ b/src/rooms/rooms.service.ts @@ -1,6 +1,7 @@ import { Injectable } from '@nestjs/common' import { Server } from 'socket.io' import { Context } from '../entities/context' +import { OnEvent } from "@nestjs/event-emitter"; export class UpdateMessage { type: 'data' | 'config' @@ -33,16 +34,11 @@ export class RoomsService { this.server.emit('updates', message) } - logToRoom(params: { - room: string, - level: 'log' | 'error', - message: any[] - payload?: any - - }) { - this.server.emit(params.room, { - level: params.level, - message: params.message + @OnEvent('functions.logs', {async: false}) + onFunctionLogs(data) { + this.server.emit(data.room, { + level: data.level, + message: data.message }) } }
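
Taken together, the FunctionsService and RoomsService changes above mean that a script submitted to POST /functions/script/run (now behind JwtAuthGuard) can reach the V2 datasources through the new `datasource` sandbox helper, and, provided the request body includes a room, its console output is relayed to that socket room via the `functions.logs` event instead of a direct RoomsService call. A minimal sketch of such a script, assuming a datasource with the hypothetical alias 'orders' exists and that getMany accepts a limit option (both are illustrative and not taken from this diff):

    // Runs inside the NodeVM sandbox built by FunctionsService.runScript.
    // 'datasource' is the DataSourceV2ScriptHelper registered in the sandbox above.
    ;(async () => {
        // hypothetical alias; resolves through DataSourceV2Service.getDataSource
        const orders = await datasource.getByAlias('orders')
        // illustrative options object; the exact GetDataManyRequestDto fields are defined elsewhere in this repo
        const page = await orders.getMany({ limit: 10 })
        // forwarded as a 'functions.logs' event and re-emitted by RoomsService.onFunctionLogs to the caller's room
        console.log('orders page', page)
    })()

A client that has joined the room with the 'rooms/join' message receives each log entry as a { level, message } payload on the room event, matching what logToRoom used to send.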