Merge pull request #320 from Sanketika-Obsrv/dataset-aliasing
fix: #OBS-I452 Dataset alias attach and detach functionality
HarishGangula authored Jan 20, 2025
2 parents 7aff2bb + 39af0da commit 060b941
Showing 8 changed files with 168 additions and 8 deletions.
73 changes: 73 additions & 0 deletions api-service/src/controllers/DatasetAlias/DatasetAlias.ts
@@ -0,0 +1,73 @@
import { Request, Response } from "express";
import { schemaValidation } from "../../services/ValidationService";
import DatasetAliasSchema from "./DatasetAliasValidationSchema.json"
import { obsrvError } from "../../types/ObsrvError";
import _ from "lodash";
import { datasetService } from "../../services/DatasetService";
import { Op } from "sequelize";
import { Dataset } from "../../models/Dataset";
import { ResponseHandler } from "../../helpers/ResponseHandler";
import httpStatus from "http-status";


const validateRequest = async (req: Request) => {

    const isRequestValid: Record<string, any> = schemaValidation(req.body, DatasetAliasSchema)
    if (!isRequestValid.isValid) {
        throw obsrvError("", "DATASET_ALIAS_INPUT_INVALID", isRequestValid.message, "BAD_REQUEST", 400)
    }

    const { dataset_id, action, alias_name: alias } = _.get(req, ["body", "request"])
    let datasetAlias = alias
    const dataset = await datasetService.getDataset(dataset_id, ["id", "name", "alias"], true);
    if (_.isEmpty(dataset)) {
        throw obsrvError(dataset_id, "DATASET_NOT_EXISTS", `Dataset does not exist with id:${dataset_id}`, "NOT_FOUND", 404);
    }

    if (action === "attach") {
        if (_.get(dataset, "alias")) {
            throw obsrvError(dataset_id, "DATASET_ALIAS_EXISTS", `Dataset already has alias '${_.get(dataset, "alias")}' associated with it. Please detach the existing alias and try again`, "BAD_REQUEST", 400);
        }

        const datasetList = await datasetService.findDatasets({ [Op.or]: [{ dataset_id: alias }, { name: alias }, { alias }] }, ["id"]);
        const draftDatasetList = await datasetService.findDraftDatasets({ [Op.or]: [{ dataset_id: alias }, { name: alias }] }, ["id"]);
        if (!(_.isEmpty(datasetList) && _.isEmpty(draftDatasetList))) {
            throw obsrvError(dataset_id, "DATASET_ALIAS_NOT_UNIQUE", `Dataset alias must be unique. The alias '${alias}' cannot be the same as the dataset id, dataset name or alias name of any other dataset.`, "BAD_REQUEST", 400);
        }
    }

    if (action === "detach") {
        const existingAliasName = _.get(dataset, "alias")
        if (!existingAliasName) {
            throw obsrvError(dataset_id, "DATASET_ALIAS_NOT_EXISTS", `Dataset '${dataset_id}' does not have any alias associated with it`, "BAD_REQUEST", 400);
        }
        datasetAlias = existingAliasName;
    }

    return datasetAlias
}

const datasetAlias = async (req: Request, res: Response) => {
    const dataset_alias = await validateRequest(req)
    const { dataset_id, action, alias_name } = _.get(req, ["body", "request"])
    const userID = (req as any)?.userID;
    switch (action) {
        case "attach":
            await attachAlias(dataset_id, alias_name, userID);
            break;
        case "detach":
            await detachAlias(dataset_id, userID);
            break;
    }
    ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { message: `Dataset alias name '${dataset_alias}' ${action}ed successfully`, dataset_id } });
}

const attachAlias = async (dataset_id: string, alias_name: string, userID: string) => {
    await Dataset.update({ alias: alias_name, updated_by: userID }, { where: { id: dataset_id } });
}

const detachAlias = async (dataset_id: string, userID: string) => {
    await Dataset.update({ alias: null, updated_by: userID }, { where: { id: dataset_id } });
}

export default datasetAlias;
77 changes: 77 additions & 0 deletions api-service/src/controllers/DatasetAlias/DatasetAliasValidationSchema.json
@@ -0,0 +1,77 @@
{
    "type": "object",
    "properties": {
        "id": {
            "type": "string",
            "enum": [
                "api.datasets.alias"
            ]
        },
        "ver": {
            "type": "string"
        },
        "ts": {
            "type": "string"
        },
        "params": {
            "type": "object",
            "properties": {
                "msgid": {
                    "type": "string"
                }
            },
            "required": [
                "msgid"
            ],
            "additionalProperties": false
        },
        "request": {
            "type": "object",
            "properties": {
                "action": {
                    "type": "string",
                    "enum": [
                        "attach",
                        "detach"
                    ]
                },
                "dataset_id": {
                    "type": "string"
                },
                "alias_name": {
                    "type": "string"
                }
            },
            "if": {
                "properties": {
                    "action": {
                        "const": "attach"
                    }
                }
            },
            "then": {
                "properties": {
                    "alias_name": {
                        "minLength": 1
                    }
                },
                "required": [
                    "alias_name"
                ]
            },
            "required": [
                "action",
                "dataset_id"
            ],
            "additionalProperties": false
        }
    },
    "required": [
        "id",
        "ver",
        "ts",
        "params",
        "request"
    ],
    "additionalProperties": false
}
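
For reference, a minimal sketch of request bodies that should satisfy this schema. The ver, ts, msgid and dataset_id values below are illustrative assumptions, not taken from this change:

// Hypothetical payloads for the new alias API; all concrete values are examples.
const attachRequest = {
    id: "api.datasets.alias",
    ver: "v2",                          // any string passes the schema
    ts: new Date().toISOString(),
    params: { msgid: "4a7f14c3-0000-0000-0000-000000000001" },
    request: {
        action: "attach",
        dataset_id: "telemetry-events", // must be an existing live dataset
        alias_name: "telemetry"         // required with minLength 1 when action is "attach"
    }
};

const detachRequest = {
    id: "api.datasets.alias",
    ver: "v2",
    ts: new Date().toISOString(),
    params: { msgid: "4a7f14c3-0000-0000-0000-000000000002" },
    request: {
        action: "detach",
        dataset_id: "telemetry-events"  // no alias_name needed; the stored alias is removed
    }
};

Note that a detach request omits alias_name: validateRequest in DatasetAlias.ts resolves the stored alias so the success message can echo the name that was removed.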
2 changes: 2 additions & 0 deletions api-service/src/controllers/DatasetExport/DatasetExport.ts
@@ -25,6 +25,8 @@ const validateDataset = async (req: Request) => {
        const migratedConfigs = await datasetService.migrateDatasetV1(dataset_id, datasetRecord)
        datasetRecord = { ...datasetRecord, ...migratedConfigs }
    }

    datasetRecord = _.omit(datasetRecord, "alias")
    return datasetRecord;
}

7 changes: 4 additions & 3 deletions api-service/src/controllers/DatasetRead/DatasetRead.ts
@@ -6,6 +6,7 @@ import { DatasetDraft } from "../../models/DatasetDraft";
import { datasetService, getV1Connectors } from "../../services/DatasetService";
import { obsrvError } from "../../types/ObsrvError";
import { cipherService } from "../../services/CipherService";
import { Dataset } from "../../models/Dataset";

export const apiId = "api.datasets.read";
export const errorCode = "DATASET_READ_FAILURE"
@@ -16,9 +17,9 @@ export const defaultFields = ["dataset_id", "name", "type", "status", "tags", "v
const validateRequest = (req: Request) => {

    const { dataset_id } = req.params;
-   const { fields } = req.query;
+   const { fields, mode } = req.query;
    const fieldValues = fields ? _.split(fields as string, ",") : [];
-   const invalidFields = _.difference(fieldValues, Object.keys(DatasetDraft.getAttributes()));
+   const invalidFields = mode === "edit" ? _.difference(fieldValues, Object.keys(DatasetDraft.getAttributes())) : _.difference(fieldValues, Object.keys(Dataset.getAttributes()));
    if (!_.isEmpty(invalidFields)) {
        throw obsrvError(dataset_id, "DATASET_INVALID_FIELDS", `The specified fields [${invalidFields}] in the dataset cannot be found.`, "BAD_REQUEST", 400);
    }
@@ -37,7 +38,7 @@ const datasetRead = async (req: Request, res: Response) => {
        throw obsrvError(dataset_id, "DATASET_NOT_FOUND", `Dataset with the given dataset_id:${dataset_id} not found`, "NOT_FOUND", 404);
    }
    if (dataset.connectors_config) {
-       dataset.connectors_config = processConnectorsConfig(dataset.connectors_config);
+       dataset.connectors_config = processConnectorsConfig(dataset.connectors_config);
    }
    ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: dataset });
}
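
The net effect of the new mode query parameter in DatasetRead: requested fields are validated against the draft model only when mode is "edit"; otherwise the live Dataset model, which now includes alias, is used. A standalone sketch with stand-in attribute lists (the real sets come from the sequelize models):

import _ from "lodash";

// Stand-ins for Object.keys(Dataset.getAttributes()) and
// Object.keys(DatasetDraft.getAttributes()) respectively.
const liveAttributes = ["dataset_id", "name", "status", "alias"];
const draftAttributes = ["dataset_id", "name", "status"];

const fieldValues = ["name", "alias"];
const mode: string = "live";

const invalidFields = mode === "edit"
    ? _.difference(fieldValues, draftAttributes)
    : _.difference(fieldValues, liveAttributes);

console.log(invalidFields); // [] — alias is readable on live datasets;
                            // with mode === "edit" it would be ["alias"] and raise DATASET_INVALID_FIELDS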
3 changes: 2 additions & 1 deletion api-service/src/middlewares/userPermissions.json
@@ -48,7 +48,8 @@
            "api.datasets.update",
            "api.datasets.import",
            "api.datasets.copy",
-           "api.datasets.dataschema"
+           "api.datasets.dataschema",
+           "api.datasets.alias"
        ],
        "data": [
            "api.data.in",
4 changes: 4 additions & 0 deletions api-service/src/models/Dataset.ts
@@ -75,6 +75,10 @@ export const Dataset = sequelize.define("datasets", {
    entry_topic: {
        type: DataTypes.STRING,
        allowNull: false
    },
    alias: {
        type: DataTypes.STRING,
        allowNull: true
    }
}, {
    tableName: "datasets",
2 changes: 2 additions & 0 deletions api-service/src/routes/Router.ts
@@ -34,6 +34,7 @@ import connectorRegisterController from "../controllers/ConnectorRegister/Connec
import dataMetrics from "../controllers/DataMetrics/DataMetricsController";
import datasetMetrics from "../controllers/DatasetMetrics/DatasetMetricsController";
import { dataAnalyzePII } from "../controllers/DataAnalyzePII/DataAnalyzePIIController";
import datasetAlias from "../controllers/DatasetAlias/DatasetAlias";

export const router = express.Router();

@@ -62,6 +63,7 @@ router.post("/connectors/list", setDataToRequestObject("api.connectors.list"), o
router.get("/connectors/read/:id", setDataToRequestObject("api.connectors.read"), onRequest({entity: Entity.Management }), telemetryAuditStart({action: telemetryActions.readConnectors, operationType: OperationType.GET}), checkRBAC.handler(), ConnectorsRead);
router.post("/datasets/import", setDataToRequestObject("api.datasets.import"), onRequest({ entity: Entity.Management }), checkRBAC.handler(), DatasetImport);
router.post("/connector/register", setDataToRequestObject("api.connector.register"), onRequest({ entity: Entity.Management }), connectorRegisterController);
router.post("/datasets/alias", setDataToRequestObject("api.datasets.alias"), onRequest({ entity: Entity.Management }), checkRBAC.handler(), datasetAlias);
router.post("/data/analyze/pii", setDataToRequestObject("api.data.analyze.pii"), onRequest({ entity: Entity.Management }),checkRBAC.handler(), dataAnalyzePII);
//Wrapper Service
router.post("/obsrv/data/sql-query", setDataToRequestObject("api.obsrv.data.sql-query"), onRequest({ entity: Entity.Data_out }), checkRBAC.handler(), sqlQuery);
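
A hedged sketch of exercising the new route end to end; the base URL, the /v2 prefix and the bearer token are assumptions about the deployment, not part of this change:

// Node 18+ (global fetch). Adjust host, prefix and auth for your setup.
(async () => {
    const res = await fetch("http://localhost:3000/v2/datasets/alias", {
        method: "POST",
        headers: {
            "Content-Type": "application/json",
            "Authorization": "Bearer <token>"   // the route is guarded by checkRBAC.handler()
        },
        body: JSON.stringify({
            id: "api.datasets.alias",
            ver: "v2",
            ts: new Date().toISOString(),
            params: { msgid: "4a7f14c3-0000-0000-0000-000000000003" },
            request: { action: "attach", dataset_id: "telemetry-events", alias_name: "telemetry" }
        })
    });
    console.log(res.status, await res.json()); // expect 200 and "... 'telemetry' attached successfully"
})();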
8 changes: 4 additions & 4 deletions api-service/src/services/DatasetService.ts
@@ -269,7 +269,7 @@

        const transaction = await sequelize.transaction();
        try {
-            await Dataset.update({ status: DatasetStatus.Retired, updated_by: updatedBy }, { where: { id: dataset.id }, transaction });
+            await Dataset.update({ status: DatasetStatus.Retired, updated_by: updatedBy, alias: null }, { where: { id: dataset.id }, transaction });
            await DatasetSourceConfig.update({ status: DatasetStatus.Retired, updated_by: updatedBy }, { where: { dataset_id: dataset.id }, transaction });
            await Datasource.update({ status: DatasetStatus.Retired, updated_by: updatedBy }, { where: { dataset_id: dataset.id }, transaction });
            await DatasetTransformations.update({ status: DatasetStatus.Retired, updated_by: updatedBy }, { where: { dataset_id: dataset.id }, transaction });
@@ -332,7 +332,7 @@

    private createDruidDataSource = async (draftDataset: Record<string, any>, transaction: Transaction) => {

-        const {created_by, updated_by} = draftDataset;
+        const { created_by, updated_by } = draftDataset;
        const allFields = await tableGenerator.getAllFields(draftDataset, "druid");
        const draftDatasource = this.createDraftDatasource(draftDataset, "druid");
        const ingestionSpec = tableGenerator.getDruidIngestionSpec(draftDataset, allFields, draftDatasource.datasource_ref);
@@ -344,7 +344,7 @@

    private createHudiDataSource = async (draftDataset: Record<string, any>, transaction: Transaction) => {

-        const {created_by, updated_by} = draftDataset;
+        const { created_by, updated_by } = draftDataset;
        const allFields = await tableGenerator.getAllFieldsHudi(draftDataset, "datalake");
        const draftDatasource = this.createDraftDatasource(draftDataset, "datalake");
        const ingestionSpec = tableGenerator.getHudiIngestionSpecForCreate(draftDataset, allFields, draftDatasource.datasource_ref);
@@ -356,7 +356,7 @@

    private updateHudiDataSource = async (draftDataset: Record<string, any>, transaction: Transaction) => {

-        const {created_by, updated_by} = draftDataset;
+        const { created_by, updated_by } = draftDataset;
        const allFields = await tableGenerator.getAllFieldsHudi(draftDataset, "datalake");
        const draftDatasource = this.createDraftDatasource(draftDataset, "datalake");
        const dsId = _.join([draftDataset.dataset_id, "events", "datalake"], "_")
