migration: api-depot postgres #126

Open
wants to merge 9 commits into master
174 changes: 174 additions & 0 deletions src/lib/types/api-depot.types.ts
@@ -0,0 +1,174 @@
// HABILITATION

export enum StatusHabilitationEnum {
Contributor

Couldn't we export these types via OpenAPI, like for signalement and mes-adresses? I can see they are duplicated in quite a few places.
It would be simpler to maintain.

Contributor Author

I'd like to, but as a second step.
We could do it for api-depot and the moissonneur in nearly all of our components.
For now I wanted to keep this migration as small as possible (a sketch of the OpenAPI approach is given after the types diff below).

ACCEPTED = 'accepted',
PENDING = 'pending',
REJECTED = 'rejected',
}

export enum TypeStrategyEnum {
EMAIL = 'email',
FRANCECONNECT = 'franceconnect',
INTERNAL = 'internal',
}

export type Mandat = {
nomMarital: string;
nomNaissance: string;
prenom: string;
};

export type Strategy = {
type: TypeStrategyEnum;
// EMAIL
pinCode?: string;
pinCodeExpiration?: Date | null;
createdAt?: Date | null;
remainingAttempts?: number;
// FRANCECONNECT
mandat?: Mandat;
authenticationError?: string;
};

export type Habilitation = {
id: string;
clientId?: string;
codeCommune: string;
emailCommune: string;
franceconnectAuthenticationUrl?: string;
status: StatusHabilitationEnum;
strategy?: Strategy | null;
expiresAt?: Date;
acceptedAt?: Date;
rejectedAt?: Date;
createdAt?: Date;
updatedAt?: Date;
};

// FILE

export enum TypeFileEnum {
BAL = 'bal',
}
export type File = {
id: string;
revisionId?: string;
size?: number;
hash?: string;
type?: TypeFileEnum;
createdAt?: Date;
};

export enum StatusRevisionEnum {
PENDING = 'pending',
PUBLISHED = 'published',
}

// REVISION

export type ParseError = {
type: string;
code: string;
message: string;
row: number;
};

export type Validation = {
valid: boolean;
validatorVersion?: string;
parseErrors?: ParseError[];
errors?: string[];
warnings?: string[];
infos?: string[];
rowsCount?: number;
};

export type Context = {
nomComplet?: string;
organisation?: string;
extras?: Record<string, any> | null;
};

export type PublicClient = {
id: string;
legacyId?: string;
nom: string;
mandataire: string;
chefDeFile?: string;
chefDeFileEmail?: string;
};

export type Revision = {
id: string;
clientId?: string;
codeCommune: string;
isReady: boolean;
isCurrent: boolean;
status: StatusRevisionEnum;
context?: Context;
validation?: Validation | null;
habilitation?: Habilitation | null;
files?: File[];
client?: PublicClient;
publishedAt?: Date;
createdAt: Date;
updatedAt: Date;
};

// CHEF DE FILE

export enum TypePerimeterEnum {
COMMUNE = 'commune',
DEPARTEMENT = 'departement',
EPCI = 'epci',
}

export type Perimeter = {
id?: string;
chefDeFileId?: string;
type: TypePerimeterEnum;
code: string;
};

export type ChefDeFile = {
id: string;
nom?: string;
email?: string;
isEmailPublic?: boolean;
isSignataireCharte?: boolean;
perimeters?: Perimeter[];
createdAt?: Date;
updatedAt?: Date;
};

// MANDATAIRE

export type Mandataire = {
id: string;
nom: string;
email: string;
createdAt: Date;
updatedAt: Date;
};

// CLIENT

export enum AuthorizationStrategyEnum {
INTERNAL = 'internal',
CHEF_DE_FILE = 'chef-de-file',
HABILITATION = 'habilitation',
}

export type Client = {
id: string;
mandataireId?: string;
chefDeFileId?: string;
legacyId: string;
nom: string;
isActive: boolean;
isRelaxMode: boolean;
token?: string;
authorizationStrategy: AuthorizationStrategyEnum;
createdAt: Date;
updatedAt: Date;
};
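
Following up on the review thread above: a minimal sketch of what the suggested OpenAPI-based type generation could look like, outside the scope of this PR. It assumes api-depot publishes an OpenAPI document (the spec URL below is a placeholder) and that its schema names match the ones used in this file; the openapi-typescript CLI would perform the generation.

// Minimal sketch of the reviewer's suggestion; not part of this PR.
// Assumptions: api-depot exposes an OpenAPI document (placeholder URL below)
// and defines schemas named Revision, Habilitation and PublicClient.
//
// Generation step (package.json script), using the openapi-typescript CLI:
//   "typegen:api-depot": "openapi-typescript https://example.org/api-depot/openapi.json -o src/lib/types/api-depot.generated.ts"
//
// Consumption: re-export the generated schemas under the names used in this PR,
// so call sites such as api_depot.service.ts keep a single import path.
import type { components } from './api-depot.generated';

export type Revision = components['schemas']['Revision'];
export type Habilitation = components['schemas']['Habilitation'];
export type PublicClient = components['schemas']['PublicClient'];

Generating the file this way would remove the hand-maintained duplicates mentioned in the review while leaving the rest of the migration diff unchanged.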
17 changes: 10 additions & 7 deletions src/modules/api_depot/api_depot.service.ts
@@ -6,6 +6,7 @@ import {
Revision,
StatusPublicationEnum,
} from '../revision/revision.entity';
import { Revision as RevisionApiDepot } from '../../lib/types/api-depot.types';
import { Organization } from '../organization/organization.entity';
import { AxiosError, AxiosRequestConfig, AxiosResponse } from 'axios';
import { catchError, firstValueFrom, of } from 'rxjs';
@@ -25,7 +26,9 @@ export class ApiDepotService {
);
}

private async getCurrentRevision(codeCommune: string) {
private async getCurrentRevision(
codeCommune: string,
): Promise<RevisionApiDepot> {
const url: string = `/communes/${codeCommune}/current-revision`;
const options: AxiosRequestConfig = { responseType: 'json' };
const { data: revision } = await firstValueFrom(
@@ -50,7 +53,7 @@
codeCommune: string,
extras: any,
organisation: string,
) {
): Promise<RevisionApiDepot> {
const url: string = `/communes/${codeCommune}/revisions`;
const options: AxiosRequestConfig = { responseType: 'json' };
const body = { context: { extras, organisation } };
@@ -102,7 +105,7 @@
}
}

private async publishRevision(revisionId: string) {
private async publishRevision(revisionId: string): Promise<RevisionApiDepot> {
const url: string = `/revisions/${revisionId}/publish`;

const { data }: AxiosResponse = await firstValueFrom(
@@ -131,11 +134,11 @@
if (
!options.force &&
currentPublishedRevision?.client &&
currentPublishedRevision?.client?.id !== this.API_DEPOT_CLIENT_ID
currentPublishedRevision?.client?.legacyId !== this.API_DEPOT_CLIENT_ID
) {
return {
status: StatusPublicationEnum.PROVIDED_BY_OTHER_CLIENT,
currentClientId: currentPublishedRevision.client._id,
currentClientId: currentPublishedRevision.client.id,
};
}
// CHECK WHETHER ANOTHER SOURCE IS ALREADY HARVESTING THIS COMMUNE
@@ -161,7 +164,7 @@
uniqueErrors: validation.uniqueErrors,
};
// CREATE A REVISION FOR THE COMMUNE
const { _id: revisionId } = await this.createRevision(
const { id: revisionId } = await this.createRevision(
codeCommune,
extras,
organization.name,
@@ -174,7 +177,7 @@
const publishedRevision = await this.publishRevision(revisionId);
return {
status: StatusPublicationEnum.PUBLISHED,
publishedRevisionId: publishedRevision._id,
publishedRevisionId: publishedRevision.id,
};
} catch (error) {
this.logger.error(
25 changes: 14 additions & 11 deletions src/modules/worker/tests/harvesting.spec.ts
@@ -204,6 +204,7 @@ describe('HARVESTING WORKER', () => {
};
expect(harvestRes).toMatchObject(harvestExpected);
});

it('First harvesting', async () => {
// CREATE ORGA
const orgaInit = {
@@ -239,14 +240,14 @@
const revisionId = new ObjectId().toHexString();
axiosMock
.onPost(`/communes/31591/revisions`)
.replyOnce(200, { _id: revisionId });
.replyOnce(200, { id: revisionId });
axiosMock.onPut(`/revisions/${revisionId}/files/bal`).replyOnce(200);
axiosMock
.onPost(`/revisions/${revisionId}/compute`)
.replyOnce(200, { validation: { valid: true } });
axiosMock
.onPost(`/revisions/${revisionId}/publish`)
.replyOnce(200, { _id: revisionId });
.replyOnce(200, { id: revisionId });
// RUN WORKER
await harvestingWorker.run();
// CHECK HARVEST
@@ -277,6 +278,7 @@
expect(revisionRes).toMatchObject(revisionExpected);
expect(revisionRes.createdAt).toBeInstanceOf(Date);
});

it('Harvesting with last harvest', async () => {
// CREATE ORGA
const orgaInit = {
@@ -317,16 +319,16 @@
const revisionId = new ObjectId().toHexString();
axiosMock
.onPost(`/communes/31591/revisions`)
.replyOnce(200, { _id: revisionId });
.replyOnce(200, { id: revisionId });
axiosMock.onPut(`/revisions/${revisionId}/files/bal`).replyOnce(200);
axiosMock
.onPost(`/revisions/${revisionId}/compute`)
.replyOnce(200, { validation: { valid: true } });
axiosMock
.onPost(`/revisions/${revisionId}/publish`)
.replyOnce(200, { _id: revisionId });
.replyOnce(200, { id: revisionId });
axiosMock.onGet(`/communes/31591/current-revision`).replyOnce(200, {
client: { _id: '_moissonneur-bal', id: 'moissonneur-bal' },
client: { id: 'id_moissonneur-bal', legacyId: 'moissonneur-bal' },
});
// RUN WORKER
await harvestingWorker.run();
@@ -360,6 +362,7 @@
expect(revisionRes).toMatchObject(revisionExpected);
expect(revisionRes.createdAt).toBeInstanceOf(Date);
});

it('Harvesting with last harvest (file no change)', async () => {
// CREATE ORGA
const orgaInit = {
@@ -497,7 +500,7 @@
axiosMock.onGet(url).replyOnce(200, readFile('1.3-valid.csv'));
// MOCK PUBLICATION API DEPOT
axiosMock.onGet(`/communes/31591/current-revision`).replyOnce(200, {
client: { _id: '_other-client', id: 'other-client' },
client: { id: 'id_other-client', legacyId: 'spec-other-client' },
});
// RUN WORKER
await harvestingWorker.run();
@@ -523,7 +526,7 @@
},
publication: {
status: StatusPublicationEnum.PROVIDED_BY_OTHER_CLIENT,
currentClientId: '_other-client',
currentClientId: 'id_other-client',
},
};
expect(revisionRes).toMatchObject(revisionExpected);
@@ -636,29 +639,29 @@
const revisionId = new ObjectId().toHexString();
axiosMock
.onPost(`/communes/31591/revisions`)
.replyOnce(200, { _id: revisionId });
.replyOnce(200, { id: revisionId });
axiosMock.onPut(`/revisions/${revisionId}/files/bal`).replyOnce(200);
axiosMock
.onPost(`/revisions/${revisionId}/compute`)
.replyOnce(200, { validation: { valid: true } });
axiosMock
.onPost(`/revisions/${revisionId}/publish`)
.replyOnce(200, { _id: revisionId });
.replyOnce(200, { id: revisionId });
// MOCK PUBLICATION API DEPOT
axiosMock
.onGet(`/communes/67482/current-revision`)
.replyOnce(404, 'Aucune révision connue pour cette commune');
const revisionId2 = new ObjectId().toHexString();
axiosMock
.onPost(`/communes/67482/revisions`)
.replyOnce(200, { _id: revisionId2 });
.replyOnce(200, { id: revisionId2 });
axiosMock.onPut(`/revisions/${revisionId2}/files/bal`).replyOnce(200);
axiosMock
.onPost(`/revisions/${revisionId2}/compute`)
.replyOnce(200, { validation: { valid: true } });
axiosMock
.onPost(`/revisions/${revisionId2}/publish`)
.replyOnce(200, { _id: revisionId2 });
.replyOnce(200, { id: revisionId2 });
// RUN WORKER
await harvestingWorker.run();
// CHECK HARVEST