diff --git a/shared/types/export/index.ts b/shared/types/export/index.ts index 76b56cb21..22c4262e2 100644 --- a/shared/types/export/index.ts +++ b/shared/types/export/index.ts @@ -1,3 +1,4 @@ export * from "./agreements"; export * from "./contributions"; export * from "./global"; +export * from "./prequalified"; diff --git a/shared/types/export/prequalified.ts b/shared/types/export/prequalified.ts new file mode 100644 index 000000000..204d428ab --- /dev/null +++ b/shared/types/export/prequalified.ts @@ -0,0 +1,11 @@ +import { DocumentElasticWithSource } from "./global"; + +type Prequalified = { + source: "prequalified"; + variants: string[]; +}; + +export type PrequalifiedElasticDocument = Omit< + DocumentElasticWithSource, + "slug" +>; diff --git a/targets/alert-cli/src/diff/shared/__tests__/getDocumentsWithRelations.test.ts b/targets/alert-cli/src/diff/shared/__tests__/getDocumentsWithRelations.test.ts index 71b190d76..d7ee7502e 100644 --- a/targets/alert-cli/src/diff/shared/__tests__/getDocumentsWithRelations.test.ts +++ b/targets/alert-cli/src/diff/shared/__tests__/getDocumentsWithRelations.test.ts @@ -3,23 +3,45 @@ import { describe, expect, it } from "@jest/globals"; import { getDocumentsWithRelations } from "../getDocumentsWithRelations"; import { SOURCES } from "@socialgouv/cdtn-sources"; -jest.mock("../getAllDocumentsBySource", () => ({ - getDocumentsWithRelationsBySource: async () => +jest.mock("../createDocumentsFetcher", () => ({ + createDocumentsFetcher: () => async () => Promise.resolve([ { - cdtnId: "1", - contentRelations: [ - { - document: { - initialId: "F2839", - }, + status: "fulfilled", + value: { + data: { + documents: [ + { + cdtnId: "1", + isPublished: true, + contentRelations: [ + { + position: 1, + document: { + initialId: "F2839", + slug: "slug1", + source: "contributions", + title: "contribution1", + }, + }, + ], + source: "themes", + title: "Handicap", + }, + ], }, - ], - source: "themes", - title: "Handicap", + }, }, + ]), +})); + +jest.mock("../fetchPrequalified", () => ({ + fetchPrequalified: async () => + Promise.resolve([ { cdtnId: "2", + isPublished: true, + source: "prequalified", contentRelations: [ { document: { @@ -32,11 +54,12 @@ jest.mock("../getAllDocumentsBySource", () => ({ }, }, ], - source: "prequalified", title: "procédure licenciement pour inaptitude", }, { cdtnId: "3", + isPublished: true, + source: "prequalified", contentRelations: [ { document: { @@ -44,7 +67,6 @@ jest.mock("../getAllDocumentsBySource", () => ({ }, }, ], - source: "prequalified", title: "complement-salaire pole emploi", }, ]), diff --git a/targets/alert-cli/src/diff/shared/createDocumentsFetcher.ts b/targets/alert-cli/src/diff/shared/createDocumentsFetcher.ts new file mode 100644 index 000000000..bdcc52383 --- /dev/null +++ b/targets/alert-cli/src/diff/shared/createDocumentsFetcher.ts @@ -0,0 +1,54 @@ +import { SourceValues } from "@socialgouv/cdtn-sources"; +import { + AllDocumentsBySourceResult, + AllDocumentsWithRelationBySourceResult, + CountDocumentsBySourceResult, + countDocumentsBySourceQuery, + getAllDocumentsBySourceQuery, +} from "./getDocumentQuery.gql"; +import { gqlClient } from "@shared/utils"; +import { batchPromises } from "../../utils/batch-promises"; + +const PAGE_SIZE = 100; +const JOB_CONCURENCY = 3; + +export const createDocumentsFetcher = + < + T extends + | AllDocumentsBySourceResult + | AllDocumentsWithRelationBySourceResult + >( + gqlRequest = getAllDocumentsBySourceQuery + ) => + async (source: SourceValues[]) => { + const countResult = await 
gqlClient() + .query(countDocumentsBySourceQuery, { + source, + }) + .toPromise(); + + if (countResult.error || !countResult.data) { + console.error(countResult.error && "no data received"); + throw new Error("getSources"); + } + + const { count } = countResult.data.documents_aggregate.aggregate; + + const pages = Array.from( + { length: Math.ceil(count / PAGE_SIZE) }, + (_, i) => i + ); + const documentResults = await batchPromises( + pages, + async (page) => + gqlClient() + .query(gqlRequest, { + limit: PAGE_SIZE, + offset: page * PAGE_SIZE, + source, + }) + .toPromise(), + JOB_CONCURENCY + ); + return documentResults; + }; diff --git a/targets/alert-cli/src/diff/shared/fetchPrequalified.ts b/targets/alert-cli/src/diff/shared/fetchPrequalified.ts new file mode 100644 index 000000000..1eadae1ec --- /dev/null +++ b/targets/alert-cli/src/diff/shared/fetchPrequalified.ts @@ -0,0 +1,41 @@ +import { gqlClient } from "@shared/utils"; +import { HasuraDocumentWithRelations } from "./getDocumentQuery.gql"; + +const fetchPrequalifiedQuery = ` +query fetch_prequalified { + search_prequalified { + cdtnId: id + title + contentRelations: documents(order_by: {order: asc}) { + position: order + document { + initialId: initial_id + slug + source + title + } + } + } +} +`; + +interface HasuraReturn { + search_prequalified: HasuraDocumentWithRelations[] | undefined; +} + +export async function fetchPrequalified(): Promise< + HasuraDocumentWithRelations[] | undefined +> { + const res = await gqlClient() + .query(fetchPrequalifiedQuery) + .toPromise(); + if (res.error) { + throw res.error; + } + + return res.data?.search_prequalified?.map((prequalif) => ({ + ...prequalif, + isPublished: true, + source: "prequalified", + })); +} diff --git a/targets/alert-cli/src/diff/shared/getAllDocumentsBySource.ts b/targets/alert-cli/src/diff/shared/getAllDocumentsBySource.ts index 4a6d7e91c..c8a98992e 100644 --- a/targets/alert-cli/src/diff/shared/getAllDocumentsBySource.ts +++ b/targets/alert-cli/src/diff/shared/getAllDocumentsBySource.ts @@ -1,64 +1,18 @@ -import { gqlClient } from "@shared/utils"; -import type { SourceValues } from "@socialgouv/cdtn-sources"; +import { SOURCES, type SourceValues } from "@socialgouv/cdtn-sources"; import memoizee from "memoizee"; -import { batchPromises } from "../../utils/batch-promises"; import type { AllDocumentsBySourceResult, AllDocumentsWithRelationBySourceResult, - CountDocumentsBySourceResult, HasuraDocumentForAlert, HasuraDocumentWithRelations, } from "./getDocumentQuery.gql"; import { - countDocumentsBySourceQuery, getAllDocumentsBySourceQuery, getAllDocumentsWithRelationsBySourceQuery, } from "./getDocumentQuery.gql"; - -const PAGE_SIZE = 100; -const JOB_CONCURENCY = 3; - -const createDocumentsFetcher = - < - T extends - | AllDocumentsBySourceResult - | AllDocumentsWithRelationBySourceResult - >( - gqlRequest = getAllDocumentsBySourceQuery - ) => - async (source: SourceValues[]) => { - const countResult = await gqlClient() - .query(countDocumentsBySourceQuery, { - source, - }) - .toPromise(); - - if (countResult.error || !countResult.data) { - console.error(countResult.error && "no data received"); - throw new Error("getSources"); - } - - const { count } = countResult.data.documents_aggregate.aggregate; - - const pages = Array.from( - { length: Math.ceil(count / PAGE_SIZE) }, - (_, i) => i - ); - const documentResults = await batchPromises( - pages, - async (page) => - gqlClient() - .query(gqlRequest, { - limit: PAGE_SIZE, - offset: page * PAGE_SIZE, - source, - }) - 
.toPromise(), - JOB_CONCURENCY - ); - return documentResults; - }; +import { fetchPrequalified } from "./fetchPrequalified"; +import { createDocumentsFetcher } from "./createDocumentsFetcher"; export async function _getDocumentsBySource( source: SourceValues[] @@ -87,12 +41,18 @@ export async function _getDocumentsWithRelationsBySource( getAllDocumentsWithRelationsBySourceQuery ); const documentResults = await fetchDocuments(source); - const documents = documentResults.flatMap((result) => { + let documents = documentResults.flatMap((result) => { if (result.status === "fulfilled" && result.value.data) { return result.value.data.documents; } return []; }); + if (source.includes(SOURCES.PREQUALIFIED)) { + const fetchedPrequalified = await fetchPrequalified(); + if (fetchedPrequalified) { + documents = documents.concat(fetchedPrequalified); + } + } return documents; } diff --git a/targets/alert-cli/src/diff/shared/getDocumentQuery.gql.ts b/targets/alert-cli/src/diff/shared/getDocumentQuery.gql.ts index ff122d488..ae4b2a89d 100644 --- a/targets/alert-cli/src/diff/shared/getDocumentQuery.gql.ts +++ b/targets/alert-cli/src/diff/shared/getDocumentQuery.gql.ts @@ -24,9 +24,9 @@ export type HasuraDocumentForAlert = Pick< cdtnId: string; }; -export type AllDocumentsBySourceResult = { +export interface AllDocumentsBySourceResult { documents: HasuraDocumentForAlert[]; -}; +} export const countDocumentsBySourceQuery = ` query coundDocumentsBySource($source:[String!]){ @@ -37,13 +37,13 @@ query coundDocumentsBySource($source:[String!]){ } }`; -export type CountDocumentsBySourceResult = { +export interface CountDocumentsBySourceResult { documents_aggregate: { aggregate: { count: number; }; }; -}; +} export const getAllDocumentsWithRelationsBySourceQuery = ` query($source: [String!], $limit:Int=10,$offset:Int=0 ) { @@ -70,6 +70,7 @@ query($source: [String!], $limit:Int=10,$offset:Int=0 ) { } } `; + export type HasuraDocumentWithRelations = Pick< HasuraDocument, "source" | "title" @@ -83,6 +84,7 @@ export type HasuraDocumentWithRelations = Pick< }; }[]; }; -export type AllDocumentsWithRelationBySourceResult = { + +export interface AllDocumentsWithRelationBySourceResult { documents: HasuraDocumentWithRelations[]; -}; +} diff --git a/targets/export-elasticsearch/src/ingester/cdtnDocuments.ts b/targets/export-elasticsearch/src/ingester/cdtnDocuments.ts index d7eee15ae..6bff5c8ea 100644 --- a/targets/export-elasticsearch/src/ingester/cdtnDocuments.ts +++ b/targets/export-elasticsearch/src/ingester/cdtnDocuments.ts @@ -32,6 +32,7 @@ import { generateAgreements } from "./agreements"; import { getGlossary } from "./common/fetchGlossary"; import { fetchThemes } from "./themes/fetchThemes"; import { updateExportEsStatusWithDocumentsCount } from "./exportStatus/updateExportEsStatusWithDocumentsCount"; +import { generatePrequalified } from "./prequalified"; import { generateEditorialContents } from "./informations/generate"; import { populateRelatedDocuments } from "./common/populateRelatedDocuments"; import { mergeRelatedDocumentsToEditorialContents } from "./informations/mergeRelatedDocumentsToEditorialContents"; @@ -362,32 +363,12 @@ export async function cdtnDocumentsGen( await updateDocs(SOURCES.HIGHLIGHTS, highlightsWithContrib); logger.info("=== PreQualified Request ==="); - const prequalified = await getDocumentBySourceWithRelation( - SOURCES.PREQUALIFIED, - getBreadcrumbs - ); - const prequalifiedWithContrib = prequalified.map((prequalif) => ({ - ...prequalif, - refs: prequalif.refs.map((ref) => { - if 
(!ref.description) { - const foundContrib = newGeneratedContributions.find( - (newGeneratedContribution) => { - return newGeneratedContribution.cdtnId === ref.cdtnId; - } - ); - return { - ...ref, - description: foundContrib?.description, - }; - } - return ref; - }), - })); + const prequalified = await generatePrequalified(); documentsCount = { ...documentsCount, - [SOURCES.PREQUALIFIED]: prequalifiedWithContrib.length, + [SOURCES.PREQUALIFIED]: prequalified.length, }; - await updateDocs(SOURCES.PREQUALIFIED, prequalifiedWithContrib); + await updateDocs(SOURCES.PREQUALIFIED, prequalified); logger.info("=== glossary ==="); documentsCount = { diff --git a/targets/export-elasticsearch/src/ingester/prequalified/__tests__/generatePrequalified.test.ts b/targets/export-elasticsearch/src/ingester/prequalified/__tests__/generatePrequalified.test.ts new file mode 100644 index 000000000..1600cd231 --- /dev/null +++ b/targets/export-elasticsearch/src/ingester/prequalified/__tests__/generatePrequalified.test.ts @@ -0,0 +1,86 @@ +import { generatePrequalified } from "../generatePrequalified"; + +jest.mock("../fetchPrequalified", () => ({ + fetchPrequalified: () => + Promise.resolve([ + { + id: "idPrequalified", + title: "titlePrequalified", + variants: ["prequalified1", "prequalified2"], + documents: [ + { + document: { + id: "idInformation", + cdtnId: "cdtnIdInformation", + title: "titleInformation", + slug: "slug-information", + source: "information", + text: "textInformation", + isPublished: true, + isSeachable: true, + description: "descriptionInformation", + document: {}, + }, + }, + { + document: { + id: "idContribution", + cdtnId: "cdtnIdContribution", + title: "titleContribution", + slug: "slug-contribution", + source: "contribution", + text: "textContribution", + isPublished: true, + isSeachable: true, + document: { + description: "descriptionContribution", + }, + }, + }, + ], + }, + ]), +})); + +describe("generatePrequalified", () => { + it("should return prequalified", async () => { + const result = await generatePrequalified(); + + expect(result).toEqual([ + { + breadcrumbs: [], + cdtnId: "idPrequalified", + excludeFromSearch: true, + id: "idPrequalified", + isPublished: true, + metaDescription: "titlePrequalified", + refs: [ + { + breadcrumbs: [], + cdtnId: "cdtnIdInformation", + description: "descriptionInformation", + id: "idInformation", + slug: "slug-information", + source: "information", + title: "titleInformation", + url: "", + }, + { + breadcrumbs: [], + cdtnId: "cdtnIdContribution", + description: "descriptionContribution", + id: "idContribution", + slug: "slug-contribution", + source: "contribution", + title: "titleContribution", + url: "", + }, + ], + source: "prequalified", + text: "titlePrequalified", + title: "titlePrequalified", + variants: ["prequalified1", "prequalified2"], + }, + ]); + }); +}); diff --git a/targets/export-elasticsearch/src/ingester/prequalified/fetchPrequalified.ts b/targets/export-elasticsearch/src/ingester/prequalified/fetchPrequalified.ts new file mode 100644 index 000000000..f4050e598 --- /dev/null +++ b/targets/export-elasticsearch/src/ingester/prequalified/fetchPrequalified.ts @@ -0,0 +1,70 @@ +import { gqlClient } from "@shared/utils"; +import { context } from "../context"; + +const fetchPrequalifiedQuery = ` +query fetch_prequalified { + search_prequalified { + id + title + variants + documents(order_by: {order: asc}) { + document { + id: initial_id + cdtnId: cdtn_id + title + slug + source + text + isPublished: is_published + isSearchable: 
is_searchable + description: meta_description + document + } + } + } + } +`; + +export interface FetchedPrequalified { + id: string; + title: string; + variants: string[]; + documents: { + document: { + id: string; + cdtnId: string; + title: string; + slug: string; + source: string; + text: string; + isPublished: boolean; + isSearchable: boolean; + description: string; + document: any; + }; + }[]; +} + +interface HasuraReturn { + search_prequalified: FetchedPrequalified[] | undefined; +} + +export async function fetchPrequalified(): Promise< + FetchedPrequalified[] | undefined +> { + const HASURA_GRAPHQL_ENDPOINT = + context.get("cdtnAdminEndpoint") || "http://localhost:8080/v1/graphql"; + const HASURA_GRAPHQL_ENDPOINT_SECRET = + context.get("cdtnAdminEndpointSecret") || "admin1"; + const res = await gqlClient({ + graphqlEndpoint: HASURA_GRAPHQL_ENDPOINT, + adminSecret: HASURA_GRAPHQL_ENDPOINT_SECRET, + }) + .query(fetchPrequalifiedQuery) + .toPromise(); + if (res.error) { + throw res.error; + } + + return res.data?.search_prequalified; +} diff --git a/targets/export-elasticsearch/src/ingester/prequalified/generatePrequalified.ts b/targets/export-elasticsearch/src/ingester/prequalified/generatePrequalified.ts new file mode 100644 index 000000000..22a38dd40 --- /dev/null +++ b/targets/export-elasticsearch/src/ingester/prequalified/generatePrequalified.ts @@ -0,0 +1,34 @@ +import { SOURCES } from "@socialgouv/cdtn-sources"; +import { fetchPrequalified } from "./fetchPrequalified"; +import { PrequalifiedElasticDocument } from "@shared/types"; + +export const generatePrequalified = async (): Promise< + PrequalifiedElasticDocument[] +> => { + const prequalified = await fetchPrequalified(); + if (!prequalified) { + return []; + } + return prequalified.map(({ variants, id, title, documents: refs }) => ({ + cdtnId: id, + id, + breadcrumbs: [], + excludeFromSearch: true, + isPublished: true, + metaDescription: title, + text: title, + title, + source: SOURCES.PREQUALIFIED, + variants, + refs: refs.map(({ document }) => ({ + id: document.id, + cdtnId: document.cdtnId, + slug: document.slug, + title: document.title, + source: document.source, + description: document.description || document.document.description, + breadcrumbs: [], + url: "", + })), + })); +}; diff --git a/targets/export-elasticsearch/src/ingester/prequalified/index.ts b/targets/export-elasticsearch/src/ingester/prequalified/index.ts new file mode 100644 index 000000000..f3b85570a --- /dev/null +++ b/targets/export-elasticsearch/src/ingester/prequalified/index.ts @@ -0,0 +1,2 @@ +export * from "./fetchPrequalified"; +export * from "./generatePrequalified"; diff --git a/targets/frontend/src/components/contributions/answers/AnswerForm.tsx b/targets/frontend/src/components/contributions/answers/AnswerForm.tsx index a122f98c1..bcab68eef 100644 --- a/targets/frontend/src/components/contributions/answers/AnswerForm.tsx +++ b/targets/frontend/src/components/contributions/answers/AnswerForm.tsx @@ -317,7 +317,6 @@ export const AnswerForm = ({ {!submitting && ( - + + )} + + + )} + + ); +}; diff --git a/targets/frontend/src/modules/prequalified/edition/PrequalifiedUpdate.tsx b/targets/frontend/src/modules/prequalified/edition/PrequalifiedUpdate.tsx new file mode 100644 index 000000000..59b27b6e2 --- /dev/null +++ b/targets/frontend/src/modules/prequalified/edition/PrequalifiedUpdate.tsx @@ -0,0 +1,71 @@ +import { Breadcrumb, BreadcrumbLink } from "src/components/utils"; +import { usePrequalifiedQuery } from "./prequalified.query"; +import { 
PrequalifiedForm } from "./PrequalifiedForm"; +import { SnackBar } from "src/components/utils/SnackBar"; +import { Prequalified } from "../type"; +import { usePrequalifiedUpdateMutation } from "./prequalifiedUpdate.mutation"; +import { useState } from "react"; +import { AlertColor } from "@mui/material"; +import { ConfirmModal } from "src/modules/common/components/modals/ConfirmModal"; +import { useDeletePrequalifiedMutation } from "./prequalifiedDelete.mutation"; +import { useRouter } from "next/router"; + +export const PrequalifiedEdition = ({ id }: { id: string }): JSX.Element => { + const router = useRouter(); + const [modalDelete, setModalDelete] = useState(false); + const onDelete = useDeletePrequalifiedMutation(); + const prequalified = usePrequalifiedQuery({ id }); + const updatePrequalified = usePrequalifiedUpdateMutation(); + const [snack, setSnack] = useState<{ + open: boolean; + severity?: AlertColor; + message?: string; + }>({ + open: false, + }); + + const onSubmit = async (data: Prequalified) => { + try { + await updatePrequalified(data); + setSnack({ + open: true, + severity: "success", + message: "La réponse a été modifiée", + }); + } catch (e: any) { + setSnack({ open: true, severity: "error", message: e.message }); + } + }; + + return ( + <> + + + <>Liste des requêtes préqualifiés + + + {prequalified && ( + { + setModalDelete(true); + }} + /> + )} + + setModalDelete(false)} + onCancel={() => setModalDelete(false)} + onValidate={async () => { + if (!prequalified?.id) return; + await onDelete(prequalified?.id); + router.push("/prequalified"); + }} + /> + + ); +}; diff --git a/targets/frontend/src/modules/prequalified/edition/index.ts b/targets/frontend/src/modules/prequalified/edition/index.ts new file mode 100644 index 000000000..3b9c76fc5 --- /dev/null +++ b/targets/frontend/src/modules/prequalified/edition/index.ts @@ -0,0 +1,2 @@ +export * from "./PrequalifiedUpdate"; +export * from "./PrequalifiedCreate"; diff --git a/targets/frontend/src/modules/prequalified/edition/prequalified.query.ts b/targets/frontend/src/modules/prequalified/edition/prequalified.query.ts new file mode 100644 index 000000000..cff9b57c7 --- /dev/null +++ b/targets/frontend/src/modules/prequalified/edition/prequalified.query.ts @@ -0,0 +1,58 @@ +import { useQuery } from "urql"; +import { Prequalified } from "../type"; +import { useMemo } from "react"; + +const prequalifiedQuery = ` +query get_prequalified_by_id($id: uuid) { + search_prequalified (where: {id: {_eq: $id}}) { + id + title + variants + documents(order_by: {order: asc}) { + documentId + prequalifiedId + order + document { + cdtnId: cdtn_id + title + source + slug + } + } + } + } +`; + +type QueryProps = { + id: string; +}; + +type QueryResult = { + search_prequalified: Prequalified[]; +}; + +export const usePrequalifiedQuery = ({ + id, +}: QueryProps): Prequalified | undefined => { + const context = useMemo( + () => ({ additionalTypenames: ["Prequalified"] }), + [] + ); + const [result] = useQuery({ + query: prequalifiedQuery, + variables: { + id, + }, + context, + }); + + if ( + !result?.data?.search_prequalified || + !result?.data?.search_prequalified?.length + ) { + return; + } + const data = result.data?.search_prequalified[0]; + + return data; +}; diff --git a/targets/frontend/src/modules/prequalified/edition/prequalifiedCreate.mutation.ts b/targets/frontend/src/modules/prequalified/edition/prequalifiedCreate.mutation.ts new file mode 100644 index 000000000..42eb7a890 --- /dev/null +++ 
b/targets/frontend/src/modules/prequalified/edition/prequalifiedCreate.mutation.ts @@ -0,0 +1,31 @@ +import { OperationResult, useMutation } from "urql"; + +import { Prequalified } from "../type"; +import { mapPrequalified } from "./prequalifiedEditionMap"; + +export const prequalifiedCreate = ` +mutation prequalified_create($value: search_prequalified_insert_input!) { + insert_search_prequalified_one(object: $value, on_conflict: {constraint: prequalified_pkey,update_columns: [id, title, variants]}) { + id + } +} +`; + +export type MutationProps = Prequalified; + +type MutationResult = ( + props: MutationProps +) => Promise>; + +export const usePrequalifiedCreateMutation = (): MutationResult => { + const [, executeCreate] = useMutation(prequalifiedCreate); + const resultFunction = async (data: MutationProps) => { + const value = mapPrequalified(data); + const result = await executeCreate({ value }); + if (result.error) { + throw new Error(result.error.message); + } + return result; + }; + return resultFunction; +}; diff --git a/targets/frontend/src/modules/prequalified/edition/prequalifiedDelete.mutation.ts b/targets/frontend/src/modules/prequalified/edition/prequalifiedDelete.mutation.ts new file mode 100644 index 000000000..e4e81603f --- /dev/null +++ b/targets/frontend/src/modules/prequalified/edition/prequalifiedDelete.mutation.ts @@ -0,0 +1,32 @@ +import { OperationResult, useMutation } from "urql"; +import { gql } from "@urql/core"; + +export const deletePrequalifiedMutation = gql` + mutation delete_prequalified($id: uuid) { + delete_search_prequalified(where: { id: { _eq: $id } }) { + affectedRows: affected_rows + } + delete_search_prequalified_documents( + where: { prequalifiedId: { _eq: $id } } + ) { + affected_rows + } + } +`; + +export type DeletePrequalifiedMutationResult = ( + id: string +) => Promise; + +export const useDeletePrequalifiedMutation = + (): DeletePrequalifiedMutationResult => { + const [, execute] = useMutation(deletePrequalifiedMutation); + const resultFunction = async (id: string) => { + const result = await execute({ id }); + if (result.error) { + throw new Error(result.error.message); + } + return result; + }; + return resultFunction; + }; diff --git a/targets/frontend/src/modules/prequalified/edition/prequalifiedEditionMap.ts b/targets/frontend/src/modules/prequalified/edition/prequalifiedEditionMap.ts new file mode 100644 index 000000000..d5868f0b4 --- /dev/null +++ b/targets/frontend/src/modules/prequalified/edition/prequalifiedEditionMap.ts @@ -0,0 +1,29 @@ +import { Prequalified, PrequalifiedDocument } from "../type"; + +export type HasuraInput = Omit & { + documents: { + data: PrequalifiedDocument[]; + on_conflict: { + constraint: string; + update_columns: string[]; + }; + }; +}; + +export const mapPrequalified = (data: Prequalified): HasuraInput => { + return { + id: data.id, + title: data.title, + variants: data.variants, + documents: { + data: data.documents.map(({ documentId }, order) => ({ + documentId, + order, + })), + on_conflict: { + constraint: "prequalified_documents_pkey", + update_columns: ["order", "prequalifiedId", "documentId"], + }, + }, + }; +}; diff --git a/targets/frontend/src/modules/prequalified/edition/prequalifiedUpdate.mutation.ts b/targets/frontend/src/modules/prequalified/edition/prequalifiedUpdate.mutation.ts new file mode 100644 index 000000000..e848e2260 --- /dev/null +++ b/targets/frontend/src/modules/prequalified/edition/prequalifiedUpdate.mutation.ts @@ -0,0 +1,34 @@ +import { OperationResult, useMutation } from "urql"; 
+ +import { Prequalified } from "../type"; +import { mapPrequalified } from "./prequalifiedEditionMap"; + +export const prequalifiedUpdate = ` +mutation prequalified_create($id: uuid, $value: search_prequalified_insert_input!) { + delete_search_prequalified_documents(where: {prequalifiedId: {_eq: $id}}) { + affected_rows + } + insert_search_prequalified_one(object: $value, on_conflict: {constraint: prequalified_pkey,update_columns: [id, title, variants]}) { + id + } +} +`; + +export type MutationProps = Prequalified; + +type MutationResult = ( + props: MutationProps +) => Promise>; + +export const usePrequalifiedUpdateMutation = (): MutationResult => { + const [, executeUpdate] = useMutation(prequalifiedUpdate); + const resultFunction = async (data: MutationProps) => { + const value = mapPrequalified(data); + const result = await executeUpdate({ id: value.id, value }); + if (result.error) { + throw new Error(result.error.message); + } + return result; + }; + return resultFunction; +}; diff --git a/targets/frontend/src/modules/prequalified/index.ts b/targets/frontend/src/modules/prequalified/index.ts new file mode 100644 index 000000000..4ce7b72a9 --- /dev/null +++ b/targets/frontend/src/modules/prequalified/index.ts @@ -0,0 +1,3 @@ +export * from "./edition"; +export * from "./list"; +export * from "./ValidationBar"; diff --git a/targets/frontend/src/modules/prequalified/list/index.ts b/targets/frontend/src/modules/prequalified/list/index.ts new file mode 100644 index 000000000..aaf38b809 --- /dev/null +++ b/targets/frontend/src/modules/prequalified/list/index.ts @@ -0,0 +1 @@ +export * from "./prequalifiedList"; diff --git a/targets/frontend/src/modules/prequalified/list/prequalifiedList.query.ts b/targets/frontend/src/modules/prequalified/list/prequalifiedList.query.ts new file mode 100644 index 000000000..4ad00ea52 --- /dev/null +++ b/targets/frontend/src/modules/prequalified/list/prequalifiedList.query.ts @@ -0,0 +1,47 @@ +import { Prequalified } from "../type"; +import { useQuery } from "urql"; + +const prequalifiedQuery = ` + query get_prequalified_list($search: String) { + search_prequalified( + where: { + title: { _ilike: $search } + } + order_by: {title: asc} + ) { + variants + title + id + documents { + document { + cdtnId: cdtn_id + title + source + slug + } + } + } + } +`; + +type QueryResult = { + search_prequalified: Prequalified[]; +}; + +export type PrequalifiedListQueryProps = { + search?: string; +}; + +export const usePrequalifiedListQuery = ({ + search, +}: PrequalifiedListQueryProps): Prequalified[] | undefined => { + const [result] = useQuery({ + query: prequalifiedQuery, + requestPolicy: "cache-and-network", + variables: { + search, + }, + }); + + return result?.data?.search_prequalified; +}; diff --git a/targets/frontend/src/modules/prequalified/list/prequalifiedList.tsx b/targets/frontend/src/modules/prequalified/list/prequalifiedList.tsx new file mode 100644 index 000000000..d9ecafc1f --- /dev/null +++ b/targets/frontend/src/modules/prequalified/list/prequalifiedList.tsx @@ -0,0 +1,76 @@ +import { + Link, + Paper, + Stack, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, + Button, + TextField, +} from "@mui/material"; +import { usePrequalifiedListQuery } from "./prequalifiedList.query"; +import { PrequalifiedRow } from "./prequalifiedRow"; +import { useState } from "react"; +import { useRouter } from "next/router"; + +export const PrequalifiedList = (): JSX.Element => { + const router = useRouter(); + const [search, setSearch] = 
useState(); + const data = usePrequalifiedListQuery({ search }); + return ( + <> + + { + const value = event.target.value; + setSearch(value ? `%${value}%` : undefined); + }} + /> +
+ +
+
+ + + + + + + Titre + + + Variants + + + Nb Documents + + + + + {data?.map((row) => ( + + ))} + +
+
+
+ + ); +}; diff --git a/targets/frontend/src/modules/prequalified/list/prequalifiedRow.tsx b/targets/frontend/src/modules/prequalified/list/prequalifiedRow.tsx new file mode 100644 index 000000000..f7c9a570b --- /dev/null +++ b/targets/frontend/src/modules/prequalified/list/prequalifiedRow.tsx @@ -0,0 +1,28 @@ +import TableCell from "@mui/material/TableCell"; +import TableRow from "@mui/material/TableRow"; +import { useRouter } from "next/router"; + +import { Prequalified } from "../type"; + +export const PrequalifiedRow = (props: { row: Prequalified }) => { + const { row } = props; + const router = useRouter(); + + return ( + { + router.push(`/prequalified/${row.id}`); + }} + style={{ cursor: "pointer" }} + hover + > + {row.title} + + {`${row.variants.join(", ").substring(0, 100)}...`} + + {row.documents.length} + + ); +}; diff --git a/targets/frontend/src/modules/prequalified/type.ts b/targets/frontend/src/modules/prequalified/type.ts new file mode 100644 index 000000000..a0a123827 --- /dev/null +++ b/targets/frontend/src/modules/prequalified/type.ts @@ -0,0 +1,17 @@ +import { z } from "zod"; +import { documentSchema } from "../../components/contributions"; + +export const prequalifiedDocumentSchema = z.object({ + documentId: z.string(), + order: z.number(), + document: documentSchema.optional(), +}); +export type PrequalifiedDocument = z.infer; + +export const prequalifiedSchema = z.object({ + id: z.string(), + title: z.string(), + variants: z.array(z.string()), + documents: z.array(prequalifiedDocumentSchema), +}); +export type Prequalified = z.infer; diff --git a/targets/frontend/src/pages/contenus/create/[[...source]].tsx b/targets/frontend/src/pages/contenus/create/[[...source]].tsx index 22c9e85b7..6d6751d6b 100644 --- a/targets/frontend/src/pages/contenus/create/[[...source]].tsx +++ b/targets/frontend/src/pages/contenus/create/[[...source]].tsx @@ -5,7 +5,6 @@ import { useRouter } from "next/router"; import { HighlightsForm } from "src/components/highlights"; import { Layout } from "src/components/layout/auth.layout"; import { Stack } from "src/components/layout/Stack"; -import { PrequalifiedForm } from "src/components/prequalified"; import { withCustomUrqlClient } from "src/hoc/CustomUrqlClient"; import { withUserProvider } from "src/hoc/UserProvider"; import { Content } from "src/types"; @@ -67,9 +66,6 @@ export function CreateDocumentPage() { case SOURCES.HIGHLIGHTS: ContentForm = HighlightsForm; break; - case SOURCES.PREQUALIFIED: - ContentForm = PrequalifiedForm; - break; default: // eslint-disable-next-line react/display-name ContentForm = () => null; diff --git a/targets/frontend/src/pages/contenus/edit/[id].tsx b/targets/frontend/src/pages/contenus/edit/[id].tsx index 171de978d..335436fa5 100644 --- a/targets/frontend/src/pages/contenus/edit/[id].tsx +++ b/targets/frontend/src/pages/contenus/edit/[id].tsx @@ -8,7 +8,7 @@ import { Dialog } from "src/components/dialog"; import { EditorialContentForm } from "src/components/editorialContent"; import { HighlightsForm } from "src/components/highlights"; import { Layout } from "src/components/layout/auth.layout"; -import { PrequalifiedForm } from "src/components/prequalified"; +import { PrequalifiedEdition } from "src/modules/prequalified"; import { withCustomUrqlClient } from "src/hoc/CustomUrqlClient"; import { withUserProvider } from "src/hoc/UserProvider"; import { previewContentAction } from "src/lib/preview/preview.gql"; @@ -191,14 +191,6 @@ export function EditInformationPage() { onSubmit={onSubmitHighlightMemo} /> ); - 
case SOURCES.PREQUALIFIED: - return ( - } - loading={updating || deleting} - onSubmit={onSubmitPrequalifiedMemo} - /> - ); default: //eslint-disable-next-line react/display-name return Chargement...; diff --git a/targets/frontend/src/pages/prequalified/[id].tsx b/targets/frontend/src/pages/prequalified/[id].tsx new file mode 100644 index 000000000..b1493abc7 --- /dev/null +++ b/targets/frontend/src/pages/prequalified/[id].tsx @@ -0,0 +1,17 @@ +import { PrequalifiedEdition } from "src/modules/prequalified"; +import { Layout } from "src/components/layout/auth.layout"; +import { withCustomUrqlClient } from "src/hoc/CustomUrqlClient"; +import { withUserProvider } from "src/hoc/UserProvider"; +import { useRouter } from "next/router"; + +export function PrequalifiedPage() { + const router = useRouter(); + const id = router?.query?.id as string; + return ( + + + + ); +} + +export default withCustomUrqlClient(withUserProvider(PrequalifiedPage)); diff --git a/targets/frontend/src/pages/prequalified/create.tsx b/targets/frontend/src/pages/prequalified/create.tsx new file mode 100644 index 000000000..486e5ea23 --- /dev/null +++ b/targets/frontend/src/pages/prequalified/create.tsx @@ -0,0 +1,14 @@ +import { PrequalifiedCreate } from "src/modules/prequalified"; +import { Layout } from "src/components/layout/auth.layout"; +import { withCustomUrqlClient } from "src/hoc/CustomUrqlClient"; +import { withUserProvider } from "src/hoc/UserProvider"; + +export function PrequalifiedNewPage() { + return ( + + + + ); +} + +export default withCustomUrqlClient(withUserProvider(PrequalifiedNewPage)); diff --git a/targets/frontend/src/pages/prequalified/index.tsx b/targets/frontend/src/pages/prequalified/index.tsx new file mode 100644 index 000000000..a3336aa7b --- /dev/null +++ b/targets/frontend/src/pages/prequalified/index.tsx @@ -0,0 +1,14 @@ +import { PrequalifiedList } from "src/modules/prequalified"; +import { Layout } from "src/components/layout/auth.layout"; +import { withCustomUrqlClient } from "src/hoc/CustomUrqlClient"; +import { withUserProvider } from "src/hoc/UserProvider"; + +export function PrequalifiedPage() { + return ( + + + + ); +} + +export default withCustomUrqlClient(withUserProvider(PrequalifiedPage)); diff --git a/targets/hasura/metadata/databases/default/tables/search_prequalified.yaml b/targets/hasura/metadata/databases/default/tables/search_prequalified.yaml new file mode 100644 index 000000000..4e60ed542 --- /dev/null +++ b/targets/hasura/metadata/databases/default/tables/search_prequalified.yaml @@ -0,0 +1,44 @@ +table: + name: prequalified + schema: search +array_relationships: + - name: documents + using: + foreign_key_constraint_on: + column: prequalified_id + table: + name: prequalified_documents + schema: search +insert_permissions: + - role: super + permission: + check: {} + columns: + - id + - title + - variants + comment: "" +select_permissions: + - role: super + permission: + columns: + - id + - title + - variants + filter: {} + comment: "" +update_permissions: + - role: super + permission: + columns: + - id + - title + - variants + filter: {} + check: {} + comment: "" +delete_permissions: + - role: super + permission: + filter: {} + comment: "" diff --git a/targets/hasura/metadata/databases/default/tables/search_prequalified_documents.yaml b/targets/hasura/metadata/databases/default/tables/search_prequalified_documents.yaml new file mode 100644 index 000000000..87cffa57e --- /dev/null +++ b/targets/hasura/metadata/databases/default/tables/search_prequalified_documents.yaml @@ 
-0,0 +1,50 @@ +table: + name: prequalified_documents + schema: search +configuration: + column_config: + document_id: + custom_name: documentId + prequalified_id: + custom_name: prequalifiedId + custom_column_names: + document_id: documentId + prequalified_id: prequalifiedId + custom_root_fields: {} +object_relationships: + - name: document + using: + foreign_key_constraint_on: document_id +insert_permissions: + - role: super + permission: + check: {} + columns: + - order + - document_id + - prequalified_id + comment: "" +select_permissions: + - role: super + permission: + columns: + - order + - document_id + - prequalified_id + filter: {} + comment: "" +update_permissions: + - role: super + permission: + columns: + - order + - document_id + - prequalified_id + filter: {} + check: {} + comment: "" +delete_permissions: + - role: super + permission: + filter: {} + comment: "" diff --git a/targets/hasura/metadata/databases/default/tables/tables.yaml b/targets/hasura/metadata/databases/default/tables/tables.yaml index 516e3b8c0..b2b70eee0 100644 --- a/targets/hasura/metadata/databases/default/tables/tables.yaml +++ b/targets/hasura/metadata/databases/default/tables/tables.yaml @@ -45,6 +45,8 @@ - "!include public_roles.yaml" - "!include public_service_public_contents.yaml" - "!include public_sources.yaml" +- "!include search_prequalified.yaml" +- "!include search_prequalified_documents.yaml" - "!include v1_fiches_sp.yaml" - "!include v1_fiches_travail_data_alerts.yaml" - "!include v1_fiches_vdd_alerts.yaml" diff --git a/targets/hasura/migrations/default/1709288618551_create_schema_search/down.sql b/targets/hasura/migrations/default/1709288618551_create_schema_search/down.sql new file mode 100644 index 000000000..12a87d2d6 --- /dev/null +++ b/targets/hasura/migrations/default/1709288618551_create_schema_search/down.sql @@ -0,0 +1 @@ +drop schema "search" cascade; diff --git a/targets/hasura/migrations/default/1709288618551_create_schema_search/up.sql b/targets/hasura/migrations/default/1709288618551_create_schema_search/up.sql new file mode 100644 index 000000000..913aff925 --- /dev/null +++ b/targets/hasura/migrations/default/1709288618551_create_schema_search/up.sql @@ -0,0 +1 @@ +create schema "search"; diff --git a/targets/hasura/migrations/default/1709289531525_create_table_search_prequalified/down.sql b/targets/hasura/migrations/default/1709289531525_create_table_search_prequalified/down.sql new file mode 100644 index 000000000..e40594e59 --- /dev/null +++ b/targets/hasura/migrations/default/1709289531525_create_table_search_prequalified/down.sql @@ -0,0 +1 @@ +DROP TABLE "search"."prequalified"; diff --git a/targets/hasura/migrations/default/1709289531525_create_table_search_prequalified/up.sql b/targets/hasura/migrations/default/1709289531525_create_table_search_prequalified/up.sql new file mode 100644 index 000000000..1a08c08dd --- /dev/null +++ b/targets/hasura/migrations/default/1709289531525_create_table_search_prequalified/up.sql @@ -0,0 +1,2 @@ +CREATE TABLE "search"."prequalified" ("id" uuid NOT NULL DEFAULT gen_random_uuid(), "variants" text[] NOT NULL, "title" text not null, PRIMARY KEY ("id") ); +CREATE EXTENSION IF NOT EXISTS pgcrypto; diff --git a/targets/hasura/migrations/default/1709291095355_create_table_search_prequalified_documents/down.sql b/targets/hasura/migrations/default/1709291095355_create_table_search_prequalified_documents/down.sql new file mode 100644 index 000000000..8a52d2755 --- /dev/null +++ 
b/targets/hasura/migrations/default/1709291095355_create_table_search_prequalified_documents/down.sql @@ -0,0 +1 @@ +DROP TABLE "search"."prequalified_documents"; diff --git a/targets/hasura/migrations/default/1709291095355_create_table_search_prequalified_documents/up.sql b/targets/hasura/migrations/default/1709291095355_create_table_search_prequalified_documents/up.sql new file mode 100644 index 000000000..3bc185498 --- /dev/null +++ b/targets/hasura/migrations/default/1709291095355_create_table_search_prequalified_documents/up.sql @@ -0,0 +1 @@ +CREATE TABLE "search"."prequalified_documents" ("prequalified_id" uuid NOT NULL, "document_id" text NOT NULL, "order" integer NOT NULL, PRIMARY KEY ("prequalified_id","document_id") , FOREIGN KEY ("prequalified_id") REFERENCES "search"."prequalified"("id") ON UPDATE cascade ON DELETE cascade, FOREIGN KEY ("document_id") REFERENCES "public"."documents"("cdtn_id") ON UPDATE cascade ON DELETE cascade); diff --git a/targets/hasura/migrations/default/1709305803863_migrate_prequalified/down.sql b/targets/hasura/migrations/default/1709305803863_migrate_prequalified/down.sql new file mode 100644 index 000000000..e69de29bb diff --git a/targets/hasura/migrations/default/1709305803863_migrate_prequalified/up.sql b/targets/hasura/migrations/default/1709305803863_migrate_prequalified/up.sql new file mode 100644 index 000000000..712788ca6 --- /dev/null +++ b/targets/hasura/migrations/default/1709305803863_migrate_prequalified/up.sql @@ -0,0 +1,43 @@ +with _prequalified_variants as ( + select distinct cdtn_id, + title, + jsonb_array_elements_text(document->'variants') as variant + from documents + where source = 'prequalified' +), +_prequalified as ( + select distinct cdtn_id, + title, + array_agg(variant) as variants + from _prequalified_variants + group by cdtn_id, title + order by variants asc +), +_prequalified_documents as ( + select distinct p.variants, + dr.document_b as document_id, + (dr.data->>'position')::int as "order" + from _prequalified p + inner join document_relations dr on dr.document_a = cdtn_id +), +_inserted_prequalified as ( + insert into "search".prequalified(title, variants) + select title, variants + from _prequalified + returning id, + variants +), +_inserted_prequalified_documents as ( + insert into search.prequalified_documents(prequalified_id, document_id, "order") + select ip.id, + pd.document_id, + pd."order" + from _prequalified_documents pd + inner join _inserted_prequalified ip on ip.variants = pd.variants + returning prequalified_id, + document_id +) +select * +from _inserted_prequalified_documents; + +delete from public.documents where source = 'prequalified';
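
For reviewers, a condensed sketch (not part of the patch) of how the pieces introduced above fit together on the alert-cli side: the paginated createDocumentsFetcher covers every regular source, while prequalified entries, which this change moves out of public.documents and into the new search.prequalified tables, are fetched by fetchPrequalified and merged back into the same document list. All names and imports below are taken from the files in this diff; the memoizee wrapper around the exported _getDocumentsWithRelationsBySource function is omitted for brevity, so treat this as an illustration rather than the exact exported code.

import { SOURCES, type SourceValues } from "@socialgouv/cdtn-sources";
import type {
  AllDocumentsWithRelationBySourceResult,
  HasuraDocumentWithRelations,
} from "./getDocumentQuery.gql";
import { getAllDocumentsWithRelationsBySourceQuery } from "./getDocumentQuery.gql";
import { createDocumentsFetcher } from "./createDocumentsFetcher";
import { fetchPrequalified } from "./fetchPrequalified";

async function getDocumentsWithRelations(
  source: SourceValues[]
): Promise<HasuraDocumentWithRelations[]> {
  // createDocumentsFetcher first counts the documents for the requested
  // sources, then pages through them (PAGE_SIZE = 100) with at most three
  // concurrent GraphQL requests (JOB_CONCURENCY = 3).
  const fetchDocuments =
    createDocumentsFetcher<AllDocumentsWithRelationBySourceResult>(
      getAllDocumentsWithRelationsBySourceQuery
    );
  const results = await fetchDocuments(source);

  // Keep only the pages that resolved with data.
  let documents = results.flatMap((result) =>
    result.status === "fulfilled" && result.value.data
      ? result.value.data.documents
      : []
  );

  // Prequalified rows no longer live in public.documents (see the
  // 1709305803863_migrate_prequalified migration), so they are pulled from
  // search.prequalified and normalized to the same HasuraDocumentWithRelations
  // shape (isPublished: true, source: "prequalified").
  if (source.includes(SOURCES.PREQUALIFIED)) {
    const prequalified = await fetchPrequalified();
    if (prequalified) {
      documents = documents.concat(prequalified);
    }
  }
  return documents;
}

On the export-elasticsearch side, generatePrequalified plays the matching role: it reads the same search.prequalified rows through its own fetchPrequalified, maps them to PrequalifiedElasticDocument entries (using document.document.description as a fallback when meta_description is empty, as the contribution case in generatePrequalified.test.ts shows), and cdtnDocumentsGen then indexes the result with updateDocs(SOURCES.PREQUALIFIED, prequalified).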