Skip to content

Commit

Permalink
300 streamline frontend refresh network calls (#836)
Browse files Browse the repository at this point in the history
* adds placeholder levelState endpoint

* fill out handleGetLevelState

* adds reminder comment

* remove valid models and system roles from level state endpoint

* add service to get levelState in frontend

* adds levelStateService to index

* corrects levelState url in frontend

* further corrects levelState url in frontend

* fixes typo in comment

* renames levelState to handleStart

* adds models and system roles back to start request

* updates frontend service with changed name and added models and system roles

* replace models and system roles front end calls with the start endpoint

* erases all trace of system role endpoint

* erases all trace of valid model endpoint

* renames models to availableModels

* removes health check from frontend and adds catch to the handshake request which does the same thing as the health check

* fix names from levelState to getStart in backend

* only loads level info after first mount

* separate logic for processing new level data from fetching it

* remove some comments

* moves main body key to App component

* Revert "moves main body key to App component"

This reverts commit 6e29718.

* moves loadBackendData to MainComponent

* simplifies args to processBackendLevelData

* converts chatMessageDTOs to chatMessages before returning from the service

* moves messages state back to MainComponent

* moves System roles state and openHandbook to mainComponent

* removes redundant try catch and moves console log

* puts startResponse in new models file combined.ts

* removes comment

* uses object destructuring to shorten declaration

* renames to getValidOpenAIModels()

* refactors loadBackendData

* adds list of defences to show in level 3 by id

* small rename

* Revert "small rename"

This reverts commit 60440bb.

* Revert "adds list of defences to show in level 3 by id"

This reverts commit a17d13b.

* distinguishes between Defence and DefenceDTO. converts DTO to defence in the frontend service

* fixes console log

* remove defences_shown_level3

* clarifies nonModelDefences

* renames ALL_DEFENCES to DEFAULT_DEFENCES

* replaces a some with an includes

* uses object destructuring to simplify startService

* renames history to chatHistory in startResponse

* fix mocking for getValidOpenAIModels

* fixes different instance of same mocking problem

* moves isFirstRender logic out of hook

* sets isFirstRender at the right place
  • Loading branch information
pmarsh-scottlogic authored Feb 22, 2024
1 parent 9a95a5e commit 73e8d8e
Show file tree
Hide file tree
Showing 26 changed files with 253 additions and 224 deletions.
15 changes: 2 additions & 13 deletions backend/src/controller/modelController.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
import { Request, Response } from 'express';
import { Response } from 'express';

import { OpenAIGetModelRequest } from '@src/models/api/OpenAIGetModelRequest';
import { OpenAiConfigureModelRequest } from '@src/models/api/OpenAiConfigureModelRequest';
import { OpenAiSetModelRequest } from '@src/models/api/OpenAiSetModelRequest';
import { MODEL_CONFIG } from '@src/models/chat';
import { getValidOpenAIModelsList } from '@src/openai';

function handleSetModel(req: OpenAiSetModelRequest, res: Response) {
const { model } = req.body;
Expand Down Expand Up @@ -37,14 +36,4 @@ function handleGetModel(req: OpenAIGetModelRequest, res: Response) {
res.send(req.session.chatModel);
}

function handleGetValidModels(_: Request, res: Response) {
const models = getValidOpenAIModelsList();
res.send({ models });
}

export {
handleSetModel,
handleConfigureModel,
handleGetModel,
handleGetValidModels,
};
export { handleSetModel, handleConfigureModel, handleGetModel };
30 changes: 30 additions & 0 deletions backend/src/controller/startController.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
import { Response } from 'express';

import { GetStartRequest } from '@src/models/api/getStartRequest';
import { LEVEL_NAMES } from '@src/models/level';
import { getValidOpenAIModels } from '@src/openai';
import {
systemRoleLevel1,
systemRoleLevel2,
systemRoleLevel3,
} from '@src/promptTemplates';

/**
 * Handshake endpoint: returns everything the frontend needs to render a
 * level on first load — the session's per-level state (emails, chat
 * history, defences) plus the available OpenAI models and the per-level
 * system roles.
 */
function handleStart(req: GetStartRequest, res: Response) {
	const { level } = req.query;

	// Pull the three pieces of session state for the requested level once,
	// rather than indexing levelState repeatedly below.
	const { sentEmails, chatHistory, defences } = req.session.levelState[level];

	// NOTE(review): only levels 1-3 carry a system role here — confirm the
	// remaining level is intentionally excluded.
	const systemRoles = [
		{ level: LEVEL_NAMES.LEVEL_1, systemRole: systemRoleLevel1 },
		{ level: LEVEL_NAMES.LEVEL_2, systemRole: systemRoleLevel2 },
		{ level: LEVEL_NAMES.LEVEL_3, systemRole: systemRoleLevel3 },
	];

	res.send({
		emails: sentEmails,
		chatHistory,
		defences,
		availableModels: getValidOpenAIModels(),
		systemRoles,
	});
}

export { handleStart };
19 changes: 0 additions & 19 deletions backend/src/controller/systemRoleController.ts

This file was deleted.

2 changes: 1 addition & 1 deletion backend/src/defence.ts
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,7 @@ function transformMessage(
: undefined;

if (!transformedMessage) {
console.debug('No defences applied. Message unchanged.');
console.debug('No transformation applied. Message unchanged.');
return;
}

Expand Down
4 changes: 2 additions & 2 deletions backend/src/langchain.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import { getDocumentVectors } from './document';
import { CHAT_MODELS } from './models/chat';
import { PromptEvaluationChainReply, QaChainReply } from './models/langchain';
import { LEVEL_NAMES } from './models/level';
import { getOpenAIKey, getValidOpenAIModelsList } from './openai';
import { getOpenAIKey, getValidOpenAIModels } from './openai';
import {
promptEvalPrompt,
promptEvalContextTemplate,
Expand All @@ -31,7 +31,7 @@ function makePromptTemplate(
}

function getChatModel() {
return getValidOpenAIModelsList().includes(CHAT_MODELS.GPT_4)
return getValidOpenAIModels().includes(CHAT_MODELS.GPT_4)
? CHAT_MODELS.GPT_4
: CHAT_MODELS.GPT_3_5_TURBO;
}
Expand Down
21 changes: 21 additions & 0 deletions backend/src/models/api/getStartRequest.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import { Request } from 'express';

import { ChatMessage } from '@src/models/chatMessage';
import { Defence } from '@src/models/defence';
import { EmailInfo } from '@src/models/email';
import { LEVEL_NAMES } from '@src/models/level';

/**
 * Request/response typing for the GET /start handshake endpoint.
 * Type parameters are express's Request<Params, ResBody, ReqBody, ReqQuery>:
 * no route params, the start payload as the response body, no request body,
 * and the target level as a query parameter.
 */
export type GetStartRequest = Request<
	never,
	{
		emails: EmailInfo[];
		chatHistory: ChatMessage[];
		defences: Defence[];
		availableModels: string[];
		// Fixed: handleStart sends one { level, systemRole } object per level,
		// not bare strings.
		systemRoles: { level: LEVEL_NAMES; systemRole: string }[];
	},
	never,
	{
		level: LEVEL_NAMES;
	}
>;
2 changes: 0 additions & 2 deletions backend/src/nonSessionRoutes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ import { fileURLToPath } from 'node:url';

import { handleGetDocuments } from './controller/documentController';
import { handleHealthCheck } from './controller/healthController';
import { handleGetSystemRoles } from './controller/systemRoleController';
import { importMetaUrl } from './importMetaUtils';

const router = express.Router();
Expand All @@ -16,6 +15,5 @@ router.use(
);
router.get('/documents', handleGetDocuments);
router.get('/health', handleHealthCheck);
router.get('/systemRoles', handleGetSystemRoles);

export default router;
2 changes: 1 addition & 1 deletion backend/src/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -439,7 +439,7 @@ async function chatGptSendMessage(
};
}

export const getValidOpenAIModelsList = validOpenAiModels.get;
export const getValidOpenAIModels = validOpenAiModels.get;
export {
chatGptTools,
chatGptSendMessage,
Expand Down
6 changes: 4 additions & 2 deletions backend/src/sessionRoutes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,10 @@ import {
import {
handleConfigureModel,
handleGetModel,
handleGetValidModels,
handleSetModel,
} from './controller/modelController';
import { handleResetProgress } from './controller/resetController';
import { handleStart } from './controller/startController';
import { handleTest } from './controller/testController';
import { ChatModel, defaultChatModel } from './models/chat';
import { LevelState, getInitialLevelStates } from './models/level';
Expand Down Expand Up @@ -94,6 +94,9 @@ router.use((req, _res, next) => {
next();
});

// handshake
router.get('/start', handleStart);

// defences
router.get('/defence/status', handleGetDefenceStatus);
router.post('/defence/activate', handleDefenceActivation);
Expand All @@ -112,7 +115,6 @@ router.post('/openai/addInfoToHistory', handleAddInfoToChatHistory);
router.post('/openai/clear', handleClearChatHistory);

// model configurations
router.get('/openai/validModels', handleGetValidModels);
router.get('/openai/model', handleGetModel);
router.post('/openai/model', handleSetModel);
router.post('/openai/model/configure', handleConfigureModel);
Expand Down
32 changes: 0 additions & 32 deletions backend/test/unit/controller/systemRoleController.test.ts

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ jest.mock('@src/openai', () => {
jest.requireActual<typeof import('@src/openai')>('@src/openai');
return {
...originalModule,
getValidOpenAIModelsList: jest.fn(() => mockValidModels),
getValidOpenAIModels: jest.fn(() => mockValidModels),
};
});

Expand Down
2 changes: 1 addition & 1 deletion backend/test/unit/langchain.ts/initialiseQAModel.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ jest.mock('@src/openai', () => {
jest.requireActual<typeof import('@src/openai')>('@src/openai'); // can we remove this
return {
...originalModule,
getValidOpenAIModelsList: jest.fn(() => mockValidModels),
getValidOpenAIModels: jest.fn(() => mockValidModels),
};
});

Expand Down
40 changes: 3 additions & 37 deletions frontend/src/App.tsx
Original file line number Diff line number Diff line change
@@ -1,14 +1,13 @@
import { useCallback, useEffect, useRef, useState } from 'react';

import DocumentViewBox from './components/DocumentViewer/DocumentViewBox';
import HandbookOverlay from './components/HandbookOverlay/HandbookOverlay';
import MainComponent from './components/MainComponent/MainComponent';
import LevelsComplete from './components/Overlay/LevelsComplete';
import MissionInformation from './components/Overlay/MissionInformation';
import OverlayWelcome from './components/Overlay/OverlayWelcome';
import ResetProgressOverlay from './components/Overlay/ResetProgress';
import { LEVEL_NAMES, LevelSystemRole } from './models/level';
import { chatService, levelService, systemRoleService } from './service';
import { LEVEL_NAMES } from './models/level';
import { levelService } from './service';

import './App.css';
import './Theme.css';
Expand All @@ -28,8 +27,6 @@ function App() {
null
);

const [chatModels, setChatModels] = useState<string[]>([]);
const [systemRoles, setSystemRoles] = useState<LevelSystemRole[]>([]);
const [mainComponentKey, setMainComponentKey] = useState<number>(0);

function loadIsNewUser() {
Expand Down Expand Up @@ -72,21 +69,6 @@ function App() {
setNumCompletedLevels(Math.max(numCompletedLevels, completedLevel + 1));
}

// fetch constants from the backend on app mount
async function loadBackendData() {
try {
console.log("Initializing app's backend data");
const [models, roles] = await Promise.all([
chatService.getValidModels(),
systemRoleService.getSystemRoles(),
]);
setChatModels(models);
setSystemRoles(roles);
} catch (err) {
console.log(err);
}
}

useEffect(() => {
// save number of completed levels to local storage
localStorage.setItem('numCompletedLevels', numCompletedLevels.toString());
Expand Down Expand Up @@ -116,11 +98,6 @@ function App() {
}
}, [isNewUser]);

// load the system constants from backend on app mount
useEffect(() => {
void loadBackendData();
}, []);

useEffect(() => {
// must re-bind event listener after changing overlay type
setTimeout(() => {
Expand Down Expand Up @@ -177,16 +154,7 @@ function App() {
/>
);
}
function openHandbook() {
openOverlay(
<HandbookOverlay
currentLevel={currentLevel}
numCompletedLevels={numCompletedLevels}
systemRoles={systemRoles}
closeOverlay={closeOverlay}
/>
);
}

function openInformationOverlay() {
openOverlay(
<MissionInformation
Expand Down Expand Up @@ -270,11 +238,9 @@ function App() {
key={mainComponentKey}
currentLevel={currentLevel}
numCompletedLevels={numCompletedLevels}
chatModels={chatModels}
closeOverlay={closeOverlay}
updateNumCompletedLevels={updateNumCompletedLevels}
openDocumentViewer={openDocumentViewer}
openHandbook={openHandbook}
openOverlay={openOverlay}
openInformationOverlay={openInformationOverlay}
openLevelsCompleteOverlay={openLevelsCompleteOverlay}
Expand Down
12 changes: 5 additions & 7 deletions frontend/src/Defences.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ function makeDefenceConfigItem(
return { id, name, inputType, value: '' };
}

const DEFENCES_SHOWN_LEVEL3: Defence[] = [
const DEFAULT_DEFENCES: Defence[] = [
makeDefence(
DEFENCE_ID.CHARACTER_LIMIT,
'Character Limit',
Expand Down Expand Up @@ -83,10 +83,6 @@ const DEFENCES_SHOWN_LEVEL3: Defence[] = [
'Use an LLM to evaluate the user input for malicious content and prompt injection attacks.',
[makeDefenceConfigItem('PROMPT', 'prompt', 'text')]
),
];

const ALL_DEFENCES: Defence[] = [
...DEFENCES_SHOWN_LEVEL3,
makeDefence(
DEFENCE_ID.SYSTEM_ROLE,
'System Role',
Expand All @@ -101,6 +97,8 @@ const ALL_DEFENCES: Defence[] = [
),
];

const DEFENCES_HIDDEN_LEVEL3_IDS = [DEFENCE_ID.SYSTEM_ROLE, DEFENCE_ID.QA_LLM];

const MODEL_DEFENCES = [
DEFENCE_ID.PROMPT_EVALUATION_LLM,
DEFENCE_ID.QA_LLM,
Expand All @@ -114,8 +112,8 @@ const PROMPT_ENCLOSURE_DEFENCES = [
];

export {
DEFENCES_SHOWN_LEVEL3,
ALL_DEFENCES,
DEFAULT_DEFENCES,
MODEL_DEFENCES,
PROMPT_ENCLOSURE_DEFENCES,
DEFENCES_HIDDEN_LEVEL3_IDS,
};
6 changes: 3 additions & 3 deletions frontend/src/components/ChatBox/ChatBox.tsx
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { useEffect, useState } from 'react';

import { ALL_DEFENCES } from '@src/Defences';
import { DEFAULT_DEFENCES } from '@src/Defences';
import ExportPDFLink from '@src/components/ExportChat/ExportPDFLink';
import '@src/components/ThemedButtons/ChatButton.css';
import LoadingButton from '@src/components/ThemedButtons/LoadingButton';
Expand Down Expand Up @@ -111,7 +111,7 @@ function ChatBox({
}
response.defenceReport.alertedDefences.forEach((triggeredDefence) => {
// get user-friendly defence name
const defenceName = ALL_DEFENCES.find((defence) => {
const defenceName = DEFAULT_DEFENCES.find((defence) => {
return defence.id === triggeredDefence;
})?.name.toLowerCase();
if (defenceName) {
Expand All @@ -131,7 +131,7 @@ function ChatBox({
// add triggered defences to the chat
response.defenceReport.triggeredDefences.forEach((triggeredDefence) => {
// get user-friendly defence name
const defenceName = ALL_DEFENCES.find((defence) => {
const defenceName = DEFAULT_DEFENCES.find((defence) => {
return defence.id === triggeredDefence;
})?.name.toLowerCase();
if (defenceName) {
Expand Down
Loading

0 comments on commit 73e8d8e

Please sign in to comment.