Beam: Checklist done

This commit is contained in:
Enrico Ros
2024-03-23 22:04:20 -07:00
parent bb94b7c5c6
commit b744e9673b
5 changed files with 111 additions and 54 deletions
@@ -29,7 +29,7 @@ export const FUSION_FACTORIES: FusionFactorySpec[] = [
{
type: 'chat-generate',
label: 'Generating Checklist',
mute: true,
display: 'character-count',
method: 's-s0-h0-u0-aN-u',
// You are an intelligent agent tasked with analyzing a set of AI-generated responses to the user message to identify key insights, solutions, or ideas. Your goal is to distill these alternatives into a concise checklist of options that can address the user's query. Consider the conversation's context, the user's last message, and the diversity of perspectives offered by the Beam alternatives. Generate a clear and actionable checklist that the user can review and select from.
systemPrompt: `
@@ -37,12 +37,12 @@ You are an intelligent agent tasked with analyzing a set of {{N}} AI-generated r
Your goal is to distill these into a clear, concise, and actionable checklist that the user can review and select from.
The checklist should be brief, commensurate with the task at hand, and formatted precisely as follows:
- [ ] Insight/Solution/Theme 1: [Very brief, actionable description]
- [ ] Insight/Solution/Theme 2: [Very brief, actionable description]
- [ ] **Insight/Solution/Theme name 1**: [Very brief, actionable description]
- [ ] **Insight/Solution/Theme name 2**: [Very brief, actionable description]
...
- [ ] Insight/Solution/Theme N: [Very brief, actionable description]
- [ ] **Insight/Solution/Theme name N**: [Very brief, actionable description]
The checklist should contain no more than 3-9 items orthogonal items, especially points of difference.
The checklist should contain no more than 3-9 items orthogonal items, especially points of difference, in a single brief line each (no end period).
Prioritize items based on what would be most helpful to the user when merging the {{N}} response alternatives.`.trim(),
// Remember, the checklist should only include the most critical and relevant points, ensuring clarity and conciseness. Begin by identifying the essential insights or themes.
userPrompt: `
@@ -53,6 +53,13 @@ Ensure the checklist is comprehensive, covering the breadth of ideas presented i
{
type: 'user-input-checklist',
label: 'Criteria Selection',
outputPrompt: `
The user selected:
{{YesAnswers}}
The user did NOT select:
{{NoAnswers}}
`.trim(),
},
{
type: 'chat-generate',
@@ -64,7 +71,7 @@ Your task is to combine the {{N}} response alternatives into a single cohesive r
This synthesis should address the user's original query comprehensively, incorporating the {{N}} response alternatives following the user's chosen options.
Aim for clarity and coherence in your final output.`.trim(),
userPrompt: `
Given the user preferences below, synthesize the {{N}} response alternatives above into a single, cohesive, comprehensive response that follows my former query and the preferences below:
Given the user preferences below, synthesize the {{N}} response alternatives above into a single, cohesive, comprehensive response that follows the user query and the preferences below:
{{PrevStepOutput}}
@@ -95,23 +95,24 @@ export function gatherStartFusion(
// Execute the instructions in sequence
let promiseChain = Promise.resolve();
let promiseChain = Promise.resolve<string>('');
for (const instruction of instructions) {
promiseChain = promiseChain.then(() => {
promiseChain = promiseChain.then((previousResult: string) => {
// You can use previousResult here, if needed
inputState.updateProgressComponent(
<Typography
level='body-xs'
// endDecorator={<CircularProgress color='neutral' size='sm' sx={{ '--CircularProgress-size': '16px' }} />}
sx={{ color: 'text.secondary' }}
>
{1 + instructions.indexOf(instruction)}/{instructions.length} · {instruction.label}
{1 + instructions.indexOf(instruction)}/{instructions.length} · {instruction.label} ...
</Typography>,
);
switch (instruction.type) {
case 'chat-generate':
return executeChatGenerate(instruction, inputState);
return executeChatGenerate(instruction, inputState, previousResult);
case 'user-input-checklist':
return executeUserInputChecklist(instruction, inputState);
return executeUserInputChecklist(instruction, inputState, previousResult);
default:
return Promise.reject(new Error('Unsupported Merge instruction'));
}
@@ -136,7 +137,7 @@ export function gatherStartFusion(
console.log('Fusion aborted:', fusionId);
return onUpdateBFusion({
stage: 'stopped',
errorText: 'Stopped.',
errorText: 'Merge Canceled.',
fusingProgressComponent: undefined,
});
}
@@ -1,12 +1,14 @@
import * as React from 'react';
import { Typography } from '@mui/joy';
import { ChatMessage } from '../../../../apps/chat/components/message/ChatMessage';
import { streamAssistantMessage } from '../../../../apps/chat/editors/chat-stream';
import type { VChatMessageIn } from '~/modules/llms/llm.client';
import { bareBonesPromptMixer } from '~/modules/persona/pmix/pmix';
import type { DMessage } from '~/common/state/store-chats';
import { DMessage } from '~/common/state/store-chats';
import { getUXLabsHighPerformance } from '~/common/state/store-ux-labs';
import type { BaseInstruction, ExecutionInputState } from '../beam.gather.instructions';
@@ -19,7 +21,7 @@ type ChatGenerateMethods =
export interface ChatGenerateInstruction extends BaseInstruction {
type: 'chat-generate';
mute?: boolean;
display?: 'mute' | 'character-count';
method: ChatGenerateMethods;
systemPrompt: string;
userPrompt: string;
@@ -30,7 +32,7 @@ export interface ChatGenerateInstruction extends BaseInstruction {
/**
* Merge Execution: uses a chain of Promises to queue up (cancellable) sequential instructions.
*/
export async function executeChatGenerate(_i: ChatGenerateInstruction, inputs: ExecutionInputState): Promise<void> {
export async function executeChatGenerate(_i: ChatGenerateInstruction, inputs: ExecutionInputState, prevStepOutput: string): Promise<string> {
// build the input messages
if (_i.method !== 's-s0-h0-u0-aN-u')
@@ -38,7 +40,7 @@ export async function executeChatGenerate(_i: ChatGenerateInstruction, inputs: E
const history: VChatMessageIn[] = [
// s
{ role: 'system', content: _mixInstructionPrompt(_i.systemPrompt, inputs.rayMessages.length) },
{ role: 'system', content: _mixChatGeneratePrompt(_i.systemPrompt, inputs.rayMessages.length, prevStepOutput) },
// s0-h0-u0
...inputs.chatMessages
.filter((m) => (m.role === 'user' || m.role === 'assistant'))
@@ -47,7 +49,7 @@ export async function executeChatGenerate(_i: ChatGenerateInstruction, inputs: E
...inputs.rayMessages
.map((m): VChatMessageIn => ({ role: 'assistant', content: m.text })),
// u
{ role: 'user', content: _mixInstructionPrompt(_i.userPrompt, inputs.rayMessages.length) },
{ role: 'user', content: _mixChatGeneratePrompt(_i.userPrompt, inputs.rayMessages.length, prevStepOutput) },
];
// reset the intermediate message
@@ -63,17 +65,29 @@ export async function executeChatGenerate(_i: ChatGenerateInstruction, inputs: E
if (update.text)
inputs.intermediateDMessage.updated = Date.now();
// recreate the UI for this
if (!_i.mute)
inputs.updateInstructionComponent(
<ChatMessage
message={inputs.intermediateDMessage}
fitScreen={true}
showAvatar={false}
adjustContentScaling={-1}
sx={fusionChatMessageSx}
/>,
);
switch (_i.display) {
case 'mute':
return;
case 'character-count':
inputs.updateInstructionComponent(
<Typography level='body-xs' sx={{ opacity: 0.5 }}>{update.text?.length || 0} characters</Typography>,
);
return;
default:
// recreate the UI for this
inputs.updateInstructionComponent(
<ChatMessage
message={inputs.intermediateDMessage}
fitScreen={true}
showAvatar={false}
adjustContentScaling={-1}
sx={fusionChatMessageSx}
/>,
);
return;
}
};
// LLM Streaming generation
@@ -86,12 +100,16 @@ export async function executeChatGenerate(_i: ChatGenerateInstruction, inputs: E
}
if (status.outcome === 'errored')
throw new Error(`Model execution error: ${status.errorMessage || 'Unknown error'}`);
// Proceed to the next step
return inputs.intermediateDMessage.text;
});
}
function _mixInstructionPrompt(prompt: string, raysReady: number): string {
/**
 * Expands the template placeholders in a merge-instruction prompt.
 * `{{N}}` becomes the number of ready rays; `{{PrevStepOutput}}` becomes the
 * text produced by the previous instruction in the chain.
 */
function _mixChatGeneratePrompt(prompt: string, raysReady: number, prevStepOutput: string): string {
  const substitutions = {
    '{{N}}': String(raysReady),
    '{{PrevStepOutput}}': prevStepOutput,
  };
  return bareBonesPromptMixer(prompt, undefined, substitutions);
}
@@ -28,6 +28,20 @@ export function parseTextToChecklist(text: string): UserChecklistOption[] {
}
/**
 * Renders markdown `**bold**` spans inside a plain-text label as <strong>
 * elements. Non-bold segments are returned as plain strings, so the result
 * can be used directly as React children (e.g. a Checkbox label).
 */
function parseMarkdownBold(text: string) {
  // Split on the bold markers; the capturing group keeps each matched
  // `**...**` run in the resulting parts array.
  const parts = text.split(/(\*\*.*?\*\*)/g);
  return parts.map((part, index) => {
    // A captured bold run is always at least 4 chars ('**' + '**'). The length
    // guard stops a bare '**' in plain text from being swallowed into an empty
    // <strong> ('**'.slice(2, -2) yields '').
    if (part.length >= 4 && part.startsWith('**') && part.endsWith('**')) {
      return <strong key={index}>{part.slice(2, -2)}</strong>;
    }
    return part;
  });
}
export function UserInputChecklistComponent(props: {
options: UserChecklistOption[];
onConfirm: (selectedOptions: UserChecklistOption[]) => void;
@@ -46,35 +60,45 @@ export function UserInputChecklistComponent(props: {
);
}, []);
const moreThanHalfSelected = localOptions.filter(option => option.selected).length > localOptions.length / 2;
return (
<Box sx={{ display: 'grid', gap: 3 }}>
<Typography level='body-md' sx={{ mt: 1 }}>
<Box sx={{ display: 'grid', gap: 2 }}>
<Typography color='primary' sx={{ mt: 1, fontWeight: 'lg', fontSize: 'md' }}>
Select the Merge options to apply:
</Typography>
<Box sx={{ display: 'flex', flexDirection: 'column', gap: 2 }}>
{localOptions.map((option) => (
<Checkbox
key={option.id}
size='sm'
checked={option.selected}
onChange={() => handleToggle(option.id)}
label={option.label}
sx={{ whiteSpace: 'break-spaces', ml: 2 }}
/>
))}
</Box>
{localOptions.map((option) => (
<Checkbox
key={option.id}
size='sm'
checked={option.selected}
onChange={() => handleToggle(option.id)}
label={parseMarkdownBold(option.label)}
// sx={{ ml: 2, fontSize: 'md' }}
/>
))}
<Box sx={{ display: 'flex', gap: 1 }}>
<Box sx={{ display: 'flex', gap: 1, mt: 1 }}>
<Button
onClick={() => props.onConfirm(localOptions)}
>
Confirm Selection
</Button>
<Button
color='neutral'
variant='soft'
onClick={() => setLocalOptions(localOptions.map(option => ({ ...option, selected: !moreThanHalfSelected })))}
>
{moreThanHalfSelected ? 'Uncheck All' : 'Check All'}
</Button>
<Button
color='neutral'
variant='soft'
onClick={props.onCancel}
sx={{ ml: 'auto' }}
>
Cancel
</Button>
@@ -1,10 +1,12 @@
import type { BaseInstruction, ExecutionInputState } from '../beam.gather.instructions';
import { GATHER_DEBUG_EXECUTION_CHAIN } from '../../beam.config';
import { parseTextToChecklist, UserInputChecklistComponent } from './UserInputChecklistComponent';
import { bareBonesPromptMixer } from '~/modules/persona/pmix/pmix';
// Pauses the merge execution chain and asks the user to tick/untick checklist
// items parsed from the previous chat-generate step's output.
export interface UserInputChecklistInstruction extends BaseInstruction {
  type: 'user-input-checklist';
  // Template for the text handed to the next step; supports the
  // {{YesAnswers}} and {{NoAnswers}} placeholders (selected / unselected items).
  outputPrompt: string;
}
export interface UserChecklistOption {
@@ -14,24 +16,20 @@ export interface UserChecklistOption {
}
export interface UserChecklistValue {
checklist: UserChecklistOption[];
}
export async function executeUserInputChecklist(
_i: UserInputChecklistInstruction,
inputs: ExecutionInputState,
): Promise<UserChecklistValue> {
previousResult: string,
): Promise<string> {
return new Promise((resolve, reject) => {
// initial text to options
const inputText = inputs.intermediateDMessage.text;
const options = parseTextToChecklist(inputText);
const options = parseTextToChecklist(previousResult);
// if no options, there's an error
if (options.length < 2) {
if (GATHER_DEBUG_EXECUTION_CHAIN)
console.log('No checklist options found:', inputText);
console.log('No checklist options found:', previousResult);
reject(new Error('Oops! It looks like we had trouble understanding the Model. Could you please try again?'));
return;
}
@@ -47,12 +45,21 @@ export async function executeUserInputChecklist(
const onConfirm = (selectedOptions: UserChecklistOption[]) => {
clearState();
resolve({ checklist: selectedOptions }); // Proceed to the next step
// output prompt mixer
const outputPrompt = bareBonesPromptMixer(_i.outputPrompt, undefined, {
'{{YesAnswers}}': selectedOptions.filter(o => o.selected).map(o => `- ${o.label.trim()}`).join('\n') || 'None',
'{{NoAnswers}}': selectedOptions.filter(o => !o.selected).map(o => `- ${o.label.trim()}`).join('\n') || 'None',
});
// Proceed to the next step
resolve(outputPrompt);
};
const onCancel = () => {
clearState();
reject(new Error('User cancelled the input.'));
inputs.chainAbortController.abort('User cancelled the input.');
reject();
};
// Remove the placeholder message