fix(nx-dev): ai chat ui improvements (#18560)

Katerina Skroumpelou 2023-08-10 15:05:54 +03:00 committed by GitHub
parent da2ca3a2de
commit aa9eaa7c0c
4 changed files with 49 additions and 21 deletions

View File

@@ -137,9 +137,7 @@ export async function nxDevDataAccessAi(
   // Note: this is experimental. I think it should work
   // mainly because we're testing previous response + query.
   if (!pageSections || pageSections.length === 0) {
-    throw new UserError(
-      'Nothing relevant found in the Nx documentation! Please try another query.'
-    );
+    throw new UserError('No results found.', { no_results: true });
   }
   const tokenizer = new GPT3Tokenizer({ type: 'gpt3' });
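With this change, an empty vector-search result no longer throws a long user-facing sentence; it throws a short `UserError` whose `data` payload carries a machine-readable `no_results` flag that the chat UI can branch on. A minimal sketch of a caller distinguishing that case (the import path and helper name are assumptions, not part of the diff):

```ts
import { UserError } from './utils'; // path assumed for illustration

// Hypothetical helper: true when the AI endpoint found no matching docs.
export function isNoResultsError(err: unknown): boolean {
  return err instanceof UserError && err.data['no_results'] === true;
}

// Usage (illustrative):
//   try { await nxDevDataAccessAi(...); }
//   catch (err) {
//     if (isNoResultsError(err)) { /* show the friendly "no results" notice */ }
//     else { /* show a generic error message */ }
//   }
```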

View File

@@ -106,12 +106,18 @@ export function checkEnvVariables(
   }
 }
 export class ApplicationError extends Error {
+  public type: string = 'application_error';
   constructor(message: string, public data: Record<string, any> = {}) {
     super(message);
   }
 }
-export class UserError extends ApplicationError {}
+export class UserError extends ApplicationError {
+  public override type: string = 'user_error';
+  constructor(message: string, data: Record<string, any> = {}) {
+    super(message, data);
+  }
+}
 export function initializeChat(
   chatFullHistory: ChatItem[],
@@ -128,7 +134,7 @@ export function initializeChat(
     - Step 2: Deduce the diagnostic REASONING process from the premises (clues, question), relying ONLY on the information provided in the Nx Documentation. If you recognize vulgar language, answer the question if possible, and educate the user to stay polite.
     - Step 3: EVALUATE the reasoning. If the reasoning aligns with the Nx Documentation, accept it. Do not use any external knowledge or make assumptions outside of the provided Nx documentation. If the reasoning doesn't strictly align with the Nx Documentation or relies on external knowledge or inference, reject it and answer with the exact string:
     "Sorry, I don't know how to help with that. You can visit the [Nx documentation](https://nx.dev/getting-started/intro) for more info."
-    - Final Step: You can also rely on the messages we have exchanged so far.
+    - Final Step: You can also rely on the messages we have exchanged so far. Do NOT reveal the approach to the user.
     Nx Documentation:
     ${contextText}
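The new `type` field gives both error classes a string discriminator ('application_error' vs 'user_error'), so an error can still be classified after it has been serialized and sent to the client, where `instanceof` checks no longer apply. A small sketch of a client-side check under that assumption (the payload shape and function are illustrative, not part of the diff):

```ts
// Hypothetical shape of an error after JSON serialization, mirroring the
// classes in the diff above.
interface SerializedError {
  type: 'application_error' | 'user_error';
  message: string;
  data?: Record<string, any>;
}

function userFacingMessage(err: SerializedError): string {
  if (err.type === 'user_error') {
    // User errors (such as the no_results case) are safe to show in the chat.
    return err.message;
  }
  // Application errors stay generic; details belong in server logs.
  return 'Something went wrong. Please try again.';
}
```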

View File

@@ -8,7 +8,7 @@ import {
   getProcessedHistory,
   ChatItem,
 } from '@nx/nx-dev/data-access-ai';
-import { warning, infoBox } from './utils';
+import { warning, infoBox, noResults } from './utils';
 export function FeatureAi(): JSX.Element {
   const [chatHistory, setChatHistory] = useState<ChatItem[] | null>([]);
@@ -54,8 +54,8 @@ export function FeatureAi(): JSX.Element {
       sourcesMarkdown = aiResponse.sourcesMarkdown;
       setLoading(false);
-    } catch (error) {
-      setError(error as any);
+    } catch (error: any) {
+      setError(error);
       setLoading(false);
     }
     sendCustomEvent('ai_query', 'ai', 'query', undefined, {
@@ -66,9 +66,12 @@ export function FeatureAi(): JSX.Element {
       sourcesMarkdown.length === 0
         ? ''
         : `
-{% callout type="info" title="Sources" %}
-${sourcesMarkdown}
-{% /callout %}`;
+\n
+{% callout type="info" title="Sources" %}
+${sourcesMarkdown}
+{% /callout %}
+\n
+`;
     setFinalResult(
       renderMarkdown(completeText + sourcesMd, { filePath: '' }).node
@@ -124,7 +127,7 @@ export function FeatureAi(): JSX.Element {
   function renderChatHistory(history: ChatItem[]) {
     return (
-      <div className="mx-auto bg-white p-6 rounded shadow">
+      <div className="mx-auto bg-white p-6 rounded shadow flex flex-col">
         {history.length > 30 && (
           <div>
             You've reached the maximum message history limit. Some previous
@@ -149,16 +152,26 @@ export function FeatureAi(): JSX.Element {
             ref={index === historyLength - 1 ? lastMessageRef : null}
             className={` p-2 m-2 rounded-lg ${
               chatItem.role === 'assistant' ? 'bg-blue-200' : 'bg-gray-300'
+            } ${chatItem.role === 'user' ? 'text-right' : ''} ${
+              chatItem.role === 'user' ? 'self-end' : ''
             }`}
           >
+            {chatItem.role === 'assistant' && (
              <strong className="text-gray-700">
-               {chatItem.role === 'user' ? 'you' : 'nx assistant'}:
+               nx assistant{' '}
+               <span role="img" aria-label="Narwhal">
+                 🐳
+               </span>
              </strong>
+            )}
+            {((chatItem.role === 'assistant' && !error) ||
+              chatItem.role === 'user') && (
              <div className="text-gray-600 mt-1">
                {renderMarkdown(chatItem.content, { filePath: '' }).node}
              </div>
+            )}
            {chatItem.role === 'assistant' &&
+              !error &&
              chatHistory?.length &&
              (index === chatHistory.length - 1 && loading ? null : !feedbackSent[
                index
@@ -194,8 +207,12 @@ export function FeatureAi(): JSX.Element {
               </p>
             ))}
-            {error && !loading ? (
+            {error && !loading && chatItem.role === 'assistant' ? (
+              error['data']?.['no_results'] ? (
+                noResults
+              ) : (
                <div>There was an error: {error['message']}</div>
+              )
             ) : null}
           </div>
         );
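On the rendering side, errors are now shown only inside assistant messages, and when the error carries the `no_results` flag the component substitutes the shared `noResults` node for the generic error text. Pulled out of the JSX for clarity, the branch is roughly the following (the component and prop names are illustrative):

```tsx
import { noResults } from './utils';

// Illustrative extraction of the error branch from the diff above.
function AssistantError({ error }: { error: any }): JSX.Element | null {
  if (!error) return null;
  return error['data']?.['no_results'] ? (
    <>{noResults}</>
  ) : (
    <div>There was an error: {error['message']}</div>
  );
}
```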

View File

@@ -19,3 +19,10 @@ export const infoBox = renderMarkdown(
   `,
   { filePath: '' }
 ).node;
+
+export const noResults = renderMarkdown(
+  `
+Sorry, I don't know how to help with that. You can visit the [Nx documentation](https://nx.dev/getting-started/intro) for more info.
+  `,
+  { filePath: '' }
+).node;