MessageListAiChat.java
package com.vaadin.demo.component.messages;
import java.util.List;
import com.vaadin.demo.component.messages.LLMClient.Message;
import com.vaadin.flow.component.UI;
import com.vaadin.flow.component.html.Div;
import com.vaadin.flow.component.messages.MessageInput;
import com.vaadin.flow.component.messages.MessageList;
import com.vaadin.flow.component.messages.MessageListItem;
import com.vaadin.flow.router.Route;
@Route("message-list-ai-chat")
public class MessageListAiChat extends Div {

    /**
     * Creates a message list item for the given sender role. Assistant
     * messages use the "Assistant" user name with avatar color index 2;
     * user messages use "User" with color index 1.
     */
    private MessageListItem createItem(String text, boolean assistant) {
        MessageListItem item = new MessageListItem(text,
                assistant ? "Assistant" : "User");
        item.setUserColorIndex(assistant ? 2 : 1);
        return item;
    }

    public MessageListAiChat() {
        String chatId = "1234"; // Placeholder chat identifier
        // tag::snippet[]
        MessageList list = new MessageList();
        list.setMarkdown(true);
        // end::snippet[]
        MessageInput input = new MessageInput();

        // Live region for screen reader announcements
        Div liveRegion = new Div();
        liveRegion.getElement().setAttribute("aria-live", "polite");
        liveRegion.addClassName("screen-reader-only");
        add(liveRegion);

        // Populate the list with the existing chat history.
        List<Message> history = LLMClient.getHistory(chatId);
        list.setItems(history.stream()
                .map(message -> createItem(message.text(), message.assistant()))
                .toList());

        input.addSubmitListener(e -> {
            String userInput = e.getValue();
            // Add the user message to the list
            list.addItem(createItem(userInput, false));
            // Add the Assistant message to the list; tokens are appended to
            // it as they stream in.
            MessageListItem newAssistantMessage = createItem("", true);
            list.addItem(newAssistantMessage);
            // Announce that AI is processing
            liveRegion.setText("AI is processing the prompt");
            int messageListItemCount = list.getItems().size();
            // Capture the UI here, on the UI thread, while the component is
            // guaranteed to be attached. The stream callbacks below run on a
            // background thread, where an unchecked getUI().get() could throw
            // if the component has been detached in the meantime.
            UI ui = getUI().orElseThrow(() -> new IllegalStateException(
                    "Component is not attached to a UI"));
            LLMClient.stream(chatId, userInput).subscribe(token -> {
                ui.access(() -> {
                    // Update the Assistant message with the response
                    // Make sure to have server push enabled!
                    // See
                    // https://vaadin.com/docs/latest/flow/advanced/server-push
                    newAssistantMessage.appendText(token);
                });
            }, error -> {
                // Surface the failure instead of swallowing it silently
                ui.access(() -> liveRegion.setText("AI request failed"));
            }, () -> {
                ui.access(() -> {
                    if (messageListItemCount != list.getItems().size()) {
                        // Another message is still being processed
                        return;
                    }
                    // Announce that a new message is available
                    liveRegion.setText("New message available");
                });
            });
        });
        add(list, input);
    }
}
screen-reader-only.css
/*
 * Visually hides an element while keeping it readable by screen readers
 * (the standard "visually hidden" / "sr-only" technique): the element is
 * collapsed to a 1px clipped box taken out of the layout flow, so assistive
 * technology still announces its text content.
 */
.screen-reader-only {
border-width: 0;
clip: rect(0, 0, 0, 0);
height: 1px;
margin: -1px; /* cancel out the 1px box so no space is reserved */
overflow: hidden;
padding: 0;
position: absolute;
white-space: nowrap; /* prevent wrapping from changing announced text order */
width: 1px;
}
screen-reader-only.css
message-list-ai-chat.tsx
import { useCallback, useEffect } from 'react';
import { useSignal } from '@vaadin/hilla-react-signals';
import { MessageInput, type MessageInputSubmitEvent } from '@vaadin/react-components';
import { MessageList, type MessageListItem } from '@vaadin/react-components/MessageList.js';
import LLMChatService from 'Frontend/demo/services/LLMChatService.js';
// Builds a MessageListItem for the given sender role: assistant messages get
// the 'Assistant' name and avatar color index 2, user messages get 'User'
// and color index 1.
function createItem(text: string, assistant = false): MessageListItem {
  const userName = assistant ? 'Assistant' : 'User';
  const userColorIndex = assistant ? 2 : 1;
  return { text, userName, userColorIndex };
}
// Demo chat view: streams LLM responses into a MessageList and announces
// progress to screen readers through an aria-live region.
function Example() {
  const messageListItems = useSignal<MessageListItem[]>([]);
  // Text rendered into the aria-live region below.
  const announcement = useSignal('');
  const chatId = '1234'; // Placeholder chat identifier

  // Load the existing chat history once on mount.
  useEffect(() => {
    LLMChatService.getHistory(chatId).then((messages) => {
      messageListItems.value = messages.map((message) =>
        createItem(message.text, message.assistant)
      );
    });
  }, []);

  // Appends the user message plus an empty assistant message, then streams
  // tokens from the service into the assistant message as they arrive.
  const handleChatSubmit = useCallback((e: MessageInputSubmitEvent) => {
    const userInput = e.detail.value;
    // Add the user message to the list
    messageListItems.value = [...messageListItems.value, createItem(userInput)];
    // Add the Assistant message to the list
    const newAssistantItem = createItem('', true);
    messageListItems.value = [...messageListItems.value, newAssistantItem];
    // Announce that AI is processing
    announcement.value = 'AI is processing the prompt';
    LLMChatService.stream(chatId, userInput)
      .onNext((token) => {
        // Append the token to the Assistant message
        // NOTE(review): the item is mutated in place; the array copy below
        // is what triggers the re-render.
        newAssistantItem.text += token;
        // Force the MessageList to re-render
        messageListItems.value = [...messageListItems.value];
      })
      .onComplete(() => {
        // Announce that a new message is available
        announcement.value = 'New message available';
      });
  }, []);

  return (
    <div>
      {/* Live region for screen reader announcements */}
      <div aria-live="polite" className="screen-reader-only">
        {announcement.value}
      </div>
      {/* tag::snippet[] */}
      <MessageList items={messageListItems.value} markdown />
      {/* end::snippet[] */}
      <MessageInput onSubmit={handleChatSubmit} />
    </div>
  );
}
LLMChatService.java
package com.vaadin.demo.component.messages;
import java.util.List;
import com.vaadin.demo.component.messages.LLMClient.Message;
import com.vaadin.flow.server.auth.AnonymousAllowed;
import com.vaadin.hilla.BrowserCallable;
import reactor.core.publisher.Flux;
// tag::snippet[]
/**
 * Hilla browser-callable service exposing the LLM chat backend to the
 * client-side examples. Anonymous access is allowed because this is a demo.
 */
@BrowserCallable
@AnonymousAllowed
public class LLMChatService {
    /**
     * Returns the stored message history for the given chat.
     *
     * @param chatId identifier of the chat whose history to fetch
     */
    public List<Message> getHistory(String chatId) {
        // Implement your message history retrieval logic here
        return LLMClient.getHistory(chatId);
    }

    /**
     * Streams the LLM response to the given user message, one token per
     * emitted element.
     *
     * @param chatId identifier of the chat the message belongs to
     * @param userMessage the prompt entered by the user
     * @return a reactive stream of response tokens
     */
    public Flux<String> stream(String chatId, String userMessage) {
        // Implement your message streaming logic here
        return LLMClient.stream(chatId, userMessage);
    }
}
// end::snippet[]
screen-reader-only.css
/*
 * Visually hides an element while keeping it readable by screen readers
 * (the standard "visually hidden" / "sr-only" technique): the element is
 * collapsed to a 1px clipped box taken out of the layout flow, so assistive
 * technology still announces its text content.
 */
.screen-reader-only {
border-width: 0;
clip: rect(0, 0, 0, 0);
height: 1px;
margin: -1px; /* cancel out the 1px box so no space is reserved */
overflow: hidden;
padding: 0;
position: absolute;
white-space: nowrap; /* prevent wrapping from changing announced text order */
width: 1px;
}
screen-reader-only.css
message-list-ai-chat.ts
import '@vaadin/message-list';
import '@vaadin/message-input';
import { html, LitElement } from 'lit';
import { customElement, state } from 'lit/decorators.js';
import type { MessageInputSubmitEvent } from '@vaadin/message-input';
import type { MessageListItem } from '@vaadin/message-list';
import LLMChatService from 'Frontend/demo/services/LLMChatService.js';
import { applyTheme } from 'Frontend/demo/theme';
// Creates a MessageListItem for either sender role. The assistant is shown
// as 'Assistant' with avatar color index 2; the user as 'User' with index 1.
function createItem(text: string, assistant = false): MessageListItem {
  if (assistant) {
    return { text, userName: 'Assistant', userColorIndex: 2 };
  }
  return { text, userName: 'User', userColorIndex: 1 };
}
@customElement('message-list-ai-chat')
export class Example extends LitElement {
  // Apply the demo theme to this component's render root.
  protected override createRenderRoot() {
    const root = super.createRenderRoot();
    applyTheme(root);
    return root;
  }

  // Items currently displayed in the message list.
  @state()
  private messageListItems: MessageListItem[] = [];

  // Text announced to screen readers via the aria-live region in render().
  @state()
  private announcement: string = '';

  private chatId = '1234'; // Placeholder chat identifier

  // Load the existing chat history once, after the first render.
  protected override async firstUpdated() {
    const messages = await LLMChatService.getHistory(this.chatId);
    this.messageListItems = messages.map((message) => createItem(message.text, message.assistant));
  }

  protected override render() {
    return html`
      <!-- Live region for screen reader announcements -->
      <div aria-live="polite" class="screen-reader-only">${this.announcement}</div>
      <!-- tag::snippet[] -->
      <vaadin-message-list .items="${this.messageListItems}" markdown></vaadin-message-list>
      <!-- end::snippet[] -->
      <vaadin-message-input @submit="${this.handleChatSubmit}"></vaadin-message-input>
    `;
  }

  // Appends the user message plus an empty assistant message, then streams
  // tokens from the service into the assistant message as they arrive.
  private handleChatSubmit(e: MessageInputSubmitEvent) {
    const userInput = e.detail.value;
    // Add the user message to the list
    this.messageListItems = [...this.messageListItems, createItem(userInput)];
    // Add the Assistant message to the list
    const newAssistantItem = createItem('', true);
    this.messageListItems = [...this.messageListItems, newAssistantItem];
    // Announce that AI is processing
    this.announcement = 'AI is processing the prompt';
    LLMChatService.stream(this.chatId, userInput)
      .onNext((token) => {
        // Append the token to the Assistant message
        // NOTE(review): the item is mutated in place; the array copy below
        // is what triggers the re-render.
        newAssistantItem.text += token;
        // Force the MessageList to re-render
        this.messageListItems = [...this.messageListItems];
      })
      .onComplete(() => {
        // Announce that a new message is available
        this.announcement = 'New message available';
      });
  }
}
LLMChatService.java
package com.vaadin.demo.component.messages;
import java.util.List;
import com.vaadin.demo.component.messages.LLMClient.Message;
import com.vaadin.flow.server.auth.AnonymousAllowed;
import com.vaadin.hilla.BrowserCallable;
import reactor.core.publisher.Flux;
// tag::snippet[]
/**
 * Hilla browser-callable service exposing the LLM chat backend to the
 * client-side examples. Anonymous access is allowed because this is a demo.
 */
@BrowserCallable
@AnonymousAllowed
public class LLMChatService {
    /**
     * Returns the stored message history for the given chat.
     *
     * @param chatId identifier of the chat whose history to fetch
     */
    public List<Message> getHistory(String chatId) {
        // Implement your message history retrieval logic here
        return LLMClient.getHistory(chatId);
    }

    /**
     * Streams the LLM response to the given user message, one token per
     * emitted element.
     *
     * @param chatId identifier of the chat the message belongs to
     * @param userMessage the prompt entered by the user
     * @return a reactive stream of response tokens
     */
    public Flux<String> stream(String chatId, String userMessage) {
        // Implement your message streaming logic here
        return LLMClient.stream(chatId, userMessage);
    }
}
// end::snippet[]
screen-reader-only.css
/*
 * Visually hides an element while keeping it readable by screen readers
 * (the standard "visually hidden" / "sr-only" technique): the element is
 * collapsed to a 1px clipped box taken out of the layout flow, so assistive
 * technology still announces its text content.
 */
.screen-reader-only {
border-width: 0;
clip: rect(0, 0, 0, 0);
height: 1px;
margin: -1px; /* cancel out the 1px box so no space is reserved */
overflow: hidden;
padding: 0;
position: absolute;
white-space: nowrap; /* prevent wrapping from changing announced text order */
width: 1px;
}
screen-reader-only.css