Ask AI in scene #2129

Merged · 9 commits · Oct 28, 2024
11 changes: 11 additions & 0 deletions front/src/config/i18n/de.json
@@ -1792,6 +1792,14 @@
"textPlaceholder": "Nachrichtentext",
"explanationText": "Um eine Variable in den Text einzufügen, gib \"{{\" ein. Um einen Variablenwert festzulegen, musst du zuerst das Feld \"Gerätewert abrufen\" verwenden."
},
"askAi": {
"description": "Diese Aktion sendet eine Nachricht an die KI (ChatGPT) über Gladys Plus und antwortet Ihnen mit einer Nachricht. Wenn Sie ein Kamerabild anhängen, kann die KI sehen, was auf dem Bild ist, und sogar entsprechend handeln.",
"userLabel": "Gladys wird antworten an",
"cameraLabel": "Ein Kamerabild anhängen (optional)",
"textLabel": "Meine Frage an die KI",
"textPlaceholder": "Nachrichtentext",
"explanationText": "Um eine Variable einzufügen, geben Sie '{{' ein. Achten Sie darauf, dass Sie zuvor eine Variable in einer Aktion 'Letzten Zustand abrufen' definiert haben, die vor diesem Nachrichtenblock platziert wurde."
},
"turnOnLights": {
"label": "Wähle die Lichter aus, die eingeschaltet werden sollen"
},
@@ -2018,6 +2026,9 @@
},
"music": {
"play-notification": "Auf einem Lautsprecher sprechen"
},
"ai": {
"ask": "Frage die KI"
}
},
"variables": {
11 changes: 11 additions & 0 deletions front/src/config/i18n/en.json
@@ -1792,6 +1792,14 @@
"textPlaceholder": "Message text",
"explanationText": "To inject a variable in the text, press '{{'. To set a variable value, you need to use the 'Get device value' box before this one."
},
"askAi": {
"description": "This action will send a message to the AI (ChatGPT) via Gladys Plus and will reply to you with a message. If you attach a camera image, the AI will be able to see what is on the image and even take action accordingly.",
"userLabel": "Gladys will respond to",
"cameraLabel": "Attach a camera image (optional)",
"textLabel": "My question for the AI",
"textPlaceholder": "Message text",
"explanationText": "To insert a variable, type '{{'. Be careful, you must have defined a variable beforehand in a 'Retrieve the last state' action placed before this message block."
},
"turnOnLights": {
"label": "Select the lights you want to turn on"
},
@@ -2018,6 +2026,9 @@
},
"music": {
"play-notification": "Talk on a speaker"
},
"ai": {
"ask": "Ask AI"
}
},
"variables": {
11 changes: 11 additions & 0 deletions front/src/config/i18n/fr.json
@@ -1792,6 +1792,14 @@
"textPlaceholder": "Texte du message",
"explanationText": "Pour injecter une variable, tapez '{{'. Attention, vous devez avoir défini une variable auparavant dans une action 'Récupérer le dernier état' placé avant ce bloc message."
},
"askAi": {
"description": "Cette action enverra un message à l'IA (ChatGPT) via Gladys Plus et vous répondra par message. Si vous joignez une image de caméra, l'IA pourra voir ce qu'il y a sur l'image et même faire une action en conséquence.",
"userLabel": "Gladys répondra à",
"cameraLabel": "Joindre une image de caméra (optionnel)",
"textLabel": "Ma question pour l'IA",
"textPlaceholder": "Texte du message",
"explanationText": "Pour injecter une variable, tapez '{{'. Attention, vous devez avoir défini une variable auparavant dans une action 'Récupérer le dernier état' placé avant ce bloc message."
},
"turnOnLights": {
"label": "Sélectionnez les lumières que vous souhaitez allumer"
},
@@ -2018,6 +2026,9 @@
},
"music": {
"play-notification": "Parler sur une enceinte"
},
"ai": {
"ask": "Demander à l'IA"
}
},
"variables": {
15 changes: 14 additions & 1 deletion front/src/routes/scene/edit-scene/ActionCard.jsx
@@ -31,6 +31,7 @@ import SetAlarmMode from './actions/SetAlarmMode';
import SendMqttMessage from './actions/SendMqttMessage';
import PlayNotification from './actions/PlayNotification';
import EdfTempoCondition from './actions/EdfTempoCondition';
import AskAI from './actions/AskAI';

const deleteActionFromColumn = (columnIndex, rowIndex, deleteAction) => () => {
deleteAction(columnIndex, rowIndex);
@@ -64,7 +65,8 @@ const ACTION_ICON = {
[ACTIONS.ALARM.CHECK_ALARM_MODE]: 'fe fe-bell',
[ACTIONS.ALARM.SET_ALARM_MODE]: 'fe fe-bell',
[ACTIONS.MQTT.SEND]: 'fe fe-message-square',
[ACTIONS.MUSIC.PLAY_NOTIFICATION]: 'fe fe-speaker'
[ACTIONS.MUSIC.PLAY_NOTIFICATION]: 'fe fe-speaker',
[ACTIONS.AI.ASK]: 'fe fe-cpu'
};

const ACTION_CARD_TYPE = 'ACTION_CARD_TYPE';
@@ -401,6 +403,17 @@ const ActionCard = ({ children, ...props }) => {
triggersVariables={props.triggersVariables}
/>
)}
{props.action.type === ACTIONS.AI.ASK && (
<AskAI
action={props.action}
columnIndex={props.columnIndex}
index={props.index}
updateActionProperty={props.updateActionProperty}
actionsGroupsBefore={props.actionsGroupsBefore}
variables={props.variables}
triggersVariables={props.triggersVariables}
/>
)}
</div>
</div>
</div>
149 changes: 149 additions & 0 deletions front/src/routes/scene/edit-scene/actions/AskAI.jsx
@@ -0,0 +1,149 @@
import Select from 'react-select';
import { Component } from 'preact';
import { connect } from 'unistore/preact';
import { Localizer, Text } from 'preact-i18n';

import TextWithVariablesInjected from '../../../../components/scene/TextWithVariablesInjected';

class AskAI extends Component {
getOptions = async () => {
try {
const users = await this.props.httpClient.get('/api/v1/user');
const userOptions = [];
users.forEach(user => {
userOptions.push({
label: user.firstname,
value: user.selector
});
});

const cameras = await this.props.httpClient.get('/api/v1/camera');
const cameraOptions = cameras.map(camera => ({
label: camera.name,
value: camera.selector
}));

await this.setState({ userOptions, cameraOptions });
this.refreshSelectedOptions(this.props);
return userOptions;
} catch (e) {
console.error(e);
}
};
updateText = text => {
this.props.updateActionProperty(this.props.columnIndex, this.props.index, 'text', text);
};
handleUserChange = selectedOption => {
if (selectedOption && selectedOption.value) {
this.props.updateActionProperty(this.props.columnIndex, this.props.index, 'user', selectedOption.value);
} else {
this.props.updateActionProperty(this.props.columnIndex, this.props.index, 'user', null);
}
};
handleCameraChange = selectedOption => {
if (selectedOption && selectedOption.value) {
this.props.updateActionProperty(this.props.columnIndex, this.props.index, 'camera', selectedOption.value);
} else {
this.props.updateActionProperty(this.props.columnIndex, this.props.index, 'camera', undefined);
}
};

refreshSelectedOptions = nextProps => {
let selectedUserOption = '';
if (nextProps.action.user && this.state.userOptions) {
const userOption = this.state.userOptions.find(option => option.value === nextProps.action.user);

if (userOption) {
selectedUserOption = userOption;
}
}
let selectedCameraOption = '';
if (nextProps.action.camera && this.state.cameraOptions) {
const cameraOption = this.state.cameraOptions.find(option => option.value === nextProps.action.camera);

if (cameraOption) {
selectedCameraOption = cameraOption;
}
}
this.setState({ selectedUserOption, selectedCameraOption });
};
constructor(props) {
super(props);
this.props = props;
this.state = {
selectedOption: ''
};
}
componentDidMount() {
this.getOptions();
}
componentWillReceiveProps(nextProps) {
this.refreshSelectedOptions(nextProps);
}
render(props, { selectedUserOption, userOptions, selectedCameraOption, cameraOptions }) {
return (
<div>
<p>
<Text id="editScene.actionsCard.askAi.description" />
</p>
<div class="form-group">
<label class="form-label">
<Text id="editScene.actionsCard.askAi.textLabel" />{' '}
<span class="form-required">
<Text id="global.requiredField" />
</span>
</label>
<div class="mb-1 small">
<Text id="editScene.actionsCard.askAi.explanationText" />
</div>
<div className="tags-input">
<Localizer>
<TextWithVariablesInjected
text={props.action.text}
triggersVariables={props.triggersVariables}
actionsGroupsBefore={props.actionsGroupsBefore}
variables={props.variables}
updateText={this.updateText}
placeholder={<Text id="editScene.actionsCard.askAi.textLabel" />}
/>
</Localizer>
</div>
</div>
<div class="form-group">
<label class="form-label">
<Text id="editScene.actionsCard.askAi.userLabel" />
<span class="form-required">
<Text id="global.requiredField" />
</span>
</label>
<Select
styles={{
// Fixes the overlapping problem of the component
menu: provided => ({ ...provided, zIndex: 2 })
}}
options={userOptions}
value={selectedUserOption}
onChange={this.handleUserChange}
/>
</div>
<div class="form-group">
<label className="form-label">
<Text id="editScene.actionsCard.askAi.cameraLabel" />
</label>
<Select
styles={{
// Fixes the overlapping problem of the component
menu: provided => ({ ...provided, zIndex: 2 })
}}
options={cameraOptions}
value={selectedCameraOption}
onChange={this.handleCameraChange}
isClearable
/>
</div>
</div>
);
}
}

export default connect('httpClient', {})(AskAI);
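For reference, getOptions above reads only a handful of fields from the two endpoints it calls. A minimal sketch of the response shapes it assumes, with illustrative values (not taken from a real installation):

// Sketch of the responses AskAI consumes (illustrative values).
// GET /api/v1/user returns an array of users; only firstname and selector are read here.
const users = [{ firstname: 'John', selector: 'john' }];
// GET /api/v1/camera returns an array of cameras; only name and selector are read here.
const cameras = [{ name: 'Front door', selector: 'front-door-camera' }];
// Both lists end up as react-select options, equivalent to what getOptions builds above:
const userOptions = users.map(user => ({ label: user.firstname, value: user.selector }));
const cameraOptions = cameras.map(camera => ({ label: camera.name, value: camera.selector }));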
@@ -33,7 +33,8 @@ const ACTION_LIST = [
ACTIONS.ALARM.CHECK_ALARM_MODE,
ACTIONS.ALARM.SET_ALARM_MODE,
ACTIONS.MQTT.SEND,
ACTIONS.MUSIC.PLAY_NOTIFICATION
ACTIONS.MUSIC.PLAY_NOTIFICATION,
ACTIONS.AI.ASK
];

const TRANSLATIONS = ACTION_LIST.reduce((acc, action) => {
6 changes: 5 additions & 1 deletion server/lib/gateway/gateway.forwardMessageToOpenAI.js
@@ -1,3 +1,4 @@
const logger = require('../../utils/logger');
const { Error429 } = require('../../utils/httpErrors');

const intentTranslation = {
@@ -17,16 +18,18 @@ const disableOpenAiFirstReply = new Set(['GET_TEMPERATURE', 'GET_HUMIDITY']);
* @description Handle a new message sent by a user to Gladys.
* @param {object} request - A request sent.
* @param {object} request.message - A message sent by a user.
* @param {object} request.image - An image to send.
* @param {Array} request.previousQuestions - List of previous messages.
* @param {object} request.context - Context of messages (user, etc...).
* @returns {Promise} Return classification.
* @example
* forwardMessageToOpenAI(request);
*/
async function forwardMessageToOpenAI({ message, previousQuestions, context }) {
async function forwardMessageToOpenAI({ message, image, previousQuestions, context }) {
try {
const response = await this.openAIAsk({
question: message.text,
image,
previous_questions: previousQuestions,
});

@@ -63,6 +66,7 @@ async function forwardMessageToOpenAI({ message, previousQuestions, context }) {

return classification;
} catch (e) {
logger.warn(e);
if (e instanceof Error429) {
await this.message.replyByIntent(message, 'openai.request.tooManyRequests', context);
} else {
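To make the new parameter concrete, here is a minimal sketch of a call carrying an image, mirroring how the Ask AI scene action below invokes this function. The user id, selectors and data URL content are illustrative; gladys stands for the server instance, which reaches this function via its gateway, as the scene action does with self.gateway.

// Sketch of a call with the new optional image parameter (illustrative values).
async function askAiAboutFrontDoor(gladys) {
  await gladys.gateway.forwardMessageToOpenAI({
    message: {
      source: 'AI',
      user: { id: 'a1b2c3', language: 'en', selector: 'john' },
      language: 'en',
      text: 'Is someone at the front door?',
    },
    // Optional data URL built from a camera snapshot; omit when no camera is attached.
    image: 'data:image/jpg;base64,...',
    context: {},
  });
}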
37 changes: 32 additions & 5 deletions server/lib/message/message.reply.js
@@ -27,11 +27,38 @@ async function reply(originalMessage, text, context, file = null) {
userId: originalMessage.user.id,
payload: messageCreated,
});
// then, we get the service sending the original message
const service = this.service.getService(originalMessage.source);
// if the service exist, we send the message
if (service) {
await service.message.send(originalMessage.source_user_id, messageCreated);

// If the source is Gladys AI, then we should answer by all means available
if (originalMessage.source === 'AI') {
const user = this.state.get('user', originalMessage.user.selector);
const telegramService = this.service.getService('telegram');
// if the service exists and the user has Telegram configured
if (telegramService && user.telegram_user_id) {
// we forward the message to Telegram
await telegramService.message.send(user.telegram_user_id, messageCreated);
}
// We send the message to the nextcloud talk service
const nextcloudTalkService = this.service.getService('nextcloud-talk');
// if the service exists
if (nextcloudTalkService) {
const nextcloudTalkToken = await this.variable.getValue(
'NEXTCLOUD_TALK_TOKEN',
nextcloudTalkService.message.serviceId,
user.id,
);
// if the user has Nextcloud Talk configured
if (nextcloudTalkToken) {
// we forward the message to Nextcloud Talk
await nextcloudTalkService.message.send(nextcloudTalkToken, messageCreated);
}
}
} else {
// then, we get the service sending the original message
const service = this.service.getService(originalMessage.source);
// if the service exists, we send the message
if (service) {
await service.message.send(originalMessage.source_user_id, messageCreated);
}
}
} catch (e) {
logger.warn(`Unable to reply to user`);
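For clarity, a sketch of the message object that takes the new branch: the Ask AI scene action (next file) builds it with source set to 'AI', so reply() skips the source-service lookup and fans the answer out instead. Values are illustrative.

// Sketch of an AI-sourced message entering reply() (illustrative values).
const originalMessage = {
  source: 'AI', // selects the new fan-out branch above
  user: { id: 'a1b2c3', language: 'en', selector: 'john' },
  language: 'en',
  text: 'Is someone at the front door?',
};
// The created reply is first emitted to the user (top of the hunk), then forwarded
// to Telegram when user.telegram_user_id is set, and to Nextcloud Talk when a
// NEXTCLOUD_TALK_TOKEN variable exists for that user.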
24 changes: 24 additions & 0 deletions server/lib/scene/scene.actions.js
@@ -245,6 +245,30 @@
const image = await self.device.camera.getLiveImage(action.camera);
await self.message.sendToUser(action.user, textWithVariables, image);
},
[ACTIONS.AI.ASK]: async (self, action, scope) => {
const textWithVariables = Handlebars.compile(action.text)(scope);
let image;
if (action.camera) {
image = await self.device.camera.getLiveImage(action.camera);
image = `data:${image}`;
}
const user = self.stateManager.get('user', action.user);
const message = {
source: 'AI',
user: {
id: user.id,
language: user.language,
selector: user.selector,
},
language: user.language,
text: textWithVariables,
};
await self.gateway.forwardMessageToOpenAI({
message,
image,
context: {},
});
},
[ACTIONS.DEVICE.GET_VALUE]: async (self, action, scope, columnIndex, rowIndex) => {
const deviceFeature = self.stateManager.get('deviceFeature', action.device_feature);
set(
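As a usage illustration, an Ask AI action stored in a scene could look roughly like the sketch below. The exact string constant behind ACTIONS.AI.ASK is not visible in this diff, so 'ai.ask' is an assumption, and the selectors are placeholders; camera is optional, and text is compiled with Handlebars against the scene scope before being sent.

// Hypothetical Ask AI action as stored in a scene (illustrative values).
const action = {
  type: 'ai.ask', // assumed value of ACTIONS.AI.ASK, not shown in this diff
  text: 'Is someone at the front door?', // compiled with Handlebars against the scene scope
  user: 'john', // selector of the user the AI will answer to
  camera: 'front-door-camera', // optional: attach a live camera image
};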